/*
 * NOTE(review): this chunk is a mangled extraction — the numbers fused into
 * the text (42, 43, 46, ...) are the original file's line numbers, not code.
 * Code kept byte-identical below; only comments added.
 *
 * SHA-224/256 logical functions (FIPS 180-4, sec. 4.1.2):
 *  - CH(X,Y,Z)  is the "choose" function (X & Y) ^ (~X & Z), written in the
 *    branch-free 3-op form ((Y ^ Z) & X) ^ Z.
 *  - MAJ(X,Y,Z) is the majority function, in an equivalent reduced-op form.
 *  - BSG2_0/BSG2_1 are the big-sigma rotations used in each round;
 *    SSG2_0/SSG2_1 are the small-sigma rotate/shift mixes used by the
 *    message schedule. ROTR aliases the library's 32-bit rotate-right.
 * H224[8] opens the SHA-224 initial-hash-value table; its eight constants
 * (original lines 57-61) were lost in extraction.
 */
42 #if SPH_SMALL_FOOTPRINT && !defined SPH_SMALL_FOOTPRINT_SHA2 43 #define SPH_SMALL_FOOTPRINT_SHA2 1 46 #define CH(X, Y, Z) ((((Y) ^ (Z)) & (X)) ^ (Z)) 47 #define MAJ(X, Y, Z) (((Y) & (Z)) | (((Y) | (Z)) & (X))) 49 #define ROTR SPH_ROTR32 51 #define BSG2_0(x) (ROTR(x, 2) ^ ROTR(x, 13) ^ ROTR(x, 22)) 52 #define BSG2_1(x) (ROTR(x, 6) ^ ROTR(x, 11) ^ ROTR(x, 25)) 53 #define SSG2_0(x) (ROTR(x, 7) ^ ROTR(x, 18) ^ SPH_T32((x) >> 3)) 54 #define SSG2_1(x) (ROTR(x, 17) ^ ROTR(x, 19) ^ SPH_T32((x) >> 10)) 56 static const sph_u32 H224[8] = {
/* Initial hash state for SHA-256 (FIPS 180-4, sec. 5.3.3); the eight
 * constants that filled this array (original lines 63-70) were lost in
 * extraction. */
62 static const sph_u32 H256[8] = {
/*
 * Two implementations of the SHA-256 compression function, selected by
 * SPH_SMALL_FOOTPRINT_SHA2.
 *
 * Small-footprint path:
 *  - SHA2_MEXP1 fetches one input word (its body, original lines 116-118,
 *    is missing from this chunk — presumably a big-endian load; confirm
 *    against upstream).
 *  - SHA2_MEXP2 updates the 16-word rolling schedule in place:
 *    W[t mod 16] = SSG2_1(W[t-2]) + W[t-7] + SSG2_0(W[t-15]) + W[t-16],
 *    all arithmetic truncated to 32 bits via SPH_T32.
 *  - SHA2_STEPn performs one round: t1 folds in the round constant
 *    K[pcount + pc] (the K[] table itself, original lines 79-114, is not
 *    visible here) and the schedule word; t2 is the a-side mix; only d and
 *    h are rewritten, the other state words rotate by argument position.
 *  - SHA2_ROUND_BODY runs rounds 0-15 on raw input, then a loop expands
 *    and processes rounds 16-63 in groups of 16.
 *    NOTE(review): original lines 142-153 (W[16]/t1/t2/pcount declarations
 *    and loading A..H from (r)[0..7]) are missing from this chunk.
 *
 * Unrolled path (original lines 200-606): all 64 rounds are written out
 * with the FIPS 180-4 K constants inlined (0x428A2F98 ... 0xC67178F2) and
 * the schedule kept in sixteen scalars W00..W15, each updated in place one
 * round before it is consumed. NOTE(review): original lines 204-215, which
 * load W00..W15 from `in`, are missing from this chunk.
 * Both variants end with the Davies-Meyer feed-forward: each state word is
 * added back into (r)[0..7]. The trailing fragment "616 sha2_round(" is
 * the start of the block-processing function's signature (continued on the
 * next lines).
 */
78 #if SPH_SMALL_FOOTPRINT_SHA2 115 #define SHA2_MEXP1(in, pc) do { \ 119 #define SHA2_MEXP2(in, pc) do { \ 120 W[(pc) & 0x0F] = SPH_T32(SSG2_1(W[((pc) - 2) & 0x0F]) \ 121 + W[((pc) - 7) & 0x0F] \ 122 + SSG2_0(W[((pc) - 15) & 0x0F]) + W[(pc) & 0x0F]); \ 125 #define SHA2_STEPn(n, a, b, c, d, e, f, g, h, in, pc) do { \ 127 SHA2_MEXP ## n(in, pc); \ 128 t1 = SPH_T32(h + BSG2_1(e) + CH(e, f, g) \ 129 + K[pcount + (pc)] + W[(pc) & 0x0F]); \ 130 t2 = SPH_T32(BSG2_0(a) + MAJ(a, b, c)); \ 131 d = SPH_T32(d + t1); \ 132 h = SPH_T32(t1 + t2); \ 135 #define SHA2_STEP1(a, b, c, d, e, f, g, h, in, pc) \ 136 SHA2_STEPn(1, a, b, c, d, e, f, g, h, in, pc) 137 #define SHA2_STEP2(a, b, c, d, e, f, g, h, in, pc) \ 138 SHA2_STEPn(2, a, b, c, d, e, f, g, h, in, pc) 140 #define SHA2_ROUND_BODY(in, r) do { \ 141 sph_u32 A, B, C, D, E, F, G, H; \ 154 SHA2_STEP1(A, B, C, D, E, F, G, H, in, 0); \ 155 SHA2_STEP1(H, A, B, C, D, E, F, G, in, 1); \ 156 SHA2_STEP1(G, H, A, B, C, D, E, F, in, 2); \ 157 SHA2_STEP1(F, G, H, A, B, C, D, E, in, 3); \ 158 SHA2_STEP1(E, F, G, H, A, B, C, D, in, 4); \ 159 SHA2_STEP1(D, E, F, G, H, A, B, C, in, 5); \ 160 SHA2_STEP1(C, D, E, F, G, H, A, B, in, 6); \ 161 SHA2_STEP1(B, C, D, E, F, G, H, A, in, 7); \ 162 SHA2_STEP1(A, B, C, D, E, F, G, H, in, 8); \ 163 SHA2_STEP1(H, A, B, C, D, E, F, G, in, 9); \ 164 SHA2_STEP1(G, H, A, B, C, D, E, F, in, 10); \ 165 SHA2_STEP1(F, G, H, A, B, C, D, E, in, 11); \ 166 SHA2_STEP1(E, F, G, H, A, B, C, D, in, 12); \ 167 SHA2_STEP1(D, E, F, G, H, A, B, C, in, 13); \ 168 SHA2_STEP1(C, D, E, F, G, H, A, B, in, 14); \ 169 SHA2_STEP1(B, C, D, E, F, G, H, A, in, 15); \ 170 for (pcount = 16; pcount < 64; pcount += 16) { \ 171 SHA2_STEP2(A, B, C, D, E, F, G, H, in, 0); \ 172 SHA2_STEP2(H, A, B, C, D, E, F, G, in, 1); \ 173 SHA2_STEP2(G, H, A, B, C, D, E, F, in, 2); \ 174 SHA2_STEP2(F, G, H, A, B, C, D, E, in, 3); \ 175 SHA2_STEP2(E, F, G, H, A, B, C, D, in, 4); \ 176 SHA2_STEP2(D, E, F, G, H, A, B, C, in, 5); \ 177 SHA2_STEP2(C, D, E, F, 
G, H, A, B, in, 6); \ 178 SHA2_STEP2(B, C, D, E, F, G, H, A, in, 7); \ 179 SHA2_STEP2(A, B, C, D, E, F, G, H, in, 8); \ 180 SHA2_STEP2(H, A, B, C, D, E, F, G, in, 9); \ 181 SHA2_STEP2(G, H, A, B, C, D, E, F, in, 10); \ 182 SHA2_STEP2(F, G, H, A, B, C, D, E, in, 11); \ 183 SHA2_STEP2(E, F, G, H, A, B, C, D, in, 12); \ 184 SHA2_STEP2(D, E, F, G, H, A, B, C, in, 13); \ 185 SHA2_STEP2(C, D, E, F, G, H, A, B, in, 14); \ 186 SHA2_STEP2(B, C, D, E, F, G, H, A, in, 15); \ 188 (r)[0] = SPH_T32((r)[0] + A); \ 189 (r)[1] = SPH_T32((r)[1] + B); \ 190 (r)[2] = SPH_T32((r)[2] + C); \ 191 (r)[3] = SPH_T32((r)[3] + D); \ 192 (r)[4] = SPH_T32((r)[4] + E); \ 193 (r)[5] = SPH_T32((r)[5] + F); \ 194 (r)[6] = SPH_T32((r)[6] + G); \ 195 (r)[7] = SPH_T32((r)[7] + H); \ 200 #define SHA2_ROUND_BODY(in, r) do { \ 201 sph_u32 A, B, C, D, E, F, G, H, T1, T2; \ 202 sph_u32 W00, W01, W02, W03, W04, W05, W06, W07; \ 203 sph_u32 W08, W09, W10, W11, W12, W13, W14, W15; \ 216 T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \ 217 + SPH_C32(0x428A2F98) + W00); \ 218 T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \ 219 D = SPH_T32(D + T1); \ 220 H = SPH_T32(T1 + T2); \ 222 T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \ 223 + SPH_C32(0x71374491) + W01); \ 224 T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \ 225 C = SPH_T32(C + T1); \ 226 G = SPH_T32(T1 + T2); \ 228 T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \ 229 + SPH_C32(0xB5C0FBCF) + W02); \ 230 T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \ 231 B = SPH_T32(B + T1); \ 232 F = SPH_T32(T1 + T2); \ 234 T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \ 235 + SPH_C32(0xE9B5DBA5) + W03); \ 236 T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \ 237 A = SPH_T32(A + T1); \ 238 E = SPH_T32(T1 + T2); \ 240 T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \ 241 + SPH_C32(0x3956C25B) + W04); \ 242 T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \ 243 H = SPH_T32(H + T1); \ 244 D = SPH_T32(T1 + T2); \ 246 T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \ 247 + SPH_C32(0x59F111F1) + W05); \ 248 T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); 
\ 249 G = SPH_T32(G + T1); \ 250 C = SPH_T32(T1 + T2); \ 252 T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \ 253 + SPH_C32(0x923F82A4) + W06); \ 254 T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \ 255 F = SPH_T32(F + T1); \ 256 B = SPH_T32(T1 + T2); \ 258 T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \ 259 + SPH_C32(0xAB1C5ED5) + W07); \ 260 T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \ 261 E = SPH_T32(E + T1); \ 262 A = SPH_T32(T1 + T2); \ 264 T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \ 265 + SPH_C32(0xD807AA98) + W08); \ 266 T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \ 267 D = SPH_T32(D + T1); \ 268 H = SPH_T32(T1 + T2); \ 270 T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \ 271 + SPH_C32(0x12835B01) + W09); \ 272 T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \ 273 C = SPH_T32(C + T1); \ 274 G = SPH_T32(T1 + T2); \ 276 T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \ 277 + SPH_C32(0x243185BE) + W10); \ 278 T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \ 279 B = SPH_T32(B + T1); \ 280 F = SPH_T32(T1 + T2); \ 282 T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \ 283 + SPH_C32(0x550C7DC3) + W11); \ 284 T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \ 285 A = SPH_T32(A + T1); \ 286 E = SPH_T32(T1 + T2); \ 288 T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \ 289 + SPH_C32(0x72BE5D74) + W12); \ 290 T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \ 291 H = SPH_T32(H + T1); \ 292 D = SPH_T32(T1 + T2); \ 294 T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \ 295 + SPH_C32(0x80DEB1FE) + W13); \ 296 T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \ 297 G = SPH_T32(G + T1); \ 298 C = SPH_T32(T1 + T2); \ 300 T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \ 301 + SPH_C32(0x9BDC06A7) + W14); \ 302 T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \ 303 F = SPH_T32(F + T1); \ 304 B = SPH_T32(T1 + T2); \ 306 T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \ 307 + SPH_C32(0xC19BF174) + W15); \ 308 T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \ 309 E = SPH_T32(E + T1); \ 310 A = SPH_T32(T1 + T2); \ 311 W00 = SPH_T32(SSG2_1(W14) + W09 + SSG2_0(W01) + W00); \ 312 T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, 
G) \ 313 + SPH_C32(0xE49B69C1) + W00); \ 314 T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \ 315 D = SPH_T32(D + T1); \ 316 H = SPH_T32(T1 + T2); \ 317 W01 = SPH_T32(SSG2_1(W15) + W10 + SSG2_0(W02) + W01); \ 318 T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \ 319 + SPH_C32(0xEFBE4786) + W01); \ 320 T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \ 321 C = SPH_T32(C + T1); \ 322 G = SPH_T32(T1 + T2); \ 323 W02 = SPH_T32(SSG2_1(W00) + W11 + SSG2_0(W03) + W02); \ 324 T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \ 325 + SPH_C32(0x0FC19DC6) + W02); \ 326 T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \ 327 B = SPH_T32(B + T1); \ 328 F = SPH_T32(T1 + T2); \ 329 W03 = SPH_T32(SSG2_1(W01) + W12 + SSG2_0(W04) + W03); \ 330 T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \ 331 + SPH_C32(0x240CA1CC) + W03); \ 332 T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \ 333 A = SPH_T32(A + T1); \ 334 E = SPH_T32(T1 + T2); \ 335 W04 = SPH_T32(SSG2_1(W02) + W13 + SSG2_0(W05) + W04); \ 336 T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \ 337 + SPH_C32(0x2DE92C6F) + W04); \ 338 T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \ 339 H = SPH_T32(H + T1); \ 340 D = SPH_T32(T1 + T2); \ 341 W05 = SPH_T32(SSG2_1(W03) + W14 + SSG2_0(W06) + W05); \ 342 T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \ 343 + SPH_C32(0x4A7484AA) + W05); \ 344 T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \ 345 G = SPH_T32(G + T1); \ 346 C = SPH_T32(T1 + T2); \ 347 W06 = SPH_T32(SSG2_1(W04) + W15 + SSG2_0(W07) + W06); \ 348 T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \ 349 + SPH_C32(0x5CB0A9DC) + W06); \ 350 T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \ 351 F = SPH_T32(F + T1); \ 352 B = SPH_T32(T1 + T2); \ 353 W07 = SPH_T32(SSG2_1(W05) + W00 + SSG2_0(W08) + W07); \ 354 T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \ 355 + SPH_C32(0x76F988DA) + W07); \ 356 T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \ 357 E = SPH_T32(E + T1); \ 358 A = SPH_T32(T1 + T2); \ 359 W08 = SPH_T32(SSG2_1(W06) + W01 + SSG2_0(W09) + W08); \ 360 T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \ 361 + SPH_C32(0x983E5152) + W08); \ 362 T2 
= SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \ 363 D = SPH_T32(D + T1); \ 364 H = SPH_T32(T1 + T2); \ 365 W09 = SPH_T32(SSG2_1(W07) + W02 + SSG2_0(W10) + W09); \ 366 T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \ 367 + SPH_C32(0xA831C66D) + W09); \ 368 T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \ 369 C = SPH_T32(C + T1); \ 370 G = SPH_T32(T1 + T2); \ 371 W10 = SPH_T32(SSG2_1(W08) + W03 + SSG2_0(W11) + W10); \ 372 T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \ 373 + SPH_C32(0xB00327C8) + W10); \ 374 T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \ 375 B = SPH_T32(B + T1); \ 376 F = SPH_T32(T1 + T2); \ 377 W11 = SPH_T32(SSG2_1(W09) + W04 + SSG2_0(W12) + W11); \ 378 T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \ 379 + SPH_C32(0xBF597FC7) + W11); \ 380 T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \ 381 A = SPH_T32(A + T1); \ 382 E = SPH_T32(T1 + T2); \ 383 W12 = SPH_T32(SSG2_1(W10) + W05 + SSG2_0(W13) + W12); \ 384 T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \ 385 + SPH_C32(0xC6E00BF3) + W12); \ 386 T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \ 387 H = SPH_T32(H + T1); \ 388 D = SPH_T32(T1 + T2); \ 389 W13 = SPH_T32(SSG2_1(W11) + W06 + SSG2_0(W14) + W13); \ 390 T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \ 391 + SPH_C32(0xD5A79147) + W13); \ 392 T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \ 393 G = SPH_T32(G + T1); \ 394 C = SPH_T32(T1 + T2); \ 395 W14 = SPH_T32(SSG2_1(W12) + W07 + SSG2_0(W15) + W14); \ 396 T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \ 397 + SPH_C32(0x06CA6351) + W14); \ 398 T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \ 399 F = SPH_T32(F + T1); \ 400 B = SPH_T32(T1 + T2); \ 401 W15 = SPH_T32(SSG2_1(W13) + W08 + SSG2_0(W00) + W15); \ 402 T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \ 403 + SPH_C32(0x14292967) + W15); \ 404 T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \ 405 E = SPH_T32(E + T1); \ 406 A = SPH_T32(T1 + T2); \ 407 W00 = SPH_T32(SSG2_1(W14) + W09 + SSG2_0(W01) + W00); \ 408 T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \ 409 + SPH_C32(0x27B70A85) + W00); \ 410 T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \ 411 D = 
SPH_T32(D + T1); \ 412 H = SPH_T32(T1 + T2); \ 413 W01 = SPH_T32(SSG2_1(W15) + W10 + SSG2_0(W02) + W01); \ 414 T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \ 415 + SPH_C32(0x2E1B2138) + W01); \ 416 T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \ 417 C = SPH_T32(C + T1); \ 418 G = SPH_T32(T1 + T2); \ 419 W02 = SPH_T32(SSG2_1(W00) + W11 + SSG2_0(W03) + W02); \ 420 T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \ 421 + SPH_C32(0x4D2C6DFC) + W02); \ 422 T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \ 423 B = SPH_T32(B + T1); \ 424 F = SPH_T32(T1 + T2); \ 425 W03 = SPH_T32(SSG2_1(W01) + W12 + SSG2_0(W04) + W03); \ 426 T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \ 427 + SPH_C32(0x53380D13) + W03); \ 428 T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \ 429 A = SPH_T32(A + T1); \ 430 E = SPH_T32(T1 + T2); \ 431 W04 = SPH_T32(SSG2_1(W02) + W13 + SSG2_0(W05) + W04); \ 432 T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \ 433 + SPH_C32(0x650A7354) + W04); \ 434 T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \ 435 H = SPH_T32(H + T1); \ 436 D = SPH_T32(T1 + T2); \ 437 W05 = SPH_T32(SSG2_1(W03) + W14 + SSG2_0(W06) + W05); \ 438 T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \ 439 + SPH_C32(0x766A0ABB) + W05); \ 440 T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \ 441 G = SPH_T32(G + T1); \ 442 C = SPH_T32(T1 + T2); \ 443 W06 = SPH_T32(SSG2_1(W04) + W15 + SSG2_0(W07) + W06); \ 444 T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \ 445 + SPH_C32(0x81C2C92E) + W06); \ 446 T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \ 447 F = SPH_T32(F + T1); \ 448 B = SPH_T32(T1 + T2); \ 449 W07 = SPH_T32(SSG2_1(W05) + W00 + SSG2_0(W08) + W07); \ 450 T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \ 451 + SPH_C32(0x92722C85) + W07); \ 452 T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \ 453 E = SPH_T32(E + T1); \ 454 A = SPH_T32(T1 + T2); \ 455 W08 = SPH_T32(SSG2_1(W06) + W01 + SSG2_0(W09) + W08); \ 456 T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \ 457 + SPH_C32(0xA2BFE8A1) + W08); \ 458 T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \ 459 D = SPH_T32(D + T1); \ 460 H = SPH_T32(T1 + T2); \ 
461 W09 = SPH_T32(SSG2_1(W07) + W02 + SSG2_0(W10) + W09); \ 462 T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \ 463 + SPH_C32(0xA81A664B) + W09); \ 464 T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \ 465 C = SPH_T32(C + T1); \ 466 G = SPH_T32(T1 + T2); \ 467 W10 = SPH_T32(SSG2_1(W08) + W03 + SSG2_0(W11) + W10); \ 468 T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \ 469 + SPH_C32(0xC24B8B70) + W10); \ 470 T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \ 471 B = SPH_T32(B + T1); \ 472 F = SPH_T32(T1 + T2); \ 473 W11 = SPH_T32(SSG2_1(W09) + W04 + SSG2_0(W12) + W11); \ 474 T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \ 475 + SPH_C32(0xC76C51A3) + W11); \ 476 T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \ 477 A = SPH_T32(A + T1); \ 478 E = SPH_T32(T1 + T2); \ 479 W12 = SPH_T32(SSG2_1(W10) + W05 + SSG2_0(W13) + W12); \ 480 T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \ 481 + SPH_C32(0xD192E819) + W12); \ 482 T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \ 483 H = SPH_T32(H + T1); \ 484 D = SPH_T32(T1 + T2); \ 485 W13 = SPH_T32(SSG2_1(W11) + W06 + SSG2_0(W14) + W13); \ 486 T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \ 487 + SPH_C32(0xD6990624) + W13); \ 488 T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \ 489 G = SPH_T32(G + T1); \ 490 C = SPH_T32(T1 + T2); \ 491 W14 = SPH_T32(SSG2_1(W12) + W07 + SSG2_0(W15) + W14); \ 492 T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \ 493 + SPH_C32(0xF40E3585) + W14); \ 494 T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \ 495 F = SPH_T32(F + T1); \ 496 B = SPH_T32(T1 + T2); \ 497 W15 = SPH_T32(SSG2_1(W13) + W08 + SSG2_0(W00) + W15); \ 498 T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \ 499 + SPH_C32(0x106AA070) + W15); \ 500 T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \ 501 E = SPH_T32(E + T1); \ 502 A = SPH_T32(T1 + T2); \ 503 W00 = SPH_T32(SSG2_1(W14) + W09 + SSG2_0(W01) + W00); \ 504 T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \ 505 + SPH_C32(0x19A4C116) + W00); \ 506 T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \ 507 D = SPH_T32(D + T1); \ 508 H = SPH_T32(T1 + T2); \ 509 W01 = SPH_T32(SSG2_1(W15) + W10 + 
SSG2_0(W02) + W01); \ 510 T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \ 511 + SPH_C32(0x1E376C08) + W01); \ 512 T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \ 513 C = SPH_T32(C + T1); \ 514 G = SPH_T32(T1 + T2); \ 515 W02 = SPH_T32(SSG2_1(W00) + W11 + SSG2_0(W03) + W02); \ 516 T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \ 517 + SPH_C32(0x2748774C) + W02); \ 518 T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \ 519 B = SPH_T32(B + T1); \ 520 F = SPH_T32(T1 + T2); \ 521 W03 = SPH_T32(SSG2_1(W01) + W12 + SSG2_0(W04) + W03); \ 522 T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \ 523 + SPH_C32(0x34B0BCB5) + W03); \ 524 T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \ 525 A = SPH_T32(A + T1); \ 526 E = SPH_T32(T1 + T2); \ 527 W04 = SPH_T32(SSG2_1(W02) + W13 + SSG2_0(W05) + W04); \ 528 T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \ 529 + SPH_C32(0x391C0CB3) + W04); \ 530 T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \ 531 H = SPH_T32(H + T1); \ 532 D = SPH_T32(T1 + T2); \ 533 W05 = SPH_T32(SSG2_1(W03) + W14 + SSG2_0(W06) + W05); \ 534 T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \ 535 + SPH_C32(0x4ED8AA4A) + W05); \ 536 T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \ 537 G = SPH_T32(G + T1); \ 538 C = SPH_T32(T1 + T2); \ 539 W06 = SPH_T32(SSG2_1(W04) + W15 + SSG2_0(W07) + W06); \ 540 T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \ 541 + SPH_C32(0x5B9CCA4F) + W06); \ 542 T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \ 543 F = SPH_T32(F + T1); \ 544 B = SPH_T32(T1 + T2); \ 545 W07 = SPH_T32(SSG2_1(W05) + W00 + SSG2_0(W08) + W07); \ 546 T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \ 547 + SPH_C32(0x682E6FF3) + W07); \ 548 T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \ 549 E = SPH_T32(E + T1); \ 550 A = SPH_T32(T1 + T2); \ 551 W08 = SPH_T32(SSG2_1(W06) + W01 + SSG2_0(W09) + W08); \ 552 T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \ 553 + SPH_C32(0x748F82EE) + W08); \ 554 T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \ 555 D = SPH_T32(D + T1); \ 556 H = SPH_T32(T1 + T2); \ 557 W09 = SPH_T32(SSG2_1(W07) + W02 + SSG2_0(W10) + W09); \ 558 T1 = SPH_T32(G + 
BSG2_1(D) + CH(D, E, F) \ 559 + SPH_C32(0x78A5636F) + W09); \ 560 T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \ 561 C = SPH_T32(C + T1); \ 562 G = SPH_T32(T1 + T2); \ 563 W10 = SPH_T32(SSG2_1(W08) + W03 + SSG2_0(W11) + W10); \ 564 T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \ 565 + SPH_C32(0x84C87814) + W10); \ 566 T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \ 567 B = SPH_T32(B + T1); \ 568 F = SPH_T32(T1 + T2); \ 569 W11 = SPH_T32(SSG2_1(W09) + W04 + SSG2_0(W12) + W11); \ 570 T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \ 571 + SPH_C32(0x8CC70208) + W11); \ 572 T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \ 573 A = SPH_T32(A + T1); \ 574 E = SPH_T32(T1 + T2); \ 575 W12 = SPH_T32(SSG2_1(W10) + W05 + SSG2_0(W13) + W12); \ 576 T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \ 577 + SPH_C32(0x90BEFFFA) + W12); \ 578 T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \ 579 H = SPH_T32(H + T1); \ 580 D = SPH_T32(T1 + T2); \ 581 W13 = SPH_T32(SSG2_1(W11) + W06 + SSG2_0(W14) + W13); \ 582 T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \ 583 + SPH_C32(0xA4506CEB) + W13); \ 584 T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \ 585 G = SPH_T32(G + T1); \ 586 C = SPH_T32(T1 + T2); \ 587 W14 = SPH_T32(SSG2_1(W12) + W07 + SSG2_0(W15) + W14); \ 588 T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \ 589 + SPH_C32(0xBEF9A3F7) + W14); \ 590 T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \ 591 F = SPH_T32(F + T1); \ 592 B = SPH_T32(T1 + T2); \ 593 W15 = SPH_T32(SSG2_1(W13) + W08 + SSG2_0(W00) + W15); \ 594 T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \ 595 + SPH_C32(0xC67178F2) + W15); \ 596 T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \ 597 E = SPH_T32(E + T1); \ 598 A = SPH_T32(T1 + T2); \ 599 (r)[0] = SPH_T32((r)[0] + A); \ 600 (r)[1] = SPH_T32((r)[1] + B); \ 601 (r)[2] = SPH_T32((r)[2] + C); \ 602 (r)[3] = SPH_T32((r)[3] + D); \ 603 (r)[4] = SPH_T32((r)[4] + E); \ 604 (r)[5] = SPH_T32((r)[5] + F); \ 605 (r)[6] = SPH_T32((r)[6] + G); \ 606 (r)[7] = SPH_T32((r)[7] + H); \ 616 sha2_round(
/* Continuation of sha2_round()'s signature: one 64-byte message block in
 * `data`, running state in r[0..7]. SHA2_IN decodes input word x as a
 * big-endian 32-bit value from the byte stream (aligned variant).
 * RFUN names the round function for the shared close/addbits helper code
 * (the helpers themselves, original lines 619-661, are not in this chunk).
 * The fragment "662 sha224_close(cc, dst, 7);" is from sph_sha224_close's
 * body: 7 output words = the 28-byte SHA-224 digest. */
const unsigned char *data,
sph_u32 r[8])
618 #define SHA2_IN(x) sph_dec32be_aligned(data + (4 * (x))) 653 #define RFUN sha2_round 662 sha224_close(cc, dst, 7);
/* Body fragments of the four public termination entry points; all delegate
 * to the shared sha224_close / sha224_addbits_and_close helpers, differing
 * only in the output word count: 7 words (28 bytes) for SHA-224, 8 words
 * (32 bytes) for SHA-256. The last line redefines SHA2_IN for the
 * one-shot compression-function variant (sph_sha224_comp and friends),
 * which reads already-decoded words straight from msg[]. */
670 sha224_addbits_and_close(cc, ub, n, dst, 7);
678 sha224_close(cc, dst, 8);
686 sha224_addbits_and_close(cc, ub, n, dst, 8);
694 #define SHA2_IN(x) msg[x]
void sph_sha224_close(void *cc, void *dst)
Terminate the current SHA-224 computation and output the result into the provided buffer (28 bytes).
void sph_sha224_addbits_and_close(void *cc, unsigned ub, unsigned n, void *dst)
Add a few additional bits (0 to 7) to the current computation, then terminate it and output the result into the provided buffer.
void sph_sha224_comp(const sph_u32 msg[16], sph_u32 val[8])
Apply the SHA-224 compression function on the provided data.
#define SHA2_ROUND_BODY(in, r)
void sph_sha224_init(void *cc)
Initialize a SHA-224 context.
SHA-224, SHA-256, SHA-384 and SHA-512 interface.
void sph_sha256_init(void *cc)
Initialize a SHA-256 context.
This structure is a context for SHA-224 computations: it contains the intermediate values and some data from the last entered block.
void sph_sha256_addbits_and_close(void *cc, unsigned ub, unsigned n, void *dst)
Add a few additional bits (0 to 7) to the current computation, then terminate it and output the result into the provided buffer.
void * memcpy(void *a, const void *b, size_t c)
void sph_sha256_close(void *cc, void *dst)
Terminate the current SHA-256 computation and output the result into the provided buffer (32 bytes).