Lines Matching refs:T3

571 .macro CALC_AAD_HASH GHASH_MUL AAD AADLEN T1 T2 T3 T4 T5 T6 T7 T8
587 \GHASH_MUL \T8, \T2, \T1, \T3, \T4, \T5, \T6
635 \GHASH_MUL \T7, \T2, \T1, \T3, \T4, \T5, \T6
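The CALC_AAD_HASH matches above invoke the supplied GHASH_MUL macro once per 16-byte block of additional authenticated data: xor the block into the running hash, then multiply by the hash key. A minimal C sketch of that fold, where gf128_mul() is a hypothetical stand-in for GHASH_MUL_AVX (carry-less multiply plus reduction) and byte-order handling is omitted:

#include <emmintrin.h>
#include <string.h>

__m128i gf128_mul(__m128i a, __m128i b);   /* assumed helper: multiply + reduce in GF(2^128) */

static __m128i calc_aad_hash(const unsigned char *aad, size_t aad_len,
                             __m128i hash_key)
{
    __m128i y = _mm_setzero_si128();         /* running GHASH value */

    for (size_t off = 0; off + 16 <= aad_len; off += 16) {
        __m128i blk;
        memcpy(&blk, aad + off, 16);          /* unaligned 16-byte load */
        y = gf128_mul(_mm_xor_si128(y, blk), hash_key);
    }
    /* A trailing partial block is padded and folded separately in the asm. */
    return y;
}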
863 .macro GHASH_MUL_AVX GH HK T1 T2 T3 T4 T5
866 vpshufd $0b01001110, \HK, \T3
868 vpxor \HK , \T3, \T3 # T3 = (b1+b0)
872 vpclmulqdq $0x00, \T3, \T2, \T2 # T2 = (a1+a0)*(b1+b0)
876 vpslldq $8, \T2,\T3 # shift-L T3 2 DWs
878 vpxor \T3, \GH, \GH
883 vpslld $30, \GH, \T3 # packed left shifting << 30
886 vpxor \T3, \T2, \T2 # xor the shifted versions
897 vpsrld $2,\GH, \T3 # packed right shifting >> 2
899 vpxor \T3, \T2, \T2 # xor the shifted versions
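GHASH_MUL_AVX builds one GF(2^128) product with a Karatsuba carry-less multiply (the vpshufd/vpxor pair forms (b1+b0), and three vpclmulqdq give a1*b1, a0*b0 and (a1+a0)*(b1+b0)), then folds the 256-bit result back to 128 bits with the shift-and-xor sequence that follows. A C-intrinsics sketch of the pre-reduction Karatsuba step only; the function name and layout are illustrative, not the kernel's interface:

#include <wmmintrin.h>    /* _mm_clmulepi64_si128 (PCLMUL) */
#include <emmintrin.h>    /* SSE2 shuffles and xors        */

static void ghash_clmul_karatsuba(__m128i a, __m128i b,
                                  __m128i *hi, __m128i *lo)
{
    __m128i a1b1 = _mm_clmulepi64_si128(a, b, 0x11);      /* a1*b1 */
    __m128i a0b0 = _mm_clmulepi64_si128(a, b, 0x00);      /* a0*b0 */

    __m128i a_sw = _mm_shuffle_epi32(a, 0x4e);             /* swap 64-bit halves */
    __m128i b_sw = _mm_shuffle_epi32(b, 0x4e);
    __m128i mid  = _mm_clmulepi64_si128(_mm_xor_si128(a, a_sw),   /* (a1+a0) */
                                        _mm_xor_si128(b, b_sw),   /* (b1+b0) */
                                        0x00);

    mid = _mm_xor_si128(mid, a0b0);            /* middle term = (a1+a0)(b1+b0) */
    mid = _mm_xor_si128(mid, a1b1);            /*             + a1*b1 + a0*b0  */

    *lo = _mm_xor_si128(a0b0, _mm_slli_si128(mid, 8));     /* low half of middle  */
    *hi = _mm_xor_si128(a1b1, _mm_srli_si128(mid, 8));     /* high half of middle */
}

The vpslld/vpsrld shift-and-xor reduction that follows in the listing is what the sketch leaves out.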
909 .macro PRECOMPUTE_AVX HK T1 T2 T3 T4 T5 T6
918 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^2<<1 mod poly
924 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^3<<1 mod poly
930 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^4<<1 mod poly
936 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^5<<1 mod poly
942 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^6<<1 mod poly
948 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^7<<1 mod poly
954 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^8<<1 mod poly
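PRECOMPUTE_AVX simply chains GHASH_MUL_AVX so that T5 steps through HashKey^2 up to HashKey^8 (each kept in the <<1 mod poly form), which the eight-way parallel loop consumes later. A sketch of that structure, again with gf128_mul() as a hypothetical stand-in for the full multiply-and-reduce:

#include <emmintrin.h>

__m128i gf128_mul(__m128i a, __m128i b);   /* assumed helper: multiply + reduce in GF(2^128) */

static void precompute_hashkey_powers(__m128i h, __m128i hpow[8])
{
    hpow[0] = h;                               /* H^1 */
    for (int i = 1; i < 8; i++)
        hpow[i] = gf128_mul(hpow[i - 1], h);   /* H^(i+1) = H^i * H */
}

The asm additionally stores, for each power, the xor of its two 64-bit halves (the HashKey_i_k values), so the Karatsuba middle term needs no per-block preparation on the key side.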
969 .macro INITIAL_BLOCKS_AVX REP num_initial_blocks T1 T2 T3 T4 T5 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8 T6 T_key ENC_DEC
1043 GHASH_MUL_AVX reg_j, \T2, \T1, \T3, \T4, \T5, \T6 # apply GHASH on num_initial_blocks blocks
1051 vmovdqa \XMM8, \T3
1204 .macro GHASH_8_ENCRYPT_8_PARALLEL_AVX REP T1 T2 T3 T4 T5 T6 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8 T7 loop_idx ENC_DEC
1310 vpclmulqdq $0x11, \T5, \T1, \T3
1311 vpxor \T3, \T4, \T4
1312 vpclmulqdq $0x00, \T5, \T1, \T3
1313 vpxor \T3, \T7, \T7
1315 vpshufd $0b01001110, \T1, \T3
1316 vpxor \T1, \T3, \T3
1318 vpclmulqdq $0x10, \T5, \T3, \T3
1319 vpxor \T3, \T6, \T6
1335 vpclmulqdq $0x11, \T5, \T1, \T3
1336 vpxor \T3, \T4, \T4
1337 vpclmulqdq $0x00, \T5, \T1, \T3
1338 vpxor \T3, \T7, \T7
1340 vpshufd $0b01001110, \T1, \T3
1341 vpxor \T1, \T3, \T3
1343 vpclmulqdq $0x10, \T5, \T3, \T3
1344 vpxor \T3, \T6, \T6
1358 vpclmulqdq $0x11, \T5, \T1, \T3
1359 vpxor \T3, \T4, \T4
1360 vpclmulqdq $0x00, \T5, \T1, \T3
1361 vpxor \T3, \T7, \T7
1363 vpshufd $0b01001110, \T1, \T3
1364 vpxor \T1, \T3, \T3
1366 vpclmulqdq $0x10, \T5, \T3, \T3
1367 vpxor \T3, \T6, \T6
1382 vpclmulqdq $0x11, \T5, \T1, \T3
1383 vpxor \T3, \T4, \T4
1384 vpclmulqdq $0x00, \T5, \T1, \T3
1385 vpxor \T3, \T7, \T7
1387 vpshufd $0b01001110, \T1, \T3
1388 vpxor \T1, \T3, \T3
1390 vpclmulqdq $0x10, \T5, \T3, \T3
1391 vpxor \T3, \T6, \T6
1405 vpclmulqdq $0x11, \T5, \T1, \T3
1406 vpxor \T3, \T4, \T4
1407 vpclmulqdq $0x00, \T5, \T1, \T3
1408 vpxor \T3, \T7, \T7
1410 vpshufd $0b01001110, \T1, \T3
1411 vpxor \T1, \T3, \T3
1413 vpclmulqdq $0x10, \T5, \T3, \T3
1414 vpxor \T3, \T6, \T6
1429 vpclmulqdq $0x11, \T5, \T1, \T3
1430 vpxor \T3, \T4, \T4
1431 vpclmulqdq $0x00, \T5, \T1, \T3
1432 vpxor \T3, \T7, \T7
1434 vpshufd $0b01001110, \T1, \T3
1435 vpxor \T1, \T3, \T3
1437 vpclmulqdq $0x10, \T5, \T3, \T3
1438 vpxor \T3, \T6, \T6
1454 vpclmulqdq $0x11, \T5, \T1, \T3
1455 vpxor \T3, \T4, \T4
1456 vpclmulqdq $0x00, \T5, \T1, \T3
1457 vpxor \T3, \T7, \T7
1459 vpshufd $0b01001110, \T1, \T3
1460 vpxor \T1, \T3, \T3
1462 vpclmulqdq $0x10, \T5, \T3, \T3
1463 vpxor \T3, \T6, \T6
1496 vaesenclast \T2, reg_j, \T3
1498 vmovdqu \T3, 16*i(arg3, %r11)
1507 vpslldq $8, \T6, \T3 # shift-L T3 2 DWs
1509 vpxor \T3, \T7, \T7
1518 vpslld $30, \T7, \T3 # packed left shifting << 30
1521 vpxor \T3, \T2, \T2 # xor the shifted versions
1543 vpsrld $2, \T7, \T3 # packed right shifting >> 2
1545 vpxor \T3, \T2, \T2 # xor the shifted versions
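Throughout GHASH_8_ENCRYPT_8_PARALLEL_AVX each of the eight blocks contributes three carry-less partial products (vpclmulqdq $0x11 accumulated into T4, $0x00 into T7, and the Karatsuba middle term into T6), and the reduction is paid only once after all eight are folded in. A functional sketch of that accumulation, assuming hpow[] holds the hash-key powers and ignoring the asm's precomputed HashKey_i_k layout and register scheduling:

#include <wmmintrin.h>
#include <emmintrin.h>

static void ghash_accumulate8(const __m128i blk[8], const __m128i hpow[8],
                              __m128i *hi, __m128i *lo)
{
    __m128i acc_hi  = _mm_setzero_si128();    /* plays the role of T4 */
    __m128i acc_lo  = _mm_setzero_si128();    /* plays the role of T7 */
    __m128i acc_mid = _mm_setzero_si128();    /* plays the role of T6 */

    for (int i = 0; i < 8; i++) {
        __m128i a = blk[i], b = hpow[i];      /* block-to-power pairing is schematic */
        acc_hi = _mm_xor_si128(acc_hi, _mm_clmulepi64_si128(a, b, 0x11));
        acc_lo = _mm_xor_si128(acc_lo, _mm_clmulepi64_si128(a, b, 0x00));

        __m128i a_fold = _mm_xor_si128(a, _mm_shuffle_epi32(a, 0x4e));
        __m128i b_fold = _mm_xor_si128(b, _mm_shuffle_epi32(b, 0x4e));
        acc_mid = _mm_xor_si128(acc_mid,
                                _mm_clmulepi64_si128(a_fold, b_fold, 0x00));
    }

    /* Single deferred Karatsuba fix-up and 256-bit recombination. */
    acc_mid = _mm_xor_si128(acc_mid, _mm_xor_si128(acc_hi, acc_lo));
    *lo = _mm_xor_si128(acc_lo, _mm_slli_si128(acc_mid, 8));
    *hi = _mm_xor_si128(acc_hi, _mm_srli_si128(acc_mid, 8));
}

The vpslldq/vpslld/vpsrld lines above then fold this combined 256-bit value back to 128 bits, exactly as in the single-block GHASH_MUL_AVX.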
1571 .macro GHASH_LAST_8_AVX T1 T2 T3 T4 T5 T6 T7 XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8
1582 vmovdqu HashKey_8_k(arg2), \T3
1583 vpclmulqdq $0x00, \T3, \T2, \XMM1
1596 vmovdqu HashKey_7_k(arg2), \T3
1597 vpclmulqdq $0x00, \T3, \T2, \T2
1611 vmovdqu HashKey_6_k(arg2), \T3
1612 vpclmulqdq $0x00, \T3, \T2, \T2
1626 vmovdqu HashKey_5_k(arg2), \T3
1627 vpclmulqdq $0x00, \T3, \T2, \T2
1641 vmovdqu HashKey_4_k(arg2), \T3
1642 vpclmulqdq $0x00, \T3, \T2, \T2
1656 vmovdqu HashKey_3_k(arg2), \T3
1657 vpclmulqdq $0x00, \T3, \T2, \T2
1671 vmovdqu HashKey_2_k(arg2), \T3
1672 vpclmulqdq $0x00, \T3, \T2, \T2
1686 vmovdqu HashKey_k(arg2), \T3
1687 vpclmulqdq $0x00, \T3, \T2, \T2
1706 vpslld $30, \T7, \T3 # packed left shifting << 30
1709 vpxor \T3, \T2, \T2 # xor the shifted versions
1721 vpsrld $2, \T7, \T3 # packed right shifting >> 2
1723 vpxor \T3, \T2, \T2 # xor the shifted versions
1845 .macro GHASH_MUL_AVX2 GH HK T1 T2 T3 T4 T5
1849 vpclmulqdq $0x01,\HK,\GH,\T3 # T3 = a1*b0
1851 vpxor \T3, \GH, \GH
1854 vpsrldq $8 , \GH, \T3 # shift-R GH 2 DWs
1857 vpxor \T3, \T1, \T1
1862 vmovdqa POLY2(%rip), \T3
1864 vpclmulqdq $0x01, \GH, \T3, \T2
1870 vpclmulqdq $0x00, \GH, \T3, \T2
1873 vpclmulqdq $0x10, \GH, \T3, \GH
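GHASH_MUL_AVX2 takes a different shape: the multiply is schoolbook, with the $0x01 and $0x10 vpclmulqdq forms supplying a1*b0 and a0*b1 directly instead of the vpshufd/vpxor Karatsuba preparation, and the reduction uses further vpclmulqdq against the POLY2 constant rather than shifts. A sketch of the schoolbook multiply half, in the same illustrative style as the Karatsuba version above:

#include <wmmintrin.h>
#include <emmintrin.h>

static void ghash_clmul_schoolbook(__m128i a, __m128i b,
                                   __m128i *hi, __m128i *lo)
{
    __m128i a1b1 = _mm_clmulepi64_si128(a, b, 0x11);      /* a1*b1 */
    __m128i a0b0 = _mm_clmulepi64_si128(a, b, 0x00);      /* a0*b0 */
    __m128i mid  = _mm_xor_si128(_mm_clmulepi64_si128(a, b, 0x01),   /* a1*b0 */
                                 _mm_clmulepi64_si128(a, b, 0x10));  /* a0*b1 */

    *lo = _mm_xor_si128(a0b0, _mm_slli_si128(mid, 8));
    *hi = _mm_xor_si128(a1b1, _mm_srli_si128(mid, 8));
}

The clmul-based reduction against POLY2 (the three multiplies by T3 just above) is not sketched here.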
1883 .macro PRECOMPUTE_AVX2 HK T1 T2 T3 T4 T5 T6
1887 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^2<<1 mod poly
1890 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^3<<1 mod poly
1893 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^4<<1 mod poly
1896 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^5<<1 mod poly
1899 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^6<<1 mod poly
1902 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^7<<1 mod poly
1905 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^8<<1 mod poly
1917 .macro INITIAL_BLOCKS_AVX2 REP num_initial_blocks T1 T2 T3 T4 T5 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8 T6 T_key ENC_DEC VER
1993 GHASH_MUL_AVX2 reg_j, \T2, \T1, \T3, \T4, \T5, \T6 # apply GHASH on num_initial_blocks blocks
2001 vmovdqa \XMM8, \T3
2159 .macro GHASH_8_ENCRYPT_8_PARALLEL_AVX2 REP T1 T2 T3 T4 T5 T6 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8 T7 loop_idx ENC_DEC
2262 vpclmulqdq $0x11, \T5, \T1, \T3
2263 vpxor \T3, \T4, \T4
2265 vpclmulqdq $0x00, \T5, \T1, \T3
2266 vpxor \T3, \T7, \T7
2268 vpclmulqdq $0x01, \T5, \T1, \T3
2269 vpxor \T3, \T6, \T6
2271 vpclmulqdq $0x10, \T5, \T1, \T3
2272 vpxor \T3, \T6, \T6
2288 vpclmulqdq $0x11, \T5, \T1, \T3
2289 vpxor \T3, \T4, \T4
2291 vpclmulqdq $0x00, \T5, \T1, \T3
2292 vpxor \T3, \T7, \T7
2294 vpclmulqdq $0x01, \T5, \T1, \T3
2295 vpxor \T3, \T6, \T6
2297 vpclmulqdq $0x10, \T5, \T1, \T3
2298 vpxor \T3, \T6, \T6
2312 vpclmulqdq $0x11, \T5, \T1, \T3
2313 vpxor \T3, \T4, \T4
2315 vpclmulqdq $0x00, \T5, \T1, \T3
2316 vpxor \T3, \T7, \T7
2318 vpclmulqdq $0x01, \T5, \T1, \T3
2319 vpxor \T3, \T6, \T6
2321 vpclmulqdq $0x10, \T5, \T1, \T3
2322 vpxor \T3, \T6, \T6
2337 vpclmulqdq $0x11, \T5, \T1, \T3
2338 vpxor \T3, \T4, \T4
2340 vpclmulqdq $0x00, \T5, \T1, \T3
2341 vpxor \T3, \T7, \T7
2343 vpclmulqdq $0x01, \T5, \T1, \T3
2344 vpxor \T3, \T6, \T6
2346 vpclmulqdq $0x10, \T5, \T1, \T3
2347 vpxor \T3, \T6, \T6
2361 vpclmulqdq $0x11, \T5, \T1, \T3
2362 vpxor \T3, \T4, \T4
2364 vpclmulqdq $0x00, \T5, \T1, \T3
2365 vpxor \T3, \T7, \T7
2367 vpclmulqdq $0x01, \T5, \T1, \T3
2368 vpxor \T3, \T6, \T6
2370 vpclmulqdq $0x10, \T5, \T1, \T3
2371 vpxor \T3, \T6, \T6
2385 vpclmulqdq $0x11, \T5, \T1, \T3
2386 vpxor \T3, \T4, \T4
2388 vpclmulqdq $0x00, \T5, \T1, \T3
2389 vpxor \T3, \T7, \T7
2391 vpclmulqdq $0x01, \T5, \T1, \T3
2392 vpxor \T3, \T6, \T6
2394 vpclmulqdq $0x10, \T5, \T1, \T3
2395 vpxor \T3, \T6, \T6
2413 vpclmulqdq $0x00, \T5, \T1, \T3
2414 vpxor \T3, \T7, \T7
2416 vpclmulqdq $0x01, \T5, \T1, \T3
2417 vpxor \T3, \T6, \T6
2419 vpclmulqdq $0x10, \T5, \T1, \T3
2420 vpxor \T3, \T6, \T6
2422 vpclmulqdq $0x11, \T5, \T1, \T3
2423 vpxor \T3, \T4, \T1
2453 vaesenclast \T2, reg_j, \T3
2455 vmovdqu \T3, 16*i(arg3, %r11)
2464 vpslldq $8, \T6, \T3 # shift-L T3 2 DWs
2466 vpxor \T3, \T7, \T7
2473 vmovdqa POLY2(%rip), \T3
2475 vpclmulqdq $0x01, \T7, \T3, \T2
2493 vpclmulqdq $0x00, \T7, \T3, \T2
2496 vpclmulqdq $0x10, \T7, \T3, \T4
2521 .macro GHASH_LAST_8_AVX2 T1 T2 T3 T4 T5 T6 T7 XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8
2528 vpshufd $0b01001110, \T5, \T3
2530 vpxor \T5, \T3, \T3
2535 vpclmulqdq $0x00, \T3, \T2, \XMM1
2541 vpshufd $0b01001110, \T5, \T3
2543 vpxor \T5, \T3, \T3
2551 vpclmulqdq $0x00, \T3, \T2, \T2
2559 vpshufd $0b01001110, \T5, \T3
2561 vpxor \T5, \T3, \T3
2569 vpclmulqdq $0x00, \T3, \T2, \T2
2577 vpshufd $0b01001110, \T5, \T3
2579 vpxor \T5, \T3, \T3
2587 vpclmulqdq $0x00, \T3, \T2, \T2
2595 vpshufd $0b01001110, \T5, \T3
2597 vpxor \T5, \T3, \T3
2605 vpclmulqdq $0x00, \T3, \T2, \T2
2613 vpshufd $0b01001110, \T5, \T3
2615 vpxor \T5, \T3, \T3
2623 vpclmulqdq $0x00, \T3, \T2, \T2
2631 vpshufd $0b01001110, \T5, \T3
2633 vpxor \T5, \T3, \T3
2641 vpclmulqdq $0x00, \T3, \T2, \T2
2649 vpshufd $0b01001110, \T5, \T3
2651 vpxor \T5, \T3, \T3
2659 vpclmulqdq $0x00, \T3, \T2, \T2
2677 vmovdqa POLY2(%rip), \T3
2679 vpclmulqdq $0x01, \T7, \T3, \T2
2687 vpclmulqdq $0x00, \T7, \T3, \T2
2690 vpclmulqdq $0x10, \T7, \T3, \T4