Lines Matching refs:T3

605 .macro CALC_AAD_HASH GHASH_MUL AAD AADLEN T1 T2 T3 T4 T5 T6 T7 T8
621 \GHASH_MUL \T8, \T2, \T1, \T3, \T4, \T5, \T6
667 \GHASH_MUL \T7, \T2, \T1, \T3, \T4, \T5, \T6
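The two GHASH_MUL invocations above (lines 621 and 667) are the hash updates inside CALC_AAD_HASH: each 16-byte piece of the AAD is byte-reflected, XORed into the running hash, and the result is multiplied by the hash key. A minimal C sketch of that flow under those assumptions; ghash_mul_sketch() and load_reflected() are illustrative names rather than kernel symbols (ghash_mul_sketch is sketched after the GHASH_MUL_AVX lines below), and handling of a trailing partial block is omitted.

    #include <stdint.h>
    #include <stddef.h>
    #include <emmintrin.h>
    #include <tmmintrin.h>                       /* _mm_shuffle_epi8 */

    __m128i ghash_mul_sketch(__m128i gh, __m128i hk);   /* see sketch below */

    /* byte-reflect a 16-byte block so the bit-reflected GHASH math applies */
    static __m128i load_reflected(const uint8_t *p)
    {
        const __m128i bswap = _mm_set_epi8(0, 1, 2, 3, 4, 5, 6, 7,
                                           8, 9, 10, 11, 12, 13, 14, 15);
        return _mm_shuffle_epi8(_mm_loadu_si128((const __m128i *)p), bswap);
    }

    static __m128i calc_aad_hash_sketch(const uint8_t *aad, size_t aadlen, __m128i hk)
    {
        __m128i hash = _mm_setzero_si128();

        for (size_t off = 0; off + 16 <= aadlen; off += 16) {
            hash = _mm_xor_si128(hash, load_reflected(aad + off));
            hash = ghash_mul_sketch(hash, hk);   /* the GHASH_MUL calls at 621 / 667 */
        }
        return hash;
    }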
895 .macro GHASH_MUL_AVX GH HK T1 T2 T3 T4 T5
898 vpshufd $0b01001110, \HK, \T3
900 vpxor \HK, \T3, \T3 # T3 = (b1+b0)
904 vpclmulqdq $0x00, \T3, \T2, \T2 # T2 = (a1+a0)*(b1+b0)
908 vpslldq $8, \T2, \T3 # shift-L T3 2 DWs
910 vpxor \T3, \GH, \GH
915 vpslld $30, \GH, \T3 # packed left shifting << 30
918 vpxor \T3, \T2, \T2 # xor the shifted versions
929 vpsrld $2, \GH, \T3 # packed right shifting >> 2
931 vpxor \T3, \T2, \T2 # xor the shifted versions
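Taken together, the GHASH_MUL_AVX lines above are a Karatsuba carry-less multiply (three VPCLMULQDQs instead of four, using the folded operands built at lines 898-904) followed by the two-phase shift-based reduction (the vpslld/vpsrld groups around lines 915-931). A C intrinsics sketch of the same sequence, assuming the bit-reflected operand form used throughout this file, with HashKey already in its <<1 mod poly form; ghash_mul_sketch is an illustrative name, not a kernel symbol.

    /* build with e.g.  gcc -O2 -mpclmul -c ghash_mul_sketch.c */
    #include <wmmintrin.h>                       /* _mm_clmulepi64_si128 */
    #include <emmintrin.h>

    __m128i ghash_mul_sketch(__m128i gh, __m128i hk)
    {
        __m128i t1, t2, t3, t4, t5;

        /* fold high and low qwords of each operand (lines 898-900) */
        t2 = _mm_xor_si128(_mm_shuffle_epi32(gh, 0x4e), gh);    /* a1 ^ a0 */
        t3 = _mm_xor_si128(_mm_shuffle_epi32(hk, 0x4e), hk);    /* b1 ^ b0 */

        t1 = _mm_clmulepi64_si128(gh, hk, 0x11);                /* a1*b1 */
        gh = _mm_clmulepi64_si128(gh, hk, 0x00);                /* a0*b0 */
        t2 = _mm_clmulepi64_si128(t2, t3, 0x00);                /* (a1^a0)*(b1^b0), line 904 */
        t2 = _mm_xor_si128(t2, _mm_xor_si128(gh, t1));          /* a0*b1 ^ a1*b0 */

        /* spread the middle term across the 256-bit product <t1:gh> (lines 908-910) */
        gh = _mm_xor_si128(gh, _mm_slli_si128(t2, 8));
        t1 = _mm_xor_si128(t1, _mm_srli_si128(t2, 8));

        /* first reduction phase: per-dword left shifts by 31/30/25 (line 915) */
        t2 = _mm_xor_si128(_mm_slli_epi32(gh, 31),
             _mm_xor_si128(_mm_slli_epi32(gh, 30), _mm_slli_epi32(gh, 25)));
        t5 = _mm_srli_si128(t2, 4);
        gh = _mm_xor_si128(gh, _mm_slli_si128(t2, 12));

        /* second phase: per-dword right shifts by 1/2/7 (line 929), then fold in */
        t4 = _mm_xor_si128(_mm_srli_epi32(gh, 1),
             _mm_xor_si128(_mm_srli_epi32(gh, 2), _mm_srli_epi32(gh, 7)));
        gh = _mm_xor_si128(gh, _mm_xor_si128(t4, t5));
        return _mm_xor_si128(gh, t1);            /* result, like GH at the end of the macro */
    }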
941 .macro PRECOMPUTE_AVX HK T1 T2 T3 T4 T5 T6
950 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^2<<1 mod poly
956 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^3<<1 mod poly
962 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^4<<1 mod poly
968 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^5<<1 mod poly
974 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^6<<1 mod poly
980 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^7<<1 mod poly
986 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^8<<1 mod poly
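Each of the seven GHASH_MUL_AVX calls above multiplies the running value T5 by HashKey once more, so PRECOMPUTE_AVX leaves HashKey^1 through HashKey^8 (each in <<1 mod poly form) for the 8-way parallel hashing later on. A short sketch of that chain, reusing the hypothetical ghash_mul_sketch() from the previous sketch; hashkey_pow[] is an illustrative output array, not the kernel's HashKey_i/HashKey_i_k storage layout.

    #include <emmintrin.h>

    __m128i ghash_mul_sketch(__m128i gh, __m128i hk);   /* see sketch above */

    static void precompute_sketch(__m128i hk, __m128i hashkey_pow[8])
    {
        __m128i t5 = hk;

        hashkey_pow[0] = hk;                     /* HashKey^1 */
        for (int i = 1; i < 8; i++) {
            t5 = ghash_mul_sketch(t5, hk);       /* T5 = HashKey^(i+1) <<1 mod poly */
            hashkey_pow[i] = t5;
        }
    }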
1001 .macro INITIAL_BLOCKS_AVX REP num_initial_blocks T1 T2 T3 T4 T5 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8 T6 T_key ENC_DEC
1075 GHASH_MUL_AVX reg_j, \T2, \T1, \T3, \T4, \T5, \T6 # apply GHASH on num_initial_blocks blocks
1083 vmovdqa \XMM8, \T3
1236 .macro GHASH_8_ENCRYPT_8_PARALLEL_AVX REP T1 T2 T3 T4 T5 T6 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8 T7 loop_idx ENC_DEC
1342 vpclmulqdq $0x11, \T5, \T1, \T3
1343 vpxor \T3, \T4, \T4
1344 vpclmulqdq $0x00, \T5, \T1, \T3
1345 vpxor \T3, \T7, \T7
1347 vpshufd $0b01001110, \T1, \T3
1348 vpxor \T1, \T3, \T3
1350 vpclmulqdq $0x10, \T5, \T3, \T3
1351 vpxor \T3, \T6, \T6
1367 vpclmulqdq $0x11, \T5, \T1, \T3
1368 vpxor \T3, \T4, \T4
1369 vpclmulqdq $0x00, \T5, \T1, \T3
1370 vpxor \T3, \T7, \T7
1372 vpshufd $0b01001110, \T1, \T3
1373 vpxor \T1, \T3, \T3
1375 vpclmulqdq $0x10, \T5, \T3, \T3
1376 vpxor \T3, \T6, \T6
1390 vpclmulqdq $0x11, \T5, \T1, \T3
1391 vpxor \T3, \T4, \T4
1392 vpclmulqdq $0x00, \T5, \T1, \T3
1393 vpxor \T3, \T7, \T7
1395 vpshufd $0b01001110, \T1, \T3
1396 vpxor \T1, \T3, \T3
1398 vpclmulqdq $0x10, \T5, \T3, \T3
1399 vpxor \T3, \T6, \T6
1414 vpclmulqdq $0x11, \T5, \T1, \T3
1415 vpxor \T3, \T4, \T4
1416 vpclmulqdq $0x00, \T5, \T1, \T3
1417 vpxor \T3, \T7, \T7
1419 vpshufd $0b01001110, \T1, \T3
1420 vpxor \T1, \T3, \T3
1422 vpclmulqdq $0x10, \T5, \T3, \T3
1423 vpxor \T3, \T6, \T6
1437 vpclmulqdq $0x11, \T5, \T1, \T3
1438 vpxor \T3, \T4, \T4
1439 vpclmulqdq $0x00, \T5, \T1, \T3
1440 vpxor \T3, \T7, \T7
1442 vpshufd $0b01001110, \T1, \T3
1443 vpxor \T1, \T3, \T3
1445 vpclmulqdq $0x10, \T5, \T3, \T3
1446 vpxor \T3, \T6, \T6
1461 vpclmulqdq $0x11, \T5, \T1, \T3
1462 vpxor \T3, \T4, \T4
1463 vpclmulqdq $0x00, \T5, \T1, \T3
1464 vpxor \T3, \T7, \T7
1466 vpshufd $0b01001110, \T1, \T3
1467 vpxor \T1, \T3, \T3
1469 vpclmulqdq $0x10, \T5, \T3, \T3
1470 vpxor \T3, \T6, \T6
1486 vpclmulqdq $0x11, \T5, \T1, \T3
1487 vpxor \T3, \T4, \T4
1488 vpclmulqdq $0x00, \T5, \T1, \T3
1489 vpxor \T3, \T7, \T7
1491 vpshufd $0b01001110, \T1, \T3
1492 vpxor \T1, \T3, \T3
1494 vpclmulqdq $0x10, \T5, \T3, \T3
1495 vpxor \T3, \T6, \T6
1528 vaesenclast \T2, reg_j, \T3
1530 vmovdqu \T3, 16*i(arg3, %r11)
1539 vpslldq $8, \T6, \T3 # shift-L T3 2 DWs
1541 vpxor \T3, \T7, \T7
1550 vpslld $30, \T7, \T3 # packed left shifting << 30
1553 vpxor \T3, \T2, \T2 # xor the shifted versions
1575 vpsrld $2, \T7, \T3 # packed right shifting >> 2
1577 vpxor \T3, \T2, \T2 # xor the shifted versions
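The repeated four-instruction pattern above ($0x11 products into T4, $0x00 into T7, and the folded $0x10 product into T6) only accumulates partial products for the eight blocks; the combine and the single two-phase reduction at lines 1539-1577 are then done once for the whole group, which is the point of the 8-way parallel layout. A C sketch of that accumulation under the same Karatsuba scheme; the names are illustrative, blocks[0] is assumed to already have the previous hash folded in, and the per-power folded values (kept precomputed as HashKey_i_k in the file) are recomputed on the fly here for brevity. The final reduction is the same sequence shown in the GHASH_MUL_AVX sketch, so it is not repeated.

    #include <wmmintrin.h>
    #include <emmintrin.h>

    static void ghash_8_blocks_sketch(const __m128i blocks[8],  /* reflected ciphertext blocks */
                                      const __m128i hpow[8],    /* HashKey^8 .. HashKey^1 */
                                      __m128i *hi_acc,          /* T4 */
                                      __m128i *lo_acc,          /* T7 */
                                      __m128i *mid_acc)         /* T6 */
    {
        __m128i t4 = _mm_setzero_si128();
        __m128i t7 = _mm_setzero_si128();
        __m128i t6 = _mm_setzero_si128();

        for (int i = 0; i < 8; i++) {
            __m128i a = blocks[i], b = hpow[i];
            __m128i afold = _mm_xor_si128(_mm_shuffle_epi32(a, 0x4e), a);
            __m128i bfold = _mm_xor_si128(_mm_shuffle_epi32(b, 0x4e), b);

            t4 = _mm_xor_si128(t4, _mm_clmulepi64_si128(a, b, 0x11));          /* a1*b1 */
            t7 = _mm_xor_si128(t7, _mm_clmulepi64_si128(a, b, 0x00));          /* a0*b0 */
            t6 = _mm_xor_si128(t6, _mm_clmulepi64_si128(afold, bfold, 0x00));  /* Karatsuba middle */
        }
        /* Karatsuba fix-up before the shared reduction: remove hi and lo from the middle sum */
        t6 = _mm_xor_si128(t6, _mm_xor_si128(t4, t7));

        *hi_acc  = t4;
        *lo_acc  = t7;
        *mid_acc = t6;
    }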
1603 .macro GHASH_LAST_8_AVX T1 T2 T3 T4 T5 T6 T7 XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8
1614 vmovdqu HashKey_8_k(arg2), \T3
1615 vpclmulqdq $0x00, \T3, \T2, \XMM1
1628 vmovdqu HashKey_7_k(arg2), \T3
1629 vpclmulqdq $0x00, \T3, \T2, \T2
1643 vmovdqu HashKey_6_k(arg2), \T3
1644 vpclmulqdq $0x00, \T3, \T2, \T2
1658 vmovdqu HashKey_5_k(arg2), \T3
1659 vpclmulqdq $0x00, \T3, \T2, \T2
1673 vmovdqu HashKey_4_k(arg2), \T3
1674 vpclmulqdq $0x00, \T3, \T2, \T2
1688 vmovdqu HashKey_3_k(arg2), \T3
1689 vpclmulqdq $0x00, \T3, \T2, \T2
1703 vmovdqu HashKey_2_k(arg2), \T3
1704 vpclmulqdq $0x00, \T3, \T2, \T2
1718 vmovdqu HashKey_k(arg2), \T3
1719 vpclmulqdq $0x00, \T3, \T2, \T2
1738 vpslld $30, \T7, \T3 # packed left shifting << 30
1741 vpxor \T3, \T2, \T2 # xor the shifted versions
1753 vpsrld $2, \T7, \T3 # packed right shifting >> 2
1755 vpxor \T3, \T2, \T2 # xor the shifted versions
1877 .macro GHASH_MUL_AVX2 GH HK T1 T2 T3 T4 T5
1881 vpclmulqdq $0x01, \HK, \GH, \T3 # T3 = a1*b0
1883 vpxor \T3, \GH, \GH
1886 vpsrldq $8, \GH, \T3 # shift-R GH 2 DWs
1889 vpxor \T3, \T1, \T1
1894 vmovdqa POLY2(%rip), \T3
1896 vpclmulqdq $0x01, \GH, \T3, \T2
1902 vpclmulqdq $0x00, \GH, \T3, \T2
1905 vpclmulqdq $0x10, \GH, \T3, \GH
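GHASH_MUL_AVX2 drops the Karatsuba trick: it issues four VPCLMULQDQs per multiply (the a1*b0 term at line 1881 plus the a1*b1, a0*b0 and a0*b1 terms) and then reduces with two further carry-less multiplies against the POLY2 constant (lines 1894-1905) instead of the shift sequence. Below is a C sketch of the multiply half only, forming the 256-bit product <hi:lo>; the POLY2 reduction constants and shift counts are not reproduced here, and clmul_256_sketch is an illustrative name.

    #include <wmmintrin.h>
    #include <emmintrin.h>

    static void clmul_256_sketch(__m128i gh, __m128i hk, __m128i *hi, __m128i *lo)
    {
        __m128i h = _mm_clmulepi64_si128(gh, hk, 0x11);                 /* a1*b1 */
        __m128i l = _mm_clmulepi64_si128(gh, hk, 0x00);                 /* a0*b0 */
        __m128i m = _mm_xor_si128(_mm_clmulepi64_si128(gh, hk, 0x01),   /* a1*b0, line 1881 */
                                  _mm_clmulepi64_si128(gh, hk, 0x10));  /* a0*b1 */

        /* split the middle 128-bit term across the two halves of the product */
        *lo = _mm_xor_si128(l, _mm_slli_si128(m, 8));
        *hi = _mm_xor_si128(h, _mm_srli_si128(m, 8));
    }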
1915 .macro PRECOMPUTE_AVX2 HK T1 T2 T3 T4 T5 T6
1919 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^2<<1 mod poly
1922 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^3<<1 mod poly
1925 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^4<<1 mod poly
1928 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^5<<1 mod poly
1931 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^6<<1 mod poly
1934 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^7<<1 mod poly
1937 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^8<<1 mod poly
1949 .macro INITIAL_BLOCKS_AVX2 REP num_initial_blocks T1 T2 T3 T4 T5 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8 T6 T_key ENC_DEC VER
2025 GHASH_MUL_AVX2 reg_j, \T2, \T1, \T3, \T4, \T5, \T6 # apply GHASH on num_initial_blocks blocks
2033 vmovdqa \XMM8, \T3
2191 .macro GHASH_8_ENCRYPT_8_PARALLEL_AVX2 REP T1 T2 T3 T4 T5 T6 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8 T7 loop_idx ENC_DEC
2294 vpclmulqdq $0x11, \T5, \T1, \T3
2295 vpxor \T3, \T4, \T4
2297 vpclmulqdq $0x00, \T5, \T1, \T3
2298 vpxor \T3, \T7, \T7
2300 vpclmulqdq $0x01, \T5, \T1, \T3
2301 vpxor \T3, \T6, \T6
2303 vpclmulqdq $0x10, \T5, \T1, \T3
2304 vpxor \T3, \T6, \T6
2320 vpclmulqdq $0x11, \T5, \T1, \T3
2321 vpxor \T3, \T4, \T4
2323 vpclmulqdq $0x00, \T5, \T1, \T3
2324 vpxor \T3, \T7, \T7
2326 vpclmulqdq $0x01, \T5, \T1, \T3
2327 vpxor \T3, \T6, \T6
2329 vpclmulqdq $0x10, \T5, \T1, \T3
2330 vpxor \T3, \T6, \T6
2344 vpclmulqdq $0x11, \T5, \T1, \T3
2345 vpxor \T3, \T4, \T4
2347 vpclmulqdq $0x00, \T5, \T1, \T3
2348 vpxor \T3, \T7, \T7
2350 vpclmulqdq $0x01, \T5, \T1, \T3
2351 vpxor \T3, \T6, \T6
2353 vpclmulqdq $0x10, \T5, \T1, \T3
2354 vpxor \T3, \T6, \T6
2369 vpclmulqdq $0x11, \T5, \T1, \T3
2370 vpxor \T3, \T4, \T4
2372 vpclmulqdq $0x00, \T5, \T1, \T3
2373 vpxor \T3, \T7, \T7
2375 vpclmulqdq $0x01, \T5, \T1, \T3
2376 vpxor \T3, \T6, \T6
2378 vpclmulqdq $0x10, \T5, \T1, \T3
2379 vpxor \T3, \T6, \T6
2393 vpclmulqdq $0x11, \T5, \T1, \T3
2394 vpxor \T3, \T4, \T4
2396 vpclmulqdq $0x00, \T5, \T1, \T3
2397 vpxor \T3, \T7, \T7
2399 vpclmulqdq $0x01, \T5, \T1, \T3
2400 vpxor \T3, \T6, \T6
2402 vpclmulqdq $0x10, \T5, \T1, \T3
2403 vpxor \T3, \T6, \T6
2417 vpclmulqdq $0x11, \T5, \T1, \T3
2418 vpxor \T3, \T4, \T4
2420 vpclmulqdq $0x00, \T5, \T1, \T3
2421 vpxor \T3, \T7, \T7
2423 vpclmulqdq $0x01, \T5, \T1, \T3
2424 vpxor \T3, \T6, \T6
2426 vpclmulqdq $0x10, \T5, \T1, \T3
2427 vpxor \T3, \T6, \T6
2445 vpclmulqdq $0x00, \T5, \T1, \T3
2446 vpxor \T3, \T7, \T7
2448 vpclmulqdq $0x01, \T5, \T1, \T3
2449 vpxor \T3, \T6, \T6
2451 vpclmulqdq $0x10, \T5, \T1, \T3
2452 vpxor \T3, \T6, \T6
2454 vpclmulqdq $0x11, \T5, \T1, \T3
2455 vpxor \T3, \T4, \T1
2485 vaesenclast \T2, reg_j, \T3
2487 vmovdqu \T3, 16*i(arg3, %r11)
2496 vpslldq $8, \T6, \T3 # shift-L T3 2 DWs
2498 vpxor \T3, \T7, \T7
2505 vmovdqa POLY2(%rip), \T3
2507 vpclmulqdq $0x01, \T7, \T3, \T2
2525 vpclmulqdq $0x00, \T7, \T3, \T2
2528 vpclmulqdq $0x10, \T7, \T3, \T4
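The AVX2 parallel path uses the same three accumulators but in schoolbook form: $0x11 into T4, $0x00 into T7, and both $0x01 and $0x10 into T6 (e.g. lines 2294-2304), so no Karatsuba fix-up of the middle sum is needed before the shared POLY2 reduction. A minimal sketch of one block's worth of that accumulation, with illustrative names.

    #include <wmmintrin.h>
    #include <emmintrin.h>

    /* a = reflected block, b = matching hash-key power; t4/t7/t6 mirror T4/T7/T6 */
    static void ghash_accumulate_avx2_style(__m128i a, __m128i b,
                                            __m128i *t4, __m128i *t7, __m128i *t6)
    {
        *t4 = _mm_xor_si128(*t4, _mm_clmulepi64_si128(a, b, 0x11));   /* a1*b1 */
        *t7 = _mm_xor_si128(*t7, _mm_clmulepi64_si128(a, b, 0x00));   /* a0*b0 */
        *t6 = _mm_xor_si128(*t6, _mm_clmulepi64_si128(a, b, 0x01));   /* a1*b0 */
        *t6 = _mm_xor_si128(*t6, _mm_clmulepi64_si128(a, b, 0x10));   /* a0*b1 */
    }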
2553 .macro GHASH_LAST_8_AVX2 T1 T2 T3 T4 T5 T6 T7 XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8
2560 vpshufd $0b01001110, \T5, \T3
2562 vpxor \T5, \T3, \T3
2567 vpclmulqdq $0x00, \T3, \T2, \XMM1
2573 vpshufd $0b01001110, \T5, \T3
2575 vpxor \T5, \T3, \T3
2583 vpclmulqdq $0x00, \T3, \T2, \T2
2591 vpshufd $0b01001110, \T5, \T3
2593 vpxor \T5, \T3, \T3
2601 vpclmulqdq $0x00, \T3, \T2, \T2
2609 vpshufd $0b01001110, \T5, \T3
2611 vpxor \T5, \T3, \T3
2619 vpclmulqdq $0x00, \T3, \T2, \T2
2627 vpshufd $0b01001110, \T5, \T3
2629 vpxor \T5, \T3, \T3
2637 vpclmulqdq $0x00, \T3, \T2, \T2
2645 vpshufd $0b01001110, \T5, \T3
2647 vpxor \T5, \T3, \T3
2655 vpclmulqdq $0x00, \T3, \T2, \T2
2663 vpshufd $0b01001110, \T5, \T3
2665 vpxor \T5, \T3, \T3
2673 vpclmulqdq $0x00, \T3, \T2, \T2
2681 vpshufd $0b01001110, \T5, \T3
2683 vpxor \T5, \T3, \T3
2691 vpclmulqdq $0x00, \T3, \T2, \T2
2709 vmovdqa POLY2(%rip), \T3
2711 vpclmulqdq $0x01, \T7, \T3, \T2
2719 vpclmulqdq $0x00, \T7, \T3, \T2
2722 vpclmulqdq $0x10, \T7, \T3, \T4