Lines matching refs: t2
56 t1x, t2, t2x, t3, t3x, t4, t5) \
66 vinserti128 $1, t2x, t3, t2; /* ab: le0 ; cd: le1 */ \
67 vpshufb t1, t2, x0; \
70 add2_le128(t2, t0, t4, t3, t5); /* ab: le2 ; cd: le3 */ \
71 vpshufb t1, t2, x1; \
72 add2_le128(t2, t0, t4, t3, t5); \
73 vpshufb t1, t2, x2; \
74 add2_le128(t2, t0, t4, t3, t5); \
75 vpshufb t1, t2, x3; \
76 add2_le128(t2, t0, t4, t3, t5); \
77 vpshufb t1, t2, x4; \
78 add2_le128(t2, t0, t4, t3, t5); \
79 vpshufb t1, t2, x5; \
80 add2_le128(t2, t0, t4, t3, t5); \
81 vpshufb t1, t2, x6; \
82 add2_le128(t2, t0, t4, t3, t5); \
83 vpshufb t1, t2, x7; \
84 vextracti128 $1, t2, t2x; \
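
The matches at file lines 56-84 sit inside a CTR-mode IV-construction macro: t2 is a ymm register holding two 128-bit counters in little-endian form, one per lane. Each add2_le128 call advances both lanes by 2, vpshufb byte-swaps them through the mask in t1 into the big-endian counter blocks handed to the cipher, and vextracti128 finally pulls the high lane back out into t2x. As a rough guide only, here is a minimal C sketch of the per-lane arithmetic; the function name and data layout are mine, not taken from the source:

	#include <stdint.h>

	/* Sketch: one 128-bit counter kept little-endian, as in each lane
	 * of t2. ctr[0] = low 64 bits, ctr[1] = high 64 bits. */
	static void add_le128(uint64_t ctr[2], uint64_t inc)
	{
		uint64_t lo = ctr[0] + inc;

		ctr[1] += (lo < ctr[0]);	/* carry into the high half */
		ctr[0] = lo;
	}

	/* add2_le128 reads as add_le128(lane, 2) applied to both 128-bit
	 * lanes of the ymm register at once (semantic sketch only, not the
	 * SIMD implementation). */

Keeping the counters little-endian makes the increment cheap with vector adds; the vpshufb byte swap only exists because the counter block fed to the cipher is big-endian.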
119 tivx, t0, t0x, t1, t1x, t2, t2x, t3, \
128 vbroadcasti128 xts_gf128mul_and_shl1_mask_1, t2; \
134 gf128mul_x2_ble(tiv, t1, t2, t0, t3); \
138 gf128mul_x2_ble(tiv, t1, t2, t0, t3); \
142 gf128mul_x2_ble(tiv, t1, t2, t0, t3); \
146 gf128mul_x2_ble(tiv, t1, t2, t0, t3); \
150 gf128mul_x2_ble(tiv, t1, t2, t0, t3); \
154 gf128mul_x2_ble(tiv, t1, t2, t0, t3); \
158 gf128mul_x2_ble(tiv, t1, t2, t0, t3); \
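
The matches at file lines 119-158 are in an XTS tweak-construction macro: t2 receives a broadcast of xts_gf128mul_and_shl1_mask_1 and is passed as the second mask to gf128mul_x2_ble, which advances the pair of tweaks in tiv by two blocks per call, i.e. multiplies each 128-bit lane by x^2 in GF(2^128). As a semantic sketch only (my own naming, not the kernel's vectorized code), this is the standard little-endian XTS doubling applied twice:

	#include <stdint.h>

	/* Sketch: one XTS tweak kept little-endian, t[0] = low 64 bits,
	 * t[1] = high 64 bits. Multiply by x in GF(2^128) modulo
	 * x^128 + x^7 + x^2 + x + 1. */
	static void gf128mul_x_ble(uint64_t t[2])
	{
		uint64_t carry = t[1] >> 63;	/* was bit 127 set? */

		t[1] = (t[1] << 1) | (t[0] >> 63);
		t[0] = (t[0] << 1) ^ (carry ? 0x87 : 0);
	}

	/* gf128mul_x2_ble corresponds to two such steps per tweak; the
	 * assembly does both in one pass per 128-bit lane using the two
	 * xts_gf128mul_and_shl1 masks. */
	static void gf128mul_x2_ble_sketch(uint64_t t[2])
	{
		gf128mul_x_ble(t);
		gf128mul_x_ble(t);
	}

Advancing by x^2 per call is consistent with each ymm register holding two consecutive tweaks, one per lane, so a single gf128mul_x2_ble keeps both lanes in step.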