Lines matching refs:dst (a filtered cross-reference listing: only the lines of each macro that reference dst are shown, so the numbered bodies below skip the non-matching lines in between)

 18 #define store_8way(dst, x0, x1, x2, x3, x4, x5, x6, x7) \
 19         vmovdqu x0, (0*16)(dst); \
 20         vmovdqu x1, (1*16)(dst); \
 21         vmovdqu x2, (2*16)(dst); \
 22         vmovdqu x3, (3*16)(dst); \
 23         vmovdqu x4, (4*16)(dst); \
 24         vmovdqu x5, (5*16)(dst); \
 25         vmovdqu x6, (6*16)(dst); \
 26         vmovdqu x7, (7*16)(dst);
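
store_8way is shown in full because every line references dst: it writes eight 128-bit registers to eight consecutive 16-byte slots at dst with unaligned stores (vmovdqu). A minimal C intrinsics sketch of the equivalent operation; the helper name store_8way_c is hypothetical and not part of the listed source:

    #include <immintrin.h>
    #include <stdint.h>

    /* Eight unaligned 16-byte stores to consecutive slots at dst,
     * mirroring the vmovdqu sequence above (sketch only). */
    static inline void store_8way_c(uint8_t *dst,
                                    __m128i x0, __m128i x1, __m128i x2, __m128i x3,
                                    __m128i x4, __m128i x5, __m128i x6, __m128i x7)
    {
        _mm_storeu_si128((__m128i *)(dst + 0 * 16), x0);
        _mm_storeu_si128((__m128i *)(dst + 1 * 16), x1);
        _mm_storeu_si128((__m128i *)(dst + 2 * 16), x2);
        _mm_storeu_si128((__m128i *)(dst + 3 * 16), x3);
        _mm_storeu_si128((__m128i *)(dst + 4 * 16), x4);
        _mm_storeu_si128((__m128i *)(dst + 5 * 16), x5);
        _mm_storeu_si128((__m128i *)(dst + 6 * 16), x6);
        _mm_storeu_si128((__m128i *)(dst + 7 * 16), x7);
    }
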
 28 #define store_cbc_8way(src, dst, x0, x1, x2, x3, x4, x5, x6, x7) \
 36         store_8way(dst, x0, x1, x2, x3, x4, x5, x6, x7);
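
Only the opening #define and the closing store_8way call of store_cbc_8way reference dst, so its body (file lines 29-35) is filtered out of this listing. Given the (src, dst) arguments, the natural reading is CBC decryption output handling: each decrypted block is XORed with the preceding ciphertext block still available in src, with block 0 chained against the IV (or the previous chunk's last ciphertext) by the caller, since the macro takes no IV argument. A hedged C sketch under that assumption; store_cbc_8way_c is a hypothetical name:

    #include <immintrin.h>
    #include <stdint.h>

    /* CBC decryption output step (sketch): plaintext block i is the
     * decrypted block XORed with ciphertext block i-1; block 0 is
     * assumed to be chained by the caller and is stored unchanged. */
    static inline void store_cbc_8way_c(const uint8_t *src, uint8_t *dst,
                                        __m128i x[8])
    {
        for (int i = 1; i < 8; i++)
            x[i] = _mm_xor_si128(x[i],
                    _mm_loadu_si128((const __m128i *)(src + (i - 1) * 16)));
        for (int i = 0; i < 8; i++)
            _mm_storeu_si128((__m128i *)(dst + i * 16), x[i]);
    }
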
 72 #define store_ctr_8way(src, dst, x0, x1, x2, x3, x4, x5, x6, x7) \
 81         store_8way(dst, x0, x1, x2, x3, x4, x5, x6, x7);
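
store_ctr_8way likewise shows only its dst lines here (the gap spans file lines 73-80). In CTR mode the eight registers would hold encrypted counter blocks, i.e. keystream, and the filtered-out lines presumably XOR that keystream with the corresponding input blocks from src before the final 8-way store. A sketch under that assumption, again with a hypothetical name:

    #include <immintrin.h>
    #include <stdint.h>

    /* CTR output step (sketch): output block i = keystream block i XOR
     * input block i; x[] holds the keystream on entry. */
    static inline void store_ctr_8way_c(const uint8_t *src, uint8_t *dst,
                                        __m128i x[8])
    {
        for (int i = 0; i < 8; i++) {
            __m128i p = _mm_loadu_si128((const __m128i *)(src + i * 16));
            _mm_storeu_si128((__m128i *)(dst + i * 16),
                             _mm_xor_si128(x[i], p));
        }
    }
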
 90 #define load_xts_8way(iv, src, dst, x0, x1, x2, x3, x4, x5, x6, x7, tiv, t0, \
 97         vmovdqu tiv, (0*16)(dst); \
102         vmovdqu tiv, (1*16)(dst); \
106         vmovdqu tiv, (2*16)(dst); \
110         vmovdqu tiv, (3*16)(dst); \
114         vmovdqu tiv, (4*16)(dst); \
118         vmovdqu tiv, (5*16)(dst); \
122         vmovdqu tiv, (6*16)(dst); \
126         vmovdqu tiv, (7*16)(dst); \
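
load_xts_8way contributes only its eight tweak stores to this listing: the same register tiv is written to successive 16-byte slots of dst, which suggests the macro parks each per-block XTS tweak in the output buffer as scratch space (presumably so eight live tweaks do not have to occupy extra vector registers), while the omitted lines XOR the matching src block with the current tweak and then advance the tweak by a multiply-by-x in GF(2^128) (the xts_gf128mul_and_shl1_mask parameter points that way). A scalar C sketch of one such round; gf128mul_x_ble_c and load_xts_block_c are hypothetical names, and the byte-order handling assumes a little-endian host as on x86:

    #include <immintrin.h>
    #include <stdint.h>
    #include <string.h>

    /* Advance an XTS tweak: multiply by x in GF(2^128), little-endian
     * convention, reduction polynomial x^128 + x^7 + x^2 + x + 1.
     * Assumes a little-endian host. */
    static inline void gf128mul_x_ble_c(uint8_t t[16])
    {
        uint64_t lo, hi;
        memcpy(&lo, t, 8);
        memcpy(&hi, t + 8, 8);
        uint64_t carry = hi >> 63;            /* bit shifted out of the top */
        hi = (hi << 1) | (lo >> 63);
        lo = (lo << 1) ^ (carry ? 0x87 : 0);  /* fold the carry back in */
        memcpy(t, &lo, 8);
        memcpy(t + 8, &hi, 8);
    }

    /* One load/whiten round (sketch): stash tweak i in dst, XOR src block i
     * with it into *x, then derive tweak i+1 for the next block. */
    static inline void load_xts_block_c(uint8_t tweak[16], const uint8_t *src,
                                        uint8_t *dst, __m128i *x, int i)
    {
        __m128i tiv = _mm_loadu_si128((const __m128i *)tweak);
        _mm_storeu_si128((__m128i *)(dst + i * 16), tiv);   /* save tweak i */
        *x = _mm_xor_si128(_mm_loadu_si128((const __m128i *)(src + i * 16)),
                           tiv);
        gf128mul_x_ble_c(tweak);                             /* tweak i+1 */
    }
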
131 #define store_xts_8way(dst, x0, x1, x2, x3, x4, x5, x6, x7) \
132         vpxor (0*16)(dst), x0, x0; \
133         vpxor (1*16)(dst), x1, x1; \
134         vpxor (2*16)(dst), x2, x2; \
135         vpxor (3*16)(dst), x3, x3; \
136         vpxor (4*16)(dst), x4, x4; \
137         vpxor (5*16)(dst), x5, x5; \
138         vpxor (6*16)(dst), x6, x6; \
139         vpxor (7*16)(dst), x7, x7; \
140         store_8way(dst, x0, x1, x2, x3, x4, x5, x6, x7);
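
store_xts_8way appears in full because every line references dst: each vpxor reads back the tweak that load_xts_8way stashed at the same dst offset and XORs it into the just-ciphered block (the second XTS whitening step), and the final store_8way then overwrites those slots with the actual output. A C sketch of the same step; store_xts_8way_c is a hypothetical name:

    #include <immintrin.h>
    #include <stdint.h>

    /* XTS output step (sketch): XOR block i with the tweak previously
     * saved at dst slot i, then write the result back over that slot. */
    static inline void store_xts_8way_c(uint8_t *dst, __m128i x[8])
    {
        for (int i = 0; i < 8; i++) {
            __m128i tweak = _mm_loadu_si128((const __m128i *)(dst + i * 16));
            _mm_storeu_si128((__m128i *)(dst + i * 16),
                             _mm_xor_si128(x[i], tweak));
        }
    }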