Lines Matching defs:xmm_crc3
33 __m128i xmm_crc3 = _mm_loadu_si128((__m128i *)s->crc0 + 3);\
40 _mm_storeu_si128((__m128i *)s->crc0 + 3, xmm_crc3);\
51 xmm_crc3 = _mm_setzero_si128();
60 __m128i *xmm_crc2, __m128i *xmm_crc3)
69 x_tmp3 = *xmm_crc3;
71 *xmm_crc3 = *xmm_crc0;
73 *xmm_crc3 = _mm_clmulepi64_si128(*xmm_crc3, xmm_fold4, 0x10);
75 ps_crc3 = _mm_castsi128_ps(*xmm_crc3);
81 *xmm_crc3 = _mm_castps_si128(ps_res);
86 __m128i *xmm_crc2, __m128i *xmm_crc3)
95 x_tmp3 = *xmm_crc3;
98 *xmm_crc3 = *xmm_crc1;
100 *xmm_crc3 = _mm_clmulepi64_si128(*xmm_crc3, xmm_fold4, 0x10);
101 ps_crc3 = _mm_castsi128_ps(*xmm_crc3);
115 *xmm_crc3 = _mm_castps_si128(ps_res31);
120 __m128i *xmm_crc2, __m128i *xmm_crc3)
129 x_tmp3 = *xmm_crc3;
131 *xmm_crc3 = *xmm_crc2;
133 *xmm_crc3 = _mm_clmulepi64_si128(*xmm_crc3, xmm_fold4, 0x10);
135 ps_crc3 = _mm_castsi128_ps(*xmm_crc3);
155 *xmm_crc3 = _mm_castps_si128(ps_res32);
160 __m128i *xmm_crc2, __m128i *xmm_crc3)
174 x_tmp3 = *xmm_crc3;
194 *xmm_crc3 = _mm_clmulepi64_si128(*xmm_crc3, xmm_fold4, 0x01);
196 ps_crc3 = _mm_castsi128_ps(*xmm_crc3);
203 *xmm_crc3 = _mm_castps_si128(ps_res3);
226 __m128i *xmm_crc2, __m128i *xmm_crc3,
254 xmm_tmp3 = _mm_shuffle_epi8(*xmm_crc3, xmm_shl);
257 *xmm_crc3 = _mm_shuffle_epi8(*xmm_crc3, xmm_shr);
259 *xmm_crc3 = _mm_or_si128(*xmm_crc3, *xmm_crc_part);
264 ps_crc3 = _mm_castsi128_ps(*xmm_crc3);
271 *xmm_crc3 = _mm_castps_si128(ps_res);
297 partial_fold(s, algn_diff, &xmm_crc0, &xmm_crc1, &xmm_crc2, &xmm_crc3,
307 fold_4(s, &xmm_crc0, &xmm_crc1, &xmm_crc2, &xmm_crc3);
317 xmm_crc3 = _mm_xor_si128(xmm_crc3, xmm_t3);
333 fold_3(s, &xmm_crc0, &xmm_crc1, &xmm_crc2, &xmm_crc3);
341 xmm_crc3 = _mm_xor_si128(xmm_crc3, xmm_t2);
354 fold_2(s, &xmm_crc0, &xmm_crc1, &xmm_crc2, &xmm_crc3);
360 xmm_crc3 = _mm_xor_si128(xmm_crc3, xmm_t1);
372 fold_1(s, &xmm_crc0, &xmm_crc1, &xmm_crc2, &xmm_crc3);
376 xmm_crc3 = _mm_xor_si128(xmm_crc3, xmm_t0);
407 partial_fold(s, len, &xmm_crc0, &xmm_crc1, &xmm_crc2, &xmm_crc3,
441 __m128i xmm_crc3 = _mm_loadu_si128((__m128i *)s->crc0 + 3);
460 xmm_crc3 = _mm_xor_si128(xmm_crc3, x_tmp2);
461 xmm_crc3 = _mm_xor_si128(xmm_crc3, xmm_crc2);
468 xmm_crc0 = xmm_crc3;
469 xmm_crc3 = _mm_clmulepi64_si128(xmm_crc3, crc_fold, 0);
471 xmm_crc3 = _mm_xor_si128(xmm_crc3, xmm_crc0);
473 xmm_crc0 = xmm_crc3;
474 xmm_crc3 = _mm_slli_si128(xmm_crc3, 4);
475 xmm_crc3 = _mm_clmulepi64_si128(xmm_crc3, crc_fold, 0x10);
476 xmm_crc3 = _mm_xor_si128(xmm_crc3, xmm_crc0);
477 xmm_crc3 = _mm_and_si128(xmm_crc3, xmm_mask2);
482 xmm_crc1 = xmm_crc3;
483 xmm_crc2 = xmm_crc3;
486 xmm_crc3 = _mm_clmulepi64_si128(xmm_crc3, crc_fold, 0);
487 xmm_crc3 = _mm_xor_si128(xmm_crc3, xmm_crc2);
488 xmm_crc3 = _mm_and_si128(xmm_crc3, xmm_mask);
490 xmm_crc2 = xmm_crc3;
491 xmm_crc3 = _mm_clmulepi64_si128(xmm_crc3, crc_fold, 0x10);
492 xmm_crc3 = _mm_xor_si128(xmm_crc3, xmm_crc2);
493 xmm_crc3 = _mm_xor_si128(xmm_crc3, xmm_crc1);
495 crc = _mm_extract_epi32(xmm_crc3, 2);
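
Taken together, these matches trace xmm_crc3 through the PCLMULQDQ-based CRC-32 folding code: it is one of four 128-bit fold accumulators that the fold_1..fold_4 helpers fold and rotate, and crc_fold_512to32 (lines 441-495 above) finally collapses the four accumulators into xmm_crc3 and reduces it to the 32-bit CRC that _mm_extract_epi32(xmm_crc3, 2) pulls out. As a rough illustration of the step most of these lines come from, below is a minimal, self-contained sketch of that fold-by-1 pattern. It is not copied from the file: the fold constant pair is the conventional fold-by-4 pair assumed for the gzip CRC-32 polynomial, and the integer-domain XOR stands in for the float-domain XOR (_mm_castsi128_ps / _mm_xor_ps / _mm_castps_si128) that the matched lines show.

    #include <emmintrin.h>   /* SSE2 integer intrinsics */
    #include <wmmintrin.h>   /* _mm_clmulepi64_si128; compile with -msse2 -mpclmul */

    /* Sketch of the fold-by-1 step: xmm_crc3 receives the newly folded value
     * while the other three accumulators rotate down by one slot. */
    static void fold_1_sketch(__m128i *xmm_crc0, __m128i *xmm_crc1,
                              __m128i *xmm_crc2, __m128i *xmm_crc3)
    {
        /* Assumed fold-by-4 constants for the gzip CRC-32 polynomial;
         * not taken from the listing above. */
        const __m128i xmm_fold4 = _mm_set_epi32(0x00000001, 0x54442bd4,
                                                0x00000001, 0xc6e41596);
        __m128i x_tmp3 = *xmm_crc3;        /* save the old last accumulator */

        *xmm_crc3 = *xmm_crc0;             /* fold crc0 forward             */
        *xmm_crc0 = _mm_clmulepi64_si128(*xmm_crc0, xmm_fold4, 0x01);
        *xmm_crc3 = _mm_clmulepi64_si128(*xmm_crc3, xmm_fold4, 0x10);
        *xmm_crc3 = _mm_xor_si128(*xmm_crc0, *xmm_crc3);  /* folded result  */

        *xmm_crc0 = *xmm_crc1;             /* rotate the remaining three    */
        *xmm_crc1 = *xmm_crc2;
        *xmm_crc2 = x_tmp3;
    }

The integer _mm_xor_si128 above is functionally equivalent to the file's float-domain XOR; it is used here only to keep the sketch short. The later stages visible in lines 441-495 then fold the four accumulators into one with further carry-less multiplies, reduce 128 bits to 64 and then to 32 (the _mm_slli_si128/_mm_and_si128 steps with the mask constants), and read the CRC out of the third 32-bit lane of xmm_crc3.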