Searched refs:xmm_shr (Results 1 - 3 of 3) sorted by relevance
/third_party/node/deps/zlib/
    crc_folding.c
        235  __m128i xmm_shl, xmm_shr, xmm_tmp1, xmm_tmp2, xmm_tmp3;   in partial_fold() local
        240  xmm_shr = xmm_shl;                                        in partial_fold()
        241  xmm_shr = _mm_xor_si128(xmm_shr, xmm_mask3);              in partial_fold()
        245  *xmm_crc0 = _mm_shuffle_epi8(*xmm_crc0, xmm_shr);         in partial_fold()
        249  *xmm_crc1 = _mm_shuffle_epi8(*xmm_crc1, xmm_shr);         in partial_fold()
        253  *xmm_crc2 = _mm_shuffle_epi8(*xmm_crc2, xmm_shr);         in partial_fold()
        257  *xmm_crc3 = _mm_shuffle_epi8(*xmm_crc3, xmm_shr);         in partial_fold()
/third_party/node/deps/v8/third_party/zlib/
    crc_folding.c
        235  __m128i xmm_shl, xmm_shr, xmm_tmp1, xmm_tmp2, xmm_tmp3;   in partial_fold() local
        240  xmm_shr = xmm_shl;                                        in partial_fold()
        241  xmm_shr = _mm_xor_si128(xmm_shr, xmm_mask3);              in partial_fold()
        245  *xmm_crc0 = _mm_shuffle_epi8(*xmm_crc0, xmm_shr);         in partial_fold()
        249  *xmm_crc1 = _mm_shuffle_epi8(*xmm_crc1, xmm_shr);         in partial_fold()
        253  *xmm_crc2 = _mm_shuffle_epi8(*xmm_crc2, xmm_shr);         in partial_fold()
        257  *xmm_crc3 = _mm_shuffle_epi8(*xmm_crc3, xmm_shr);         in partial_fold()
/third_party/skia/third_party/externals/zlib/
    crc_folding.c
        235  __m128i xmm_shl, xmm_shr, xmm_tmp1, xmm_tmp2, xmm_tmp3;   in partial_fold() local
        240  xmm_shr = xmm_shl;                                        in partial_fold()
        241  xmm_shr = _mm_xor_si128(xmm_shr, xmm_mask3);              in partial_fold()
        245  *xmm_crc0 = _mm_shuffle_epi8(*xmm_crc0, xmm_shr);         in partial_fold()
        249  *xmm_crc1 = _mm_shuffle_epi8(*xmm_crc1, xmm_shr);         in partial_fold()
        253  *xmm_crc2 = _mm_shuffle_epi8(*xmm_crc2, xmm_shr);         in partial_fold()
        257  *xmm_crc3 = _mm_shuffle_epi8(*xmm_crc3, xmm_shr);         in partial_fold()
Completed in 2 milliseconds
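
All three hits are vendored copies of the same file: zlib's SIMD CRC-32 folding code (bundled with Node's zlib, V8's third-party zlib, and Skia's external zlib). In partial_fold(), xmm_shr is the pshufb control mask used to byte-shift the four CRC accumulator registers right by a partial block length. Lines 240-241 show the trick: the left-shift mask xmm_shl is copied and XORed with xmm_mask3 (0x80 replicated in every byte) to produce the complementary right-shift mask, because _mm_shuffle_epi8 zeroes any output byte whose mask byte has its high bit set. Below is a minimal, hypothetical sketch of that trick, not zlib's code: shl_mask(), dump(), and the shift amount n are invented for illustration, whereas zlib loads its masks from a precomputed table (pshufb_shf_table).

    /* Minimal sketch (assumed names, not zlib code): derive the right-shift
     * pshufb mask from the matching left-shift mask by XOR with 0x80. */
    #include <stdio.h>
    #include <stdint.h>
    #include <tmmintrin.h>              /* SSSE3: _mm_shuffle_epi8 */

    /* Mask that shifts a __m128i left by (16 - n) bytes, n in 1..15.
     * Bytes whose mask value has the high bit set are zeroed by pshufb. */
    static __m128i shl_mask(int n) {
        uint8_t m[16];
        for (int i = 0; i < 16; i++)
            m[i] = (uint8_t)(((i + n) & 0x0f) | ((i + n < 16) ? 0x80 : 0));
        return _mm_loadu_si128((const __m128i *)m);
    }

    static void dump(const char *tag, __m128i v) {
        uint8_t b[16];
        _mm_storeu_si128((__m128i *)b, v);
        printf("%s:", tag);
        for (int i = 15; i >= 0; i--)   /* print high byte first */
            printf(" %02x", b[i]);
        printf("\n");
    }

    int main(void) {
        const int n = 5;                /* arbitrary partial length */
        __m128i data = _mm_setr_epi8(0, 1, 2, 3, 4, 5, 6, 7,
                                     8, 9, 10, 11, 12, 13, 14, 15);
        __m128i xmm_shl = shl_mask(n);
        /* crc_folding.c's xmm_mask3 is 0x80 in every byte. */
        __m128i xmm_mask3 = _mm_set1_epi32(0x80808080);
        __m128i xmm_shr = _mm_xor_si128(xmm_shl, xmm_mask3);

        dump("input   ", data);
        dump("shl 16-n", _mm_shuffle_epi8(data, xmm_shl)); /* left by 11 */
        dump("shr n   ", _mm_shuffle_epi8(data, xmm_shr)); /* right by 5 */
        return 0;
    }

Compiled with -mssse3 (or any SSE4.2 baseline), the two shuffles move the register in opposite directions from a single mask table; partial_fold() applies exactly this xmm_shr shuffle to *xmm_crc0 through *xmm_crc3 at lines 245-257 so a partial 16-byte block can be folded into the running CRC state.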