Lines Matching defs:v_x0

9461       uint32_t v_x0;
26236 __m128i v_x0 = {0};
26266 v_x0 = _mm_lddqu_si128((const __m128i*)(const void*)(a_x.ptr + 0));
26270 v_x0 = _mm_xor_si128(v_x0, _mm_cvtsi32_si128((int32_t)(v_s)));
26278 v_y0 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(0));
26282 v_x0 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(17));
26286 v_x0 = _mm_xor_si128(_mm_xor_si128(v_x0, v_y0), _mm_lddqu_si128((const __m128i*)(const void*)(v_p.ptr + 0)));
26295 v_y0 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(0));
26296 v_x0 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(17));
26297 v_x0 = _mm_xor_si128(v_x0, v_x1);
26298 v_x0 = _mm_xor_si128(v_x0, v_y0);
26299 v_y0 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(0));
26300 v_x0 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(17));
26301 v_x0 = _mm_xor_si128(v_x0, v_x2);
26302 v_x0 = _mm_xor_si128(v_x0, v_y0);
26303 v_y0 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(0));
26304 v_x0 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(17));
26305 v_x0 = _mm_xor_si128(v_x0, v_x3);
26306 v_x0 = _mm_xor_si128(v_x0, v_y0);
26307 v_x1 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(16));
26309 v_x0 = _mm_srli_si128(v_x0, (int32_t)(8));
26310 v_x0 = _mm_xor_si128(v_x0, v_x1);
26312 v_x1 = _mm_srli_si128(v_x0, (int32_t)(4));
26313 v_x0 = _mm_and_si128(v_x0, v_x2);
26314 v_x0 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(0));
26315 v_x0 = _mm_xor_si128(v_x0, v_x1);
26317 v_x1 = _mm_and_si128(v_x0, v_x2);
26321 v_x0 = _mm_xor_si128(v_x0, v_x1);
26322 v_s = ((uint32_t)(_mm_extract_epi32(v_x0, (int32_t)(1))));
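
The 26xxx matches above are from a CRC-32/IEEE kernel that folds the input 16 bytes at a time with PCLMULQDQ. The imm8 argument to _mm_clmulepi64_si128 selects which 64-bit halves are multiplied: 0x00 is low*low, 0x11 (decimal 17, as written above) is high*high, and 0x10 (16) is low*high. Below is a minimal sketch of one folding step with the same clmul/clmul/xor/xor structure as the matches; the helper name fold16 and the omission of the polynomial-specific constants in k are mine, not from the file.

#include <immintrin.h>

/* One 16-byte CRC folding step: carry-less multiply the low 64 bits of
   the accumulator x by k.lo (imm8 0x00) and the high 64 bits by k.hi
   (imm8 0x11), then XOR both products into the next 16-byte block of
   input. The folding constants in k depend on the CRC polynomial and
   the fold distance and are deliberately not reproduced here. */
static inline __m128i fold16(__m128i x, __m128i next_block, __m128i k) {
  __m128i lo = _mm_clmulepi64_si128(x, k, 0x00);  /* x.lo * k.lo */
  __m128i hi = _mm_clmulepi64_si128(x, k, 0x11);  /* x.hi * k.hi */
  return _mm_xor_si128(_mm_xor_si128(lo, hi), next_block);
}

The tail of the match group (the imm8 0x10 multiply, the 8- and 4-byte shifts, the masked multiply, and the final _mm_extract_epi32(v_x0, 1)) is what looks like the usual 128-to-64-bit fold followed by a Barrett-style reduction down to the 32-bit CRC kept in v_s.
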
26355 __m128i v_x0 = {0};
26385 v_x0 = _mm_lddqu_si128((const __m128i*)(const void*)(a_x.ptr + 0));
26389 v_x0 = _mm_xor_si128(v_x0, _mm_cvtsi32_si128((int32_t)(v_s)));
26397 v_y0 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(0));
26401 v_x0 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(17));
26405 v_x0 = _mm_xor_si128(_mm_xor_si128(v_x0, v_y0), _mm_lddqu_si128((const __m128i*)(const void*)(v_p.ptr + 0)));
26414 v_y0 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(0));
26415 v_x0 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(17));
26416 v_x0 = _mm_xor_si128(v_x0, v_x1);
26417 v_x0 = _mm_xor_si128(v_x0, v_y0);
26418 v_y0 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(0));
26419 v_x0 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(17));
26420 v_x0 = _mm_xor_si128(v_x0, v_x2);
26421 v_x0 = _mm_xor_si128(v_x0, v_y0);
26422 v_y0 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(0));
26423 v_x0 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(17));
26424 v_x0 = _mm_xor_si128(v_x0, v_x3);
26425 v_x0 = _mm_xor_si128(v_x0, v_y0);
26426 v_x1 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(16));
26428 v_x0 = _mm_srli_si128(v_x0, (int32_t)(8));
26429 v_x0 = _mm_xor_si128(v_x0, v_x1);
26431 v_x1 = _mm_srli_si128(v_x0, (int32_t)(4));
26432 v_x0 = _mm_and_si128(v_x0, v_x2);
26433 v_x0 = _mm_clmulepi64_si128(v_x0, v_k, (int32_t)(0));
26434 v_x0 = _mm_xor_si128(v_x0, v_x1);
26436 v_x1 = _mm_and_si128(v_x0, v_x2);
26440 v_x0 = _mm_xor_si128(v_x0, v_x1);
26441 v_s = ((uint32_t)(_mm_extract_epi32(v_x0, (int32_t)(1))));
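
The second 26xxx group is a copy of the same folding kernel at different line numbers (the generated file contains more than one specialization of it). Whichever specialization runs, it must agree with plain CRC-32/IEEE. A scalar bit-at-a-time reference, useful for cross-checking a SIMD path like the one above (the function name is illustrative, not from the file):

#include <stddef.h>
#include <stdint.h>

/* Bit-at-a-time CRC-32/IEEE (reflected form, polynomial 0xEDB88320).
   Pass 0 as the initial crc for a whole buffer, or the running value
   when hashing incrementally. */
uint32_t crc32_ieee_reference(uint32_t crc, const uint8_t* p, size_t n) {
  crc = ~crc;
  for (size_t i = 0; i < n; i++) {
    crc ^= p[i];
    for (int b = 0; b < 8; b++) {
      crc = (crc >> 1) ^ (0xEDB88320u & (0u - (crc & 1u)));
    }
  }
  return ~crc;
}
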
39128 uint32_t v_x0 = 0;
39146 v_x0 = self->private_data.s_decode_fctl[0].v_x0;
39185 v_x0 = t_0;
39187 if (v_x0 != self->private_impl.f_next_animation_seq_num) {
39280 v_x0 = t_3;
39311 v_x1 += v_x0;
39313 if ((v_x0 >= v_x1) ||
39314 (v_x0 > self->private_impl.f_width) ||
39322 self->private_impl.f_frame_rect_x0 = v_x0;
39353 v_x0 = t_5;
39385 self->private_impl.f_frame_duration = (((uint64_t)(v_x0)) * 7056000);
39387 self->private_impl.f_frame_duration = ((((uint64_t)(v_x0)) * 705600000) / ((uint64_t)(v_x1)));
39396 v_x0 = t_7;
39398 if (v_x0 == 0) {
39400 } else if (v_x0 == 1) {
39402 } else if (v_x0 == 2) {
39415 v_x0 = t_8;
39417 if (v_x0 == 0) {
39419 } else if (v_x0 == 1) {
39444 self->private_data.s_decode_fctl[0].v_x0 = v_x0;
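
The 39xxx matches are from the decoder's fcTL (APNG frame control) handling: v_x0 holds, in turn, the sequence number (checked against f_next_animation_seq_num), the frame rectangle's x offset (bounds-checked against the image width and stored in f_frame_rect_x0), the delay numerator, the dispose op (0, 1 or 2 for none/background/previous) and the blend op (0 or 1 for source/over). The frame-duration lines convert the fcTL delay fraction into flicks, the time unit Wuffs uses (705,600,000 flicks per second); per the APNG spec a zero denominator is treated as 100, which is where the 7,056,000 constant comes from. A sketch of that conversion (the helper name is illustrative, not from the file):

#include <stdint.h>

/* Convert an APNG fcTL delay (delay_num / delay_den seconds) into flicks.
   1 second = 705,600,000 flicks; a zero denominator means hundredths of
   a second, so that case multiplies by 705,600,000 / 100 = 7,056,000. */
static uint64_t fctl_delay_to_flicks(uint16_t delay_num, uint16_t delay_den) {
  if (delay_den == 0) {
    return ((uint64_t)delay_num) * 7056000;
  }
  return (((uint64_t)delay_num) * 705600000) / ((uint64_t)delay_den);
}
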