Lines Matching refs:q5

724 __m128i p7, p6, p5, p4, p3, p2, p1, p0, q0, q1, q2, q3, q4, q5, q6, q7;
751 DUP2_ARG2(__lsx_vldx, dst_tmp1, stride, dst_tmp1, stride2, q5, q6);
753 VP9_FLAT5(p7, p6, p5, p4, p0, q0, q4, q5, q6, q7, flat, flat2);
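
The VP9_FLAT5 call at 753 derives the wide-filter mask flat2 from the four
outermost pixels on each side of the edge, matching the argument order of
libvpx's flat_mask5(1, p7, p6, p5, p4, p0, q0, q4, q5, q6, q7). A minimal
scalar sketch of the per-pixel test the macro is assumed to vectorize (names
illustrative; the result combines with the narrower flat mask):

    #include <stdint.h>
    #include <stdlib.h>

    /* 0xff where the 15-tap wide filter may run, 0 elsewhere. */
    static uint8_t flat_mask5_scalar(uint8_t p7, uint8_t p6, uint8_t p5,
                                     uint8_t p4, uint8_t p0, uint8_t q0,
                                     uint8_t q4, uint8_t q5, uint8_t q6,
                                     uint8_t q7, uint8_t flat)
    {
        int is_flat = abs(p7 - p0) <= 1 && abs(p6 - p0) <= 1 &&
                      abs(p5 - p0) <= 1 && abs(p4 - p0) <= 1 &&
                      abs(q4 - q0) <= 1 && abs(q5 - q0) <= 1 &&
                      abs(q6 - q0) <= 1 && abs(q7 - q0) <= 1;
        return is_flat ? flat : 0;
    }
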
907 q5_l_in = (v8u16)__lsx_vilvl_b(zero, q5);
915 q5_h_in = (v8u16)__lsx_vilvh_b(zero, q5);
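
The interleaves at 907/915 are the usual zero-extension idiom: interleaving a
zero vector with q5 at byte granularity widens each unsigned byte to a 16-bit
lane, the low half via vilvl_b and the high half via vilvh_b, ready for the
halfword filter arithmetic. Scalar equivalent, assuming 16-byte vectors:

    #include <stdint.h>

    static void widen_u8_to_u16(const uint8_t q5[16],
                                uint16_t q5_l_in[8], uint16_t q5_h_in[8])
    {
        for (int i = 0; i < 8; i++) {
            q5_l_in[i] = q5[i];      /* __lsx_vilvl_b(zero, q5): lanes 0..7  */
            q5_h_in[i] = q5[i + 8];  /* __lsx_vilvh_b(zero, q5): lanes 8..15 */
        }
    }
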
1043 /* q5 */
1057 q5 = __lsx_vbitsel_v(q5, out_l, flat2);
1058 __lsx_vst(q5, dst, 0);
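
Lines 1057-1058 commit the result: vbitsel_v keeps the original q5 where the
flat2 mask is clear and substitutes the filtered value out_l where it is set,
and the merged vector is stored. Per byte this is the standard branch-free
select, assuming vbitsel_v(a, b, m) = (a & ~m) | (b & m):

    #include <stdint.h>

    static uint8_t bitsel(uint8_t orig, uint8_t filtered, uint8_t mask)
    {
        return (uint8_t)((orig & ~mask) | (filtered & mask));
    }

Because flat2 holds 0x00 or 0xff per lane, the same bitwise select blends
whole pixels at a time.
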
1108 __m128i p3, p2, p1, p0, q3, q2, q1, q0, p7, p6, p5, p4, q4, q5, q6, q7;
1171 dst_tmp1 + stride2, 0, dst_tmp1 + stride3, 0, q4, q5, q6, q7);
1173 VP9_FLAT5(p7, p6, p5, p4, p0, q0, q4, q5, q6, q7, flat, flat2);
1193 DUP4_ARG2(__lsx_vilvl_b, zero, q4, zero, q5, zero, q6, zero, q7,
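
The DUPn_ARG2 helpers used throughout (751, 1193, 2152, ...) appear to be the
fan-out macros from FFmpeg's LoongArch intrinsics header: one instruction
applied to n operand pairs, yielding n results. Assumed shape:

    #define DUP2_ARG2(_INS, _IN0, _IN1, _IN2, _IN3, _OUT0, _OUT1)   \
        {                                                           \
            _OUT0 = _INS(_IN0, _IN1);                               \
            _OUT1 = _INS(_IN2, _IN3);                               \
        }

    #define DUP4_ARG2(_INS, _IN0, _IN1, _IN2, _IN3, _IN4, _IN5,    \
                      _IN6, _IN7, _OUT0, _OUT1, _OUT2, _OUT3)       \
        {                                                           \
            DUP2_ARG2(_INS, _IN0, _IN1, _IN2, _IN3, _OUT0, _OUT1); \
            DUP2_ARG2(_INS, _IN4, _IN5, _IN6, _IN7, _OUT2, _OUT3); \
        }

So line 1193 zero-extends q4..q7 in one statement, the same widening the
single vilvl_b at 907 performs for q5.
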
1328 /* calculation of q5 and q6 */
1341 p0_filter16 = __lsx_vbitsel_v(q5, p0_filter16, flat2);
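
The comment at 1328 marks the tail of the wide ("filter16") smoothing, whose
fourteen outputs op6..oq6 are each a rounded 16-sample average over a window
that slides one pixel per output, with p7 and q7 replicated past the edges;
the vector code keeps the window sum in accumulators and spends one
add/subtract pair per output, which is why a temporary such as p0_filter16 is
reused for the q5 result at 1341. A scalar sketch of the assumed arithmetic:

    #include <stdint.h>
    #include <string.h>

    static void filter16_sketch(const uint8_t px[16], /* p7..p0, q0..q7 */
                                uint8_t out[14])      /* op6..op0, oq0..oq6 */
    {
        uint8_t a[29];
        int sum = 0, i;

        for (i = 0; i < 7; i++)
            a[i] = px[0];               /* replicate p7 on the left   */
        memcpy(a + 7, px + 1, 15);      /* p6..p0, q0..q7             */
        for (i = 22; i < 29; i++)
            a[i] = px[15];              /* replicate q7 on the right  */

        for (i = 0; i < 15; i++)
            sum += a[i];                /* initial 15-sample window   */
        for (i = 0; i < 14; i++) {
            out[i] = (uint8_t)((sum + a[i + 7] + 8) >> 4); /* centre tap twice */
            sum += a[i + 15] - a[i];    /* slide: one enters, one leaves */
        }
    }
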
2135 __m128i p7, p6, p5, p4, p3, p2, p1, p0, q0, q1, q2, q3, q4, q5, q6, q7;
2152 DUP4_ARG2(__lsx_vbsrl_v, q0, 8, q2, 8, q4, 8, q6, 8, q1, q3, q5, q7);
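
At 2152, __lsx_vbsrl_v with an immediate of 8 shifts each 128-bit register
right by eight bytes, so the upper eight lanes of q0/q2/q4/q6 drop into the
low half of q1/q3/q5/q7 with zero fill above; this presumably unpacks row
pairs that the preceding 16x8 transpose left sharing one register. Scalar
picture:

    #include <stdint.h>
    #include <string.h>

    static void vbsrl_by_8(const uint8_t src[16], uint8_t dst[16])
    {
        memcpy(dst, src + 8, 8);  /* bytes 8..15 land in lanes 0..7  */
        memset(dst + 8, 0, 8);    /* vacated upper lanes are zeroed  */
    }
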
2167 __lsx_vst(q5, output, 208);
2176 __m128i p7, p6, p5, p4, p3, p2, p1, p0, q0, q1, q2, q3, q4, q5, q6, q7;
2188 q4, q5, q6, q7);
2189 LSX_TRANSPOSE16x8_B(p7, p6, p5, p4, p3, p2, p1, p0, q0, q1, q2, q3, q4, q5,
2202 __m128i p7, p6, p5, p4, p3, p2, p1, p0, q0, q1, q2, q3, q4, q5, q6, q7;
2224 q5 = __lsx_vpackod_d(row10, row2);
2231 DUP2_ARG2(__lsx_vpackev_b, q6, q7, q4, q5, tmp0, tmp1);
2232 DUP2_ARG2(__lsx_vpackod_b, q6, q7, q4, q5, tmp4, tmp5);
2234 DUP2_ARG2(__lsx_vpackev_b, q2, q3, q0, q1, q5, q7);
2237 DUP2_ARG2(__lsx_vpackev_h, tmp1, tmp0, q7, q5, tmp2, tmp3);
2242 tmp3 = __lsx_vpackod_h(q7, q5);
2248 q5 = __lsx_vpackod_w(tmp3, tmp2);
2258 LSX_ST_8(q0, q1, q2, q3, q4, q5, q6, q7, output, out_stride,
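
2224-2258 are fragments of an interleave-based 16x16 byte transpose: the
vpackev_*/vpackod_* pairs split even and odd elements at successively
regrouped widths (doublewords at 2224, bytes at 2231-2234, halfwords at
2237-2242, words at 2248), and LSX_ST_8 then writes eight of the finished row
registers out. A handful of pack stages replaces the 256 scalar moves a direct
transpose would need; the net effect is just:

    #include <stdint.h>

    static void transpose_16x16_u8(const uint8_t in[16][16],
                                   uint8_t out[16][16])
    {
        for (int r = 0; r < 16; r++)
            for (int c = 0; c < 16; c++)
                out[c][r] = in[r][c];
    }
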
2362 __m128i p7, p6, p5, p4, p3, p2, p1, p0, q0, q1, q2, q3, q4, q5, q6, q7;
2377 DUP4_ARG2(__lsx_vld, dst, 64, dst, 80, dst, 96, dst, 112, q4, q5, q6, q7);
2382 VP9_FLAT5(p7, p6, p5, p4, p0, q0, q4, q5, q6, q7, flat, flat2);
2503 q5_l_in = (v8u16)__lsx_vilvl_b(zero, q5);
2587 /* q5 */
2594 q5 = __lsx_vbitsel_v(q5, out_l, flat2);
2595 __lsx_vstelm_d(q5, dst, 0, 0);
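
Where the 16-wide path stores a full vector (1058), the variant at 2595 uses
__lsx_vstelm_d to write only doubleword element 0, i.e. the eight filtered
pixels this narrower geometry owns, leaving the adjacent bytes untouched.
Scalar picture:

    #include <stdint.h>
    #include <string.h>

    static void store_low_8(const uint8_t q5[16], uint8_t *dst)
    {
        memcpy(dst, q5, 8);  /* d-element 0 = bytes 0..7 only */
    }
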
2755 __m128i p7, p6, p5, p4, p3, p2, p1, p0, q0, q1, q2, q3, q4, q5, q6, q7;
2775 DUP4_ARG2(__lsx_vld, dst, 64, dst, 80, dst, 96, dst, 112, q4, q5, q6, q7);
2777 VP9_FLAT5(p7, p6, p5, p4, p0, q0, q4, q5, q6, q7, flat, flat2);
2969 q5_l_in = (v8u16)__lsx_vilvl_b(zero, q5);
2976 q5_h_in = (v8u16)__lsx_vilvh_b(zero, q5);
3084 /* q5 */
3096 q5 = __lsx_vbitsel_v(q5, out_l, flat2);
3097 __lsx_vst(q5, dst, 16*12);