Lines Matching defs:q0 (the leading numbers are line numbers in the source file)

238 __m128i p3, p2, p1, p0, q3, q2, q1, q0, p1_out, p0_out, q0_out, q1_out;
242 q0 = __lsx_vld(dst, 0);
250 LPF_MASK_HEV(p3, p2, p1, p0, q0, q1, q2, q3, limit, b_limit, thresh,
253 VP9_LPF_FILTER4_4W(p1, p0, q0, q1, mask, hev, p1_out, p0_out, q0_out,
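Lines 238-253 are the plain 4-tap horizontal path: q0 is loaded straight from the edge pointer, LPF_MASK_HEV derives the filter and high-edge-variance masks, and VP9_LPF_FILTER4_4W produces the four *_out vectors. A minimal sketch of how q0 and its neighbours are typically gathered around a horizontal edge (the helper name and exact load scheme are assumptions, not the file's code):

    #include <lsxintrin.h>
    #include <stdint.h>
    #include <stddef.h>

    /* dst points at the first row below the edge; stride is the row pitch */
    static void load_rows_around_edge(uint8_t *dst, ptrdiff_t stride,
                                      __m128i *p1, __m128i *p0,
                                      __m128i *q0, __m128i *q1)
    {
        *p1 = __lsx_vldx(dst, -2 * stride); /* second row above the edge */
        *p0 = __lsx_vldx(dst, -stride);     /* row just above the edge   */
        *q0 = __lsx_vld(dst, 0);            /* first row below the edge  */
        *q1 = __lsx_vldx(dst, stride);      /* second row below the edge */
    }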
272 __m128i p3, p2, p1, p0, q3, q2, q1, q0;
276 q0 = __lsx_vld(dst, 0);
292 LPF_MASK_HEV(p3, p2, p1, p0, q0, q1, q2, q3, limit0, b_limit0, thresh0,
294 VP9_LPF_FILTER4_4W(p1, p0, q0, q1, mask, hev, p1, p0, q0, q1);
298 __lsx_vst(q0, dst, 0);
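At 294 this dual-8-pixel variant reuses its inputs as outputs (p1, p0, q0, q1 are overwritten in place), so line 298 can store q0 straight back to the edge pointer. The store-back mirrors the loads; a sketch under the same assumptions as above:

    static void store_rows_around_edge(uint8_t *dst, ptrdiff_t stride,
                                       __m128i p1, __m128i p0,
                                       __m128i q0, __m128i q1)
    {
        __lsx_vstx(p1, dst, -2 * stride);
        __lsx_vstx(p0, dst, -stride);
        __lsx_vst(q0, dst, 0);
        __lsx_vstx(q1, dst, stride);
    }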
311 __m128i p3, p2, p1, p0, q3, q2, q1, q0;
320 q0 = __lsx_vld(dst, 0);
328 LPF_MASK_HEV(p3, p2, p1, p0, q0, q1, q2, q3, limit, b_limit, thresh,
330 VP9_FLAT4(p3, p2, p0, q0, q2, q3, flat);
331 VP9_LPF_FILTER4_4W(p1, p0, q0, q1, mask, hev, p1_out, p0_out, q0_out,
345 DUP4_ARG2(__lsx_vilvl_b, zero, q0, zero, q1, zero, q2, zero, q3,
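The `__lsx_vilvl_b(zero, q0)` idiom at 345 (and its `__lsx_vilvh_b` twin at 435, 600, 684, ...) zero-extends pixel bytes to unsigned halfwords so the flat (8-tap) filter can accumulate sums without overflowing 8 bits: interleaving with a zero vector puts each pixel byte in the low byte of a 16-bit lane. A sketch of the widening step:

    /* v8u16 matches the unsigned-halfword vector type the file casts to */
    typedef unsigned short v8u16 __attribute__((vector_size(16)));

    static void widen_u8_to_u16(__m128i q0, v8u16 *q0_l, v8u16 *q0_h)
    {
        __m128i zero = __lsx_vldi(0);
        *q0_l = (v8u16)__lsx_vilvl_b(zero, q0); /* low 8 bytes  -> u16 lanes */
        *q0_h = (v8u16)__lsx_vilvh_b(zero, q0); /* high 8 bytes -> u16 lanes */
    }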
382 __m128i p3, p2, p1, p0, q3, q2, q1, q0;
396 q0 = __lsx_vld(dst, 0);
413 LPF_MASK_HEV(p3, p2, p1, p0, q0, q1, q2, q3, limit, b_limit, thresh,
415 VP9_FLAT4(p3, p2, p0, q0, q2, q3, flat);
416 VP9_LPF_FILTER4_4W(p1, p0, q0, q1, mask, hev, p1_out, p0_out, q0_out,
428 DUP4_ARG2(__lsx_vilvl_b, zero, q0, zero, q1, zero, q2, zero, q3,
435 DUP4_ARG2(__lsx_vilvh_b, zero, q0, zero, q1, zero, q2, zero, q3,
473 __m128i p3, p2, p1, p0, q3, q2, q1, q0;
484 q0 = __lsx_vld(dst, 0);
501 LPF_MASK_HEV(p3, p2, p1, p0, q0, q1, q2, q3, limit, b_limit, thresh,
503 VP9_FLAT4(p3, p2, p0, q0, q2, q3, flat);
504 VP9_LPF_FILTER4_4W(p1, p0, q0, q1, mask, hev, p1_out, p0_out, q0_out,
518 DUP4_ARG2(__lsx_vilvl_b, zero, q0, zero, q1, zero, q2, zero, q3,
555 __m128i p3, p2, p1, p0, q3, q2, q1, q0;
566 q0 = __lsx_vld(dst, 0);
583 LPF_MASK_HEV(p3, p2, p1, p0, q0, q1, q2, q3, limit, b_limit, thresh,
585 VP9_FLAT4(p3, p2, p0, q0, q2, q3, flat);
586 VP9_LPF_FILTER4_4W(p1, p0, q0, q1, mask, hev, p1_out, p0_out, q0_out,
600 DUP4_ARG2(__lsx_vilvh_b, zero, q0, zero, q1, zero, q2, zero, q3,
638 __m128i p3, p2, p1, p0, q3, q2, q1, q0;
652 q0 = __lsx_vld(dst, 0);
661 LPF_MASK_HEV(p3, p2, p1, p0, q0, q1, q2, q3, limit, b_limit, thresh,
663 VP9_FLAT4(p3, p2, p0, q0, q2, q3, flat);
664 VP9_LPF_FILTER4_4W(p1, p0, q0, q1, mask, hev, p1_out, p0_out, q0_out,
677 DUP4_ARG2(__lsx_vilvl_b, zero, q0, zero, q1, zero, q2, zero, q3,
684 DUP4_ARG2(__lsx_vilvh_b, zero, q0, zero, q1, zero, q2, zero, q3,
724 __m128i p7, p6, p5, p4, p3, p2, p1, p0, q0, q1, q2, q3, q4, q5, q6, q7;
746 q0 = __lsx_vld(dst, 0);
753 VP9_FLAT5(p7, p6, p5, p4, p0, q0, q4, q5, q6, q7, flat, flat2);
758 48, p2, p1, p0, q0);
764 __lsx_vst(q0, dst, 0);
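Lines 746-764 are the two-stage wide filter: the 8-tap stage parks its results in a scratch buffer (filter48), VP9_FLAT5 at 753 decides whether the 16-tap path is needed, and the fragment at 758 is the tail of the reload that pulls those vectors back out of the scratch. Expanded, the same reload as written at 2389/2784 reads as follows (slot layout assumed: 16 bytes per vector, p2 at offset 0 through q0 at offset 48):

    p2 = __lsx_vld(filter48, 0);
    p1 = __lsx_vld(filter48, 16);
    p0 = __lsx_vld(filter48, 32);
    q0 = __lsx_vld(filter48, 48);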
779 q0_l_in = (v8u16)__lsx_vilvl_b(zero, q0);
804 q0_h_in = (v8u16)__lsx_vilvh_b(zero, q0);
948 /* q0 */
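The `/* q0 */` comment at 948 (likewise 2528 and 3004) heads the block that computes the wide-filter q0 output. The 16-tap outputs are produced with a sliding window sum at 16-bit precision: each output adds the sample entering the window, subtracts the one leaving it, and takes a rounded 4-bit shift. A sketch of one such step, with v8u16 as in the earlier sketch (the tap pair is illustrative; the file's exact window bookkeeping differs per output):

    static __m128i wide_filter_step(v8u16 *sum, v8u16 entering, v8u16 leaving)
    {
        *sum = (v8u16)__lsx_vadd_h((__m128i)*sum, (__m128i)entering);
        *sum = (v8u16)__lsx_vsub_h((__m128i)*sum, (__m128i)leaving);
        return __lsx_vsrari_h((__m128i)*sum, 4); /* rounded >> 4 */
    }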
1108 __m128i p3, p2, p1, p0, q3, q2, q1, q0, p7, p6, p5, p4, q4, q5, q6, q7;
1120 q0 = __lsx_vld(dst, 0);
1128 LPF_MASK_HEV(p3, p2, p1, p0, q0, q1, q2, q3, limit, b_limit, thresh,
1130 VP9_FLAT4(p3, p2, p0, q0, q2, q3, flat);
1131 VP9_LPF_FILTER4_4W(p1, p0, q0, q1, mask, hev, p1_out, p0_out, q0_out,
1146 DUP4_ARG2(__lsx_vilvl_b, zero, q0, zero, q1, zero, q2, zero, q3,
1173 VP9_FLAT5(p7, p6, p5, p4, p0, q0, q4, q5, q6, q7, flat, flat2);
1268 /* calculation of p0 and q0 */
1361 __m128i p3, p2, p1, p0, q3, q2, q1, q0;
1367 q0 = __lsx_vld(dst_tmp2, 0);
1375 LSX_TRANSPOSE8x8_B(p3, p2, p1, p0, q0, q1, q2, q3,
1376 p3, p2, p1, p0, q0, q1, q2, q3);
1377 LPF_MASK_HEV(p3, p2, p1, p0, q0, q1, q2, q3, limit, b_limit, thresh,
1379 VP9_LPF_FILTER4_4W(p1, p0, q0, q1, mask, hev, p1, p0, q0, q1);
1380 DUP2_ARG2(__lsx_vilvl_b, p0, p1, q1, q0, vec0, vec1);
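The vertical 4-tap path at 1361-1380 first transposes the 8x8 block with LSX_TRANSPOSE8x8_B so that columns land in registers, filters as usual, then re-interleaves the four changed columns so one 32-bit store per row writes them back. A sketch of that re-interleave and store step (helper name assumed; the p1 p0 | q0 q1 byte order follows the vilvl calls at 1380):

    static void store_vert4(uint8_t *dst, ptrdiff_t stride,
                            __m128i p1, __m128i p0, __m128i q0, __m128i q1)
    {
        __m128i vec0 = __lsx_vilvl_b(p0, p1);     /* p1,p0 byte pairs    */
        __m128i vec1 = __lsx_vilvl_b(q1, q0);     /* q0,q1 byte pairs    */
        __m128i row  = __lsx_vilvl_h(vec1, vec0); /* p1 p0 q0 q1 / lane  */
        __lsx_vstelm_w(row, dst - 2, 0, 0);               /* row 0 */
        __lsx_vstelm_w(row, dst - 2 + stride, 0, 1);      /* row 1 */
        __lsx_vstelm_w(row, dst - 2 + stride * 2, 0, 2);  /* row 2 */
        __lsx_vstelm_w(row, dst - 2 + stride * 3, 0, 3);  /* row 3 */
    }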
1407 __m128i p3, p2, p1, p0, q3, q2, q1, q0;
1430 p3, p2, p1, p0, q0, q1, q2, q3);
1444 LPF_MASK_HEV(p3, p2, p1, p0, q0, q1, q2, q3, limit0, b_limit0, thresh0,
1446 VP9_LPF_FILTER4_4W(p1, p0, q0, q1, mask, hev, p1, p0, q0, q1);
1447 DUP2_ARG2(__lsx_vilvl_b, p0, p1, q1, q0, tmp0, tmp1);
1450 DUP2_ARG2(__lsx_vilvh_b, p0, p1, q1, q0, tmp0, tmp1);
1485 __m128i p3, p2, p1, p0, q3, q2, q1, q0;
1499 q0 = __lsx_vld(dst_tmp, 0);
1503 LSX_TRANSPOSE8x8_B(p3, p2, p1, p0, q0, q1, q2, q3,
1504 p3, p2, p1, p0, q0, q1, q2, q3);
1511 LPF_MASK_HEV(p3, p2, p1, p0, q0, q1, q2, q3, limit, b_limit, thresh,
1514 VP9_FLAT4(p3, p2, p0, q0, q2, q3, flat);
1516 VP9_LPF_FILTER4_4W(p1, p0, q0, q1, mask, hev, p1_out, p0_out, q0_out,
1541 DUP4_ARG2(__lsx_vilvl_b, zero, q0, zero, q1, zero, q2, zero, q3,
1557 q0 = __lsx_vbitsel_v(q0_out, q0_filt8_l, flat);
1562 DUP2_ARG2(__lsx_vilvl_b, p1, p2, q0, p0, vec0, vec1);
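The `__lsx_vbitsel_v` matches (1557, 1715, 1890, 2066) merge the 4-tap and 8-tap answers per pixel: flat is a byte mask that is all-ones exactly where the flat8 result applies. The narrowing of the 16-bit filt8 halves back to bytes before the select is an assumed step here; the select itself is verbatim from 1557:

    /* pack u16 results to bytes, then: flat ? filt8 : filt4 */
    q0_filt8_l = __lsx_vpickev_b(q0_filt8_h, q0_filt8_l); /* assumed step */
    q0 = __lsx_vbitsel_v(q0_out, q0_filt8_l, flat);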
1603 __m128i p3, p2, p1, p0, q3, q2, q1, q0;
1626 q0 = __lsx_vldx(dst_tmp, stride3);
1634 q3, q2, q1, q0, row12, row13, row14, row15,
1635 p3, p2, p1, p0, q0, q1, q2, q3);
1650 LPF_MASK_HEV(p3, p2, p1, p0, q0, q1, q2, q3, limit, b_limit, thresh,
1653 VP9_FLAT4(p3, p2, p0, q0, q2, q3, flat);
1655 VP9_LPF_FILTER4_4W(p1, p0, q0, q1, mask, hev, p1_out, p0_out, q0_out,
1690 DUP4_ARG2(__lsx_vilvl_b, zero, q0, zero, q1, zero, q2, zero, q3,
1697 DUP4_ARG2(__lsx_vilvh_b, zero, q0, zero, q1, zero, q2, zero, q3,
1715 q0 = __lsx_vbitsel_v(q0_out, q0_filt8_l, flat);
1719 DUP2_ARG2(__lsx_vilvl_b, p1, p2, q0, p0, vec0, vec1);
1722 DUP2_ARG2(__lsx_vilvh_b, p1, p2, q0, p0, vec0, vec1);
1788 __m128i p3, p2, p1, p0, q3, q2, q1, q0;
1808 q0 = __lsx_vldx(dst_tmp, stride3);
1816 q3, q2, q1, q0, row12, row13, row14, row15,
1817 p3, p2, p1, p0, q0, q1, q2, q3);
1832 LPF_MASK_HEV(p3, p2, p1, p0, q0, q1, q2, q3, limit, b_limit, thresh,
1835 VP9_FLAT4(p3, p2, p0, q0, q2, q3, flat);
1837 VP9_LPF_FILTER4_4W(p1, p0, q0, q1, mask, hev, p1_out, p0_out, q0_out,
1874 DUP4_ARG2(__lsx_vilvl_b, zero, q0, zero, q1, zero, q2, zero, q3,
1890 q0 = __lsx_vbitsel_v(q0_out, q0_filt8_l, flat);
1894 DUP2_ARG2(__lsx_vilvl_b, p1, p2, q0, p0, vec0, vec1);
1897 DUP2_ARG2(__lsx_vilvh_b, p1, p2, q0, p0, vec0, vec1);
1963 __m128i p3, p2, p1, p0, q3, q2, q1, q0;
1983 q0 = __lsx_vldx(dst_tmp, stride3);
1991 q3, q2, q1, q0, row12, row13, row14, row15,
1992 p3, p2, p1, p0, q0, q1, q2, q3);
2007 LPF_MASK_HEV(p3, p2, p1, p0, q0, q1, q2, q3, limit, b_limit, thresh,
2010 VP9_FLAT4(p3, p2, p0, q0, q2, q3, flat);
2012 VP9_LPF_FILTER4_4W(p1, p0, q0, q1, mask, hev, p1_out, p0_out, q0_out,
2049 DUP4_ARG2(__lsx_vilvh_b, zero, q0, zero, q1, zero, q2, zero, q3,
2066 q0 = __lsx_vbitsel_v(q0_out, q0_filt8_h, flat);
2070 DUP2_ARG2(__lsx_vilvl_b, p1, p2, q0, p0, vec0, vec1);
2073 DUP2_ARG2(__lsx_vilvh_b, p1, p2, q0, p0, vec0, vec1);
2135 __m128i p7, p6, p5, p4, p3, p2, p1, p0, q0, q1, q2, q3, q4, q5, q6, q7;
2150 DUP2_ARG2(__lsx_vilvl_w, tmp6, tmp4, tmp7, tmp5, q0, q4);
2152 DUP4_ARG2(__lsx_vbsrl_v, q0, 8, q2, 8, q4, 8, q6, 8, q1, q3, q5, q7);
2162 __lsx_vst(q0, output, 128);
2176 __m128i p7, p6, p5, p4, p3, p2, p1, p0, q0, q1, q2, q3, q4, q5, q6, q7;
2186 q0, q1, q2, q3);
2189 LSX_TRANSPOSE16x8_B(p7, p6, p5, p4, p3, p2, p1, p0, q0, q1, q2, q3, q4, q5,
2202 __m128i p7, p6, p5, p4, p3, p2, p1, p0, q0, q1, q2, q3, q4, q5, q6, q7;
2229 q0 = __lsx_vpackod_d(row15, row7);
2234 DUP2_ARG2(__lsx_vpackev_b, q2, q3, q0, q1, q5, q7);
2235 DUP2_ARG2(__lsx_vpackod_b, q2, q3, q0, q1, tmp6, tmp7);
2238 q0 = __lsx_vpackev_w(tmp3, tmp2);
2258 LSX_ST_8(q0, q1, q2, q3, q4, q5, q6, q7, output, out_stride,
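Lines 2135-2258 belong to the 16x16 transpose helpers that move pixels between row and column order through a temp buffer; q0 here is simply output row 8, hence the store at offset 128 (8 rows x 16 bytes) on line 2162. The even/odd pack plus byte-shift idiom used to assemble rows, shown in isolation (register names illustrative):

    __m128i even = __lsx_vpackev_d(row_hi, row_lo); /* low 64-bit halves  */
    __m128i odd  = __lsx_vpackod_d(row_hi, row_lo); /* high 64-bit halves */
    /* __lsx_vbsrl_v then slides the upper 8 bytes down to start a new
       row, as in the DUP4_ARG2(__lsx_vbsrl_v, q0, 8, ...) call at 2152 */
    __m128i next = __lsx_vbsrl_v(even, 8);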
2268 __m128i p3, p2, p1, p0, q3, q2, q1, q0;
2280 DUP4_ARG2(__lsx_vld, src, 0, src, 16, src, 32, src, 48, q0, q1, q2, q3);
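DUP4_ARG2 fans one two-argument operation out four times (out0 = op(a0, b0) through out3 = op(a3, b3)), so the load above is shorthand for:

    q0 = __lsx_vld(src, 0);
    q1 = __lsx_vld(src, 16);
    q2 = __lsx_vld(src, 32);
    q3 = __lsx_vld(src, 48);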
2287 LPF_MASK_HEV(p3, p2, p1, p0, q0, q1, q2, q3, limit, b_limit, thresh,
2290 VP9_FLAT4(p3, p2, p0, q0, q2, q3, flat);
2292 VP9_LPF_FILTER4_4W(p1, p0, q0, q1, mask, hev, p1_out, p0_out, q0_out,
2323 DUP4_ARG2(__lsx_vilvl_b, zero, q0, zero, q1, zero, q2, zero, q3,
2362 __m128i p7, p6, p5, p4, p3, p2, p1, p0, q0, q1, q2, q3, q4, q5, q6, q7;
2376 DUP4_ARG2(__lsx_vld, dst, 0, dst, 16, dst, 32, dst, 48, q0, q1, q2, q3);
2382 VP9_FLAT5(p7, p6, p5, p4, p0, q0, q4, q5, q6, q7, flat, flat2);
2389 filter48, 48, p2, p1, p0, q0);
2392 DUP2_ARG2(__lsx_vilvl_b, p1, p2, q0, p0, vec0, vec1);
2433 q0_l_in = (v8u16)__lsx_vilvl_b(zero, q0);
2528 /* q0 */
2646 __m128i p3, p2, p1, p0, q3, q2, q1, q0;
2661 DUP4_ARG2(__lsx_vld, dst, 0, dst, 16, dst, 32, dst, 48, q0, q1, q2, q3);
2668 LPF_MASK_HEV(p3, p2, p1, p0, q0, q1, q2, q3, limit, b_limit, thresh,
2671 VP9_FLAT4(p3, p2, p0, q0, q2, q3, flat);
2673 VP9_LPF_FILTER4_4W(p1, p0, q0, q1, mask, hev, p1_out, p0_out, q0_out,
2710 DUP4_ARG2(__lsx_vilvl_b, zero, q0, zero, q1, zero, q2, zero, q3,
2716 DUP4_ARG2(__lsx_vilvh_b, zero, q0, zero, q1, zero, q2, zero, q3,
2755 __m128i p7, p6, p5, p4, p3, p2, p1, p0, q0, q1, q2, q3, q4, q5, q6, q7;
2774 DUP4_ARG2(__lsx_vld, dst, 0, dst, 16, dst, 32, dst, 48, q0, q1, q2, q3);
2777 VP9_FLAT5(p7, p6, p5, p4, p0, q0, q4, q5, q6, q7, flat, flat2);
2784 filter48, 48, p2, p1, p0, q0);
2787 DUP2_ARG2(__lsx_vilvl_b, p1, p2, q0, p0, vec0, vec1);
2790 DUP2_ARG2(__lsx_vilvh_b, p1, p2, q0, p0, vec0, vec1);
2857 q0_l_in = (v8u16)__lsx_vilvl_b(zero, q0);
2880 q0_h_in = (v8u16)__lsx_vilvh_b(zero, q0);
3004 /* q0 */