Lines matching defs:vec0 (both declarations and use sites of vec0)
613 __m128i vec0, vec1, vec2, vec3;
634 src0, mask2, src1, src0, mask3, vec0, vec1, vec2, vec3);
635 dst0 = __lsx_vdp2_h_bu_b(vec0, filt0);
640 src2, mask2, src3, src2, mask3, vec0, vec1, vec2, vec3);
641 dst1 = __lsx_vdp2_h_bu_b(vec0, filt0);
646 src4, mask2, src5, src4, mask3, vec0, vec1, vec2, vec3);
647 dst2 = __lsx_vdp2_h_bu_b(vec0, filt0);
652 src6, mask2, src7, src6, mask3, vec0, vec1, vec2, vec3);
653 dst3 = __lsx_vdp2_h_bu_b(vec0, filt0);
673 src0, mask2, src1, src0, mask3, vec0, vec1, vec2, vec3);
674 dst0 = __lsx_vdp2_h_bu_b(vec0, filt0);
699 __m128i vec0, vec1, vec2, vec3;
717 src0, mask2, src0, src0, mask3, vec0, vec1, vec2, vec3);
718 dst0 = __lsx_vdp2_h_bu_b(vec0, filt0);
723 src1, mask2, src1, src1, mask3, vec0, vec1, vec2, vec3);
724 dst1 = __lsx_vdp2_h_bu_b(vec0, filt0);
729 src2, mask2, src2, src2, mask3, vec0, vec1, vec2, vec3);
730 dst2 = __lsx_vdp2_h_bu_b(vec0, filt0);
735 src3, mask2, src3, src3, mask3, vec0, vec1, vec2, vec3);
736 dst3 = __lsx_vdp2_h_bu_b(vec0, filt0);
760 __m128i vec0, vec1, vec2, vec3, vec4, vec5;
787 vec0, vec1);
792 DUP4_ARG2(__lsx_vdp2_h_bu_b, vec0, filt0, vec1, filt0, vec2, filt0,
796 vec0, vec1);
801 DUP4_ARG3(__lsx_vdp2add_h_bu_b, dst0, vec0, filt1, dst1, vec1, filt1,
806 vec0, vec1);
811 DUP4_ARG3(__lsx_vdp2add_h_bu_b, dst0, vec0, filt2, dst1, vec1, filt2,
816 vec0, vec1);
821 DUP4_ARG3(__lsx_vdp2add_h_bu_b, dst0, vec0, filt3, dst1, vec1, filt3,
849 __m128i vec0, vec1, vec2, vec3;
868 vec0, vec1);
871 DUP4_ARG2(__lsx_vdp2_h_bu_b, vec0, filt0, vec1, filt0, vec2, filt0,
874 vec0, vec1);
877 DUP4_ARG3(__lsx_vdp2add_h_bu_b, dst0, vec0, filt1, dst1, vec1, filt1,
880 vec0, vec1);
883 DUP4_ARG3(__lsx_vdp2add_h_bu_b, dst0, vec0, filt2, dst1, vec1, filt2,
886 vec0, vec1);
889 DUP4_ARG3(__lsx_vdp2add_h_bu_b, dst0, vec0, filt3, dst1, vec1, filt3,
909 __m128i vec0, vec1, vec2, vec3, vec4, vec5;
929 src1, mask0, src2, src2, mask0, vec0, vec1, vec2, vec3);
932 DUP4_ARG2(__lsx_vdp2_h_bu_b, vec0, filt0, vec1, filt0, vec2, filt0,
936 src1, mask1, src2, src2, mask1, vec0, vec1, vec2, vec3);
939 DUP4_ARG3(__lsx_vdp2add_h_bu_b, dst0, vec0, filt1, dst1, vec1, filt1,
944 src1, mask2, src2, src2, mask2, vec0, vec1, vec2, vec3);
947 DUP4_ARG3(__lsx_vdp2add_h_bu_b, dst0, vec0, filt2, dst1, vec1, filt2,
952 src1, mask3, src2, src2, mask3, vec0, vec1, vec2, vec3);
955 DUP4_ARG3(__lsx_vdp2add_h_bu_b, dst0, vec0, filt3, dst1, vec1, filt3,
979 __m128i vec0, vec1, vec2, vec3;
998 src0, mask2, src0, src0, mask3, vec0, vec1, vec2, vec3);
999 dst0 = __lsx_vdp2_h_bu_b(vec0, filt0);
1005 src0, mask6, src1, src0, mask7, vec0, vec1, vec2, vec3);
1006 dst1 = __lsx_vdp2_h_bu_b(vec0, filt0);
1011 mask2, src1, src1, mask3, vec0, vec1, vec2, vec3);
1012 dst2 = __lsx_vdp2_h_bu_b(vec0, filt0);
1017 mask2, src2, src2, mask3, vec0, vec1, vec2, vec3);
1018 dst3 = __lsx_vdp2_h_bu_b(vec0, filt0);
1039 __m128i vec0, vec1, vec2, vec3, vec4, vec5;
1059 mask0, src2, src1, mask4, vec0, vec1, vec2, vec3);
1060 DUP4_ARG2(__lsx_vdp2_h_bu_b, vec0, filt0, vec1, filt0, vec2, filt0,
1063 src1, mask1, src2, src1, mask5, vec0, vec1, vec2, vec3);
1064 DUP4_ARG3(__lsx_vdp2add_h_bu_b, dst0, vec0, filt1, dst1, vec1, filt1,
1067 src1, mask2, src2, src1, mask6, vec0, vec1, vec2, vec3);
1068 DUP4_ARG3(__lsx_vdp2add_h_bu_b, dst0, vec0, filt2, dst1, vec1, filt2,
1071 src1, mask3, src2, src1, mask7, vec0, vec1, vec2, vec3);
1072 DUP4_ARG3(__lsx_vdp2add_h_bu_b, dst0, vec0, filt3, dst1, vec1, filt3,
1108 __m128i vec0, vec1, vec2, vec3;
1128 src0, mask2, src0, src0, mask3, vec0, vec1, vec2, vec3);
1129 dst0 = __lsx_vdp2_h_bu_b(vec0, filt0);
1136 src0, mask6, src1, src0, mask7, vec0, vec1, vec2, vec3);
1137 dst1 = __lsx_vdp2_h_bu_b(vec0, filt0);
1144 src1, mask2, src1, src1, mask3, vec0, vec1, vec2, vec3);
1145 dst2 = __lsx_vdp2_h_bu_b(vec0, filt0);
1152 src1, mask6, src2, src1, mask7, vec0, vec1, vec2, vec3);
1153 dst3 = __lsx_vdp2_h_bu_b(vec0, filt0);
1160 src2, mask2, src2, src2, mask3, vec0, vec1, vec2, vec3);
1161 dst4 = __lsx_vdp2_h_bu_b(vec0, filt0);
1168 src2, mask6, src3, src2, mask7, vec0, vec1, vec2, vec3);
1169 dst5 = __lsx_vdp2_h_bu_b(vec0, filt0);
1176 src3, mask2, src3, src3, mask3, vec0, vec1, vec2, vec3);
1177 dst6 = __lsx_vdp2_h_bu_b(vec0, filt0);
1184 src4, mask2, src4, src4, mask3, vec0, vec1, vec2, vec3);
1185 dst7 = __lsx_vdp2_h_bu_b(vec0, filt0);
1670 __m128i vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;
1700 mask2, src3, src0, mask3, vec0, vec1, vec2, vec3);
1707 dst30 = __lsx_vdp2_h_bu_b(vec0, filt0);
1737 mask2, src9, src7, mask3, vec0, vec1, vec2, vec3);
1741 dst97 = __lsx_vdp2_h_bu_b(vec0, filt0);
1808 __m128i vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;
1844 src0, mask2, src0, src0, mask3, vec0, vec1, vec2, vec3);
1851 dst0 = __lsx_vdp2_h_bu_b(vec0, filt0);
1870 src4, mask2, src4, src4, mask3, vec0, vec1, vec2, vec3);
1875 dst4 = __lsx_vdp2_h_bu_b(vec0, filt0);
1893 src7, mask2, src7, src7, mask3, vec0, vec1, vec2, vec3);
1894 dst7 = __lsx_vdp2_h_bu_b(vec0, filt0);
1953 __m128i vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;
1992 mask2, src0, src0, mask3, vec0, vec1, vec2, vec3);
1999 dst0 = __lsx_vdp2_h_bu_b(vec0, filt0);
2018 mask2, src4, src4, mask3, vec0, vec1, vec2, vec3);
2023 dst4 = __lsx_vdp2_h_bu_b(vec0, filt0);
2041 src7, mask2, src7, src7, mask3, vec0, vec1, vec2, vec3);
2042 dst7 = __lsx_vdp2_h_bu_b(vec0, filt0);
2088 mask6, src3, src0, mask7, vec0, vec1, vec2, vec3);
2095 dst30 = __lsx_vdp2_h_bu_b(vec0, filt0);
2126 src7, mask6, src9, src7, mask7, vec0, vec1, vec2, vec3);
2129 dst97 = __lsx_vdp2_h_bu_b(vec0, filt0);
2235 __m128i vec0, vec1, vec2, vec3;
2249 vec0, vec1);
2252 DUP4_ARG2(__lsx_vdp2_h_bu_b, vec0, filt0, vec1, filt0, vec2, filt0,
2255 vec0, vec1);
2258 DUP4_ARG3(__lsx_vdp2add_h_bu_b, dst0, vec0, filt1, dst1, vec1, filt1,
2547 __m128i vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7, vec8, vec9;
2567 DUP2_ARG3(__lsx_vshuf_b, src0, src0, mask0, src0, src0, mask1, vec0, vec1);
2573 DUP4_ARG2(__lsx_vdp2_h_bu_b, vec0, filt0, vec2, filt0, vec4, filt0,
2611 __m128i vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;
2637 vec0, vec1);
2643 DUP2_ARG2(__lsx_vdp2_h_bu_b, vec0, filt0, vec2, filt0, dst0, dst1);
2653 vec0, vec1);
2660 DUP4_ARG2(__lsx_vdp2_h_bu_b, vec0, filt0, vec2, filt0, vec4, filt0,
2713 __m128i vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7, vec8, vec9;
2740 mask0, src1, src1, mask1, vec0, vec1, vec2, vec3);
2750 DUP4_ARG2(__lsx_vdp2_h_bu_b, vec0, filt0, vec2, filt0, vec4, filt0, vec6,
2830 __m128i vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;
2855 vec0, vec1);
2861 DUP2_ARG2(__lsx_vdp2_h_bu_b, vec0, filt0, vec2, filt0, dst0, dst1);
2878 vec0, vec1);
2886 DUP4_ARG2(__lsx_vdp2_h_bu_b, vec0, filt0, vec2, filt0, vec4, filt0,
2976 __m128i vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;
3004 DUP2_ARG3(__lsx_vshuf_b, src0, src0, mask0, src0, src0, mask1, vec0, vec1);
3008 DUP2_ARG2(__lsx_vdp2_h_bu_b, vec0, filt0, vec2, filt0, dst0, dst1);
3025 vec0, vec1);
3033 DUP4_ARG2(__lsx_vdp2_h_bu_b, vec0, filt0, vec2, filt0, vec4, filt0,
3082 DUP2_ARG3(__lsx_vshuf_b, src1, src0, mask2, src1, src0, mask3, vec0, vec1);
3084 DUP2_ARG2(__lsx_vdp2_h_bu_b, vec0, filt0, vec2, filt0, dst10, dst21);
3101 vec0, vec1);
3109 DUP4_ARG2(__lsx_vdp2_h_bu_b, vec0, filt0, vec2, filt0, vec4, filt0,
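Every match above is a fragment of the same idiom: vec0..vecN are filled by __lsx_vshuf_b byte shuffles (usually issued in batches through the DUP2_ARG3/DUP4_ARG3 helpers), __lsx_vdp2_h_bu_b opens a byte-pair dot product against the first tap pair, and __lsx_vdp2add_h_bu_b folds in the remaining tap pairs. For reference, below is a minimal self-contained sketch of that idiom, not any of the routines listed above: hz_8tap_row8, mask_arr, the argument layout, and the simplified DUP4_* macros are illustrative assumptions (the real helpers live in FFmpeg's libavutil/loongarch/loongson_intrinsics.h), while the __lsx_* intrinsics are the actual LSX ones the listing shows. Build on LoongArch64 with -mlsx.

    #include <stdint.h>
    #include <lsxintrin.h>

    /* Simplified stand-ins for the DUP4_ARG2/DUP4_ARG3 helpers from
     * libavutil/loongarch/loongson_intrinsics.h: apply one intrinsic
     * four times and fan the results out to four variables. */
    #define DUP4_ARG2(ins, i0, i1, i2, i3, i4, i5, i6, i7, o0, o1, o2, o3) \
        do {                                                               \
            o0 = ins(i0, i1); o1 = ins(i2, i3);                            \
            o2 = ins(i4, i5); o3 = ins(i6, i7);                            \
        } while (0)
    #define DUP4_ARG3(ins, i0, i1, i2, i3, i4, i5, i6, i7, i8,             \
                      i9, i10, i11, o0, o1, o2, o3)                        \
        do {                                                               \
            o0 = ins(i0, i1, i2); o1 = ins(i3, i4, i5);                    \
            o2 = ins(i6, i7, i8); o3 = ins(i9, i10, i11);                  \
        } while (0)

    /* Byte-pair gather pattern for the first two filter taps; the pairs
     * for taps 2..7 are the same pattern shifted by 2, 4 and 6 bytes
     * (vaddi_bu below). */
    static const uint8_t mask_arr[16] = {
        0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8
    };

    /* Hypothetical routine: 8-tap horizontal filter of one 8-pixel row.
     * src points at the pixel under the first tap, filter holds eight
     * int8 taps, dst receives eight unrounded 16-bit sums. */
    static void hz_8tap_row8(const uint8_t *src, const int8_t *filter,
                             int16_t *dst)
    {
        __m128i src0, filt0, filt1, filt2, filt3;
        __m128i mask0, mask1, mask2, mask3;
        __m128i vec0, vec1, vec2, vec3, dst0;

        /* Replicate each pair of taps across all eight halfword lanes. */
        DUP4_ARG2(__lsx_vldrepl_h, filter, 0, filter, 2, filter, 4,
                  filter, 6, filt0, filt1, filt2, filt3);

        mask0 = __lsx_vld(mask_arr, 0);
        mask1 = __lsx_vaddi_bu(mask0, 2);
        mask2 = __lsx_vaddi_bu(mask0, 4);
        mask3 = __lsx_vaddi_bu(mask0, 6);

        src0 = __lsx_vld(src, 0);
        /* vec0..vec3: for each of the 8 outputs, the byte pair that tap
         * pair k (k = 0..3) multiplies, gathered by the shuffles. */
        DUP4_ARG3(__lsx_vshuf_b, src0, src0, mask0, src0, src0, mask1,
                  src0, src0, mask2, src0, src0, mask3,
                  vec0, vec1, vec2, vec3);
        /* Taps 0-1 open the dot product; taps 2-7 accumulate into it. */
        dst0 = __lsx_vdp2_h_bu_b(vec0, filt0);
        dst0 = __lsx_vdp2add_h_bu_b(dst0, vec1, filt1);
        dst0 = __lsx_vdp2add_h_bu_b(dst0, vec2, filt2);
        dst0 = __lsx_vdp2add_h_bu_b(dst0, vec3, filt3);
        __lsx_vst(dst0, dst, 0);
    }

The listed routines run this same chain over several source vectors at once (dst0..dst7), and their two-register shuffle forms (e.g. "src1, src0, mask4" at lines 1059-1071) apparently splice a pixel window that straddles two loads; the per-output data flow is the one sketched here.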