Lines Matching refs:mask1. Each entry below gives the source line number followed by the line that references mask1; a short sketch of the shared shuffle idiom is given after the listing.
41 __m128i mask0, mask1, mask2, mask3, out1, out2;
54 DUP2_ARG2(__lsx_vaddi_bu, mask0, 2, mask0, 4, mask1, mask2);
76 DUP2_ARG3(__lsx_vshuf_b, src0, src0, mask1, src1, src1, mask1,
78 DUP2_ARG3(__lsx_vshuf_b, src2, src2, mask1, src3, src3, mask1,
106 DUP2_ARG3(__lsx_vshuf_b, src4, src4, mask1, src5, src5, mask1,
108 DUP2_ARG3(__lsx_vshuf_b, src6, src6, mask1, src7, src7, mask1,
354 __m128i mask1, mask2, mask3;
375 DUP2_ARG2(__lsx_vaddi_bu, mask0, 2, mask0, 4, mask1, mask2);
393 DUP4_ARG3(__lsx_vshuf_b, src0, src0, mask0, src0, src0, mask1, src0,
395 DUP4_ARG3(__lsx_vshuf_b, src1, src1, mask0, src1, src1, mask1, src1,
397 DUP4_ARG3(__lsx_vshuf_b, src2, src2, mask0, src2, src2, mask1, src2,
399 DUP4_ARG3(__lsx_vshuf_b, src3, src3, mask0, src3, src3, mask1, src3,
410 DUP4_ARG3(__lsx_vshuf_b, src4, src4, mask0, src4, src4, mask1, src4,
412 DUP4_ARG3(__lsx_vshuf_b, src5, src5, mask0, src5, src5, mask1, src5,
414 DUP4_ARG3(__lsx_vshuf_b, src6, src6, mask0, src6, src6, mask1, src6,
435 DUP4_ARG3(__lsx_vshuf_b, src7, src7, mask0, src7, src7, mask1, src7,
452 DUP4_ARG3(__lsx_vshuf_b, src8, src8, mask0, src8, src8, mask1, src8,
760 __m128i mask1;
775 mask1 = __lsx_vaddi_bu(mask0, 2);
780 DUP4_ARG3(__lsx_vshuf_b, src0, src0, mask0, src0, src0, mask1, src1, src1,
781 mask0, src1, src1, mask1, vec0, vec1, vec2, vec3);
782 DUP4_ARG3(__lsx_vshuf_b, src2, src2, mask0, src2, src2, mask1, src3, src3,
783 mask0, src3, src3, mask1, vec4, vec5, vec6, vec7);
784 DUP2_ARG3(__lsx_vshuf_b, src4, src4, mask0, src4, src4, mask1, vec8, vec9);
821 __m128i src0, src1, src2, src3, src4, src5, src6, mask0, mask1;
837 mask1 = __lsx_vaddi_bu(mask0, 2);
847 DUP2_ARG3(__lsx_vshuf_b, src0, src0, mask0, src0, src0, mask1,
849 DUP2_ARG3(__lsx_vshuf_b, src1, src1, mask0, src1, src1, mask1,
851 DUP2_ARG3(__lsx_vshuf_b, src2, src2, mask0, src2, src2, mask1,
863 DUP2_ARG3(__lsx_vshuf_b, src3, src3, mask0, src3, src3, mask1,
865 DUP2_ARG3(__lsx_vshuf_b, src4, src4, mask0, src4, src4, mask1,
867 DUP2_ARG3(__lsx_vshuf_b, src5, src5, mask0, src5, src5, mask1,
869 DUP2_ARG3(__lsx_vshuf_b, src6, src6, mask0, src6, src6, mask1,
920 __m128i mask1;
939 mask1 = __lsx_vaddi_bu(mask0, 2);
948 DUP4_ARG3(__lsx_vshuf_b, src0, src0, mask0, src0, src0, mask1, src1, src1,
949 mask0, src1, src1, mask1, vec0, vec1, vec2, vec3);
950 DUP4_ARG3(__lsx_vshuf_b, src2, src2, mask0, src2, src2, mask1, src3, src3,
951 mask0, src3, src3, mask1, vec4, vec5, vec6, vec7);
952 DUP4_ARG3(__lsx_vshuf_b, src4, src4, mask0, src4, src4, mask1, src5, src5,
953 mask0, src5, src5, mask1, vec8, vec9, vec10, vec11);
954 DUP4_ARG3(__lsx_vshuf_b, src6, src6, mask0, src6, src6, mask1, src7, src7,
955 mask0, src7, src7, mask1, vec12, vec13, vec14, vec15);
956 DUP2_ARG3(__lsx_vshuf_b, src8, src8, mask0, src8, src8, mask1, vec16, vec17);
1032 __m128i mask1;
1047 mask1 = __lsx_vaddi_bu(mask0, 2);
1058 DUP2_ARG3(__lsx_vshuf_b, src0, src0, mask0, src0, src0, mask1,
1060 DUP2_ARG3(__lsx_vshuf_b, src1, src1, mask0, src1, src1, mask1,
1062 DUP2_ARG3(__lsx_vshuf_b, src2, src2, mask0, src2, src2, mask1,
1081 DUP4_ARG3(__lsx_vshuf_b, src3, src3, mask0, src3, src3, mask1, src4,
1082 src4, mask0, src4, src4, mask1, vec0, vec1, vec2, vec3);
1083 DUP4_ARG3(__lsx_vshuf_b, src5, src5, mask0, src5, src5, mask1, src6,
1084 src6, mask0, src6, src6, mask1, vec4, vec5, vec6, vec7);
1164 __m128i mask0, mask1, mask2, mask3;
1182 mask1 = __lsx_vaddi_bu(mask0, 2);
1192 DUP2_ARG3(__lsx_vshuf_b, src0, src0, mask0, src0, src0, mask1, vec0, vec1);
1193 DUP2_ARG3(__lsx_vshuf_b, src1, src1, mask0, src1, src1, mask1, vec2, vec3);
1194 DUP2_ARG3(__lsx_vshuf_b, src2, src2, mask0, src2, src2, mask1, vec4, vec5);
1212 DUP4_ARG3(__lsx_vshuf_b, src3, src3, mask0, src3, src3, mask1, src4,
1213 src4, mask0, src4, src4, mask1, vec0, vec1, vec2, vec3);
1214 DUP4_ARG3(__lsx_vshuf_b, src5, src5, mask0, src5, src5, mask1, src6,
1215 src6, mask0, src6, src6, mask1, vec4, vec5, vec6, vec7);
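
Every match above follows the same idiom: a base byte-shuffle mask (mask0) is loaded once, the remaining masks are derived from it with __lsx_vaddi_bu (mask1 = mask0 + 2, mask2 = mask0 + 4, and so on), and __lsx_vshuf_b then uses those masks to gather the shifted byte windows that the horizontal filter consumes, with the DUP2_*/DUP4_* helpers merely batching two or four such intrinsic calls. The sketch below is a minimal, self-contained illustration of that idiom under stated assumptions: a LoongArch LSX toolchain with <lsxintrin.h>, local stand-in definitions for the DUP2_* macros, and an illustrative base_mask table and shuffle_windows function that are not taken from the file indexed above.

/*
 * Minimal sketch of the mask0/mask1/mask2/mask3 derivation and
 * __lsx_vshuf_b gather pattern seen in the listing.
 * Assumptions: LoongArch LSX target, <lsxintrin.h> available; base_mask,
 * shuffle_windows and the DUP2_* definitions below are illustrative only.
 */
#include <lsxintrin.h>
#include <stdint.h>

/* Local stand-ins for the DUP_* batching helpers (assumed equivalent
 * to the project's own definitions). */
#define DUP2_ARG2(_INS, _IN0, _IN1, _IN2, _IN3, _OUT0, _OUT1) \
  do {                                                        \
    _OUT0 = _INS(_IN0, _IN1);                                 \
    _OUT1 = _INS(_IN2, _IN3);                                 \
  } while (0)
#define DUP2_ARG3(_INS, _IN0, _IN1, _IN2, _IN3, _IN4, _IN5, _OUT0, _OUT1) \
  do {                                                                    \
    _OUT0 = _INS(_IN0, _IN1, _IN2);                                       \
    _OUT1 = _INS(_IN3, _IN4, _IN5);                                       \
  } while (0)

/* Hypothetical base mask: overlapping byte pairs 0..8, i.e. the tap window
 * at offset 0 for a pairwise horizontal filter. */
static uint8_t base_mask[16] = { 0, 1, 1, 2, 2, 3, 3, 4,
                                 4, 5, 5, 6, 6, 7, 7, 8 };

/* Gathers four shifted byte windows from one 16-byte source row.
 * dst must have room for 4 * 16 bytes. */
void shuffle_windows(const uint8_t *src, uint8_t *dst) {
  __m128i mask0, mask1, mask2, mask3;
  __m128i src0, vec0, vec1, vec2, vec3;

  mask0 = __lsx_vld(base_mask, 0);
  /* mask1 = mask0 + 2, mask2 = mask0 + 4: same tap pattern, shifted. */
  DUP2_ARG2(__lsx_vaddi_bu, mask0, 2, mask0, 4, mask1, mask2);
  mask3 = __lsx_vaddi_bu(mask0, 6);

  src0 = __lsx_vld((void *)src, 0);
  /* __lsx_vshuf_b(src0, src0, maskN) picks src0 bytes at the indices held
   * in maskN, producing the window the filter taps would multiply. */
  DUP2_ARG3(__lsx_vshuf_b, src0, src0, mask0, src0, src0, mask1, vec0, vec1);
  DUP2_ARG3(__lsx_vshuf_b, src0, src0, mask2, src0, src0, mask3, vec2, vec3);

  __lsx_vst(vec0, dst, 0);
  __lsx_vst(vec1, dst, 16);
  __lsx_vst(vec2, dst, 32);
  __lsx_vst(vec3, dst, 48);
}

In the indexed file the gathered vectors feed dot-product accumulation rather than stores; the point of the sketch is only the mask derivation and the src, src, mask argument shape that every DUP2_ARG3/DUP4_ARG3 call in the listing repeats.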