Lines Matching refs:mask2

677 v16i8 mask1, mask2, mask3;
690 mask2 = mask0 + 4;
712 VSHF_B4_SB(src0, src1, mask0, mask1, mask2, mask3,
716 VSHF_B4_SB(src2, src3, mask0, mask1, mask2, mask3,
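
The matches above follow the usual FFmpeg MSA idiom for the 8-tap (qpel) horizontal filters: mask0 holds byte-pair selectors for filter taps 0-1, and each derived mask shifts every selector by two taps, so mask2 = mask0 + 4 picks the pairs feeding taps 4-5. Below is a minimal scalar sketch of that derivation; the starting values of mask0 (the 8-width row of ff_hevc_mask_arr) are an assumption here, and the real code keeps all four masks in v16i8 registers and applies them with VSHF_B4_SB.

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        /* assumed 8-width row of ff_hevc_mask_arr: adjacent byte pairs */
        uint8_t mask0[16] = { 0, 1, 1, 2, 2, 3, 3, 4,
                              4, 5, 5, 6, 6, 7, 7, 8 };
        uint8_t mask1[16], mask2[16], mask3[16];

        for (int i = 0; i < 16; i++) {
            mask1[i] = mask0[i] + 2;  /* selectors for taps 2-3 */
            mask2[i] = mask0[i] + 4;  /* taps 4-5: the "mask0 + 4" above */
            mask3[i] = mask0[i] + 6;  /* taps 6-7 */
        }
        for (int i = 0; i < 16; i++)
            printf("%d ", mask2[i]);  /* 4 5 5 6 6 7 7 8 8 9 ... */
        printf("\n");
        return 0;
    }
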
749 v16i8 mask1, mask2, mask3;
773 mask2 = mask0 + 4;
783 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3,
787 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3,
791 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3,
795 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3,
828 v16i8 mask0, mask1, mask2, mask3, mask4, mask5, mask6, mask7;
851 mask2 = mask0 + 4;
862 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3, vec0, vec1, vec2,
866 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3, vec0, vec1, vec2,
870 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3, vec0, vec1, vec2,
874 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3, vec0, vec1, vec2,
925 v16i8 mask1, mask2, mask3;
948 mask2 = mask0 + 4;
962 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3,
966 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3,
970 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3,
974 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3,
1010 v16i8 mask1, mask2, mask3, mask4, mask5, mask6, mask7;
1034 mask2 = mask0 + 4;
1049 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3,
1057 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3,
1088 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3, vec0, vec1, vec2, vec3);
1094 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3, vec0, vec1, vec2, vec3);
1132 v16i8 mask1, mask2, mask3, mask4, mask5, mask6, mask7;
1154 mask2 = mask0 + 4;
1170 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3,
1178 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3,
1182 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3,
1218 v16i8 mask1, mask2, mask3, mask4, mask5, mask6, mask7;
1240 mask2 = mask0 + 4;
1256 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3,
1264 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3,
1268 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3,
1283 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3,
1287 VSHF_B4_SB(src4, src4, mask0, mask1, mask2, mask3,
1325 v16i8 mask1, mask2, mask3, mask4, mask5, mask6, mask7;
1347 mask2 = mask0 + 4;
1367 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3,
1375 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3,
1379 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3,
1945 v16i8 mask1, mask2, mask3;
1967 mask2 = mask0 + 4;
1986 VSHF_B4_SB(src0, src3, mask0, mask1, mask2, mask3, vec0, vec1, vec2, vec3);
1987 VSHF_B4_SB(src1, src4, mask0, mask1, mask2, mask3, vec4, vec5, vec6, vec7);
1988 VSHF_B4_SB(src2, src5, mask0, mask1, mask2, mask3,
1990 VSHF_B4_SB(src3, src6, mask0, mask1, mask2, mask3,
2020 VSHF_B4_SB(src7, src9, mask0, mask1, mask2, mask3,
2022 VSHF_B4_SB(src8, src10, mask0, mask1, mask2, mask3,
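
In the block at lines 1945-2022 each VSHF_B4_SB call pairs two different source rows (src0 with src3, src7 with src9, and so on). MSA's vshf.b selects bytes from the concatenation of two input vectors, so selector values of 16 and above reach into the second row and a single shuffle can gather pixels across both. A simplified scalar model of that selection rule follows; the operand order of the real macro and the hardware's handling of high selector bits are glossed over here.

    #include <stdint.h>

    /* One vshf.b-style shuffle over two 16-byte sources: selectors
     * 0..15 read from a, 16..31 from b. Illustrative sketch only. */
    static void shuffle2(const uint8_t mask[16], const uint8_t a[16],
                         const uint8_t b[16], uint8_t out[16])
    {
        for (int i = 0; i < 16; i++) {
            uint8_t s = mask[i] & 31;
            out[i] = (s < 16) ? a[s] : b[s - 16];
        }
    }
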
2094 v16i8 mask1, mask2, mask3;
2129 mask2 = mask0 + 4;
2144 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3,
2146 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3,
2148 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3,
2150 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3,
2163 VSHF_B4_SB(src4, src4, mask0, mask1, mask2, mask3,
2165 VSHF_B4_SB(src5, src5, mask0, mask1, mask2, mask3,
2167 VSHF_B4_SB(src6, src6, mask0, mask1, mask2, mask3,
2192 VSHF_B4_SB(src7, src7, mask0, mask1, mask2, mask3,
2207 VSHF_B4_SB(src8, src8, mask0, mask1, mask2, mask3,
2296 v16i8 mask0, mask1, mask2, mask3, mask4, mask5, mask6, mask7;
2330 mask2 = mask0 + 4;
2341 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3, vec0, vec1, vec2, vec3);
2342 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3, vec4, vec5, vec6, vec7);
2343 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3, vec8, vec9, vec10,
2345 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3, vec12, vec13, vec14,
2355 VSHF_B4_SB(src4, src4, mask0, mask1, mask2, mask3, vec0, vec1, vec2, vec3);
2356 VSHF_B4_SB(src5, src5, mask0, mask1, mask2, mask3, vec4, vec5, vec6, vec7);
2357 VSHF_B4_SB(src6, src6, mask0, mask1, mask2, mask3, vec8, vec9, vec10,
2381 VSHF_B4_SB(src7, src7, mask0, mask1, mask2, mask3, vec0, vec1, vec2,
2394 VSHF_B4_SB(src8, src8, mask0, mask1, mask2, mask3, vec0, vec1, vec2,
3183 v16i8 mask2 = {
3209 mask3 = mask2 + 2;
3228 VSHF_B2_SB(src0, src1, src0, src1, mask2, mask3, vec0, vec1);
3230 VSHF_B2_SB(src2, src3, src2, src3, mask2, mask3, vec0, vec1);
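
The 4-tap (epel) variants at lines 3183-3230 use the same scheme with half as many shuffles: VSHF_B2_SB applies two masks per call, and mask3 = mask2 + 2 advances every selector by one byte pair to cover taps 2-3. A scalar sketch of that per-byte add (the v16i8 "+ 2" in the source is exactly this, lane by lane):

    #include <stdint.h>

    /* mask3 = mask2 + 2, modeled lane by lane: each selector moves
     * forward by one pixel pair, yielding the taps-2/3 byte pairs. */
    static void derive_mask3(const uint8_t mask2[16], uint8_t mask3[16])
    {
        for (int i = 0; i < 16; i++)
            mask3[i] = mask2[i] + 2;
    }
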
3357 v16i8 mask1, mask2, mask3;
3381 mask2 = mask0 + 8;
3396 VSHF_B2_SB(src0, src1, src0, src1, mask2, mask3, vec0, vec1);
3400 VSHF_B2_SB(src2, src3, src2, src3, mask2, mask3, vec0, vec1);
3444 v16i8 mask1, mask2, mask3;
3468 mask2 = mask0 + 8;
3481 VSHF_B2_SB(src0, src1, src0, src1, mask2, mask3, vec0, vec1);
5618 v16i8 mask0, mask1, mask2, mask3;
5743 mask2 = LD_SB(ff_hevc_mask_arr + 16);
5744 mask3 = mask2 + 2;
5749 VSHF_B2_SB(src0, src1, src0, src1, mask2, mask3, vec0, vec1);
5750 VSHF_B2_SB(src1, src2, src1, src2, mask2, mask3, vec2, vec3);
5763 VSHF_B2_SB(src3, src7, src3, src7, mask2, mask3, vec0, vec1);
5764 VSHF_B2_SB(src4, src8, src4, src8, mask2, mask3, vec2, vec3);
5765 VSHF_B2_SB(src5, src9, src5, src9, mask2, mask3, vec4, vec5);
5766 VSHF_B2_SB(src6, src10, src6, src10, mask2, mask3, vec6, vec7);
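
The final block (lines 5618-5766) loads its base selectors from a table instead of receiving them as arguments: LD_SB(ff_hevc_mask_arr + 16) fetches the second 16-byte row, whose entries of 16 and above intentionally cross into a second source vector so two 4-pixel rows can be filtered from one register pair (see the two-source model above). The byte values below are an assumption based on the common FFmpeg layout, shown under a local name:

    #include <stdint.h>

    /* Assumed layout of ff_hevc_mask_arr (local copy for illustration):
     * row 0 serves widths >= 8; row 1 serves width 4, where selectors
     * >= 16 index into the second source vector. */
    static const uint8_t hevc_mask_arr[16 * 2] = {
        0, 1, 1, 2, 2, 3, 3, 4,  4,  5,  5,  6,  6,  7,  7,  8,
        0, 1, 1, 2, 2, 3, 3, 4, 16, 17, 17, 18, 18, 19, 19, 20
    };

    /* Scalar analogue of mask2 = LD_SB(ff_hevc_mask_arr + 16) */
    static const uint8_t *load_mask2(void)
    {
        return hevc_mask_arr + 16;
    }
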