Lines Matching refs:mask2
541 v16i8 mask1, mask2, mask3;
555 mask2 = mask0 + 4;
584 VSHF_B2_SB(src0, src1, src2, src3, mask2, mask2, vec0, vec1);
585 VSHF_B2_SB(src4, src5, src6, src7, mask2, mask2, vec2, vec3);
614 v16i8 mask1, mask2, mask3;
630 mask2 = mask0 + 4;
652 VSHF_B2_SB(src0, src0, src1, src1, mask2, mask2, vec0, vec1);
653 VSHF_B2_SB(src2, src2, src3, src3, mask2, mask2, vec2, vec3);
685 v16i8 mask0, mask1, mask2, mask3, mask4, mask5, mask6, mask7;
699 mask2 = mask0 + 4;
729 VSHF_B3_SB(src0, src0, src1, src3, src2, src2, mask2, mask6, mask2,
770 v16i8 mask1, mask2, mask3;
785 mask2 = mask0 + 4;
811 VSHF_B2_SB(src0, src0, src1, src1, mask2, mask2, vec0, vec1);
812 VSHF_B2_SB(src2, src2, src3, src3, mask2, mask2, vec2, vec3);
842 v16i8 mask1, mask2, mask3, mask4, mask5, mask6, mask7;
857 mask2 = mask0 + 4;
880 VSHF_B2_SB(src0, src0, src0, src1, mask2, mask6, vec2, vec3);
883 VSHF_B2_SB(src1, src1, src0, src0, mask2, mask3, vec0, vec1);
913 v16i8 mask1, mask2, mask3, mask4, mask5, mask6, mask7;
928 mask2 = mask0 + 4;
955 VSHF_B2_SB(src0, src0, src0, src1, mask2, mask6, vec0, vec1);
956 VSHF_B2_SB(src1, src1, src2, src2, mask2, mask2, vec2, vec3);
986 v16i8 mask1, mask2, mask3, mask4, mask5, mask6, mask7;
1002 mask2 = mask0 + 4;
1029 VSHF_B2_SB(src0, src0, src0, src1, mask2, mask6, vec0, vec1);
1030 VSHF_B2_SB(src1, src1, src1, src2, mask2, mask6, vec2, vec3);
1052 VSHF_B2_SB(src2, src2, src3, src3, mask2, mask2, vec0, vec1);
1078 v16i8 mask1, mask2, mask3, mask4, mask5, mask6, mask7;
1093 mask2 = mask0 + 4;
1121 VSHF_B2_SB(src0, src0, src0, src1, mask2, mask6, vec0, vec1);
1122 VSHF_B2_SB(src1, src1, src2, src2, mask2, mask2, vec2, vec3);
1156 VSHF_B2_SB(src0, src0, src0, src1, mask2, mask6, vec0, vec1);
1157 VSHF_B2_SB(src1, src1, src2, src2, mask2, mask2, vec2, vec3);
1636 v16i8 mask1, mask2, mask3;
1656 mask2 = mask0 + 4;
1667 VSHF_B4_SB(src0, src3, mask0, mask1, mask2, mask3, vec0, vec1, vec2, vec3);
1668 VSHF_B4_SB(src1, src4, mask0, mask1, mask2, mask3, vec4, vec5, vec6, vec7);
1669 VSHF_B4_SB(src2, src5, mask0, mask1, mask2, mask3,
1671 VSHF_B4_SB(src3, src6, mask0, mask1, mask2, mask3,
1701 VSHF_B4_SB(src7, src9, mask0, mask1, mask2, mask3,
1703 VSHF_B4_SB(src8, src10, mask0, mask1, mask2, mask3,
1765 v16i8 mask1, mask2, mask3;
1787 mask2 = mask0 + 4;
1801 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3,
1803 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3,
1805 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3,
1807 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3,
1818 VSHF_B4_SB(src4, src4, mask0, mask1, mask2, mask3,
1820 VSHF_B4_SB(src5, src5, mask0, mask1, mask2, mask3,
1822 VSHF_B4_SB(src6, src6, mask0, mask1, mask2, mask3,
1839 VSHF_B4_SB(src7, src7, mask0, mask1, mask2, mask3,
1908 v16i8 mask0, mask1, mask2, mask3, mask4, mask5, mask6, mask7;
1935 mask2 = mask0 + 4;
1948 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3, vec0, vec1, vec2,
1950 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3, vec4, vec5, vec6,
1952 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3, vec8, vec9, vec10,
1954 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3, vec12, vec13, vec14,
1964 VSHF_B4_SB(src4, src4, mask0, mask1, mask2, mask3, vec0, vec1, vec2,
1966 VSHF_B4_SB(src5, src5, mask0, mask1, mask2, mask3, vec4, vec5, vec6,
1968 VSHF_B4_SB(src6, src6, mask0, mask1, mask2, mask3, vec8, vec9, vec10,
1985 VSHF_B4_SB(src7, src7, mask0, mask1, mask2, mask3, vec0, vec1, vec2,
2617 v16i8 mask2 = {
2634 mask3 = mask2 + 2;
2654 VSHF_B2_SB(src0, src1, src2, src3, mask2, mask2, vec4, vec5);
2753 v16i8 mask1, mask2, mask3;
2767 mask2 = mask0 + 8;
2786 VSHF_B2_SB(src0, src0, src0, src1, mask0, mask2, vec0, vec1);
2787 VSHF_B2_SB(src2, src2, src2, src3, mask0, mask2, vec2, vec3);
2799 VSHF_B2_SB(src4, src4, src4, src5, mask0, mask2, vec0, vec1);
2800 VSHF_B2_SB(src6, src6, src6, src7, mask0, mask2, vec2, vec3);
2857 v16i8 mask1, mask2, mask3;
2871 mask2 = mask0 + 8;
2886 VSHF_B2_SB(src0, src0, src0, src1, mask0, mask2, vec0, vec1);
4675 v16i8 mask0, mask1, mask2, mask3;
4779 mask2 = LD_SB(ff_hevc_mask_arr + 16);
4780 mask3 = mask2 + 2;
4785 VSHF_B2_SB(src0, src1, src0, src1, mask2, mask3, vec0, vec1);
4786 VSHF_B2_SB(src1, src2, src1, src2, mask2, mask3, vec2, vec3);
4799 VSHF_B2_SB(src3, src7, src3, src7, mask2, mask3, vec0, vec1);
4800 VSHF_B2_SB(src4, src8, src4, src8, mask2, mask3, vec2, vec3);
4801 VSHF_B2_SB(src5, src9, src5, src9, mask2, mask3, vec4, vec5);
4802 VSHF_B2_SB(src6, src10, src6, src10, mask2, mask3, vec6, vec7);
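
Note on the recurring pattern above: mask0 is loaded from ff_hevc_mask_arr and the remaining shuffle controls are derived by a lane-wise add (mask2 = mask0 + 4, or mask0 + 8 for the wider layout; mask3 = mask2 + 2), so each mask gathers the source bytes for the next group of filter taps before the VSHF_B* macros feed them to the dot-product step. The following is a minimal scalar sketch of that byte gather, not the FFmpeg MSA code itself: the concrete mask0 values, the operand concatenation order of vshf.b, and the helper vshf_b_model are assumptions for illustration only.

/* Scalar model of the vshf.b byte gather driven by mask0/mask2.
 * Assumptions: mask0 = first 16 bytes of ff_hevc_mask_arr (pairwise
 * offsets 0..8), and indices 0..15 select from the second operand,
 * 16..31 from the first. The real code uses v16i8 vectors and the
 * MSA vshf.b instruction via the VSHF_B2_SB/VSHF_B4_SB macros. */
#include <stdio.h>
#include <string.h>

static void vshf_b_model(const signed char mask[16],
                         const unsigned char in_hi[16],
                         const unsigned char in_lo[16],
                         unsigned char out[16])
{
    unsigned char pool[32];
    memcpy(pool, in_lo, 16);        /* elements  0..15 */
    memcpy(pool + 16, in_hi, 16);   /* elements 16..31 */
    for (int i = 0; i < 16; i++)
        out[i] = pool[mask[i] & 31];
}

int main(void)
{
    /* assumed mask0 contents for illustration */
    signed char mask0[16] = { 0, 1, 1, 2, 2, 3, 3, 4,
                              4, 5, 5, 6, 6, 7, 7, 8 };
    signed char mask2[16];
    unsigned char src[16], out[16];

    for (int i = 0; i < 16; i++) {
        mask2[i] = (signed char)(mask0[i] + 4); /* "mask2 = mask0 + 4"      */
        src[i]   = (unsigned char)(0x10 + i);   /* dummy source row          */
    }

    /* corresponds to VSHF_B2_SB(src, src, ..., mask2, mask2, ...) when both
     * operands are the same source vector, as in most of the listed calls */
    vshf_b_model(mask2, src, src, out);

    for (int i = 0; i < 16; i++)
        printf("%02x ", out[i]);    /* bytes 4,5, 5,6, ... 11,12 of src */
    printf("\n");
    return 0;
}

Because mask2 is mask0 shifted by 4 in every lane, the gathered pairs start four bytes further into the row, i.e. they supply taps 4 and 5 of the 8-tap HEVC luma filter while mask0 supplies taps 0 and 1.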