Lines Matching defs:mask0 (each hit carries its source-file line number; only lines that contain mask0 are listed, so calls that continue onto the next source line appear truncated)
603 v16i8 mask0, mask1, mask2, mask3, vec11, vec12, vec13, vec14, vec15;
627 mask0 = LD_SB(&ff_hevc_mask_arr[16]);
628 mask1 = mask0 + 2;
629 mask2 = mask0 + 4;
630 mask3 = mask0 + 6;
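
These hits look like FFmpeg's MIPS MSA HEVC motion-compensation code (libavcodec/mips/hevc*_msa.c). There, v16i8 is a GCC vector type (16 lanes of signed char), so "mask1 = mask0 + 2" is a lane-wise add: every shuffle index in mask0 is shifted by two bytes, sliding the sampling window two source pixels to the right. A minimal stand-alone sketch of that derivation (the typedef mirrors what msa.h provides; the exact table contents behind LD_SB are not visible in this listing):

    /* Lane-wise derivation of mask1..mask3 from mask0; "+ n" on a
     * GCC vector type adds n to every byte lane. */
    typedef signed char v16i8 __attribute__((vector_size(16)));

    static void derive_masks(v16i8 mask0, v16i8 *mask1,
                             v16i8 *mask2, v16i8 *mask3)
    {
        *mask1 = mask0 + 2;   /* window shifted right by 2 bytes */
        *mask2 = mask0 + 4;
        *mask3 = mask0 + 6;
    }
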
637 VSHF_B4_SB(src0, src1, mask0, mask1, mask2, mask3,
639 VSHF_B4_SB(src2, src3, mask0, mask1, mask2, mask3,
641 VSHF_B4_SB(src4, src5, mask0, mask1, mask2, mask3,
643 VSHF_B4_SB(src6, src7, mask0, mask1, mask2, mask3,
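
VSHF_B4_SB is one of FFmpeg's generic MSA macros (libavutil/mips/generic_macros_msa.h); it issues four vshf.b shuffles of the same source pair, one per mask, so a single call yields the four interleaved tap-pair vectors the 8-tap filter consumes. A scalar model of the gather, assuming the selection rule implied by the calls above (index below 16 reads the first source, 16 and above the second):

    #include <stdint.h>

    /* One vshf.b: mask[i] selects a byte from the 32-byte pool
     * formed by in0 (indices 0..15) and in1 (indices 16..31). */
    static void vshf_model(const uint8_t in0[16], const uint8_t in1[16],
                           const uint8_t mask[16], uint8_t out[16])
    {
        for (int i = 0; i < 16; i++)
            out[i] = mask[i] < 16 ? in0[mask[i]] : in1[mask[i] - 16];
    }

    /* VSHF_B4_SB(src0, src1, m0, m1, m2, m3, v0, v1, v2, v3) then
     * amounts to four such gathers over the same source pair: */
    static void vshf_b4_model(const uint8_t src0[16], const uint8_t src1[16],
                              const uint8_t masks[4][16], uint8_t out[4][16])
    {
        for (int m = 0; m < 4; m++)
            vshf_model(src0, src1, masks[m], out[m]);
    }
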
678 v16i8 mask0, mask1, mask2, mask3;
705 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
706 mask1 = mask0 + 2;
707 mask2 = mask0 + 4;
708 mask3 = mask0 + 6;
715 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3,
717 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3,
719 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3,
721 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3,
756 v16i8 mask0, mask1, mask2, mask3, mask4, mask5, mask6, mask7;
783 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
784 mask1 = mask0 + 2;
785 mask2 = mask0 + 4;
786 mask3 = mask0 + 6;
798 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3,
800 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3,
802 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3,
804 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3,
850 v16i8 mask0, mask1, mask2, mask3;
876 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
877 mask1 = mask0 + 2;
878 mask2 = mask0 + 4;
879 mask3 = mask0 + 6;
887 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3,
889 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3,
891 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3,
893 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3,
928 v16i8 mask0, mask1, mask2, mask3, mask4, mask5, mask6, mask7;
953 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
954 mask1 = mask0 + 2;
955 mask2 = mask0 + 4;
956 mask3 = mask0 + 6;
957 mask4 = mask0 + 8;
958 mask5 = mask0 + 10;
959 mask6 = mask0 + 12;
960 mask7 = mask0 + 14;
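
The second quartet continues the same +2 progression eight bytes further right: over the presumed 8-wide index row 0,1,1,2,...,7,8, mask4 = mask0 + 8 gives 8,9,9,10,...,15,16 and mask7 = mask0 + 14 gives 14,15,15,16,...,21,22. Lanes above 15 select from the second source register of the shuffle, which is presumably how windows that straddle two 16-byte loads are gathered without extra alignment work.
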
968 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3,
972 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3,
974 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3,
987 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3,
1021 v16i8 mask0, mask1, mask2, mask3;
1047 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
1048 mask1 = mask0 + 2;
1049 mask2 = mask0 + 4;
1050 mask3 = mask0 + 6;
1059 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3,
1061 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3,
1063 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3,
1065 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3,
1076 VSHF_B4_SB(src4, src4, mask0, mask1, mask2, mask3,
1078 VSHF_B4_SB(src5, src5, mask0, mask1, mask2, mask3,
1080 VSHF_B4_SB(src6, src6, mask0, mask1, mask2, mask3,
1082 VSHF_B4_SB(src7, src7, mask0, mask1, mask2, mask3,
1123 v16i8 mask0, mask1, mask2, mask3, mask4, mask5, mask6, mask7;
1149 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
1150 mask1 = mask0 + 2;
1151 mask2 = mask0 + 4;
1152 mask3 = mask0 + 6;
1153 mask4 = mask0 + 8;
1154 mask5 = mask0 + 10;
1155 mask6 = mask0 + 12;
1156 mask7 = mask0 + 14;
1164 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3,
1168 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3,
1181 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3,
1183 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3,
1219 v16i8 mask0, mask1, mask2, mask3, mask4, mask5, mask6, mask7;
1244 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
1245 mask1 = mask0 + 2;
1246 mask2 = mask0 + 4;
1247 mask3 = mask0 + 6;
1248 mask4 = mask0 + 8;
1249 mask5 = mask0 + 10;
1250 mask6 = mask0 + 12;
1251 mask7 = mask0 + 14;
1263 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3,
1267 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3,
1269 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3,
1782 v16i8 mask0 = LD_SB(ff_hevc_mask_arr + 16);
1793 mask1 = mask0 + 2;
1794 mask2 = mask0 + 4;
1795 mask3 = mask0 + 6;
1811 VSHF_B4_SB(src0, src3, mask0, mask1, mask2, mask3, vec0, vec1, vec2, vec3);
1812 VSHF_B4_SB(src1, src4, mask0, mask1, mask2, mask3, vec4, vec5, vec6, vec7);
1813 VSHF_B4_SB(src2, src5, mask0, mask1, mask2, mask3,
1815 VSHF_B4_SB(src3, src6, mask0, mask1, mask2, mask3,
1837 VSHF_B4_SB(src7, src9, mask0, mask1, mask2, mask3,
1839 VSHF_B4_SB(src8, src10, mask0, mask1, mask2, mask3,
1911 v16i8 mask0 = LD_SB(ff_hevc_mask_arr);
1931 mask1 = mask0 + 2;
1932 mask2 = mask0 + 4;
1933 mask3 = mask0 + 6;
1943 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3,
1945 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3,
1947 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3,
1949 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3,
1960 VSHF_B4_SB(src4, src4, mask0, mask1, mask2, mask3,
1962 VSHF_B4_SB(src5, src5, mask0, mask1, mask2, mask3,
1964 VSHF_B4_SB(src6, src6, mask0, mask1, mask2, mask3,
1985 VSHF_B4_SB(src7, src7, mask0, mask1, mask2, mask3,
1999 VSHF_B4_SB(src8, src8, mask0, mask1, mask2, mask3,
2075 v16i8 mask0, mask1, mask2, mask3, mask4, mask5, mask6, mask7;
2106 mask0 = LD_SB(ff_hevc_mask_arr);
2107 mask1 = mask0 + 2;
2108 mask2 = mask0 + 4;
2109 mask3 = mask0 + 6;
2119 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3, vec0, vec1, vec2, vec3);
2120 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3, vec4, vec5, vec6, vec7);
2121 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3, vec8, vec9, vec10,
2123 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3, vec12, vec13, vec14,
2133 VSHF_B4_SB(src4, src4, mask0, mask1, mask2, mask3, vec0, vec1, vec2, vec3);
2134 VSHF_B4_SB(src5, src5, mask0, mask1, mask2, mask3, vec4, vec5, vec6, vec7);
2135 VSHF_B4_SB(src6, src6, mask0, mask1, mask2, mask3, vec8, vec9, vec10,
2149 VSHF_B4_SB(src7, src7, mask0, mask1, mask2, mask3, vec0, vec1, vec2,
2362 v16i8 mask0 = LD_SB(&ff_hevc_mask_arr[16]);
2369 mask1 = mask0 + 2;
2389 VSHF_B2_SB(src0, src1, src0, src1, mask0, mask1, vec0, vec1);
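
From line 2362 on, the matches switch to VSHF_B2_SB, the two-shuffle variant used by the 4-tap filter paths: only mask0 and its +2 neighbour are needed. The groups that load &ff_hevc_mask_arr[16] are the 4-wide paths; that second table row is believed to end in indices of 16 and above, so one shuffle packs two 4-pixel rows (src0 and src1 here) into a single vector, while the groups loading offset 0 (below) keep each 8-wide row in its own shuffle. A model in the same style as vshf_model() above:

    /* VSHF_B2_SB(in0, in1, in2, in3, m0, m1, out0, out1): two
     * independent gathers, each over its own source pair. */
    static void vshf_b2_model(const uint8_t in0[16], const uint8_t in1[16],
                              const uint8_t in2[16], const uint8_t in3[16],
                              const uint8_t m0[16], const uint8_t m1[16],
                              uint8_t out0[16], uint8_t out1[16])
    {
        vshf_model(in0, in1, m0, out0);
        vshf_model(in2, in3, m1, out1);
    }
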
2419 v16i8 mask0 = LD_SB(&ff_hevc_mask_arr[16]);
2427 mask1 = mask0 + 2;
2447 VSHF_B2_SB(src0, src1, src0, src1, mask0, mask1, vec0, vec1);
2448 VSHF_B2_SB(src2, src3, src2, src3, mask0, mask1, vec2, vec3);
2479 v16i8 mask0 = LD_SB(&ff_hevc_mask_arr[16]);
2501 mask1 = mask0 + 2;
2509 VSHF_B2_SB(src0, src1, src0, src1, mask0, mask1, vec0, vec1);
2510 VSHF_B2_SB(src2, src3, src2, src3, mask0, mask1, vec2, vec3);
2511 VSHF_B2_SB(src4, src5, src4, src5, mask0, mask1, vec4, vec5);
2512 VSHF_B2_SB(src6, src7, src6, src7, mask0, mask1, vec6, vec7);
2564 v16i8 mask0 = LD_SB(ff_hevc_mask_arr);
2591 mask1 = mask0 + 2;
2595 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
2596 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
2597 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
2598 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec6, vec7);
2603 VSHF_B2_SB(src4, src4, src4, src4, mask0, mask1, vec0, vec1);
2604 VSHF_B2_SB(src5, src5, src5, src5, mask0, mask1, vec2, vec3);
2605 VSHF_B2_SB(src6, src6, src6, src6, mask0, mask1, vec4, vec5);
2606 VSHF_B2_SB(src7, src7, src7, src7, mask0, mask1, vec6, vec7);
2644 v16i8 mask0 = LD_SB(ff_hevc_mask_arr);
2670 mask1 = mask0 + 2;
2675 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
2676 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
2698 v16i8 mask0, mask1, vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;
2722 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
2723 mask1 = mask0 + 2;
2727 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
2728 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
2729 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
2730 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec6, vec7);
2756 v16i8 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
2784 mask1 = mask0 + 2;
2789 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
2790 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
2791 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
2792 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec6, vec7);
2793 VSHF_B2_SB(src4, src4, src4, src4, mask0, mask1, vec8, vec9);
2794 VSHF_B2_SB(src5, src5, src5, src5, mask0, mask1, vec10, vec11);
2828 v16i8 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
2855 mask1 = mask0 + 2;
2862 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
2863 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
2864 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
2865 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec6, vec7);
2870 VSHF_B2_SB(src4, src4, src4, src4, mask0, mask1, vec0, vec1);
2871 VSHF_B2_SB(src5, src5, src5, src5, mask0, mask1, vec2, vec3);
2872 VSHF_B2_SB(src6, src6, src6, src6, mask0, mask1, vec4, vec5);
2873 VSHF_B2_SB(src7, src7, src7, src7, mask0, mask1, vec6, vec7);
2934 v16i8 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
2964 mask1 = mask0 + 2;
2973 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
2974 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
2975 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
2976 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec6, vec7);
3014 v16i8 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
3041 mask1 = mask0 + 2;
3050 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
3051 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
3052 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
3053 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec6, vec7);
3058 VSHF_B2_SB(src4, src4, src4, src4, mask0, mask1, vec0, vec1);
3059 VSHF_B2_SB(src5, src5, src5, src5, mask0, mask1, vec2, vec3);
3060 VSHF_B2_SB(src6, src6, src6, src6, mask0, mask1, vec4, vec5);
3061 VSHF_B2_SB(src7, src7, src7, src7, mask0, mask1, vec6, vec7);
3097 v16i8 mask0, mask1, mask2, mask3;
3122 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
3123 mask1 = mask0 + 2;
3124 mask2 = mask0 + 8;
3125 mask3 = mask0 + 10;
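
This group breaks the usual +2/+4/+6 ladder: mask2 = mask0 + 8 and mask3 = mask0 + 10 re-aim the pair pattern at bytes 8..13 of the row, which fits a path that filters the two halves of a wider row (a 12-wide case, for instance) with separate shuffles. Note also that the listing only shows lines containing mask0, so the VSHF calls that consume mask2 and mask3 here do not appear among the matches.
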
3134 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
3136 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
3142 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec0, vec1);
3143 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec2, vec3);
3175 v16i8 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
3202 mask1 = mask0 + 2;
3203 mask2 = mask0 + 8;
3204 mask3 = mask0 + 10;
3214 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
3216 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec4, vec5);
3217 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec6, vec7);
3222 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec0, vec1);
3224 VSHF_B2_SB(src4, src4, src4, src4, mask0, mask1, vec4, vec5);
3225 VSHF_B2_SB(src5, src5, src5, src5, mask0, mask1, vec6, vec7);
4204 v16i8 mask0 = LD_SB(ff_hevc_mask_arr + 16);
4222 mask1 = mask0 + 2;
4234 VSHF_B2_SB(src0, src2, src0, src2, mask0, mask1, vec0, vec1);
4235 VSHF_B2_SB(src1, src3, src1, src3, mask0, mask1, vec2, vec3);
4236 VSHF_B2_SB(src2, src4, src2, src4, mask0, mask1, vec4, vec5);
4269 v16i8 mask0 = LD_SB(ff_hevc_mask_arr + 16);
4286 mask1 = mask0 + 2;
4298 VSHF_B2_SB(src0, src3, src0, src3, mask0, mask1, vec0, vec1);
4299 VSHF_B2_SB(src1, src4, src1, src4, mask0, mask1, vec2, vec3);
4300 VSHF_B2_SB(src2, src5, src2, src5, mask0, mask1, vec4, vec5);
4301 VSHF_B2_SB(src3, src6, src3, src6, mask0, mask1, vec6, vec7);
4339 v16i8 mask0 = LD_SB(ff_hevc_mask_arr + 16);
4359 mask1 = mask0 + 2;
4373 VSHF_B2_SB(src0, src1, src0, src1, mask0, mask1, vec0, vec1);
4374 VSHF_B2_SB(src1, src2, src1, src2, mask0, mask1, vec2, vec3);
4386 VSHF_B2_SB(src3, src7, src3, src7, mask0, mask1, vec0, vec1);
4387 VSHF_B2_SB(src4, src8, src4, src8, mask0, mask1, vec2, vec3);
4388 VSHF_B2_SB(src5, src9, src5, src9, mask0, mask1, vec4, vec5);
4389 VSHF_B2_SB(src6, src10, src6, src10, mask0, mask1, vec6, vec7);
4471 v16i8 mask0 = LD_SB(ff_hevc_mask_arr);
4495 mask1 = mask0 + 2;
4509 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
4510 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
4511 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
4520 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec0, vec1);
4521 VSHF_B2_SB(src4, src4, src4, src4, mask0, mask1, vec2, vec3);
4522 VSHF_B2_SB(src5, src5, src5, src5, mask0, mask1, vec4, vec5);
4523 VSHF_B2_SB(src6, src6, src6, src6, mask0, mask1, vec6, vec7);
4528 VSHF_B2_SB(src7, src7, src7, src7, mask0, mask1, vec0, vec1);
4529 VSHF_B2_SB(src8, src8, src8, src8, mask0, mask1, vec2, vec3);
4530 VSHF_B2_SB(src9, src9, src9, src9, mask0, mask1, vec4, vec5);
4531 VSHF_B2_SB(src10, src10, src10, src10, mask0, mask1, vec6, vec7);
4598 v16i8 mask0 = LD_SB(ff_hevc_mask_arr);
4619 mask1 = mask0 + 2;
4631 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
4632 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
4633 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
4634 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec6, vec7);
4635 VSHF_B2_SB(src4, src4, src4, src4, mask0, mask1, vec8, vec9);
4673 v16i8 src0, src1, src2, src3, src4, src5, src6, mask0, mask1;
4693 mask0 = LD_SB(ff_hevc_mask_arr);
4694 mask1 = mask0 + 2;
4708 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
4709 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
4710 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
4716 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec0, vec1);
4717 VSHF_B2_SB(src4, src4, src4, src4, mask0, mask1, vec2, vec3);
4718 VSHF_B2_SB(src5, src5, src5, src5, mask0, mask1, vec4, vec5);
4719 VSHF_B2_SB(src6, src6, src6, src6, mask0, mask1, vec6, vec7);
4769 v16i8 mask0 = LD_SB(ff_hevc_mask_arr);
4793 mask1 = mask0 + 2;
4808 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
4809 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
4810 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
4811 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec6, vec7);
4812 VSHF_B2_SB(src4, src4, src4, src4, mask0, mask1, vec8, vec9);
4813 VSHF_B2_SB(src5, src5, src5, src5, mask0, mask1, vec10, vec11);
4814 VSHF_B2_SB(src6, src6, src6, src6, mask0, mask1, vec12, vec13);
4815 VSHF_B2_SB(src7, src7, src7, src7, mask0, mask1, vec14, vec15);
4816 VSHF_B2_SB(src8, src8, src8, src8, mask0, mask1, vec16, vec17);
4890 v16i8 mask0 = LD_SB(ff_hevc_mask_arr);
4911 mask1 = mask0 + 2;
4929 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
4930 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
4931 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
4944 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec0, vec1);
4945 VSHF_B2_SB(src4, src4, src4, src4, mask0, mask1, vec2, vec3);
4946 VSHF_B2_SB(src5, src5, src5, src5, mask0, mask1, vec4, vec5);
4947 VSHF_B2_SB(src6, src6, src6, src6, mask0, mask1, vec6, vec7);
5040 v16i8 mask0, mask1, mask2, mask3;
5061 mask0 = LD_SB(ff_hevc_mask_arr);
5062 mask1 = mask0 + 2;
5078 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
5079 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
5080 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
5091 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec0, vec1);
5092 VSHF_B2_SB(src4, src4, src4, src4, mask0, mask1, vec2, vec3);
5093 VSHF_B2_SB(src5, src5, src5, src5, mask0, mask1, vec4, vec5);
5094 VSHF_B2_SB(src6, src6, src6, src6, mask0, mask1, vec6, vec7);
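
Putting the pieces together, here is a complete, runnable portable-C sketch of the 4-wide gather pattern (the style of lines 2362-2389): load the second table row, derive mask1, and shuffle two 4-pixel rows into one vector. The table contents and the selection rule are the same assumptions as in the earlier sketches.

    #include <stdint.h>
    #include <stdio.h>

    static void vshf(const uint8_t in0[16], const uint8_t in1[16],
                     const uint8_t mask[16], uint8_t out[16])
    {
        for (int i = 0; i < 16; i++)
            out[i] = mask[i] < 16 ? in0[mask[i]] : in1[mask[i] - 16];
    }

    int main(void)
    {
        /* Assumed contents of ff_hevc_mask_arr[16..31] (4-width row). */
        static const uint8_t row1[16] = {
            0, 1, 1, 2, 2, 3, 3, 4, 16, 17, 17, 18, 18, 19, 19, 20
        };
        uint8_t mask0[16], mask1[16], vec0[16], vec1[16];
        uint8_t src0[16], src1[16];          /* two consecutive 4-wide rows */

        for (int i = 0; i < 16; i++) {
            mask0[i] = row1[i];              /* LD_SB(&ff_hevc_mask_arr[16]) */
            mask1[i] = row1[i] + 2;          /* mask1 = mask0 + 2 */
            src0[i] = i;                     /* row 0: 0,1,2,...   */
            src1[i] = 100 + i;               /* row 1: 100,101,... */
        }

        /* VSHF_B2_SB(src0, src1, src0, src1, mask0, mask1, vec0, vec1) */
        vshf(src0, src1, mask0, vec0);       /* 0 1 1 2 2 3 3 4 100 101 ... */
        vshf(src0, src1, mask1, vec1);       /* 2 3 3 4 4 5 5 6 102 103 ... */

        for (int i = 0; i < 16; i++)
            printf("%3d%s", vec0[i], i == 15 ? "\n" : " ");
        for (int i = 0; i < 16; i++)
            printf("%3d%s", vec1[i], i == 15 ? "\n" : " ");
        return 0;
    }
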