Lines matching defs:mask0
35 mask0, mask1, mask2, mask3, \
41 VSHF_B2_SB(src0, src1, src2, src3, mask0, mask0, vec0_m, vec1_m); \
52 mask0, mask1, mask2, mask3, \
58 VSHF_B2_SB(src0, src0, src1, src1, mask0, mask0, vec0_m, vec1_m); \
59 VSHF_B2_SB(src2, src2, src3, src3, mask0, mask0, vec2_m, vec3_m); \
77 mask0, mask1, filt0, filt1, \
82 VSHF_B2_SB(src0, src1, src2, src3, mask0, mask0, vec0_m, vec1_m); \
89 mask0, mask1, filt0, filt1, \
94 VSHF_B2_SB(src0, src0, src1, src1, mask0, mask0, vec0_m, vec1_m); \
95 VSHF_B2_SB(src2, src2, src3, src3, mask0, mask0, vec2_m, vec3_m); \
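The HORIZ_8TAP_*WID_4VECS_FILT macros above pair each mask with one splatted coefficient pair: every VSHF_B2_SB gathers overlapping byte pairs, and the DPADD steps that follow (not matched by this search) reduce them against filt0..filt3. A scalar sketch of what one output pixel works out to, assuming src already points at the first tap (the callers offset the pointer before filtering); horiz_8tap_model is an illustrative name, not an FFmpeg function:

    #include <stdint.h>

    /* Scalar model of one 8-tap horizontal output pixel.  mask0..mask3
     * select the byte pairs (x[d], x[d+1]) at d = 0, 2, 4, 6; the DPADD
     * steps accumulate each pair against one coefficient pair. */
    static int16_t horiz_8tap_model(const uint8_t *src, const int8_t c[8])
    {
        int32_t sum = 0;
        for (int d = 0; d < 8; d += 2)
            sum += src[d] * c[d] + src[d + 1] * c[d + 1];
        return (int16_t)sum;
    }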
290 v16u8 mask0, mask1, mask2, mask3, out;
294 mask0 = LD_UB(&ff_hevc_mask_arr[16]);
301 mask1 = mask0 + 2;
302 mask2 = mask0 + 4;
303 mask3 = mask0 + 6;
307 HORIZ_8TAP_4WID_4VECS_FILT(src0, src1, src2, src3, mask0, mask1, mask2,
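mask0 is loaded from &ff_hevc_mask_arr[16] here because this is a 4-width function; the 8-width functions further down load from &ff_hevc_mask_arr[0]. Assuming the usual definition in FFmpeg's HEVC MSA sources (reproduced here for reference and renamed, since the definition itself is not part of this listing):

    /* Byte-shuffle indices: values 0..15 select from the first VSHF
     * source, 16..31 from the second. */
    static const uint8_t ff_hevc_mask_arr_model[16 * 2] = {
        /* &arr[0], 8-width: pairs (i, i+1) within a single vector */
        0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8,
        /* &arr[16], 4-width: the high half reaches into a second
         * source vector, so one shuffle covers two 4-pixel rows */
        0, 1, 1, 2, 2, 3, 3, 4, 16, 17, 17, 18, 18, 19, 19, 20
    };

Since mask0 is a byte vector, mask1 = mask0 + 2 adds 2 to every lane, sliding each sampling window right by two pixels; +4 and +6 give the remaining tap pairs of the 8-tap filter.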
321 v16u8 mask0, mask1, mask2, mask3, out;
324 mask0 = LD_UB(&ff_hevc_mask_arr[16]);
331 mask1 = mask0 + 2;
332 mask2 = mask0 + 4;
333 mask3 = mask0 + 6;
338 HORIZ_8TAP_4WID_4VECS_FILT(src0, src1, src2, src3, mask0, mask1, mask2,
342 HORIZ_8TAP_4WID_4VECS_FILT(src0, src1, src2, src3, mask0, mask1, mask2,
356 v16u8 mask0, mask1, mask2, mask3, out;
360 mask0 = LD_UB(&ff_hevc_mask_arr[16]);
367 mask1 = mask0 + 2;
368 mask2 = mask0 + 4;
369 mask3 = mask0 + 6;
374 HORIZ_8TAP_4WID_4VECS_FILT(src0, src1, src2, src3, mask0, mask1, mask2,
379 HORIZ_8TAP_4WID_4VECS_FILT(src0, src1, src2, src3, mask0, mask1, mask2,
392 HORIZ_8TAP_4WID_4VECS_FILT(src0, src1, src2, src3, mask0, mask1, mask2,
397 HORIZ_8TAP_4WID_4VECS_FILT(src0, src1, src2, src3, mask0, mask1, mask2,
427 v16u8 mask0, mask1, mask2, mask3, tmp0, tmp1;
431 mask0 = LD_UB(&ff_hevc_mask_arr[0]);
438 mask1 = mask0 + 2;
439 mask2 = mask0 + 4;
440 mask3 = mask0 + 6;
447 VSHF_B2_SB(src0, src0, src1, src1, mask0, mask0, vec0_m, vec1_m);
448 VSHF_B2_SB(src2, src2, src3, src3, mask0, mask0, vec2_m, vec3_m);
478 v16u8 mask0, mask1, mask2, mask3, mask4, mask5, mask6, mask00;
486 mask0 = LD_UB(&ff_hevc_mask_arr[16]);
497 mask4 = mask0 + 2;
498 mask5 = mask0 + 4;
499 mask6 = mask0 + 6;
529 VSHF_B2_SB(src4, src5, src6, src7, mask0, mask0, vec0, vec1);
557 v16u8 mask0, mask1, mask2, mask3, out;
562 mask0 = LD_UB(&ff_hevc_mask_arr[0]);
569 mask1 = mask0 + 2;
570 mask2 = mask0 + 4;
571 mask3 = mask0 + 6;
584 HORIZ_8TAP_8WID_4VECS_FILT(src0, src1, src2, src3, mask0, mask1, mask2,
596 HORIZ_8TAP_8WID_4VECS_FILT(src4, src5, src6, src7, mask0, mask1, mask2,
616 v16u8 mask0, mask1, mask2, mask3, mask4, mask5, mask6, mask7, out;
621 mask0 = LD_UB(&ff_hevc_mask_arr[0]);
628 mask1 = mask0 + 2;
629 mask2 = mask0 + 4;
630 mask3 = mask0 + 6;
631 mask4 = mask0 + 8;
632 mask5 = mask0 + 10;
633 mask6 = mask0 + 12;
634 mask7 = mask0 + 14;
641 VSHF_B2_SB(src0, src0, src1, src1, mask0, mask0, vec0, vec8);
642 VSHF_B2_SB(src2, src2, src3, src3, mask0, mask0, vec2, vec9);
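Lines 631-634 extend the same trick past one register: with mask0 = {0,1,1,2,...,7,8}, mask4 = mask0 + 8 = {8,9,9,10,...,15,16}, and indices of 16 and above select from the second shuffle source. A hypothetical helper that prints which source byte each derived mask picks (a is the first shuffle source, b the second):

    #include <stdio.h>
    #include <stdint.h>

    static void show_mask_window(int offset)
    {
        static const uint8_t base[16] =
            { 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8 };
        for (int i = 0; i < 16; i++) {
            int idx = base[i] + offset;          /* maskN = mask0 + offset */
            printf("%c%-2d ", idx < 16 ? 'a' : 'b', idx & 15);
        }
        printf("\n");
    }

    int main(void)
    {
        show_mask_window(0);   /* mask0: output pixels 0..7  */
        show_mask_window(8);   /* mask4: output pixels 8..15 */
        return 0;
    }

Pairs such as (a15, b0) are exactly the windows that straddle the boundary between two adjacent 16-byte loads, which lets these wider paths filter a full row without realigning the sources.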
685 v16u8 mask0, mask1, mask2, mask3, out;
690 mask0 = LD_UB(&ff_hevc_mask_arr[0]);
697 mask1 = mask0 + 2;
698 mask2 = mask0 + 4;
699 mask3 = mask0 + 6;
716 HORIZ_8TAP_8WID_4VECS_FILT(src0, src1, src2, src3, mask0, mask1, mask2,
728 HORIZ_8TAP_8WID_4VECS_FILT(src4, src5, src6, src7, mask0, mask1, mask2,
748 v16u8 mask0, mask1, mask2, mask3, mask4, mask5, mask6, mask7, out;
751 mask0 = LD_UB(&ff_hevc_mask_arr[0]);
758 mask1 = mask0 + 2;
759 mask2 = mask0 + 4;
760 mask3 = mask0 + 6;
761 mask4 = mask0 + 8;
762 mask5 = mask0 + 10;
763 mask6 = mask0 + 12;
764 mask7 = mask0 + 14;
777 VSHF_B3_SB(src0, src0, src1, src1, src2, src2, mask0, mask0, mask0,
800 VSHF_B3_SB(src2, src3, src3, src3, src4, src4, mask4, mask0, mask0,
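The 48-width path amortizes further with VSHF_B3_SB, which as far as generic_macros_msa.h goes is just the two-shuffle macro plus a third vshf.b (paraphrased model, not the actual macro; with __msa_vshf_b, indices 0..15 of the mask select from the third argument, 16..31 from the second):

    /* Shape of VSHF_B3_SB, paraphrased: three independent byte shuffles. */
    #define VSHF_B3_SB_MODEL(in0, in1, in2, in3, in4, in5,          \
                             m0, m1, m2, out0, out1, out2)          \
    {                                                               \
        out0 = __msa_vshf_b(m0, in1, in0);                          \
        out1 = __msa_vshf_b(m1, in3, in2);                          \
        out2 = __msa_vshf_b(m2, in5, in4);                          \
    }

The mixed mask operands at line 800 (mask4 alongside mask0) are deliberate: the middle third of a 48-wide row straddles two source registers, so its windows need the boundary-crossing mask while the others use the in-register one.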
832 v16u8 mask0, mask1, mask2, mask3, out;
838 mask0 = LD_UB(&ff_hevc_mask_arr[0]);
845 mask1 = mask0 + 2;
846 mask2 = mask0 + 4;
847 mask3 = mask0 + 6;
855 VSHF_B2_SB(src0, src0, src1, src1, mask0, mask0, vec0, vec1);
856 VSHF_B2_SB(src2, src2, src3, src3, mask0, mask0, vec2, vec3);
879 VSHF_B2_SB(src4, src4, src5, src5, mask0, mask0, vec0, vec1);
880 VSHF_B2_SB(src6, src6, src7, src7, mask0, mask0, vec2, vec3);
1357 v16i8 mask0 = LD_SB(ff_hevc_mask_arr + 16);
1368 mask1 = mask0 + 2;
1369 mask2 = mask0 + 4;
1370 mask3 = mask0 + 6;
1376 VSHF_B4_SB(src0, src3, mask0, mask1, mask2, mask3, vec0, vec1, vec2, vec3);
1377 VSHF_B4_SB(src1, src4, mask0, mask1, mask2, mask3, vec4, vec5, vec6, vec7);
1378 VSHF_B4_SB(src2, src5, mask0, mask1, mask2, mask3,
1380 VSHF_B4_SB(src3, src6, mask0, mask1, mask2, mask3,
1404 VSHF_B4_SB(src7, src11, mask0, mask1, mask2, mask3,
1406 VSHF_B4_SB(src8, src12, mask0, mask1, mask2, mask3,
1408 VSHF_B4_SB(src9, src13, mask0, mask1, mask2, mask3,
1410 VSHF_B4_SB(src10, src14, mask0, mask1, mask2, mask3,
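In the horizontal-plus-vertical (hv) functions, VSHF_B4_SB applies all four mask windows to one source pair in a single call, so each invocation yields the complete set of tap-pair vectors for its rows. Pairing (src0, src3), (src1, src4), (src2, src5), (src3, src6) with the 4-width mask packs two rows, three apart, into each result, which is the staggered ordering the vertical 8-tap stage consumes next. A runnable scalar model of that row packing (pack_two_rows is an illustrative name):

    #include <stdint.h>

    /* Model: gather the 4-width windows of two rows into one 16-byte
     * vector, as vshf does with the 4-width mask.  Low 8 bytes hold
     * row_a's windows, high 8 bytes row_b's. */
    static void pack_two_rows(uint8_t out[16],
                              const uint8_t *row_a, const uint8_t *row_b)
    {
        static const uint8_t m[16] =
            { 0, 1, 1, 2, 2, 3, 3, 4, 16, 17, 17, 18, 18, 19, 19, 20 };
        for (int i = 0; i < 16; i++)
            out[i] = m[i] < 16 ? row_a[m[i]] : row_b[m[i] - 16];
    }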
1494 v16i8 mask0 = LD_SB(ff_hevc_mask_arr);
1506 mask1 = mask0 + 2;
1507 mask2 = mask0 + 4;
1508 mask3 = mask0 + 6;
1519 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3,
1521 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3,
1523 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3,
1525 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3,
1536 VSHF_B4_SB(src4, src4, mask0, mask1, mask2, mask3,
1538 VSHF_B4_SB(src5, src5, mask0, mask1, mask2, mask3,
1540 VSHF_B4_SB(src6, src6, mask0, mask1, mask2, mask3,
1561 VSHF_B4_SB(src7, src7, mask0, mask1, mask2, mask3,
1574 VSHF_B4_SB(src8, src8, mask0, mask1, mask2, mask3,
1633 v16i8 mask0, mask1, mask2, mask3, mask4, mask5, mask6, mask7;
1656 mask0 = LD_SB(ff_hevc_mask_arr);
1657 mask1 = mask0 + 2;
1658 mask2 = mask0 + 4;
1659 mask3 = mask0 + 6;
1669 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3, vec0, vec1, vec2, vec3);
1670 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3, vec4, vec5, vec6, vec7);
1671 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3, vec8, vec9, vec10,
1673 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3, vec12, vec13, vec14,
1684 VSHF_B4_SB(src4, src4, mask0, mask1, mask2, mask3, vec0, vec1, vec2, vec3);
1685 VSHF_B4_SB(src5, src5, mask0, mask1, mask2, mask3, vec4, vec5, vec6, vec7);
1686 VSHF_B4_SB(src6, src6, mask0, mask1, mask2, mask3, vec8, vec9, vec10,
1707 VSHF_B4_SB(src7, src7, mask0, mask1, mask2, mask3, vec0, vec1, vec2,
1720 VSHF_B4_SB(src8, src8, mask0, mask1, mask2, mask3, vec0, vec1, vec2,
1918 v16i8 filt0, filt1, src0, src1, mask0, mask1, vec0, vec1;
1922 mask0 = LD_SB(&ff_hevc_mask_arr[16]);
1929 mask1 = mask0 + 2;
1933 VSHF_B2_SB(src0, src1, src0, src1, mask0, mask1, vec0, vec1);
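From line 1918 on, the listing switches to the 4-tap (epel) filters: only mask0 and mask1 = mask0 + 2 are needed, and a single VSHF_B2 per source pair feeds two coefficient pairs. The scalar equivalent, again assuming src is pre-offset to the first tap; horiz_4tap_model is an illustrative name:

    #include <stdint.h>

    /* One 4-tap horizontal output pixel: mask0 supplies the pair
     * (x[0], x[1]), mask1 the pair (x[2], x[3]). */
    static int16_t horiz_4tap_model(const uint8_t *src, const int8_t c[4])
    {
        return (int16_t)(src[0] * c[0] + src[1] * c[1] +
                         src[2] * c[2] + src[3] * c[3]);
    }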
1945 v16i8 src0, src1, src2, src3, filt0, filt1, mask0, mask1;
1949 mask0 = LD_SB(&ff_hevc_mask_arr[16]);
1956 mask1 = mask0 + 2;
1960 HORIZ_4TAP_4WID_4VECS_FILT(src0, src1, src2, src3, mask0, mask1,
1972 v16i8 src0, src1, src2, src3, filt0, filt1, mask0, mask1;
1976 mask0 = LD_SB(&ff_hevc_mask_arr[16]);
1983 mask1 = mask0 + 2;
1989 HORIZ_4TAP_4WID_4VECS_FILT(src0, src1, src2, src3, mask0, mask1,
1993 HORIZ_4TAP_4WID_4VECS_FILT(src0, src1, src2, src3, mask0, mask1,
2008 v16i8 filt0, filt1, mask0, mask1;
2012 mask0 = LD_SB(&ff_hevc_mask_arr[16]);
2019 mask1 = mask0 + 2;
2024 HORIZ_4TAP_4WID_4VECS_FILT(src0, src1, src2, src3, mask0, mask1,
2026 HORIZ_4TAP_4WID_4VECS_FILT(src4, src5, src6, src7, mask0, mask1,
2039 HORIZ_4TAP_4WID_4VECS_FILT(src0, src1, src2, src3, mask0, mask1,
2041 HORIZ_4TAP_4WID_4VECS_FILT(src4, src5, src6, src7, mask0, mask1,
2070 v16i8 src0, src1, src2, src3, filt0, filt1, mask0, mask1;
2074 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
2081 mask1 = mask0 + 2;
2087 HORIZ_4TAP_8WID_4VECS_FILT(src0, src1, src2, src3, mask0, mask1, filt0,
2103 HORIZ_4TAP_8WID_4VECS_FILT(src0, src1, src2, src3, mask0, mask1, filt0,
2120 v16i8 src0, src1, filt0, filt1, mask0, mask1;
2124 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
2130 mask1 = mask0 + 2;
2137 VSHF_B2_SH(src0, src0, src1, src1, mask0, mask0, vec0, vec1);
2154 v16i8 src0, src1, src2, src3, filt0, filt1, mask0, mask1;
2158 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
2165 mask1 = mask0 + 2;
2172 HORIZ_4TAP_8WID_4VECS_FILT(src0, src1, src2, src3, mask0, mask1, filt0,
2201 v16i8 src0, src1, src2, src3, filt0, filt1, mask0, mask1, mask2, mask3;
2207 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
2216 mask1 = mask0 + 2;
2233 VSHF_B2_SB(src0, src0, src1, src1, mask0, mask0, vec4, vec5);
2234 VSHF_B2_SB(src2, src2, src3, src3, mask0, mask0, vec6, vec7);
2256 v16i8 filt0, filt1, mask0, mask1;
2261 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
2268 mask1 = mask0 + 2;
2277 VSHF_B2_SB(src0, src0, src1, src1, mask0, mask0, vec0_m, vec1_m);
2278 VSHF_B2_SB(src2, src2, src3, src3, mask0, mask0, vec2_m, vec3_m);
2294 VSHF_B2_SB(src4, src4, src5, src5, mask0, mask0, vec0_m, vec1_m);
2295 VSHF_B2_SB(src6, src6, src7, src7, mask0, mask0, vec2_m, vec3_m);
2321 v16i8 filt0, filt1, mask0, mask1, mask00, mask11;
2325 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
2332 mask1 = mask0 + 2;
2333 mask00 = mask0 + 8;
2334 mask11 = mask0 + 10;
2342 VSHF_B2_SB(src0, src0, src0, src1, mask0, mask00, vec0, vec1);
2343 VSHF_B2_SB(src2, src2, src2, src3, mask0, mask00, vec2, vec3);
2359 VSHF_B2_SB(src4, src4, src4, src5, mask0, mask00, vec0, vec1);
2360 VSHF_B2_SB(src6, src6, src6, src7, mask0, mask00, vec2, vec3);
2377 VSHF_B2_SB(src1, src1, src3, src3, mask0, mask0, vec0, vec1);
2378 VSHF_B2_SB(src5, src5, src7, src7, mask0, mask0, vec2, vec3);
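The 24-width 4-tap function reuses the register-straddling offsets: mask00 = mask0 + 8 covers output pixels 8..15, whose windows run off the end of the first 16-byte load, and the index 16 in that mask reaches the first byte of the second source in VSHF_B2_SB(src0, src0, src0, src1, mask0, mask00, ...). The (src1, src1)/(src3, src3) shuffles at lines 2377-2378 then finish the last eight pixels of each row. Sketch of the derived indices:

    /* mask0 + 8  = {  8, 9, 9,10,10,11,11,12,12,13,13,14,14,15,15,16 }
     * mask0 + 10 = { 10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18 }
     * values >= 16 select from the second shuffle source. */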
2402 v16i8 filt0, filt1, mask0, mask1;
2407 mask0 = LD_SB(&ff_hevc_mask_arr[0]);
2414 mask1 = mask0 + 2;
2430 VSHF_B2_SB(src0, src0, src1, src1, mask0, mask0, vec0_m, vec1_m);
2431 VSHF_B2_SB(src2, src2, src3, src3, mask0, mask0, vec2_m, vec3_m);
2439 VSHF_B2_SB(src4, src4, src5, src5, mask0, mask0, vec0_m, vec1_m);
2440 VSHF_B2_SB(src6, src6, src7, src7, mask0, mask0, vec2_m, vec3_m);
3098 v16i8 mask0 = LD_SB(ff_hevc_mask_arr + 16);
3115 mask1 = mask0 + 2;
3120 VSHF_B2_SB(src0, src2, src0, src2, mask0, mask1, vec0, vec1);
3121 VSHF_B2_SB(src1, src3, src1, src3, mask0, mask1, vec2, vec3);
3122 VSHF_B2_SB(src2, src4, src2, src4, mask0, mask1, vec4, vec5);
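The hv 4-width epel loops lean on the dual-source half of the 4-width mask: VSHF_B2_SB(src0, src2, src0, src2, mask0, mask1, vec0, vec1) gathers the horizontal 4-tap windows of two rows at once, so the vertical 4-tap stage that follows combines whole vectors instead of half-filled ones. This is the same row packing as the pack_two_rows sketch above, only with varying row strides (2 here; 3, 1, and 4 in the functions that follow). Illustrative use of that hypothetical helper:

    uint8_t row0[16], row2[16], vec0[16];
    /* ... fill row0/row2 with source pixels ... */
    pack_two_rows(vec0, row0, row2);  /* low half: row 0, high half: row 2 */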
3153 v16i8 mask0 = LD_SB(ff_hevc_mask_arr + 16);
3171 mask1 = mask0 + 2;
3176 VSHF_B2_SB(src0, src3, src0, src3, mask0, mask1, vec0, vec1);
3177 VSHF_B2_SB(src1, src4, src1, src4, mask0, mask1, vec2, vec3);
3178 VSHF_B2_SB(src2, src5, src2, src5, mask0, mask1, vec4, vec5);
3179 VSHF_B2_SB(src3, src6, src3, src6, mask0, mask1, vec6, vec7);
3215 v16i8 mask0 = LD_SB(ff_hevc_mask_arr + 16);
3235 mask1 = mask0 + 2;
3242 VSHF_B2_SB(src0, src1, src0, src1, mask0, mask1, vec0, vec1);
3243 VSHF_B2_SB(src1, src2, src1, src2, mask0, mask1, vec2, vec3);
3256 VSHF_B2_SB(src3, src7, src3, src7, mask0, mask1, vec0, vec1);
3257 VSHF_B2_SB(src4, src8, src4, src8, mask0, mask1, vec2, vec3);
3258 VSHF_B2_SB(src5, src9, src5, src9, mask0, mask1, vec4, vec5);
3259 VSHF_B2_SB(src6, src10, src6, src10, mask0, mask1, vec6, vec7);
3332 v16i8 mask0 = LD_SB(ff_hevc_mask_arr);
3355 mask1 = mask0 + 2;
3362 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
3363 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
3364 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
3376 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec0, vec1);
3377 VSHF_B2_SB(src4, src4, src4, src4, mask0, mask1, vec2, vec3);
3378 VSHF_B2_SB(src5, src5, src5, src5, mask0, mask1, vec4, vec5);
3379 VSHF_B2_SB(src6, src6, src6, src6, mask0, mask1, vec6, vec7);
3386 VSHF_B2_SB(src7, src7, src7, src7, mask0, mask1, vec0, vec1);
3387 VSHF_B2_SB(src8, src8, src8, src8, mask0, mask1, vec2, vec3);
3388 VSHF_B2_SB(src9, src9, src9, src9, mask0, mask1, vec4, vec5);
3389 VSHF_B2_SB(src10, src10, src10, src10, mask0, mask1, vec6, vec7);
3449 v16i8 mask0 = LD_SB(ff_hevc_mask_arr);
3468 mask1 = mask0 + 2;
3473 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
3474 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
3475 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
3476 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec6, vec7);
3477 VSHF_B2_SB(src4, src4, src4, src4, mask0, mask1, vec8, vec9);
3510 v16i8 src0, src1, src2, src3, src4, src5, src6, mask0, mask1;
3528 mask0 = LD_SB(ff_hevc_mask_arr);
3529 mask1 = mask0 + 2;
3536 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
3537 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
3538 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
3547 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec0, vec1);
3548 VSHF_B2_SB(src4, src4, src4, src4, mask0, mask1, vec2, vec3);
3549 VSHF_B2_SB(src5, src5, src5, src5, mask0, mask1, vec4, vec5);
3550 VSHF_B2_SB(src6, src6, src6, src6, mask0, mask1, vec6, vec7);
3596 v16i8 mask0 = LD_SB(ff_hevc_mask_arr);
3619 mask1 = mask0 + 2;
3628 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
3629 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
3630 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
3631 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec6, vec7);
3632 VSHF_B2_SB(src4, src4, src4, src4, mask0, mask1, vec8, vec9);
3633 VSHF_B2_SB(src5, src5, src5, src5, mask0, mask1, vec10, vec11);
3634 VSHF_B2_SB(src6, src6, src6, src6, mask0, mask1, vec12, vec13);
3635 VSHF_B2_SB(src7, src7, src7, src7, mask0, mask1, vec14, vec15);
3636 VSHF_B2_SB(src8, src8, src8, src8, mask0, mask1, vec16, vec17);
3704 v16i8 mask0 = LD_SB(ff_hevc_mask_arr);
3724 mask1 = mask0 + 2;
3735 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
3736 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
3737 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
3752 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec0, vec1);
3753 VSHF_B2_SB(src4, src4, src4, src4, mask0, mask1, vec2, vec3);
3754 VSHF_B2_SB(src5, src5, src5, src5, mask0, mask1, vec4, vec5);
3755 VSHF_B2_SB(src6, src6, src6, src6, mask0, mask1, vec6, vec7);
3838 v16i8 mask0, mask1, mask2, mask3;
3858 mask0 = LD_SB(ff_hevc_mask_arr);
3859 mask1 = mask0 + 2;
3869 VSHF_B2_SB(src0, src0, src0, src0, mask0, mask1, vec0, vec1);
3870 VSHF_B2_SB(src1, src1, src1, src1, mask0, mask1, vec2, vec3);
3871 VSHF_B2_SB(src2, src2, src2, src2, mask0, mask1, vec4, vec5);
3885 VSHF_B2_SB(src3, src3, src3, src3, mask0, mask1, vec0, vec1);
3886 VSHF_B2_SB(src4, src4, src4, src4, mask0, mask1, vec2, vec3);
3887 VSHF_B2_SB(src5, src5, src5, src5, mask0, mask1, vec4, vec5);
3888 VSHF_B2_SB(src6, src6, src6, src6, mask0, mask1, vec6, vec7);