Lines Matching refs:mask3
35 mask0, mask1, mask2, mask3, \
47 VSHF_B2_SB(src0, src1, src2, src3, mask3, mask3, vec6_m, vec7_m); \
52 mask0, mask1, mask2, mask3, \
70 VSHF_B2_SB(src0, src0, src1, src1, mask3, mask3, vec4_m, vec5_m); \
71 VSHF_B2_SB(src2, src2, src3, src3, mask3, mask3, vec6_m, vec7_m); \
290 v16u8 mask0, mask1, mask2, mask3, out;
303 mask3 = mask0 + 6;
308 mask3, filt0, filt1, filt2, filt3, out0, out1);
321 v16u8 mask0, mask1, mask2, mask3, out;
333 mask3 = mask0 + 6;
339 mask3, filt0, filt1, filt2, filt3, out0, out1);
343 mask3, filt0, filt1, filt2, filt3, out2, out3);
356 v16u8 mask0, mask1, mask2, mask3, out;
369 mask3 = mask0 + 6;
375 mask3, filt0, filt1, filt2, filt3, out0, out1);
380 mask3, filt0, filt1, filt2, filt3, out2, out3);
393 mask3, filt0, filt1, filt2, filt3, out0, out1);
398 mask3, filt0, filt1, filt2, filt3, out2, out3);
427 v16u8 mask0, mask1, mask2, mask3, tmp0, tmp1;
440 mask3 = mask0 + 6;
459 VSHF_B2_SB(src0, src0, src1, src1, mask3, mask3, vec4_m, vec5_m);
460 VSHF_B2_SB(src2, src2, src3, src3, mask3, mask3, vec6_m, vec7_m);
478 v16u8 mask0, mask1, mask2, mask3, mask4, mask5, mask6, mask00;
496 mask3 = mask00 + 6;
523 VSHF_B2_SB(src0, src0, src1, src1, mask3, mask3, vec4, vec5);
524 VSHF_B2_SB(src2, src2, src3, src3, mask3, mask3, vec6, vec7);
557 v16u8 mask0, mask1, mask2, mask3, out;
571 mask3 = mask0 + 6;
585 mask3, filt0, filt1, filt2, filt3, out0,
597 mask3, filt0, filt1, filt2, filt3, out0,
616 v16u8 mask0, mask1, mask2, mask3, mask4, mask5, mask6, mask7, out;
630 mask3 = mask0 + 6;
659 VSHF_B2_SB(src0, src0, src1, src1, mask3, mask3, vec4, vec10);
660 VSHF_B2_SB(src2, src2, src3, src3, mask3, mask3, vec6, vec11);
685 v16u8 mask0, mask1, mask2, mask3, out;
699 mask3 = mask0 + 6;
717 mask3, filt0, filt1, filt2, filt3, out0,
729 mask3, filt0, filt1, filt2, filt3, out0,
748 v16u8 mask0, mask1, mask2, mask3, mask4, mask5, mask6, mask7, out;
760 mask3 = mask0 + 6;
789 VSHF_B3_SB(src0, src0, src1, src1, src2, src2, mask3, mask3, mask3,
811 VSHF_B3_SB(src2, src3, src3, src3, src4, src4, mask7, mask3, mask3,
832 v16u8 mask0, mask1, mask2, mask3, out;
847 mask3 = mask0 + 6;
867 VSHF_B2_SB(src0, src0, src1, src1, mask3, mask3, vec4, vec5);
868 VSHF_B2_SB(src2, src2, src3, src3, mask3, mask3, vec6, vec7);
891 VSHF_B2_SB(src4, src4, src5, src5, mask3, mask3, vec4, vec5);
892 VSHF_B2_SB(src6, src6, src7, src7, mask3, mask3, vec6, vec7);
1349 v16i8 mask1, mask2, mask3;
1370 mask3 = mask0 + 6;
1376 VSHF_B4_SB(src0, src3, mask0, mask1, mask2, mask3, vec0, vec1, vec2, vec3);
1377 VSHF_B4_SB(src1, src4, mask0, mask1, mask2, mask3, vec4, vec5, vec6, vec7);
1378 VSHF_B4_SB(src2, src5, mask0, mask1, mask2, mask3,
1380 VSHF_B4_SB(src3, src6, mask0, mask1, mask2, mask3,
1404 VSHF_B4_SB(src7, src11, mask0, mask1, mask2, mask3,
1406 VSHF_B4_SB(src8, src12, mask0, mask1, mask2, mask3,
1408 VSHF_B4_SB(src9, src13, mask0, mask1, mask2, mask3,
1410 VSHF_B4_SB(src10, src14, mask0, mask1, mask2, mask3,
1484 v16i8 mask1, mask2, mask3;
1508 mask3 = mask0 + 6;
1519 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3,
1521 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3,
1523 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3,
1525 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3,
1536 VSHF_B4_SB(src4, src4, mask0, mask1, mask2, mask3,
1538 VSHF_B4_SB(src5, src5, mask0, mask1, mask2, mask3,
1540 VSHF_B4_SB(src6, src6, mask0, mask1, mask2, mask3,
1561 VSHF_B4_SB(src7, src7, mask0, mask1, mask2, mask3,
1574 VSHF_B4_SB(src8, src8, mask0, mask1, mask2, mask3,
1633 v16i8 mask0, mask1, mask2, mask3, mask4, mask5, mask6, mask7;
1659 mask3 = mask0 + 6;
1669 VSHF_B4_SB(src0, src0, mask0, mask1, mask2, mask3, vec0, vec1, vec2, vec3);
1670 VSHF_B4_SB(src1, src1, mask0, mask1, mask2, mask3, vec4, vec5, vec6, vec7);
1671 VSHF_B4_SB(src2, src2, mask0, mask1, mask2, mask3, vec8, vec9, vec10,
1673 VSHF_B4_SB(src3, src3, mask0, mask1, mask2, mask3, vec12, vec13, vec14,
1684 VSHF_B4_SB(src4, src4, mask0, mask1, mask2, mask3, vec0, vec1, vec2, vec3);
1685 VSHF_B4_SB(src5, src5, mask0, mask1, mask2, mask3, vec4, vec5, vec6, vec7);
1686 VSHF_B4_SB(src6, src6, mask0, mask1, mask2, mask3, vec8, vec9, vec10,
1707 VSHF_B4_SB(src7, src7, mask0, mask1, mask2, mask3, vec0, vec1, vec2,
1720 VSHF_B4_SB(src8, src8, mask0, mask1, mask2, mask3, vec0, vec1, vec2,
2201 v16i8 src0, src1, src2, src3, filt0, filt1, mask0, mask1, mask2, mask3;
2217 mask3 = mask2 + 2;
2226 VSHF_B2_SB(src0, src1, src2, src3, mask3, mask3, vec2, vec3);
3838 v16i8 mask0, mask1, mask2, mask3;
3932 mask3 = mask2 + 2;
3937 VSHF_B2_SB(src0, src1, src0, src1, mask2, mask3, vec0, vec1);
3938 VSHF_B2_SB(src1, src2, src1, src2, mask2, mask3, vec2, vec3);
3951 VSHF_B2_SB(src3, src7, src3, src7, mask2, mask3, vec0, vec1);
3952 VSHF_B2_SB(src4, src8, src4, src8, mask2, mask3, vec2, vec3);
3953 VSHF_B2_SB(src5, src9, src5, src9, mask2, mask3, vec4, vec5);
3954 VSHF_B2_SB(src6, src10, src6, src10, mask2, mask3, vec6, vec7);
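The matches above all come from one recurring idiom in the MSA convolution code: the horizontal 8-tap paths build their byte-shuffle controls as mask1 = mask0 + 2, mask2 = mask0 + 4, mask3 = mask0 + 6, and the VSHF_B2_SB/VSHF_B3_SB/VSHF_B4_SB macros then use each control to gather one tap pair per output pixel (the 4-tap paths around lines 2217 and 3932 instead derive mask3 = mask2 + 2). The sketch below is a portable scalar illustration of that scheme under those assumptions, not the real MSA intrinsics; the base pattern, the shuffle_bytes() helper and every name in it are stand-ins invented for the example.

    /* Illustrative sketch only: emulates the two-source byte shuffle that
     * the VSHF_B* macros wrap, to show why mask3 = mask0 + 6 picks up the
     * last tap pair of an 8-tap horizontal filter. */
    #include <stdint.h>
    #include <stdio.h>

    /* Assumed base control: interleaved byte pairs 0,1 1,2 2,3 ... */
    static const uint8_t mask_base[16] = {
        0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8
    };

    /* Scalar stand-in for a two-source byte shuffle: each control byte
     * selects one byte out of the concatenated 32-byte pair {a, b}. */
    static void shuffle_bytes(const uint8_t *a, const uint8_t *b,
                              const uint8_t *ctrl, uint8_t *out)
    {
        for (int i = 0; i < 16; i++) {
            uint8_t idx = ctrl[i] & 31;
            out[i] = idx < 16 ? a[idx] : b[idx - 16];
        }
    }

    int main(void)
    {
        uint8_t src[32], mask[4][16], vec[4][16];

        for (int i = 0; i < 32; i++)
            src[i] = (uint8_t)i;            /* ramp source row */

        /* mask1 = mask0 + 2, mask2 = mask0 + 4, mask3 = mask0 + 6 */
        for (int m = 0; m < 4; m++)
            for (int i = 0; i < 16; i++)
                mask[m][i] = (uint8_t)(mask_base[i] + 2 * m);

        /* One shuffle per tap pair; together they feed the four
         * multiply-accumulate steps of an 8-tap filter. */
        for (int m = 0; m < 4; m++)
            shuffle_bytes(src, src + 16, mask[m], vec[m]);

        /* The mask3 result holds bytes 6,7 7,8 8,9 ... : the last two
         * taps for output pixels 0..7. */
        for (int i = 0; i < 16; i++)
            printf("%d ", vec[3][i]);
        printf("\n");
        return 0;
    }

Because each control is just the base pattern shifted by two source bytes, the four shuffles together present all eight consecutive taps for every output pixel, which is why mask3 shows up wherever the final tap pair of the 8-tap filter is gathered.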