Lines Matching defs:vec2

741     __m128i vec0, vec1, vec2, vec3, loc0, loc1, loc2, loc3;
747 vec2 = __lsx_vld(tmp_odd_buf, 14 * 16);
754 DUP4_ARG2(__lsx_vadd_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
760 __lsx_vst(SUB(loc1, vec2), tmp_buf, 23 * 16);
767 vec2 = __lsx_vld(tmp_odd_buf, 10 * 16);
774 DUP4_ARG2(__lsx_vadd_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
778 __lsx_vst(SUB(loc1, vec2), tmp_buf, 21 * 16);
785 vec2 = __lsx_vld(tmp_odd_buf, 12 * 16);
792 DUP4_ARG2(__lsx_vadd_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
796 __lsx_vst(SUB(loc1, vec2), tmp_buf, 22 * 16);
803 vec2 = __lsx_vld(tmp_odd_buf, 8 * 16);
810 DUP4_ARG2(__lsx_vadd_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
814 __lsx_vst(SUB(loc1, vec2), tmp_buf, 20 * 16);
882 __m128i vec0, vec1, vec2, vec3, loc0, loc1, loc2, loc3;
906 LSX_BUTTERFLY_4_H(reg1, reg7, reg3, reg5, vec1, vec3, vec2, vec0);
907 VP9_DOTP_CONST_PAIR(vec2, vec0, cospi_16_64, cospi_16_64, loc2, loc3);
914 LSX_BUTTERFLY_4_H(reg4, reg0, reg2, reg6, vec1, vec3, vec2, vec0);
916 LSX_BUTTERFLY_4_H(vec2, vec3, loc3, loc2, stp2, stp1, stp6, stp5);
997 __m128i vec0, vec1, vec2, vec3, loc0, loc1, loc2, loc3;
1047 LSX_BUTTERFLY_4_H(reg0, reg7, reg6, reg1, vec0, vec1, vec2, vec3);
1050 VP9_DOTP_CONST_PAIR(vec2, vec3, cospi_24_64, cospi_8_64, vec2, vec3);
1051 __lsx_vst(vec2, tmp_odd_buf, 2 * 16);
1081 vec0, vec1, vec2, vec3);
1083 VP9_DOTP_CONST_PAIR(vec3, vec2, -cospi_20_64, cospi_12_64, loc2, loc3);
1084 LSX_BUTTERFLY_4_H(loc2, loc3, loc1, loc0, vec0, vec1, vec3, vec2);
1087 VP9_DOTP_CONST_PAIR(vec3, vec2, -cospi_8_64, cospi_24_64, vec0, vec1);
1093 vec0, vec1, vec2, vec3);
1094 LSX_BUTTERFLY_4_H(vec0, vec3, vec2, vec1, reg0, reg1, reg3, reg2);
1157 __m128i vec0, vec1, vec2, vec3, loc0, loc1, loc2, loc3;
1163 vec2 = __lsx_vld(tmp_odd_buf, 14 * 16);
1170 DUP4_ARG2(__lsx_vadd_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
1175 DUP4_ARG2(__lsx_vsub_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
1184 vec2 = __lsx_vld(tmp_odd_buf, 10 * 16);
1191 DUP4_ARG2(__lsx_vadd_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
1197 DUP4_ARG2(__lsx_vsub_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
1206 vec2 = __lsx_vld(tmp_odd_buf, 12 * 16);
1213 DUP4_ARG2(__lsx_vadd_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
1218 DUP4_ARG2(__lsx_vsub_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
1227 vec2 = __lsx_vld(tmp_odd_buf, 8 * 16);
1234 DUP4_ARG2(__lsx_vadd_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
1239 DUP4_ARG2(__lsx_vsub_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,