Lines Matching defs:loc3

375 __m128i loc0, loc1, loc2, loc3;
411 VP9_DOTP_CONST_PAIR(reg14, reg2, cospi_16_64, cospi_16_64, loc2, loc3);
420 reg4 = __lsx_vsub_h(reg6, loc3);
421 reg6 = __lsx_vadd_h(reg6, loc3);
427 VP9_DOTP_CONST_PAIR(reg9, reg7, cospi_14_64, cospi_18_64, loc2, loc3);
431 reg7 = __lsx_vsub_h(reg15, loc3);
432 reg15 = __lsx_vadd_h(reg15, loc3);
500 __m128i loc0, loc1, loc2, loc3;
536 VP9_DOTP_CONST_PAIR(reg14, reg2, cospi_16_64, cospi_16_64, loc2, loc3);
545 reg4 = __lsx_vsub_h(reg6, loc3);
546 reg6 = __lsx_vadd_h(reg6, loc3);
552 VP9_DOTP_CONST_PAIR(reg9, reg7, cospi_14_64, cospi_18_64, loc2, loc3);
556 reg7 = __lsx_vsub_h(reg15, loc3);
557 reg15 = __lsx_vadd_h(reg15, loc3);
741 __m128i vec0, vec1, vec2, vec3, loc0, loc1, loc2, loc3;
752 loc3 = __lsx_vld(tmp_eve_buf, 12 * 16);
754 DUP4_ARG2(__lsx_vadd_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
762 __lsx_vst(SUB(loc3, vec0), tmp_buf, 19 * 16);
772 loc3 = __lsx_vld(tmp_eve_buf, 14 * 16);
774 DUP4_ARG2(__lsx_vadd_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
780 __lsx_vst(SUB(loc3, vec0), tmp_buf, 17 * 16);
790 loc3 = __lsx_vld(tmp_eve_buf, 13 * 16);
792 DUP4_ARG2(__lsx_vadd_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
798 __lsx_vst(SUB(loc3, vec0), tmp_buf, 18 * 16);
808 loc3 = __lsx_vld(tmp_eve_buf, 15 * 16);
810 DUP4_ARG2(__lsx_vadd_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
816 __lsx_vst(SUB(loc3, vec0), tmp_buf, 16 * 16);
882 __m128i vec0, vec1, vec2, vec3, loc0, loc1, loc2, loc3;
907 VP9_DOTP_CONST_PAIR(vec2, vec0, cospi_16_64, cospi_16_64, loc2, loc3);
916 LSX_BUTTERFLY_4_H(vec2, vec3, loc3, loc2, stp2, stp1, stp6, stp5);
969 LSX_BUTTERFLY_4_H(stp0, stp1, reg7, reg5, loc1, loc3, loc2, loc0);
971 __lsx_vst(loc3, tmp_eve_buf, 16);
974 LSX_BUTTERFLY_4_H(stp2, stp3, reg4, reg1, loc1, loc3, loc2, loc0);
976 __lsx_vst(loc3, tmp_eve_buf, 2 * 16 + 16);
981 LSX_BUTTERFLY_4_H(stp4, stp5, reg6, reg3, loc1, loc3, loc2, loc0);
983 __lsx_vst(loc3, tmp_eve_buf, 4 * 16 + 16);
987 LSX_BUTTERFLY_4_H(stp6, stp7, reg2, reg0, loc1, loc3, loc2, loc0);
989 __lsx_vst(loc3, tmp_eve_buf, 6 * 16 + 16);
997 __m128i vec0, vec1, vec2, vec3, loc0, loc1, loc2, loc3;
1083 VP9_DOTP_CONST_PAIR(vec3, vec2, -cospi_20_64, cospi_12_64, loc2, loc3);
1084 LSX_BUTTERFLY_4_H(loc2, loc3, loc1, loc0, vec0, vec1, vec3, vec2);
1111 loc0, loc1, loc2, loc3);
1115 __lsx_vst(loc3, tmp_odd_buf, 48);
1120 VP9_DOTP_CONST_PAIR(vec1, vec0, cospi_16_64, cospi_16_64, loc2, loc3);
1124 __lsx_vst(loc3, tmp_odd_buf, 8 * 16 + 48);
1135 loc0, loc1, loc2, loc3);
1139 __lsx_vst(loc3, tmp_odd_buf, 4 * 16 + 48);
1145 VP9_DOTP_CONST_PAIR(vec1, vec0, cospi_16_64, cospi_16_64, loc2, loc3);
1149 __lsx_vst(loc3, tmp_odd_buf, 12 * 16 + 48);
1157 __m128i vec0, vec1, vec2, vec3, loc0, loc1, loc2, loc3;
1168 loc3 = __lsx_vld(tmp_eve_buf, 12 * 16);
1170 DUP4_ARG2(__lsx_vadd_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
1175 DUP4_ARG2(__lsx_vsub_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
1189 loc3 = __lsx_vld(tmp_eve_buf, 14 * 16);
1191 DUP4_ARG2(__lsx_vadd_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
1197 DUP4_ARG2(__lsx_vsub_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
1211 loc3 = __lsx_vld(tmp_eve_buf, 13 * 16);
1213 DUP4_ARG2(__lsx_vadd_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
1218 DUP4_ARG2(__lsx_vsub_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
1232 loc3 = __lsx_vld(tmp_eve_buf, 15 * 16);
1234 DUP4_ARG2(__lsx_vadd_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
1239 DUP4_ARG2(__lsx_vsub_h, loc0, vec3, loc1, vec2, loc2, vec1, loc3, vec0,
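
Note (not part of the search output above): a minimal scalar sketch, assuming the recurring pairing of __lsx_vadd_h(loc3, vec0) with the matching subtraction is a per-lane add/sub butterfly that recombines even-stage (loc) and odd-stage (vec) partial results. The helper name butterfly_rows and the sample values are invented for illustration only; the actual file does this eight int16_t lanes at a time on __m128i registers.

/* Scalar model of the loc/vec recombination seen in the loc3 matches. */
#include <stdint.h>
#include <stdio.h>

/* Hypothetical helper: forms loc + vec and loc - vec for each lane,
 * the scalar analogue of one __lsx_vadd_h / __lsx_vsub_h row pair. */
static void butterfly_rows(const int16_t *loc, const int16_t *vec,
                           int16_t *sum, int16_t *diff, int lanes) {
  for (int i = 0; i < lanes; ++i) {
    sum[i] = (int16_t)(loc[i] + vec[i]);
    diff[i] = (int16_t)(loc[i] - vec[i]);
  }
}

int main(void) {
  int16_t loc3[8] = { 10, 20, 30, 40, 50, 60, 70, 80 };
  int16_t vec0[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
  int16_t sum[8], diff[8];

  butterfly_rows(loc3, vec0, sum, diff, 8);
  for (int i = 0; i < 8; ++i) printf("%d %d\n", sum[i], diff[i]);
  return 0;
}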