Lines Matching refs:temp2

30 __m256i temp0, temp1, temp2, temp3, t1, t2, t3, t4, t5, t6, t7, t8;
73 temp2 = __lasx_xvdp2_w_h(const_5, temp0);
74 t1 = __lasx_xvdp2add_w_h(temp2, temp1, const_6);
75 temp2 = __lasx_xvdp2_w_h(const_7, temp0);
76 t2 = __lasx_xvdp2add_w_h(temp2, temp1, const_8);
77 temp2 = __lasx_xvdp2_w_h(const_9, temp0);
78 t3 = __lasx_xvdp2add_w_h(temp2, temp1, const_10);
79 temp2 = __lasx_xvdp2_w_h(const_11, temp0);
80 t4 = __lasx_xvdp2add_w_h(temp2, temp1, const_12);
83 temp0, temp1, temp2, temp3);
86 DUP4_ARG2(__lasx_xvsrai_w, temp0, 3, temp1, 3, temp2, 3, temp3, 3,
87 temp0, temp1, temp2, temp3);
92 DUP4_ARG2(__lasx_xvpackev_h, temp1, temp0, temp3, temp2, in1, in0,
93 in3, in2, temp0, temp1, temp2, temp3);
94 DUP2_ARG2(__lasx_xvilvl_w, temp1, temp0, temp3, temp2, t1, t3);
95 DUP2_ARG2(__lasx_xvilvh_w, temp1, temp0, temp3, temp2, t2, t4);
110 temp2 = __lasx_xvdp2_w_h(const_5, temp0);
111 t1 = __lasx_xvdp2add_w_h(temp2, temp1, const_6);
112 temp2 = __lasx_xvdp2_w_h(const_7, temp0);
113 t2 = __lasx_xvdp2add_w_h(temp2, temp1, const_8);
114 temp2 = __lasx_xvdp2_w_h(const_9, temp0);
115 t3 = __lasx_xvdp2add_w_h(temp2, temp1, const_10);
116 temp2 = __lasx_xvdp2_w_h(const_11, temp0);
117 t4 = __lasx_xvdp2add_w_h(temp2, temp1, const_12);
120 temp0, temp1, temp2, temp3);
125 DUP4_ARG3(__lasx_xvsrani_h_w, temp1, temp0, 7, temp3, temp2, 7,
143 __m256i const_dc, temp0, temp1, temp2, temp3;
156 temp0, temp1, temp2, temp3);
157 DUP4_ARG1(__lasx_vext2xv_hu_bu, temp0, temp1, temp2, temp3,
158 temp0, temp1, temp2, temp3);
160 DUP4_ARG2(__lasx_xvadd_h, temp0, const_dc, temp1, const_dc, temp2,
205 __m256i temp0, temp1, temp2, temp3, t1, t2, t3, t4;
212 temp2 = __lasx_xvpickev_w(temp1, temp0);
215 DUP2_ARG2(__lasx_xvdp2_w_h, temp2, const_1, temp2, const_2, temp0, temp1);
226 temp2 = __lasx_xvpickev_w(t2, t1);
229 t1 = __lasx_xvadd_w(temp2, t3);
232 temp1 = __lasx_xvsub_w(t3, temp2);
234 DUP2_ARG3(__lasx_xvsrani_h_w, t2, t1, 3, temp1, temp0, 3, temp2, temp3);
236 temp0 = __lasx_xvpermi_q(temp3, temp2, 0x20);
237 temp1 = __lasx_xvpermi_q(temp3, temp2, 0x31);
243 temp2 = __lasx_xvadd_w(t2, t4);
245 DUP4_ARG2(__lasx_xvsrai_w, temp0, 7, temp1, 7, temp2, 7, temp3, 7,
250 dest + stride3, 0, temp0, temp1, temp2, temp3);
251 DUP4_ARG1(__lasx_vext2xv_wu_bu, temp0, temp1, temp2, temp3,
252 temp0, temp1, temp2, temp3);
253 DUP4_ARG2(__lasx_xvadd_w, temp0, t1, temp1, t2, temp2, t3, temp3, t4,
257 temp2 = __lasx_xvpickev_b(temp1, temp0);
258 temp0 = __lasx_xvperm_w(temp2, shift);
298 __m256i const_dc, temp0, temp1, temp2, temp3, reg0, reg1;
310 temp0, temp1, temp2, temp3);
311 DUP2_ARG2(__lasx_xvilvl_d, temp1, temp0, temp3, temp2, reg0, reg1);
331 __m256i temp0, temp1, temp2, temp3, t1, t2, t3, t4;
376 temp2 = __lasx_xvadd_w(t2, t4);
378 DUP4_ARG2(__lasx_xvsrai_w, temp0, 3, temp1, 3, temp2, 3, temp3, 3,
379 temp0, temp1, temp2, temp3);
383 t2 = __lasx_xvpickev_w(temp3, temp2);
386 t4 = __lasx_xvpickod_w(temp3, temp2);
388 temp2 = __lasx_xvpermi_q(t1, t1, 0x00);
390 t1 = __lasx_xvdp2add_w_h(const_6, temp2, const_7);
397 const_11, temp1, const_12, t1, t2, temp2, temp3);
402 temp0 = __lasx_xvpermi_q(temp3, temp2, 0x20);
403 temp1 = __lasx_xvpermi_q(temp3, temp2, 0x31);
407 temp2 = __lasx_xvsub_w(t4, t2);
409 temp2 = __lasx_xvaddi_wu(temp2, 1);
411 DUP4_ARG2(__lasx_xvsrai_w, temp0, 7, temp1, 7, temp2, 7, temp3, 7,
412 temp0, temp1, temp2, temp3);
423 DUP4_ARG2(__lasx_xvadd_w, temp0, const_1, temp1, const_2, temp2, const_3,
424 temp3, const_4, temp0, temp1, temp2, temp3);
425 DUP4_ARG1(__lasx_xvclip255_w, temp0, temp1, temp2, temp3,
426 temp0, temp1, temp2, temp3);
427 DUP2_ARG2(__lasx_xvpickev_h, temp1, temp0, temp3, temp2, temp0, temp1);
472 __m256i temp0, temp1, temp2, temp3, t1, t2;
485 temp2 = __lasx_xvpickod_h(temp0, temp0);
486 DUP2_ARG2(__lasx_xvdp2_w_h, temp1, const_1, temp2, const_2, t1, t2);
494 temp2 = __lasx_xvpermi_q(temp0, temp0, 0x11);
498 t2 = __lasx_xvdp2_w_h(temp2, const_2);
507 temp2 = __lasx_xvilvl_w(in2, in0);
508 temp2 = __lasx_vext2xv_wu_bu(temp2);
511 temp0 = __lasx_xvadd_w(temp0, temp2);
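The recurring pattern in the hits above (e.g. file lines 73-80, 110-117 and 388-397) pairs __lasx_xvdp2_w_h with __lasx_xvdp2add_w_h so that each 32-bit transform term is built from two halfword dot products, the second accumulated onto the first. Below is a minimal sketch of that pairing, assuming GCC's lasxintrin.h on LoongArch; the helper name and coefficient arguments are placeholders for illustration, not identifiers from the file itself.

#include <lasxintrin.h>   /* LASX intrinsics; requires -mlasx */

/* Each 32-bit lane of the result is
 *     even[2i]*coef_even[2i] + even[2i+1]*coef_even[2i+1]
 *   + odd[2i]*coef_odd[2i]   + odd[2i+1]*coef_odd[2i+1],
 * i.e. one four-tap term kept in 32-bit precision until the later
 * __lasx_xvsrai_w shifts (see file lines 86 and 411-412 above).
 */
static inline __m256i dp2_pair(__m256i even, __m256i odd,
                               __m256i coef_even, __m256i coef_odd)
{
    __m256i acc = __lasx_xvdp2_w_h(coef_even, even);  /* first dot product      */
    return __lasx_xvdp2add_w_h(acc, odd, coef_odd);   /* accumulate second onto it */
}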