Lines Matching refs:temp
44 vz0 = vec_add(vb0,vb2); /* temp[0] = Y[0] + Y[2] */ \
45 vz1 = vec_sub(vb0,vb2); /* temp[1] = Y[0] - Y[2] */ \
47 vz2 = vec_sub(vz2,vb3); /* temp[2] = Y[1].1/2 - Y[3] */ \
49 vz3 = vec_add(vb1,vz3); /* temp[3] = Y[1] + Y[3].1/2 */ \
51 va0 = vec_add(vz0,vz3); /* x[0] = temp[0] + temp[3] */ \
52 va1 = vec_add(vz1,vz2); /* x[1] = temp[1] + temp[2] */ \
53 va2 = vec_sub(vz1,vz2); /* x[2] = temp[1] - temp[2] */ \
54 va3 = vec_sub(vz0,vz3) /* x[3] = temp[0] - temp[3] */
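Note: the comments above trace the two butterfly stages of the 4-point 1-D transform used by the H.264 inverse DCT; each vec_add/vec_sub line is one lane-parallel butterfly. A scalar sketch of the same arithmetic for one column (function and variable names are illustrative, not from the matched file):

    #include <stdint.h>

    /* One 1-D pass of the 4-point inverse transform; mirrors the temp[]/x[]
     * comments in the matched lines above. */
    static void idct4_1d(const int16_t y[4], int16_t x[4])
    {
        const int z0 = y[0] + y[2];          /* temp[0] = Y[0] + Y[2]   */
        const int z1 = y[0] - y[2];          /* temp[1] = Y[0] - Y[2]   */
        const int z2 = (y[1] >> 1) - y[3];   /* temp[2] = Y[1]/2 - Y[3] */
        const int z3 = y[1] + (y[3] >> 1);   /* temp[3] = Y[1] + Y[3]/2 */
        x[0] = z0 + z3;
        x[1] = z1 + z2;
        x[2] = z1 - z2;
        x[3] = z0 - z3;
    }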
531 register vec_u8 temp;
538 temp = vec_xor(average, p2);
541 temp = vec_and(temp, ones); /*(p2^avg(p0, q0)) & 1 */
542 unclipped = vec_subs(average, temp); /*(p2+((p0+q0+1)>>1))>>1 */
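Note: the lines above build a truncating average without widening to 9 bits: vec_avg returns the rounding average (a + b + 1) >> 1, and subtracting the parity bit (a ^ b) & 1 turns it into the floor average (a + b) >> 1, which yields (p2 + ((p0 + q0 + 1) >> 1)) >> 1 entirely in bytes. A scalar model of the trick (name is illustrative):

    #include <stdint.h>

    /* Floor average of two bytes built from the rounding average, as the
     * vec_xor/vec_avg/vec_and/vec_subs sequence above does per lane. */
    static uint8_t avg_floor_u8(uint8_t a, uint8_t b)
    {
        uint8_t rounded = (uint8_t)((a + b + 1) >> 1);  /* what vec_avg yields */
        uint8_t parity  = (uint8_t)((a ^ b) & 1);       /* 1 iff a + b is odd  */
        return (uint8_t)(rounded - parity);             /* (a + b) >> 1        */
    }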
583 DECLARE_ALIGNED(16, unsigned char, temp)[16]; \
595 temp[0] = alpha; \
596 temp[1] = beta; \
597 alphavec = vec_ld(0, temp); \
602 AV_COPY32(temp, tc0); \
603 tc0vec = vec_ld(0, (signed char*)temp); \
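Note: this is the usual AltiVec pattern for getting per-call scalars (alpha, beta, the tc0 values) into vector registers: stage them in a 16-byte-aligned buffer, issue one aligned vec_ld, then broadcast the wanted lanes with vec_splat. A minimal sketch, PowerPC/AltiVec only, with illustrative names:

    #include <altivec.h>
    #include <stdint.h>

    /* Load one scalar into every byte of a vector via an aligned staging
     * buffer; the same idea covers beta and the four tc0 bytes. */
    static vector unsigned char splat_scalar(uint8_t alpha)
    {
        __attribute__((aligned(16))) uint8_t buf[16] = { alpha };
        vector unsigned char v = vec_ld(0, buf);  /* single aligned 16-byte load */
        return vec_splat(v, 0);                   /* broadcast byte 0            */
    }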
664 DECLARE_ALIGNED(16, int32_t, temp)[4];
669 temp[0] = log2_denom;
670 temp[1] = weight;
671 temp[2] = offset;
673 vtemp = (vec_s16)vec_ld(0, temp);
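Note: the three scalars staged here are the parameters of H.264 explicit weighted prediction with a single reference. A scalar reference of what the vector routine computes per pixel (illustrative names; 8-bit clipping assumed):

    #include <stdint.h>

    static uint8_t weight_pixel(uint8_t src, int log2_denom, int weight, int offset)
    {
        int rnd = log2_denom > 0 ? 1 << (log2_denom - 1) : 0;
        int v   = ((src * weight + rnd) >> log2_denom) + offset;
        return v < 0 ? 0 : v > 255 ? 255 : (uint8_t)v;   /* clip to pixel range */
    }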
713 DECLARE_ALIGNED(16, int32_t, temp)[4];
717 temp[0] = log2_denom+1;
718 temp[1] = weights;
719 temp[2] = weightd;
720 temp[3] = offset;
722 vtemp = (vec_s16)vec_ld(0, temp);
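Note: the bidirectional variant stages log2_denom + 1 because the blended sum of two weighted sources is shifted by one extra bit; the offset is assumed to have been pre-scaled by the caller before it is stored in temp[3]. A scalar sketch (illustrative names):

    #include <stdint.h>

    static uint8_t biweight_pixel(uint8_t dst, uint8_t src, int log2_denom,
                                  int weightd, int weights, int offset)
    {
        int v = (dst * weightd + src * weights + offset) >> (log2_denom + 1);
        return v < 0 ? 0 : v > 255 ? 255 : (uint8_t)v;   /* clip to pixel range */
    }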