Lines Matching refs:cnst
98 cnst = LD_W2; \
99 b5 = vec_madd(cnst, b5, mzero); /* b5 = b5 * W2; */ \
100 cnst = LD_W1; \
101 b2 = vec_madd(cnst, b2, b5); /* b2 = b5 + b2 * W1; */ \
102 cnst = LD_W0; \
103 b6 = vec_madd(cnst, b6, b5); /* b6 = b5 + b6 * W0; */ \
110 cnst = LD_W3; \
111 x8 = vec_madd(cnst, x8, mzero); /* x8 = x8 * W3; */ \
113 cnst = LD_W8; \
114 x0 = vec_madd(cnst, x0, mzero); /* x0 *= W8; */ \
115 cnst = LD_W9; \
116 x1 = vec_madd(cnst, x1, mzero); /* x1 *= W9; */ \
117 cnst = LD_WA; \
118 x2 = vec_madd(cnst, x2, x8); /* x2 = x2 * WA + x8; */ \
119 cnst = LD_WB; \
120 x3 = vec_madd(cnst, x3, x8); /* x3 = x3 * WB + x8; */ \
122 cnst = LD_W4; \
123 b7 = vec_madd(cnst, x4, x0); /* b7 = x4 * W4 + x0; */ \
124 cnst = LD_W5; \
125 b5 = vec_madd(cnst, x5, x1); /* b5 = x5 * W5 + x1; */ \
126 cnst = LD_W6; \
127 b3 = vec_madd(cnst, x6, x1); /* b3 = x6 * W6 + x1; */ \
128 cnst = LD_W7; \
129 b1 = vec_madd(cnst, x7, x0); /* b1 = x7 * W7 + x0; */ \
155 cnst = LD_W2; \
156 b5 = vec_madd(cnst, b5, mzero); /* b5 = b5 * W2; */ \
157 cnst = LD_W1; \
158 b2 = vec_madd(cnst, b2, b5); /* b2 = b5 + b2 * W1; */ \
159 cnst = LD_W0; \
160 b6 = vec_madd(cnst, b6, b5); /* b6 = b5 + b6 * W0; */ \
167 cnst = LD_W3; \
168 x8 = vec_madd(cnst, x8, mzero); /* x8 = x8 * W3; */ \
170 cnst = LD_W8; \
171 x0 = vec_madd(cnst, x0, mzero); /* x0 *= W8; */ \
172 cnst = LD_W9; \
173 x1 = vec_madd(cnst, x1, mzero); /* x1 *= W9; */ \
174 cnst = LD_WA; \
175 x2 = vec_madd(cnst, x2, x8); /* x2 = x2 * WA + x8; */ \
176 cnst = LD_WB; \
177 x3 = vec_madd(cnst, x3, x8); /* x3 = x3 * WB + x8; */ \
179 cnst = LD_W4; \
180 b7 = vec_madd(cnst, x4, x0); /* b7 = x4 * W4 + x0; */ \
181 cnst = LD_W5; \
182 b5 = vec_madd(cnst, x5, x1); /* b5 = x5 * W5 + x1; */ \
183 cnst = LD_W6; \
184 b3 = vec_madd(cnst, x6, x1); /* b3 = x6 * W6 + x1; */ \
185 cnst = LD_W7; \
186 b1 = vec_madd(cnst, x7, x0); /* b1 = x7 * W7 + x0; */ \
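The two macro excerpts above (file lines 98-129 and 155-186) repeat one AltiVec idiom: load a weight vector into cnst, then call vec_madd(cnst, a, acc), which is the element-wise fused multiply-add cnst * a + acc; mzero stands in as the accumulator when only a plain product is needed. The following is a minimal stand-alone sketch of that idiom, not code from this file: it assumes a PowerPC toolchain with AltiVec enabled (-maltivec), and the 0.5f weight is a placeholder for whatever LD_W2 actually loads.

/* Minimal sketch of the load-weight / vec_madd idiom used in the macro
 * lines above.  vec_madd(a, b, c) computes the element-wise a*b + c.
 * Requires a PowerPC compiler with AltiVec (-maltivec); the weight value
 * below is a placeholder, not a constant taken from this file. */
#include <altivec.h>
#include <stdio.h>

int main(void)
{
    vector float mzero = (vector float){ 0.0f, 0.0f, 0.0f, 0.0f };
    vector float cnst  = (vector float){ 0.5f, 0.5f, 0.5f, 0.5f };  /* stand-in for LD_W2 */
    vector float b5    = (vector float){ 1.0f, 2.0f, 3.0f, 4.0f };
    vector float b2    = (vector float){ 8.0f, 8.0f, 8.0f, 8.0f };

    b5 = vec_madd(cnst, b5, mzero);      /* b5 = b5 * W2;      */
    b2 = vec_madd(cnst, b2, b5);         /* b2 = b5 + b2 * W2; */

    float out[4] __attribute__((aligned(16)));
    vec_st(b2, 0, out);                  /* store the result for printing */
    printf("%f %f %f %f\n", out[0], out[1], out[2], out[3]);
    return 0;
}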
201 vector float mzero, cnst, cnsts0, cnsts1, cnsts2;
300 cnst = LD_W2;
301 x0 = vec_madd(cnst, x0, mzero);    /* x0 = x0 * W2; */
302 x1 = vec_madd(cnst, x1, mzero);    /* x1 = x1 * W2; */
303 cnst = LD_W1;
304 b20 = vec_madd(cnst, b20, x0);     /* b20 = x0 + b20 * W1; */
305 b21 = vec_madd(cnst, b21, x1);     /* b21 = x1 + b21 * W1; */
306 cnst = LD_W0;
307 b60 = vec_madd(cnst, b60, x0);     /* b60 = x0 + b60 * W0; */
308 b61 = vec_madd(cnst, b61, x1);     /* b61 = x1 + b61 * W0; */
328 cnst = LD_W3;
329 x8 = vec_madd(cnst, x8, mzero);    /* x8 = x8 * W3; */
331 cnst = LD_W8;
332 x0 = vec_madd(cnst, x0, mzero);    /* x0 *= W8; */
333 cnst = LD_W9;
334 x1 = vec_madd(cnst, x1, mzero);    /* x1 *= W9; */
335 cnst = LD_WA;
336 x2 = vec_madd(cnst, x2, x8);       /* x2 = x2 * WA + x8; */
337 cnst = LD_WB;
338 x3 = vec_madd(cnst, x3, x8);       /* x3 = x3 * WB + x8; */
340 cnst = LD_W4;
341 b70 = vec_madd(cnst, b70, x0);     /* b70 = b70 * W4 + x0; */
342 cnst = LD_W5;
343 b50 = vec_madd(cnst, b50, x1);     /* b50 = b50 * W5 + x1; */
344 cnst = LD_W6;
345 b30 = vec_madd(cnst, b30, x1);     /* b30 = b30 * W6 + x1; */
346 cnst = LD_W7;
347 b10 = vec_madd(cnst, b10, x0);     /* b10 = b10 * W7 + x0; */
359 cnst = LD_W3;
360 x8 = vec_madd(cnst, x8, mzero);    /* x8 = x8 * W3; */
362 cnst = LD_W8;
363 x0 = vec_madd(cnst, x0, mzero);    /* x0 *= W8; */
364 cnst = LD_W9;
365 x1 = vec_madd(cnst, x1, mzero);    /* x1 *= W9; */
366 cnst = LD_WA;
367 x2 = vec_madd(cnst, x2, x8);       /* x2 = x2 * WA + x8; */
368 cnst = LD_WB;
369 x3 = vec_madd(cnst, x3, x8);       /* x3 = x3 * WB + x8; */
371 cnst = LD_W4;
372 b71 = vec_madd(cnst, b71, x0);     /* b71 = b71 * W4 + x0; */
373 cnst = LD_W5;
374 b51 = vec_madd(cnst, b51, x1);     /* b51 = b51 * W5 + x1; */
375 cnst = LD_W6;
376 b31 = vec_madd(cnst, b31, x1);     /* b31 = b31 * W6 + x1; */
377 cnst = LD_W7;
378 b11 = vec_madd(cnst, b11, x0);     /* b11 = b11 * W7 + x0; */
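For reading the function-body excerpts above (file lines 300-378) without an AltiVec toolchain, here is a scalar model of one of the b70/b50/b30/b10 weighting runs. It is a sketch only: each plain C statement mirrors one vec_madd line, and the W4..W7 values are made-up placeholders, not the constants the LD_W4..LD_W7 macros actually load in this file.

/* Scalar model of one weighting run: vec_madd(cnst, a, acc) is an
 * element-wise fused multiply-add, so each statement below corresponds
 * to one vector statement.  Weight values are placeholder assumptions. */
#include <stdio.h>

int main(void)
{
    /* placeholder weights, not the file's real constants */
    const float W4 = 0.70710678f, W5 = 0.38268343f,
                W6 = 0.92387953f, W7 = 0.54119610f;

    float x0 = 1.0f, x1 = 2.0f;             /* pre-scaled correction terms */
    float b10 = 3.0f, b30 = 4.0f, b50 = 5.0f, b70 = 6.0f;

    b70 = b70 * W4 + x0;                     /* mirrors vec_madd(LD_W4, b70, x0) */
    b50 = b50 * W5 + x1;                     /* mirrors vec_madd(LD_W5, b50, x1) */
    b30 = b30 * W6 + x1;                     /* mirrors vec_madd(LD_W6, b30, x1) */
    b10 = b10 * W7 + x0;                     /* mirrors vec_madd(LD_W7, b10, x0) */

    printf("%f %f %f %f\n", b10, b30, b50, b70);
    return 0;
}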