Lines Matching refs:instr (cross-reference hits for the identifier "instr" in the ir2 instruction scheduler; each match below is prefixed with its source line number)

30 scalar_possible(struct ir2_instr *instr)
32 if (instr->alu.scalar_opc == SCALAR_NONE)
35 return src_ncomp(instr) == 1;
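The matches at source lines 30-35 come from a small predicate that decides whether an instruction can go in the scalar ALU slot. A minimal sketch of that check, using simplified stand-in types (the real struct ir2_instr and the src_ncomp() helper live in the ir2 headers and carry much more state):

#include <stdbool.h>

/* stand-ins only; the real ir2 definitions are richer */
enum { SCALAR_NONE = 0 };

struct ir2_instr {
   struct {
      int scalar_opc;   /* SCALAR_NONE when no scalar encoding exists */
   } alu;
   unsigned src_ncomp;  /* stand-in for what src_ncomp() computes */
};

/* an instruction fits the scalar ALU slot only when a scalar opcode
 * exists for it and it reads a single component */
static bool
scalar_possible(struct ir2_instr *instr)
{
   if (instr->alu.scalar_opc == SCALAR_NONE)
      return false;

   return instr->src_ncomp == 1;
}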
59 alu_vector_prio(struct ir2_instr *instr)
61 if (instr->alu.vector_opc == VECTOR_NONE)
64 if (is_export(instr))
68 if (instr->src_count == 3)
71 if (!scalar_possible(instr))
74 return instr->src_count == 2 ? 2 : 3;
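Source lines 59-74 are from the vector-slot priority function; a lower return value means the instruction is scheduled earlier. A hedged reconstruction of the decision ladder these matches outline, assuming the usual ir2 declarations (struct ir2_instr, VECTOR_NONE, is_export(), scalar_possible()); only the final return expression appears verbatim above, the other constants are assumptions about the intended ordering:

/* priority for the vector ALU slot: lower value = scheduled earlier
 * (sketch; return values other than the last one are assumed) */
static unsigned
alu_vector_prio(struct ir2_instr *instr)
{
   if (instr->alu.vector_opc == VECTOR_NONE)
      return ~0u;                 /* cannot use the vector slot at all */

   if (is_export(instr))
      return 4;                   /* exports go last */

   if (instr->src_count == 3)
      return 0;                   /* 3-src ops only fit the vector slot */

   if (!scalar_possible(instr))
      return 1;                   /* then ops that can only be vector */

   /* ops that could also be issued as scalar come last */
   return instr->src_count == 2 ? 2 : 3;
}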
79 alu_scalar_prio(struct ir2_instr *instr)
81 if (!scalar_possible(instr))
85 if (instr->src_count > 1)
88 if (is_export(instr))
92 if (instr->alu.scalar_opc >= PRED_SETEs &&
93 instr->alu.scalar_opc <= PRED_SET_RESTOREs)
97 return instr->alu.vector_opc == VECTOR_NONE ? 0 : 3;
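Lines 79-97 are the scalar-slot counterpart, under the same assumptions. The visible checks suggest the ordering below: instructions with more than one source are deferred to the scalarize path further down, predicate-setting opcodes (PRED_SETEs .. PRED_SET_RESTOREs) keep top priority so they are not reordered, and scalar-only instructions beat ones that also have a vector form. The return values not visible in the matches are assumptions:

/* priority for the scalar ALU slot: lower value = scheduled earlier
 * (sketch; return values not visible in the matches are assumed) */
static unsigned
alu_scalar_prio(struct ir2_instr *instr)
{
   if (!scalar_possible(instr))
      return ~0u;

   /* 2-src scalar candidates are handled later by scalarize_case1() */
   if (instr->src_count > 1)
      return ~0u;

   if (is_export(instr))
      return 4;

   /* keep predicate-setting instructions in place */
   if (instr->alu.scalar_opc >= PRED_SETEs &&
       instr->alu.scalar_opc <= PRED_SET_RESTOREs)
      return 0;

   /* scalar-only instructions win over ones that also have a vector form */
   return instr->alu.vector_opc == VECTOR_NONE ? 0 : 3;
}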
115 if (s->instr && s->instr->block_idx != block_idx)
121 if ((s->instr && s->instr->idx == src1.num) ||
131 if (s->instr_s || s->instr->src_count == 3)
134 if (s->instr->type != IR2_ALU || s->instr->alu.export >= 0)
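The matches at lines 115-134 are from the loop that searches backwards through already-filled scheduler slots for a place where a scalar MOV can be co-issued. A hedged sketch of that search, assuming the ir2 scheduler context (ctx->instr_sched, struct ir2_sched_instr with its instr/instr_s halves); find_mov_slot and the filled parameter are hypothetical names for illustration, and the &comp out-parameter visible at line 190 suggests the real loop also picks a free destination component, which is omitted here:

/* walk backwards over the already-filled scheduler slots looking for
 * one that can additionally take a scalar MOV of src1 */
static struct ir2_sched_instr *
find_mov_slot(struct ir2_context *ctx, unsigned block_idx,
              struct ir2_src src1, unsigned filled)
{
   struct ir2_sched_instr *found = NULL;

   for (unsigned i = filled; i > 0; i--) {
      struct ir2_sched_instr *s = &ctx->instr_sched[i - 1];

      /* don't cross into an earlier block */
      if (s->instr && s->instr->block_idx != block_idx)
         break;

      /* can't place the MOV at or above the instruction producing src1 */
      if ((s->instr && s->instr->idx == src1.num) ||
          (s->instr_s && s->instr_s->idx == src1.num))
         break;

      /* slot is full: scalar half already used, or the vector half
       * needs all three source operands */
      if (s->instr_s || s->instr->src_count == 3)
         continue;

      /* only ordinary (non-export) ALU slots can host the MOV */
      if (s->instr->type != IR2_ALU || s->instr->alu.export >= 0)
         continue;

      found = s;
   }
   return found;
}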
157 scalarize_case1(struct ir2_context *ctx, struct ir2_instr *instr, bool order)
159 struct ir2_src src0 = instr->src[order];
160 struct ir2_src src1 = instr->src[!order];
186 if (reg->comp[i].ref_count != !!(instr->alu.write_mask & 1 << i))
190 sched = insert(ctx, instr->block_idx, reg->idx, src1, &comp);
194 ins = &ctx->instr[idx = ctx->instr_count++];
206 ins->pred = instr->pred;
207 ins->block_idx = instr->block_idx;
209 instr->src[0] = src0;
210 instr->alu.src1_swizzle = comp;
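Lines 157-210 belong to scalarize_case1(), which rewrites a two-source instruction so it can occupy the scalar slot: a scalar MOV of the second source is co-issued in an earlier slot, and the original instruction is patched down to one source plus a swizzle. A fragment-style sketch of the step visible in the matches, assuming the surrounding context (ctx, instr, src0, src1, comp, sched); the opcode used for the MOV is an assumption, since it does not appear in the matched lines:

/* allocate the scalar MOV, give it the original instruction's predicate
 * and block, then patch the original instruction */
struct ir2_instr *ins = &ctx->instr[ctx->instr_count++];

ins->type = IR2_ALU;
ins->alu.scalar_opc = MAXs;    /* assumed: a 1-src MAXs acts as a copy */
ins->src[0] = src1;
ins->src_count = 1;
ins->alu.write_mask = 1;
ins->pred = instr->pred;       /* keep the original predicate */
ins->block_idx = instr->block_idx;

/* the original instruction now reads only src0; the other operand is
 * reached through the component the MOV wrote, selected by the swizzle */
instr->src[0] = src0;
instr->alu.src1_swizzle = comp;

sched->instr_s = ins;          /* co-issue the MOV in the scalar slot */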
227 ir2_foreach_instr (instr, ctx) {
228 if (!instr->need_emit)
230 if (is_export(instr))
231 export = MIN2(export, export_buf(instr->alu.export));
234 ir2_foreach_instr (instr, ctx) {
235 if (!instr->need_emit)
239 if (is_export(instr) && export_buf(instr->alu.export) != export)
243 block_idx = instr->block_idx;
244 else if (block_idx != instr->block_idx || /* must be same block */
245 instr->type == IR2_CF || /* CF/MEM must be alone */
246 (is_export(instr) && export == SQ_MEMORY))
255 ir2_foreach_src (src, instr) {
264 if (!p->is_ssa && p->reg == reg && p->idx < instr->idx)
269 is_ok &= !ctx->instr[src->num].need_emit;
273 if (!instr->is_ssa) {
275 if (!p->need_emit || p->idx >= instr->idx)
279 if (get_reg_src(ctx, src) == instr->reg)
285 if (avail_count && instr->pred != avail[0]->pred)
291 avail[avail_count++] = instr;
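Lines 227-291 come from the candidate-gathering half of the scheduler step: one pass picks the lowest export buffer still pending so different export targets are not mixed, and a second pass collects every not-yet-emitted instruction from the same block whose sources are ready and whose predicate matches the candidates already collected. For SSA sources the readiness test reduces to checking the producer's need_emit flag; a small sketch of that check (ssa_srcs_ready is a hypothetical helper name, and the IR2_SRC_REG case, which also has to scan earlier writers of the register, is only noted in a comment):

/* are all of this instruction's SSA sources already emitted?
 * (hypothetical helper; register sources additionally require every
 * earlier writer of the same register in the block to be emitted) */
static bool
ssa_srcs_ready(struct ir2_context *ctx, struct ir2_instr *instr)
{
   bool is_ok = true;

   ir2_foreach_src (src, instr) {
      if (src->type == IR2_SRC_SSA)
         is_ok &= !ctx->instr[src->num].need_emit;
   }
   return is_ok;
}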
300 ir2_foreach_avail (instr) {
301 if (instr->type == IR2_ALU)
304 ra_src_free(ctx, instr);
305 ra_reg(ctx, get_reg(instr), -1, false, 0);
307 instr->need_emit = false;
308 sched->instr = instr;
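Lines 300-308 show that non-ALU (fetch) instructions are taken first and get a slot to themselves: the chosen instruction's sources are released, its destination is allocated, and the slot is filled immediately. A fragment-style sketch assuming the register-allocation helpers named in the matches (ra_src_free, ra_reg, get_reg) and the enclosing function's block_idx; clearing the scalar half and the early return are assumptions, as they are not in the matched lines:

/* fetch/CF instructions are scheduled ahead of any ALU pairing and
 * occupy the slot alone */
ir2_foreach_avail (instr) {
   if (instr->type == IR2_ALU)
      continue;                               /* only non-ALU here */

   ra_src_free(ctx, instr);                   /* sources no longer live */
   ra_reg(ctx, get_reg(instr), -1, false, 0); /* allocate the result */

   instr->need_emit = false;
   sched->instr = instr;                      /* this slot is the fetch */
   sched->instr_s = NULL;                     /* assumed: no scalar half */
   return block_idx;                          /* assumed early return */
}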
316 ir2_foreach_avail (instr) {
317 prio = alu_vector_prio(instr);
319 instr_v = instr;
326 ir2_foreach_avail (instr) {
327 bool compat = is_alu_compatible(instr_v, instr);
329 prio = alu_scalar_prio(instr);
334 instr_s = instr;
348 ir2_foreach_avail (instr) {
349 if (!is_alu_compatible(instr_v, instr) || !scalar_possible(instr))
353 assert(instr->src_count == 2);
355 if (scalarize_case1(ctx, instr, 0)) {
356 instr_s = instr;
359 if (scalarize_case1(ctx, instr, 1)) {
360 instr_s = instr;
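Lines 316-360 are the ALU pairing logic: the lowest alu_vector_prio() candidate takes the vector slot, a compatible lowest alu_scalar_prio() candidate takes the scalar slot, and if no scalar partner is found, a compatible two-source instruction is rewritten with scalarize_case1() (trying both operand orders) so it can fill it. A simplified sketch of that selection; the real loop is more involved (for instance it can drop the vector pick when an incompatible scalar candidate has higher priority), so treat this as the shape of the logic rather than the exact behaviour:

unsigned prio_v = ~0u, prio_s = ~0u, prio;
struct ir2_instr *instr_v = NULL, *instr_s = NULL;

/* vector slot: lowest priority value wins */
ir2_foreach_avail (instr) {
   prio = alu_vector_prio(instr);
   if (prio < prio_v) {
      instr_v = instr;
      prio_v = prio;
   }
}

/* scalar slot: lowest priority among candidates compatible with the
 * chosen vector instruction */
ir2_foreach_avail (instr) {
   if (instr == instr_v || !is_alu_compatible(instr_v, instr))
      continue;
   prio = alu_scalar_prio(instr);
   if (prio < prio_s) {
      instr_s = instr;
      prio_s = prio;
   }
}

/* no scalar partner: try rewriting a compatible 2-src instruction so it
 * fits the scalar slot, with either operand order */
if (!instr_s) {
   ir2_foreach_avail (instr) {
      if (!is_alu_compatible(instr_v, instr) || !scalar_possible(instr))
         continue;

      assert(instr->src_count == 2);

      if (scalarize_case1(ctx, instr, 0) || scalarize_case1(ctx, instr, 1)) {
         instr_s = instr;
         break;
      }
   }
}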
386 sched->instr = instr_v;
414 struct ir2_instr *instr = sched->instr, *tex_lod;
415 if (instr && instr->type == IR2_FETCH && instr->fetch.opc == TEX_FETCH &&
416 instr->src_count == 2) {
418 tex_lod = &ctx->instr[ctx->instr_count++];
420 tex_lod->block_idx = instr->block_idx;
421 tex_lod->pred = instr->pred;
423 tex_lod->src[0] = instr->src[1];
427 sched->instr = tex_lod;
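Lines 414-427 handle texture fetches with an explicit LOD: a TEX_FETCH that still has two sources carries the LOD as its second operand, and a separate fetch instruction has to load the LOD register before the actual fetch. A sketch of the fixup visible in the matches; the type and opcode assignments are assumptions (they are not in the matched lines), and how the original fetch is re-slotted afterwards is not visible here:

struct ir2_instr *instr = sched->instr, *tex_lod;

if (instr && instr->type == IR2_FETCH && instr->fetch.opc == TEX_FETCH &&
    instr->src_count == 2) {
   /* build the LOD-setting instruction from the fetch's second source */
   tex_lod = &ctx->instr[ctx->instr_count++];
   tex_lod->type = IR2_FETCH;              /* assumed */
   tex_lod->fetch.opc = TEX_SET_TEX_LOD;   /* assumed */
   tex_lod->block_idx = instr->block_idx;  /* same block as the fetch */
   tex_lod->pred = instr->pred;            /* same predicate as the fetch */
   tex_lod->src[0] = instr->src[1];        /* the explicit LOD operand */
   tex_lod->src_count = 1;

   /* the LOD-setting instruction takes the current slot */
   sched->instr = tex_lod;
}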
432 ir2_foreach_instr (instr, ctx)
433 free_block &= instr->block_idx != block_idx;
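The last two matches (lines 432-433) scan the remaining instructions to see whether any still belongs to the block just scheduled from; once none does, the block's registers can be released. A small sketch of that check (ra_block_free as the release step is an assumption, it does not appear in the matches):

bool free_block = true;

/* the block's registers can be freed once no pending instruction
 * belongs to it anymore */
ir2_foreach_instr (instr, ctx)
   free_block &= instr->block_idx != block_idx;

if (free_block)
   ra_block_free(ctx, block_idx);  /* assumed release helper */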