Lines Matching refs:reg

31       physreg_t reg;
51 get_copy_src(const struct ir3_register *reg, unsigned offset)
53 if (reg->flags & IR3_REG_IMMED) {
56 .imm = reg->uim_val,
58 } else if (reg->flags & IR3_REG_CONST) {
61 .const_num = reg->num,
66 .reg = ra_reg_get_physreg(reg) + offset,
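
The matches at source lines 51-66 come from a helper that classifies one component of a copy source as an immediate, a constant, or a physical register. Below is a minimal self-contained sketch of that shape, using simplified stand-ins for the ir3 types and for ra_reg_get_physreg(); the real definitions live in the ir3 headers and differ in detail.

#include <stdint.h>

/* Simplified stand-ins; the flag values and struct layouts are not the
 * real Mesa definitions. */
typedef uint16_t physreg_t;

#define IR3_REG_IMMED (1u << 0)
#define IR3_REG_CONST (1u << 1)

struct ir3_register {
   unsigned flags;
   unsigned num;      /* register/const-file index */
   uint32_t uim_val;  /* value when IR3_REG_IMMED is set */
};

struct copy_src {
   unsigned flags;          /* 0, IR3_REG_IMMED, or IR3_REG_CONST */
   union {
      uint32_t imm;         /* immediate value */
      physreg_t reg;        /* physical register slot */
      unsigned const_num;   /* constant-file index */
   };
};

/* Hypothetical stand-in: the real helper maps an allocated ir3_register
 * to its physical register. */
physreg_t
ra_reg_get_physreg(const struct ir3_register *reg)
{
   return (physreg_t)reg->num;
}

/* Classify one component of a copy source, `offset` components in. */
struct copy_src
get_copy_src(const struct ir3_register *reg, unsigned offset)
{
   if (reg->flags & IR3_REG_IMMED) {
      return (struct copy_src){ .flags = IR3_REG_IMMED, .imm = reg->uim_val };
   } else if (reg->flags & IR3_REG_CONST) {
      return (struct copy_src){ .flags = IR3_REG_CONST, .const_num = reg->num };
   } else {
      return (struct copy_src){ .flags = 0,
                                .reg = ra_reg_get_physreg(reg) + offset };
   }
}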
92 * source/destination is a half-reg above the range accessible to half
93 * registers. However, when a full-reg source overlaps a half-reg
100 if (entry->src.reg >= RA_HALF_SIZE) {
107 .src = {.reg = entry->src.reg & ~1u},
116 (entry->src.reg & ~1u) == (entry->dst & ~1u) ?
122 .src = {.reg = tmp + (entry->src.reg & 1)},
130 .src = {.reg = entry->src.reg & ~1u},
143 .src = {.reg = entry->dst},
144 .dst = entry->src.reg,
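
The comment at source lines 92-93 and the swaps at 100-144 describe a fallback: a half-reg swap whose source (or, at 143-144, destination) sits above the range half-reg instructions can address is rewritten as three swaps through a scratch full register. A sketch of that control flow follows, with simplified stand-in types, a hypothetical RA_HALF_SIZE value, and a printf in place of real instruction emission.

#include <stdio.h>

typedef unsigned physreg_t;

#define IR3_REG_HALF  (1u << 0)
#define RA_HALF_SIZE  128u   /* hypothetical half-reg addressing limit */

struct copy_src   { unsigned flags; physreg_t reg; };
struct copy_entry { struct copy_src src; physreg_t dst; unsigned flags; };

void
do_swap(const struct copy_entry *entry)
{
   if (entry->flags & IR3_REG_HALF) {
      if (entry->src.reg >= RA_HALF_SIZE) {
         /* Pick a scratch full register that does not overlap dst. */
         physreg_t tmp = entry->dst < 2 ? 2 : 0;

         /* Swap the full register containing src with the scratch reg. */
         do_swap(&(struct copy_entry){
            .src = {.reg = entry->src.reg & ~1u},
            .dst = tmp,
            .flags = entry->flags & ~IR3_REG_HALF,
         });

         /* If src and dst shared a full register, dst moved to tmp too. */
         physreg_t dst =
            (entry->src.reg & ~1u) == (entry->dst & ~1u) ?
            tmp + (entry->dst & 1u) : entry->dst;

         /* The half-reg swap is now legal: the value lives in tmp. */
         do_swap(&(struct copy_entry){
            .src = {.reg = tmp + (entry->src.reg & 1u)},
            .dst = dst,
            .flags = entry->flags,
         });

         /* Undo the first full-register swap. */
         do_swap(&(struct copy_entry){
            .src = {.reg = entry->src.reg & ~1u},
            .dst = tmp,
            .flags = entry->flags & ~IR3_REG_HALF,
         });
         return;
      }

      /* A swap is symmetric, so an out-of-range dst (lines 143-144) is
       * handled by flipping src and dst and reusing the path above. */
      if (entry->dst >= RA_HALF_SIZE) {
         do_swap(&(struct copy_entry){
            .src = {.reg = entry->dst},
            .dst = entry->src.reg,
            .flags = entry->flags,
         });
         return;
      }
   }

   /* Base case stand-in: the real code emits swap instructions here. */
   printf("swap slots %u <-> %u (%s)\n", entry->src.reg, entry->dst,
          (entry->flags & IR3_REG_HALF) ? "half" : "full");
}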
151 unsigned src_num = ra_physreg_to_num(entry->src.reg, entry->flags);
166 /* Use a macro for shared regs because any shared reg writes need to
193 physreg_t tmp = !entry->src.flags && entry->src.reg < 2 ? 2 : 0;
197 .src = {.reg = entry->dst & ~1u},
206 if (!src.flags && (src.reg & ~1u) == (entry->dst & ~1u))
207 src.reg = tmp + (src.reg & 1u);
218 .src = {.reg = entry->dst & ~1u},
225 if (!entry->src.flags && entry->src.reg >= RA_HALF_SIZE) {
226 unsigned src_num = ra_physreg_to_num(entry->src.reg & ~1u,
230 if (entry->src.reg % 2 == 0) {
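
The do_copy-side matches at source lines 193-230 lean on the slot arithmetic that runs through the whole listing: reg & ~1u names the full register containing a half slot, reg % 2 (or reg & 1) selects its low or high 16 bits, and tmp + (reg & 1) re-applies that parity inside a scratch register. Here is a small conceptual sketch of that arithmetic, assuming the 16-bit-unit physreg numbering; the helper names are illustrative and not from the Mesa sources.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

typedef unsigned physreg_t;

/* Full register that contains a given half-reg slot. */
physreg_t
containing_full_reg(physreg_t half)
{
   return half & ~1u;   /* slots 2n and 2n+1 -> 2n */
}

/* Extract the 16-bit half selected by the slot's parity from the 32-bit
 * value of the containing full register. */
uint16_t
read_half(uint32_t full_value, physreg_t half)
{
   return (half & 1u) ? (uint16_t)(full_value >> 16)
                      : (uint16_t)(full_value & 0xffff);
}

int
main(void)
{
   uint32_t r21 = 0xbeef1234;   /* full register 21 spans slots 42 and 43 */
   assert(containing_full_reg(42) == 42 && containing_full_reg(43) == 42);
   assert(read_half(r21, 42) == 0x1234);   /* even slot: low half  */
   assert(read_half(r21, 43) == 0xbeef);   /* odd slot:  high half */
   printf("ok\n");
   return 0;
}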
252 unsigned src_num = ra_physreg_to_num(entry->src.reg, entry->flags);
305 new_entry->src.reg = entry->src.reg + 1;
324 ctx->physreg_use_count[entry->src.reg + j]++;
353 ctx->physreg_use_count[entry->src.reg + j]--;
430 if (entry->dst == entry->src.reg) {
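
The counters at source lines 324 and 353 and the check at 430 suggest the usual parallel-copy bookkeeping: every pending copy bumps a per-slot use count for each component of its source, identity copies are skipped, and an entry can be emitted as a plain copy once nothing else still reads its destination. The sketch below works under those assumptions, with simplified stand-in types and a hypothetical copy_entry_size() mirroring the name at line 463.

#include <stdbool.h>
#include <string.h>

#define NUM_PHYSREG_SLOTS 512
#define IR3_REG_HALF (1u << 0)

struct copy_src   { unsigned flags; unsigned reg; };
struct copy_entry { struct copy_src src; unsigned dst; unsigned flags; };

struct copy_ctx {
   unsigned entry_count;
   struct copy_entry entries[32];
   unsigned physreg_use_count[NUM_PHYSREG_SLOTS];
};

/* Hypothetical: one slot for a half copy, two for a full copy. */
unsigned
copy_entry_size(const struct copy_entry *entry)
{
   return (entry->flags & IR3_REG_HALF) ? 1 : 2;
}

/* Count how many pending copies still read each physreg slot. */
void
count_uses(struct copy_ctx *ctx)
{
   memset(ctx->physreg_use_count, 0, sizeof(ctx->physreg_use_count));
   for (unsigned i = 0; i < ctx->entry_count; i++) {
      struct copy_entry *entry = &ctx->entries[i];
      if (entry->src.flags)              /* immediate/const: reads no physreg */
         continue;
      if (entry->dst == entry->src.reg)  /* identity copy: nothing to emit */
         continue;
      for (unsigned j = 0; j < copy_entry_size(entry); j++)
         ctx->physreg_use_count[entry->src.reg + j]++;
   }
}

/* A plain copy into dst is safe once no pending copy reads those slots. */
bool
entry_blocked(const struct copy_ctx *ctx, const struct copy_entry *entry)
{
   for (unsigned j = 0; j < copy_entry_size(entry); j++) {
      if (ctx->physreg_use_count[entry->dst + j] != 0)
         return true;
   }
   return false;
}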
447 if (blocking->src.reg <= entry->dst &&
448 blocking->src.reg + 1 >= entry->dst &&
462 if (blocking->src.reg >= entry->dst &&
463 blocking->src.reg < entry->dst + copy_entry_size(entry)) {
464 blocking->src.reg =
465 entry->src.reg + (blocking->src.reg - entry->dst);
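
The final matches deal with entries blocked by the copy just resolved: lines 447-448 detect a wider source that only partially overlaps the written destination (which line 305 suggests is handled by splitting the copy in two), while lines 462-465 retarget any source inside the written range to the slot where its value now lives. The sketch below covers only that redirection step, redeclaring the same simplified stand-ins so it stands alone.

#define IR3_REG_HALF (1u << 0)

struct copy_src   { unsigned flags; unsigned reg; };
struct copy_entry { struct copy_src src; unsigned dst; unsigned flags; };

unsigned
copy_entry_size(const struct copy_entry *entry)
{
   return (entry->flags & IR3_REG_HALF) ? 1 : 2;
}

/* After the values in entry->src and entry->dst have been exchanged,
 * retarget every other pending copy that still reads a slot inside the
 * freshly written destination range: the old contents of that slot now
 * live at the same offset inside entry->src. */
void
redirect_blocked_sources(struct copy_entry *entries, unsigned entry_count,
                         const struct copy_entry *entry)
{
   for (unsigned i = 0; i < entry_count; i++) {
      struct copy_entry *blocking = &entries[i];

      if (blocking == entry || blocking->src.flags)
         continue;

      if (blocking->src.reg >= entry->dst &&
          blocking->src.reg < entry->dst + copy_entry_size(entry)) {
         blocking->src.reg =
            entry->src.reg + (blocking->src.reg - entry->dst);
      }
   }
}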