Lines Matching refs:def

43 struct ir3_register *def;
93 /* Map from SSA def to reg_or_immed it is mapped to at the end of the block.
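The comment at 93 describes the per-block remap table: each SSA def is mapped to a reg_or_immed saying where its value lives at the end of the block. Judging by the ->def and ->flags accesses in the later matches, the value type is roughly the following sketch; the union layout and the immediate member are assumptions, only the def pointer and flags are visible in the matches.

/* Rough sketch, not the driver's definition: a remapped value is either an
 * SSA def (a register written elsewhere) or an immediate.  The union and the
 * uimm member are assumptions; only ->def and ->flags appear in the matches. */
#include <stdint.h>

struct ir3_register;

struct reg_or_immed {
   unsigned flags;
   union {
      struct ir3_register *def;   /* the def the value now lives in */
      uint32_t uimm;              /* assumed: inline immediate form */
   };
};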
183 if (!instr->srcs[i]->def ||
184 instr->srcs[i]->def->merge_set != instr->dsts[i]->merge_set) {
195 if (!instr->srcs[i]->def ||
196 instr->srcs[i]->def->merge_set != instr->dsts[0]->merge_set) {
225 src->next_use = tmp_next_use[src->def->name];
232 if (src->def->merge_set == instr->dsts[i]->merge_set &&
233 src->def->merge_set_offset == instr->dsts[i]->merge_set_offset) {
234 tmp_next_use[src->def->name] =
237 tmp_next_use[src->def->name] = cycle;
242 tmp_next_use[src->def->name] = cycle;
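The 225-242 matches record, per def name, the cycle of its next use in tmp_next_use, which is the usual bookkeeping for next-use-distance based spilling. A self-contained toy of that scan, walking the block backwards so the table always holds the following use; the direction, types and use list here are illustrative, not taken from the matches.

#include <limits.h>
#include <stdio.h>

#define NUM_VALUES 4

int
main(void)
{
   /* uses[cycle] = name of the value read at that cycle (toy input) */
   unsigned uses[] = {0, 1, 0, 2, 1, 0};
   unsigned num_uses = sizeof(uses) / sizeof(uses[0]);

   unsigned tmp_next_use[NUM_VALUES];
   for (unsigned i = 0; i < NUM_VALUES; i++)
      tmp_next_use[i] = UINT_MAX;

   /* Walk backwards: when a use is visited, the table entry is the next use
    * after it (UINT_MAX if none); then this use becomes the "next use" seen
    * by anything earlier. */
   for (unsigned cycle = num_uses; cycle-- > 0;) {
      unsigned name = uses[cycle];
      if (tmp_next_use[name] == UINT_MAX)
         printf("cycle %u: value %u has no later use\n", cycle, name);
      else
         printf("cycle %u: value %u next used at cycle %u\n", cycle, name,
                tmp_next_use[name]);
      tmp_next_use[name] = cycle;
   }
   return 0;
}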
280 if (!phi->srcs[i]->def)
282 unsigned src = phi->srcs[i]->def->name;
383 interval->dst.def = reg;
589 struct ra_spill_interval *interval = ctx->intervals[src->def->name];
605 struct ra_spill_interval *interval = ctx->intervals[src->def->name];
618 struct ra_spill_interval *interval = ctx->intervals[src->def->name];
647 struct ra_spill_interval *interval = ctx->intervals[src->def->name];
695 src->def = NULL;
699 src->def = NULL;
701 src->def = val->def;
702 val->def->instr->flags &= ~IR3_INSTR_UNUSED;
736 reg = val->def;
746 ir3_src_create(spill, INVALID_REG, ctx->base_reg->flags)->def = ctx->base_reg;
755 src->def = reg;
836 add_to_merge_set(struct ir3_merge_set *set, struct ir3_register *def,
839 def->merge_set = set;
840 def->merge_set_offset = offset;
841 def->interval_start = set->interval_start + offset;
842 def->interval_end = set->interval_start + offset + reg_size(def);
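The 836-842 matches span essentially the whole body of add_to_merge_set: a def joining a merge set records the set and its offset within it, and derives its interval bounds from the set's interval start plus that offset. A compilable stand-in with toy types; the offset parameter name, return type and the size field are assumptions, the four assignments come from the matches.

/* Toy stand-ins for the driver types, just enough to show how a def's
 * interval bounds follow from its merge set's interval_start plus its
 * offset within the set. */
struct toy_merge_set {
   unsigned interval_start;
};

struct toy_register {
   struct toy_merge_set *merge_set;
   unsigned merge_set_offset;
   unsigned interval_start;
   unsigned interval_end;
   unsigned size;   /* stand-in for reg_size(def) */
};

static void
add_to_merge_set(struct toy_merge_set *set, struct toy_register *def,
                 unsigned offset)
{
   def->merge_set = set;
   def->merge_set_offset = offset;
   def->interval_start = set->interval_start + offset;
   def->interval_end = set->interval_start + offset + def->size;
}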
846 split(struct ir3_register *def, unsigned offset,
849 if (reg_elems(def) == 1) {
851 return def;
854 assert(!(def->flags & IR3_REG_ARRAY));
855 assert(def->merge_set);
859 dst->flags |= def->flags & IR3_REG_HALF;
860 struct ir3_register *src = ir3_src_create(split, INVALID_REG, def->flags);
861 src->wrmask = def->wrmask;
862 src->def = def;
863 add_to_merge_set(def->merge_set, dst,
864 def->merge_set_offset + offset * reg_elem_size(def));
890 ir3_src_create(collect, INVALID_REG, parent_def->flags)->def = srcs[i];
920 ir3_src_create(reload, INVALID_REG, ctx->base_reg->flags)->def = ctx->base_reg;
949 struct ir3_register *def,
953 interval->dst.flags = def->flags;
954 interval->dst.def = def;
961 extract(def, (child_reg->interval_start -
962 interval->interval.reg->interval_start) / reg_elem_size(def),
969 reload_def(struct ra_spill_ctx *ctx, struct ir3_register *def,
972 unsigned elems = reg_elems(def);
973 struct ra_spill_interval *interval = ctx->intervals[def->name];
981 interval->dst.flags = def->flags;
982 interval->dst.def = extract(
983 parent->dst.def, (def->interval_start - parent->dst.def->interval_start) /
984 reg_elem_size(def), elems, instr, block);
991 dst = rematerialize(def, instr, block);
993 dst = reload(ctx, def, instr, block);
1002 struct ra_spill_interval *interval = ctx->intervals[src->def->name];
1005 reload_def(ctx, src->def, instr, instr->block);
1015 struct ra_spill_interval *interval = ctx->intervals[src->def->name];
1114 create_temp_interval(struct ra_spill_ctx *ctx, struct ir3_register *def)
1119 /* This is kinda hacky, but we need to create a fake SSA def here that is
1123 *reg = *def;
1126 reg->interval_end = offset + reg_size(def);
1134 ctx->live->interval_offset += reg_size(def);
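The 1114-1134 matches (create_temp_interval) copy an existing def into a fake one whose interval is carved out of the end of the live-interval space, then bump live->interval_offset by the def's size. A toy version of that carve-and-bump step; the types and the interval_start assignment are assumptions, the interval_end and interval_offset updates mirror the matches.

/* Toy only: reserve a fresh interval for a temporary at the current end of
 * the interval space and advance the running offset. */
struct toy_live_state {
   unsigned interval_offset;   /* next free interval position */
};

struct toy_temp_reg {
   unsigned interval_start;
   unsigned interval_end;
};

static void
make_temp_interval(struct toy_live_state *live, struct toy_temp_reg *tmp,
                   unsigned size)
{
   unsigned offset = live->interval_offset;
   tmp->interval_start = offset;       /* assumed; not in the matches */
   tmp->interval_end = offset + size;  /* as in the match at 1126 */
   live->interval_offset += size;      /* as in the match at 1134 */
}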
1147 if (pcopy->srcs[j]->def == src->def)
1209 if (src->def && src->def->merge_set &&
1210 src->def->merge_set == dst->merge_set &&
1211 src->def->merge_set_offset == dst->merge_set_offset) {
1212 struct ra_spill_interval *src_interval = ctx->intervals[src->def->name];
1224 } else if (src->def) {
1237 ctx->intervals[src->def->name];
1246 src->def = temp;
1255 if (src->def && src->def->merge_set &&
1256 src->def->merge_set == dst->merge_set &&
1257 src->def->merge_set_offset == dst->merge_set_offset)
1262 if (!src->def) {
1277 struct ra_spill_interval *temp_interval = ctx->intervals[src->def->name];
1313 struct ir3_register *def)
1315 struct ra_spill_interval *interval = ctx->intervals[def->name];
1316 ra_spill_interval_init(interval, def);
1319 ctx->blocks[block->index].next_use_start[def->name];
1326 is_live_in_phi(struct ir3_register *def, struct ir3_block *block)
1328 return def->instr->opc == OPC_META_PHI && def->instr->block == block;
1332 is_live_in_pred(struct ra_spill_ctx *ctx, struct ir3_register *def,
1337 if (is_live_in_phi(def, block)) {
1338 def = def->instr->srcs[pred_idx]->def;
1339 if (!def)
1343 return _mesa_hash_table_search(state->remap, def);
1347 is_live_in_undef(struct ir3_register *def,
1350 if (!is_live_in_phi(def, block))
1353 return !def->instr->srcs[pred_idx]->def;
1357 read_live_in(struct ra_spill_ctx *ctx, struct ir3_register *def,
1363 if (is_live_in_phi(def, block)) {
1364 def = def->instr->srcs[pred_idx]->def;
1365 if (!def)
1369 struct hash_entry *entry = _mesa_hash_table_search(state->remap, def);
1377 is_live_in_all_preds(struct ra_spill_ctx *ctx, struct ir3_register *def,
1381 if (!is_live_in_pred(ctx, def, block, i))
1389 spill_live_in(struct ra_spill_ctx *ctx, struct ir3_register *def,
1399 struct reg_or_immed *pred_def = read_live_in(ctx, def, block, i);
1401 spill(ctx, pred_def, get_spill_slot(ctx, def), NULL, pred);
1465 struct ir3_register *def = interval->interval.reg;
1466 if (is_live_in_phi(def, block)) {
1467 def = def->instr->srcs[pred_idx]->def;
1470 if (def)
1471 _mesa_hash_table_insert(state->remap, def, new_val);
1477 extract(new_val->def,
1478 (child->interval.reg->interval_start - def->interval_start) /
1479 reg_elem_size(def), reg_elems(child->interval.reg),
1482 child_val->def = child_def;
1489 reload_live_in(struct ra_spill_ctx *ctx, struct ir3_register *def,
1492 struct ra_spill_interval *interval = ctx->intervals[def->name];
1499 if (is_live_in_undef(def, block, i))
1502 struct reg_or_immed *new_val = read_live_in(ctx, def, block, i);
1507 new_val->def = rematerialize(def, NULL, pred);
1509 new_val->def = reload(ctx, def, NULL, pred);
1510 new_val->flags = new_val->def->flags;
1526 add_live_in_phi(struct ra_spill_ctx *ctx, struct ir3_register *def,
1529 struct ra_spill_interval *interval = ctx->intervals[def->name];
1545 _mesa_hash_table_search(state->remap, def);
1549 !pred_val->def ||
1550 (cur_def && cur_def != pred_val->def)) {
1554 cur_def = pred_val->def;
1558 interval->dst.def = cur_def;
1566 dst->flags |= def->flags & (IR3_REG_HALF | IR3_REG_ARRAY);
1567 dst->size = def->size;
1568 dst->wrmask = def->wrmask;
1570 dst->interval_start = def->interval_start;
1571 dst->interval_end = def->interval_end;
1572 dst->merge_set = def->merge_set;
1573 dst->merge_set_offset = def->merge_set_offset;
1579 src->size = def->size;
1580 src->wrmask = def->wrmask;
1584 _mesa_hash_table_search(state->remap, def);
1589 src->def = def;
1593 interval->dst.def = dst;
1639 if (!src->def)
1643 _mesa_hash_table_search(state->remap, src->def);
1654 struct ir3_register *def = interval->interval.reg;
1658 spill(ctx, &interval->dst, get_spill_slot(ctx, def), NULL, block);
1675 reload_live_out(struct ra_spill_ctx *ctx, struct ir3_register *def,
1678 struct ra_spill_interval *interval = ctx->intervals[def->name];
1681 reload_def(ctx, def, NULL, block);
1708 struct ir3_register *def = instr->srcs[pred_idx]->def;
1709 if (!def)
1712 struct ra_spill_interval *interval = ctx->intervals[def->name];
1727 struct ir3_register *def = interval->interval.reg;
1728 if (is_live_in_phi(def, block)) {
1729 def = def->instr->srcs[pred_idx]->def;
1731 BITSET_SET(state->live_out, def->name);
1765 interval->dst.def) {
1868 struct ir3_register *def = NULL;
1871 if (src->def == phi->dsts[0])
1874 if (!src->def || (def && def != src->def))
1876 def = src->def;
1879 phi->data = def;
1885 simplify_phi_def(struct ir3_register *def)
1887 if (def->instr->opc == OPC_META_PHI) {
1888 struct ir3_instruction *phi = def->instr;
1900 return def;
1907 if (src->def)
1908 src->def = simplify_phi_def(src->def);
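The 1868-1908 matches implement trivial-phi simplification: a phi whose non-self sources are all one and the same def (and none are undef) collapses to that def, and simplify_phi_def then follows such phis to the real value. A toy of the detection step, with illustrative types and a sketch of the bail-out behavior based on the visible conditions.

#include <stddef.h>

struct toy_def;

struct toy_phi {
   struct toy_def *dst;
   struct toy_def **srcs;
   unsigned num_srcs;
};

/* Return the single def a trivial phi forwards, or NULL if the phi is not
 * trivial.  Sources equal to the phi's own dst are skipped, mirroring the
 * "src->def == phi->dsts[0]" check; an undef source or two different defs
 * make the phi non-trivial, mirroring "!src->def || (def && def != src->def)". */
static struct toy_def *
trivial_phi_def(const struct toy_phi *phi)
{
   struct toy_def *def = NULL;
   for (unsigned i = 0; i < phi->num_srcs; i++) {
      struct toy_def *src = phi->srcs[i];
      if (src == phi->dst)
         continue;
      if (!src || (def && def != src))
         return NULL;
      def = src;
   }
   return def;
}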
2049 src->def->interval_start < dst->interval_end &&
2050 dst->interval_start < src->def->interval_end) {
2051 ir3_force_merge(dst, src->def,
2052 src->def->interval_start - dst->interval_start);
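Finally, the 2049-2052 matches apply the standard half-open interval overlap test before forcing the source and destination defs into one merge set at their relative offset. The test itself, with toy types standing in for the driver's:

#include <stdbool.h>

struct toy_interval {
   unsigned start;   /* inclusive */
   unsigned end;     /* exclusive */
};

/* Half-open intervals overlap iff each starts before the other ends; this is
 * the same pair of comparisons as the src->def/dst check at 2049-2050. */
static bool
intervals_overlap(struct toy_interval a, struct toy_interval b)
{
   return a.start < b.end && b.start < a.end;
}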