Lines Matching refs:instr

43 instr_each_src_and_dest_is_ssa(const nir_instr *instr)
45 if (!nir_foreach_dest((nir_instr *)instr, dest_is_ssa, NULL) ||
46 !nir_foreach_src((nir_instr *)instr, src_is_ssa, NULL))
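nir_foreach_dest and nir_foreach_src stop and return false as soon as a callback returns false, so the pair of calls on lines 45-46 answers "is every destination and source SSA?". The dest_is_ssa/src_is_ssa callbacks they take are presumably trivial predicates along these lines (a sketch):

    static bool
    src_is_ssa(nir_src *src, void *data)
    {
       (void) data;
       return src->is_ssa;
    }

    static bool
    dest_is_ssa(nir_dest *dest, void *data)
    {
       (void) data;
       return dest->is_ssa;
    }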
59 instr_can_rewrite(const nir_instr *instr)
62 assert(instr_each_src_and_dest_is_ssa(instr));
64 switch (instr->type) {
72 return nir_intrinsic_can_reorder(nir_instr_as_intrinsic(instr));
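Only reorderable intrinsics may be added to the set, hence the nir_intrinsic_can_reorder() check on line 72; the other handled types (ALU, deref, tex, load_const, phi, per the hash functions below) are presumably accepted unconditionally. Every hash_* helper below threads a running 32-bit seed through xxHash; HASH is presumably a thin file-local wrapper, and hash_src likely just folds in the pointer of the (asserted-SSA) source def, roughly:

    /* Sketch of the file-local hashing helpers. */
    #define HASH(hash, data) XXH32(&(data), sizeof(data), hash)

    static uint32_t
    hash_src(uint32_t hash, const nir_src *src)
    {
       assert(src->is_ssa);
       hash = HASH(hash, src->ssa);   /* identical sources share the same def pointer */
       return hash;
    }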
110 hash_alu(uint32_t hash, const nir_alu_instr *instr)
112 hash = HASH(hash, instr->op);
114 /* We explicitly don't hash instr->exact. */
115 uint8_t flags = instr->no_signed_wrap |
116 instr->no_unsigned_wrap << 1;
119 hash = HASH(hash, instr->dest.dest.ssa.num_components);
120 hash = HASH(hash, instr->dest.dest.ssa.bit_size);
122 if (nir_op_infos[instr->op].algebraic_properties & NIR_OP_IS_2SRC_COMMUTATIVE) {
123 assert(nir_op_infos[instr->op].num_inputs >= 2);
125 uint32_t hash0 = hash_alu_src(hash, &instr->src[0],
126 nir_ssa_alu_instr_src_components(instr, 0));
127 uint32_t hash1 = hash_alu_src(hash, &instr->src[1],
128 nir_ssa_alu_instr_src_components(instr, 1));
137 for (unsigned i = 2; i < nir_op_infos[instr->op].num_inputs; i++) {
138 hash = hash_alu_src(hash, &instr->src[i],
139 nir_ssa_alu_instr_src_components(instr, i));
142 for (unsigned i = 0; i < nir_op_infos[instr->op].num_inputs; i++) {
143 hash = hash_alu_src(hash, &instr->src[i],
144 nir_ssa_alu_instr_src_components(instr, i));
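For 2-src-commutative ops, src[0] and src[1] are each hashed from the same seed (lines 125-128) and then presumably folded together with an order-independent combination, so that swapping the operands cannot change the hash; any remaining sources (line 137) and the non-commutative case (lines 142-144) are hashed in order. A sketch of the combine step:

    /* hash0 and hash1 start from the same seed, so swapping src[0] and
     * src[1] merely swaps them; any commutative combination therefore
     * keeps the result order-independent.  Plain XOR would be a poor
     * choice (two identical sources would collapse to 0), so a
     * multiply-add style mix is used instead. */
    hash += hash0 * hash1;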
152 hash_deref(uint32_t hash, const nir_deref_instr *instr)
154 hash = HASH(hash, instr->deref_type);
155 hash = HASH(hash, instr->modes);
156 hash = HASH(hash, instr->type);
158 if (instr->deref_type == nir_deref_type_var)
159 return HASH(hash, instr->var);
161 hash = hash_src(hash, &instr->parent);
163 switch (instr->deref_type) {
165 hash = HASH(hash, instr->strct.index);
170 hash = hash_src(hash, &instr->arr.index);
171 hash = HASH(hash, instr->arr.in_bounds);
175 hash = HASH(hash, instr->cast.ptr_stride);
176 hash = HASH(hash, instr->cast.align_mul);
177 hash = HASH(hash, instr->cast.align_offset);
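Lines 163-177 are the type-specific arms of a switch over the deref type; var derefs already returned early on lines 158-159 with just the variable pointer hashed. A plausible reconstruction of the switch:

    switch (instr->deref_type) {
    case nir_deref_type_struct:
       hash = HASH(hash, instr->strct.index);
       break;
    case nir_deref_type_array:
    case nir_deref_type_ptr_as_array:
       hash = hash_src(hash, &instr->arr.index);
       hash = HASH(hash, instr->arr.in_bounds);
       break;
    case nir_deref_type_cast:
       hash = HASH(hash, instr->cast.ptr_stride);
       hash = HASH(hash, instr->cast.align_mul);
       hash = HASH(hash, instr->cast.align_offset);
       break;
    default:
       break;   /* e.g. array wildcards: nothing beyond the common fields */
    }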
193 hash_load_const(uint32_t hash, const nir_load_const_instr *instr)
195 hash = HASH(hash, instr->def.num_components);
197 if (instr->def.bit_size == 1) {
198 for (unsigned i = 0; i < instr->def.num_components; i++) {
199 uint8_t b = instr->value[i].b;
203 unsigned size = instr->def.num_components * sizeof(*instr->value);
204 hash = XXH32(instr->value, size, hash);
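The branch on line 197 special-cases 1-bit constants: each component is hashed through its .b member, presumably so that only the meaningful boolean byte of the nir_const_value contributes, while wider constants are hashed as one flat byte range over the value array (lines 203-204). Reconstructed, the two arms likely look like:

    if (instr->def.bit_size == 1) {
       for (unsigned i = 0; i < instr->def.num_components; i++) {
          uint8_t b = instr->value[i].b;
          hash = HASH(hash, b);
       }
    } else {
       unsigned size = instr->def.num_components * sizeof(*instr->value);
       hash = XXH32(instr->value, size, hash);
    }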
219 hash_phi(uint32_t hash, const nir_phi_instr *instr)
221 hash = HASH(hash, instr->instr.block);
224 unsigned num_preds = instr->instr.block->predecessors->entries;
227 nir_foreach_phi_src(src, instr) {
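A phi is keyed on its block (line 221) and on its sources, but source order must not matter; the sources are therefore gathered and sorted by predecessor block before hashing (lines 224-227 set that up). Presumably something along these lines, with the comparator ordering by the pred pointer:

    /* Illustrative comparator: order phi sources by predecessor block. */
    static int
    cmp_phi_src(const void *data1, const void *data2)
    {
       nir_block *pred1 = (*(nir_phi_src **)data1)->pred;
       nir_block *pred2 = (*(nir_phi_src **)data2)->pred;
       return pred1 > pred2 ? 1 : (pred1 < pred2 ? -1 : 0);
    }

    /* Inside hash_phi: collect, sort, then hash each (src, pred) pair. */
    NIR_VLA(nir_phi_src *, srcs, num_preds);
    unsigned i = 0;
    nir_foreach_phi_src(src, instr)
       srcs[i++] = src;

    qsort(srcs, num_preds, sizeof(srcs[0]), cmp_phi_src);

    for (i = 0; i < num_preds; i++) {
       hash = hash_src(hash, &srcs[i]->src);
       hash = HASH(hash, srcs[i]->pred);
    }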
242 hash_intrinsic(uint32_t hash, const nir_intrinsic_instr *instr)
244 const nir_intrinsic_info *info = &nir_intrinsic_infos[instr->intrinsic];
245 hash = HASH(hash, instr->intrinsic);
248 hash = HASH(hash, instr->dest.ssa.num_components);
249 hash = HASH(hash, instr->dest.ssa.bit_size);
252 hash = XXH32(instr->const_index, info->num_indices * sizeof(instr->const_index[0]), hash);
254 for (unsigned i = 0; i < nir_intrinsic_infos[instr->intrinsic].num_srcs; i++)
255 hash = hash_src(hash, &instr->src[i]);
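Not every intrinsic has a destination, so the two dest hashes on lines 248-249 are presumably guarded by the intrinsic's info record, after which the const_index payload is hashed as a flat range sized by info->num_indices (line 252) and each source is chained in (lines 254-255):

    if (info->has_dest) {
       hash = HASH(hash, instr->dest.ssa.num_components);
       hash = HASH(hash, instr->dest.ssa.bit_size);
    }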
261 hash_tex(uint32_t hash, const nir_tex_instr *instr)
263 hash = HASH(hash, instr->op);
264 hash = HASH(hash, instr->num_srcs);
266 for (unsigned i = 0; i < instr->num_srcs; i++) {
267 hash = HASH(hash, instr->src[i].src_type);
268 hash = hash_src(hash, &instr->src[i].src);
271 hash = HASH(hash, instr->coord_components);
272 hash = HASH(hash, instr->sampler_dim);
273 hash = HASH(hash, instr->is_array);
274 hash = HASH(hash, instr->is_shadow);
275 hash = HASH(hash, instr->is_new_style_shadow);
276 hash = HASH(hash, instr->is_sparse);
277 unsigned component = instr->component;
281 hash = HASH(hash, instr->tg4_offsets[i][j]);
282 hash = HASH(hash, instr->texture_index);
283 hash = HASH(hash, instr->sampler_index);
284 hash = HASH(hash, instr->texture_non_uniform);
285 hash = HASH(hash, instr->sampler_non_uniform);
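Line 281 sits inside a nested loop over the explicit textureGatherOffsets table, a fixed 4x2 array of per-sample offsets; presumably:

    for (unsigned i = 0; i < 4; i++)
       for (unsigned j = 0; j < 2; j++)
          hash = HASH(hash, instr->tg4_offsets[i][j]);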
299 const nir_instr *instr = data;
302 switch (instr->type) {
304 hash = hash_alu(hash, nir_instr_as_alu(instr));
307 hash = hash_deref(hash, nir_instr_as_deref(instr));
310 hash = hash_load_const(hash, nir_instr_as_load_const(instr));
313 hash = hash_phi(hash, nir_instr_as_phi(instr));
316 hash = hash_intrinsic(hash, nir_instr_as_intrinsic(instr));
319 hash = hash_tex(hash, nir_instr_as_tex(instr));
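These cases belong to the top-level hash callback that dispatches on instr->type, which is what gets installed, together with an instruction-equality predicate, in the underlying mesa hash set. A sketch of that pairing; the seed value and the cmp_func name are assumptions here:

    static uint32_t
    hash_instr(const void *data)
    {
       const nir_instr *instr = data;
       uint32_t hash = 0;   /* assumed seed */

       switch (instr->type) {
       case nir_instr_type_alu:
          hash = hash_alu(hash, nir_instr_as_alu(instr));
          break;
       /* ...deref, load_const, phi, intrinsic, tex, as in lines 307-319... */
       default:
          unreachable("Invalid instruction type");
       }

       return hash;
    }

    struct set *
    nir_instr_set_create(void *mem_ctx)
    {
       /* cmp_func: assumed equality callback wrapping the instrs-equal comparison */
       return _mesa_set_create(mem_ctx, hash_instr, cmp_func);
    }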
566 /* We explicitly don't compare instr->exact. */
707 if (phi1->instr.block != phi2->instr.block)
765 nir_instr_get_dest_ssa_def(nir_instr *instr)
767 switch (instr->type) {
769 assert(nir_instr_as_alu(instr)->dest.dest.is_ssa);
770 return &nir_instr_as_alu(instr)->dest.dest.ssa;
772 assert(nir_instr_as_deref(instr)->dest.is_ssa);
773 return &nir_instr_as_deref(instr)->dest.ssa;
775 return &nir_instr_as_load_const(instr)->def;
777 assert(nir_instr_as_phi(instr)->dest.is_ssa);
778 return &nir_instr_as_phi(instr)->dest.ssa;
780 assert(nir_instr_as_intrinsic(instr)->dest.is_ssa);
781 return &nir_instr_as_intrinsic(instr)->dest.ssa;
783 assert(nir_instr_as_tex(instr)->dest.is_ssa);
784 return &nir_instr_as_tex(instr)->dest.ssa;
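Only instruction types that pass instr_can_rewrite ever reach this accessor, which is why each arm can simply assert that the destination is SSA and return it; the switch presumably ends by rejecting everything else:

    default:
       unreachable("no destination to return");
    }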
809 nir_instr_set_add_or_rewrite(struct set *instr_set, nir_instr *instr,
813 if (!instr_can_rewrite(instr))
816 struct set_entry *e = _mesa_set_search_or_add(instr_set, instr, NULL);
818 if (match == instr)
821 if (!cond_function || cond_function(match, instr)) {
823 nir_ssa_def *def = nir_instr_get_dest_ssa_def(instr);
831 if (instr->type == nir_instr_type_alu && nir_instr_as_alu(instr)->exact)
836 nir_instr_remove(instr);
841 e->key = instr;
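Lines 809-841 form the add-or-rewrite entry point: look the instruction up with _mesa_set_search_or_add; if the entry already held an equivalent instruction and the optional cond_function accepts the pair, every use of the new instruction's destination is rewritten to the existing def and the instruction is removed, otherwise the existing key is replaced (line 841) so later lookups see the newer instruction. Note the exact handling on line 831: exact is neither hashed (line 114) nor compared (line 566); instead, when an exact instruction is folded into an inexact match, the surviving match is presumably marked exact right after that check so the result stays conservative. A minimal caller sketch, roughly the shape of the dominance-order walk a CSE pass does over this set (cse_block is an illustrative name):

    /* Visit blocks in dominance order (dominance metadata must be valid) so
     * that any match already in the set dominates the instruction replaced. */
    static bool
    cse_block(struct set *instr_set, nir_block *block)
    {
       bool progress = false;

       nir_foreach_instr_safe(instr, block) {
          /* true => 'instr' was rewritten to an existing match and removed */
          if (nir_instr_set_add_or_rewrite(instr_set, instr, NULL))
             progress = true;
       }

       for (unsigned i = 0; i < block->num_dom_children; i++)
          progress |= cse_block(instr_set, block->dom_children[i]);

       /* Leaving this dominance subtree: its instructions no longer dominate
        * anything visited later, so drop them from the set again. */
       nir_foreach_instr(instr, block)
          nir_instr_set_remove(instr_set, instr);

       return progress;
    }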
847 nir_instr_set_remove(struct set *instr_set, nir_instr *instr)
849 if (!instr_can_rewrite(instr))
852 struct set_entry *entry = _mesa_set_search(instr_set, instr);
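The remove path mirrors the add path: instructions that could never have been added (line 849) are skipped, and otherwise the entry found on line 852 is presumably just dropped:

    if (entry)
       _mesa_set_remove(instr_set, entry);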