Lines Matching refs:intrin
106 nir_intrinsic_instr *intrin,
116 unsigned off_const = nir_intrinsic_base(intrin);
117 nir_src *off_src = &intrin->src[offset_src_idx];
130 b->cursor = nir_before_instr(&intrin->instr);
134 b->cursor = nir_before_instr(&intrin->instr);
141 nir_instr_rewrite_src(&intrin->instr, &intrin->src[offset_src_idx], nir_src_for_ssa(replace_src));
142 nir_intrinsic_set_base(intrin, off_const);
148 nir_intrinsic_instr *intrin,
152 unsigned comp_size = (intrin->intrinsic == nir_intrinsic_load_shared2_amd ?
153 intrin->dest.ssa.bit_size : intrin->src[0].ssa->bit_size) / 8;
154 unsigned stride = (nir_intrinsic_st64(intrin) ? 64 : 1) * comp_size;
155 unsigned offset0 = nir_intrinsic_offset0(intrin) * stride;
156 unsigned offset1 = nir_intrinsic_offset1(intrin) * stride;
157 nir_src *off_src = &intrin->src[offset_src_idx];
170 b->cursor = nir_before_instr(&intrin->instr);
171 nir_instr_rewrite_src(&intrin->instr, off_src, nir_src_for_ssa(nir_imm_zero(b, 1, 32)));
172 nir_intrinsic_set_offset0(intrin, offset0 / stride);
173 nir_intrinsic_set_offset1(intrin, offset1 / stride);
174 nir_intrinsic_set_st64(intrin, st64);
186 nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
188 switch (intrin->intrinsic) {
190 return try_fold_load_store(b, intrin, state, 0, state->options->uniform_max);
192 return try_fold_load_store(b, intrin, state, 1, state->options->ubo_vec4_max);
195 return try_fold_load_store(b, intrin, state, 0, state->options->shared_max);
198 return try_fold_load_store(b, intrin, state, 1, state->options->shared_max);
200 return try_fold_shared2(b, intrin, state, 0);
202 return try_fold_shared2(b, intrin, state, 1);
204 return try_fold_load_store(b, intrin, state, 1, state->options->buffer_max);
206 return try_fold_load_store(b, intrin, state, 2, state->options->buffer_max);
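The matched lines all belong to a NIR pass that folds constant address arithmetic into an intrinsic's immediate offset indices (the fragments read like Mesa's nir_opt_offsets.c, though the file name is not part of the listing). The condensed sketch below shows how the matches fit together. It is a simplified reconstruction, not the verbatim source: the state/options struct, the case labels in the dispatch switch, the constant-only folding (the real pass also walks iadd chains and keeps any non-constant remainder), and the 8-bit offset0/offset1 encoding limit are assumptions, and type names such as nir_ssa_def vs. nir_def differ between NIR versions.

/* Condensed sketch reconstructed around the matched lines above; details
 * not visible in the listing are assumptions, not the verbatim source. */
#include "nir.h"
#include "nir_builder.h"

typedef struct {
   const nir_opt_offsets_options *options; /* assumed: per-category limits */
} opt_offsets_state;

/* Fold a (here: fully constant) offset source into the intrinsic's BASE
 * index, as long as the result stays within the backend-supplied limit. */
static bool
try_fold_load_store(nir_builder *b, nir_intrinsic_instr *intrin,
                    opt_offsets_state *state, unsigned offset_src_idx,
                    uint32_t max)
{
   unsigned off_const = nir_intrinsic_base(intrin);
   nir_src *off_src = &intrin->src[offset_src_idx];

   /* Simplification: the real pass walks the add chain feeding the offset
    * and keeps the non-constant remainder; here we only fold a constant. */
   if (off_src->ssa->bit_size != 32 || !nir_src_is_const(*off_src))
      return false;

   uint64_t c = nir_src_as_uint(*off_src);
   if (off_const + c > max)
      return false;
   off_const += c;

   /* The folded amount now lives in BASE, so the SSA offset becomes zero. */
   b->cursor = nir_before_instr(&intrin->instr);
   nir_ssa_def *zero = nir_imm_zero(b, off_src->ssa->num_components,
                                    off_src->ssa->bit_size);
   nir_instr_rewrite_src(&intrin->instr, &intrin->src[offset_src_idx],
                         nir_src_for_ssa(zero));
   nir_intrinsic_set_base(intrin, off_const);
   return true;
}

/* load/store_shared2_amd carry two element-granular offsets plus an ST64
 * flag; fold in byte units, then convert back to the encoded granularity. */
static bool
try_fold_shared2(nir_builder *b, nir_intrinsic_instr *intrin,
                 opt_offsets_state *state, unsigned offset_src_idx)
{
   unsigned comp_size = (intrin->intrinsic == nir_intrinsic_load_shared2_amd ?
                         intrin->dest.ssa.bit_size : intrin->src[0].ssa->bit_size) / 8;
   unsigned stride = (nir_intrinsic_st64(intrin) ? 64 : 1) * comp_size;
   uint64_t offset0 = (uint64_t)nir_intrinsic_offset0(intrin) * stride;
   uint64_t offset1 = (uint64_t)nir_intrinsic_offset1(intrin) * stride;
   nir_src *off_src = &intrin->src[offset_src_idx];

   /* Simplification: keep the current ST64 choice and only fold a constant;
    * the real pass also re-derives ST64.  The 8-bit limit is assumed. */
   if (!nir_src_is_const(*off_src))
      return false;
   bool st64 = nir_intrinsic_st64(intrin);
   offset0 += nir_src_as_uint(*off_src);
   offset1 += nir_src_as_uint(*off_src);
   if (offset0 % stride || offset1 % stride ||
       offset0 / stride > 0xff || offset1 / stride > 0xff)
      return false;

   b->cursor = nir_before_instr(&intrin->instr);
   nir_instr_rewrite_src(&intrin->instr, off_src,
                         nir_src_for_ssa(nir_imm_zero(b, 1, 32)));
   nir_intrinsic_set_offset0(intrin, offset0 / stride);
   nir_intrinsic_set_offset1(intrin, offset1 / stride);
   nir_intrinsic_set_st64(intrin, st64);
   return true;
}

/* Per-instruction callback: which source holds the offset and how large the
 * immediate may grow both depend on the intrinsic (case labels assumed). */
static bool
process_instr(nir_builder *b, nir_instr *instr, void *s)
{
   if (instr->type != nir_instr_type_intrinsic)
      return false;

   opt_offsets_state *state = (opt_offsets_state *)s;
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);

   switch (intrin->intrinsic) {
   case nir_intrinsic_load_uniform:
      return try_fold_load_store(b, intrin, state, 0, state->options->uniform_max);
   case nir_intrinsic_load_ubo_vec4:
      return try_fold_load_store(b, intrin, state, 1, state->options->ubo_vec4_max);
   case nir_intrinsic_load_shared2_amd:
      return try_fold_shared2(b, intrin, state, 0);
   case nir_intrinsic_store_shared2_amd:
      return try_fold_shared2(b, intrin, state, 1);
   /* load/store_shared and load/store_buffer_amd are dispatched the same
    * way with shared_max / buffer_max and offset sources 0/1 and 1/2. */
   default:
      return false;
   }
}

In Mesa this kind of callback is normally driven over every instruction by nir_shader_instructions_pass from the pass entry point, with the per-category limits (uniform_max, ubo_vec4_max, shared_max, buffer_max) supplied by the caller so each backend can cap the fold at whatever its instruction encodings allow.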