/third_party/mesa3d/src/compiler/nir/
nir_lower_subgroups.c
    33: lower_subgroups_64bit_split_intrinsic(nir_builder *b, nir_intrinsic_instr *intrin,
    38: comp = nir_unpack_64_2x32_split_x(b, intrin->src[0].ssa);
    40: comp = nir_unpack_64_2x32_split_y(b, intrin->src[0].ssa);
    42: nir_intrinsic_instr *intr = nir_intrinsic_instr_create(b->shader, intrin->intrinsic);
    44: intr->const_index[0] = intrin->const_index[0];
    45: intr->const_index[1] = intrin->const_index[1];
    47: if (nir_intrinsic_infos[intrin->intrinsic].num_srcs == 2)
    48: nir_src_copy(&intr->src[1], &intrin->src[1]);
    56: lower_subgroup_op_to_32bit(nir_builder *b, nir_intrinsic_instr *intrin)
    58: assert(intrin …
   107: lower_subgroup_op_to_scalar(nir_builder *b, nir_intrinsic_instr *intrin, bool lower_to_32bit)
   147: lower_vote_eq_to_scalar(nir_builder *b, nir_intrinsic_instr *intrin)
   173: lower_vote_eq(nir_builder *b, nir_intrinsic_instr *intrin)
   201: lower_shuffle_to_swizzle(nir_builder *b, nir_intrinsic_instr *intrin, const nir_lower_subgroups_options *options)
   231: lower_to_shuffle(nir_builder *b, nir_intrinsic_instr *intrin, const nir_lower_subgroups_options *options)
   319: lower_shuffle(nir_builder *b, nir_intrinsic_instr *intrin)
   557: lower_dynamic_quad_broadcast(nir_builder *b, nir_intrinsic_instr *intrin, const nir_lower_subgroups_options *options)
   597: lower_read_invocation_to_cond(nir_builder *b, nir_intrinsic_instr *intrin)
   610: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   [all...]
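The matches above show the split pattern nir_lower_subgroups uses for 64-bit sources: unpack into 32-bit halves (lines 38/40), re-emit the intrinsic once per half (line 42), then pack the halves of the result back together. A minimal standalone C sketch of the same arithmetic; `shuffle32` is a hypothetical stand-in for the 32-bit-only subgroup operation, not a Mesa function:

```c
#include <stdint.h>
#include <stdio.h>

/* Hypothetical 32-bit-only subgroup op (identity for the demo). */
static uint32_t shuffle32(uint32_t v) { return v; }

/* Mirrors nir_unpack_64_2x32_split_x/_y and nir_pack_64_2x32_split:
 * run the 32-bit op on each half, then reassemble the 64-bit result. */
static uint64_t shuffle64_via_2x32(uint64_t v)
{
   uint32_t lo = (uint32_t)(v & 0xffffffffu);  /* ..._split_x: low half  */
   uint32_t hi = (uint32_t)(v >> 32);          /* ..._split_y: high half */
   return ((uint64_t)shuffle32(hi) << 32) | shuffle32(lo);
}

int main(void)
{
   uint64_t v = 0x0123456789abcdefull;
   printf("%llx\n", (unsigned long long)shuffle64_via_2x32(v));
   return 0;
}
```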
nir_opt_uniform_atomics.c
    47: #define OP_NOIMG(intrin, alu) \
    48: case nir_intrinsic_ssbo_atomic_##intrin: \
    53: case nir_intrinsic_shared_atomic_##intrin: \
    54: case nir_intrinsic_global_atomic_##intrin: \
    55: case nir_intrinsic_deref_atomic_##intrin: \
    60: case nir_intrinsic_global_atomic_##intrin##_amd: \
    65: #define OP(intrin, alu) \
    66: OP_NOIMG(intrin, alu) \
    67: case nir_intrinsic_image_deref_atomic_##intrin: \
    68: case nir_intrinsic_image_atomic_##intrin …
    99: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(scalar.def->parent_instr);
   150: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(scalar.def->parent_instr);
   209: optimize_atomic(nir_builder *b, nir_intrinsic_instr *intrin, bool return_prev)
   252: optimize_and_rewrite_atomic(nir_builder *b, nir_intrinsic_instr *intrin)
   297: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   [all...]
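The OP/OP_NOIMG macros fan a single (intrinsic suffix, ALU op) pair out into one `case` label per storage class, so each atomic kind is listed only once in parse_atomic_op. A self-contained sketch of the same token-pasting trick; the enum values below are invented stand-ins for the real nir_intrinsic_* opcodes:

```c
#include <stdio.h>

/* Invented stand-ins for the real nir enums, just to show the macro shape. */
typedef enum { SSBO_ADD, SHARED_ADD, GLOBAL_ADD,
               SSBO_UMIN, SHARED_UMIN, GLOBAL_UMIN } intrinsic;
typedef enum { ALU_IADD, ALU_UMIN, ALU_NONE } alu_op;

/* Like Mesa's OP_NOIMG(intrin, alu): one macro expands into a
 * case label per storage class, all returning the same ALU op. */
#define OP(intrin, alu)   \
   case SSBO_##intrin:    \
   case SHARED_##intrin:  \
   case GLOBAL_##intrin:  \
      return alu;

static alu_op parse_atomic_op(intrinsic i)
{
   switch (i) {
   OP(ADD, ALU_IADD)
   OP(UMIN, ALU_UMIN)
   default: return ALU_NONE;
   }
}
#undef OP

int main(void)
{
   printf("%d %d\n", parse_atomic_op(SHARED_ADD), parse_atomic_op(GLOBAL_UMIN));
   return 0;
}
```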
nir_lower_array_deref_of_vec.c
    76: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    77: assert(intrin->intrinsic != nir_intrinsic_copy_deref);
    79: if (intrin->intrinsic != nir_intrinsic_load_deref &&
    80: intrin->intrinsic != nir_intrinsic_interp_deref_at_centroid &&
    81: intrin->intrinsic != nir_intrinsic_interp_deref_at_sample &&
    82: intrin->intrinsic != nir_intrinsic_interp_deref_at_offset &&
    83: intrin->intrinsic != nir_intrinsic_interp_deref_at_vertex &&
    84: intrin->intrinsic != nir_intrinsic_store_deref)
    87: nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
   103: assert(intrin …
   [all...]
nir_lower_vec3_to_vec4.c
    63: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    64: switch (intrin->intrinsic) {
    66: if (intrin->num_components != 3)
    69: nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
    73: assert(intrin->dest.is_ssa);
    74: intrin->num_components = 4;
    75: intrin->dest.ssa.num_components = 4;
    77: b.cursor = nir_after_instr(&intrin->instr);
    78: nir_ssa_def *vec3 = nir_channels(&b, &intrin->dest.ssa, 0x7);
    79: nir_ssa_def_rewrite_uses_after(&intrin …
   [all...]
nir_lower_scratch.c
    39: nir_intrinsic_instr *intrin,
    42: b->cursor = nir_before_instr(&intrin->instr);
    44: nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
    54: if (intrin->intrinsic == nir_intrinsic_load_deref) {
    55: unsigned bit_size = intrin->dest.ssa.bit_size;
    57: b, intrin->num_components, bit_size == 1 ? 32 : bit_size, offset, .align_mul=align);
    61: nir_ssa_def_rewrite_uses(&intrin->dest.ssa, value);
    63: assert(intrin->intrinsic == nir_intrinsic_store_deref);
    65: assert(intrin->src[1].is_ssa);
    66: nir_ssa_def *value = intrin …
    38: lower_load_store(nir_builder *b, nir_intrinsic_instr *intrin, glsl_type_size_align_func size_align)
    89: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(src->parent_instr);
   115: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   199: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   [all...]
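lower_load_store rewrites variable derefs into scratch loads/stores at byte offsets derived from a glsl_type_size_align_func callback. As a hedged illustration of the placement arithmetic such a pass relies on — the sizes and alignments below are made up, and this is not Mesa's allocator:

```c
#include <stdint.h>
#include <stdio.h>

/* Round 'offset' up to the next multiple of 'align' (align must be a
 * power of two), the usual way a scratch slot is placed for a variable. */
static uint32_t align_up(uint32_t offset, uint32_t align)
{
   return (offset + align - 1) & ~(align - 1);
}

int main(void)
{
   /* Illustrative sizes/alignments for three spilled variables. */
   uint32_t sizes[]  = { 12, 4, 32 };
   uint32_t aligns[] = { 16, 4, 16 };
   uint32_t scratch_size = 0;
   for (int i = 0; i < 3; i++) {
      uint32_t off = align_up(scratch_size, aligns[i]);
      printf("var %d -> scratch offset %u\n", i, off);
      scratch_size = off + sizes[i];
   }
   printf("total scratch: %u bytes\n", scratch_size);
   return 0;
}
```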
nir_opt_offsets.c
   106: nir_intrinsic_instr *intrin,
   116: unsigned off_const = nir_intrinsic_base(intrin);
   117: nir_src *off_src = &intrin->src[offset_src_idx];
   130: b->cursor = nir_before_instr(&intrin->instr);
   134: b->cursor = nir_before_instr(&intrin->instr);
   141: nir_instr_rewrite_src(&intrin->instr, &intrin->src[offset_src_idx], nir_src_for_ssa(replace_src));
   142: nir_intrinsic_set_base(intrin, off_const);
   148: nir_intrinsic_instr *intrin,
   152: unsigned comp_size = (intrin …
   105: try_fold_load_store(nir_builder *b, nir_intrinsic_instr *intrin, opt_offsets_state *state, unsigned offset_src_idx, uint32_t max)
   147: try_fold_shared2(nir_builder *b, nir_intrinsic_instr *intrin, opt_offsets_state *state, unsigned offset_src_idx)
   186: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   [all...]
nir_opt_dead_write_vars.c
    50: nir_intrinsic_instr *intrin;
    75: nir_intrinsic_instr *intrin,
    92: nir_instr_remove(&entry->intrin->instr);
   101: .intrin = intrin,
   133: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   134: switch (intrin->intrinsic) {
   159: if (nir_intrinsic_memory_semantics(intrin) & NIR_MEMORY_RELEASE) {
   161: nir_intrinsic_memory_modes(intrin));
   175: nir_deref_instr *src = nir_src_as_deref(intrin …
    74: update_unused_writes(struct util_dynarray *unused_writes, nir_intrinsic_instr *intrin, nir_deref_instr *dst, nir_component_mask_t mask)
   [all...]
nir_lower_image.c
    35: lower_cube_size(nir_builder *b, nir_intrinsic_instr *intrin)
    37: assert(nir_intrinsic_image_dim(intrin) == GLSL_SAMPLER_DIM_CUBE);
    39: b->cursor = nir_before_instr(&intrin->instr);
    42: nir_instr_as_intrinsic(nir_instr_clone(b->shader, &intrin->instr));
    49: unsigned coord_comps = intrin->dest.ssa.num_components;
    58: nir_ssa_def *vec = nir_vec_scalars(b, comps, intrin->dest.ssa.num_components);
    59: nir_ssa_def_rewrite_uses(&intrin->dest.ssa, vec);
    60: nir_instr_remove(&intrin->instr);
    61: nir_instr_free(&intrin->instr);
    71: nir_intrinsic_instr *intrin …
   [all...]
nir_opt_constant_folding.c
   182: try_fold_intrinsic(nir_builder *b, nir_intrinsic_instr *intrin,
   185: switch (intrin->intrinsic) {
   189: if (nir_src_is_const(intrin->src[0])) {
   190: if (nir_src_as_bool(intrin->src[0])) {
   191: b->cursor = nir_before_instr(&intrin->instr);
   193: switch (intrin->intrinsic) {
   210: nir_instr_remove(&intrin->instr);
   216: nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
   219: b->cursor = nir_before_instr(&intrin->instr);
   220: nir_ssa_def *val = nir_build_imm(b, intrin …
   [all...]
nir_lower_io_to_vector.c
   457: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   459: switch (intrin->intrinsic) {
   465: nir_deref_instr *old_deref = nir_src_as_deref(intrin->src[0]);
   487: ((1 << intrin->num_components) - 1) << old_frac;
   489: b.cursor = nir_before_instr(&intrin->instr);
   503: nir_instr_rewrite_src(&intrin->instr, &intrin->src[0],
   506: intrin->num_components =
   508: intrin->dest.ssa.num_components = intrin …
   621: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   [all...]
nir_opt_ray_queries.c
    37: nir_intrinsic_instr *intrin)
    39: nir_ssa_def *rq_def = intrin->src[0].ssa;
    74: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    75: switch (intrin->intrinsic) {
    77: if (list_length(&intrin->dest.ssa.uses) > 0 ||
    78: list_length(&intrin->dest.ssa.if_uses) > 0)
    79: mark_query_read(queries, intrin);
    82: mark_query_read(queries, intrin);
   100: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   101: switch (intrin …
    36: mark_query_read(struct set *queries, nir_intrinsic_instr *intrin)
   [all...]
nir_lower_task_shader.c
    52: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    54: switch (intrin->intrinsic) {
    59: nir_ssa_def_rewrite_uses(&intrin->dest.ssa, load);
    66: nir_ssa_def *store_val = intrin->src[0].ssa;
   178: nir_intrinsic_instr *intrin,
   184: unsigned base = nir_intrinsic_base(intrin);
   185: intrin->intrinsic = shared_opcode_for_task_payload(intrin->intrinsic);
   186: nir_intrinsic_set_base(intrin, base + s->payload_shared_addr);
   231: nir_intrinsic_instr *intrin,
   177: lower_task_payload_to_shared(nir_builder *b, nir_intrinsic_instr *intrin, lower_task_state *s)
   230: lower_task_launch_mesh_workgroups(nir_builder *b, nir_intrinsic_instr *intrin, lower_task_state *s)
   291: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   332: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   [all...]
nir_lower_readonly_images_to_tex.c
    77: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    78: if (intrin->intrinsic != nir_intrinsic_image_deref_load &&
    79: intrin->intrinsic != nir_intrinsic_image_deref_size)
    82: nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
    97: access = nir_intrinsic_access(intrin);
   108: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   111: switch (intrin->intrinsic) {
   124: nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
   146: switch (intrin->intrinsic) {
   148: assert(intrin …
   [all...]
nir_lower_memory_model.c
    34: get_intrinsic_info(nir_intrinsic_instr *intrin, nir_variable_mode *modes,
    37: switch (intrin->intrinsic) {
    40: *modes = nir_src_as_deref(intrin->src[0])->modes;
    44: *modes = nir_src_as_deref(intrin->src[0])->modes;
    60: *modes = nir_src_as_deref(intrin->src[0])->modes;
   117: *modes = nir_src_as_deref(intrin->src[0])->modes;
   121: *modes = nir_src_as_deref(intrin->src[0])->modes;
   138: *modes = nir_src_as_deref(intrin->src[0])->modes;
   153: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   155: if (intrin …
   [all...]
nir_gs_count_vertices.c
    33: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    34: if (intrin->intrinsic != op)
    37: return intrin;
    78: nir_intrinsic_instr *intrin = as_set_vertex_and_primitive_count(instr);
    79: if (!intrin)
    82: unsigned stream = nir_intrinsic_stream_id(intrin);
    92: if (nir_src_is_const(intrin->src[0]))
    93: vtxcnt = nir_src_as_int(intrin->src[0]);
    94: if (nir_src_is_const(intrin->src[1]))
    95: prmcnt = nir_src_as_int(intrin …
   [all...]
/third_party/mesa3d/src/amd/common/
ac_nir_lower_tess_io_to_mem.c
   171: nir_intrinsic_instr *intrin,
   175: bool indirect = !nir_src_is_const(*nir_get_io_offset_src(intrin));
   179: uint64_t slot = nir_intrinsic_io_semantics(intrin).location;
   181: intrin->intrinsic != nir_intrinsic_load_per_vertex_input &&
   182: intrin->intrinsic != nir_intrinsic_store_per_vertex_output)
   189: tcs_output_needs_vmem(nir_intrinsic_instr *intrin,
   192: uint64_t mask = intrin->intrinsic == nir_intrinsic_store_per_vertex_output
   196: return match_mask(MESA_SHADER_TESS_CTRL, intrin, mask, true);
   200: tcs_output_needs_lds(nir_intrinsic_instr *intrin,
   203: uint64_t mask = intrin …
   170: match_mask(gl_shader_stage stage, nir_intrinsic_instr *intrin, uint64_t mask, bool match_indirect)
   279: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   321: hs_output_lds_offset(nir_builder *b, lower_tess_io_state *st, nir_intrinsic_instr *intrin)
   365: hs_per_vertex_output_vmem_offset(nir_builder *b, lower_tess_io_state *st, nir_intrinsic_instr *intrin)
   387: hs_per_patch_output_vmem_offset(nir_builder *b, lower_tess_io_state *st, nir_intrinsic_instr *intrin, unsigned const_base_offset)
   414: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   422: lower_hs_output_store(nir_builder *b, nir_intrinsic_instr *intrin, lower_tess_io_state *st)
   469: lower_hs_output_load(nir_builder *b, nir_intrinsic_instr *intrin, lower_tess_io_state *st)
   479: update_hs_scoped_barrier(nir_intrinsic_instr *intrin, lower_tess_io_state *st)
   504: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   634: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   [all...]
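match_mask (line 170) tests whether an IO intrinsic's location falls inside a 64-bit slot bitmask, and treats indirectly addressed slots conservatively: they can't be proven to miss, so they match whenever match_indirect is set. A standalone sketch of that test; the slot numbers and mask are illustrative:

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define SLOT_BIT(s) (UINT64_C(1) << (s))

/* Mirrors the shape of match_mask(): an indirect offset can't be proven
 * to fall outside the mask, so it matches if match_indirect is set. */
static bool slot_matches(uint64_t mask, unsigned slot,
                         bool indirect, bool match_indirect)
{
   if (indirect)
      return match_indirect;
   return (mask & SLOT_BIT(slot)) != 0;
}

int main(void)
{
   uint64_t vmem_mask = SLOT_BIT(3) | SLOT_BIT(7); /* illustrative outputs */
   printf("%d %d %d\n",
          slot_matches(vmem_mask, 3, false, true),  /* 1: slot in mask */
          slot_matches(vmem_mask, 4, false, true),  /* 0: slot not in mask */
          slot_matches(vmem_mask, 4, true,  true)); /* 1: indirect, conservative */
   return 0;
}
```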
ac_nir_lower_global_access.c
    76: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    79: switch (intrin->intrinsic) {
   134: nir_src *addr_src = &intrin->src[addr_src_idx];
   143: b->cursor = nir_before_instr(&intrin->instr);
   152: new_intrin->num_components = intrin->num_components;
   155: nir_ssa_dest_init(&new_intrin->instr, &new_intrin->dest, intrin->dest.ssa.num_components,
   156: intrin->dest.ssa.bit_size, NULL);
   158: unsigned num_src = nir_intrinsic_infos[intrin->intrinsic].num_srcs;
   160: new_intrin->src[i] = nir_src_for_ssa(intrin->src[i].ssa);
   164: if (nir_intrinsic_has_access(intrin) …
   [all...]
/third_party/mesa3d/src/intel/compiler/
brw_nir_lower_mem_access_bit_sizes.c
    30: dup_mem_intrinsic(nir_builder *b, nir_intrinsic_instr *intrin,
    35: const nir_intrinsic_info *info = &nir_intrinsic_infos[intrin->intrinsic];
    38: nir_intrinsic_instr_create(b->shader, intrin->intrinsic);
    40: nir_src *intrin_offset_src = nir_get_io_offset_src(intrin);
    42: assert(intrin->src[i].is_ssa);
    45: assert(&intrin->src[i] != intrin_offset_src);
    47: } else if (&intrin->src[i] == intrin_offset_src) {
    48: dup->src[i] = nir_src_for_ssa(nir_iadd_imm(b, intrin->src[i].ssa,
    51: dup->src[i] = nir_src_for_ssa(intrin->src[i].ssa);
    56: if (intrin …
    82: lower_mem_load_bit_size(nir_builder *b, nir_intrinsic_instr *intrin, const struct intel_device_info *devinfo)
   156: lower_mem_store_bit_size(nir_builder *b, nir_intrinsic_instr *intrin, const struct intel_device_info *devinfo)
   254: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   [all...]
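dup_mem_intrinsic clones a load/store while adding a byte offset to its offset source (line 48); lower_mem_load_bit_size / lower_mem_store_bit_size use it to split an access the hardware can't do in one piece into several smaller power-of-two accesses. The sketch below shows one plausible chunking policy, not Intel's exact device-specific rule:

```c
#include <stdint.h>
#include <stdio.h>

/* Largest power-of-two chunk that fits in the remaining bytes and respects
 * the alignment of the current offset. A plausible policy, assumed here. */
static uint32_t next_chunk(uint32_t offset, uint32_t remaining, uint32_t max_chunk)
{
   uint32_t chunk = max_chunk;
   while (chunk > 1 && ((offset & (chunk - 1)) || chunk > remaining))
      chunk >>= 1;
   return chunk;
}

int main(void)
{
   uint32_t offset = 4, remaining = 24; /* a 24-byte load at offset 4 */
   while (remaining) {
      uint32_t c = next_chunk(offset, remaining, 16);
      printf("load %u bytes at offset %u\n", c, offset);
      offset += c;     /* mirrors the nir_iadd_imm on the offset source */
      remaining -= c;
   }
   return 0;
}
```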
brw_nir_lower_storage_image.c
   366: nir_intrinsic_instr *intrin)
   368: nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
   380: const unsigned dest_components = intrin->num_components;
   386: nir_ssa_def_rewrite_uses(&intrin->dest.ssa, placeholder);
   388: intrin->num_components = isl_format_get_num_channels(lower_fmt);
   389: intrin->dest.ssa.num_components = intrin->num_components;
   391: b->cursor = nir_after_instr(&intrin->instr);
   394: &intrin->dest.ssa,
   408: const unsigned dest_components = intrin …
   364: lower_image_load_instr(nir_builder *b, const struct intel_device_info *devinfo, nir_intrinsic_instr *intrin)
   520: lower_image_store_instr(nir_builder *b, const struct intel_device_info *devinfo, nir_intrinsic_instr *intrin)
   601: lower_image_atomic_instr(nir_builder *b, const struct intel_device_info *devinfo, nir_intrinsic_instr *intrin)
   636: lower_image_size_instr(nir_builder *b, const struct intel_device_info *devinfo, nir_intrinsic_instr *intrin)
   691: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   [all...]
/third_party/mesa3d/src/amd/vulkan/
radv_nir_lower_abi.c
    64: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    67: switch (intrin->intrinsic) {
   114: return ac_nir_load_arg(b, &s->args->ac, s->args->ac.gs_vtx_offset[nir_intrinsic_base(intrin)]);
   222: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   223: return (intrin->intrinsic == nir_intrinsic_load_ring_tess_factors_amd && !s->use_llvm) ||
   224: (intrin->intrinsic == nir_intrinsic_load_ring_tess_offchip_amd && !s->use_llvm) ||
   225: (intrin->intrinsic == nir_intrinsic_load_ring_esgs_amd && !s->use_llvm) ||
   226: intrin->intrinsic == nir_intrinsic_load_ring_tess_factors_offset_amd ||
   227: intrin->intrinsic == nir_intrinsic_load_ring_tess_offchip_offset_amd ||
   228: intrin …
   [all...]
radv_nir_apply_pipeline_layout.c
    70: visit_vulkan_resource_index(nir_builder *b, apply_layout_state *state, nir_intrinsic_instr *intrin)
    72: unsigned desc_set = nir_intrinsic_desc_set(intrin);
    73: unsigned binding = nir_intrinsic_binding(intrin);
    91: nir_ssa_def *binding_ptr = nir_imul_imm(b, intrin->src[0].ssa, stride);
    99: nir_ssa_def_rewrite_uses(&intrin->dest.ssa, nir_pack_64_2x32_split(b, set_ptr, binding_ptr));
   101: nir_ssa_def_rewrite_uses(&intrin->dest.ssa,
   104: nir_instr_remove(&intrin->instr);
   109: nir_intrinsic_instr *intrin)
   111: VkDescriptorType desc_type = nir_intrinsic_desc_type(intrin);
   113: nir_ssa_def *set_ptr = nir_unpack_64_2x32_split_x(b, intrin …
   108: visit_vulkan_resource_reindex(nir_builder *b, apply_layout_state *state, nir_intrinsic_instr *intrin)
   141: visit_load_vulkan_descriptor(nir_builder *b, apply_layout_state *state, nir_intrinsic_instr *intrin)
   205: visit_get_ssbo_size(nir_builder *b, apply_layout_state *state, nir_intrinsic_instr *intrin)
   359: update_image_intrinsic(nir_builder *b, apply_layout_state *state, nir_intrinsic_instr *intrin)
   373: apply_layout_to_intrin(nir_builder *b, apply_layout_state *state, nir_intrinsic_instr *intrin)
   [all...]
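visit_vulkan_resource_index packs the descriptor-set pointer and the binding's byte offset into one 64-bit SSA value with nir_pack_64_2x32_split (line 99), and visit_vulkan_resource_reindex recovers the set pointer with nir_unpack_64_2x32_split_x (line 113), i.e. from the low half. The plain-C equivalent of that pack/unpack, with illustrative values:

```c
#include <stdint.h>
#include <stdio.h>

/* pack_64_2x32_split(x, y): x becomes the low 32 bits, y the high 32 bits. */
static uint64_t pack_64_2x32_split(uint32_t x, uint32_t y)
{
   return ((uint64_t)y << 32) | x;
}

int main(void)
{
   uint32_t set_ptr = 0x1000;        /* illustrative descriptor-set address */
   uint32_t binding_ptr = 3 * 16;    /* illustrative binding byte offset    */
   uint64_t handle = pack_64_2x32_split(set_ptr, binding_ptr);
   printf("set=0x%x binding_off=%u\n",
          (uint32_t)(handle & 0xffffffffu), /* unpack_64_2x32_split_x */
          (uint32_t)(handle >> 32));        /* unpack_64_2x32_split_y */
   return 0;
}
```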
/third_party/mesa3d/src/intel/vulkan/
anv_nir_apply_pipeline_layout.c
   129: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   130: switch (intrin->intrinsic) {
   132: add_binding(state, nir_intrinsic_desc_set(intrin),
   133: nir_intrinsic_binding(intrin));
   154: add_deref_src_binding(state, intrin->src[0]);
   183: nir_intrinsic_instr *intrin = nir_src_as_intrinsic(src);
   185: while (intrin && intrin->intrinsic == nir_intrinsic_vulkan_resource_reindex)
   186: intrin = nir_src_as_intrinsic(intrin …
   195: descriptor_has_bti(nir_intrinsic_instr *intrin, struct apply_pipeline_layout_state *state)
   216: descriptor_address_format(nir_intrinsic_instr *intrin, struct apply_pipeline_layout_state *state)
   240: nir_intrinsic_instr *intrin = nir_src_as_intrinsic(deref->parent);
   618: build_res_index_for_chain(nir_builder *b, nir_intrinsic_instr *intrin, nir_address_format addr_format, uint32_t *set, uint32_t *binding, struct apply_pipeline_layout_state *state)
   698: try_lower_direct_buffer_intrinsic(nir_builder *b, nir_intrinsic_instr *intrin, bool is_atomic, struct apply_pipeline_layout_state *state)
   862: lower_res_index_intrinsic(nir_builder *b, nir_intrinsic_instr *intrin, struct apply_pipeline_layout_state *state)
   887: lower_res_reindex_intrinsic(nir_builder *b, nir_intrinsic_instr *intrin, struct apply_pipeline_layout_state *state)
   911: lower_load_vulkan_descriptor(nir_builder *b, nir_intrinsic_instr *intrin, struct apply_pipeline_layout_state *state)
   959: lower_get_ssbo_size(nir_builder *b, nir_intrinsic_instr *intrin, struct apply_pipeline_layout_state *state)
  1007: lower_image_intrinsic(nir_builder *b, nir_intrinsic_instr *intrin, struct apply_pipeline_layout_state *state)
  1062: lower_load_constant(nir_builder *b, nir_intrinsic_instr *intrin, struct apply_pipeline_layout_state *state)
  1328: lower_ray_query_globals(nir_builder *b, nir_intrinsic_instr *intrin, struct apply_pipeline_layout_state *state)
  1349: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   [all...]
/third_party/mesa3d/src/compiler/spirv/
vtn_amd.c
    82: nir_intrinsic_instr *intrin = nir_intrinsic_instr_create(b->nb.shader, op);
    83: nir_ssa_dest_init_for_type(&intrin->instr, &intrin->dest, dest_type, NULL);
    85: intrin->num_components = intrin->dest.ssa.num_components;
    88: intrin->src[i] = nir_src_for_ssa(vtn_get_nir_ssa(b, w[i + 5]));
    90: if (intrin->intrinsic == nir_intrinsic_quad_swizzle_amd) {
    96: nir_intrinsic_set_swizzle_mask(intrin, mask);
    98: } else if (intrin->intrinsic == nir_intrinsic_masked_swizzle_amd) {
   103: nir_intrinsic_set_swizzle_mask(intrin, mas…
   192: nir_intrinsic_instr *intrin = nir_intrinsic_instr_create(b->nb.shader, op);
   [all...]
vtn_subgroup.c
    53: nir_intrinsic_instr *intrin = …
    55: nir_ssa_dest_init_for_type(&intrin->instr, &intrin->dest,
    57: intrin->num_components = intrin->dest.ssa.num_components;
    59: intrin->src[0] = nir_src_for_ssa(src0->def);
    61: intrin->src[1] = nir_src_for_ssa(index);
    63: intrin->const_index[0] = const_idx0;
    64: intrin->const_index[1] = const_idx1;
    66: nir_builder_instr_insert(&b->nb, &intrin …
   112: nir_intrinsic_instr *intrin = …
   167: nir_intrinsic_instr *intrin = …
   262: nir_intrinsic_instr *intrin = …
   [all...]
/third_party/mesa3d/src/gallium/frontends/lavapipe/
lvp_lower_vulkan_resource.c
    33: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    34: switch (intrin->intrinsic) {
    72: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    73: if (intrin->intrinsic != nir_intrinsic_load_deref)
    75: nir_deref_instr *deref = nir_instr_as_deref(intrin->src[0].ssa->parent_instr);
    82: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    83: nir_binding nb = nir_chase_binding(intrin->src[0]);
    92: assert(intrin->src[0].ssa->num_components == 2);
   102: nir_ssa_def *added = nir_iadd(b, intrin->src[0].ssa, offset);
   103: nir_deref_instr *deref = nir_instr_as_deref(intrin …
   112: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   154: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   164: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   257: lower_vri_intrin_image(struct nir_builder *b, nir_intrinsic_instr *intrin, void *data_cb)
   298: nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   [all...]