Lines matching defs:intrin (uses of the nir_intrinsic_instr *intrin parameter)

392 lower_load(nir_intrinsic_instr *intrin, struct lower_io_state *state,
396 assert(intrin->dest.is_ssa);
397 if (intrin->dest.ssa.bit_size == 64 &&
406 while (dest_comp < intrin->dest.ssa.num_components) {
408 MIN2(intrin->dest.ssa.num_components - dest_comp,
425 return nir_vec(b, comp64, intrin->dest.ssa.num_components);
426 } else if (intrin->dest.ssa.bit_size == 1) {
431 intrin->dest.ssa.num_components, 32,
435 intrin->dest.ssa.num_components,
436 intrin->dest.ssa.bit_size,
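The loop at lines 406-425 splits a 64-bit load into 32-bit pieces and then recombines them into the comp64[] array that feeds nir_vec(). A standalone sketch of the recombination step (illustrative only, not the pass's own helper; it assumes the 32-bit halves have already been loaded in lo/hi order):

#include "nir_builder.h"

/* Reassemble num_comps64 64-bit components from 2*num_comps64 32-bit
 * halves, mirroring the comp64[]/nir_vec() pattern at lines 406-425. */
static nir_ssa_def *
combine_64bit_components(nir_builder *b, nir_ssa_def **halves32,
                         unsigned num_comps64)
{
   nir_ssa_def *comp64[NIR_MAX_VEC_COMPONENTS];
   for (unsigned i = 0; i < num_comps64; i++) {
      comp64[i] = nir_pack_64_2x32_split(b, halves32[2 * i],
                                         halves32[2 * i + 1]);
   }
   return nir_vec(b, comp64, num_comps64);
}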
504 lower_store(nir_intrinsic_instr *intrin, struct lower_io_state *state,
508 assert(intrin->src[1].is_ssa);
509 if (intrin->src[1].ssa->bit_size == 64 &&
517 nir_component_mask_t write_mask = nir_intrinsic_write_mask(intrin);
518 while (src_comp < intrin->num_components) {
520 MIN2(intrin->num_components - src_comp,
525 nir_channels(b, intrin->src[1].ssa,
546 } else if (intrin->dest.ssa.bit_size == 1) {
549 nir_ssa_def *b32_val = nir_b2b32(&state->builder, intrin->src[1].ssa);
551 component, intrin->num_components,
552 nir_intrinsic_write_mask(intrin),
555 emit_store(state, intrin->src[1].ssa, array_index, var, offset,
556 component, intrin->num_components,
557 nir_intrinsic_write_mask(intrin),
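Lines 546-557 handle 1-bit booleans: I/O slots hold them as 32-bit booleans, so lower_store() widens the value with nir_b2b32 before emit_store(), and lower_load() narrows the loaded value back. A minimal standalone illustration of that round trip (not the pass's code):

#include "nir_builder.h"

/* Widen a 1-bit boolean to the 32-bit boolean representation used for
 * in-memory I/O, as done before the underlying 32-bit store. */
static nir_ssa_def *
bool_to_mem(nir_builder *b, nir_ssa_def *val)
{
   assert(val->bit_size == 1);
   return nir_b2b32(b, val);
}

/* Narrow a loaded 32-bit boolean back to 1 bit, as done after the
 * underlying 32-bit load. */
static nir_ssa_def *
bool_from_mem(nir_builder *b, nir_ssa_def *val)
{
   assert(val->bit_size == 32);
   return nir_b2b1(b, val);
}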
563 lower_interpolate_at(nir_intrinsic_instr *intrin, struct lower_io_state *state,
578 assert(intrin->intrinsic == nir_intrinsic_interp_deref_at_vertex);
579 vertex_index = intrin->src[1].ssa;
582 return lower_load(intrin, state, vertex_index, var, offset, component, type);
586 assert(intrin->dest.is_ssa && intrin->dest.ssa.bit_size <= 32);
589 switch (intrin->intrinsic) {
611 if (intrin->intrinsic == nir_intrinsic_interp_deref_at_sample ||
612 intrin->intrinsic == nir_intrinsic_interp_deref_at_offset ||
613 intrin->intrinsic == nir_intrinsic_interp_deref_at_vertex)
614 nir_src_copy(&bary_setup->src[0], &intrin->src[1]);
625 assert(intrin->dest.is_ssa);
628 intrin->dest.ssa.num_components,
629 intrin->dest.ssa.bit_size,
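lower_interpolate_at() (lines 563-629) turns interp_deref_at_* intrinsics into a barycentric setup plus an interpolated load. A small sketch of the opcode mapping involved; the at_vertex case is omitted because, as lines 578-582 show, it is forwarded to lower_load() with an explicit vertex index, and the real function also copies the sample/offset operand and picks the interpolation mode:

#include "nir.h"

static nir_intrinsic_op
bary_op_for_interp_deref(nir_intrinsic_op op)
{
   switch (op) {
   case nir_intrinsic_interp_deref_at_centroid:
      return nir_intrinsic_load_barycentric_centroid;
   case nir_intrinsic_interp_deref_at_sample:
      return nir_intrinsic_load_barycentric_at_sample;
   case nir_intrinsic_interp_deref_at_offset:
      return nir_intrinsic_load_barycentric_at_offset;
   default:
      unreachable("not an interp_deref_at_* intrinsic");
   }
}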
651 nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
653 switch (intrin->intrinsic) {
672 nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
707 if (intrin->intrinsic != nir_intrinsic_store_deref) {
709 nir_imm_zero(b, intrin->dest.ssa.num_components,
710 intrin->dest.ssa.bit_size);
711 nir_ssa_def_rewrite_uses(&intrin->dest.ssa,
715 nir_instr_remove(&intrin->instr);
726 switch (intrin->intrinsic) {
728 replacement = lower_load(intrin, state, array_index, var, offset,
733 lower_store(intrin, state, array_index, var, offset,
742 replacement = lower_interpolate_at(intrin, state, var, offset,
751 nir_ssa_def_rewrite_uses(&intrin->dest.ssa,
754 nir_instr_remove(&intrin->instr);
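Lines 726-754 show the common tail of the deref lowering: compute a replacement value for loads and interpolations, redirect all users to it, then delete the original intrinsic. The idiom in isolation, as a sketch rather than the function's exact code:

#include "nir.h"

/* Loads and interpolations have a destination whose uses move to the
 * replacement; stores have none and are simply removed once the
 * lowered store has been emitted. */
static void
finish_lowering(nir_intrinsic_instr *intrin, nir_ssa_def *replacement)
{
   if (nir_intrinsic_infos[intrin->intrinsic].has_dest) {
      assert(replacement != NULL);
      nir_ssa_def_rewrite_uses(&intrin->dest.ssa, replacement);
   }
   nir_instr_remove(&intrin->instr);
}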
1330 build_explicit_io_load(nir_builder *b, nir_intrinsic_instr *intrin,
1336 nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
1341 return build_explicit_io_load(b, intrin, addr, addr_format,
1349 build_explicit_io_load(b, intrin, addr, addr_format,
1355 build_explicit_io_load(b, intrin, addr, addr_format,
1366 build_explicit_io_load(b, intrin, addr, addr_format,
1373 build_explicit_io_load(b, intrin, addr, addr_format,
1386 switch (intrin->intrinsic) {
1495 nir_intrinsic_set_access(load, nir_intrinsic_access(intrin));
1509 unsigned bit_size = intrin->dest.ssa.bit_size;
1525 assert(intrin->dest.is_ssa);
1558 if (intrin->dest.ssa.bit_size == 1) {
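build_explicit_io_load() (lines 1330 onward) recurses per address mode and eventually emits a non-deref load; lines 1495-1558 show it forwarding the access flags and converting 1-bit results. A standalone sketch of the simplest terminal case, a load_global from a flat 64-bit address (the function name here is illustrative; the real code supports many more address formats and computes alignment and bounds properly):

#include "nir_builder.h"

static nir_ssa_def *
emit_load_global64(nir_builder *b, nir_ssa_def *addr,
                   unsigned num_components, unsigned bit_size,
                   enum gl_access_qualifier access)
{
   assert(addr->bit_size == 64 && addr->num_components == 1);

   nir_intrinsic_instr *load =
      nir_intrinsic_instr_create(b->shader, nir_intrinsic_load_global);
   load->src[0] = nir_src_for_ssa(addr);
   load->num_components = num_components;
   nir_intrinsic_set_access(load, access);
   /* Assume natural alignment for the sketch. */
   nir_intrinsic_set_align(load, bit_size / 8, 0);
   nir_ssa_dest_init(&load->instr, &load->dest,
                     num_components, bit_size, NULL);
   nir_builder_instr_insert(b, &load->instr);
   return &load->dest.ssa;
}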
1575 build_explicit_io_store(nir_builder *b, nir_intrinsic_instr *intrin,
1585 build_explicit_io_store(b, intrin, addr, addr_format,
1592 build_explicit_io_store(b, intrin, addr, addr_format,
1597 build_explicit_io_store(b, intrin, addr, addr_format,
1606 build_explicit_io_store(b, intrin, addr, addr_format,
1612 build_explicit_io_store(b, intrin, addr, addr_format,
1625 switch (intrin->intrinsic) {
1718 nir_intrinsic_set_access(store, nir_intrinsic_access(intrin));
1723 value->num_components == intrin->num_components);
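build_explicit_io_store() is the store-side counterpart; lines 1718-1723 show it propagating the access flags and asserting the value's component count. The matching terminal sketch for a flat 64-bit address (again an illustrative helper, not the pass's code):

#include "nir_builder.h"

static void
emit_store_global64(nir_builder *b, nir_ssa_def *value, nir_ssa_def *addr,
                    nir_component_mask_t write_mask,
                    enum gl_access_qualifier access)
{
   nir_intrinsic_instr *store =
      nir_intrinsic_instr_create(b->shader, nir_intrinsic_store_global);
   store->src[0] = nir_src_for_ssa(value);  /* data to write */
   store->src[1] = nir_src_for_ssa(addr);   /* 64-bit global address */
   store->num_components = value->num_components;
   nir_intrinsic_set_write_mask(store, write_mask);
   nir_intrinsic_set_access(store, access);
   nir_intrinsic_set_align(store, value->bit_size / 8, 0);
   nir_builder_instr_insert(b, &store->instr);
}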
1742 build_explicit_io_atomic(nir_builder *b, nir_intrinsic_instr *intrin,
1750 return build_explicit_io_atomic(b, intrin, addr, addr_format,
1756 build_explicit_io_atomic(b, intrin, addr, addr_format,
1760 build_explicit_io_atomic(b, intrin, addr, addr_format,
1769 build_explicit_io_atomic(b, intrin, addr, addr_format,
1774 build_explicit_io_atomic(b, intrin, addr, addr_format,
1785 nir_intrinsic_infos[intrin->intrinsic].num_srcs - 1;
1791 op = global_atomic_for_deref(addr_format, intrin->intrinsic);
1793 op = ssbo_atomic_for_deref(intrin->intrinsic);
1797 op = global_atomic_for_deref(addr_format, intrin->intrinsic);
1801 op = shared_atomic_for_deref(intrin->intrinsic);
1805 op = task_payload_atomic_for_deref(intrin->intrinsic);
1824 atomic->src[src++] = nir_src_for_ssa(intrin->src[1 + i].ssa);
1831 nir_intrinsic_set_access(atomic, nir_intrinsic_access(intrin));
1833 assert(intrin->dest.ssa.num_components == 1);
1835 1, intrin->dest.ssa.bit_size, NULL);
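build_explicit_io_atomic() (lines 1742-1835) maps the deref atomic to a mode-specific opcode and re-plumbs the operands: src[0] of the original intrinsic is the deref, so the data operands start at src[1], and the generated atomic takes the lowered address first. A sketch of that re-plumbing, assuming an address format where the address is a single source (SSBO atomics, for example, split it into a buffer index plus an offset):

#include "nir_builder.h"

static nir_ssa_def *
emit_addr_atomic(nir_builder *b, nir_intrinsic_instr *intrin,
                 nir_intrinsic_op op, nir_ssa_def *addr)
{
   /* Everything after the deref source is a data operand. */
   const unsigned num_data_srcs =
      nir_intrinsic_infos[intrin->intrinsic].num_srcs - 1;

   nir_intrinsic_instr *atomic = nir_intrinsic_instr_create(b->shader, op);
   unsigned src = 0;
   atomic->src[src++] = nir_src_for_ssa(addr);
   for (unsigned i = 0; i < num_data_srcs; i++)
      atomic->src[src++] = nir_src_for_ssa(intrin->src[1 + i].ssa);

   assert(intrin->dest.ssa.num_components == 1);
   nir_ssa_dest_init(&atomic->instr, &atomic->dest,
                     1, intrin->dest.ssa.bit_size, NULL);
   nir_builder_instr_insert(b, &atomic->instr);
   return &atomic->dest.ssa;
}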
1913 nir_intrinsic_instr *intrin,
1917 b->cursor = nir_after_instr(&intrin->instr);
1919 nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
1932 switch (intrin->intrinsic) {
1937 for (unsigned i = 0; i < intrin->num_components; i++) {
1942 comps[i] = build_explicit_io_load(b, intrin, comp_addr,
1949 value = nir_vec(b, comps, intrin->num_components);
1951 value = build_explicit_io_load(b, intrin, addr, addr_format,
1953 intrin->num_components);
1955 nir_ssa_def_rewrite_uses(&intrin->dest.ssa, value);
1960 assert(intrin->src[1].is_ssa);
1961 nir_ssa_def *value = intrin->src[1].ssa;
1962 nir_component_mask_t write_mask = nir_intrinsic_write_mask(intrin);
1964 for (unsigned i = 0; i < intrin->num_components; i++) {
1972 build_explicit_io_store(b, intrin, comp_addr, addr_format,
1978 build_explicit_io_store(b, intrin, addr, addr_format,
1986 nir_ssa_def *value = build_explicit_io_load(b, intrin, addr, addr_format,
1989 intrin->num_components);
1990 nir_ssa_def_rewrite_uses(&intrin->dest.ssa, value);
1995 assert(intrin->src[1].is_ssa);
1996 nir_ssa_def *value = intrin->src[1].ssa;
1998 build_explicit_io_store(b, intrin, addr, addr_format,
2006 build_explicit_io_atomic(b, intrin, addr, addr_format, deref->modes);
2007 nir_ssa_def_rewrite_uses(&intrin->dest.ssa, value);
2012 nir_instr_remove(&intrin->instr);
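nir_lower_explicit_io_instr() (lines 1913-2012) either accesses the whole vector at once or, when the layout forces it, goes component by component, skipping store components the write mask does not cover. A sketch of the scalarized store loop, assuming a flat 64-bit address so nir_iadd_imm can form the per-component addresses (the real code goes through an address-format-aware helper), with the store_scalar callback standing in for build_explicit_io_store():

#include "nir_builder.h"

static void
store_components_scalarized(nir_builder *b, nir_intrinsic_instr *intrin,
                            nir_ssa_def *addr,
                            void (*store_scalar)(nir_builder *,
                                                 nir_ssa_def *,
                                                 nir_ssa_def *))
{
   nir_ssa_def *value = intrin->src[1].ssa;
   nir_component_mask_t write_mask = nir_intrinsic_write_mask(intrin);
   const unsigned comp_stride = value->bit_size / 8;

   for (unsigned i = 0; i < intrin->num_components; i++) {
      if (!(write_mask & (1u << i)))
         continue;  /* component not written; leave memory untouched */

      nir_ssa_def *comp_addr = nir_iadd_imm(b, addr, i * comp_stride);
      store_scalar(b, comp_addr, nir_channel(b, value, i));
   }
}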
2145 lower_explicit_io_access(nir_builder *b, nir_intrinsic_instr *intrin,
2148 assert(intrin->src[0].is_ssa);
2149 nir_lower_explicit_io_instr(b, intrin, intrin->src[0].ssa, addr_format);
2153 lower_explicit_io_array_length(nir_builder *b, nir_intrinsic_instr *intrin,
2156 b->cursor = nir_after_instr(&intrin->instr);
2158 nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
2169 unsigned access = nir_intrinsic_access(intrin);
2175 nir_ssa_def_rewrite_uses(&intrin->dest.ssa, arr_size);
2176 nir_instr_remove(&intrin->instr);
2180 lower_explicit_io_mode_check(nir_builder *b, nir_intrinsic_instr *intrin,
2189 intrin->intrinsic = nir_intrinsic_addr_mode_is;
2193 assert(intrin->src[0].is_ssa);
2194 nir_ssa_def *addr = intrin->src[0].ssa;
2196 b->cursor = nir_instr_remove(&intrin->instr);
2200 nir_intrinsic_memory_modes(intrin));
2202 nir_ssa_def_rewrite_uses(&intrin->dest.ssa, is_mode);
2231 nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
2232 switch (intrin->intrinsic) {
2251 nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
2253 lower_explicit_io_access(&b, intrin, addr_format);
2260 nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
2262 lower_explicit_io_array_length(&b, intrin, addr_format);
2269 nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
2271 lower_explicit_io_mode_check(&b, intrin, addr_format);
2835 is_input(nir_intrinsic_instr *intrin)
2837 return intrin->intrinsic == nir_intrinsic_load_input ||
2838 intrin->intrinsic == nir_intrinsic_load_per_vertex_input ||
2839 intrin->intrinsic == nir_intrinsic_load_interpolated_input ||
2840 intrin->intrinsic == nir_intrinsic_load_fs_input_interp_deltas;
2844 is_output(nir_intrinsic_instr *intrin)
2846 return intrin->intrinsic == nir_intrinsic_load_output ||
2847 intrin->intrinsic == nir_intrinsic_load_per_vertex_output ||
2848 intrin->intrinsic == nir_intrinsic_load_per_primitive_output ||
2849 intrin->intrinsic == nir_intrinsic_store_output ||
2850 intrin->intrinsic == nir_intrinsic_store_per_vertex_output ||
2851 intrin->intrinsic == nir_intrinsic_store_per_primitive_output;
2854 static bool is_dual_slot(nir_intrinsic_instr *intrin)
2856 if (intrin->intrinsic == nir_intrinsic_store_output ||
2857 intrin->intrinsic == nir_intrinsic_store_per_vertex_output ||
2858 intrin->intrinsic == nir_intrinsic_store_per_primitive_output) {
2859 return nir_src_bit_size(intrin->src[0]) == 64 &&
2860 nir_src_num_components(intrin->src[0]) >= 3;
2863 return nir_dest_bit_size(intrin->dest) == 64 &&
2864 nir_dest_num_components(intrin->dest) >= 3;
2885 nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
2887 if (((modes & nir_var_shader_in) && is_input(intrin)) ||
2888 ((modes & nir_var_shader_out) && is_output(intrin))) {
2889 nir_io_semantics sem = nir_intrinsic_io_semantics(intrin);
2898 nir_src *offset = nir_get_io_offset_src(intrin);
2902 !nir_intrinsic_io_semantics(intrin).per_view) {
2905 nir_intrinsic_set_base(intrin, nir_intrinsic_base(intrin) + off);
2909 sem.num_slots = is_dual_slot(intrin) ? 2 : 1;
2910 nir_intrinsic_set_io_semantics(intrin, sem);
2912 b->cursor = nir_before_instr(&intrin->instr);
2913 nir_instr_rewrite_src(&intrin->instr, offset,
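Lines 2885-2913 fold constant indirect offsets into the intrinsic's base so downstream consumers see a direct access. Pulled out as a self-contained sketch (per-view I/O is skipped, matching line 2902; is_dual_slot() is the helper listed above; the semantic-location bump is assumed to track the folded offset):

#include "nir_builder.h"

static bool
fold_const_io_offset(nir_builder *b, nir_intrinsic_instr *intrin)
{
   nir_src *offset = nir_get_io_offset_src(intrin);
   if (!nir_src_is_const(*offset) ||
       nir_intrinsic_io_semantics(intrin).per_view)
      return false;

   const unsigned off = nir_src_as_uint(*offset);
   nir_intrinsic_set_base(intrin, nir_intrinsic_base(intrin) + off);

   /* Keep the IO semantics in step with the new base; a direct access
    * also needs at most one or two slots. */
   nir_io_semantics sem = nir_intrinsic_io_semantics(intrin);
   sem.location += off;
   sem.num_slots = is_dual_slot(intrin) ? 2 : 1;
   nir_intrinsic_set_io_semantics(intrin, sem);

   /* The offset source becomes an explicit zero. */
   b->cursor = nir_before_instr(&intrin->instr);
   nir_instr_rewrite_src(&intrin->instr, offset,
                         nir_src_for_ssa(nir_imm_int(b, 0)));
   return true;
}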
2961 nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
2963 if (intrin->intrinsic != nir_intrinsic_load_deref)
2966 nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
2988 nir_ssa_def_rewrite_uses(&intrin->dest.ssa, def);
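The final block (lines 2961-2988) rewrites loads of a particular variable's deref to a freshly built value. The shape of that rewrite as a sketch; the now-dead load_deref and its deref chain are left for DCE to clean up:

#include "nir.h"

static bool
rewrite_var_load(nir_intrinsic_instr *intrin, nir_variable *var,
                 nir_ssa_def *replacement)
{
   if (intrin->intrinsic != nir_intrinsic_load_deref)
      return false;

   nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
   if (deref->deref_type != nir_deref_type_var || deref->var != var)
      return false;

   nir_ssa_def_rewrite_uses(&intrin->dest.ssa, replacement);
   return true;
}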