Lines Matching defs:intr
126 nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
127 if (intr->intrinsic != nir_intrinsic_load_deref)
129 nir_variable *var = nir_deref_instr_get_variable(nir_instr_as_deref(intr->src[0].ssa->parent_instr));
163 nir_ssa_def_rewrite_uses_after(&intr->dest.ssa, new_vec,
167 nir_instr *parent = intr->src[0].ssa->parent_instr;
183 nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
184 if (intr->intrinsic != nir_intrinsic_load_deref)
186 nir_variable *var = nir_deref_instr_get_variable(nir_instr_as_deref(intr->src[0].ssa->parent_instr));
215 nir_ssa_def_rewrite_uses(&intr->dest.ssa, nir_vec(b, casted, num_components));
218 nir_instr *parent = intr->src[0].ssa->parent_instr;
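The two hits above (lines 126-167 and 183-218) share one shape: a per-instruction callback filters for nir_intrinsic_load_deref, pulls the nir_variable out of the deref feeding src[0], builds a replacement value with the builder, and redirects the old destination's users. Below is a minimal sketch of that skeleton, not the actual pass: struct var_remap, the old/new variable pair, and the omitted per-component casts are assumptions made for illustration.

    #include "nir_builder.h"

    struct var_remap {
       nir_variable *old_var;   /* hypothetical pass state */
       nir_variable *new_var;   /* replacement, same type assumed */
    };

    /* Callback for nir_shader_instructions_pass(). */
    static bool
    rewrite_var_load(nir_builder *b, nir_instr *instr, void *data)
    {
       struct var_remap *remap = data;
       if (instr->type != nir_instr_type_intrinsic)
          return false;
       nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
       if (intr->intrinsic != nir_intrinsic_load_deref)
          return false;

       /* Same lookup as lines 129/186: the variable behind the deref chain. */
       nir_variable *var =
          nir_deref_instr_get_variable(nir_src_as_deref(intr->src[0]));
       if (var != remap->old_var)
          return false;

       b->cursor = nir_before_instr(instr);
       /* Load the replacement and regather its components; the real passes
        * insert per-component casts at this point. */
       nir_ssa_def *new_load = nir_load_var(b, remap->new_var);
       nir_ssa_def *comps[NIR_MAX_VEC_COMPONENTS];
       for (unsigned i = 0; i < intr->num_components; i++)
          comps[i] = nir_channel(b, new_load, i);
       nir_ssa_def_rewrite_uses(&intr->dest.ssa,
                                nir_vec(b, comps, intr->num_components));
       nir_instr_remove(instr);
       return true;
    }

A pass written this way is normally driven by nir_shader_instructions_pass() with nir_metadata_block_index | nir_metadata_dominance preserved, passing &remap as the callback data.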
514 nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
520 switch (intr->intrinsic) {
522 var = bo->ssbo[nir_dest_bit_size(intr->dest) >> 4];
523 offset = intr->src[2].ssa;
527 var = bo->ssbo[nir_dest_bit_size(intr->dest) >> 4];
528 offset = intr->src[1].ssa;
531 if (nir_src_is_const(intr->src[0]) && nir_src_as_const_value(intr->src[0])->u32 == 0)
532 var = bo->uniforms[nir_dest_bit_size(intr->dest) >> 4];
534 var = bo->ubo[nir_dest_bit_size(intr->dest) >> 4];
535 offset = intr->src[1].ssa;
548 if (has_unsized || offset_bytes + intr->num_components - 1 < size)
553 for (unsigned i = 0; i < intr->num_components; i++) {
557 result[i] = nir_imm_zero(b, 1, nir_dest_bit_size(intr->dest));
560 assert(rewrites == intr->num_components);
562 nir_ssa_def *load = nir_vec(b, result, intr->num_components);
563 nir_ssa_def_rewrite_uses(&intr->dest.ssa, load);
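Lines 514-563 pick a replacement buffer variable by result bit size before doing the per-component loads: the arrays in the pass state are indexed by bit_size >> 4 (16-bit maps to slot 1, 32-bit to slot 2, 64-bit to slot 4), and a constant block index of 0 selects the uniform block instead of a regular UBO (line 531). The sketch below guesses at that state, only covering the plain load forms; struct bo_vars and its array sizes are assumptions, not the real definition. The zero-fill fallback at lines 548-557 then substitutes nir_imm_zero() for any component past the declared size.

    #include "nir_builder.h"

    /* Assumed shape of the pass state: one deref-able variable per result
     * bit size, indexed by bit_size >> 4. */
    struct bo_vars {
       nir_variable *uniforms[5];
       nir_variable *ubo[5];
       nir_variable *ssbo[5];
    };

    static nir_variable *
    bo_var_for_load(struct bo_vars *bo, nir_intrinsic_instr *intr)
    {
       unsigned slot = nir_dest_bit_size(intr->dest) >> 4;
       switch (intr->intrinsic) {
       case nir_intrinsic_load_ssbo:
          return bo->ssbo[slot];
       case nir_intrinsic_load_ubo:
          /* Block index 0 is treated as the uniform block (line 531). */
          if (nir_src_is_const(intr->src[0]) &&
              nir_src_as_uint(intr->src[0]) == 0)
             return bo->uniforms[slot];
          return bo->ubo[slot];
       default:
          return NULL;
       }
    }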
628 nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
629 if (intr->intrinsic != nir_intrinsic_load_deref)
631 nir_variable *var = nir_deref_instr_get_variable(nir_src_as_deref(intr->src[0]));
651 nir_ssa_def_rewrite_uses(&intr->dest.ssa, load);
983 nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
984 if (intr->intrinsic != nir_intrinsic_load_deref)
986 nir_deref_instr *deref = nir_src_as_deref(intr->src[0]);
1001 nir_ssa_def_rewrite_uses(&intr->dest.ssa, new_load);
1051 nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
1053 switch (intr->intrinsic) {
1066 nir_ssa_def *offset = nir_udiv_imm(b, intr->src[1].ssa, nir_dest_bit_size(intr->dest) / 8);
1067 nir_instr_rewrite_src_ssa(instr, &intr->src[1], offset);
1073 bool force_2x32 = intr->intrinsic == nir_intrinsic_load_ubo &&
1074 nir_src_is_const(intr->src[0]) &&
1075 nir_src_as_uint(intr->src[0]) == 0 &&
1076 nir_dest_bit_size(intr->dest) == 64 &&
1077 nir_intrinsic_align_offset(intr) % 8 != 0;
1078 force_2x32 |= nir_dest_bit_size(intr->dest) == 64 && !has_int64;
1079 nir_ssa_def *offset = nir_udiv_imm(b, intr->src[1].ssa, (force_2x32 ? 32 : nir_dest_bit_size(intr->dest)) / 8);
1080 nir_instr_rewrite_src_ssa(instr, &intr->src[1], offset);
1084 assert(intr->dest.ssa.num_components == 1);
1088 if (intr->intrinsic == nir_intrinsic_load_ssbo)
1089 load[i] = nir_load_ssbo(b, 1, 32, intr->src[0].ssa, nir_iadd_imm(b, intr->src[1].ssa, i), .align_mul = 4, .align_offset = 0);
1091 load[i] = nir_load_ubo(b, 1, 32, intr->src[0].ssa, nir_iadd_imm(b, intr->src[1].ssa, i), .align_mul = 4, .align_offset = 0, .range = 4);
1092 nir_intrinsic_set_access(nir_instr_as_intrinsic(load[i]->parent_instr), nir_intrinsic_access(intr));
1096 nir_ssa_def_rewrite_uses(&intr->dest.ssa, casted);
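Lines 1073-1096 handle a 64-bit UBO/SSBO load that has to be emulated with 32-bit accesses (misaligned constant-block access, or no 64-bit integer support). The byte offset has already been rescaled to 32-bit units on line 1079, so the two words live at offset and offset + 1. A sketch of that split, assuming a scalar destination as the assert on line 1084 requires; repacking with nir_pack_64_2x32_split() is one way the "casted" value consumed on line 1096 can be produced.

    #include "nir_builder.h"

    static void
    lower_64bit_bo_load(nir_builder *b, nir_intrinsic_instr *intr)
    {
       assert(nir_dest_bit_size(intr->dest) == 64);
       assert(intr->dest.ssa.num_components == 1);

       b->cursor = nir_before_instr(&intr->instr);
       nir_ssa_def *load[2];
       for (unsigned i = 0; i < 2; i++) {
          if (intr->intrinsic == nir_intrinsic_load_ssbo)
             load[i] = nir_load_ssbo(b, 1, 32, intr->src[0].ssa,
                                     nir_iadd_imm(b, intr->src[1].ssa, i),
                                     .align_mul = 4, .align_offset = 0);
          else
             load[i] = nir_load_ubo(b, 1, 32, intr->src[0].ssa,
                                    nir_iadd_imm(b, intr->src[1].ssa, i),
                                    .align_mul = 4, .align_offset = 0,
                                    .range = 4);
          /* Line 1092 additionally copies the access flags onto new SSBO
           * loads. */
       }
       nir_ssa_def *packed = nir_pack_64_2x32_split(b, load[0], load[1]);
       nir_ssa_def_rewrite_uses(&intr->dest.ssa, packed);
       nir_instr_remove(&intr->instr);
    }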
1103 bool force_2x32 = nir_dest_bit_size(intr->dest) == 64 && !has_int64;
1104 nir_ssa_def *offset = nir_udiv_imm(b, intr->src[0].ssa, (force_2x32 ? 32 : nir_dest_bit_size(intr->dest)) / 8);
1105 nir_instr_rewrite_src_ssa(instr, &intr->src[0], offset);
1109 assert(intr->dest.ssa.num_components == 1);
1113 load[i] = nir_load_shared(b, 1, 32, nir_iadd_imm(b, intr->src[0].ssa, i), .align_mul = 4, .align_offset = 0);
1116 nir_ssa_def_rewrite_uses(&intr->dest.ssa, casted);
1123 bool force_2x32 = nir_src_bit_size(intr->src[0]) == 64 && !has_int64;
1124 nir_ssa_def *offset = nir_udiv_imm(b, intr->src[2].ssa, (force_2x32 ? 32 : nir_src_bit_size(intr->src[0])) / 8);
1125 nir_instr_rewrite_src_ssa(instr, &intr->src[2], offset);
1129 assert(intr->src[0].ssa->num_components == 1);
1130 nir_ssa_def *vals[2] = {nir_unpack_64_2x32_split_x(b, intr->src[0].ssa), nir_unpack_64_2x32_split_y(b, intr->src[0].ssa)};
1132 nir_store_ssbo(b, vals[i], intr->src[1].ssa, nir_iadd_imm(b, intr->src[2].ssa, i), .align_mul = 4, .align_offset = 0);
1139 bool force_2x32 = nir_src_bit_size(intr->src[0]) == 64 && !has_int64;
1140 nir_ssa_def *offset = nir_udiv_imm(b, intr->src[1].ssa, (force_2x32 ? 32 : nir_src_bit_size(intr->src[0])) / 8);
1141 nir_instr_rewrite_src_ssa(instr, &intr->src[1], offset);
1143 if (nir_src_bit_size(intr->src[0]) == 64 && !has_int64) {
1145 assert(intr->src[0].ssa->num_components == 1);
1146 nir_ssa_def *vals[2] = {nir_unpack_64_2x32_split_x(b, intr->src[0].ssa), nir_unpack_64_2x32_split_y(b, intr->src[0].ssa)};
1148 nir_store_shared(b, vals[i], nir_iadd_imm(b, intr->src[1].ssa, i), .align_mul = 4, .align_offset = 0);
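Lines 1123-1148 are the store-side counterpart: when 64-bit values are unsupported, the source is unpacked into its low and high 32-bit halves and written with two stores at consecutive 32-bit offsets (the offset source has again already been divided down to 32-bit units). A sketch for the shared-memory case, mirroring lines 1143-1148; the SSBO variant differs only in carrying the block index along.

    #include "nir_builder.h"

    static void
    lower_64bit_shared_store(nir_builder *b, nir_intrinsic_instr *intr)
    {
       assert(nir_src_bit_size(intr->src[0]) == 64);
       assert(intr->src[0].ssa->num_components == 1);

       b->cursor = nir_before_instr(&intr->instr);
       nir_ssa_def *vals[2] = {
          nir_unpack_64_2x32_split_x(b, intr->src[0].ssa),
          nir_unpack_64_2x32_split_y(b, intr->src[0].ssa),
       };
       for (unsigned i = 0; i < 2; i++)
          nir_store_shared(b, vals[i],
                           nir_iadd_imm(b, intr->src[1].ssa, i),
                           .align_mul = 4, .align_offset = 0);
       nir_instr_remove(&intr->instr);
    }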
1220 nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
1221 switch (intr->intrinsic) {
1255 nir_ssa_def *offset = intr->src[1].ssa;
1256 nir_src *src = &intr->src[0];
1257 nir_variable *var = get_bo_var(b->shader, bo, true, src, nir_dest_bit_size(intr->dest));
1267 unsigned num_components = nir_dest_num_components(intr->dest);
1271 nir_ssa_dest_init(&new_instr->instr, &new_instr->dest, 1, nir_dest_bit_size(intr->dest), "");
1274 for (unsigned i = 2; i < nir_intrinsic_infos[intr->intrinsic].num_srcs; i++)
1275 nir_src_copy(&new_instr->src[i - 1], &intr->src[i]);
1283 nir_ssa_def_rewrite_uses(&intr->dest.ssa, load);
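Lines 1255-1283 turn a buffer atomic into a deref-based one: a new intrinsic is created, given a scalar destination of the original bit size, pointed at a deref of the replacement variable, and the remaining sources are copied down one slot (the deref replaces the block-index and offset sources). The sketch below shows only that mechanical part; the choice of deref_op and the construction of elem_deref are left to the caller and are assumptions about the surrounding pass.

    #include "nir_builder.h"

    static void
    rewrite_bo_atomic(nir_builder *b, nir_intrinsic_instr *intr,
                      nir_deref_instr *elem_deref, nir_intrinsic_op deref_op)
    {
       b->cursor = nir_before_instr(&intr->instr);

       nir_intrinsic_instr *new_instr =
          nir_intrinsic_instr_create(b->shader, deref_op);
       new_instr->num_components = 1;
       nir_ssa_dest_init(&new_instr->instr, &new_instr->dest, 1,
                         nir_dest_bit_size(intr->dest), "");
       new_instr->src[0] = nir_src_for_ssa(&elem_deref->dest.ssa);
       /* srcs 2..N (data, compare value) shift down to 1..N-1, as on
        * lines 1274-1275. */
       for (unsigned i = 2; i < nir_intrinsic_infos[intr->intrinsic].num_srcs; i++)
          nir_src_copy(&new_instr->src[i - 1], &intr->src[i]);
       nir_builder_instr_insert(b, &new_instr->instr);

       nir_ssa_def_rewrite_uses(&intr->dest.ssa, &new_instr->dest.ssa);
       nir_instr_remove(&intr->instr);
    }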
1293 nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
1300 switch (intr->intrinsic) {
1314 src = &intr->src[1];
1315 var = get_bo_var(b->shader, bo, true, src, nir_src_bit_size(intr->src[0]));
1316 offset = intr->src[2].ssa;
1320 src = &intr->src[0];
1321 var = get_bo_var(b->shader, bo, true, src, nir_dest_bit_size(intr->dest));
1322 offset = intr->src[1].ssa;
1325 src = &intr->src[0];
1326 var = get_bo_var(b->shader, bo, false, src, nir_dest_bit_size(intr->dest));
1327 offset = intr->src[1].ssa;
1343 assert(intr->num_components <= 2);
1346 for (unsigned i = 0; i < intr->num_components; i++) {
1349 if (intr->intrinsic == nir_intrinsic_load_ssbo)
1350 nir_intrinsic_set_access(nir_instr_as_intrinsic(result[i]->parent_instr), nir_intrinsic_access(intr));
1353 nir_ssa_def *load = nir_vec(b, result, intr->num_components);
1354 nir_ssa_def_rewrite_uses(&intr->dest.ssa, load);
1357 nir_build_store_deref(b, &deref_arr->dest.ssa, intr->src[0].ssa, BITFIELD_MASK(intr->num_components), nir_intrinsic_access(intr));
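Lines 1314-1357 funnel SSBO/UBO loads and stores through a deref into a replacement variable; the store on line 1357 writes through &deref_arr->dest.ssa with the original access flags. A small sketch of that deref plumbing, assuming the replacement variable is declared as a plain (runtime-sized) array so a single nir_build_deref_array() reaches the element.

    #include "nir_builder.h"

    static nir_ssa_def *
    load_bo_elem(nir_builder *b, nir_variable *var, nir_ssa_def *index)
    {
       nir_deref_instr *deref_arr =
          nir_build_deref_array(b, nir_build_deref_var(b, var), index);
       return nir_load_deref(b, deref_arr);
    }

    static void
    store_bo_elem(nir_builder *b, nir_variable *var, nir_ssa_def *index,
                  nir_ssa_def *value, nir_intrinsic_instr *intr)
    {
       nir_deref_instr *deref_arr =
          nir_build_deref_array(b, nir_build_deref_var(b, var), index);
       /* Same call shape as line 1357. */
       nir_build_store_deref(b, &deref_arr->dest.ssa, value,
                             BITFIELD_MASK(value->num_components),
                             nir_intrinsic_access(intr));
    }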
1468 nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
1469 if (intr->intrinsic != nir_intrinsic_load_deref)
1471 nir_variable *deref_var = nir_intrinsic_get_var(intr, 0);
1474 nir_ssa_def *undef = nir_ssa_undef(b, nir_dest_num_components(intr->dest), nir_dest_bit_size(intr->dest));
1475 nir_ssa_def_rewrite_uses(&intr->dest.ssa, undef);
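Lines 1468-1475 handle loads from a variable that no longer has anything backing it: the destination is simply replaced with an undef of the same width and bit size. A compact sketch, essentially the two quoted lines plus removal of the dead load:

    #include "nir_builder.h"

    static void
    replace_dead_load(nir_builder *b, nir_intrinsic_instr *intr)
    {
       b->cursor = nir_before_instr(&intr->instr);
       nir_ssa_def *undef =
          nir_ssa_undef(b, nir_dest_num_components(intr->dest),
                        nir_dest_bit_size(intr->dest));
       nir_ssa_def_rewrite_uses(&intr->dest.ssa, undef);
       nir_instr_remove(&intr->instr);
    }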
1655 nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
1656 if (intr->intrinsic != nir_intrinsic_store_deref &&
1657 intr->intrinsic != nir_intrinsic_load_deref)
1659 if (nir_intrinsic_get_var(intr, 0) != var)
1661 if ((intr->intrinsic == nir_intrinsic_store_deref && intr->src[1].ssa->bit_size != 64) ||
1662 (intr->intrinsic == nir_intrinsic_load_deref && intr->dest.ssa.bit_size != 64))
1665 nir_deref_instr *deref = nir_src_as_deref(intr->src[0]);
1666 unsigned num_components = intr->num_components * 2;
1672 if (intr->intrinsic == nir_intrinsic_store_deref) {
1674 for (unsigned i = 0; i < intr->num_components; i++) {
1675 nir_ssa_def *ssa = nir_unpack_64_2x32(&b, nir_channel(&b, intr->src[1].ssa, i));
1679 unsigned wrmask = nir_intrinsic_write_mask(intr);
1682 for (unsigned i = 0; i < intr->num_components; i++) {
1785 dest = dests[idx] = nir_vec(&b, comp, intr->num_components);
1799 for (unsigned i = 0; i < intr->num_components; i++) {
1802 dest = nir_vec(&b, comp, intr->num_components);
1812 dest = nir_vec(&b, comp, intr->num_components);
1814 nir_ssa_def_rewrite_uses_after(&intr->dest.ssa, dest, instr);
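Lines 1655-1814 rewrite 64-bit loads and stores of one variable into 32-bit operations with twice the components: stores unpack each 64-bit channel into two 32-bit channels and widen the write mask (lines 1672-1682), while loads regather pairs of 32-bit channels back into 64-bit values before the final rewrite on line 1814. A sketch of the store side, assuming deref32 points at a matching 32-bit twin variable with doubled components.

    #include "nir_builder.h"

    static void
    store_64bit_as_2x32(nir_builder *b, nir_intrinsic_instr *intr,
                        nir_deref_instr *deref32)
    {
       b->cursor = nir_before_instr(&intr->instr);

       unsigned num_components = intr->num_components * 2;
       nir_ssa_def *comps[NIR_MAX_VEC_COMPONENTS];
       for (unsigned i = 0; i < intr->num_components; i++) {
          nir_ssa_def *pair =
             nir_unpack_64_2x32(b, nir_channel(b, intr->src[1].ssa, i));
          comps[i * 2 + 0] = nir_channel(b, pair, 0);
          comps[i * 2 + 1] = nir_channel(b, pair, 1);
       }

       /* Each written 64-bit channel covers two 32-bit channels. */
       unsigned wrmask = nir_intrinsic_write_mask(intr);
       unsigned new_wrmask = 0;
       for (unsigned i = 0; i < intr->num_components; i++) {
          if (wrmask & BITFIELD_BIT(i))
             new_wrmask |= BITFIELD_BIT(i * 2) | BITFIELD_BIT(i * 2 + 1);
       }

       nir_store_deref(b, deref32, nir_vec(b, comps, num_components),
                       new_wrmask);
       nir_instr_remove(&intr->instr);
    }

On the load side, the inverse is nir_pack_64_2x32() over consecutive channel pairs of the 32-bit load, followed by the nir_ssa_def_rewrite_uses_after() on line 1814.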
2198 nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
2199 if (intr->intrinsic != nir_intrinsic_load_instance_id)
2202 nir_ssa_def *def = nir_isub(b, &intr->dest.ssa, nir_load_base_instance(b));
2203 nir_ssa_def_rewrite_uses_after(&intr->dest.ssa, def, def->parent_instr);
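Lines 2198-2203 adjust gl_InstanceID: the loaded value has nir_load_base_instance() subtracted from it, presumably because the Vulkan instance index includes the base instance while GL's gl_InstanceID does not. The notable detail is the _after rewrite with def->parent_instr, which keeps the new isub reading the original load while every later user sees the adjusted value. As a self-contained callback sketch:

    #include "nir_builder.h"

    static bool
    lower_instance_id(nir_builder *b, nir_instr *instr, void *data)
    {
       if (instr->type != nir_instr_type_intrinsic)
          return false;
       nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
       if (intr->intrinsic != nir_intrinsic_load_instance_id)
          return false;

       b->cursor = nir_after_instr(instr);
       nir_ssa_def *def =
          nir_isub(b, &intr->dest.ssa, nir_load_base_instance(b));
       nir_ssa_def_rewrite_uses_after(&intr->dest.ssa, def,
                                      def->parent_instr);
       return true;
    }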
2755 nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
2756 if (intr->intrinsic == nir_intrinsic_image_deref_load ||
2757 intr->intrinsic == nir_intrinsic_image_deref_sparse_load ||
2758 intr->intrinsic == nir_intrinsic_image_deref_store ||
2759 intr->intrinsic == nir_intrinsic_image_deref_atomic_add ||
2760 intr->intrinsic == nir_intrinsic_image_deref_atomic_imin ||
2761 intr->intrinsic == nir_intrinsic_image_deref_atomic_umin ||
2762 intr->intrinsic == nir_intrinsic_image_deref_atomic_imax ||
2763 intr->intrinsic == nir_intrinsic_image_deref_atomic_umax ||
2764 intr->intrinsic == nir_intrinsic_image_deref_atomic_and ||
2765 intr->intrinsic == nir_intrinsic_image_deref_atomic_or ||
2766 intr->intrinsic == nir_intrinsic_image_deref_atomic_xor ||
2767 intr->intrinsic == nir_intrinsic_image_deref_atomic_exchange ||
2768 intr->intrinsic == nir_intrinsic_image_deref_atomic_comp_swap ||
2769 intr->intrinsic == nir_intrinsic_image_deref_atomic_fadd ||
2770 intr->intrinsic == nir_intrinsic_image_deref_size ||
2771 intr->intrinsic == nir_intrinsic_image_deref_samples ||
2772 intr->intrinsic == nir_intrinsic_image_deref_format ||
2773 intr->intrinsic == nir_intrinsic_image_deref_order) {
2776 nir_deref_instr_get_variable(nir_src_as_deref(intr->src[0]));
2784 if (intr->intrinsic == nir_intrinsic_is_sparse_texels_resident ||
2785 intr->intrinsic == nir_intrinsic_image_deref_sparse_load)
2790 switch (intr->intrinsic) {
2792 nir_variable *var = nir_intrinsic_get_var(intr, 0);
2812 nir_intrinsic_instr *intr = nir_instr_as_intrinsic(parent);
2813 assert(intr->intrinsic == nir_intrinsic_is_sparse_texels_resident);
2864 nir_intrinsic_instr *intr = nir_instr_as_intrinsic(parent);
2865 assert(intr->intrinsic == nir_intrinsic_is_sparse_texels_resident);
2866 src = intr->src[0].ssa;
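Lines 2755-2792 classify the deref-based image intrinsics and fetch the image variable from the deref in src[0] (line 2776), or via the nir_intrinsic_get_var() shorthand (line 2792); lines 2812-2813 and 2864-2866 additionally assert that the instruction consuming a sparse load's residency code is nir_intrinsic_is_sparse_texels_resident. A trimmed sketch of the variable lookup only, with the op list abbreviated compared with lines 2756-2773:

    #include "nir_builder.h"

    static nir_variable *
    image_intr_var(nir_intrinsic_instr *intr)
    {
       switch (intr->intrinsic) {
       case nir_intrinsic_image_deref_load:
       case nir_intrinsic_image_deref_sparse_load:
       case nir_intrinsic_image_deref_store:
       case nir_intrinsic_image_deref_size:
       case nir_intrinsic_image_deref_samples:
          /* Same lookup as line 2776. */
          return nir_deref_instr_get_variable(nir_src_as_deref(intr->src[0]));
       default:
          return NULL;
       }
    }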