Lines Matching refs:intr
493 unary_func_name(enum dxil_intr intr)
495 switch (intr) {
511 enum dxil_intr intr,
515 unary_func_name(intr),
520 const struct dxil_value *opcode = dxil_module_get_int32_const(&ctx->mod, intr);
534 enum dxil_intr intr,
541 const struct dxil_value *opcode = dxil_module_get_int32_const(&ctx->mod, intr);
556 enum dxil_intr intr,
565 const struct dxil_value *opcode = dxil_module_get_int32_const(&ctx->mod, intr);
581 enum dxil_intr intr,
591 const struct dxil_value *opcode = dxil_module_get_int32_const(&ctx->mod, intr);
2036 enum dxil_intr intr, const struct dxil_value *op)
2042 const struct dxil_value *v = emit_unary_call(ctx, overload, intr, op);
2051 enum dxil_intr intr,
2062 const struct dxil_value *v = emit_binary_call(ctx, overload, intr,
2072 enum dxil_intr intr,
2089 const struct dxil_value *v = emit_tertiary_call(ctx, overload, intr,
2571 emit_barrier(struct ntd_context *ctx, nir_intrinsic_instr *intr)
2574 nir_intrinsic_memory_modes(intr),
2575 nir_intrinsic_execution_scope(intr),
2576 nir_intrinsic_memory_scope(intr));
2581 emit_memory_barrier(struct ntd_context *ctx, nir_intrinsic_instr *intr)
2591 emit_memory_barrier_shared(struct ntd_context *ctx, nir_intrinsic_instr *intr)
2601 emit_group_memory_barrier(struct ntd_context *ctx, nir_intrinsic_instr *intr)
2610 emit_control_barrier(struct ntd_context *ctx, nir_intrinsic_instr *intr)
2620 nir_intrinsic_instr *intr)
2622 assert(intr->dest.is_ssa);
2623 nir_component_mask_t comps = nir_ssa_def_components_read(&intr->dest.ssa);
2625 for (int i = 0; i < nir_intrinsic_dest_components(intr); i++) {
2635 store_dest_value(ctx, &intr->dest, i, globalid);
2643 nir_intrinsic_instr *intr)
2645 assert(intr->dest.is_ssa);
2646 nir_component_mask_t comps = nir_ssa_def_components_read(&intr->dest.ssa);
2648 for (int i = 0; i < nir_intrinsic_dest_components(intr); i++) {
2658 store_dest_value(ctx, &intr->dest, i, threadidingroup);
2666 nir_intrinsic_instr *intr)
2668 assert(intr->dest.is_ssa);
2674 store_dest_value(ctx, &intr->dest, 0, flattenedthreadidingroup);
2681 nir_intrinsic_instr *intr)
2683 assert(intr->dest.is_ssa);
2684 nir_component_mask_t comps = nir_ssa_def_components_read(&intr->dest.ssa);
2686 for (int i = 0; i < nir_intrinsic_dest_components(intr); i++) {
2694 store_dest_value(ctx, &intr->dest, i, groupid);
2722 nir_intrinsic_instr *intr, const char *name,
2726 store_dest_value(ctx, &intr->dest, 0, value);
2732 emit_load_sample_mask_in(struct ntd_context *ctx, nir_intrinsic_instr *intr)
2745 store_dest_value(ctx, &intr->dest, 0, value);
2751 nir_intrinsic_instr *intr)
2775 store_dest_value(ctx, &intr->dest, i, value);
2778 for (unsigned i = num_coords; i < intr->dest.ssa.num_components; ++i) {
2780 store_dest_value(ctx, &intr->dest, i, value);
2895 emit_load_ssbo(struct ntd_context *ctx, nir_intrinsic_instr *intr)
2901 nir_variable *var = nir_get_binding_variable(ctx->shader, nir_chase_binding(intr->src[0]));
2906 const struct dxil_value *handle = get_resource_handle(ctx, &intr->src[0], class, DXIL_RESOURCE_KIND_RAW_BUFFER);
2908 get_src(ctx, &intr->src[1], 0, nir_type_uint);
2912 assert(nir_src_bit_size(intr->src[0]) == 32);
2913 assert(nir_intrinsic_dest_components(intr) <= 4);
2924 for (int i = 0; i < nir_intrinsic_dest_components(intr); i++) {
2929 store_dest_value(ctx, &intr->dest, i, val);
2935 emit_store_ssbo(struct ntd_context *ctx, nir_intrinsic_instr *intr)
2937 const struct dxil_value* handle = get_resource_handle(ctx, &intr->src[1], DXIL_RESOURCE_CLASS_UAV, DXIL_RESOURCE_KIND_RAW_BUFFER);
2939 get_src(ctx, &intr->src[2], 0, nir_type_uint);
2943 assert(nir_src_bit_size(intr->src[0]) == 32);
2944 unsigned num_components = nir_src_num_components(intr->src[0]);
2948 value[i] = get_src(ctx, &intr->src[0], i, nir_type_uint);
2974 emit_store_ssbo_masked(struct ntd_context *ctx, nir_intrinsic_instr *intr)
2977 get_src(ctx, &intr->src[0], 0, nir_type_uint);
2979 get_src(ctx, &intr->src[1], 0, nir_type_uint);
2980 const struct dxil_value* handle = get_resource_handle(ctx, &intr->src[2], DXIL_RESOURCE_CLASS_UAV, DXIL_RESOURCE_KIND_RAW_BUFFER);
2982 get_src(ctx, &intr->src[3], 0, nir_type_uint);
3000 emit_store_shared(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3007 assert(nir_src_bit_size(intr->src[0]) == 32);
3008 assert(nir_src_num_components(intr->src[0]) == 1);
3014 if (intr->intrinsic == nir_intrinsic_store_shared_dxil)
3015 index = get_src(ctx, &intr->src[1], 0, nir_type_uint);
3017 index = get_src(ctx, &intr->src[2], 0, nir_type_uint);
3028 value = get_src(ctx, &intr->src[0], 0, nir_type_uint);
3032 if (intr->intrinsic == nir_intrinsic_store_shared_dxil)
3035 const struct dxil_value *mask = get_src(ctx, &intr->src[1], 0, nir_type_uint);
3053 emit_store_scratch(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3060 assert(nir_src_bit_size(intr->src[0]) == 32);
3061 assert(nir_src_num_components(intr->src[0]) == 1);
3067 index = get_src(ctx, &intr->src[1], 0, nir_type_uint);
3078 value = get_src(ctx, &intr->src[0], 0, nir_type_uint);
3086 emit_load_ubo(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3088 const struct dxil_value* handle = get_resource_handle(ctx, &intr->src[0], DXIL_RESOURCE_CLASS_CBV, DXIL_RESOURCE_KIND_CBUFFER);
3093 nir_const_value *const_offset = nir_src_as_const_value(intr->src[1]);
3097 const struct dxil_value *offset_src = get_src(ctx, &intr->src[1], 0, nir_type_uint);
3110 for (unsigned i = 0; i < nir_dest_num_components(intr->dest); ++i) {
3112 store_dest(ctx, &intr->dest, i, retval,
3113 nir_dest_bit_size(intr->dest) > 1 ? nir_type_float : nir_type_bool);
3119 emit_load_ubo_dxil(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3121 assert(nir_dest_num_components(intr->dest) <= 4);
3122 assert(nir_dest_bit_size(intr->dest) == 32);
3124 const struct dxil_value* handle = get_resource_handle(ctx, &intr->src[0], DXIL_RESOURCE_CLASS_CBV, DXIL_RESOURCE_KIND_CBUFFER);
3126 get_src(ctx, &intr->src[1], 0, nir_type_uint);
3135 for (unsigned i = 0; i < nir_dest_num_components(intr->dest); i++)
3136 store_dest_value(ctx, &intr->dest, i,
3157 emit_store_output_via_intrinsic(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3159 assert(intr->intrinsic == nir_intrinsic_store_output ||
3161 bool is_patch_constant = intr->intrinsic == nir_intrinsic_store_output &&
3163 nir_alu_type out_type = nir_intrinsic_src_type(intr);
3164 enum overload_type overload = get_overload(out_type, intr->src[0].ssa->bit_size);
3174 const struct dxil_value *output_id = dxil_module_get_int32_const(&ctx->mod, nir_intrinsic_base(intr));
3175 unsigned row_index = intr->intrinsic == nir_intrinsic_store_output ? 1 : 2;
3180 nir_io_semantics semantics = nir_intrinsic_io_semantics(intr);
3190 row = get_src(ctx, &intr->src[row_index], 0, nir_type_int);
3193 uint32_t writemask = nir_intrinsic_write_mask(intr);
3195 nir_variable *var = find_patch_matching_variable_by_driver_location(ctx->shader, nir_var_shader_out, nir_intrinsic_base(intr), is_patch_constant);
3197 unsigned base_component = nir_intrinsic_component(intr) - var_base_component;
3201 &ctx->mod.patch_consts[nir_intrinsic_base(intr)] :
3202 &ctx->mod.outputs[nir_intrinsic_base(intr)];
3203 unsigned comp_size = intr->src[0].ssa->bit_size == 64 ? 2 : 1;
3210 for (unsigned i = 0; i < intr->num_components; ++i)
3217 if (!nir_src_is_const(intr->src[row_index])) {
3219 &ctx->mod.psv_patch_consts[nir_intrinsic_base(intr)] :
3220 &ctx->mod.psv_outputs[nir_intrinsic_base(intr)];
3225 for (unsigned i = 0; i < intr->num_components && success; ++i) {
3231 const struct dxil_value *value = get_src(ctx, &intr->src[0], i, out_type);
3246 emit_load_input_via_intrinsic(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3252 (nir_intrinsic_dest_type(intr) & nir_type_float)) {
3253 nir_variable *var = nir_find_variable_with_driver_location(ctx->shader, nir_var_shader_in, nir_intrinsic_base(intr));
3259 intr->intrinsic == nir_intrinsic_load_input) ||
3261 intr->intrinsic == nir_intrinsic_load_output);
3262 bool is_output_control_point = intr->intrinsic == nir_intrinsic_load_per_vertex_output;
3288 nir_intrinsic_base(intr) :
3289 ctx->mod.input_mappings[nir_intrinsic_base(intr)]);
3294 intr->intrinsic == nir_intrinsic_load_per_vertex_input ||
3295 intr->intrinsic == nir_intrinsic_load_per_vertex_output;
3300 vertex_id = get_src(ctx, &intr->src[0], 0, nir_type_int);
3317 nir_io_semantics semantics = nir_intrinsic_io_semantics(intr);
3327 row = get_src(ctx, &intr->src[row_index], 0, nir_type_int);
3329 nir_alu_type out_type = nir_intrinsic_dest_type(intr);
3330 enum overload_type overload = get_overload(out_type, intr->dest.ssa.bit_size);
3337 nir_variable *var = find_patch_matching_variable_by_driver_location(ctx->shader, nir_var_shader_in, nir_intrinsic_base(intr), is_patch_constant);
3339 unsigned base_component = nir_intrinsic_component(intr) - var_base_component;
3343 intr->intrinsic != nir_intrinsic_load_output) {
3345 &ctx->mod.patch_consts[nir_intrinsic_base(intr)] :
3346 &ctx->mod.inputs[ctx->mod.input_mappings[nir_intrinsic_base(intr)]];
3347 unsigned comp_size = intr->dest.ssa.bit_size == 64 ? 2 : 1;
3348 unsigned comp_mask = (1 << (intr->num_components * comp_size)) - 1;
3355 if (!nir_src_is_const(intr->src[row_index])) {
3357 &ctx->mod.psv_patch_consts[nir_intrinsic_base(intr)] :
3358 &ctx->mod.psv_inputs[ctx->mod.input_mappings[nir_intrinsic_base(intr)]];
3363 for (unsigned i = 0; i < intr->num_components; ++i) {
3380 store_dest(ctx, &intr->dest, i, retval, out_type);
3386 emit_load_interpolated_input(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3388 nir_intrinsic_instr *barycentric = nir_src_as_intrinsic(intr->src[0]);
3430 args[1] = dxil_module_get_int32_const(&ctx->mod, nir_intrinsic_base(intr));
3431 args[2] = get_src(ctx, &intr->src[1], 0, nir_type_int);
3438 nir_variable *var = find_patch_matching_variable_by_driver_location(ctx->shader, nir_var_shader_in, nir_intrinsic_base(intr), false);
3440 unsigned base_component = nir_intrinsic_component(intr) - var_base_component;
3444 &ctx->mod.inputs[ctx->mod.input_mappings[nir_intrinsic_base(intr)]];
3445 unsigned comp_size = intr->dest.ssa.bit_size == 64 ? 2 : 1;
3446 unsigned comp_mask = (1 << (intr->num_components * comp_size)) - 1;
3451 if (!nir_src_is_const(intr->src[1])) {
3453 &ctx->mod.psv_inputs[ctx->mod.input_mappings[nir_intrinsic_base(intr)]];
3458 for (unsigned i = 0; i < intr->num_components; ++i) {
3464 store_dest(ctx, &intr->dest, i, retval, nir_type_float);
3470 emit_load_ptr(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3473 nir_deref_instr_get_variable(nir_src_as_deref(intr->src[0]));
3476 get_src(ctx, &intr->src[1], 0, nir_type_uint);
3489 store_dest(ctx, &intr->dest, 0, retval, nir_type_uint);
3494 emit_load_shared(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3497 unsigned bit_size = nir_dest_bit_size(intr->dest);
3504 assert(nir_dest_num_components(intr->dest) == 1);
3510 index = get_src(ctx, &intr->src[0], 0, nir_type_uint);
3525 store_dest(ctx, &intr->dest, 0, retval, nir_type_uint);
3530 emit_load_scratch(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3533 unsigned bit_size = nir_dest_bit_size(intr->dest);
3540 assert(nir_dest_num_components(intr->dest) == 1);
3546 index = get_src(ctx, &intr->src[0], 0, nir_type_uint);
3561 store_dest(ctx, &intr->dest, 0, retval, nir_type_uint);
3585 emit_discard_if(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3587 const struct dxil_value *value = get_src(ctx, &intr->src[0], 0, nir_type_bool);
3602 emit_emit_vertex(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3605 const struct dxil_value *stream_id = dxil_module_get_int8_const(&ctx->mod, nir_intrinsic_stream_id(intr));
3622 emit_end_primitive(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3625 const struct dxil_value *stream_id = dxil_module_get_int8_const(&ctx->mod, nir_intrinsic_stream_id(intr));
3642 emit_image_store(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3644 const struct dxil_value *handle = get_resource_handle(ctx, &intr->src[0], DXIL_RESOURCE_CLASS_UAV, DXIL_RESOURCE_KIND_TEXTURE2D);
3649 if (intr->intrinsic == nir_intrinsic_image_deref_store)
3650 is_array = glsl_sampler_type_is_array(nir_src_as_deref(intr->src[0])->type);
3652 is_array = nir_intrinsic_image_array(intr);
3659 enum glsl_sampler_dim image_dim = intr->intrinsic == nir_intrinsic_image_store ?
3660 nir_intrinsic_image_dim(intr) :
3661 glsl_get_sampler_dim(nir_src_as_deref(intr->src[0])->type);
3666 assert(num_coords <= nir_src_num_components(intr->src[1]));
3668 coord[i] = get_src(ctx, &intr->src[1], i, nir_type_uint);
3673 nir_alu_type in_type = nir_intrinsic_src_type(intr);
3676 assert(nir_src_bit_size(intr->src[3]) == 32);
3677 unsigned num_components = nir_src_num_components(intr->src[3]);
3681 value[i] = get_src(ctx, &intr->src[3], i, in_type);
3702 emit_image_load(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3704 const struct dxil_value *handle = get_resource_handle(ctx, &intr->src[0], DXIL_RESOURCE_CLASS_UAV, DXIL_RESOURCE_KIND_TEXTURE2D);
3709 if (intr->intrinsic == nir_intrinsic_image_deref_load)
3710 is_array = glsl_sampler_type_is_array(nir_src_as_deref(intr->src[0])->type);
3712 is_array = nir_intrinsic_image_array(intr);
3719 enum glsl_sampler_dim image_dim = intr->intrinsic == nir_intrinsic_image_load ?
3720 nir_intrinsic_image_dim(intr) :
3721 glsl_get_sampler_dim(nir_src_as_deref(intr->src[0])->type);
3726 assert(num_coords <= nir_src_num_components(intr->src[1]));
3728 coord[i] = get_src(ctx, &intr->src[1], i, nir_type_uint);
3733 nir_alu_type out_type = nir_intrinsic_dest_type(intr);
3746 assert(nir_dest_bit_size(intr->dest) == 32);
3747 unsigned num_components = nir_dest_num_components(intr->dest);
3753 store_dest(ctx, &intr->dest, i, component, out_type);
3767 emit_image_atomic(struct ntd_context *ctx, nir_intrinsic_instr *intr,
3770 const struct dxil_value *handle = get_resource_handle(ctx, &intr->src[0], DXIL_RESOURCE_CLASS_UAV, DXIL_RESOURCE_KIND_TEXTURE2D);
3775 nir_deref_instr *src_as_deref = nir_src_as_deref(intr->src[0]);
3779 is_array = nir_intrinsic_image_array(intr);
3788 nir_intrinsic_image_dim(intr);
3793 assert(num_coords <= nir_src_num_components(intr->src[1]));
3795 coord[i] = get_src(ctx, &intr->src[1], i, nir_type_uint);
3800 const struct dxil_value *value = get_src(ctx, &intr->src[3], 0, type);
3810 store_dest(ctx, &intr->dest, 0, retval, type);
3815 emit_image_atomic_comp_swap(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3817 const struct dxil_value *handle = get_resource_handle(ctx, &intr->src[0], DXIL_RESOURCE_CLASS_UAV, DXIL_RESOURCE_KIND_TEXTURE2D);
3822 if (intr->intrinsic == nir_intrinsic_image_deref_atomic_comp_swap)
3823 is_array = glsl_sampler_type_is_array(nir_src_as_deref(intr->src[0])->type);
3825 is_array = nir_intrinsic_image_array(intr);
3832 enum glsl_sampler_dim image_dim = intr->intrinsic == nir_intrinsic_image_atomic_comp_swap ?
3833 nir_intrinsic_image_dim(intr) :
3834 glsl_get_sampler_dim(nir_src_as_deref(intr->src[0])->type);
3839 assert(num_coords <= nir_src_num_components(intr->src[1]));
3841 coord[i] = get_src(ctx, &intr->src[1], i, nir_type_uint);
3846 const struct dxil_value *cmpval = get_src(ctx, &intr->src[3], 0, nir_type_uint);
3847 const struct dxil_value *newval = get_src(ctx, &intr->src[4], 0, nir_type_uint);
3857 store_dest(ctx, &intr->dest, 0, retval, nir_type_uint);
3887 emit_image_size(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3889 const struct dxil_value *handle = get_resource_handle(ctx, &intr->src[0], DXIL_RESOURCE_CLASS_UAV, DXIL_RESOURCE_KIND_TEXTURE2D);
3893 const struct dxil_value *lod = get_src(ctx, &intr->src[1], 0, nir_type_uint);
3905 for (unsigned i = 0; i < nir_dest_num_components(intr->dest); ++i) {
3907 store_dest(ctx, &intr->dest, i, retval, nir_type_uint);
3914 emit_get_ssbo_size(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3918 nir_variable *var = nir_get_binding_variable(ctx->shader, nir_chase_binding(intr->src[0]));
3923 const struct dxil_value *handle = get_resource_handle(ctx, &intr->src[0], class, DXIL_RESOURCE_KIND_RAW_BUFFER);
3938 store_dest(ctx, &intr->dest, 0, retval, nir_type_uint);
3944 emit_ssbo_atomic(struct ntd_context *ctx, nir_intrinsic_instr *intr,
3947 const struct dxil_value* handle = get_resource_handle(ctx, &intr->src[0], DXIL_RESOURCE_CLASS_UAV, DXIL_RESOURCE_KIND_RAW_BUFFER);
3949 get_src(ctx, &intr->src[1], 0, nir_type_uint);
3951 get_src(ctx, &intr->src[2], 0, type);
3970 store_dest(ctx, &intr->dest, 0, retval, type);
3975 emit_ssbo_atomic_comp_swap(struct ntd_context *ctx, nir_intrinsic_instr *intr)
3977 const struct dxil_value* handle = get_resource_handle(ctx, &intr->src[0], DXIL_RESOURCE_CLASS_UAV, DXIL_RESOURCE_KIND_RAW_BUFFER);
3979 get_src(ctx, &intr->src[1], 0, nir_type_uint);
3981 get_src(ctx, &intr->src[2], 0, nir_type_int);
3983 get_src(ctx, &intr->src[3], 0, nir_type_int);
4002 store_dest(ctx, &intr->dest, 0, retval, nir_type_int);
4007 emit_shared_atomic(struct ntd_context *ctx, nir_intrinsic_instr *intr,
4012 assert(nir_src_bit_size(intr->src[1]) == 32);
4018 index = get_src(ctx, &intr->src[0], 0, nir_type_uint);
4029 value = get_src(ctx, &intr->src[1], 0, type);
4039 store_dest(ctx, &intr->dest, 0, retval, type);
4044 emit_shared_atomic_comp_swap(struct ntd_context *ctx, nir_intrinsic_instr *intr)
4048 assert(nir_src_bit_size(intr->src[1]) == 32);
4054 index = get_src(ctx, &intr->src[0], 0, nir_type_uint);
4065 cmpval = get_src(ctx, &intr->src[1], 0, nir_type_uint);
4066 newval = get_src(ctx, &intr->src[2], 0, nir_type_uint);
4076 store_dest(ctx, &intr->dest, 0, retval, nir_type_uint);
4081 emit_vulkan_resource_index(struct ntd_context *ctx, nir_intrinsic_instr *intr)
4083 unsigned int binding = nir_intrinsic_binding(intr);
4085 bool const_index = nir_src_is_const(intr->src[0]);
4087 binding += nir_src_as_const_value(intr->src[0])->u32;
4095 const struct dxil_value *offset = get_src(ctx, &intr->src[0], 0, nir_type_uint32);
4104 store_dest(ctx, &intr->dest, 0, index_value, nir_type_uint32);
4105 store_dest(ctx, &intr->dest, 1, dxil_module_get_int32_const(&ctx->mod, 0), nir_type_uint32);
4110 emit_load_vulkan_descriptor(struct ntd_context *ctx, nir_intrinsic_instr *intr)
4112 nir_intrinsic_instr* index = nir_src_as_intrinsic(intr->src[0]);
4122 nir_variable *var = nir_get_binding_variable(ctx->shader, nir_chase_binding(intr->src[0]));
4127 switch (nir_intrinsic_desc_type(intr)) {
4142 const struct dxil_value *index_value = get_src(ctx, &intr->src[0], 0, nir_type_uint32);
4150 store_dest_value(ctx, &intr->dest, 0, handle);
4151 store_dest(ctx, &intr->dest, 1, get_src(ctx, &intr->src[0], 1, nir_type_uint32), nir_type_uint32);
4157 emit_load_sample_pos_from_id(struct ntd_context *ctx, nir_intrinsic_instr *intr)
4169 get_src(ctx, &intr->src[0], 0, nir_type_uint32),
4183 store_dest(ctx, &intr->dest, i, coord, nir_type_float32);
4189 emit_load_layer_id(struct ntd_context *ctx, nir_intrinsic_instr *intr)
4193 store_dest_value(ctx, &intr->dest, 0, layer_id);
4198 emit_load_sample_id(struct ntd_context *ctx, nir_intrinsic_instr *intr)
4201 intr->intrinsic == nir_intrinsic_load_sample_id_no_per_sample);
4204 return emit_load_unary_external_function(ctx, intr, "dx.op.sampleIndex",
4207 store_dest_value(ctx, &intr->dest, 0, dxil_module_get_int32_const(&ctx->mod, 0));
4212 emit_intrinsic(struct ntd_context *ctx, nir_intrinsic_instr *intr)
4214 switch (intr->intrinsic) {
4217 return emit_load_global_invocation_id(ctx, intr);
4219 return emit_load_local_invocation_id(ctx, intr);
4221 return emit_load_local_invocation_index(ctx, intr);
4224 return emit_load_local_workgroup_id(ctx, intr);
4226 return emit_load_ssbo(ctx, intr);
4228 return emit_store_ssbo(ctx, intr);
4230 return emit_store_ssbo_masked(ctx, intr);
4233 return emit_store_shared(ctx, intr);
4235 return emit_store_scratch(ctx, intr);
4237 return emit_load_ptr(ctx, intr);
4239 return emit_load_ubo(ctx, intr);
4241 return emit_load_ubo_dxil(ctx, intr);
4243 return emit_load_unary_external_function(ctx, intr, "dx.op.primitiveID",
4247 return emit_load_sample_id(ctx, intr);
4251 return emit_load_unary_external_function(ctx, intr, "dx.op.outputControlPointID",
4254 return emit_load_unary_external_function(ctx, intr, "dx.op.gsInstanceID",
4260 return emit_load_sample_mask_in(ctx, intr);
4262 return emit_load_tess_coord(ctx, intr);
4264 return emit_load_shared(ctx, intr);
4266 return emit_load_scratch(ctx, intr);
4269 return emit_discard_if(ctx, intr);
4274 return emit_emit_vertex(ctx, intr);
4276 return emit_end_primitive(ctx, intr);
4278 return emit_barrier(ctx, intr);
4283 return emit_memory_barrier(ctx, intr);
4285 return emit_memory_barrier_shared(ctx, intr);
4287 return emit_group_memory_barrier(ctx, intr);
4289 return emit_control_barrier(ctx, intr);
4291 return emit_ssbo_atomic(ctx, intr, DXIL_ATOMIC_ADD, nir_type_int);
4293 return emit_ssbo_atomic(ctx, intr, DXIL_ATOMIC_IMIN, nir_type_int);
4295 return emit_ssbo_atomic(ctx, intr, DXIL_ATOMIC_UMIN, nir_type_uint);
4297 return emit_ssbo_atomic(ctx, intr, DXIL_ATOMIC_IMAX, nir_type_int);
4299 return emit_ssbo_atomic(ctx, intr, DXIL_ATOMIC_UMAX, nir_type_uint);
4301 return emit_ssbo_atomic(ctx, intr, DXIL_ATOMIC_AND, nir_type_uint);
4303 return emit_ssbo_atomic(ctx, intr, DXIL_ATOMIC_OR, nir_type_uint);
4305 return emit_ssbo_atomic(ctx, intr, DXIL_ATOMIC_XOR, nir_type_uint);
4307 return emit_ssbo_atomic(ctx, intr, DXIL_ATOMIC_EXCHANGE, nir_type_int);
4309 return emit_ssbo_atomic_comp_swap(ctx, intr);
4311 return emit_shared_atomic(ctx, intr, DXIL_RMWOP_ADD, nir_type_int);
4313 return emit_shared_atomic(ctx, intr, DXIL_RMWOP_MIN, nir_type_int);
4315 return emit_shared_atomic(ctx, intr, DXIL_RMWOP_UMIN, nir_type_uint);
4317 return emit_shared_atomic(ctx, intr, DXIL_RMWOP_MAX, nir_type_int);
4319 return emit_shared_atomic(ctx, intr, DXIL_RMWOP_UMAX, nir_type_uint);
4321 return emit_shared_atomic(ctx, intr, DXIL_RMWOP_AND, nir_type_uint);
4323 return emit_shared_atomic(ctx, intr, DXIL_RMWOP_OR, nir_type_uint);
4325 return emit_shared_atomic(ctx, intr, DXIL_RMWOP_XOR, nir_type_uint);
4327 return emit_shared_atomic(ctx, intr, DXIL_RMWOP_XCHG, nir_type_int);
4329 return emit_shared_atomic_comp_swap(ctx, intr);
4332 return emit_image_atomic(ctx, intr, DXIL_ATOMIC_ADD, nir_type_int);
4335 return emit_image_atomic(ctx, intr, DXIL_ATOMIC_IMIN, nir_type_int);
4338 return emit_image_atomic(ctx, intr, DXIL_ATOMIC_UMIN, nir_type_uint);
4341 return emit_image_atomic(ctx, intr, DXIL_ATOMIC_IMAX, nir_type_int);
4344 return emit_image_atomic(ctx, intr, DXIL_ATOMIC_UMAX, nir_type_uint);
4347 return emit_image_atomic(ctx, intr, DXIL_ATOMIC_AND, nir_type_uint);
4350 return emit_image_atomic(ctx, intr, DXIL_ATOMIC_OR, nir_type_uint);
4353 return emit_image_atomic(ctx, intr, DXIL_ATOMIC_XOR, nir_type_uint);
4356 return emit_image_atomic(ctx, intr, DXIL_ATOMIC_EXCHANGE, nir_type_uint);
4359 return emit_image_atomic_comp_swap(ctx, intr);
4362 return emit_image_store(ctx, intr);
4365 return emit_image_load(ctx, intr);
4368 return emit_image_size(ctx, intr);
4370 return emit_get_ssbo_size(ctx, intr);
4375 return emit_load_input_via_intrinsic(ctx, intr);
4378 return emit_store_output_via_intrinsic(ctx, intr);
4387 return emit_load_interpolated_input(ctx, intr);
4391 return emit_vulkan_resource_index(ctx, intr);
4393 return emit_load_vulkan_descriptor(ctx, intr);
4395 return emit_load_layer_id(ctx, intr);
4398 return emit_load_sample_pos_from_id(ctx, intr);
4403 NIR_INSTR_UNSUPPORTED(&intr->instr);