Lines matching refs: pipeline
54 tu6_load_state_size(struct tu_pipeline *pipeline,
60 if (!(pipeline->active_desc_sets & (1u << i)))
117 tu6_emit_load_state(struct tu_pipeline *pipeline,
120 unsigned size = tu6_load_state_size(pipeline, layout, compute);
125 tu_cs_begin_sub_stream(&pipeline->cs, size, &cs);
131 * any shaders in a pipeline access, at the time that a draw or
132 * dispatch command is recorded to execute using that pipeline.
133 * However, if none of the shaders in a pipeline statically use any
135 * be bound for that set number, even if the pipeline layout includes
138 * This means that descriptor sets unused by the pipeline may have a
144 if (!(pipeline->active_desc_sets & (1u << i)))
243 pipeline->load_state = tu_cs_end_draw_state(&pipeline->cs, &cs);
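
The tu6_load_state_size/tu6_emit_load_state fragments above (source lines 54-243) hinge on the spec rule quoted in the comment: descriptor sets that no shader in the pipeline statically uses need not have valid bindings, so both the size calculation and the emission loop skip any set whose bit is clear in active_desc_sets. A minimal standalone sketch of that skip pattern, with invented types and per-set sizes (this is not the Turnip code itself):

    #include <stdint.h>
    #include <stdio.h>

    #define MAX_SETS 8                     /* hypothetical set limit */

    struct demo_set_layout { unsigned load_state_words; };   /* invented */

    struct demo_pipeline {
       uint32_t active_desc_sets;          /* bit i set => set i statically used */
    };

    unsigned
    demo_load_state_size(const struct demo_pipeline *p,
                         const struct demo_set_layout sets[MAX_SETS])
    {
       unsigned size = 0;
       for (unsigned i = 0; i < MAX_SETS; i++) {
          if (!(p->active_desc_sets & (1u << i)))
             continue;                      /* unused set: no load state needed */
          size += sets[i].load_state_words;
       }
       return size;
    }

    int main(void)
    {
       struct demo_set_layout sets[MAX_SETS] = {
          {16}, {16}, {16}, {16}, {16}, {16}, {16}, {16},
       };
       struct demo_pipeline p = { .active_desc_sets = (1u << 0) | (1u << 2) };
       printf("%u words\n", demo_load_state_size(&p, sets));  /* prints "32 words" */
       return 0;
    }
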
1526 struct tu_pipeline *pipeline)
1588 if (pipeline) {
1589 pipeline->lrz.fs_has_kill = fs->has_kill;
1590 pipeline->lrz.early_fragment_tests = fs->fs.early_fragment_tests;
1594 pipeline->lrz.force_late_z = true;
1712 struct tu_pipeline *pipeline)
1800 pipeline);
1820 tu6_emit_vertex_input(struct tu_pipeline *pipeline,
1829 tu_cs_begin_sub_stream(&pipeline->cs,
1836 if (!(pipeline->dynamic_state_mask & BIT(TU_DYNAMIC_STATE_VB_STRIDE))) {
1912 *vi_state = tu_cs_end_draw_state(&pipeline->cs, &cs);
2178 tu6_emit_rb_mrt_controls(struct tu_pipeline *pipeline,
2192 if (pipeline->dynamic_state_mask & BIT(TU_DYNAMIC_STATE_COLOR_WRITE_ENABLE))
2200 pipeline->logic_op_enabled = true;
2206 pipeline->num_rts = blend_info->attachmentCount;
2237 pipeline->color_write_enable |= BIT(i);
2239 pipeline->blend_enable |= BIT(i);
2246 pipeline->rb_mrt_control[i] = rb_mrt_control & pipeline->rb_mrt_control_mask;
2247 pipeline->rb_mrt_blend_control[i] = rb_mrt_blend_control;
2254 tu6_emit_blend_control(struct tu_pipeline *pipeline,
2264 pipeline->sp_blend_cntl =
2268 .unk8 = true).value & pipeline->sp_blend_cntl_mask;
2271 pipeline->rb_blend_cntl =
2278 pipeline->rb_blend_cntl_mask;
2283 struct tu_pipeline *pipeline)
2285 tu_cs_emit_regs(cs, A6XX_SP_BLEND_CNTL(.dword = pipeline->sp_blend_cntl));
2286 tu_cs_emit_regs(cs, A6XX_RB_BLEND_CNTL(.dword = pipeline->rb_blend_cntl));
2288 for (unsigned i = 0; i < pipeline->num_rts; i++) {
2290 A6XX_RB_MRT_CONTROL(i, .dword = pipeline->rb_mrt_control[i]),
2291 A6XX_RB_MRT_BLEND_CONTROL(i, .dword = pipeline->rb_mrt_blend_control[i]));
2313 struct tu_pipeline *pipeline,
2326 tu_bo_init_new(dev, &pipeline->pvtmem_bo, total_size,
2331 config->iova = pipeline->pvtmem_bo->iova;
2339 struct tu_pipeline *pipeline,
2344 uint32_t size = 1024 + tu6_load_state_size(pipeline, layout, compute);
2380 /* Allocate the space for the pipeline out of the device's RO suballocator.
2385 * The pipeline cache would seem like a natural place to stick the
2389 * pipeline destroy isn't synchronized by the cache.
2392 VkResult result = tu_suballoc_bo_alloc(&pipeline->bo, &dev->pipeline_suballoc,
2398 tu_cs_init_suballoc(&pipeline->cs, dev, &pipeline->bo);
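
The comment excerpted at source lines 2380-2392 explains why pipeline state is carved out of a device-wide read-only suballocator instead of the pipeline cache: pipeline destruction is not synchronized by the cache. Below is a bump-style suballocator sketch with invented names, only to illustrate the allocation step; the real tu_suballoc_bo_alloc may behave differently.

    #include <stdbool.h>
    #include <stdint.h>

    struct demo_suballoc {          /* invented stand-in for the device suballocator */
       uint64_t base_iova;          /* GPU address of the backing BO */
       uint32_t capacity;           /* total bytes available */
       uint32_t next;               /* current bump offset */
    };

    struct demo_suballoc_bo {       /* the slice handed to one pipeline */
       uint64_t iova;
       uint32_t size;
    };

    bool
    demo_suballoc_alloc(struct demo_suballoc *sa, struct demo_suballoc_bo *out,
                        uint32_t size, uint32_t align /* power of two */)
    {
       uint32_t offset = (sa->next + align - 1) & ~(align - 1);
       if (offset + size > sa->capacity)
          return false;             /* caller would fall back to a fresh backing BO */
       out->iova = sa->base_iova + offset;
       out->size = size;
       sa->next = offset + size;
       return true;
    }

Freeing (tu_suballoc_bo_free at source line 3763) is omitted from the sketch; a real suballocator has to track and reclaim released slices.
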
2405 const struct tu_pipeline *pipeline,
2416 !(pipeline->dynamic_state_mask & BIT(TU_DYNAMIC_STATE_RASTERIZER_DISCARD)))
2480 tu_upload_variant(struct tu_pipeline *pipeline,
2492 tu_cs_alloc(&pipeline->cs, variant->info.size / 4, 1, &memory);
2499 tu_append_executable(struct tu_pipeline *pipeline, struct ir3_shader_variant *variant,
2505 .nir_final = ralloc_strdup(pipeline->executables_mem_ctx, variant->disasm_info.nir),
2506 .disasm = ralloc_strdup(pipeline->executables_mem_ctx, variant->disasm_info.disasm),
2511 util_dynarray_append(&pipeline->executables, struct tu_pipeline_executable, exe);
2788 struct tu_pipeline *pipeline)
2812 pipeline->shared_consts = (struct tu_push_constant_range) {
2825 tu_pipeline_shader_key_init(&ir3_key, pipeline, builder->create_info);
2901 nir_shader_as_str(nir[stage], pipeline->executables_mem_ctx);
3030 tu_append_executable(pipeline, compiled_shaders->variants[stage],
3040 tu_append_executable(pipeline, vs->binning, NULL);
3050 pipeline->active_desc_sets = compiled_shaders->active_desc_sets;
3052 pipeline->tess.patch_type =
3087 struct tu_pipeline *pipeline)
3092 pipeline->gras_su_cntl_mask = ~0u;
3093 pipeline->rb_depth_cntl_mask = ~0u;
3094 pipeline->rb_stencil_cntl_mask = ~0u;
3095 pipeline->pc_raster_cntl_mask = ~0u;
3096 pipeline->vpc_unknown_9107_mask = ~0u;
3097 pipeline->sp_blend_cntl_mask = ~0u;
3098 pipeline->rb_blend_cntl_mask = ~0u;
3099 pipeline->rb_mrt_control_mask = ~0u;
3109 pipeline->gras_su_cntl_mask &= ~A6XX_GRAS_SU_CNTL_LINEHALFWIDTH__MASK;
3110 pipeline->dynamic_state_mask |= BIT(state);
3113 pipeline->dynamic_state_mask |= BIT(TU_DYNAMIC_STATE_SAMPLE_LOCATIONS);
3116 pipeline->gras_su_cntl_mask &=
3118 pipeline->dynamic_state_mask |= BIT(TU_DYNAMIC_STATE_GRAS_SU_CNTL);
3121 pipeline->gras_su_cntl_mask &= ~A6XX_GRAS_SU_CNTL_FRONT_CW;
3122 pipeline->dynamic_state_mask |= BIT(TU_DYNAMIC_STATE_GRAS_SU_CNTL);
3125 pipeline->dynamic_state_mask |= BIT(TU_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY);
3128 pipeline->dynamic_state_mask |= BIT(TU_DYNAMIC_STATE_VB_STRIDE);
3131 pipeline->dynamic_state_mask |= BIT(VK_DYNAMIC_STATE_VIEWPORT);
3134 pipeline->dynamic_state_mask |= BIT(VK_DYNAMIC_STATE_SCISSOR);
3137 pipeline->rb_depth_cntl_mask &=
3139 pipeline->dynamic_state_mask |= BIT(TU_DYNAMIC_STATE_RB_DEPTH_CNTL);
3142 pipeline->rb_depth_cntl_mask &= ~A6XX_RB_DEPTH_CNTL_Z_WRITE_ENABLE;
3143 pipeline->dynamic_state_mask |= BIT(TU_DYNAMIC_STATE_RB_DEPTH_CNTL);
3146 pipeline->rb_depth_cntl_mask &= ~A6XX_RB_DEPTH_CNTL_ZFUNC__MASK;
3147 pipeline->dynamic_state_mask |= BIT(TU_DYNAMIC_STATE_RB_DEPTH_CNTL);
3150 pipeline->rb_depth_cntl_mask &=
3152 pipeline->dynamic_state_mask |= BIT(TU_DYNAMIC_STATE_RB_DEPTH_CNTL);
3155 pipeline->rb_stencil_cntl_mask &= ~(A6XX_RB_STENCIL_CONTROL_STENCIL_ENABLE |
3158 pipeline->dynamic_state_mask |= BIT(TU_DYNAMIC_STATE_RB_STENCIL_CNTL);
3161 pipeline->rb_stencil_cntl_mask &= ~(A6XX_RB_STENCIL_CONTROL_FUNC__MASK |
3169 pipeline->dynamic_state_mask |= BIT(TU_DYNAMIC_STATE_RB_STENCIL_CNTL);
3172 pipeline->gras_su_cntl_mask &= ~A6XX_GRAS_SU_CNTL_POLY_OFFSET;
3173 pipeline->dynamic_state_mask |= BIT(TU_DYNAMIC_STATE_GRAS_SU_CNTL);
3176 pipeline->dynamic_state_mask |= BIT(TU_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE);
3179 pipeline->pc_raster_cntl_mask &= ~A6XX_PC_RASTER_CNTL_DISCARD;
3180 pipeline->vpc_unknown_9107_mask &= ~A6XX_VPC_UNKNOWN_9107_RASTER_DISCARD;
3181 pipeline->dynamic_state_mask |= BIT(TU_DYNAMIC_STATE_RASTERIZER_DISCARD);
3184 pipeline->sp_blend_cntl_mask &= ~A6XX_SP_BLEND_CNTL_ENABLE_BLEND__MASK;
3185 pipeline->rb_blend_cntl_mask &= ~A6XX_RB_BLEND_CNTL_ENABLE_BLEND__MASK;
3186 pipeline->rb_mrt_control_mask &= ~A6XX_RB_MRT_CONTROL_ROP_CODE__MASK;
3187 pipeline->dynamic_state_mask |= BIT(TU_DYNAMIC_STATE_BLEND);
3188 pipeline->dynamic_state_mask |= BIT(TU_DYNAMIC_STATE_LOGIC_OP);
3191 pipeline->sp_blend_cntl_mask &= ~A6XX_SP_BLEND_CNTL_ENABLE_BLEND__MASK;
3192 pipeline->rb_blend_cntl_mask &= ~A6XX_RB_BLEND_CNTL_ENABLE_BLEND__MASK;
3193 pipeline->dynamic_state_mask |= BIT(TU_DYNAMIC_STATE_BLEND);
3199 pipeline->dynamic_state_mask |= BIT(TU_DYNAMIC_STATE_COLOR_WRITE_ENABLE);
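
The parse_dynamic fragments above (source lines 3092-3199) all follow one pattern: each hardware register the pipeline bakes gets a companion *_mask that starts at ~0u, and declaring a piece of state dynamic clears the bits that state owns while recording it in dynamic_state_mask. The stored register value is then ANDed with the mask (see source lines 2246 and 2264-2278), so at draw time the dynamically supplied bits can be ORed in without disturbing the static ones. A self-contained sketch of that scheme, using an invented register layout rather than any real a6xx field definitions:

    #include <stdint.h>

    /* Invented bitfields of a single made-up register. */
    #define DEMO_REG_LINEWIDTH_MASK 0x0000ff00u
    #define DEMO_REG_CULL_MASK      0x00000003u

    enum demo_dynamic { DEMO_DYN_LINE_WIDTH, DEMO_DYN_CULL_MODE };

    struct demo_pipe {
       uint32_t reg_value;          /* static bits, already ANDed with reg_mask */
       uint32_t reg_mask;           /* bits owned by the pipeline */
       uint32_t dynamic_state_mask; /* which states the command buffer supplies */
    };

    void
    demo_parse_dynamic(struct demo_pipe *p, const enum demo_dynamic *dyn, unsigned n)
    {
       p->reg_mask = ~0u;
       for (unsigned i = 0; i < n; i++) {
          switch (dyn[i]) {
          case DEMO_DYN_LINE_WIDTH:
             p->reg_mask &= ~DEMO_REG_LINEWIDTH_MASK;
             break;
          case DEMO_DYN_CULL_MODE:
             p->reg_mask &= ~DEMO_REG_CULL_MASK;
             break;
          }
          p->dynamic_state_mask |= 1u << dyn[i];
       }
    }

    void
    demo_store_static(struct demo_pipe *p, uint32_t full_static_value)
    {
       p->reg_value = full_static_value & p->reg_mask;   /* drop dynamic fields */
    }

    /* At draw time, only the bits the pipeline does not own come from the
     * dynamically set value. */
    uint32_t
    demo_resolve(const struct demo_pipe *p, uint32_t dynamic_value)
    {
       return p->reg_value | (dynamic_value & ~p->reg_mask);
    }

This mirrors, for example, how gras_su_cntl_mask loses A6XX_GRAS_SU_CNTL_LINEHALFWIDTH__MASK when line width is dynamic (source line 3109).
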
3220 struct tu_pipeline *pipeline)
3235 tu_cs_begin_sub_stream(&pipeline->cs, 512, &prog_cs);
3237 pipeline->program.config_state = tu_cs_end_draw_state(&pipeline->cs, &prog_cs);
3239 tu_cs_begin_sub_stream(&pipeline->cs, 512 + builder->additional_cs_reserve_size, &prog_cs);
3240 tu6_emit_program(&prog_cs, builder, false, pipeline);
3241 pipeline->program.state = tu_cs_end_draw_state(&pipeline->cs, &prog_cs);
3243 tu_cs_begin_sub_stream(&pipeline->cs, 512 + builder->additional_cs_reserve_size, &prog_cs);
3244 tu6_emit_program(&prog_cs, builder, true, pipeline);
3245 pipeline->program.binning_state = tu_cs_end_draw_state(&pipeline->cs, &prog_cs);
3251 pipeline->active_stages = stages;
3257 tu_pipeline_set_linkage(&pipeline->program.link[i],
3265 struct tu_pipeline *pipeline)
3274 pipeline->num_vbs =
3275 MAX2(pipeline->num_vbs, vi_info->pVertexBindingDescriptions[i].binding + 1);
3278 tu6_emit_vertex_input(pipeline, &pipeline->vi.state, vs, vi_info);
3280 tu6_emit_vertex_input(pipeline, &pipeline->vi.binning_state, bs, vi_info);
3285 struct tu_pipeline *pipeline)
3290 pipeline->ia.primtype = tu6_primtype(ia_info->topology);
3291 pipeline->ia.primitive_restart = ia_info->primitiveRestartEnable;
3295 tu_pipeline_static_state(struct tu_pipeline *pipeline, struct tu_cs *cs,
3298 assert(id < ARRAY_SIZE(pipeline->dynamic_state));
3300 if (pipeline->dynamic_state_mask & BIT(id))
3303 pipeline->dynamic_state[id] = tu_cs_draw_state(&pipeline->cs, cs, size);
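
tu_pipeline_static_state (source lines 3295-3303) is the gate for the other half of that split: a baked draw state is only recorded when the corresponding bit is absent from dynamic_state_mask. Roughly, with an invented type:

    #include <stdbool.h>
    #include <stdint.h>

    struct demo_pipe_gate { uint32_t dynamic_state_mask; };   /* invented */

    /* Returns true when the pipeline should bake a draw state for `id`;
     * false means the command buffer will emit it when the state is set. */
    bool
    demo_static_state(const struct demo_pipe_gate *p, uint32_t id)
    {
       return !(p->dynamic_state_mask & (1u << id));
    }
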
3309 struct tu_pipeline *pipeline)
3311 if (!(pipeline->active_stages & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) ||
3312 !(pipeline->active_stages & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))
3318 assert(pipeline->ia.primtype == DI_PT_PATCHES0);
3320 pipeline->ia.primtype += tess_info->patchControlPoints;
3323 pipeline->tess.upper_left_domain_origin = !domain_info ||
3326 pipeline->tess.param_stride = hs->output_size * 4;
3331 struct tu_pipeline *pipeline)
3337 * pipeline has rasterization disabled."
3348 pipeline->z_negative_one_to_one = depth_clip_info ? depth_clip_info->negativeOneToOne : false;
3352 if (tu_pipeline_static_state(pipeline, &cs, VK_DYNAMIC_STATE_VIEWPORT, 8 + 10 * vp_info->viewportCount))
3353 tu6_emit_viewport(&cs, vp_info->pViewports, vp_info->viewportCount, pipeline->z_negative_one_to_one);
3355 if (tu_pipeline_static_state(pipeline, &cs, VK_DYNAMIC_STATE_SCISSOR, 1 + 2 * vp_info->scissorCount))
3361 struct tu_pipeline *pipeline)
3375 pipeline->line_mode = RECTANGULAR;
3377 if (tu6_primtype_line(pipeline->ia.primtype) ||
3378 (tu6_primtype_patches(pipeline->ia.primtype) &&
3379 pipeline->tess.patch_type == IR3_TESS_ISOLINES)) {
3386 pipeline->line_mode = BRESENHAM;
3394 pipeline->rast_state = tu_cs_draw_state(&pipeline->cs, &cs, cs_size);
3402 .zero_gb_scale_z = pipeline->z_negative_one_to_one ? 0 : 1,
3427 tu6_emit_msaa(&cs, builder->samples, pipeline->line_mode);
3434 pipeline->pc_raster_cntl = A6XX_PC_RASTER_CNTL_STREAM(stream);
3435 pipeline->vpc_unknown_9107 = 0;
3437 pipeline->pc_raster_cntl |= A6XX_PC_RASTER_CNTL_DISCARD;
3438 pipeline->vpc_unknown_9107 |= A6XX_VPC_UNKNOWN_9107_RASTER_DISCARD;
3441 if (tu_pipeline_static_state(pipeline, &cs, TU_DYNAMIC_STATE_RASTERIZER_DISCARD, 4)) {
3442 tu_cs_emit_regs(&cs, A6XX_PC_RASTER_CNTL(.dword = pipeline->pc_raster_cntl));
3443 tu_cs_emit_regs(&cs, A6XX_VPC_UNKNOWN_9107(.dword = pipeline->vpc_unknown_9107));
3446 pipeline->gras_su_cntl =
3447 tu6_gras_su_cntl(rast_info, pipeline->line_mode, builder->multiview_mask != 0);
3449 if (tu_pipeline_static_state(pipeline, &cs, TU_DYNAMIC_STATE_GRAS_SU_CNTL, 2))
3450 tu_cs_emit_regs(&cs, A6XX_GRAS_SU_CNTL(.dword = pipeline->gras_su_cntl));
3452 if (tu_pipeline_static_state(pipeline, &cs, VK_DYNAMIC_STATE_DEPTH_BIAS, 4)) {
3460 pipeline->provoking_vertex_last = provoking_vtx_state &&
3466 struct tu_pipeline *pipeline)
3472 * the pipeline has rasterization disabled or if the subpass of the
3473 * render pass the pipeline is created against does not use a
3504 pipeline->depth_cpp_per_sample = util_format_get_component_bits(
3508 * to 0 when this pipeline is used, as enabling depth test when there
3511 if (pipeline->dynamic_state_mask & BIT(TU_DYNAMIC_STATE_RB_DEPTH_CNTL))
3512 pipeline->rb_depth_cntl_disable = true;
3536 pipeline->stencil_cpp_per_sample = util_format_get_component_bits(
3540 if (tu_pipeline_static_state(pipeline, &cs, TU_DYNAMIC_STATE_RB_DEPTH_CNTL, 2)) {
3544 pipeline->rb_depth_cntl = rb_depth_cntl;
3546 if (tu_pipeline_static_state(pipeline, &cs, TU_DYNAMIC_STATE_RB_STENCIL_CNTL, 2)) {
3550 pipeline->rb_stencil_cntl = rb_stencil_cntl;
3556 if (tu_pipeline_static_state(pipeline, &cs, VK_DYNAMIC_STATE_DEPTH_BOUNDS, 3)) {
3562 if (tu_pipeline_static_state(pipeline, &cs, VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, 2)) {
3567 if (tu_pipeline_static_state(pipeline, &cs, VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, 2)) {
3568 update_stencil_mask(&pipeline->stencil_wrmask, VK_STENCIL_FACE_FRONT_BIT, ds_info->front.writeMask);
3569 update_stencil_mask(&pipeline->stencil_wrmask, VK_STENCIL_FACE_BACK_BIT, ds_info->back.writeMask);
3570 tu_cs_emit_regs(&cs, A6XX_RB_STENCILWRMASK(.dword = pipeline->stencil_wrmask));
3573 if (tu_pipeline_static_state(pipeline, &cs, VK_DYNAMIC_STATE_STENCIL_REFERENCE, 2)) {
3581 pipeline->lrz.force_disable_mask |= TU_LRZ_FORCE_DISABLE_WRITE;
3584 pipeline->lrz.force_disable_mask = TU_LRZ_FORCE_DISABLE_LRZ;
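
The stencil write-mask calls at source lines 3568-3570 pack the front- and back-face masks into the single dword that RB_STENCILWRMASK consumes. A sketch of that packing, assuming front in the low byte and back in the high byte; the actual a6xx field layout should be taken from the register definitions, not from this example:

    #include <stdint.h>

    #define DEMO_FACE_FRONT 0x1u
    #define DEMO_FACE_BACK  0x2u

    void
    demo_update_stencil_mask(uint32_t *packed, uint32_t faces, uint32_t mask)
    {
       if (faces & DEMO_FACE_FRONT)
          *packed = (*packed & ~0x00ffu) | (mask & 0xffu);         /* low byte: front */
       if (faces & DEMO_FACE_BACK)
          *packed = (*packed & ~0xff00u) | ((mask & 0xffu) << 8);  /* high byte: back */
    }
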
3591 struct tu_pipeline_builder *builder, struct tu_pipeline *pipeline)
3596 * VkPipelineMultisampleStateCreateInfo, and is ignored if the pipeline
3603 * pipeline has rasterization disabled or if the subpass of the render
3604 * pass the pipeline is created against does not use any color
3620 tu6_emit_rb_mrt_controls(pipeline, blend_info,
3622 &pipeline->rop_reads_dst,
3623 &pipeline->color_bandwidth_per_sample);
3626 pipeline->rop_reads_dst ? pipeline->color_write_enable : pipeline->blend_enable;
3627 tu6_emit_blend_control(pipeline, blend_enable_mask,
3630 if (tu_pipeline_static_state(pipeline, &cs, TU_DYNAMIC_STATE_BLEND,
3632 tu6_emit_blend(&cs, pipeline);
3646 pipeline->lrz.force_disable_mask |= TU_LRZ_FORCE_DISABLE_WRITE;
3658 !(pipeline->color_write_enable & BIT(i)))) {
3659 pipeline->lrz.force_disable_mask |= TU_LRZ_FORCE_DISABLE_WRITE;
3663 if (tu_pipeline_static_state(pipeline, &cs, VK_DYNAMIC_STATE_BLEND_CONSTANTS, 5)) {
3675 if (tu_pipeline_static_state(pipeline, &cs, TU_DYNAMIC_STATE_SAMPLE_LOCATIONS,
3683 struct tu_pipeline_builder *builder, struct tu_pipeline *pipeline)
3688 pipeline->subpass_feedback_loop_ds = builder->subpass_feedback_loop_ds;
3697 pipeline->raster_order_attachment_access =
3703 pipeline->raster_order_attachment_access |=
3710 pipeline->raster_order_attachment_access = true;
3719 if (pipeline->raster_order_attachment_access) {
3745 pipeline->prim_order_state_gmem = tu_cs_draw_state(&pipeline->cs, &cs, 2);
3750 pipeline->prim_order_state_sysmem = tu_cs_draw_state(&pipeline->cs, &cs, 2);
3757 tu_pipeline_finish(struct tu_pipeline *pipeline,
3761 tu_cs_finish(&pipeline->cs);
3763 tu_suballoc_bo_free(&dev->pipeline_suballoc, &pipeline->bo);
3766 if (pipeline->pvtmem_bo)
3767 tu_bo_finish(dev, pipeline->pvtmem_bo);
3769 ralloc_free(pipeline->executables_mem_ctx);
3774 struct tu_pipeline **pipeline)
3778 *pipeline = vk_object_zalloc(&builder->device->vk, builder->alloc,
3779 sizeof(**pipeline), VK_OBJECT_TYPE_PIPELINE);
3780 if (!*pipeline)
3783 (*pipeline)->executables_mem_ctx = ralloc_context(NULL);
3784 util_dynarray_init(&(*pipeline)->executables, (*pipeline)->executables_mem_ctx);
3787 result = tu_pipeline_builder_compile_shaders(builder, *pipeline);
3789 vk_object_free(&builder->device->vk, builder->alloc, *pipeline);
3793 result = tu_pipeline_allocate_cs(builder->device, *pipeline,
3796 vk_object_free(&builder->device->vk, builder->alloc, *pipeline);
3802 tu_upload_variant(*pipeline, builder->shaders->variants[i]);
3805 tu_upload_variant(*pipeline, builder->binning_variant);
3828 result = tu_setup_pvtmem(builder->device, *pipeline, &builder->pvtmem,
3831 vk_object_free(&builder->device->vk, builder->alloc, *pipeline);
3835 tu_pipeline_builder_parse_dynamic(builder, *pipeline);
3836 tu_pipeline_builder_parse_shader_stages(builder, *pipeline);
3837 tu_pipeline_builder_parse_vertex_input(builder, *pipeline);
3838 tu_pipeline_builder_parse_input_assembly(builder, *pipeline);
3839 tu_pipeline_builder_parse_tessellation(builder, *pipeline);
3840 tu_pipeline_builder_parse_viewport(builder, *pipeline);
3841 tu_pipeline_builder_parse_rasterization(builder, *pipeline);
3842 tu_pipeline_builder_parse_depth_stencil(builder, *pipeline);
3843 tu_pipeline_builder_parse_multisample_and_color_blend(builder, *pipeline);
3844 tu_pipeline_builder_parse_rasterization_order(builder, *pipeline);
3845 tu6_emit_load_state(*pipeline, builder->layout, false);
3905 /* We don't know with dynamic rendering whether the pipeline will be
4014 struct tu_pipeline *pipeline = NULL;
4015 VkResult result = tu_pipeline_builder_build(&builder, &pipeline);
4019 *pPipeline = tu_pipeline_to_handle(pipeline);
4073 struct tu_pipeline *pipeline;
4086 pipeline = vk_object_zalloc(&dev->vk, pAllocator, sizeof(*pipeline),
4088 if (!pipeline)
4091 pipeline->executables_mem_ctx = ralloc_context(NULL);
4092 util_dynarray_init(&pipeline->executables, pipeline->executables_mem_ctx);
4121 pipeline->shared_consts = (struct tu_push_constant_range) {
4142 nir_shader_as_str(nir, pipeline->executables_mem_ctx) : NULL;
4184 pipeline->active_desc_sets = compiled->active_desc_sets;
4188 tu_pipeline_set_linkage(&pipeline->program.link[MESA_SHADER_COMPUTE],
4191 result = tu_pipeline_allocate_cs(dev, pipeline, layout, NULL, v);
4195 uint64_t shader_iova = tu_upload_variant(pipeline, v);
4198 tu_setup_pvtmem(dev, pipeline, &pvtmem, v->pvtmem_size, v->pvtmem_per_wave);
4201 pipeline->compute.local_size[i] = v->local_size[i];
4203 pipeline->compute.subgroup_size = v->info.double_threadsize ? 128 : 64;
4207 tu_cs_begin_sub_stream(&pipeline->cs, 64 + additional_reserve_size, &prog_cs);
4209 pipeline->program.state = tu_cs_end_draw_state(&pipeline->cs, &prog_cs);
4211 tu6_emit_load_state(pipeline, layout, true);
4213 tu_append_executable(pipeline, v, nir_initial_disasm);
4218 *pPipeline = tu_pipeline_to_handle(pipeline);
4228 vk_object_free(&dev->vk, pAllocator, pipeline);
4270 TU_FROM_HANDLE(tu_pipeline, pipeline, _pipeline);
4275 tu_pipeline_finish(pipeline, dev, pAllocator);
4276 vk_object_free(&dev->vk, pAllocator, pipeline);
4286 tu_pipeline_get_executable(struct tu_pipeline *pipeline, uint32_t index)
4288 assert(index < util_dynarray_num_elements(&pipeline->executables,
4291 &pipeline->executables, struct tu_pipeline_executable, index);
4302 TU_FROM_HANDLE(tu_pipeline, pipeline, pPipelineInfo->pipeline);
4306 util_dynarray_foreach (&pipeline->executables, struct tu_pipeline_executable, exe) {
4333 TU_FROM_HANDLE(tu_pipeline, pipeline, pExecutableInfo->pipeline);
4338 tu_pipeline_get_executable(pipeline, pExecutableInfo->executableIndex);
4504 TU_FROM_HANDLE(tu_pipeline, pipeline, pExecutableInfo->pipeline);
4510 tu_pipeline_get_executable(pipeline, pExecutableInfo->executableIndex);