Lines matching refs:ctx (Mesa zink driver, zink_context.c)
77 check_resource_for_batch_ref(struct zink_context *ctx, struct zink_resource *res)
87 zink_batch_reference_resource_rw(&ctx->batch, res, !!res->obj->bo->writes);
89 zink_batch_reference_resource(&ctx->batch, res);
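
The two reference calls above (lines 87/89) pick between the read/write and read-only batch-reference paths based on whether the buffer has an in-flight writer. A minimal sketch of that decision; the types and helper names here are stand-ins, not the real zink API:

#include <stdbool.h>

struct bo  { void *writes; };   /* non-NULL while some batch is writing */
struct obj { struct bo *bo; };
struct res { struct obj *obj; };
struct batch;

/* assumed helpers mirroring zink_batch_reference_resource{_rw,} */
void ref_resource_rw(struct batch *b, struct res *r, bool write);
void ref_resource(struct batch *b, struct res *r);

static void
keep_alive_for_batch(struct batch *b, struct res *r)
{
   /* mirrors !!res->obj->bo->writes: rw reference only for an active writer */
   if (r->obj->bo->writes)
      ref_resource_rw(b, r, true);
   else
      ref_resource(b, r);
}
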
96 struct zink_context *ctx = zink_context(pctx);
101 if (ctx->batch.state && !screen->device_lost) {
108 for (unsigned i = 0; i < ARRAY_SIZE(ctx->program_cache); i++) {
109 hash_table_foreach(&ctx->program_cache[i], entry) {
112 screen->descriptor_program_deinit(ctx, pg);
115 hash_table_foreach(&ctx->compute_program_cache, entry) {
118 screen->descriptor_program_deinit(ctx, pg);
121 if (ctx->blitter)
122 util_blitter_destroy(ctx->blitter);
123 for (unsigned i = 0; i < ctx->fb_state.nr_cbufs; i++)
124 pipe_surface_release(&ctx->base, &ctx->fb_state.cbufs[i]);
125 pipe_surface_release(&ctx->base, &ctx->fb_state.zsbuf);
127 pipe_resource_reference(&ctx->dummy_vertex_buffer, NULL);
128 pipe_resource_reference(&ctx->dummy_xfb_buffer, NULL);
130 for (unsigned i = 0; i < ARRAY_SIZE(ctx->dummy_surface); i++)
131 pipe_surface_release(&ctx->base, &ctx->dummy_surface[i]);
132 zink_buffer_view_reference(screen, &ctx->dummy_bufferview, NULL);
134 if (ctx->dd)
135 zink_descriptors_deinit_bindless(ctx);
137 if (ctx->batch.state) {
138 zink_clear_batch_state(ctx, ctx->batch.state);
139 zink_batch_state_destroy(screen, ctx->batch.state);
141 struct zink_batch_state *bs = ctx->batch_states;
144 zink_clear_batch_state(ctx, bs);
148 util_dynarray_foreach(&ctx->free_batch_states, struct zink_batch_state*, bs) {
149 zink_clear_batch_state(ctx, *bs);
154 util_idalloc_fini(&ctx->di.bindless[i].tex_slots);
155 util_idalloc_fini(&ctx->di.bindless[i].img_slots);
156 free(ctx->di.bindless[i].buffer_infos);
157 free(ctx->di.bindless[i].img_infos);
158 util_dynarray_fini(&ctx->di.bindless[i].updates);
159 util_dynarray_fini(&ctx->di.bindless[i].resident);
162 hash_table_foreach(&ctx->framebuffer_cache, he)
165 hash_table_foreach(ctx->render_pass_cache, he)
168 zink_context_destroy_query_pools(ctx);
171 slab_destroy_child(&ctx->transfer_pool);
172 for (unsigned i = 0; i < ARRAY_SIZE(ctx->program_cache); i++)
173 _mesa_hash_table_clear(&ctx->program_cache[i], NULL);
174 _mesa_hash_table_clear(&ctx->compute_program_cache, NULL);
175 _mesa_hash_table_destroy(ctx->render_pass_cache, NULL);
176 slab_destroy_child(&ctx->transfer_pool_unsync);
178 if (ctx->dd)
179 screen->descriptors_deinit(ctx);
181 zink_descriptor_layouts_deinit(ctx);
183 if (!(ctx->flags & ZINK_CONTEXT_COPY_ONLY))
186 ralloc_free(ctx);
190 check_device_lost(struct zink_context *ctx)
192 if (!zink_screen(ctx->base.screen)->device_lost || ctx->is_device_lost)
195 if (ctx->reset.reset)
196 ctx->reset.reset(ctx->reset.data, PIPE_GUILTY_CONTEXT_RESET);
197 ctx->is_device_lost = true;
203 struct zink_context *ctx = zink_context(pctx);
207 if (ctx->is_device_lost) {
214 if (ctx->reset.reset)
215 ctx->reset.reset(ctx->reset.data, status);
225 struct zink_context *ctx = zink_context(pctx);
226 bool had_reset = !!ctx->reset.reset;
229 ctx->reset = *cb;
231 memset(&ctx->reset, 0, sizeof(ctx->reset));
233 bool have_reset = !!ctx->reset.reset;
246 struct zink_context *ctx = zink_context(pctx);
250 util_set_thread_affinity(zink_screen(ctx->base.screen)->flush_queue.threads[0],
483 get_layout_for_binding(const struct zink_context *ctx, struct zink_resource *res, enum zink_descriptor_type type, bool is_compute)
489 return zink_descriptor_util_image_layout_eval(ctx, res, is_compute);
499 get_imageview_for_binding(struct zink_context *ctx, enum pipe_shader_type stage, enum zink_descriptor_type type, unsigned idx)
503 struct zink_sampler_view *sampler_view = zink_sampler_view(ctx->sampler_views[stage][idx]);
507 return (ctx->di.emulate_nonseamless[stage] & ctx->di.cubes[stage] & BITFIELD_BIT(idx)) ?
512 struct zink_image_view *image_view = &ctx->image_views[stage][idx];
523 get_bufferview_for_binding(struct zink_context *ctx, enum pipe_shader_type stage, enum zink_descriptor_type type, unsigned idx)
527 struct zink_sampler_view *sampler_view = zink_sampler_view(ctx->sampler_views[stage][idx]);
531 struct zink_image_view *image_view = &ctx->image_views[stage][idx];
542 update_descriptor_state_ubo(struct zink_context *ctx, enum pipe_shader_type shader, unsigned slot, struct zink_resource *res)
544 struct zink_screen *screen = zink_screen(ctx->base.screen);
547 ctx->di.descriptor_res[type][shader][slot] = res;
548 ctx->di.ubos[shader][slot].offset = ctx->ubos[shader][slot].buffer_offset;
550 ctx->di.ubos[shader][slot].buffer = res->obj->buffer;
551 ctx->di.ubos[shader][slot].range = ctx->ubos[shader][slot].buffer_size;
552 assert(ctx->di.ubos[shader][slot].range <= screen->info.props.limits.maxUniformBufferRange);
554 VkBuffer null_buffer = zink_resource(ctx->dummy_vertex_buffer)->obj->buffer;
555 ctx->di.ubos[shader][slot].buffer = have_null_descriptors ? VK_NULL_HANDLE : null_buffer;
556 ctx->di.ubos[shader][slot].range = VK_WHOLE_SIZE;
560 ctx->di.push_valid |= BITFIELD64_BIT(shader);
562 ctx->di.push_valid &= ~BITFIELD64_BIT(shader);
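
Lines 554-556 are the null-descriptor fallback: when a UBO slot is unbound, drivers exposing VK_EXT_robustness2's nullDescriptor get VK_NULL_HANDLE, and everything else substitutes the dummy buffer. The same fallback repeats for SSBOs just below. A condensed sketch, assuming only the Vulkan headers; fill_buffer_slot is illustrative:

#include <stdbool.h>
#include <vulkan/vulkan.h>

static void
fill_buffer_slot(VkDescriptorBufferInfo *info, VkBuffer bound,
                 VkDeviceSize size, VkBuffer dummy, bool have_null_descriptors)
{
   if (bound) {
      info->buffer = bound;
      info->range = size;
   } else {
      /* robustness2 guarantees zero reads from a null binding */
      info->buffer = have_null_descriptors ? VK_NULL_HANDLE : dummy;
      info->range = VK_WHOLE_SIZE;
   }
}
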
568 update_descriptor_state_ssbo(struct zink_context *ctx, enum pipe_shader_type shader, unsigned slot, struct zink_resource *res)
570 struct zink_screen *screen = zink_screen(ctx->base.screen);
573 ctx->di.descriptor_res[type][shader][slot] = res;
574 ctx->di.ssbos[shader][slot].offset = ctx->ssbos[shader][slot].buffer_offset;
576 ctx->di.ssbos[shader][slot].buffer = res->obj->buffer;
577 ctx->di.ssbos[shader][slot].range = ctx->ssbos[shader][slot].buffer_size;
579 VkBuffer null_buffer = zink_resource(ctx->dummy_vertex_buffer)->obj->buffer;
580 ctx->di.ssbos[shader][slot].buffer = have_null_descriptors ? VK_NULL_HANDLE : null_buffer;
581 ctx->di.ssbos[shader][slot].range = VK_WHOLE_SIZE;
587 update_descriptor_state_sampler(struct zink_context *ctx, enum pipe_shader_type shader, unsigned slot, struct zink_resource *res)
589 struct zink_screen *screen = zink_screen(ctx->base.screen);
592 ctx->di.descriptor_res[type][shader][slot] = res;
595 struct zink_buffer_view *bv = get_bufferview_for_binding(ctx, shader, type, slot);
596 ctx->di.tbos[shader][slot] = bv->buffer_view;
597 ctx->di.sampler_surfaces[shader][slot].bufferview = bv;
598 ctx->di.sampler_surfaces[shader][slot].is_buffer = true;
600 struct zink_surface *surface = get_imageview_for_binding(ctx, shader, type, slot);
601 ctx->di.textures[shader][slot].imageLayout = get_layout_for_binding(ctx, res, type, shader == PIPE_SHADER_COMPUTE);
602 ctx->di.textures[shader][slot].imageView = surface->image_view;
604 ctx->sampler_states[shader][slot] && ctx->sampler_states[shader][slot]->sampler_clamped) {
605 struct zink_sampler_state *state = ctx->sampler_states[shader][slot];
610 if (ctx->di.textures[shader][slot].sampler != sampler) {
611 screen->context_invalidate_descriptor_state(ctx, shader, ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW, slot, 1);
612 ctx->di.textures[shader][slot].sampler = sampler;
615 ctx->di.sampler_surfaces[shader][slot].surface = surface;
616 ctx->di.sampler_surfaces[shader][slot].is_buffer = false;
620 ctx->di.textures[shader][slot].imageView = VK_NULL_HANDLE;
621 ctx->di.textures[shader][slot].imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
622 ctx->di.tbos[shader][slot] = VK_NULL_HANDLE;
624 struct zink_surface *null_surface = zink_csurface(ctx->dummy_surface[0]);
625 struct zink_buffer_view *null_bufferview = ctx->dummy_bufferview;
626 ctx->di.textures[shader][slot].imageView = null_surface->image_view;
627 ctx->di.textures[shader][slot].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
628 ctx->di.tbos[shader][slot] = null_bufferview->buffer_view;
630 memset(&ctx->di.sampler_surfaces[shader][slot], 0, sizeof(ctx->di.sampler_surfaces[shader][slot]));
636 update_descriptor_state_image(struct zink_context *ctx, enum pipe_shader_type shader, unsigned slot, struct zink_resource *res)
638 struct zink_screen *screen = zink_screen(ctx->base.screen);
641 ctx->di.descriptor_res[type][shader][slot] = res;
644 struct zink_buffer_view *bv = get_bufferview_for_binding(ctx, shader, type, slot);
645 ctx->di.texel_images[shader][slot] = bv->buffer_view;
646 ctx->di.image_surfaces[shader][slot].bufferview = bv;
647 ctx->di.image_surfaces[shader][slot].is_buffer = true;
649 struct zink_surface *surface = get_imageview_for_binding(ctx, shader, type, slot);
650 ctx->di.images[shader][slot].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
651 ctx->di.images[shader][slot].imageView = surface->image_view;
652 ctx->di.image_surfaces[shader][slot].surface = surface;
653 ctx->di.image_surfaces[shader][slot].is_buffer = false;
657 memset(&ctx->di.images[shader][slot], 0, sizeof(ctx->di.images[shader][slot]));
658 ctx->di.texel_images[shader][slot] = VK_NULL_HANDLE;
660 struct zink_surface *null_surface = zink_csurface(ctx->dummy_surface[0]);
661 struct zink_buffer_view *null_bufferview = ctx->dummy_bufferview;
662 ctx->di.images[shader][slot].imageView = null_surface->image_view;
663 ctx->di.images[shader][slot].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
664 ctx->di.texel_images[shader][slot] = null_bufferview->buffer_view;
666 memset(&ctx->di.image_surfaces[shader][slot], 0, sizeof(ctx->di.image_surfaces[shader][slot]));
672 update_nonseamless_shader_key(struct zink_context *ctx, enum pipe_shader_type pstage)
676 mask = &ctx->compute_pipeline_state.key.base.nonseamless_cube_mask;
678 mask = &ctx->gfx_pipeline_state.shader_keys.key[pstage].base.nonseamless_cube_mask;
680 const uint32_t new_mask = ctx->di.emulate_nonseamless[pstage] & ctx->di.cubes[pstage];
682 ctx->dirty_shader_stages |= BITFIELD_BIT(pstage);
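
update_nonseamless_shader_key (lines 672-682) only dirties a stage when the effective mask changes, so seamless/non-seamless toggles that cancel out never force a shader variant rebuild. The shape of the check, with stand-in parameter names:

#include <stdint.h>

static void
update_nonseamless_mask(uint32_t *key_mask, uint32_t emulate_nonseamless,
                        uint32_t bound_cubes, uint32_t *dirty_stages,
                        uint32_t stage_bit)
{
   const uint32_t new_mask = emulate_nonseamless & bound_cubes;
   if (*key_mask != new_mask)
      *dirty_stages |= stage_bit;   /* trigger shader variant re-selection */
   *key_mask = new_mask;
}
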
693 struct zink_context *ctx = zink_context(pctx);
696 ctx->di.emulate_nonseamless[shader] &= ~mask;
699 if (ctx->sampler_states[shader][start_slot + i] != state)
700 zink_screen(pctx->screen)->context_invalidate_descriptor_state(ctx, shader, ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW, start_slot, 1);
702 if (ctx->sampler_states[shader][start_slot + i])
703 was_nonseamless = ctx->sampler_states[shader][start_slot + i]->emulate_nonseamless;
704 ctx->sampler_states[shader][start_slot + i] = state;
706 ctx->di.textures[shader][start_slot + i].sampler = state->sampler;
708 struct zink_surface *surface = get_imageview_for_binding(ctx, shader, ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW, start_slot + i);
712 ctx->di.textures[shader][start_slot + i].sampler = state->sampler_clamped;
714 zink_batch_usage_set(&state->batch_uses, ctx->batch.state);
719 ctx->di.emulate_nonseamless[shader] |= bit;
720 if (state->emulate_nonseamless != was_nonseamless && (ctx->di.cubes[shader] & bit)) {
721 struct zink_surface *surface = get_imageview_for_binding(ctx, shader, ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW, start_slot + i);
722 if (surface && ctx->di.image_surfaces[shader][start_slot + i].surface != surface) {
723 ctx->di.images[shader][start_slot + i].imageView = surface->image_view;
724 ctx->di.image_surfaces[shader][start_slot + i].surface = surface;
725 update_descriptor_state_sampler(ctx, shader, start_slot + i, zink_resource(surface->base.texture));
726 screen->context_invalidate_descriptor_state(ctx, shader, ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW, start_slot + i, 1);
730 ctx->di.textures[shader][start_slot + i].sampler = VK_NULL_HANDLE;
733 ctx->di.num_samplers[shader] = start_slot + num_samplers;
735 update_nonseamless_shader_key(ctx, shader);
779 create_bvci(struct zink_context *ctx, struct zink_resource *res, enum pipe_format format, uint32_t offset, uint32_t range)
781 struct zink_screen *screen = zink_screen(ctx->base.screen);
811 get_buffer_view(struct zink_context *ctx, struct zink_resource *res, VkBufferViewCreateInfo *bvci)
813 struct zink_screen *screen = zink_screen(ctx->base.screen);
890 struct zink_context *ctx = zink_context(pctx);
912 if (!zink_kopper_acquire(ctx, res, UINT64_MAX)) {
959 sampler_view->image_view = (struct zink_surface*)zink_get_surface(ctx, pres, &templ, &ivci);
962 sampler_view->cube_array = (struct zink_surface*)zink_get_surface(ctx, pres, &templ, &ivci);
966 VkBufferViewCreateInfo bvci = create_bvci(ctx, res, state->format, state->u.buf.offset, state->u.buf.size);
967 sampler_view->buffer_view = get_buffer_view(ctx, res, &bvci);
1013 zink_get_sample_position(struct pipe_context *ctx,
1019 assert(zink_screen(ctx->screen)->info.props.limits.standardSampleLocations);
1091 update_res_bind_count(struct zink_context *ctx, struct zink_resource *res, bool is_compute, bool decrement)
1096 _mesa_set_remove_key(ctx->need_barriers[is_compute], res);
1097 check_resource_for_batch_ref(ctx, res);
1103 update_existing_vbo(struct zink_context *ctx, unsigned slot)
1105 if (!ctx->vertex_buffers[slot].buffer.resource)
1107 struct zink_resource *res = zink_resource(ctx->vertex_buffers[slot].buffer.resource);
1114 update_res_bind_count(ctx, res, false, true);
1125 struct zink_context *ctx = zink_context(pctx);
1129 uint32_t enabled_buffers = ctx->gfx_pipeline_state.vertex_buffers_enabled_mask;
1137 struct pipe_vertex_buffer *ctx_vb = &ctx->vertex_buffers[start_slot + i];
1139 update_existing_vbo(ctx, start_slot + i);
1152 update_res_bind_count(ctx, res, false, false);
1155 zink_batch_resource_usage_set(&ctx->batch, res, false);
1157 zink_resource_buffer_barrier(ctx, res, VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
1166 update_existing_vbo(ctx, start_slot + i);
1167 pipe_resource_reference(&ctx->vertex_buffers[start_slot + i].buffer.resource, NULL);
1171 update_existing_vbo(ctx, start_slot + i);
1172 pipe_resource_reference(&ctx->vertex_buffers[start_slot + i].buffer.resource, NULL);
1175 ctx->vertex_state_changed = true;
1176 else if (!have_input_state && (stride_changed || ctx->gfx_pipeline_state.vertex_buffers_enabled_mask != enabled_buffers))
1177 ctx->vertex_state_changed = true;
1178 ctx->gfx_pipeline_state.vertex_buffers_enabled_mask = enabled_buffers;
1179 ctx->vertex_buffers_dirty = num_buffers > 0;
1182 assert(ctx->vertex_buffers[b].buffer.resource);
1192 struct zink_context *ctx = zink_context(pctx);
1195 ctx->vp_state.viewport_states[start_slot + i] = state[i];
1197 ctx->vp_state_changed = true;
1205 struct zink_context *ctx = zink_context(pctx);
1208 ctx->vp_state.scissor_states[start_slot + i] = states[i];
1209 ctx->scissor_changed = true;
1217 struct zink_context *ctx = (struct zink_context *)pctx;
1223 key = &ctx->compute_pipeline_state.key;
1225 key = &ctx->gfx_pipeline_state.shader_keys.key[shader];
1228 if (!(ctx->inlinable_uniforms_valid_mask & bit) ||
1231 ctx->dirty_shader_stages |= bit;
1232 ctx->inlinable_uniforms_valid_mask |= bit;
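
set_inlinable_constants (lines 1228-1232) uses the same change-detection idiom: re-inline and mark the stage dirty only when the valid bit is unset or the values actually differ. A self-contained sketch with hypothetical names:

#include <stdint.h>
#include <string.h>

static void
set_inlined_uniforms(uint32_t *stored, const uint32_t *incoming, unsigned count,
                     uint32_t *valid_mask, uint32_t *dirty_stages,
                     uint32_t stage_bit)
{
   if (!(*valid_mask & stage_bit) ||
       memcmp(stored, incoming, count * sizeof(uint32_t))) {
      memcpy(stored, incoming, count * sizeof(uint32_t));
      *dirty_stages |= stage_bit;   /* recompile with the new constant values */
      *valid_mask |= stage_bit;
   }
}
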
1252 unbind_ubo(struct zink_context *ctx, struct zink_resource *res, enum pipe_shader_type pstage, unsigned slot)
1261 update_res_bind_count(ctx, res, pstage == PIPE_SHADER_COMPUTE, true);
1265 invalidate_inlined_uniforms(struct zink_context *ctx, enum pipe_shader_type pstage)
1268 if (!(ctx->inlinable_uniforms_valid_mask & bit))
1270 ctx->inlinable_uniforms_valid_mask &= ~bit;
1271 ctx->dirty_shader_stages |= bit;
1275 struct zink_shader_key *key = &ctx->gfx_pipeline_state.shader_keys.key[pstage];
1285 struct zink_context *ctx = zink_context(pctx);
1288 struct zink_resource *res = zink_resource(ctx->ubos[shader][index].buffer);
1294 u_upload_data(ctx->base.const_uploader, 0, cb->buffer_size,
1301 unbind_ubo(ctx, res, shader, index);
1306 update_res_bind_count(ctx, new_res, shader == PIPE_SHADER_COMPUTE, false);
1308 zink_batch_resource_usage_set(&ctx->batch, new_res, false);
1309 zink_resource_buffer_barrier(ctx, new_res, VK_ACCESS_UNIFORM_READ_BIT,
1313 update |= ((index || zink_descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY) && ctx->ubos[shader][index].buffer_offset != offset) ||
1315 ctx->ubos[shader][index].buffer_size != cb->buffer_size;
1318 pipe_resource_reference(&ctx->ubos[shader][index].buffer, NULL);
1319 ctx->ubos[shader][index].buffer = buffer;
1321 pipe_resource_reference(&ctx->ubos[shader][index].buffer, buffer);
1323 ctx->ubos[shader][index].buffer_offset = offset;
1324 ctx->ubos[shader][index].buffer_size = cb->buffer_size;
1325 ctx->ubos[shader][index].user_buffer = NULL;
1330 if (index + 1 >= ctx->di.num_ubos[shader])
1331 ctx->di.num_ubos[shader] = index + 1;
1332 update_descriptor_state_ubo(ctx, shader, index, new_res);
1334 ctx->ubos[shader][index].buffer_offset = 0;
1335 ctx->ubos[shader][index].buffer_size = 0;
1336 ctx->ubos[shader][index].user_buffer = NULL;
1338 unbind_ubo(ctx, res, shader, index);
1339 update_descriptor_state_ubo(ctx, shader, index, NULL);
1341 update = !!ctx->ubos[shader][index].buffer;
1343 pipe_resource_reference(&ctx->ubos[shader][index].buffer, NULL);
1344 if (ctx->di.num_ubos[shader] == index + 1)
1345 ctx->di.num_ubos[shader]--;
1349 invalidate_inlined_uniforms(ctx, shader);
1353 zink_screen(pctx->screen)->context_invalidate_descriptor_state(ctx, shader, ZINK_DESCRIPTOR_TYPE_UBO, index, 1);
1371 unbind_ssbo(struct zink_context *ctx, struct zink_resource *res, enum pipe_shader_type pstage, unsigned slot, bool writable)
1379 update_res_bind_count(ctx, res, pstage == PIPE_SHADER_COMPUTE, true);
1393 struct zink_context *ctx = zink_context(pctx);
1398 unsigned old_writable_mask = ctx->writable_ssbos[p_stage];
1399 ctx->writable_ssbos[p_stage] &= ~modified_bits;
1400 ctx->writable_ssbos[p_stage] |= writable_bitmask << start_slot;
1403 struct pipe_shader_buffer *ssbo = &ctx->ssbos[p_stage][start_slot + i];
1409 unbind_ssbo(ctx, res, p_stage, i, was_writable);
1413 update_res_bind_count(ctx, new_res, p_stage == PIPE_SHADER_COMPUTE, false);
1416 if (ctx->writable_ssbos[p_stage] & BITFIELD64_BIT(start_slot + i)) {
1422 zink_batch_resource_usage_set(&ctx->batch, new_res, access & VK_ACCESS_SHADER_WRITE_BIT);
1427 zink_resource_buffer_barrier(ctx, new_res, access,
1431 update_descriptor_state_ssbo(ctx, p_stage, start_slot + i, new_res);
1441 unbind_ssbo(ctx, res, p_stage, i, was_writable);
1442 update_descriptor_state_ssbo(ctx, p_stage, start_slot + i, NULL);
1447 if (start_slot + count >= ctx->di.num_ssbos[p_stage])
1448 ctx->di.num_ssbos[p_stage] = max_slot + 1;
1450 zink_screen(pctx->screen)->context_invalidate_descriptor_state(ctx, p_stage, ZINK_DESCRIPTOR_TYPE_SSBO, start_slot, count);
1454 update_binds_for_samplerviews(struct zink_context *ctx, struct zink_resource *res, bool is_compute)
1456 VkImageLayout layout = get_layout_for_binding(ctx, res, ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW, is_compute);
1459 if (ctx->di.textures[PIPE_SHADER_COMPUTE][slot].imageLayout != layout) {
1460 update_descriptor_state_sampler(ctx, PIPE_SHADER_COMPUTE, slot, res);
1461 zink_screen(ctx->base.screen)->context_invalidate_descriptor_state(ctx, PIPE_SHADER_COMPUTE, ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW, slot, 1);
1467 if (ctx->di.textures[i][slot].imageLayout != layout) {
1468 update_descriptor_state_sampler(ctx, i, slot, res);
1469 zink_screen(ctx->base.screen)->context_invalidate_descriptor_state(ctx, i, ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW, slot, 1);
1477 flush_pending_clears(struct zink_context *ctx, struct zink_resource *res)
1479 if (res->fb_binds && ctx->clears_enabled)
1480 zink_fb_clears_apply(ctx, &res->base.b);
1484 unbind_shader_image_counts(struct zink_context *ctx, struct zink_resource *res, bool is_compute, bool writable)
1486 update_res_bind_count(ctx, res, is_compute, true);
1492 update_binds_for_samplerviews(ctx, res, is_compute);
1496 check_for_layout_update(struct zink_context *ctx, struct zink_resource *res, bool is_compute)
1498 VkImageLayout layout = res->bind_count[is_compute] ? zink_descriptor_util_image_layout_eval(ctx, res, is_compute) : VK_IMAGE_LAYOUT_UNDEFINED;
1499 VkImageLayout other_layout = res->bind_count[!is_compute] ? zink_descriptor_util_image_layout_eval(ctx, res, !is_compute) : VK_IMAGE_LAYOUT_UNDEFINED;
1501 _mesa_set_add(ctx->need_barriers[is_compute], res);
1503 _mesa_set_add(ctx->need_barriers[!is_compute], res);
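
check_for_layout_update (lines 1496-1503) evaluates the layout the resource needs for its gfx and compute bindings independently, and defers the transition by queueing the resource on the matching need_barriers set instead of emitting a barrier inline. Roughly, with stand-in types for the zink structs:

#include <stdbool.h>
#include <vulkan/vulkan.h>

struct set;
struct res { VkImageLayout layout; unsigned bind_count[2]; };
struct ctx { struct set *need_barriers[2]; };

/* assumed stand-ins for zink_descriptor_util_image_layout_eval/_mesa_set_add */
VkImageLayout eval_layout(struct ctx *c, struct res *r, bool is_compute);
void set_add(struct set *s, struct res *r);

static void
queue_layout_barriers(struct ctx *c, struct res *r, bool is_compute)
{
   VkImageLayout wanted = r->bind_count[is_compute]
                          ? eval_layout(c, r, is_compute) : VK_IMAGE_LAYOUT_UNDEFINED;
   VkImageLayout other = r->bind_count[!is_compute]
                         ? eval_layout(c, r, !is_compute) : VK_IMAGE_LAYOUT_UNDEFINED;
   if (wanted && r->layout != wanted)
      set_add(c->need_barriers[is_compute], r);
   if (other && r->layout != other)
      set_add(c->need_barriers[!is_compute], r);
}
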
1507 unbind_shader_image(struct zink_context *ctx, enum pipe_shader_type stage, unsigned slot)
1509 struct zink_image_view *image_view = &ctx->image_views[stage][slot];
1516 unbind_shader_image_counts(ctx, res, is_compute, image_view->base.access & PIPE_IMAGE_ACCESS_WRITE);
1524 zink_batch_reference_bufferview(&ctx->batch, image_view->buffer_view);
1525 zink_buffer_view_reference(zink_screen(ctx->base.screen), &image_view->buffer_view, NULL);
1529 check_for_layout_update(ctx, res, is_compute);
1531 zink_batch_reference_surface(&ctx->batch, image_view->surface);
1532 zink_surface_reference(zink_screen(ctx->base.screen), &image_view->surface, NULL);
1539 create_image_bufferview(struct zink_context *ctx, const struct pipe_image_view *view)
1542 VkBufferViewCreateInfo bvci = create_bvci(ctx, res, view->format, view->u.buf.offset, view->u.buf.size);
1543 struct zink_buffer_view *buffer_view = get_buffer_view(ctx, res, &bvci);
1552 finalize_image_bind(struct zink_context *ctx, struct zink_resource *res, bool is_compute)
1559 update_binds_for_samplerviews(ctx, res, is_compute);
1560 check_for_layout_update(ctx, res, is_compute);
1564 create_image_surface(struct zink_context *ctx, const struct pipe_image_view *view, bool is_compute)
1566 struct zink_screen *screen = zink_screen(ctx->base.screen);
1598 struct pipe_surface *psurf = zink_get_surface(ctx, view->resource, &tmpl, &ivci);
1603 flush_pending_clears(ctx, res);
1614 struct zink_context *ctx = zink_context(pctx);
1617 struct zink_image_view *image_view = &ctx->image_views[p_stage][start_slot + i];
1620 if (!zink_resource_object_init_storage(ctx, res)) {
1636 struct zink_buffer_view *bv = create_image_bufferview(ctx, &images[i]);
1639 update_res_bind_count(ctx, res, p_stage == PIPE_SHADER_COMPUTE, false);
1641 unbind_shader_image(ctx, p_stage, start_slot + i);
1644 zink_batch_usage_set(&image_view->buffer_view->batch_uses, ctx->batch.state);
1645 zink_resource_buffer_barrier(ctx, res, access,
1648 struct zink_surface *surface = create_image_surface(ctx, &images[i], p_stage == PIPE_SHADER_COMPUTE);
1652 update_res_bind_count(ctx, res, p_stage == PIPE_SHADER_COMPUTE, false);
1653 unbind_shader_image(ctx, p_stage, start_slot + i);
1656 finalize_image_bind(ctx, res, p_stage == PIPE_SHADER_COMPUTE);
1657 zink_batch_usage_set(&image_view->surface->batch_uses, ctx->batch.state);
1660 zink_batch_resource_usage_set(&ctx->batch, res,
1663 update_descriptor_state_image(ctx, p_stage, start_slot + i, res);
1672 unbind_shader_image(ctx, p_stage, start_slot + i);
1673 update_descriptor_state_image(ctx, p_stage, start_slot + i, NULL);
1677 update |= !!ctx->image_views[p_stage][start_slot + count + i].base.resource;
1678 unbind_shader_image(ctx, p_stage, start_slot + count + i);
1679 update_descriptor_state_image(ctx, p_stage, start_slot + count + i, NULL);
1681 ctx->di.num_images[p_stage] = start_slot + count;
1683 zink_screen(pctx->screen)->context_invalidate_descriptor_state(ctx, p_stage, ZINK_DESCRIPTOR_TYPE_IMAGE, start_slot, count);
1687 check_samplerview_for_batch_ref(struct zink_context *ctx, struct zink_sampler_view *sv)
1692 zink_batch_reference_sampler_view(&ctx->batch, sv);
1696 unbind_samplerview(struct zink_context *ctx, enum pipe_shader_type stage, unsigned slot)
1698 struct zink_sampler_view *sv = zink_sampler_view(ctx->sampler_views[stage][slot]);
1703 check_samplerview_for_batch_ref(ctx, sv);
1704 update_res_bind_count(ctx, res, stage == PIPE_SHADER_COMPUTE, true);
1724 struct zink_context *ctx = zink_context(pctx);
1728 ctx->di.cubes[shader_type] &= ~mask;
1733 struct zink_sampler_view *a = zink_sampler_view(ctx->sampler_views[shader_type][start_slot + i]);
1739 unbind_samplerview(ctx, shader_type, start_slot + i);
1740 update_res_bind_count(ctx, res, shader_type == PIPE_SHADER_COMPUTE, false);
1745 check_samplerview_for_batch_ref(ctx, a);
1755 struct zink_buffer_view *buffer_view = get_buffer_view(ctx, res, &bvci);
1758 zink_batch_reference_bufferview(&ctx->batch, b->buffer_view);
1759 zink_buffer_view_reference(zink_screen(ctx->base.screen), &b->buffer_view, NULL);
1763 zink_batch_usage_set(&b->buffer_view->batch_uses, ctx->batch.state);
1764 zink_resource_buffer_barrier(ctx, res, VK_ACCESS_SHADER_READ_BIT,
1772 zink_rebind_surface(ctx, &psurf);
1778 flush_pending_clears(ctx, res);
1780 ctx->di.cubes[shader_type] |= BITFIELD_BIT(start_slot + i);
1781 zink_batch_usage_set(&b->cube_array->batch_uses, ctx->batch.state);
1783 check_for_layout_update(ctx, res, shader_type == PIPE_SHADER_COMPUTE);
1784 zink_batch_usage_set(&b->image_view->batch_uses, ctx->batch.state);
1789 zink_batch_resource_usage_set(&ctx->batch, res, false);
1792 unbind_samplerview(ctx, shader_type, start_slot + i);
1796 pipe_sampler_view_reference(&ctx->sampler_views[shader_type][start_slot + i], NULL);
1797 ctx->sampler_views[shader_type][start_slot + i] = pview;
1799 pipe_sampler_view_reference(&ctx->sampler_views[shader_type][start_slot + i], pview);
1801 update_descriptor_state_sampler(ctx, shader_type, start_slot + i, res);
1804 update |= !!ctx->sampler_views[shader_type][start_slot + i];
1805 unbind_samplerview(ctx, shader_type, start_slot + i);
1807 &ctx->sampler_views[shader_type][start_slot + i],
1809 update_descriptor_state_sampler(ctx, shader_type, start_slot + i, NULL);
1811 ctx->di.num_sampler_views[shader_type] = start_slot + num_views;
1814 screen->context_invalidate_descriptor_state(ctx, shader_type, ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW, start_slot, num_views);
1816 update_nonseamless_shader_key(ctx, shader_type);
1823 struct zink_context *ctx = zink_context(pctx);
1842 uint64_t handle = util_idalloc_alloc(&ctx->di.bindless[bd->ds.is_buffer].tex_slots);
1846 _mesa_hash_table_insert(&ctx->di.bindless[bd->ds.is_buffer].tex_handles, (void*)(uintptr_t)handle, bd);
1853 struct zink_context *ctx = zink_context(pctx);
1855 struct hash_entry *he = _mesa_hash_table_search(&ctx->di.bindless[is_buffer].tex_handles, (void*)(uintptr_t)handle);
1859 _mesa_hash_table_remove(&ctx->di.bindless[is_buffer].tex_handles, he);
1861 util_dynarray_append(&ctx->batch.state->bindless_releases[0], uint32_t, h);
1866 zink_batch_reference_bufferview(&ctx->batch, ds->bufferview);
1870 zink_batch_reference_surface(&ctx->batch, ds->surface);
1878 rebind_bindless_bufferview(struct zink_context *ctx, struct zink_resource *res, struct zink_descriptor_surface *ds)
1886 struct zink_buffer_view *buffer_view = get_buffer_view(ctx, res, &bvci);
1889 zink_batch_reference_bufferview(&ctx->batch, ds->bufferview);
1890 zink_buffer_view_reference(zink_screen(ctx->base.screen), &ds->bufferview, NULL);
1895 zero_bindless_descriptor(struct zink_context *ctx, uint32_t handle, bool is_buffer, bool is_image)
1897 if (likely(zink_screen(ctx->base.screen)->info.rb2_feats.nullDescriptor)) {
1899 VkBufferView *bv = &ctx->di.bindless[is_image].buffer_infos[handle];
1902 VkDescriptorImageInfo *ii = &ctx->di.bindless[is_image].img_infos[handle];
1907 VkBufferView *bv = &ctx->di.bindless[is_image].buffer_infos[handle];
1908 struct zink_buffer_view *null_bufferview = ctx->dummy_bufferview;
1911 struct zink_surface *null_surface = zink_csurface(ctx->dummy_surface[is_image]);
1912 VkDescriptorImageInfo *ii = &ctx->di.bindless[is_image].img_infos[handle];
1923 struct zink_context *ctx = zink_context(pctx);
1925 struct hash_entry *he = _mesa_hash_table_search(&ctx->di.bindless[is_buffer].tex_handles, (void*)(uintptr_t)handle);
1933 update_res_bind_count(ctx, res, false, false);
1934 update_res_bind_count(ctx, res, true, false);
1938 rebind_bindless_bufferview(ctx, res, ds);
1939 VkBufferView *bv = &ctx->di.bindless[0].buffer_infos[handle];
1941 zink_resource_buffer_barrier(ctx, res, VK_ACCESS_SHADER_READ_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT);
1943 VkDescriptorImageInfo *ii = &ctx->di.bindless[0].img_infos[handle];
1946 ii->imageLayout = zink_descriptor_util_image_layout_eval(ctx, res, false);
1947 flush_pending_clears(ctx, res);
1948 check_for_layout_update(ctx, res, false);
1949 check_for_layout_update(ctx, res, true);
1951 zink_batch_resource_usage_set(&ctx->batch, res, false);
1952 util_dynarray_append(&ctx->di.bindless[0].resident, struct zink_bindless_descriptor *, bd);
1954 util_dynarray_append(&ctx->di.bindless[0].updates, uint32_t, h);
1957 zero_bindless_descriptor(ctx, handle, is_buffer, false);
1958 util_dynarray_delete_unordered(&ctx->di.bindless[0].resident, struct zink_bindless_descriptor *, bd);
1959 update_res_bind_count(ctx, res, false, true);
1960 update_res_bind_count(ctx, res, true, true);
1964 check_for_layout_update(ctx, res, i);
1967 ctx->di.bindless_dirty[0] = true;
1973 struct zink_context *ctx = zink_context(pctx);
1976 if (!zink_resource_object_init_storage(ctx, res)) {
1987 bd->ds.bufferview = create_image_bufferview(ctx, view);
1989 bd->ds.surface = create_image_surface(ctx, view, false);
1990 uint64_t handle = util_idalloc_alloc(&ctx->di.bindless[bd->ds.is_buffer].img_slots);
1994 _mesa_hash_table_insert(&ctx->di.bindless[bd->ds.is_buffer].img_handles, (void*)(uintptr_t)handle, bd);
2001 struct zink_context *ctx = zink_context(pctx);
2003 struct hash_entry *he = _mesa_hash_table_search(&ctx->di.bindless[is_buffer].img_handles, (void*)(uintptr_t)handle);
2006 _mesa_hash_table_remove(&ctx->di.bindless[is_buffer].img_handles, he);
2008 util_dynarray_append(&ctx->batch.state->bindless_releases[1], uint32_t, h);
2013 zink_batch_reference_bufferview(&ctx->batch, ds->bufferview);
2017 zink_batch_reference_surface(&ctx->batch, ds->surface);
2026 struct zink_context *ctx = zink_context(pctx);
2028 struct hash_entry *he = _mesa_hash_table_search(&ctx->di.bindless[is_buffer].img_handles, (void*)(uintptr_t)handle);
2051 update_res_bind_count(ctx, res, false, false);
2052 update_res_bind_count(ctx, res, true, false);
2058 rebind_bindless_bufferview(ctx, res, ds);
2059 VkBufferView *bv = &ctx->di.bindless[1].buffer_infos[handle];
2061 zink_resource_buffer_barrier(ctx, res, access, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT);
2063 VkDescriptorImageInfo *ii = &ctx->di.bindless[1].img_infos[handle];
2067 finalize_image_bind(ctx, res, false);
2068 finalize_image_bind(ctx, res, true);
2070 zink_batch_resource_usage_set(&ctx->batch, res, zink_resource_access_is_write(access));
2071 util_dynarray_append(&ctx->di.bindless[1].resident, struct zink_bindless_descriptor *, bd);
2073 util_dynarray_append(&ctx->di.bindless[1].updates, uint32_t, h);
2079 zero_bindless_descriptor(ctx, handle, is_buffer, true);
2080 util_dynarray_delete_unordered(&ctx->di.bindless[1].resident, struct zink_bindless_descriptor *, bd);
2081 unbind_shader_image_counts(ctx, res, false, false);
2082 unbind_shader_image_counts(ctx, res, true, false);
2086 check_for_layout_update(ctx, res, i);
2089 ctx->di.bindless_dirty[1] = true;
2096 struct zink_context *ctx = zink_context(pctx);
2097 ctx->stencil_ref = ref;
2098 ctx->stencil_ref_changed = true;
2112 struct zink_context *ctx = zink_context(pctx);
2113 memcpy(&ctx->default_inner_level, default_inner_level, sizeof(ctx->default_inner_level));
2114 memcpy(&ctx->default_outer_level, default_outer_level, sizeof(ctx->default_outer_level));
2120 struct zink_context *ctx = zink_context(pctx);
2121 if (zink_set_tcs_key_patches(ctx, patch_vertices)) {
2122 ctx->gfx_pipeline_state.dyn_state2.vertices_per_patch = patch_vertices;
2123 if (zink_screen(ctx->base.screen)->info.dynamic_state2_feats.extendedDynamicState2PatchControlPoints)
2124 VKCTX(CmdSetPatchControlPointsEXT)(ctx->batch.state->cmdbuf, patch_vertices);
2126 ctx->gfx_pipeline_state.dirty = true;
2131 zink_update_fbfetch(struct zink_context *ctx)
2133 const bool had_fbfetch = ctx->di.fbfetch.imageLayout == VK_IMAGE_LAYOUT_GENERAL;
2134 if (!ctx->gfx_stages[PIPE_SHADER_FRAGMENT] ||
2135 !ctx->gfx_stages[PIPE_SHADER_FRAGMENT]->nir->info.fs.uses_fbfetch_output) {
2138 ctx->rp_changed = true;
2139 zink_batch_no_rp(ctx);
2140 ctx->di.fbfetch.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
2141 ctx->di.fbfetch.imageView = zink_screen(ctx->base.screen)->info.rb2_feats.nullDescriptor ?
2143 zink_csurface(ctx->dummy_surface[0])->image_view;
2144 zink_screen(ctx->base.screen)->context_invalidate_descriptor_state(ctx, PIPE_SHADER_FRAGMENT, ZINK_DESCRIPTOR_TYPE_UBO, 0, 1);
2149 if (ctx->fb_state.cbufs[0]) {
2150 VkImageView fbfetch = zink_csurface(ctx->fb_state.cbufs[0])->image_view;
2154 changed |= fbfetch != ctx->di.fbfetch.imageView;
2155 ctx->di.fbfetch.imageView = zink_csurface(ctx->fb_state.cbufs[0])->image_view;
2157 bool fbfetch_ms = ctx->fb_state.cbufs[0]->texture->nr_samples > 1;
2158 if (zink_get_fs_key(ctx)->fbfetch_ms != fbfetch_ms)
2159 zink_set_fs_key(ctx)->fbfetch_ms = fbfetch_ms;
2161 ctx->di.fbfetch.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
2163 zink_screen(ctx->base.screen)->context_invalidate_descriptor_state(ctx, PIPE_SHADER_FRAGMENT, ZINK_DESCRIPTOR_TYPE_UBO, 0, 1);
2165 ctx->rp_changed = true;
2166 zink_batch_no_rp(ctx);
2172 zink_update_vk_sample_locations(struct zink_context *ctx)
2174 if (ctx->gfx_pipeline_state.sample_locations_enabled && ctx->sample_locations_changed) {
2175 unsigned samples = ctx->gfx_pipeline_state.rast_samples + 1;
2177 VkExtent2D grid_size = zink_screen(ctx->base.screen)->maxSampleLocationGridSize[idx];
2186 ctx->vk_sample_locations[wi].x = (ctx->sample_locations[ri] & 0xf) / 16.0f;
2187 ctx->vk_sample_locations[wi].y = (16 - (ctx->sample_locations[ri] >> 4)) / 16.0f;
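
Lines 2186-2187 unpack gallium's 4.4 fixed-point sample positions (x in the low nibble, y in the high, in sixteenths of a pixel) and flip y for Vulkan's convention. As a standalone helper:

#include <stdint.h>

/* one packed byte -> VkSampleLocationEXT-style floats in pixel units */
static void
decode_sample_location(uint8_t packed, float *x, float *y)
{
   *x = (packed & 0xf) / 16.0f;
   *y = (16 - (packed >> 4)) / 16.0f;   /* vertical flip, matching the source */
}
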
2194 find_rp_state(struct zink_context *ctx)
2197 struct set_entry *he = _mesa_set_search_or_add(&ctx->rendering_state_cache, &ctx->gfx_pipeline_state.rendering_info, &found);
2203 info = ralloc(ctx, struct zink_rendering_info);
2204 memcpy(info, &ctx->gfx_pipeline_state.rendering_info, sizeof(VkPipelineRenderingCreateInfo));
2205 info->id = ctx->rendering_state_cache.entries;
2211 begin_rendering(struct zink_context *ctx)
2214 ctx->gfx_pipeline_state.render_pass = NULL;
2215 zink_update_vk_sample_locations(ctx);
2216 zink_render_update_swapchain(ctx);
2221 if (ctx->rp_changed || ctx->rp_layout_changed || ctx->rp_loadop_changed) {
2223 for (int i = 0; i < ctx->fb_state.nr_cbufs; i++) {
2224 struct zink_surface *surf = zink_csurface(ctx->fb_state.cbufs[i]);
2226 if (!surf || !zink_resource(surf->base.texture)->valid || (surf->is_swapchain && ctx->new_swapchain))
2227 ctx->dynamic_fb.attachments[i].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
2229 ctx->dynamic_fb.attachments[i].loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
2230 ctx->gfx_pipeline_state.rendering_formats[i] = surf ? surf->info.format[0] : VK_FORMAT_R8G8B8A8_UNORM;
2234 unsigned prev_width = ctx->dynamic_fb.info.renderArea.extent.width;
2235 unsigned prev_height = ctx->dynamic_fb.info.renderArea.extent.height;
2236 ctx->dynamic_fb.info.renderArea.extent.width = MIN2(ctx->dynamic_fb.info.renderArea.extent.width, width);
2237 ctx->dynamic_fb.info.renderArea.extent.height = MIN2(ctx->dynamic_fb.info.renderArea.extent.height, height);
2238 changed_size |= ctx->dynamic_fb.info.renderArea.extent.width != prev_width;
2239 changed_size |= ctx->dynamic_fb.info.renderArea.extent.height != prev_height;
2243 VkImageLayout zlayout = ctx->dynamic_fb.info.pDepthAttachment ? ctx->dynamic_fb.info.pDepthAttachment->imageLayout : VK_IMAGE_LAYOUT_UNDEFINED;
2244 VkImageLayout slayout = ctx->dynamic_fb.info.pStencilAttachment ? ctx->dynamic_fb.info.pStencilAttachment->imageLayout : VK_IMAGE_LAYOUT_UNDEFINED;
2245 ctx->dynamic_fb.info.pDepthAttachment = NULL;
2246 ctx->gfx_pipeline_state.rendering_info.depthAttachmentFormat = VK_FORMAT_UNDEFINED;
2247 ctx->dynamic_fb.info.pStencilAttachment = NULL;
2248 ctx->gfx_pipeline_state.rendering_info.stencilAttachmentFormat = VK_FORMAT_UNDEFINED;
2250 if (ctx->fb_state.zsbuf) {
2251 struct zink_surface *surf = zink_csurface(ctx->fb_state.zsbuf);
2252 has_depth = util_format_has_depth(util_format_description(ctx->fb_state.zsbuf->format));
2253 has_stencil = util_format_has_stencil(util_format_description(ctx->fb_state.zsbuf->format));
2257 ctx->dynamic_fb.attachments[PIPE_MAX_COLOR_BUFS].loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
2259 ctx->dynamic_fb.attachments[PIPE_MAX_COLOR_BUFS].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
2262 ctx->dynamic_fb.attachments[PIPE_MAX_COLOR_BUFS+1].loadOp = ctx->dynamic_fb.attachments[PIPE_MAX_COLOR_BUFS].loadOp;
2265 ctx->dynamic_fb.info.pDepthAttachment = &ctx->dynamic_fb.attachments[PIPE_MAX_COLOR_BUFS];
2266 ctx->gfx_pipeline_state.rendering_info.depthAttachmentFormat = surf->info.format[0];
2271 ctx->dynamic_fb.info.pStencilAttachment = &ctx->dynamic_fb.attachments[PIPE_MAX_COLOR_BUFS + 1];
2272 ctx->gfx_pipeline_state.rendering_info.stencilAttachmentFormat = surf->info.format[0];
2275 ctx->dynamic_fb.info.pDepthAttachment = NULL;
2276 ctx->gfx_pipeline_state.rendering_info.depthAttachmentFormat = VK_FORMAT_UNDEFINED;
2278 if (zlayout != (ctx->dynamic_fb.info.pDepthAttachment ? ctx->dynamic_fb.info.pDepthAttachment->imageLayout : VK_IMAGE_LAYOUT_UNDEFINED))
2280 if (slayout != (ctx->dynamic_fb.info.pStencilAttachment ? ctx->dynamic_fb.info.pStencilAttachment->imageLayout : VK_IMAGE_LAYOUT_UNDEFINED))
2284 for (int i = 0; i < ctx->fb_state.nr_cbufs; i++) {
2286 if (!ctx->fb_state.cbufs[i] || !zink_fb_clear_enabled(ctx, i))
2289 struct zink_framebuffer_clear_data *clear = zink_fb_clear_element(&ctx->fb_clears[i], 0);
2290 if (zink_fb_clear_needs_explicit(&ctx->fb_clears[i])) {
2292 if (zink_fb_clear_count(&ctx->fb_clears[i]) < 2 ||
2297 memcpy(&ctx->dynamic_fb.attachments[i].clearValue, &clear->color, sizeof(float) * 4);
2298 ctx->dynamic_fb.attachments[i].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
2300 if (ctx->fb_state.zsbuf && zink_fb_clear_enabled(ctx, PIPE_MAX_COLOR_BUFS)) {
2301 struct zink_framebuffer_clear *fb_clear = &ctx->fb_clears[PIPE_MAX_COLOR_BUFS];
2305 ctx->dynamic_fb.attachments[PIPE_MAX_COLOR_BUFS].clearValue.depthStencil.depth = clear->zs.depth;
2306 ctx->dynamic_fb.attachments[PIPE_MAX_COLOR_BUFS].clearValue.depthStencil.stencil = clear->zs.stencil;
2308 ctx->dynamic_fb.attachments[PIPE_MAX_COLOR_BUFS+1].clearValue.depthStencil.stencil = clear->zs.stencil;
2311 ctx->dynamic_fb.attachments[PIPE_MAX_COLOR_BUFS].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
2314 ctx->dynamic_fb.attachments[PIPE_MAX_COLOR_BUFS+1].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
2325 ctx->rp_changed = true;
2326 ctx->rp_loadop_changed = false;
2327 ctx->rp_layout_changed = false;
2330 assert(!ctx->dynamic_fb.info.pDepthAttachment || ctx->gfx_pipeline_state.rendering_info.depthAttachmentFormat);
2331 assert(!ctx->dynamic_fb.info.pStencilAttachment || ctx->gfx_pipeline_state.rendering_info.stencilAttachmentFormat);
2333 if (!ctx->rp_changed && ctx->batch.in_rp)
2335 ctx->rp_changed = false;
2337 unsigned rp_state = find_rp_state(ctx);
2338 bool rp_changed = ctx->gfx_pipeline_state.rp_state != rp_state;
2339 if (!rp_changed && ctx->batch.in_rp)
2341 zink_batch_no_rp(ctx);
2342 for (int i = 0; i < ctx->fb_state.nr_cbufs; i++) {
2343 struct zink_surface *surf = zink_csurface(ctx->fb_state.cbufs[i]);
2344 VkImageView iv = zink_prep_fb_attachment(ctx, surf, i);
2348 ctx->dynamic_fb.attachments[i].imageView = iv;
2350 if (ctx->fb_state.zsbuf) {
2351 struct zink_surface *surf = zink_csurface(ctx->fb_state.zsbuf);
2352 VkImageView iv = zink_prep_fb_attachment(ctx, surf, ctx->fb_state.nr_cbufs);
2353 ctx->dynamic_fb.attachments[PIPE_MAX_COLOR_BUFS].imageView = iv;
2354 ctx->dynamic_fb.attachments[PIPE_MAX_COLOR_BUFS].imageLayout = zink_resource(surf->base.texture)->layout;
2355 ctx->dynamic_fb.attachments[PIPE_MAX_COLOR_BUFS+1].imageView = iv;
2356 ctx->dynamic_fb.attachments[PIPE_MAX_COLOR_BUFS+1].imageLayout = zink_resource(surf->base.texture)->layout;
2358 ctx->gfx_pipeline_state.dirty |= rp_changed;
2359 ctx->gfx_pipeline_state.rp_state = rp_state;
2361 VKCTX(CmdBeginRendering)(ctx->batch.state->cmdbuf, &ctx->dynamic_fb.info);
2362 ctx->batch.in_rp = true;
2363 ctx->new_swapchain = false;
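
The loadOp logic scattered through begin_rendering (lines 2226-2229 pick LOAD vs DONT_CARE; lines 2297-2314 upgrade to CLEAR when an fb clear is pending) collapses to a three-way choice. A condensed sketch, assuming Vulkan headers:

#include <stdbool.h>
#include <vulkan/vulkan.h>

static VkAttachmentLoadOp
pick_load_op(bool contents_valid, bool fresh_swapchain, bool pending_clear)
{
   if (pending_clear)
      return VK_ATTACHMENT_LOAD_OP_CLEAR;      /* fold the clear into the pass */
   if (!contents_valid || fresh_swapchain)
      return VK_ATTACHMENT_LOAD_OP_DONT_CARE;  /* nothing worth loading */
   return VK_ATTACHMENT_LOAD_OP_LOAD;
}
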
2368 zink_batch_rp(struct zink_context *ctx)
2370 assert(!(ctx->batch.in_rp && ctx->rp_changed));
2371 if (ctx->batch.in_rp && !ctx->rp_layout_changed)
2373 bool in_rp = ctx->batch.in_rp;
2374 if (!in_rp && ctx->void_clears) {
2378 ctx->base.clear(&ctx->base, ctx->void_clears, NULL, &color, 0, 0);
2379 ctx->void_clears = 0;
2386 if (!zink_screen(ctx->base.screen)->info.have_KHR_dynamic_rendering || ctx->transient_attachments || ctx->fbfetch_outputs)
2387 clear_buffers = zink_begin_render_pass(ctx);
2389 clear_buffers = begin_rendering(ctx);
2390 assert(!ctx->rp_changed);
2391 if (in_rp || !ctx->batch.in_rp)
2393 if (ctx->render_condition.query)
2394 zink_start_conditional_render(ctx);
2395 zink_clear_framebuffer(ctx, clear_buffers);
2399 zink_batch_no_rp(struct zink_context *ctx)
2401 if (!ctx->batch.in_rp)
2403 if (ctx->render_condition.query)
2404 zink_stop_conditional_render(ctx);
2405 if (ctx->gfx_pipeline_state.render_pass)
2406 zink_end_render_pass(ctx);
2408 VKCTX(CmdEndRendering)(ctx->batch.state->cmdbuf);
2409 ctx->batch.in_rp = false;
2411 assert(!ctx->batch.in_rp);
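
zink_batch_no_rp (lines 2399-2411) is the single exit path from a render pass: stop conditional rendering first, then end either the legacy render pass or the dynamic-rendering instance, then clear in_rp. Reduced to its shape, with stand-in types and helpers:

#include <stdbool.h>
#include <vulkan/vulkan.h>

struct ctx {
   VkCommandBuffer cmdbuf;
   bool in_rp;
   bool render_condition_active;
   void *legacy_render_pass;   /* non-NULL when not using dynamic rendering */
};

void stop_conditional_render(struct ctx *c);   /* assumed helpers */
void end_legacy_render_pass(struct ctx *c);

static void
end_rp(struct ctx *c)
{
   if (!c->in_rp)
      return;                        /* idempotent; callers invoke it freely */
   if (c->render_condition_active)
      stop_conditional_render(c);    /* must not straddle the pass boundary */
   if (c->legacy_render_pass)
      end_legacy_render_pass(c);
   else
      vkCmdEndRendering(c->cmdbuf);  /* VK_KHR_dynamic_rendering / core 1.3 */
   c->in_rp = false;
}
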
2415 update_res_sampler_layouts(struct zink_context *ctx, struct zink_resource *res)
2421 if (ctx->di.descriptor_res[ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW][i][slot] == res)
2422 ctx->di.textures[i][slot].imageLayout = zink_descriptor_util_image_layout_eval(ctx, res, false);
2430 zink_prep_fb_attachment(struct zink_context *ctx, struct zink_surface *surf, unsigned i)
2433 if (!surf || (i < ctx->fb_state.nr_cbufs && zink_use_dummy_attachments(ctx))) {
2434 surf = zink_csurface(ctx->dummy_surface[util_logbase2_ceil(ctx->fb_state.samples)]);
2438 zink_batch_resource_usage_set(&ctx->batch, res, true);
2439 zink_batch_usage_set(&surf->batch_uses, ctx->batch.state);
2445 if (!zink_kopper_acquire(ctx, res, UINT64_MAX))
2447 zink_surface_swapchain_update(ctx, surf);
2449 zink_update_fbfetch(ctx);
2452 if (ctx->gfx_pipeline_state.render_pass) {
2453 layout = zink_render_pass_attachment_get_barrier_info(&ctx->gfx_pipeline_state.render_pass->state.rts[i],
2454 i < ctx->fb_state.nr_cbufs, &pipeline, &access);
2457 if (i < ctx->fb_state.nr_cbufs)
2458 zink_init_color_attachment(ctx, i, &rt);
2460 zink_init_zs_attachment(ctx, &rt);
2461 layout = zink_render_pass_attachment_get_barrier_info(&rt, i < ctx->fb_state.nr_cbufs, &pipeline, &access);
2463 zink_resource_image_barrier(ctx, res, layout, access, pipeline);
2465 if (i == ctx->fb_state.nr_cbufs && res->sampler_bind_count[0])
2466 update_res_sampler_layouts(ctx, res);
2514 zink_init_vk_sample_locations(struct zink_context *ctx, VkSampleLocationsInfoEXT *loc)
2516 struct zink_screen *screen = zink_screen(ctx->base.screen);
2517 unsigned idx = util_logbase2_ceil(MAX2(ctx->gfx_pipeline_state.rast_samples + 1, 1));
2521 loc->sampleLocationsCount = ctx->gfx_pipeline_state.rast_samples + 1;
2523 loc->pSampleLocations = ctx->vk_sample_locations;
2529 struct zink_context *ctx = zink_context(pctx);
2531 if (!ctx->fb_state.zsbuf)
2534 struct zink_resource *res = zink_resource(ctx->fb_state.zsbuf->texture);
2536 zink_init_vk_sample_locations(ctx, &res->obj->zs_evaluate);
2537 zink_batch_no_rp(ctx);
2541 sync_flush(struct zink_context *ctx, struct zink_batch_state *bs)
2543 if (zink_screen(ctx->base.screen)->threaded)
2548 get_access_flags_for_binding(struct zink_context *ctx, enum zink_descriptor_type type, enum pipe_shader_type stage, unsigned idx)
2558 if (ctx->writable_ssbos[stage] & (1 << idx))
2563 struct zink_image_view *image_view = &ctx->image_views[stage][idx];
2578 update_resource_refs_for_stage(struct zink_context *ctx, enum pipe_shader_type stage)
2580 struct zink_batch *batch = &ctx->batch;
2582 [ZINK_DESCRIPTOR_TYPE_UBO] = ctx->di.num_ubos[stage],
2583 [ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW] = ctx->di.num_samplers[stage],
2584 [ZINK_DESCRIPTOR_TYPE_SSBO] = ctx->di.num_ssbos[stage],
2585 [ZINK_DESCRIPTOR_TYPE_IMAGE] = ctx->di.num_images[stage]
2589 if (ctx->di.descriptor_res[i][stage][j]) {
2590 struct zink_resource *res = ctx->di.descriptor_res[i][stage][j];
2593 bool is_write = zink_resource_access_is_write(get_access_flags_for_binding(ctx, i, stage, j));
2600 struct zink_sampler_view *sv = zink_sampler_view(ctx->sampler_views[stage][j]);
2601 struct zink_sampler_state *sampler_state = ctx->sampler_states[stage][j];
2602 struct zink_image_view *iv = &ctx->image_views[stage][j];
2603 if (sampler_state && i == ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW && j <= ctx->di.num_samplers[stage])
2604 zink_batch_usage_set(&sampler_state->batch_uses, ctx->batch.state);
2605 if (sv && i == ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW && j <= ctx->di.num_sampler_views[stage]) {
2607 zink_batch_usage_set(&sv->buffer_view->batch_uses, ctx->batch.state);
2609 zink_batch_usage_set(&sv->image_view->batch_uses, ctx->batch.state);
2611 zink_batch_usage_set(&sv->cube_array->batch_uses, ctx->batch.state);
2614 } else if (i == ZINK_DESCRIPTOR_TYPE_IMAGE && j <= ctx->di.num_images[stage]) {
2616 zink_batch_usage_set(&iv->buffer_view->batch_uses, ctx->batch.state);
2618 zink_batch_usage_set(&iv->surface->batch_uses, ctx->batch.state);
2627 zink_update_descriptor_refs(struct zink_context *ctx, bool compute)
2629 struct zink_batch *batch = &ctx->batch;
2631 update_resource_refs_for_stage(ctx, PIPE_SHADER_COMPUTE);
2632 if (ctx->curr_compute)
2633 zink_batch_reference_program(batch, &ctx->curr_compute->base);
2636 update_resource_refs_for_stage(ctx, i);
2637 unsigned vertex_buffers_enabled_mask = ctx->gfx_pipeline_state.vertex_buffers_enabled_mask;
2640 struct zink_resource *res = zink_resource(ctx->vertex_buffers[i].buffer.resource);
2646 if (ctx->curr_program)
2647 zink_batch_reference_program(batch, &ctx->curr_program->base);
2649 if (ctx->di.bindless_refs_dirty) {
2650 ctx->di.bindless_refs_dirty = false;
2652 util_dynarray_foreach(&ctx->di.bindless[i].resident, struct zink_bindless_descriptor*, bd) {
2654 zink_batch_resource_usage_set(&ctx->batch, res, (*bd)->access & PIPE_IMAGE_ACCESS_WRITE);
2665 reapply_color_write(struct zink_context *ctx)
2667 struct zink_screen *screen = zink_screen(ctx->base.screen);
2672 VKCTX(CmdSetColorWriteEnableEXT)(ctx->batch.state->cmdbuf, max_att, ctx->disable_color_writes ? disables : enables);
2674 if (screen->info.have_EXT_extended_dynamic_state && ctx->dsa_state)
2675 VKCTX(CmdSetDepthWriteEnableEXT)(ctx->batch.state->cmdbuf, ctx->disable_color_writes ? VK_FALSE : ctx->dsa_state->hw_state.depth_write);
2679 stall(struct zink_context *ctx)
2681 struct zink_screen *screen = zink_screen(ctx->base.screen);
2682 sync_flush(ctx, zink_batch_state(ctx->last_fence));
2683 zink_screen_timeline_wait(screen, ctx->last_fence->batch_id, PIPE_TIMEOUT_INFINITE);
2684 zink_batch_reset_all(ctx);
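
stall (lines 2679-2684) is a full CPU sync: flush the last submission, block on the timeline until its batch id signals, then recycle every batch state. The wait itself, expressed against a raw Vulkan 1.2 timeline semaphore (dev/timeline/batch_id supplied by the caller):

#include <stdint.h>
#include <vulkan/vulkan.h>

static VkResult
wait_for_batch(VkDevice dev, VkSemaphore timeline, uint64_t batch_id)
{
   VkSemaphoreWaitInfo wait = {
      .sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
      .semaphoreCount = 1,
      .pSemaphores = &timeline,
      .pValues = &batch_id,      /* block until counter >= batch_id */
   };
   return vkWaitSemaphores(dev, &wait, UINT64_MAX);
}
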
2688 flush_batch(struct zink_context *ctx, bool sync)
2690 struct zink_batch *batch = &ctx->batch;
2691 if (ctx->clears_enabled)
2693 zink_batch_rp(ctx);
2694 bool conditional_render_active = ctx->render_condition.active;
2695 zink_stop_conditional_render(ctx);
2696 zink_batch_no_rp(ctx);
2697 zink_end_batch(ctx, batch);
2698 ctx->deferred_fence = NULL;
2701 sync_flush(ctx, ctx->batch.state);
2703 if (ctx->batch.state->is_device_lost) {
2704 check_device_lost(ctx);
2706 zink_start_batch(ctx, batch);
2707 if (zink_screen(ctx->base.screen)->info.have_EXT_transform_feedback && ctx->num_so_targets)
2708 ctx->dirty_so_targets = true;
2709 ctx->pipeline_changed[0] = ctx->pipeline_changed[1] = true;
2710 zink_select_draw_vbo(ctx);
2711 zink_select_launch_grid(ctx);
2713 if (ctx->oom_stall)
2714 stall(ctx);
2715 ctx->oom_flush = false;
2716 ctx->oom_stall = false;
2717 if (ctx->dd) //copy context
2718 ctx->dd->bindless_bound = false;
2719 ctx->di.bindless_refs_dirty = true;
2720 ctx->sample_locations_changed = ctx->gfx_pipeline_state.sample_locations_enabled;
2721 if (zink_screen(ctx->base.screen)->info.dynamic_state2_feats.extendedDynamicState2PatchControlPoints)
2722 VKCTX(CmdSetPatchControlPointsEXT)(ctx->batch.state->cmdbuf, ctx->gfx_pipeline_state.dyn_state2.vertices_per_patch);
2724 zink_start_conditional_render(ctx);
2725 reapply_color_write(ctx);
2730 zink_flush_queue(struct zink_context *ctx)
2732 flush_batch(ctx, true);
2736 rebind_fb_surface(struct zink_context *ctx, struct pipe_surface **surf, struct zink_resource *match_res)
2742 return zink_rebind_ctx_surface(ctx, surf);
2747 rebind_fb_state(struct zink_context *ctx, struct zink_resource *match_res, bool from_set_fb)
2750 for (int i = 0; i < ctx->fb_state.nr_cbufs; i++) {
2751 rebind |= rebind_fb_surface(ctx, &ctx->fb_state.cbufs[i], match_res);
2752 if (from_set_fb && ctx->fb_state.cbufs[i] && ctx->fb_state.cbufs[i]->texture->bind & PIPE_BIND_SCANOUT)
2753 ctx->new_swapchain = true;
2755 rebind |= rebind_fb_surface(ctx, &ctx->fb_state.zsbuf, match_res);
2760 unbind_fb_surface(struct zink_context *ctx, struct pipe_surface *surf, unsigned idx, bool changed)
2762 ctx->dynamic_fb.attachments[idx].imageView = VK_NULL_HANDLE;
2769 zink_batch_reference_surface(&ctx->batch, zink_csurface(surf));
2771 zink_batch_reference_surface(&ctx->batch, transient);
2773 ctx->rp_changed = true;
2777 check_resource_for_batch_ref(ctx, res);
2779 update_res_sampler_layouts(ctx, res);
2780 _mesa_set_add(ctx->need_barriers[0], res);
2786 zink_set_color_write_enables(struct zink_context *ctx)
2788 bool disable_color_writes = ctx->rast_state && ctx->rast_state->base.rasterizer_discard && ctx->primitives_generated_active;
2789 if (ctx->disable_color_writes == disable_color_writes)
2792 if (disable_color_writes && ctx->clears_enabled)
2793 zink_batch_rp(ctx);
2794 ctx->disable_color_writes = disable_color_writes;
2795 if (zink_screen(ctx->base.screen)->driver_workarounds.color_write_missing) {
2797 zink_batch_no_rp(ctx);
2798 ctx->rp_changed = true;
2799 zink_update_framebuffer_state(ctx);
2801 reapply_color_write(ctx);
2809 struct zink_context *ctx = zink_context(pctx);
2811 unsigned w = ctx->fb_state.width;
2812 unsigned h = ctx->fb_state.height;
2815 bool flush_clears = ctx->clears_enabled &&
2816 (ctx->dynamic_fb.info.layerCount != layers ||
2818 for (int i = 0; i < ctx->fb_state.nr_cbufs; i++) {
2819 if (i >= state->nr_cbufs || ctx->fb_state.cbufs[i] != state->cbufs[i])
2820 flush_clears |= zink_fb_clear_enabled(ctx, i);
2822 if (ctx->fb_state.zsbuf != state->zsbuf)
2823 flush_clears |= zink_fb_clear_enabled(ctx, PIPE_MAX_COLOR_BUFS);
2825 zink_batch_rp(ctx);
2826 for (int i = 0; i < ctx->fb_state.nr_cbufs; i++) {
2827 struct pipe_surface *psurf = ctx->fb_state.cbufs[i];
2829 ctx->rp_changed |= !!zink_transient_surface(psurf) != !!zink_transient_surface(state->cbufs[i]);
2830 unbind_fb_surface(ctx, psurf, i, i >= state->nr_cbufs || psurf != state->cbufs[i]);
2831 if (psurf && ctx->needs_present == zink_resource(psurf->texture))
2832 ctx->needs_present = NULL;
2834 if (ctx->fb_state.zsbuf) {
2835 struct pipe_surface *psurf = ctx->fb_state.zsbuf;
2838 unbind_fb_surface(ctx, psurf, PIPE_MAX_COLOR_BUFS, changed);
2840 ctx->rp_changed |= !!zink_transient_surface(psurf) != !!zink_transient_surface(state->zsbuf);
2844 zink_resource_image_barrier(ctx, res, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2848 ctx->rp_changed |= ctx->fb_state.nr_cbufs != state->nr_cbufs;
2849 ctx->rp_changed |= !!ctx->fb_state.zsbuf != !!state->zsbuf;
2851 util_copy_framebuffer_state(&ctx->fb_state, state);
2852 zink_update_fbfetch(ctx);
2853 unsigned prev_void_alpha_attachments = ctx->gfx_pipeline_state.void_alpha_attachments;
2854 ctx->gfx_pipeline_state.void_alpha_attachments = 0;
2855 ctx->transient_attachments = 0;
2856 ctx->fb_layer_mismatch = 0;
2858 ctx->dynamic_fb.info.renderArea.offset.x = 0;
2859 ctx->dynamic_fb.info.renderArea.offset.y = 0;
2860 ctx->dynamic_fb.info.renderArea.extent.width = state->width;
2861 ctx->dynamic_fb.info.renderArea.extent.height = state->height;
2862 ctx->dynamic_fb.info.colorAttachmentCount = ctx->fb_state.nr_cbufs;
2863 ctx->rp_changed |= ctx->dynamic_fb.info.layerCount != layers;
2864 ctx->dynamic_fb.info.layerCount = layers;
2865 ctx->gfx_pipeline_state.rendering_info.colorAttachmentCount = ctx->fb_state.nr_cbufs;
2867 ctx->void_clears = 0;
2868 for (int i = 0; i < ctx->fb_state.nr_cbufs; i++) {
2869 struct pipe_surface *psurf = ctx->fb_state.cbufs[i];
2873 ctx->transient_attachments |= BITFIELD_BIT(i);
2878 ctx->fb_layer_mismatch |= BITFIELD_BIT(i);
2880 assert(!ctx->needs_present || ctx->needs_present == res);
2881 ctx->needs_present = res;
2885 if (!zink_screen(ctx->base.screen)->info.have_KHR_swapchain_mutable_format &&
2896 ctx->gfx_pipeline_state.void_alpha_attachments |= BITFIELD_BIT(i);
2898 ctx->void_clears |= (PIPE_CLEAR_COLOR0 << i);
2902 if (ctx->gfx_pipeline_state.void_alpha_attachments != prev_void_alpha_attachments)
2903 ctx->gfx_pipeline_state.dirty = true;
2904 unsigned depth_bias_scale_factor = ctx->depth_bias_scale_factor;
2905 if (ctx->fb_state.zsbuf) {
2906 struct pipe_surface *psurf = ctx->fb_state.zsbuf;
2909 ctx->transient_attachments |= BITFIELD_BIT(PIPE_MAX_COLOR_BUFS);
2913 ctx->fb_layer_mismatch |= BITFIELD_BIT(PIPE_MAX_COLOR_BUFS);
2918 ctx->depth_bias_scale_factor = zink_screen(ctx->base.screen)->driver_workarounds.z16_unscaled_bias;
2924 ctx->depth_bias_scale_factor = zink_screen(ctx->base.screen)->driver_workarounds.z24_unscaled_bias;
2929 ctx->depth_bias_scale_factor = 1<<23;
2932 ctx->depth_bias_scale_factor = 0;
2935 ctx->depth_bias_scale_factor = 0;
2937 if (depth_bias_scale_factor != ctx->depth_bias_scale_factor &&
2938 ctx->rast_state && ctx->rast_state->base.offset_units_unscaled)
2939 ctx->rast_state_changed = true;
2940 rebind_fb_state(ctx, NULL, true);
2941 ctx->fb_state.samples = MAX2(samples, 1);
2942 zink_update_framebuffer_state(ctx);
2943 if (ctx->fb_state.width != w || ctx->fb_state.height != h)
2944 ctx->scissor_changed = true;
2946 uint8_t rast_samples = ctx->fb_state.samples - 1;
2947 if (rast_samples != ctx->gfx_pipeline_state.rast_samples)
2948 zink_update_fs_key_samples(ctx);
2949 if (ctx->gfx_pipeline_state.rast_samples != rast_samples) {
2950 ctx->sample_locations_changed |= ctx->gfx_pipeline_state.sample_locations_enabled;
2951 ctx->gfx_pipeline_state.dirty = true;
2953 ctx->gfx_pipeline_state.rast_samples = rast_samples;
2956 zink_batch_no_rp(ctx);
2958 if (ctx->oom_flush)
2959 flush_batch(ctx, false);
2966 struct zink_context *ctx = zink_context(pctx);
2967 memcpy(ctx->blend_constants, color->color, sizeof(float) * 4);
2973 struct zink_context *ctx = zink_context(pctx);
2974 ctx->gfx_pipeline_state.sample_mask = sample_mask;
2975 ctx->gfx_pipeline_state.dirty = true;
2981 struct zink_context *ctx = zink_context(pctx);
2983 ctx->gfx_pipeline_state.sample_locations_enabled = size && locations;
2984 ctx->sample_locations_changed = ctx->gfx_pipeline_state.sample_locations_enabled;
2985 if (size > sizeof(ctx->sample_locations))
2986 size = sizeof(ctx->sample_locations);
2989 memcpy(ctx->sample_locations, locations, size);
3166 resource_check_defer_buffer_barrier(struct zink_context *ctx, struct zink_resource *res, VkPipelineStageFlags pipeline)
3173 _mesa_set_add(ctx->need_barriers[0], res);
3177 _mesa_set_add(ctx->need_barriers[1], res);
3181 unordered_res_exec(const struct zink_context *ctx, const struct zink_resource *res, bool is_write)
3187 if (is_write && zink_batch_usage_matches(res->obj->bo->reads, ctx->batch.state) && !res->obj->unordered_read)
3190 return res->obj->unordered_write || !zink_batch_usage_matches(res->obj->bo->writes, ctx->batch.state);
3194 zink_get_cmdbuf(struct zink_context *ctx, struct zink_resource *src, struct zink_resource *dst)
3198 unordered_exec &= unordered_res_exec(ctx, src, false);
3200 unordered_exec &= unordered_res_exec(ctx, dst, true);
3206 ctx->batch.state->has_barriers = true;
3207 return ctx->batch.state->barrier_cmdbuf;
3209 zink_batch_no_rp(ctx);
3210 return ctx->batch.state->cmdbuf;
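
zink_get_cmdbuf (lines 3194-3210) is the reordering optimization: when neither resource has an ordering hazard on the current batch, the work is recorded on the barrier_cmdbuf that executes before the main command buffer; otherwise the render pass must end and the main cmdbuf is used. The decision, roughly, with stand-in fields and helpers:

#include <stdbool.h>
#include <vulkan/vulkan.h>

struct res;
struct ctx {
   VkCommandBuffer cmdbuf, barrier_cmdbuf;
   bool has_barriers;
};

bool can_reorder(struct ctx *c, struct res *r, bool is_write);  /* assumed */
void end_rp(struct ctx *c);                                     /* assumed */

static VkCommandBuffer
pick_cmdbuf(struct ctx *c, struct res *src, struct res *dst)
{
   bool unordered = true;
   if (src)
      unordered &= can_reorder(c, src, false);
   if (dst)
      unordered &= can_reorder(c, dst, true);
   if (unordered) {
      c->has_barriers = true;      /* the pre-cmdbuf now needs submitting */
      return c->barrier_cmdbuf;
   }
   end_rp(c);                      /* transfer ops are illegal inside a pass */
   return c->cmdbuf;
}
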
3214 resource_check_defer_image_barrier(struct zink_context *ctx, struct zink_resource *res, VkImageLayout layout, VkPipelineStageFlags pipeline)
3228 if (layout == zink_descriptor_util_image_layout_eval(ctx, res, !is_compute))
3233 _mesa_set_add(ctx->need_barriers[!is_compute], res);
3236 _mesa_set_add(ctx->need_barriers[is_compute], res);
3240 zink_resource_image_barrier(struct zink_context *ctx, struct zink_resource *res,
3251 VkCommandBuffer cmdbuf = is_write ? zink_get_cmdbuf(ctx, NULL, res) : zink_get_cmdbuf(ctx, res, NULL);
3260 imb.dstQueueFamilyIndex = zink_screen(ctx->base.screen)->gfx_queue;
3273 resource_check_defer_image_barrier(ctx, res, new_layout, pipeline);
3334 zink_resource_buffer_barrier(struct zink_context *ctx, struct zink_resource *res, VkAccessFlags flags, VkPipelineStageFlags pipeline)
3349 VkCommandBuffer cmdbuf = is_write ? zink_get_cmdbuf(ctx, NULL, res) : zink_get_cmdbuf(ctx, res, NULL);
3361 resource_check_defer_buffer_barrier(ctx, res, pipeline);
3394 struct zink_context *ctx = zink_context(pctx);
3397 struct zink_batch *batch = &ctx->batch;
3399 struct zink_screen *screen = zink_screen(ctx->base.screen);
3403 if (!deferred && ctx->clears_enabled) {
3405 unsigned fbfetch_outputs = ctx->fbfetch_outputs;
3407 ctx->fbfetch_outputs = 0;
3408 ctx->rp_changed = true;
3411 zink_batch_rp(ctx);
3412 ctx->fbfetch_outputs = fbfetch_outputs;
3413 ctx->rp_changed |= fbfetch_outputs > 0;
3416 if (ctx->needs_present && (flags & PIPE_FLUSH_END_OF_FRAME)) {
3417 if (ctx->needs_present->obj->image)
3418 zink_resource_image_barrier(ctx, ctx->needs_present, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, 0, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
3419 ctx->needs_present = NULL;
3425 fence = ctx->last_fence;
3428 struct zink_batch_state *last = zink_batch_state(ctx->last_fence);
3430 sync_flush(ctx, last);
3432 check_device_lost(ctx);
3435 tc_driver_internal_flush_notify(ctx->tc);
3442 flush_batch(ctx, true);
3465 assert(!ctx->deferred_fence || ctx->deferred_fence == fence);
3466 ctx->deferred_fence = fence;
3476 sync_flush(ctx, zink_batch_state(fence));
3483 struct zink_context *ctx = zink_context(pctx);
3485 if (ctx->batch.has_work)
3487 if (ctx->last_fence)
3488 stall(ctx);
3492 zink_wait_on_batch(struct zink_context *ctx, uint64_t batch_id)
3497 flush_batch(ctx, true);
3498 bs = zink_batch_state(ctx->last_fence);
3503 if (!zink_screen_timeline_wait(zink_screen(ctx->base.screen), batch_id, UINT64_MAX))
3504 check_device_lost(ctx);
3508 zink_check_batch_completion(struct zink_context *ctx, uint64_t batch_id)
3510 assert(ctx->batch.state);
3515 if (zink_screen_check_last_finished(zink_screen(ctx->base.screen), batch_id))
3518 bool success = zink_screen_timeline_wait(zink_screen(ctx->base.screen), batch_id, 0);
3520 check_device_lost(ctx);
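/* Editorial sketch of the two timeline-semaphore uses above, assuming
 * zink_screen_timeline_wait ultimately wraps vkWaitSemaphores
 * (VK_KHR_timeline_semaphore / Vulkan 1.2): UINT64_MAX blocks until the
 * batch id is reached, while a timeout of 0 is a non-blocking poll. */
#include <vulkan/vulkan.h>
#include <stdbool.h>

static bool
timeline_reached(VkDevice dev, VkSemaphore timeline, uint64_t batch_id, uint64_t timeout)
{
   VkSemaphoreWaitInfo swi = {
      .sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
      .semaphoreCount = 1,
      .pSemaphores = &timeline,
      .pValues = &batch_id,
   };
   /* VK_TIMEOUT (not an error) means the value was not reached in time */
   return vkWaitSemaphores(dev, &swi, timeout) == VK_SUCCESS;
}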
3527 struct zink_context *ctx = zink_context(pctx);
3532 if (!ctx->framebuffer || !ctx->framebuffer->state.num_attachments)
3536 if (ctx->rp_clears_enabled && dst == VK_ACCESS_INPUT_ATTACHMENT_READ_BIT)
3537 zink_batch_rp(ctx);
3540 if (!ctx->fbfetch_outputs)
3541 zink_batch_no_rp(ctx);
3543 if (zink_screen(ctx->base.screen)->info.have_KHR_synchronization2) {
3559 if (ctx->fb_state.zsbuf) {
3566 VKCTX(CmdPipelineBarrier2)(ctx->batch.state->cmdbuf, &dep);
3573 ctx->batch.state->cmdbuf,
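/* Editorial sketch of the synchronization2 path gated above: a global
 * VkMemoryBarrier2 wrapped in VkDependencyInfo and recorded with
 * vkCmdPipelineBarrier2 (or the KHR-suffixed entry point when only
 * VK_KHR_synchronization2 is available). Stage/access choices here are
 * illustrative for a color-attachment-to-fragment-read hazard. */
#include <vulkan/vulkan.h>

static void
texture_barrier2(VkCommandBuffer cmdbuf, VkAccessFlags2 dst_access)
{
   VkMemoryBarrier2 mb = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2,
      .srcStageMask = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT,
      .srcAccessMask = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT,
      .dstStageMask = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT,
      .dstAccessMask = dst_access,
   };
   VkDependencyInfo dep = {
      .sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
      .memoryBarrierCount = 1,
      .pMemoryBarriers = &mb,
   };
   vkCmdPipelineBarrier2(cmdbuf, &dep);
}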
3585 mem_barrier(struct zink_context *ctx, VkPipelineStageFlags src_stage, VkPipelineStageFlags dst_stage, VkAccessFlags src, VkAccessFlags dst)
3587 struct zink_batch *batch = &ctx->batch;
3593 zink_batch_no_rp(ctx);
3598 zink_flush_memory_barrier(struct zink_context *ctx, bool is_compute)
3606 VkPipelineStageFlags src = ctx->batch.last_was_compute ? cs_flags : gfx_flags;
3609 if (ctx->memory_barrier & (PIPE_BARRIER_TEXTURE | PIPE_BARRIER_SHADER_BUFFER | PIPE_BARRIER_IMAGE))
3610 mem_barrier(ctx, src, dst, VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT);
3612 if (ctx->memory_barrier & PIPE_BARRIER_CONSTANT_BUFFER)
3613 mem_barrier(ctx, src, dst,
3618 if (ctx->memory_barrier & PIPE_BARRIER_INDIRECT_BUFFER)
3619 mem_barrier(ctx, src, VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
3622 if (ctx->memory_barrier & PIPE_BARRIER_VERTEX_BUFFER)
3623 mem_barrier(ctx, gfx_flags, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
3627 if (ctx->memory_barrier & PIPE_BARRIER_INDEX_BUFFER)
3628 mem_barrier(ctx, gfx_flags, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
3631 if (ctx->memory_barrier & PIPE_BARRIER_FRAMEBUFFER)
3632 zink_texture_barrier(&ctx->base, 0);
3633 if (ctx->memory_barrier & PIPE_BARRIER_STREAMOUT_BUFFER)
3634 mem_barrier(ctx, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT |
3642 ctx->memory_barrier = 0;
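/* Condensed editorial sketch of the flag-to-barrier lowering above: each
 * gallium barrier bit selects a destination stage/access pair and is emitted
 * as one global memory barrier. The enum values below are local stand-ins;
 * the real PIPE_BARRIER_* flags live in gallium's p_defines.h. */
#include <vulkan/vulkan.h>

enum {
   BARRIER_INDIRECT = 1u << 0,   /* stand-in for PIPE_BARRIER_INDIRECT_BUFFER */
   BARRIER_VERTEX   = 1u << 1,   /* stand-in for PIPE_BARRIER_VERTEX_BUFFER */
   BARRIER_INDEX    = 1u << 2,   /* stand-in for PIPE_BARRIER_INDEX_BUFFER */
};

static void
lower_barrier_flags(unsigned flags, VkPipelineStageFlags gfx_stages,
                    void (*emit)(VkPipelineStageFlags src, VkPipelineStageFlags dst,
                                 VkAccessFlags src_access, VkAccessFlags dst_access))
{
   if (flags & BARRIER_INDIRECT)
      emit(gfx_stages, VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
           VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_INDIRECT_COMMAND_READ_BIT);
   if (flags & BARRIER_VERTEX)
      emit(gfx_stages, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
           VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT);
   if (flags & BARRIER_INDEX)
      emit(gfx_stages, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
           VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_INDEX_READ_BIT);
}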
3648 struct zink_context *ctx = zink_context(pctx);
3658 ctx->memory_barrier = flags;
3665 struct zink_context *ctx = zink_context(pctx);
3669 zink_resource_image_barrier(ctx, res, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, 0, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
3670 zink_batch_reference_resource_rw(&ctx->batch, res, true);
3672 ctx->needs_present = res;
3674 ctx->batch.swapchain = res;
3727 struct zink_context *ctx = zink_context(pctx);
3735 for (unsigned i = 0; i < ctx->num_so_targets; i++) {
3736 if (ctx->so_targets[i]) {
3737 struct zink_resource *so = zink_resource(ctx->so_targets[i]->buffer);
3740 update_res_bind_count(ctx, so, false, true);
3743 pipe_so_target_reference(&ctx->so_targets[i], NULL);
3745 ctx->num_so_targets = 0;
3749 pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
3754 struct zink_resource *so = zink_resource(ctx->so_targets[i]->buffer);
3757 update_res_bind_count(ctx, so, false, false);
3760 for (unsigned i = num_targets; i < ctx->num_so_targets; i++) {
3761 if (ctx->so_targets[i]) {
3762 struct zink_resource *so = zink_resource(ctx->so_targets[i]->buffer);
3765 update_res_bind_count(ctx, so, false, true);
3768 pipe_so_target_reference(&ctx->so_targets[i], NULL);
3770 ctx->num_so_targets = num_targets;
3773 ctx->dirty_so_targets = true;
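/* Editorial sketch of the bind/unbind reference pattern above, using
 * gallium's real pipe_so_target_reference() helper (util/u_inlines.h), which
 * releases the old target and references the new one in a single call; the
 * slot/count names are illustrative, and the zink bind-count tracking from
 * the lines above is omitted. */
#include "util/u_inlines.h"

static void
bind_so_targets(struct pipe_stream_output_target **slots, unsigned *num_bound,
                unsigned num_targets, struct pipe_stream_output_target **targets)
{
   for (unsigned i = 0; i < num_targets; i++)
      pipe_so_target_reference(&slots[i], targets ? targets[i] : NULL);
   /* drop references held by slots beyond the new count */
   for (unsigned i = num_targets; i < *num_bound; i++)
      pipe_so_target_reference(&slots[i], NULL);
   *num_bound = num_targets;
}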
3778 zink_rebind_framebuffer(struct zink_context *ctx, struct zink_resource *res)
3780 if (!ctx->framebuffer)
3784 for (unsigned i = 0; i < ctx->fb_state.nr_cbufs; i++) {
3785 if (!ctx->fb_state.cbufs[i] ||
3786 zink_resource(ctx->fb_state.cbufs[i]->texture) != res)
3788 zink_rebind_ctx_surface(ctx, &ctx->fb_state.cbufs[i]);
3792 if (ctx->fb_state.zsbuf && zink_resource(ctx->fb_state.zsbuf->texture) != res) {
3793 zink_rebind_ctx_surface(ctx, &ctx->fb_state.zsbuf);
3798 did_rebind |= rebind_fb_state(ctx, res, false);
3803 zink_batch_no_rp(ctx);
3804 struct zink_framebuffer *fb = zink_get_framebuffer(ctx);
3805 ctx->fb_changed |= ctx->framebuffer != fb;
3806 ctx->framebuffer = fb;
3810 rebind_ubo(struct zink_context *ctx, enum pipe_shader_type shader, unsigned slot)
3812 struct zink_resource *res = update_descriptor_state_ubo(ctx, shader, slot,
3813 ctx->di.descriptor_res[ZINK_DESCRIPTOR_TYPE_UBO][shader][slot]);
3814 zink_screen(ctx->base.screen)->context_invalidate_descriptor_state(ctx, shader, ZINK_DESCRIPTOR_TYPE_UBO, slot, 1);
3819 rebind_ssbo(struct zink_context *ctx, enum pipe_shader_type shader, unsigned slot)
3821 const struct pipe_shader_buffer *ssbo = &ctx->ssbos[shader][slot];
3827 update_descriptor_state_ssbo(ctx, shader, slot, res);
3828 zink_screen(ctx->base.screen)->context_invalidate_descriptor_state(ctx, shader, ZINK_DESCRIPTOR_TYPE_SSBO, slot, 1);
3833 rebind_tbo(struct zink_context *ctx, enum pipe_shader_type shader, unsigned slot)
3835 struct zink_sampler_view *sampler_view = zink_sampler_view(ctx->sampler_views[shader][slot]);
3840 zink_batch_reference_bufferview(&ctx->batch, sampler_view->buffer_view);
3843 zink_buffer_view_reference(zink_screen(ctx->base.screen), &sampler_view->buffer_view, NULL);
3844 sampler_view->buffer_view = get_buffer_view(ctx, res, &bvci);
3845 update_descriptor_state_sampler(ctx, shader, slot, res);
3846 zink_screen(ctx->base.screen)->context_invalidate_descriptor_state(ctx, shader, ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW, slot, 1);
3851 rebind_ibo(struct zink_context *ctx, enum pipe_shader_type shader, unsigned slot)
3853 struct zink_image_view *image_view = &ctx->image_views[shader][slot];
3859 zink_batch_reference_bufferview(&ctx->batch, image_view->buffer_view);
3862 zink_buffer_view_reference(zink_screen(ctx->base.screen), &image_view->buffer_view, NULL);
3863 if (!zink_resource_object_init_storage(ctx, res)) {
3867 image_view->buffer_view = get_buffer_view(ctx, res, &bvci);
3871 update_descriptor_state_image(ctx, shader, slot, res);
3872 zink_screen(ctx->base.screen)->context_invalidate_descriptor_state(ctx, shader, ZINK_DESCRIPTOR_TYPE_IMAGE, slot, 1);
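/* Editorial sketch of what the rebind helpers above ultimately refresh: a
 * VkBufferView is baked against a specific VkBuffer, so replacing a buffer's
 * backing storage invalidates the view and it must be recreated (core
 * Vulkan 1.0; the real driver refcounts views through a cache instead of
 * destroying them eagerly). */
#include <vulkan/vulkan.h>

static VkBufferView
remake_buffer_view(VkDevice dev, VkBufferView old_view, VkBuffer new_buffer,
                   VkFormat format, VkDeviceSize offset, VkDeviceSize range)
{
   if (old_view)
      vkDestroyBufferView(dev, old_view, NULL);
   VkBufferViewCreateInfo bvci = {
      .sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
      .buffer = new_buffer,
      .format = format,
      .offset = offset,
      .range = range,
   };
   VkBufferView view = VK_NULL_HANDLE;
   vkCreateBufferView(dev, &bvci, NULL, &view);
   return view;
}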
3877 rebind_buffer(struct zink_context *ctx, struct zink_resource *res, uint32_t rebind_mask, const unsigned expected_num_rebinds)
3886 if ((rebind_mask & BITFIELD_BIT(TC_BINDING_STREAMOUT_BUFFER)) || (!rebind_mask && res->so_bind_count && ctx->num_so_targets)) {
3887 for (unsigned i = 0; i < ctx->num_so_targets; i++) {
3888 if (ctx->so_targets[i]) {
3889 struct zink_resource *so = zink_resource(ctx->so_targets[i]->buffer);
3891 ctx->dirty_so_targets = true;
3903 if (ctx->vertex_buffers[slot].buffer.resource != &res->base.b) //wrong context
3908 ctx->vertex_buffers_dirty = true;
3919 if (&res->base.b != ctx->ubos[shader][slot].buffer) //wrong context
3921 rebind_ubo(ctx, shader, slot);
3934 struct pipe_shader_buffer *ssbo = &ctx->ssbos[shader][slot];
3937 rebind_ssbo(ctx, shader, slot);
3938 has_write |= (ctx->writable_ssbos[shader] & BITFIELD64_BIT(slot)) != 0;
3950 struct zink_sampler_view *sampler_view = zink_sampler_view(ctx->sampler_views[shader][slot]);
3953 rebind_tbo(ctx, shader, slot);
3966 for (unsigned slot = 0; num_image_rebinds_remaining && slot < ctx->di.num_images[shader]; slot++) {
3967 struct zink_resource *cres = ctx->di.descriptor_res[ZINK_DESCRIPTOR_TYPE_IMAGE][shader][slot];
3971 rebind_ibo(ctx, shader, slot);
3972 const struct zink_image_view *image_view = &ctx->image_views[shader][slot];
3980 zink_batch_resource_usage_set(&ctx->batch, res, has_write);
3985 zink_copy_buffer(struct zink_context *ctx, struct zink_resource *dst, struct zink_resource *src,
3993 struct zink_batch *batch = &ctx->batch;
3995 zink_resource_buffer_barrier(ctx, src, VK_ACCESS_TRANSFER_READ_BIT, 0);
3996 zink_resource_buffer_barrier(ctx, dst, VK_ACCESS_TRANSFER_WRITE_BIT, 0);
3997 VkCommandBuffer cmdbuf = zink_get_cmdbuf(ctx, src, dst);
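/* Once both barriers are emitted and a command buffer is chosen above, the
 * copy itself is a single vkCmdCopyBuffer; a minimal sketch: */
#include <vulkan/vulkan.h>

static void
copy_buffer(VkCommandBuffer cmdbuf, VkBuffer src, VkBuffer dst,
            VkDeviceSize src_offset, VkDeviceSize dst_offset, VkDeviceSize size)
{
   VkBufferCopy region = {
      .srcOffset = src_offset,
      .dstOffset = dst_offset,
      .size = size,
   };
   vkCmdCopyBuffer(cmdbuf, src, dst, 1, &region);
}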
4004 zink_copy_image_buffer(struct zink_context *ctx, struct zink_resource *dst, struct zink_resource *src,
4010 struct zink_batch *batch = &ctx->batch;
4017 if (!zink_kopper_acquire(ctx, img, UINT64_MAX))
4020 zink_resource_image_barrier(ctx, img, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 0, 0);
4021 zink_resource_buffer_barrier(ctx, buf, VK_ACCESS_TRANSFER_READ_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);
4024 needs_present_readback = zink_kopper_acquire_readback(ctx, img);
4025 zink_resource_image_barrier(ctx, img, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, 0, 0);
4026 zink_resource_buffer_barrier(ctx, buf, VK_ACCESS_TRANSFER_WRITE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);
4071 ctx->batch.state->cmdbuf :
4072 buf2img ? zink_get_cmdbuf(ctx, buf, img) : zink_get_cmdbuf(ctx, img, buf);
4108 zink_kopper_present_readback(ctx, img);
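/* Editorial sketch of the buffer<->image transfer the function above sets up:
 * one VkBufferImageCopy region drives both directions, and only the entry
 * point and the expected image layout flip (TRANSFER_DST for upload,
 * TRANSFER_SRC for readback). Swapchain acquire/readback handling is omitted. */
#include <vulkan/vulkan.h>
#include <stdbool.h>

static void
copy_image_buffer(VkCommandBuffer cmdbuf, bool buf2img, VkBuffer buf, VkImage img,
                  uint32_t width, uint32_t height)
{
   VkBufferImageCopy region = {
      .imageSubresource = {
         .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
         .layerCount = 1,
      },
      .imageExtent = { width, height, 1 },
   };
   if (buf2img)
      vkCmdCopyBufferToImage(cmdbuf, buf, img,
                             VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
   else
      vkCmdCopyImageToBuffer(cmdbuf, img,
                             VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, buf, 1, &region);
}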
4120 struct zink_context *ctx = zink_context(pctx);
4135 zink_fb_clears_apply_or_discard(ctx, pdst, (struct u_rect){dstx, dstx + src_box->width, dsty, dsty + src_box->height}, false);
4136 zink_fb_clears_apply_region(ctx, psrc, zink_rect_from_box(src_box));
4205 struct zink_batch *batch = &ctx->batch;
4206 zink_resource_setup_transfer_layouts(ctx, src, dst);
4207 VkCommandBuffer cmdbuf = zink_get_cmdbuf(ctx, src, dst);
4216 zink_copy_buffer(ctx, dst, src, dstx, src_box->x, src_box->width);
4218 zink_copy_image_buffer(ctx, dst, src, dst_level, dstx, dsty, dstz, src_level, src_box, 0);
4224 struct zink_context *ctx = zink_context(pctx);
4230 zink_flush_queue(ctx);
4236 zink_batch_add_wait_semaphore(&ctx->batch, sem);
4238 check_device_lost(ctx);
4245 rebind_image(struct zink_context *ctx, struct zink_resource *res)
4247 zink_rebind_framebuffer(ctx, res);
4252 for (unsigned j = 0; j < ctx->di.num_sampler_views[i]; j++) {
4253 struct zink_sampler_view *sv = zink_sampler_view(ctx->sampler_views[i][j]);
4256 zink_rebind_surface(ctx, &psurf);
4258 zink_screen(ctx->base.screen)->context_invalidate_descriptor_state(ctx, i, ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW, j, 1);
4259 update_descriptor_state_sampler(ctx, i, j, res);
4265 for (unsigned j = 0; j < ctx->di.num_images[i]; j++) {
4266 if (zink_resource(ctx->image_views[i][j].base.resource) == res) {
4267 zink_screen(ctx->base.screen)->context_invalidate_descriptor_state(ctx, i, ZINK_DESCRIPTOR_TYPE_IMAGE, j, 1);
4268 update_descriptor_state_image(ctx, i, j, res);
4269 _mesa_set_add(ctx->need_barriers[i == PIPE_SHADER_COMPUTE], res);
4276 zink_resource_rebind(struct zink_context *ctx, struct zink_resource *res)
4281 return rebind_buffer(ctx, res, 0, 0) == res->bind_count[0] + res->bind_count[1];
4283 rebind_image(ctx, res);
4288 zink_rebind_all_buffers(struct zink_context *ctx)
4290 struct zink_batch *batch = &ctx->batch;
4291 ctx->vertex_buffers_dirty = ctx->gfx_pipeline_state.vertex_buffers_enabled_mask > 0;
4292 ctx->dirty_so_targets = ctx->num_so_targets > 0;
4293 if (ctx->num_so_targets)
4294 zink_resource_buffer_barrier(ctx, zink_resource(ctx->dummy_xfb_buffer),
4297 for (unsigned slot = 0; slot < ctx->di.num_ubos[shader]; slot++) {
4298 struct zink_resource *res = rebind_ubo(ctx, shader, slot);
4302 for (unsigned slot = 0; slot < ctx->di.num_sampler_views[shader]; slot++) {
4303 struct zink_resource *res = rebind_tbo(ctx, shader, slot);
4307 for (unsigned slot = 0; slot < ctx->di.num_ssbos[shader]; slot++) {
4308 struct zink_resource *res = rebind_ssbo(ctx, shader, slot);
4310 zink_batch_resource_usage_set(batch, res, (ctx->writable_ssbos[shader] & BITFIELD64_BIT(slot)) != 0);
4312 for (unsigned slot = 0; slot < ctx->di.num_images[shader]; slot++) {
4313 struct zink_resource *res = rebind_ibo(ctx, shader, slot);
4315 zink_batch_resource_usage_set(batch, res, (ctx->image_views[shader][slot].base.access & PIPE_IMAGE_ACCESS_WRITE) != 0);
4321 zink_rebind_all_images(struct zink_context *ctx)
4323 rebind_fb_state(ctx, NULL, false);
4325 for (unsigned j = 0; j < ctx->di.num_sampler_views[i]; j++) {
4326 struct zink_sampler_view *sv = zink_sampler_view(ctx->sampler_views[i][j]);
4332 zink_rebind_surface(ctx, &psurf);
4334 zink_screen(ctx->base.screen)->context_invalidate_descriptor_state(ctx, i, ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW, j, 1);
4335 update_descriptor_state_sampler(ctx, i, j, res);
4338 for (unsigned j = 0; j < ctx->di.num_images[i]; j++) {
4339 struct zink_image_view *image_view = &ctx->image_views[i][j];
4343 if (ctx->image_views[i][j].surface->obj != res->obj) {
4344 zink_surface_reference(zink_screen(ctx->base.screen), &image_view->surface, NULL);
4345 image_view->surface = create_image_surface(ctx, &image_view->base, i == PIPE_SHADER_COMPUTE);
4346 zink_screen(ctx->base.screen)->context_invalidate_descriptor_state(ctx, i, ZINK_DESCRIPTOR_TYPE_IMAGE, j, 1);
4347 update_descriptor_state_image(ctx, i, j, res);
4348 _mesa_set_add(ctx->need_barriers[i == PIPE_SHADER_COMPUTE], res);
4361 struct zink_context *ctx = zink_context(pctx);
4371 zink_batch_reference_resource(&ctx->batch, d);
4376 if (num_rebinds && rebind_buffer(ctx, d, rebind_mask, num_rebinds) < num_rebinds)
4377 ctx->buffer_rebind_counter = p_atomic_inc_return(&screen->buffer_rebind_counter);
4422 struct zink_context *ctx = rzalloc(NULL, struct zink_context);
4424 if (!ctx)
4427 ctx->flags = flags;
4428 ctx->pipeline_changed[0] = ctx->pipeline_changed[1] = true;
4429 ctx->gfx_pipeline_state.dirty = true;
4430 ctx->gfx_pipeline_state.dyn_state2.vertices_per_patch = 1;
4431 ctx->gfx_pipeline_state.uses_dynamic_stride = screen->info.have_EXT_extended_dynamic_state ||
4433 ctx->compute_pipeline_state.dirty = true;
4434 ctx->fb_changed = ctx->rp_changed = true;
4435 ctx->gfx_pipeline_state.gfx_prim_mode = PIPE_PRIM_MAX;
4437 zink_init_draw_functions(ctx, screen);
4438 zink_init_grid_functions(ctx);
4440 ctx->base.screen = pscreen;
4441 ctx->base.priv = priv;
4443 ctx->base.destroy = zink_context_destroy;
4444 ctx->base.get_device_reset_status = zink_get_device_reset_status;
4445 ctx->base.set_device_reset_callback = zink_set_device_reset_callback;
4447 zink_context_state_init(&ctx->base);
4449 ctx->base.create_sampler_state = zink_create_sampler_state;
4450 ctx->base.bind_sampler_states = zink_bind_sampler_states;
4451 ctx->base.delete_sampler_state = zink_delete_sampler_state;
4453 ctx->base.create_sampler_view = zink_create_sampler_view;
4454 ctx->base.set_sampler_views = zink_set_sampler_views;
4455 ctx->base.sampler_view_destroy = zink_sampler_view_destroy;
4456 ctx->base.get_sample_position = zink_get_sample_position;
4457 ctx->base.set_sample_locations = zink_set_sample_locations;
4459 zink_program_init(ctx);
4461 ctx->base.set_polygon_stipple = zink_set_polygon_stipple;
4462 ctx->base.set_vertex_buffers = zink_set_vertex_buffers;
4463 ctx->base.set_viewport_states = zink_set_viewport_states;
4464 ctx->base.set_scissor_states = zink_set_scissor_states;
4465 ctx->base.set_inlinable_constants = zink_set_inlinable_constants;
4466 ctx->base.set_constant_buffer = zink_set_constant_buffer;
4467 ctx->base.set_shader_buffers = zink_set_shader_buffers;
4468 ctx->base.set_shader_images = zink_set_shader_images;
4469 ctx->base.set_framebuffer_state = zink_set_framebuffer_state;
4470 ctx->base.set_stencil_ref = zink_set_stencil_ref;
4471 ctx->base.set_clip_state = zink_set_clip_state;
4472 ctx->base.set_blend_color = zink_set_blend_color;
4473 ctx->base.set_tess_state = zink_set_tess_state;
4474 ctx->base.set_patch_vertices = zink_set_patch_vertices;
4476 ctx->base.set_sample_mask = zink_set_sample_mask;
4477 ctx->gfx_pipeline_state.sample_mask = UINT32_MAX;
4479 ctx->base.clear = zink_clear;
4480 ctx->base.clear_texture = zink_clear_texture;
4481 ctx->base.clear_buffer = zink_clear_buffer;
4482 ctx->base.clear_render_target = zink_clear_render_target;
4483 ctx->base.clear_depth_stencil = zink_clear_depth_stencil;
4485 ctx->base.create_fence_fd = zink_create_fence_fd;
4486 ctx->base.fence_server_sync = zink_fence_server_sync;
4487 ctx->base.fence_server_signal = zink_fence_server_signal;
4488 ctx->base.flush = zink_flush;
4489 ctx->base.memory_barrier = zink_memory_barrier;
4490 ctx->base.texture_barrier = zink_texture_barrier;
4491 ctx->base.evaluate_depth_buffer = zink_evaluate_depth_buffer;
4493 ctx->base.resource_commit = zink_resource_commit;
4494 ctx->base.resource_copy_region = zink_resource_copy_region;
4495 ctx->base.blit = zink_blit;
4496 ctx->base.create_stream_output_target = zink_create_stream_output_target;
4497 ctx->base.stream_output_target_destroy = zink_stream_output_target_destroy;
4499 ctx->base.set_stream_output_targets = zink_set_stream_output_targets;
4500 ctx->base.flush_resource = zink_flush_resource;
4502 ctx->base.emit_string_marker = zink_emit_string_marker;
4504 zink_context_surface_init(&ctx->base);
4505 zink_context_resource_init(&ctx->base);
4506 zink_context_query_init(&ctx->base);
4508 list_inithead(&ctx->query_pools);
4509 _mesa_set_init(&ctx->update_barriers[0][0], ctx, _mesa_hash_pointer, _mesa_key_pointer_equal);
4510 _mesa_set_init(&ctx->update_barriers[1][0], ctx, _mesa_hash_pointer, _mesa_key_pointer_equal);
4511 _mesa_set_init(&ctx->update_barriers[0][1], ctx, _mesa_hash_pointer, _mesa_key_pointer_equal);
4512 _mesa_set_init(&ctx->update_barriers[1][1], ctx, _mesa_hash_pointer, _mesa_key_pointer_equal);
4513 ctx->need_barriers[0] = &ctx->update_barriers[0][0];
4514 ctx->need_barriers[1] = &ctx->update_barriers[1][0];
4516 util_dynarray_init(&ctx->free_batch_states, ctx);
4518 ctx->gfx_pipeline_state.have_EXT_extended_dynamic_state = screen->info.have_EXT_extended_dynamic_state;
4519 ctx->gfx_pipeline_state.have_EXT_extended_dynamic_state2 = screen->info.have_EXT_extended_dynamic_state2;
4520 ctx->gfx_pipeline_state.extendedDynamicState2PatchControlPoints = screen->info.dynamic_state2_feats.extendedDynamicState2PatchControlPoints;
4522 slab_create_child(&ctx->transfer_pool, &screen->transfer_pool);
4523 slab_create_child(&ctx->transfer_pool_unsync, &screen->transfer_pool);
4525 ctx->base.stream_uploader = u_upload_create_default(&ctx->base);
4526 ctx->base.const_uploader = u_upload_create_default(&ctx->base);
4527 for (int i = 0; i < ARRAY_SIZE(ctx->fb_clears); i++)
4528 util_dynarray_init(&ctx->fb_clears[i].clears, ctx);
4531 ctx->blitter = util_blitter_create(&ctx->base);
4532 if (!ctx->blitter)
4536 ctx->gfx_pipeline_state.shader_keys.last_vertex.key.vs_base.last_vertex_stage = true;
4537 ctx->last_vertex_stage_dirty = true;
4538 ctx->gfx_pipeline_state.shader_keys.key[PIPE_SHADER_TESS_CTRL].key.tcs.patch_vertices = 1;
4539 ctx->gfx_pipeline_state.shader_keys.key[PIPE_SHADER_VERTEX].size = sizeof(struct zink_vs_key_base);
4540 ctx->gfx_pipeline_state.shader_keys.key[PIPE_SHADER_TESS_EVAL].size = sizeof(struct zink_vs_key_base);
4541 ctx->gfx_pipeline_state.shader_keys.key[PIPE_SHADER_TESS_CTRL].size = sizeof(struct zink_tcs_key);
4542 ctx->gfx_pipeline_state.shader_keys.key[PIPE_SHADER_GEOMETRY].size = sizeof(struct zink_vs_key_base);
4543 ctx->gfx_pipeline_state.shader_keys.key[PIPE_SHADER_FRAGMENT].size = sizeof(struct zink_fs_key);
4544 _mesa_hash_table_init(&ctx->compute_program_cache, ctx, _mesa_hash_pointer, _mesa_key_pointer_equal);
4545 _mesa_hash_table_init(&ctx->framebuffer_cache, ctx, hash_framebuffer_imageless, equals_framebuffer_imageless);
4546 if (!zink_init_render_pass(ctx))
4548 _mesa_set_init(&ctx->rendering_state_cache, ctx, hash_rendering_state, equals_rendering_state);
4549 ctx->dynamic_fb.info.pColorAttachments = ctx->dynamic_fb.attachments;
4550 ctx->dynamic_fb.info.sType = VK_STRUCTURE_TYPE_RENDERING_INFO;
4551 for (unsigned i = 0; i < ARRAY_SIZE(ctx->dynamic_fb.attachments); i++) {
4552 VkRenderingAttachmentInfo *att = &ctx->dynamic_fb.attachments[i];
4557 ctx->gfx_pipeline_state.rendering_info.sType = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO;
4558 ctx->gfx_pipeline_state.rendering_info.pColorAttachmentFormats = ctx->gfx_pipeline_state.rendering_formats;
4562 ctx->dummy_vertex_buffer = pipe_buffer_create(&screen->base,
4564 if (!ctx->dummy_vertex_buffer)
4566 ctx->dummy_xfb_buffer = pipe_buffer_create(&screen->base,
4568 if (!ctx->dummy_xfb_buffer)
4570 for (unsigned i = 0; i < ARRAY_SIZE(ctx->dummy_surface); i++) {
4573 ctx->dummy_surface[i] = zink_surface_create_null(ctx, PIPE_TEXTURE_2D, 1024, 1024, BITFIELD_BIT(i));
4574 if (!ctx->dummy_surface[i])
4577 VkBufferViewCreateInfo bvci = create_bvci(ctx, zink_resource(ctx->dummy_vertex_buffer), PIPE_FORMAT_R8G8B8A8_UNORM, 0, sizeof(data));
4578 ctx->dummy_bufferview = get_buffer_view(ctx, zink_resource(ctx->dummy_vertex_buffer), &bvci);
4579 if (!ctx->dummy_bufferview)
4582 if (!zink_descriptor_layouts_init(ctx))
4585 if (!screen->descriptors_init(ctx)) {
4587 if (!screen->descriptors_init(ctx))
4591 ctx->base.create_texture_handle = zink_create_texture_handle;
4592 ctx->base.delete_texture_handle = zink_delete_texture_handle;
4593 ctx->base.make_texture_handle_resident = zink_make_texture_handle_resident;
4594 ctx->base.create_image_handle = zink_create_image_handle;
4595 ctx->base.delete_image_handle = zink_delete_image_handle;
4596 ctx->base.make_image_handle_resident = zink_make_image_handle_resident;
4598 _mesa_hash_table_init(&ctx->di.bindless[i].img_handles, ctx, _mesa_hash_pointer, _mesa_key_pointer_equal);
4599 _mesa_hash_table_init(&ctx->di.bindless[i].tex_handles, ctx, _mesa_hash_pointer, _mesa_key_pointer_equal);
4602 util_idalloc_init(&ctx->di.bindless[i].tex_slots, ZINK_MAX_BINDLESS_HANDLES);
4603 util_idalloc_alloc(&ctx->di.bindless[i].tex_slots);
4604 util_idalloc_init(&ctx->di.bindless[i].img_slots, ZINK_MAX_BINDLESS_HANDLES);
4605 util_idalloc_alloc(&ctx->di.bindless[i].img_slots);
4606 ctx->di.bindless[i].buffer_infos = malloc(sizeof(VkBufferView) * ZINK_MAX_BINDLESS_HANDLES);
4607 ctx->di.bindless[i].img_infos = malloc(sizeof(VkDescriptorImageInfo) * ZINK_MAX_BINDLESS_HANDLES);
4608 util_dynarray_init(&ctx->di.bindless[i].updates, NULL);
4609 util_dynarray_init(&ctx->di.bindless[i].resident, NULL);
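/* Editorial note on the bindless setup above: util_idalloc_alloc() is called
 * once immediately after util_idalloc_init(), which appears to burn slot 0 so
 * that a bindless handle of 0 is never handed to an application and can serve
 * as an "invalid handle" sentinel. This is an inference from the call
 * pattern, not a quoted comment from the source. */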
4613 zink_start_batch(ctx, &ctx->batch);
4614 if (!ctx->batch.state)
4618 pipe_buffer_write_nooverlap(&ctx->base, ctx->dummy_vertex_buffer, 0, sizeof(data), data);
4619 pipe_buffer_write_nooverlap(&ctx->base, ctx->dummy_xfb_buffer, 0, sizeof(data), data);
4624 update_descriptor_state_ubo(ctx, i, j, NULL);
4625 update_descriptor_state_sampler(ctx, i, j, NULL);
4626 update_descriptor_state_ssbo(ctx, i, j, NULL);
4627 update_descriptor_state_image(ctx, i, j, NULL);
4631 ctx->di.fbfetch.imageView = zink_csurface(ctx->dummy_surface[0])->image_view;
4633 reapply_color_write(ctx);
4637 zink_select_draw_vbo(ctx);
4638 zink_select_launch_grid(ctx);
4644 VKCTX(CmdSetPatchControlPointsEXT)(ctx->batch.state->cmdbuf, 1);
4647 return &ctx->base;
4650 struct threaded_context *tc = (struct threaded_context*)threaded_context_create(&ctx->base, &screen->transfer_pool,
4658 &ctx->tc);
4660 if (tc && (struct zink_context*)tc != ctx) {
4662 ctx->base.set_context_param = zink_set_context_param;
4668 if (ctx)
4669 zink_context_destroy(&ctx->base);
4676 struct zink_context *ctx = zink_context(pctx);
4677 struct zink_screen *screen = zink_screen(ctx->base.screen);