Lines Matching defs:res (each entry is the matched source line, prefixed with its line number in the file)
77 check_resource_for_batch_ref(struct zink_context *ctx, struct zink_resource *res)
79 if (!zink_resource_has_binds(res)) {
86 if (!res->obj->dt && (res->obj->bo->reads || res->obj->bo->writes))
87 zink_batch_reference_resource_rw(&ctx->batch, res, !!res->obj->bo->writes);
89 zink_batch_reference_resource(&ctx->batch, res);
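
check_resource_for_batch_ref() is the lifetime guard used throughout this file: once a resource drops to zero binds, nothing else pins it across the in-flight batch, so it gets referenced directly, via the read/write path when the buffer object has pending GPU access. A compilable sketch of the same decision; the fake_* types and batch_reference_*() helpers are illustrative stand-ins, not the driver's API:

#include <stdbool.h>
#include <stdio.h>

struct fake_bo { bool reads, writes; };

struct fake_resource {
   struct fake_bo bo;
   bool is_display_target; /* res->obj->dt in the real code */
   unsigned bind_count;    /* zink_resource_has_binds() summarizes this */
};

static void
batch_reference_rw(struct fake_resource *res, bool write)
{
   printf("pinned to batch (write=%d)\n", write);
}

static void
batch_reference(struct fake_resource *res)
{
   printf("pinned to batch\n");
}

/* A resource with no binds left must be pinned to the current batch so it
 * survives until the GPU is done; pending reads/writes pick the rw path. */
static void
check_resource_for_batch_ref(struct fake_resource *res)
{
   if (res->bind_count)
      return; /* still bound somewhere; binds already keep it alive */
   if (!res->is_display_target && (res->bo.reads || res->bo.writes))
      batch_reference_rw(res, res->bo.writes);
   else
      batch_reference(res);
}

int
main(void)
{
   struct fake_resource res = { .bo = { .reads = true } };
   check_resource_for_batch_ref(&res);
   return 0;
}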
483 get_layout_for_binding(const struct zink_context *ctx, struct zink_resource *res, enum zink_descriptor_type type, bool is_compute)
485 if (res->obj->is_buffer)
489 return zink_descriptor_util_image_layout_eval(ctx, res, is_compute);
542 update_descriptor_state_ubo(struct zink_context *ctx, enum pipe_shader_type shader, unsigned slot, struct zink_resource *res)
547 ctx->di.descriptor_res[type][shader][slot] = res;
549 if (res) {
550 ctx->di.ubos[shader][slot].buffer = res->obj->buffer;
559 if (res)
564 return res;
568 update_descriptor_state_ssbo(struct zink_context *ctx, enum pipe_shader_type shader, unsigned slot, struct zink_resource *res)
573 ctx->di.descriptor_res[type][shader][slot] = res;
575 if (res) {
576 ctx->di.ssbos[shader][slot].buffer = res->obj->buffer;
583 return res;
587 update_descriptor_state_sampler(struct zink_context *ctx, enum pipe_shader_type shader, unsigned slot, struct zink_resource *res)
592 ctx->di.descriptor_res[type][shader][slot] = res;
593 if (res) {
594 if (res->obj->is_buffer) {
601 ctx->di.textures[shader][slot].imageLayout = get_layout_for_binding(ctx, res, type, shader == PIPE_SHADER_COMPUTE);
632 return res;
636 update_descriptor_state_image(struct zink_context *ctx, enum pipe_shader_type shader, unsigned slot, struct zink_resource *res)
641 ctx->di.descriptor_res[type][shader][slot] = res;
642 if (res) {
643 if (res->obj->is_buffer) {
668 return res;
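
The update_descriptor_state_{ubo,ssbo,sampler,image}() family shares one shape: record which resource currently backs a (stage, slot) pair in ctx->di.descriptor_res, then pre-fill the Vulkan info struct that the eventual descriptor write will consume, falling back to a null descriptor when res is NULL; the sampler and image variants additionally evaluate an image layout via get_layout_for_binding(). A sketch of the UBO variant under stand-in types, with fake_buffer_info playing the role of VkDescriptorBufferInfo:

#include <stddef.h>

enum { MAX_STAGES = 6, MAX_SLOTS = 32 };

struct fake_buffer_info { void *buffer; size_t offset, range; };
struct fake_resource { void *buffer_handle; };

struct descriptor_state {
   struct fake_resource *res[MAX_STAGES][MAX_SLOTS];
   struct fake_buffer_info ubos[MAX_STAGES][MAX_SLOTS];
};

static struct fake_resource *
update_descriptor_state_ubo(struct descriptor_state *di, unsigned stage,
                            unsigned slot, struct fake_resource *res,
                            size_t offset, size_t range)
{
   di->res[stage][slot] = res;
   if (res) {
      /* pre-fill the info the descriptor write will consume */
      di->ubos[stage][slot].buffer = res->buffer_handle;
      di->ubos[stage][slot].offset = offset;
      di->ubos[stage][slot].range = range;
   } else {
      /* no buffer bound: the real driver substitutes a null descriptor */
      di->ubos[stage][slot] = (struct fake_buffer_info){0};
   }
   return res;
}

int
main(void)
{
   struct descriptor_state di = {0};
   struct fake_resource res = { .buffer_handle = &di };
   (void)update_descriptor_state_ubo(&di, 0, 0, &res, 0, 65536);
   return 0;
}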
779 create_bvci(struct zink_context *ctx, struct zink_resource *res, enum pipe_format format, uint32_t offset, uint32_t range)
789 bvci.buffer = res->obj->storage_buffer ? res->obj->storage_buffer : res->obj->buffer;
791 bvci.buffer = res->obj->buffer;
795 bvci.range = !offset && range == res->base.b.width0 ? VK_WHOLE_SIZE : range;
800 if (bvci.offset + bvci.range >= res->base.b.width0)
804 if (bvci.range == VK_WHOLE_SIZE && res->base.b.width0 > clamp)
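
create_bvci() builds the VkBufferViewCreateInfo, and most of its visible logic is range selection: a view at offset 0 covering the whole buffer becomes VK_WHOLE_SIZE, a range reaching the end of the buffer is normalized, and whole-size views of buffers larger than the device's texel-buffer limit are clamped. The listing shows only the conditions, not every true-branch, so the following sketch is a plausible reading under those assumptions, with WHOLE_SIZE and the clamp parameter standing in for VK_WHOLE_SIZE and the maxTexelBufferElements-derived limit:

#include <stdint.h>

#define WHOLE_SIZE UINT64_MAX /* stand-in for VK_WHOLE_SIZE */

static uint64_t
view_range(uint64_t offset, uint64_t range, uint64_t buffer_size,
           uint64_t clamp)
{
   /* offset 0 + full width: express it as a whole-size view */
   range = (!offset && range == buffer_size) ? WHOLE_SIZE : range;
   /* assumption: a range reaching the end of the buffer is also
    * promoted to whole-size (the listing shows only the test) */
   if (range != WHOLE_SIZE && offset + range >= buffer_size)
      range = WHOLE_SIZE;
   /* whole-size views of oversized buffers fall back to the clamp */
   if (range == WHOLE_SIZE && buffer_size > clamp)
      range = clamp;
   return range;
}

int
main(void)
{
   return view_range(0, 256, 256, 1024) == WHOLE_SIZE ? 0 : 1;
}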
811 get_buffer_view(struct zink_context *ctx, struct zink_resource *res, VkBufferViewCreateInfo *bvci)
817 simple_mtx_lock(&res->bufferview_mtx);
818 struct hash_entry *he = _mesa_hash_table_search_pre_hashed(&res->bufferview_cache, hash, bvci);
835 pipe_resource_reference(&buffer_view->pres, &res->base.b);
840 _mesa_hash_table_insert_pre_hashed(&res->bufferview_cache, hash, &buffer_view->bvci, buffer_view);
843 simple_mtx_unlock(&res->bufferview_mtx);
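
get_buffer_view() is a classic lookup-or-create cache: hash the create-info once, then search and, on a miss, create and insert while holding the per-resource mutex, so two threads can never race to create duplicate views for one key. The real code uses Mesa's pre-hashed hash-table helpers (_mesa_hash_table_*_pre_hashed); the sketch below substitutes a linked list and a pthread mutex to stay self-contained:

#include <pthread.h>
#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

struct view_key { uint64_t buffer; uint32_t offset; uint32_t range; };

struct cached_view {
   uint32_t hash;
   struct view_key key;
   struct cached_view *next;
};

struct view_cache {
   pthread_mutex_t mtx;
   struct cached_view *head;
};

/* FNV-1a over the key bytes; the driver hashes the full
 * VkBufferViewCreateInfo in the same spirit. */
static uint32_t
hash_key(const struct view_key *k)
{
   const unsigned char *p = (const unsigned char *)k;
   uint32_t h = 2166136261u;
   for (size_t i = 0; i < sizeof(*k); i++)
      h = (h ^ p[i]) * 16777619u;
   return h;
}

/* Hash once, then search-or-insert under the cache mutex. */
static struct cached_view *
get_view(struct view_cache *cache, const struct view_key *key)
{
   uint32_t hash = hash_key(key);
   pthread_mutex_lock(&cache->mtx);
   struct cached_view *v = cache->head;
   for (; v; v = v->next)
      if (v->hash == hash && !memcmp(&v->key, key, sizeof(*key)))
         break;
   if (!v) {
      v = calloc(1, sizeof(*v));
      v->hash = hash;
      v->key = *key;
      v->next = cache->head;
      cache->head = v;
   }
   pthread_mutex_unlock(&cache->mtx);
   return v;
}

int
main(void)
{
   struct view_cache cache = { PTHREAD_MUTEX_INITIALIZER, NULL };
   struct view_key k = { .buffer = 1, .offset = 0, .range = 256 };
   return get_view(&cache, &k) == get_view(&cache, &k) ? 0 : 1;
}

The buffer-view destroy path (the paired lock/unlock at the hits just below) re-takes the same mutex before searching and removing the cache entry, with an early-unlock bail-out that appears to cover the case where a concurrent lookup revived the view between its refcount hitting zero and the lock being taken.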
889 struct zink_resource *res = zink_resource(pres);
911 if (zink_is_swapchain(res)) {
912 if (!zink_kopper_acquire(ctx, res, UINT64_MAX)) {
918 ivci = create_ivci(screen, res, &templ, state->target);
966 VkBufferViewCreateInfo bvci = create_bvci(ctx, res, state->format, state->u.buf.offset, state->u.buf.size);
967 sampler_view->buffer_view = get_buffer_view(ctx, res, &bvci);
980 struct zink_resource *res = zink_resource(buffer_view->pres);
981 simple_mtx_lock(&res->bufferview_mtx);
984 simple_mtx_unlock(&res->bufferview_mtx);
987 struct hash_entry *he = _mesa_hash_table_search_pre_hashed(&res->bufferview_cache, buffer_view->hash, &buffer_view->bvci);
989 _mesa_hash_table_remove(&res->bufferview_cache, he);
990 simple_mtx_unlock(&res->bufferview_mtx);
1091 update_res_bind_count(struct zink_context *ctx, struct zink_resource *res, bool is_compute, bool decrement)
1094 assert(res->bind_count[is_compute]);
1095 if (!--res->bind_count[is_compute])
1096 _mesa_set_remove_key(ctx->need_barriers[is_compute], res);
1097 check_resource_for_batch_ref(ctx, res);
1099 res->bind_count[is_compute]++;
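
update_res_bind_count() is the central bind refcount, kept separately for the gfx and compute domains; only the transition to zero does work: the resource no longer needs a deferred barrier in that domain and may need pinning to the batch. A sketch with stubbed-out cleanup hooks:

#include <assert.h>
#include <stdbool.h>

struct fake_resource {
   unsigned bind_count[2]; /* [0] = gfx binds, [1] = compute binds */
};

/* stand-ins for the barrier-set removal and batch-ref check */
static void
remove_from_barrier_set(struct fake_resource *res, bool is_compute)
{
   (void)res; (void)is_compute;
}

static void
check_resource_for_batch_ref(struct fake_resource *res)
{
   (void)res;
}

static void
update_res_bind_count(struct fake_resource *res, bool is_compute,
                      bool decrement)
{
   if (decrement) {
      /* an underflow here means an unbind without a matching bind */
      assert(res->bind_count[is_compute]);
      if (!--res->bind_count[is_compute]) {
         /* last bind in this domain: no deferred barrier is needed,
          * and the resource may need pinning to the current batch */
         remove_from_barrier_set(res, is_compute);
         check_resource_for_batch_ref(res);
      }
   } else {
      res->bind_count[is_compute]++;
   }
}

int
main(void)
{
   struct fake_resource res = {0};
   update_res_bind_count(&res, false, false);
   update_res_bind_count(&res, false, true);
   return res.bind_count[0];
}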
1107 struct zink_resource *res = zink_resource(ctx->vertex_buffers[slot].buffer.resource);
1108 res->vbo_bind_count--;
1109 res->vbo_bind_mask &= ~BITFIELD_BIT(slot);
1110 if (!res->vbo_bind_count) {
1111 res->gfx_barrier &= ~VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
1112 res->barrier_access[0] &= ~VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
1114 update_res_bind_count(ctx, res, false, true);
1147 struct zink_resource *res = zink_resource(vb->buffer.resource);
1148 res->vbo_bind_mask |= BITFIELD_BIT(start_slot + i);
1149 res->vbo_bind_count++;
1150 res->gfx_barrier |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
1151 res->barrier_access[0] |= VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
1152 update_res_bind_count(ctx, res, false, false);
1155 zink_batch_resource_usage_set(&ctx->batch, res, false);
1157 zink_resource_buffer_barrier(ctx, res, VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
1159 res->obj->unordered_read = false;
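
Vertex-buffer binds layer a slot bitmask and a count on top of that refcount, and accumulate the pipeline-stage and access flags (VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT) that future barriers must cover; the flags are only dropped when the last vbo bind goes away. A sketch with numeric stand-ins for the Vulkan bits:

#include <assert.h>
#include <stdint.h>

/* stand-ins for VK_PIPELINE_STAGE_VERTEX_INPUT_BIT and
 * VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT */
#define STAGE_VERTEX_INPUT (1u << 2)
#define ACCESS_VATTR_READ  (1u << 1)

struct fake_resource {
   uint32_t vbo_bind_mask;   /* one bit per bound vertex-buffer slot */
   unsigned vbo_bind_count;
   uint32_t gfx_barrier;     /* stages future barriers must cover */
   uint32_t barrier_access;  /* access flags future barriers must cover */
};

static void
bind_vbo(struct fake_resource *res, unsigned slot)
{
   res->vbo_bind_mask |= 1u << slot;
   res->vbo_bind_count++;
   res->gfx_barrier |= STAGE_VERTEX_INPUT;
   res->barrier_access |= ACCESS_VATTR_READ;
}

static void
unbind_vbo(struct fake_resource *res, unsigned slot)
{
   assert(res->vbo_bind_mask & (1u << slot));
   res->vbo_bind_mask &= ~(1u << slot);
   /* the stage/access bits only drop with the last vbo bind; any
    * remaining bind still needs them for barrier generation */
   if (!--res->vbo_bind_count) {
      res->gfx_barrier &= ~STAGE_VERTEX_INPUT;
      res->barrier_access &= ~ACCESS_VATTR_READ;
   }
}

int
main(void)
{
   struct fake_resource res = {0};
   bind_vbo(&res, 0);
   bind_vbo(&res, 3);
   unbind_vbo(&res, 0);
   return res.gfx_barrier == STAGE_VERTEX_INPUT ? 0 : 1;
}

In the real bind path this bookkeeping is followed by zink_batch_resource_usage_set() and a VERTEX_ATTRIBUTE_READ buffer barrier, and unordered_read is cleared because the ordered command buffer now consumes the buffer.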
1238 unbind_descriptor_stage(struct zink_resource *res, enum pipe_shader_type pstage)
1240 if (!res->sampler_binds[pstage] && !res->image_binds[pstage])
1241 res->gfx_barrier &= ~zink_pipeline_flags_from_pipe_stage(pstage);
1245 unbind_buffer_descriptor_stage(struct zink_resource *res, enum pipe_shader_type pstage)
1247 if (!res->ubo_bind_mask[pstage] && !res->ssbo_bind_mask[pstage])
1248 unbind_descriptor_stage(res, pstage);
1252 unbind_ubo(struct zink_context *ctx, struct zink_resource *res, enum pipe_shader_type pstage, unsigned slot)
1254 if (!res)
1256 res->ubo_bind_mask[pstage] &= ~BITFIELD_BIT(slot);
1257 res->ubo_bind_count[pstage == PIPE_SHADER_COMPUTE]--;
1258 unbind_buffer_descriptor_stage(res, pstage);
1259 if (!res->ubo_bind_count[pstage == PIPE_SHADER_COMPUTE])
1260 res->barrier_access[pstage == PIPE_SHADER_COMPUTE] &= ~VK_ACCESS_UNIFORM_READ_BIT;
1261 update_res_bind_count(ctx, res, pstage == PIPE_SHADER_COMPUTE, true);
1288 struct zink_resource *res = zink_resource(ctx->ubos[shader][index].buffer);
1300 if (new_res != res) {
1301 unbind_ubo(ctx, res, shader, index);
1314 !!res != !!buffer || (res && res->obj->buffer != new_res->obj->buffer) ||
1337 if (res) {
1338 unbind_ubo(ctx, res, shader, index);
1357 unbind_descriptor_reads(struct zink_resource *res, enum pipe_shader_type pstage)
1359 if (!res->sampler_binds[pstage] && !res->image_binds[pstage])
1360 res->barrier_access[pstage == PIPE_SHADER_COMPUTE] &= ~VK_ACCESS_SHADER_READ_BIT;
1364 unbind_buffer_descriptor_reads(struct zink_resource *res, enum pipe_shader_type pstage)
1366 if (!res->ssbo_bind_count[pstage == PIPE_SHADER_COMPUTE])
1367 unbind_descriptor_reads(res, pstage);
1371 unbind_ssbo(struct zink_context *ctx, struct zink_resource *res, enum pipe_shader_type pstage, unsigned slot, bool writable)
1373 if (!res)
1375 res->ssbo_bind_mask[pstage] &= ~BITFIELD_BIT(slot);
1376 res->ssbo_bind_count[pstage == PIPE_SHADER_COMPUTE]--;
1377 unbind_buffer_descriptor_stage(res, pstage);
1378 unbind_buffer_descriptor_reads(res, pstage);
1379 update_res_bind_count(ctx, res, pstage == PIPE_SHADER_COMPUTE, true);
1381 res->write_bind_count[pstage == PIPE_SHADER_COMPUTE]--;
1382 if (!res->write_bind_count[pstage == PIPE_SHADER_COMPUTE])
1383 res->barrier_access[pstage == PIPE_SHADER_COMPUTE] &= ~VK_ACCESS_SHADER_WRITE_BIT;
1404 struct zink_resource *res = ssbo->buffer ? zink_resource(ssbo->buffer) : NULL;
1408 if (new_res != res) {
1409 unbind_ssbo(ctx, res, p_stage, i, was_writable);
1437 update = !!res;
1440 if (res) {
1441 unbind_ssbo(ctx, res, p_stage, i, was_writable);
1454 update_binds_for_samplerviews(struct zink_context *ctx, struct zink_resource *res, bool is_compute)
1456 VkImageLayout layout = get_layout_for_binding(ctx, res, ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW, is_compute);
1458 u_foreach_bit(slot, res->sampler_binds[PIPE_SHADER_COMPUTE]) {
1460 update_descriptor_state_sampler(ctx, PIPE_SHADER_COMPUTE, slot, res);
1466 u_foreach_bit(slot, res->sampler_binds[i]) {
1468 update_descriptor_state_sampler(ctx, i, slot, res);
1477 flush_pending_clears(struct zink_context *ctx, struct zink_resource *res)
1479 if (res->fb_binds && ctx->clears_enabled)
1480 zink_fb_clears_apply(ctx, &res->base.b);
1484 unbind_shader_image_counts(struct zink_context *ctx, struct zink_resource *res, bool is_compute, bool writable)
1486 update_res_bind_count(ctx, res, is_compute, true);
1488 res->write_bind_count[is_compute]--;
1489 res->image_bind_count[is_compute]--;
1491 if (!res->obj->is_buffer && !res->image_bind_count[is_compute] && res->bind_count[is_compute])
1492 update_binds_for_samplerviews(ctx, res, is_compute);
1496 check_for_layout_update(struct zink_context *ctx, struct zink_resource *res, bool is_compute)
1498 VkImageLayout layout = res->bind_count[is_compute] ? zink_descriptor_util_image_layout_eval(ctx, res, is_compute) : VK_IMAGE_LAYOUT_UNDEFINED;
1499 VkImageLayout other_layout = res->bind_count[!is_compute] ? zink_descriptor_util_image_layout_eval(ctx, res, !is_compute) : VK_IMAGE_LAYOUT_UNDEFINED;
1500 if (res->bind_count[is_compute] && layout && res->layout != layout)
1501 _mesa_set_add(ctx->need_barriers[is_compute], res);
1502 if (res->bind_count[!is_compute] && other_layout && (layout != other_layout || res->layout != other_layout))
1503 _mesa_set_add(ctx->need_barriers[!is_compute], res);
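
check_for_layout_update() reconciles image layouts across the two use domains: each domain with live binds evaluates the layout it needs, and any disagreement with the current layout, or between the domains, queues the resource in the corresponding need_barriers set for a deferred transition. A sketch with a stand-in layout enum; LAYOUT_UNDEFINED is zero, mirroring VK_IMAGE_LAYOUT_UNDEFINED, which is why a bare truth test works:

#include <stdbool.h>

enum layout { LAYOUT_UNDEFINED, LAYOUT_SHADER_READ_ONLY, LAYOUT_GENERAL };

struct fake_resource {
   unsigned bind_count[2]; /* [0] = gfx, [1] = compute */
   enum layout layout;     /* current image layout */
};

/* stand-in for zink_descriptor_util_image_layout_eval() */
static enum layout
eval_layout(const struct fake_resource *res, bool is_compute)
{
   (void)res; (void)is_compute;
   return LAYOUT_SHADER_READ_ONLY;
}

/* stand-in for _mesa_set_add(ctx->need_barriers[is_compute], res) */
static void
queue_barrier(struct fake_resource *res, bool is_compute)
{
   (void)res; (void)is_compute;
}

static void
check_for_layout_update(struct fake_resource *res, bool is_compute)
{
   enum layout layout = res->bind_count[is_compute] ?
                        eval_layout(res, is_compute) : LAYOUT_UNDEFINED;
   enum layout other_layout = res->bind_count[!is_compute] ?
                              eval_layout(res, !is_compute) : LAYOUT_UNDEFINED;
   /* this domain needs a transition */
   if (res->bind_count[is_compute] && layout && res->layout != layout)
      queue_barrier(res, is_compute);
   /* the other domain disagrees with the new or the current layout */
   if (res->bind_count[!is_compute] && other_layout &&
       (layout != other_layout || res->layout != other_layout))
      queue_barrier(res, !is_compute);
}

int
main(void)
{
   struct fake_resource res = { .bind_count = {1, 1},
                                .layout = LAYOUT_GENERAL };
   check_for_layout_update(&res, false);
   return 0;
}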
1514 struct zink_resource *res = zink_resource(image_view->base.resource);
1515 res->image_binds[stage] &= ~BITFIELD_BIT(slot);
1516 unbind_shader_image_counts(ctx, res, is_compute, image_view->base.access & PIPE_IMAGE_ACCESS_WRITE);
1517 if (!res->write_bind_count[is_compute])
1518 res->barrier_access[stage == PIPE_SHADER_COMPUTE] &= ~VK_ACCESS_SHADER_WRITE_BIT;
1521 unbind_buffer_descriptor_stage(res, stage);
1522 unbind_buffer_descriptor_reads(res, stage);
1527 unbind_descriptor_stage(res, stage);
1528 if (!res->image_bind_count[is_compute])
1529 check_for_layout_update(ctx, res, is_compute);
1541 struct zink_resource *res = zink_resource(view->resource);
1542 VkBufferViewCreateInfo bvci = create_bvci(ctx, res, view->format, view->u.buf.offset, view->u.buf.size);
1543 struct zink_buffer_view *buffer_view = get_buffer_view(ctx, res, &bvci);
1546 util_range_add(&res->base.b, &res->valid_buffer_range, view->u.buf.offset,
1552 finalize_image_bind(struct zink_context *ctx, struct zink_resource *res, bool is_compute)
1557 if (res->image_bind_count[is_compute] == 1 &&
1558 res->bind_count[is_compute] > 1)
1559 update_binds_for_samplerviews(ctx, res, is_compute);
1560 check_for_layout_update(ctx, res, is_compute);
1567 struct zink_resource *res = zink_resource(view->resource);
1569 enum pipe_texture_target target = res->base.b.target;
1577 if (depth < u_minify(res->base.b.depth0, view->u.tex.level)) {
1592 if (depth < res->base.b.array_size && depth == 1)
1597 VkImageViewCreateInfo ivci = create_ivci(screen, res, &tmpl, target);
1603 flush_pending_clears(ctx, res);
1619 struct zink_resource *res = zink_resource(images[i].resource);
1620 if (!zink_resource_object_init_storage(ctx, res)) {
1627 res->write_bind_count[p_stage == PIPE_SHADER_COMPUTE]++;
1633 res->gfx_barrier |= zink_pipeline_flags_from_pipe_stage(p_stage);
1634 res->barrier_access[p_stage == PIPE_SHADER_COMPUTE] |= access;
1639 update_res_bind_count(ctx, res, p_stage == PIPE_SHADER_COMPUTE, false);
1640 res->image_bind_count[p_stage == PIPE_SHADER_COMPUTE]++;
1645 zink_resource_buffer_barrier(ctx, res, access,
1646 res->gfx_barrier);
1651 res->image_bind_count[p_stage == PIPE_SHADER_COMPUTE]++;
1652 update_res_bind_count(ctx, res, p_stage == PIPE_SHADER_COMPUTE, false);
1656 finalize_image_bind(ctx, res, p_stage == PIPE_SHADER_COMPUTE);
1660 zink_batch_resource_usage_set(&ctx->batch, res,
1663 update_descriptor_state_image(ctx, p_stage, start_slot + i, res);
1665 res->obj->unordered_read = res->obj->unordered_write = false;
1667 res->obj->unordered_read = false;
1668 res->image_binds[p_stage] |= BITFIELD_BIT(start_slot + i);
1689 const struct zink_resource *res = zink_resource(sv->base.texture);
1690 if ((res->obj->is_buffer && zink_batch_usage_exists(sv->buffer_view->batch_uses)) ||
1691 (!res->obj->is_buffer && zink_batch_usage_exists(sv->image_view->batch_uses)))
1701 struct zink_resource *res = zink_resource(sv->base.texture);
1702 res->sampler_bind_count[stage == PIPE_SHADER_COMPUTE]--;
1704 update_res_bind_count(ctx, res, stage == PIPE_SHADER_COMPUTE, true);
1705 res->sampler_binds[stage] &= ~BITFIELD_BIT(slot);
1706 if (res->obj->is_buffer) {
1707 unbind_buffer_descriptor_stage(res, stage);
1708 unbind_buffer_descriptor_reads(res, stage);
1710 unbind_descriptor_stage(res, stage);
1711 unbind_descriptor_reads(res, stage);
1735 struct zink_resource *res = b ? zink_resource(b->base.texture) : NULL;
1737 if (!a || zink_resource(a->base.texture) != res) {
1740 update_res_bind_count(ctx, res, shader_type == PIPE_SHADER_COMPUTE, false);
1741 res->sampler_bind_count[shader_type == PIPE_SHADER_COMPUTE]++;
1742 res->gfx_barrier |= zink_pipeline_flags_from_pipe_stage(shader_type);
1743 res->barrier_access[shader_type == PIPE_SHADER_COMPUTE] |= VK_ACCESS_SHADER_READ_BIT;
1747 if (res->base.b.target == PIPE_BUFFER) {
1748 if (b->buffer_view->bvci.buffer != res->obj->buffer) {
1754 bvci.buffer = res->obj->buffer;
1755 struct zink_buffer_view *buffer_view = get_buffer_view(ctx, res, &bvci);
1764 zink_resource_buffer_barrier(ctx, res, VK_ACCESS_SHADER_READ_BIT,
1765 res->gfx_barrier);
1768 } else if (!res->obj->is_buffer) {
1769 if (res->obj != b->image_view->obj) {
1778 flush_pending_clears(ctx, res);
1783 check_for_layout_update(ctx, res, shader_type == PIPE_SHADER_COMPUTE);
1788 res->sampler_binds[shader_type] |= BITFIELD_BIT(start_slot + i);
1789 zink_batch_resource_usage_set(&ctx->batch, res, false);
1790 res->obj->unordered_read = false;
1801 update_descriptor_state_sampler(ctx, shader_type, start_slot + i, res);
1824 struct zink_resource *res = zink_resource(view->texture);
1837 bd->ds.is_buffer = res->base.b.target == PIPE_BUFFER;
1838 if (res->base.b.target == PIPE_BUFFER)
1863 struct zink_resource *res = zink_descriptor_surface_resource(ds);
1865 if (zink_resource_has_usage(res))
1869 if (zink_resource_has_usage(res))
1878 rebind_bindless_bufferview(struct zink_context *ctx, struct zink_resource *res, struct zink_descriptor_surface *ds)
1885 bvci.buffer = res->obj->buffer;
1886 struct zink_buffer_view *buffer_view = get_buffer_view(ctx, res, &bvci);
1888 if (zink_resource_has_usage(res))
1929 struct zink_resource *res = zink_descriptor_surface_resource(ds);
1933 update_res_bind_count(ctx, res, false, false);
1934 update_res_bind_count(ctx, res, true, false);
1935 res->bindless[0]++;
1937 if (ds->bufferview->bvci.buffer != res->obj->buffer)
1938 rebind_bindless_bufferview(ctx, res, ds);
1941 zink_resource_buffer_barrier(ctx, res, VK_ACCESS_SHADER_READ_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT);
1946 ii->imageLayout = zink_descriptor_util_image_layout_eval(ctx, res, false);
1947 flush_pending_clears(ctx, res);
1948 check_for_layout_update(ctx, res, false);
1949 check_for_layout_update(ctx, res, true);
1951 zink_batch_resource_usage_set(&ctx->batch, res, false);
1955 res->obj->unordered_read = false;
1959 update_res_bind_count(ctx, res, false, true);
1960 update_res_bind_count(ctx, res, true, true);
1961 res->bindless[0]--;
1963 if (!res->image_bind_count[i])
1964 check_for_layout_update(ctx, res, i);
1974 struct zink_resource *res = zink_resource(view->resource);
1976 if (!zink_resource_object_init_storage(ctx, res)) {
1985 bd->ds.is_buffer = res->base.b.target == PIPE_BUFFER;
1986 if (res->base.b.target == PIPE_BUFFER)
2010 struct zink_resource *res = zink_descriptor_surface_resource(ds);
2012 if (zink_resource_has_usage(res))
2016 if (zink_resource_has_usage(res))
2033 struct zink_resource *res = zink_descriptor_surface_resource(ds);
2037 res->write_bind_count[0]++;
2038 res->write_bind_count[1]++;
2040 res->write_bind_count[0]--;
2041 res->write_bind_count[1]--;
2051 update_res_bind_count(ctx, res, false, false);
2052 update_res_bind_count(ctx, res, true, false);
2053 res->image_bind_count[0]++;
2054 res->image_bind_count[1]++;
2055 res->bindless[1]++;
2057 if (ds->bufferview->bvci.buffer != res->obj->buffer)
2058 rebind_bindless_bufferview(ctx, res, ds);
2061 zink_resource_buffer_barrier(ctx, res, access, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT);
2067 finalize_image_bind(ctx, res, false);
2068 finalize_image_bind(ctx, res, true);
2070 zink_batch_resource_usage_set(&ctx->batch, res, zink_resource_access_is_write(access));
2075 res->obj->unordered_read = res->obj->unordered_write = false;
2077 res->obj->unordered_read = false;
2081 unbind_shader_image_counts(ctx, res, false, false);
2082 unbind_shader_image_counts(ctx, res, true, false);
2083 res->bindless[1]--;
2085 if (!res->image_bind_count[i])
2086 check_for_layout_update(ctx, res, i);
2415 update_res_sampler_layouts(struct zink_context *ctx, struct zink_resource *res)
2417 unsigned find = res->sampler_bind_count[0];
2419 u_foreach_bit(slot, res->sampler_binds[i]) {
2421 if (ctx->di.descriptor_res[ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW][i][slot] == res)
2422 ctx->di.textures[i][slot].imageLayout = zink_descriptor_util_image_layout_eval(ctx, res, false);
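
update_res_sampler_layouts() refreshes the imageLayout cached in ctx->di.textures for every slot where the resource is still bound, walking only the set bits of sampler_binds; the find counter above suggests it stops once every recorded bind has been visited. u_foreach_bit expands to the usual count-trailing-zeros loop; written out by hand, using the GCC/Clang builtin it builds on:

#include <stdint.h>
#include <stdio.h>

/* Hand-rolled equivalent of u_foreach_bit(slot, mask): visit each set
 * bit from lowest to highest. */
static void
visit_bound_slots(uint32_t mask)
{
   while (mask) {
      unsigned slot = (unsigned)__builtin_ctz(mask); /* lowest set bit */
      mask &= mask - 1;                              /* clear it */
      printf("updating cached layout for slot %u\n", slot);
   }
}

int
main(void)
{
   visit_bound_slots(0x15); /* slots 0, 2, 4 */
   return 0;
}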
2432 struct zink_resource *res;
2435 res = zink_resource(surf->base.texture);
2437 res = zink_resource(surf->base.texture);
2438 zink_batch_resource_usage_set(&ctx->batch, res, true);
2444 if (zink_is_swapchain(res)) {
2445 if (!zink_kopper_acquire(ctx, res, UINT64_MAX))
2463 zink_resource_image_barrier(ctx, res, layout, access, pipeline);
2464 res->obj->unordered_read = res->obj->unordered_write = false;
2465 if (i == ctx->fb_state.nr_cbufs && res->sampler_bind_count[0])
2466 update_res_sampler_layouts(ctx, res);
2534 struct zink_resource *res = zink_resource(ctx->fb_state.zsbuf->texture);
2535 res->obj->needs_zs_evaluate = true;
2536 zink_init_vk_sample_locations(ctx, &res->obj->zs_evaluate);
2590 struct zink_resource *res = ctx->di.descriptor_res[i][stage][j];
2591 if (!res)
2594 zink_batch_resource_usage_set(batch, res, is_write);
2596 res->obj->unordered_read = res->obj->unordered_write = false;
2598 res->obj->unordered_read = false;
2606 if (res->obj->is_buffer) {
2615 if (res->obj->is_buffer)
2640 struct zink_resource *res = zink_resource(ctx->vertex_buffers[i].buffer.resource);
2641 if (res) {
2642 zink_batch_resource_usage_set(batch, res, false);
2643 res->obj->unordered_read = false;
2653 struct zink_resource *res = zink_descriptor_surface_resource(&(*bd)->ds);
2654 zink_batch_resource_usage_set(&ctx->batch, res, (*bd)->access & PIPE_IMAGE_ACCESS_WRITE);
2656 res->obj->unordered_read = res->obj->unordered_write = false;
2658 res->obj->unordered_read = false;
2766 struct zink_resource *res = zink_resource(surf->texture);
2775 res->fb_binds--;
2776 if (!res->fb_binds) {
2777 check_resource_for_batch_ref(ctx, res);
2778 if (res->sampler_bind_count[0]) {
2779 update_res_sampler_layouts(ctx, res);
2780 _mesa_set_add(ctx->need_barriers[0], res);
2836 struct zink_resource *res = zink_resource(psurf->texture);
2841 if (changed && unlikely(res->obj->needs_zs_evaluate))
2844 zink_resource_image_barrier(ctx, res, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2876 struct zink_resource *res = zink_resource(psurf->texture);
2879 if (res->modifiers) {
2880 assert(!ctx->needs_present || ctx->needs_present == res);
2881 ctx->needs_present = res;
2883 if (res->obj->dt) {
2886 psurf->format != res->base.b.format) {
2894 res->fb_binds++;
2897 if (!res->valid)
3119 zink_resource_image_needs_barrier(struct zink_resource *res, VkImageLayout new_layout, VkAccessFlags flags, VkPipelineStageFlags pipeline)
3125 return res->layout != new_layout || (res->obj->access_stage & pipeline) != pipeline ||
3126 (res->obj->access & flags) != flags ||
3127 zink_resource_access_is_write(res->obj->access) ||
3132 zink_resource_image_barrier_init(VkImageMemoryBarrier *imb, struct zink_resource *res, VkImageLayout new_layout, VkAccessFlags flags, VkPipelineStageFlags pipeline)
3140 res->aspect,
3147 res->obj->access ? res->obj->access : access_src_flags(res->layout),
3149 res->layout,
3153 res->obj->image,
3156 return res->obj->needs_zs_evaluate || zink_resource_image_needs_barrier(res, new_layout, flags, pipeline);
3166 resource_check_defer_buffer_barrier(struct zink_context *ctx, struct zink_resource *res, VkPipelineStageFlags pipeline)
3168 assert(res->obj->is_buffer);
3169 if (res->bind_count[0] - res->so_bind_count > 0) {
3170 if ((res->obj->is_buffer && res->vbo_bind_mask && !(pipeline & VK_PIPELINE_STAGE_VERTEX_INPUT_BIT)) ||
3171 ((!res->obj->is_buffer || util_bitcount(res->vbo_bind_mask) != res->bind_count[0]) && !is_shader_pipline_stage(pipeline)))
3173 _mesa_set_add(ctx->need_barriers[0], res);
3175 if (res->bind_count[1] && !(pipeline & VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT))
3177 _mesa_set_add(ctx->need_barriers[1], res);
3181 unordered_res_exec(const struct zink_context *ctx, const struct zink_resource *res, bool is_write)
3184 if (res->obj->unordered_read && res->obj->unordered_write)
3187 if (is_write && zink_batch_usage_matches(res->obj->bo->reads, ctx->batch.state) && !res->obj->unordered_read)
3190 return res->obj->unordered_write || !zink_batch_usage_matches(res->obj->bo->writes, ctx->batch.state);
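
unordered_res_exec() decides whether an access may be recorded on the reordered command buffer rather than the ordered one. The rules are about same-batch hazards: if all prior access was already unordered, anything goes; a write after an ordered read in the current batch must stay ordered; otherwise only an ordered write in the current batch forces ordering. A sketch with booleans standing in for the zink_batch_usage_matches() queries:

#include <stdbool.h>

struct fake_obj {
   bool unordered_read, unordered_write; /* prior access was reorderable */
   bool batch_reads, batch_writes;       /* this batch already recorded a
                                          * read/write of the resource */
};

static bool
unordered_res_exec(const struct fake_obj *obj, bool is_write)
{
   /* all prior in-batch access was unordered: anything can reorder */
   if (obj->unordered_read && obj->unordered_write)
      return true;
   /* a write after an ordered read in the same batch must stay ordered */
   if (is_write && obj->batch_reads && !obj->unordered_read)
      return false;
   /* otherwise only an ordered write in this batch forces ordering */
   return obj->unordered_write || !obj->batch_writes;
}

int
main(void)
{
   struct fake_obj obj = { .batch_reads = true };
   (void)unordered_res_exec(&obj, true);
   return 0;
}

The same read/write distinction shows up where the barriers below pick a command buffer: zink_get_cmdbuf() is called with the resource in the write position or the read position depending on the access.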
3214 resource_check_defer_image_barrier(struct zink_context *ctx, struct zink_resource *res, VkImageLayout layout, VkPipelineStageFlags pipeline)
3216 assert(!res->obj->is_buffer);
3221 if ((is_shader || !res->bind_count[is_compute]) &&
3223 !res->bind_count[!is_compute] && (!is_compute || !res->fb_binds))
3226 if (res->bind_count[!is_compute] && is_shader) {
3228 if (layout == zink_descriptor_util_image_layout_eval(ctx, res, !is_compute))
3232 if (res->bind_count[!is_compute])
3233 _mesa_set_add(ctx->need_barriers[!is_compute], res);
3235 if (res->bind_count[is_compute] && !is_shader)
3236 _mesa_set_add(ctx->need_barriers[is_compute], res);
3240 zink_resource_image_barrier(struct zink_context *ctx, struct zink_resource *res,
3247 if (!zink_resource_image_barrier_init(&imb, res, new_layout, flags, pipeline))
3251 VkCommandBuffer cmdbuf = is_write ? zink_get_cmdbuf(ctx, NULL, res) : zink_get_cmdbuf(ctx, res, NULL);
3253 if (!res->obj->access_stage)
3255 if (res->obj->needs_zs_evaluate)
3256 imb.pNext = &res->obj->zs_evaluate;
3257 res->obj->needs_zs_evaluate = false;
3258 if (res->dmabuf_acquire) {
3261 res->dmabuf_acquire = false;
3265 res->obj->access_stage ? res->obj->access_stage : VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
3273 resource_check_defer_image_barrier(ctx, res, new_layout, pipeline);
3275 res->obj->access = imb.dstAccessMask;
3276 res->obj->access_stage = pipeline;
3277 res->layout = new_layout;
3321 zink_resource_buffer_needs_barrier(struct zink_resource *res, VkAccessFlags flags, VkPipelineStageFlags pipeline)
3323 if (!res->obj->access || !res->obj->access_stage)
3327 return zink_resource_access_is_write(res->obj->access) ||
3329 (res->obj->access_stage & pipeline) != pipeline ||
3330 (res->obj->access & flags) != flags;
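
zink_resource_buffer_needs_barrier() is the elision predicate: a barrier is required unless the previous access contained no writes and already covered both the requested stages and the requested access bits; a resource with no recorded access at all needs nothing. A sketch over plain bitmasks, with ACCESS_WRITE_BITS standing in for zink_resource_access_is_write():

#include <stdbool.h>
#include <stdint.h>

/* stand-ins for the write bits of VkAccessFlags */
#define ACCESS_SHADER_WRITE   (1u << 6)
#define ACCESS_TRANSFER_WRITE (1u << 12)
#define ACCESS_WRITE_BITS     (ACCESS_SHADER_WRITE | ACCESS_TRANSFER_WRITE)

static bool
buffer_needs_barrier(uint32_t prev_access, uint32_t prev_stages,
                     uint32_t new_access, uint32_t new_stages)
{
   if (!prev_access || !prev_stages)
      return false; /* first recorded use: nothing to order against */
   return (prev_access & ACCESS_WRITE_BITS) ||        /* write hazard */
          (prev_stages & new_stages) != new_stages || /* stage uncovered */
          (prev_access & new_access) != new_access;   /* access uncovered */
}

int
main(void)
{
   /* read-after-read in an already-covered stage: no barrier */
   return buffer_needs_barrier(1u << 5, 1u << 3, 1u << 5, 1u << 3) ? 1 : 0;
}

The image-side twin, zink_resource_image_needs_barrier() above, layers a layout comparison on top of the same stage/access coverage test.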
3334 zink_resource_buffer_barrier(struct zink_context *ctx, struct zink_resource *res, VkAccessFlags flags, VkPipelineStageFlags pipeline)
3339 if (!zink_resource_buffer_needs_barrier(res, flags, pipeline))
3344 bmb.srcAccessMask = res->obj->access;
3346 if (!res->obj->access_stage)
3349 VkCommandBuffer cmdbuf = is_write ? zink_get_cmdbuf(ctx, NULL, res) : zink_get_cmdbuf(ctx, res, NULL);
3353 res->obj->access_stage ? res->obj->access_stage : pipeline_access_stage(res->obj->access),
3361 resource_check_defer_buffer_barrier(ctx, res, pipeline);
3363 res->obj->access = bmb.dstAccessMask;
3364 res->obj->access_stage = pipeline;
3368 zink_resource_needs_barrier(struct zink_resource *res, VkImageLayout layout, VkAccessFlags flags, VkPipelineStageFlags pipeline)
3370 if (res->base.b.target == PIPE_BUFFER)
3371 return zink_resource_buffer_needs_barrier(res, flags, pipeline);
3372 return zink_resource_image_needs_barrier(res, layout, flags, pipeline);
3666 struct zink_resource *res = zink_resource(pres);
3667 if (res->obj->dt) {
3668 if (zink_kopper_acquired(res->obj->dt, res->obj->dt_idx)) {
3669 zink_resource_image_barrier(ctx, res, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, 0, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
3670 zink_batch_reference_resource_rw(&ctx->batch, res, true);
3672 ctx->needs_present = res;
3674 ctx->batch.swapchain = res;
3675 } else if (res->dmabuf)
3676 res->dmabuf_acquire = true;
3778 zink_rebind_framebuffer(struct zink_context *ctx, struct zink_resource *res)
3783 if (res->aspect & VK_IMAGE_ASPECT_COLOR_BIT) {
3786 zink_resource(ctx->fb_state.cbufs[i]->texture) != res)
3792 if (ctx->fb_state.zsbuf && zink_resource(ctx->fb_state.zsbuf->texture) != res) {
3798 did_rebind |= rebind_fb_state(ctx, res, false);
3812 struct zink_resource *res = update_descriptor_state_ubo(ctx, shader, slot,
3815 return res;
3822 struct zink_resource *res = zink_resource(ssbo->buffer);
3823 if (!res)
3825 util_range_add(&res->base.b, &res->valid_buffer_range, ssbo->buffer_offset,
3827 update_descriptor_state_ssbo(ctx, shader, slot, res);
3829 return res;
3838 struct zink_resource *res = zink_resource(sampler_view->base.texture);
3842 bvci.buffer = res->obj->buffer;
3844 sampler_view->buffer_view = get_buffer_view(ctx, res, &bvci);
3845 update_descriptor_state_sampler(ctx, shader, slot, res);
3847 return res;
3854 struct zink_resource *res = zink_resource(image_view->base.resource);
3855 if (!res || res->base.b.target != PIPE_BUFFER)
3861 bvci.buffer = res->obj->buffer;
3863 if (!zink_resource_object_init_storage(ctx, res)) {
3867 image_view->buffer_view = get_buffer_view(ctx, res, &bvci);
3869 util_range_add(&res->base.b, &res->valid_buffer_range, image_view->base.u.buf.offset,
3871 update_descriptor_state_image(ctx, shader, slot, res);
3873 return res;
3877 rebind_buffer(struct zink_context *ctx, struct zink_resource *res, uint32_t rebind_mask, const unsigned expected_num_rebinds)
3882 if (!zink_resource_has_binds(res))
3885 assert(!res->bindless[1]); //TODO
3886 if ((rebind_mask & BITFIELD_BIT(TC_BINDING_STREAMOUT_BUFFER)) || (!rebind_mask && res->so_bind_count && ctx->num_so_targets)) {
3890 if (so && so == res) {
3901 if ((rebind_mask & BITFIELD_BIT(TC_BINDING_VERTEX_BUFFER)) || (!rebind_mask && res->vbo_bind_mask)) {
3902 u_foreach_bit(slot, res->vbo_bind_mask) {
3903 if (ctx->vertex_buffers[slot].buffer.resource != &res->base.b) //wrong context
3915 ((res->ubo_bind_count[0] ? BITFIELD_RANGE(TC_BINDING_UBO_VS, (PIPE_SHADER_TYPES - 1)) : 0) |
3916 (res->ubo_bind_count[1] ? BITFIELD_BIT(TC_BINDING_UBO_CS) : 0));
3918 u_foreach_bit(slot, res->ubo_bind_mask[shader]) {
3919 if (&res->base.b != ctx->ubos[shader][slot].buffer) //wrong context
3933 u_foreach_bit(slot, res->ssbo_bind_mask[shader]) {
3935 if (&res->base.b != ssbo->buffer) //wrong context
3949 u_foreach_bit(slot, res->sampler_binds[shader]) {
3951 if (&res->base.b != sampler_view->base.texture) //wrong context
3964 unsigned num_image_rebinds_remaining = rebind_mask ? expected_num_rebinds - num_rebinds : res->image_bind_count[0] + res->image_bind_count[1];
3968 if (res != cres)
3980 zink_batch_resource_usage_set(&ctx->batch, res, has_write);
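
rebind_buffer() handles replacement of a buffer's backing storage: every bind point recorded in the resource's masks (streamout, vertex buffers, UBOs, SSBOs, sampler views, image views) is revisited so cached descriptor state and buffer views point at the new VkBuffer, skipping entries that belong to another context, and the rebind count is checked against the expected total. A sketch of the outer walk only, with each per-slot rebind reduced to a counter bump and just two stand-in bind categories:

#include <stdint.h>

struct fake_resource {
   uint32_t vbo_bind_mask;
   uint32_t ubo_bind_mask[2]; /* two stand-in shader stages */
};

static unsigned
rebind_buffer(struct fake_resource *res)
{
   unsigned num_rebinds = 0;
   /* each bitmask names exactly the slots that must be revisited */
   for (uint32_t m = res->vbo_bind_mask; m; m &= m - 1)
      num_rebinds++; /* real code: re-emit the vertex-buffer binding */
   for (unsigned stage = 0; stage < 2; stage++)
      for (uint32_t m = res->ubo_bind_mask[stage]; m; m &= m - 1)
         num_rebinds++; /* real code: rewrite the cached UBO descriptor */
   return num_rebinds;
}

int
main(void)
{
   struct fake_resource res = { .vbo_bind_mask = 0x3,
                                .ubo_bind_mask = { 0x1, 0x0 } };
   return rebind_buffer(&res) == 3 ? 0 : 1;
}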
4225 struct zink_resource *res = zink_resource(pres);
4229 if (zink_resource_has_unflushed_usage(res))
4233 bool ret = zink_bo_commit(screen, res, level, box, commit, &sem);
4245 rebind_image(struct zink_context *ctx, struct zink_resource *res)
4247 zink_rebind_framebuffer(ctx, res);
4248 if (!zink_resource_has_binds(res))
4251 if (res->sampler_binds[i]) {
4254 if (sv && sv->base.texture == &res->base.b) {
4259 update_descriptor_state_sampler(ctx, i, j, res);
4263 if (!res->image_bind_count[i == PIPE_SHADER_COMPUTE])
4266 if (zink_resource(ctx->image_views[i][j].base.resource) == res) {
4268 update_descriptor_state_image(ctx, i, j, res);
4269 _mesa_set_add(ctx->need_barriers[i == PIPE_SHADER_COMPUTE], res);
4276 zink_resource_rebind(struct zink_context *ctx, struct zink_resource *res)
4278 if (res->base.b.target == PIPE_BUFFER) {
4280 res->so_valid = false;
4281 return rebind_buffer(ctx, res, 0, 0) == res->bind_count[0] + res->bind_count[1];
4283 rebind_image(ctx, res);
4298 struct zink_resource *res = rebind_ubo(ctx, shader, slot);
4299 if (res)
4300 zink_batch_resource_usage_set(batch, res, false);
4303 struct zink_resource *res = rebind_tbo(ctx, shader, slot);
4304 if (res)
4305 zink_batch_resource_usage_set(batch, res, false);
4308 struct zink_resource *res = rebind_ssbo(ctx, shader, slot);
4309 if (res)
4310 zink_batch_resource_usage_set(batch, res, (ctx->writable_ssbos[shader] & BITFIELD64_BIT(slot)) != 0);
4313 struct zink_resource *res = rebind_ibo(ctx, shader, slot);
4314 if (res)
4315 zink_batch_resource_usage_set(batch, res, (ctx->image_views[shader][slot].base.access & PIPE_IMAGE_ACCESS_WRITE) != 0);
4329 struct zink_resource *res = zink_resource(sv->image_view->base.texture);
4330 if (res->obj != sv->image_view->obj) {
4335 update_descriptor_state_sampler(ctx, i, j, res);
4340 struct zink_resource *res = zink_resource(image_view->base.resource);
4341 if (!res)
4343 if (ctx->image_views[i][j].surface->obj != res->obj) {
4347 update_descriptor_state_image(ctx, i, j, res);
4348 _mesa_set_add(ctx->need_barriers[i == PIPE_SHADER_COMPUTE], res);
4384 struct zink_resource *res = zink_resource(pres);
4390 return !zink_resource_usage_check_completion(screen, res, check_usage);