/*
 * Copyright 2018 Collabora Ltd.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * on the rights to use, copy, modify, merge, publish, distribute, sub
 * license, and/or sell copies of the Software, and to permit persons to whom
 * the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include "zink_context.h"
#include "zink_kopper.h"
#include "zink_framebuffer.h"
#include "zink_query.h"
#include "zink_resource.h"
#include "zink_screen.h"

#include "util/u_blitter.h"
#include "util/u_dynarray.h"
#include "util/format/u_format.h"
#include "util/format_srgb.h"
#include "util/u_framebuffer.h"
#include "util/u_inlines.h"
#include "util/u_rect.h"
#include "util/u_surface.h"
#include "util/u_helpers.h"

static inline bool
scissor_states_equal(const struct pipe_scissor_state *a, const struct pipe_scissor_state *b)
{
   return a->minx == b->minx && a->miny == b->miny && a->maxx == b->maxx && a->maxy == b->maxy;
}

static void
clear_in_rp(struct pipe_context *pctx,
            unsigned buffers,
            const struct pipe_scissor_state *scissor_state,
            const union pipe_color_union *pcolor,
            double depth, unsigned stencil)
{
   struct zink_context *ctx = zink_context(pctx);
   struct pipe_framebuffer_state *fb = &ctx->fb_state;

   VkClearAttachment attachments[1 + PIPE_MAX_COLOR_BUFS];
   int num_attachments = 0;

   if (buffers & PIPE_CLEAR_COLOR) {
      VkClearColorValue color;
      color.uint32[0] = pcolor->ui[0];
      color.uint32[1] = pcolor->ui[1];
      color.uint32[2] = pcolor->ui[2];
      color.uint32[3] = pcolor->ui[3];

      for (unsigned i = 0; i < fb->nr_cbufs; i++) {
         if (!(buffers & (PIPE_CLEAR_COLOR0 << i)) || !fb->cbufs[i])
            continue;

         attachments[num_attachments].aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
         attachments[num_attachments].colorAttachment = i;
         attachments[num_attachments].clearValue.color = color;
         ++num_attachments;
      }
   }

   if (buffers & PIPE_CLEAR_DEPTHSTENCIL && fb->zsbuf) {
      VkImageAspectFlags aspect = 0;
      if (buffers & PIPE_CLEAR_DEPTH)
         aspect |= VK_IMAGE_ASPECT_DEPTH_BIT;
      if (buffers & PIPE_CLEAR_STENCIL)
         aspect |= VK_IMAGE_ASPECT_STENCIL_BIT;

      attachments[num_attachments].aspectMask = aspect;
      attachments[num_attachments].clearValue.depthStencil.depth = depth;
      attachments[num_attachments].clearValue.depthStencil.stencil = stencil;
      ++num_attachments;
   }
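   /* translate the scissor (or, when no scissor is set, the full framebuffer)
    * into a VkClearRect covering every framebuffer layer
    */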
   VkClearRect cr = {0};
   if (scissor_state) {
      cr.rect.offset.x = scissor_state->minx;
      cr.rect.offset.y = scissor_state->miny;
      cr.rect.extent.width = MIN2(fb->width, scissor_state->maxx - scissor_state->minx);
      cr.rect.extent.height = MIN2(fb->height, scissor_state->maxy - scissor_state->miny);
   } else {
      cr.rect.extent.width = fb->width;
      cr.rect.extent.height = fb->height;
   }
   cr.baseArrayLayer = 0;
   cr.layerCount = util_framebuffer_get_num_layers(fb);
   struct zink_batch *batch = &ctx->batch;
   assert(batch->in_rp);
   VKCTX(CmdClearAttachments)(batch->state->cmdbuf, num_attachments, attachments, 1, &cr);
   /*
      Rendering within a subpass containing a feedback loop creates a data race, except in the following
      cases:
      • If a memory dependency is inserted between when the attachment is written and when it is
      subsequently read by later fragments. Pipeline barriers expressing a subpass self-dependency
      are the only way to achieve this, and one must be inserted every time a fragment will read
      values at a particular sample (x, y, layer, sample) coordinate, if those values have been written
      since the most recent pipeline barrier

      VK 1.3.211, Chapter 8: Render Pass
   */
   if (ctx->fbfetch_outputs)
      ctx->base.texture_barrier(&ctx->base, PIPE_TEXTURE_BARRIER_FRAMEBUFFER);
}

static struct zink_framebuffer_clear_data *
add_new_clear(struct zink_framebuffer_clear *fb_clear)
{
   struct zink_framebuffer_clear_data cd = {0};
   util_dynarray_append(&fb_clear->clears, struct zink_framebuffer_clear_data, cd);
   return zink_fb_clear_element(fb_clear, zink_fb_clear_count(fb_clear) - 1);
}

static struct zink_framebuffer_clear_data *
get_clear_data(struct zink_context *ctx, struct zink_framebuffer_clear *fb_clear, const struct pipe_scissor_state *scissor_state)
{
   unsigned num_clears = zink_fb_clear_count(fb_clear);
   if (num_clears) {
      struct zink_framebuffer_clear_data *last_clear = zink_fb_clear_element(fb_clear, num_clears - 1);
      /* if we're completely overwriting the previous clear, merge this into the previous clear */
      if (!scissor_state || (last_clear->has_scissor && scissor_states_equal(&last_clear->scissor, scissor_state)))
         return last_clear;
   }
   return add_new_clear(fb_clear);
}

static void
clamp_color(const struct util_format_description *desc, union pipe_color_union *dst, const union pipe_color_union *src, unsigned i)
{
   int non_void = util_format_get_first_non_void_channel(desc->format);
   switch (desc->channel[i].type) {
   case UTIL_FORMAT_TYPE_VOID:
      if (desc->channel[non_void].type == UTIL_FORMAT_TYPE_FLOAT) {
         dst->f[i] = uif(UINT32_MAX);
      } else {
         if (desc->channel[non_void].normalized)
            dst->f[i] = 1.0;
         else if (desc->channel[non_void].type == UTIL_FORMAT_TYPE_SIGNED)
            dst->i[i] = INT32_MAX;
         else
            dst->ui[i] = UINT32_MAX;
      }
      break;
   case UTIL_FORMAT_TYPE_SIGNED:
      if (desc->channel[i].normalized)
         dst->i[i] = src->i[i];
      else {
         dst->i[i] = MAX2(src->i[i], -(1<<(desc->channel[i].size - 1)));
         dst->i[i] = MIN2(dst->i[i], (1 << (desc->channel[i].size - 1)) - 1);
      }
      break;
   case UTIL_FORMAT_TYPE_UNSIGNED:
      if (desc->channel[i].normalized)
         dst->ui[i] = src->ui[i];
      else
         dst->ui[i] = MIN2(src->ui[i], BITFIELD_MASK(desc->channel[i].size));
      break;
   case UTIL_FORMAT_TYPE_FIXED:
   case UTIL_FORMAT_TYPE_FLOAT:
      dst->ui[i] = src->ui[i];
      break;
   }
}
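/* gallium clear entrypoint: inside a renderpass the clear is emitted immediately via
 * clear_in_rp(); otherwise it is recorded into ctx->fb_clears so it can later be
 * applied as a renderpass load op or an explicit in-renderpass clear
 */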
void
zink_clear(struct pipe_context *pctx,
           unsigned buffers,
           const struct pipe_scissor_state *scissor_state,
           const union pipe_color_union *pcolor,
           double depth, unsigned stencil)
{
   struct zink_context *ctx = zink_context(pctx);
   struct pipe_framebuffer_state *fb = &ctx->fb_state;
   struct zink_batch *batch = &ctx->batch;
   bool needs_rp = false;

   if (unlikely(!zink_screen(pctx->screen)->info.have_EXT_conditional_rendering && !zink_check_conditional_render(ctx)))
      return;

   if (scissor_state) {
      struct u_rect scissor = {scissor_state->minx, scissor_state->maxx, scissor_state->miny, scissor_state->maxy};
      needs_rp = !zink_blit_region_fills(scissor, fb->width, fb->height);
   }

   if (unlikely(ctx->fb_layer_mismatch)) {
      /* this is a terrible scenario:
       * at least one attachment has a layerCount greater than the others,
       * so iterate over all the mismatched attachments and pre-clear them separately,
       * then continue to flag them as needing (additional) clearing
       * to avoid loadOp=LOAD
       */
      unsigned x = 0;
      unsigned y = 0;
      unsigned w = ctx->fb_state.width;
      unsigned h = ctx->fb_state.height;
      if (scissor_state) {
         x = scissor_state->minx;
         y = scissor_state->miny;
         w = scissor_state->maxx - scissor_state->minx;
         h = scissor_state->maxy - scissor_state->miny;
      }
      unsigned clear_buffers = buffers >> 2;
      for (unsigned i = 0; i < ctx->fb_state.nr_cbufs; i++) {
         if (ctx->fb_state.cbufs[i] &&
             (ctx->fb_layer_mismatch & clear_buffers & BITFIELD_BIT(i))) {
            if (ctx->void_clears & (PIPE_CLEAR_COLOR0 << i)) {
               union pipe_color_union color;
               color.f[0] = color.f[1] = color.f[2] = 0;
               color.f[3] = 1.0;
               pctx->clear_render_target(pctx, ctx->fb_state.cbufs[i], &color,
                                         0, 0,
                                         ctx->fb_state.cbufs[i]->width, ctx->fb_state.cbufs[i]->height,
                                         ctx->render_condition_active);
            }
            pctx->clear_render_target(pctx, ctx->fb_state.cbufs[i], pcolor,
                                      x, y, w, h, ctx->render_condition_active);
         }
      }
      if (ctx->fb_state.zsbuf && (buffers & PIPE_CLEAR_DEPTHSTENCIL))
         pctx->clear_depth_stencil(pctx, ctx->fb_state.zsbuf, buffers & PIPE_CLEAR_DEPTHSTENCIL, depth, stencil,
                                   x, y, w, h, ctx->render_condition_active);
   }

   if (batch->in_rp) {
      clear_in_rp(pctx, buffers, scissor_state, pcolor, depth, stencil);
      return;
   }

   unsigned rp_clears_enabled = ctx->rp_clears_enabled;

   if (ctx->void_clears & buffers) {
      unsigned void_clears = ctx->void_clears & buffers;
      ctx->void_clears &= ~buffers;
      union pipe_color_union color;
      color.f[0] = color.f[1] = color.f[2] = 0;
      color.f[3] = 1.0;
      for (unsigned i = 0; i < fb->nr_cbufs; i++) {
         if ((void_clears & (PIPE_CLEAR_COLOR0 << i)) && fb->cbufs[i]) {
            struct zink_framebuffer_clear *fb_clear = &ctx->fb_clears[i];
            unsigned num_clears = zink_fb_clear_count(fb_clear);
            if (num_clears) {
               if (zink_fb_clear_first_needs_explicit(fb_clear)) {
                  /* a scissored clear exists:
                   * - extend the clear array
                   * - shift the existing clears back by one position
                   * - inject the void clear at the base of the array
                   */
                  add_new_clear(fb_clear);
                  struct zink_framebuffer_clear_data *clear = fb_clear->clears.data;
                  memmove(clear + 1, clear, num_clears * sizeof(struct zink_framebuffer_clear_data));
                  memcpy(&clear->color, &color, sizeof(color));
               } else {
                  /* no void clear needed */
               }
               void_clears &= ~(PIPE_CLEAR_COLOR0 << i);
            }
         }
      }
      if (void_clears)
         pctx->clear(pctx, void_clears, NULL, &color, 0, 0);
   }
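   /* record the deferred color clears; the clear color is clamped per-channel
    * to what each surface's format can represent
    */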
   if (buffers & PIPE_CLEAR_COLOR) {
      for (unsigned i = 0; i < fb->nr_cbufs; i++) {
         if ((buffers & (PIPE_CLEAR_COLOR0 << i)) && fb->cbufs[i]) {
            struct pipe_surface *psurf = fb->cbufs[i];
            const struct util_format_description *desc = util_format_description(psurf->format);
            struct zink_framebuffer_clear *fb_clear = &ctx->fb_clears[i];
            struct zink_framebuffer_clear_data *clear = get_clear_data(ctx, fb_clear, needs_rp ? scissor_state : NULL);

            ctx->clears_enabled |= PIPE_CLEAR_COLOR0 << i;
            clear->conditional = ctx->render_condition_active;
            clear->has_scissor = needs_rp;
            if (scissor_state && needs_rp)
               clear->scissor = *scissor_state;
            for (unsigned c = 0; c < 4; c++)
               clamp_color(desc, &clear->color, pcolor, c);
            if (zink_fb_clear_first_needs_explicit(fb_clear))
               ctx->rp_clears_enabled &= ~(PIPE_CLEAR_COLOR0 << i);
            else
               ctx->rp_clears_enabled |= PIPE_CLEAR_COLOR0 << i;
         }
      }
   }

   if (buffers & PIPE_CLEAR_DEPTHSTENCIL && fb->zsbuf) {
      struct zink_framebuffer_clear *fb_clear = &ctx->fb_clears[PIPE_MAX_COLOR_BUFS];
      struct zink_framebuffer_clear_data *clear = get_clear_data(ctx, fb_clear, needs_rp ? scissor_state : NULL);
      ctx->clears_enabled |= PIPE_CLEAR_DEPTHSTENCIL;
      clear->conditional = ctx->render_condition_active;
      clear->has_scissor = needs_rp;
      if (scissor_state && needs_rp)
         clear->scissor = *scissor_state;
      if (buffers & PIPE_CLEAR_DEPTH)
         clear->zs.depth = depth;
      if (buffers & PIPE_CLEAR_STENCIL)
         clear->zs.stencil = stencil;
      clear->zs.bits |= (buffers & PIPE_CLEAR_DEPTHSTENCIL);
      if (zink_fb_clear_first_needs_explicit(fb_clear))
         ctx->rp_clears_enabled &= ~PIPE_CLEAR_DEPTHSTENCIL;
      else
         ctx->rp_clears_enabled |= (buffers & PIPE_CLEAR_DEPTHSTENCIL);
   }
   assert(!ctx->batch.in_rp);
   ctx->rp_changed |= ctx->rp_clears_enabled != rp_clears_enabled;
}

static inline bool
colors_equal(union pipe_color_union *a, union pipe_color_union *b)
{
   return a->ui[0] == b->ui[0] && a->ui[1] == b->ui[1] && a->ui[2] == b->ui[2] && a->ui[3] == b->ui[3];
}

void
zink_clear_framebuffer(struct zink_context *ctx, unsigned clear_buffers)
{
   unsigned to_clear = 0;
   struct pipe_framebuffer_state *fb_state = &ctx->fb_state;
#ifndef NDEBUG
   assert(!(clear_buffers & PIPE_CLEAR_DEPTHSTENCIL) || zink_fb_clear_enabled(ctx, PIPE_MAX_COLOR_BUFS));
   for (int i = 0; i < fb_state->nr_cbufs && clear_buffers >= PIPE_CLEAR_COLOR0; i++) {
      assert(!(clear_buffers & (PIPE_CLEAR_COLOR0 << i)) || zink_fb_clear_enabled(ctx, i));
   }
#endif
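   /* try to batch compatible color buffers into a single clear call: buffers are only
    * combined when their pending clear lists match in count, scissors, and colors;
    * anything that can't be merged is fired off on its own
    */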
   while (clear_buffers) {
      struct zink_framebuffer_clear *color_clear = NULL;
      struct zink_framebuffer_clear *zs_clear = NULL;
      unsigned num_clears = 0;
      for (int i = 0; i < fb_state->nr_cbufs && clear_buffers >= PIPE_CLEAR_COLOR0; i++) {
         struct zink_framebuffer_clear *fb_clear = &ctx->fb_clears[i];
         /* these need actual clear calls inside the rp */
         if (!(clear_buffers & (PIPE_CLEAR_COLOR0 << i)))
            continue;
         if (color_clear) {
            /* different number of clears -> do another clear */
            //XXX: could potentially merge "some" of the clears into this one for a very, very small optimization
            if (num_clears != zink_fb_clear_count(fb_clear))
               goto out;
            /* compare all the clears to determine if we can batch these buffers together */
            for (int j = !zink_fb_clear_first_needs_explicit(fb_clear); j < num_clears; j++) {
               struct zink_framebuffer_clear_data *a = zink_fb_clear_element(color_clear, j);
               struct zink_framebuffer_clear_data *b = zink_fb_clear_element(fb_clear, j);
               /* scissors don't match, fire this one off */
               if (a->has_scissor != b->has_scissor || (a->has_scissor && !scissor_states_equal(&a->scissor, &b->scissor)))
                  goto out;

               /* colors don't match, fire this one off */
               if (!colors_equal(&a->color, &b->color))
                  goto out;
            }
         } else {
            color_clear = fb_clear;
            num_clears = zink_fb_clear_count(fb_clear);
         }

         clear_buffers &= ~(PIPE_CLEAR_COLOR0 << i);
         to_clear |= (PIPE_CLEAR_COLOR0 << i);
      }
      clear_buffers &= ~PIPE_CLEAR_COLOR;
      if (clear_buffers & PIPE_CLEAR_DEPTHSTENCIL) {
         struct zink_framebuffer_clear *fb_clear = &ctx->fb_clears[PIPE_MAX_COLOR_BUFS];
         if (color_clear) {
            if (num_clears != zink_fb_clear_count(fb_clear))
               goto out;
            /* compare all the clears to determine if we can batch these buffers together */
            for (int j = !zink_fb_clear_first_needs_explicit(fb_clear); j < zink_fb_clear_count(color_clear); j++) {
               struct zink_framebuffer_clear_data *a = zink_fb_clear_element(color_clear, j);
               struct zink_framebuffer_clear_data *b = zink_fb_clear_element(fb_clear, j);
               /* scissors don't match, fire this one off */
               if (a->has_scissor != b->has_scissor || (a->has_scissor && !scissor_states_equal(&a->scissor, &b->scissor)))
                  goto out;
            }
         }
         zs_clear = fb_clear;
         to_clear |= (clear_buffers & PIPE_CLEAR_DEPTHSTENCIL);
         clear_buffers &= ~PIPE_CLEAR_DEPTHSTENCIL;
      }
out:
      if (to_clear) {
         if (num_clears) {
            for (int j = !zink_fb_clear_first_needs_explicit(color_clear); j < num_clears; j++) {
               struct zink_framebuffer_clear_data *clear = zink_fb_clear_element(color_clear, j);
               struct zink_framebuffer_clear_data *zsclear = NULL;
               /* zs bits are both set here if those aspects should be cleared at some point */
               unsigned clear_bits = to_clear & ~PIPE_CLEAR_DEPTHSTENCIL;
               if (zs_clear) {
                  zsclear = zink_fb_clear_element(zs_clear, j);
                  clear_bits |= zsclear->zs.bits;
               }
               zink_clear(&ctx->base, clear_bits,
                          clear->has_scissor ? &clear->scissor : NULL,
                          &clear->color,
                          zsclear ? zsclear->zs.depth : 0,
                          zsclear ? zsclear->zs.stencil : 0);
            }
         } else {
            for (int j = !zink_fb_clear_first_needs_explicit(zs_clear); j < zink_fb_clear_count(zs_clear); j++) {
               struct zink_framebuffer_clear_data *clear = zink_fb_clear_element(zs_clear, j);
               zink_clear(&ctx->base, clear->zs.bits,
                          clear->has_scissor ? &clear->scissor : NULL,
                          NULL,
                          clear->zs.depth,
                          clear->zs.stencil);
            }
         }
      }
      to_clear = 0;
   }
   for (int i = 0; i < ARRAY_SIZE(ctx->fb_clears); i++)
      zink_fb_clear_reset(ctx, i);
}
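/* helpers for the surface-clear paths (clear_texture / clear_render_target /
 * clear_depth_stencil): a throwaway framebuffer state is bound around a regular
 * clear so the deferred-clear machinery above can be reused
 */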
static struct pipe_surface *
create_clear_surface(struct pipe_context *pctx, struct pipe_resource *pres, unsigned level, const struct pipe_box *box)
{
   struct pipe_surface tmpl = {{0}};

   tmpl.format = pres->format;
   tmpl.u.tex.first_layer = box->z;
   tmpl.u.tex.last_layer = box->z + box->depth - 1;
   tmpl.u.tex.level = level;
   return pctx->create_surface(pctx, pres, &tmpl);
}

static void
set_clear_fb(struct pipe_context *pctx, struct pipe_surface *psurf, struct pipe_surface *zsurf)
{
   struct pipe_framebuffer_state fb_state;
   fb_state.width = psurf ? psurf->width : zsurf->width;
   fb_state.height = psurf ? psurf->height : zsurf->height;
   fb_state.nr_cbufs = !!psurf;
   fb_state.cbufs[0] = psurf;
   fb_state.zsbuf = zsurf;
   pctx->set_framebuffer_state(pctx, &fb_state);
}

void
zink_clear_texture(struct pipe_context *pctx,
                   struct pipe_resource *pres,
                   unsigned level,
                   const struct pipe_box *box,
                   const void *data)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_resource *res = zink_resource(pres);
   struct pipe_surface *surf = NULL;
   struct pipe_scissor_state scissor = {box->x, box->y, box->x + box->width, box->y + box->height};

   if (res->aspect & VK_IMAGE_ASPECT_COLOR_BIT) {
      union pipe_color_union color;

      util_format_unpack_rgba(pres->format, color.ui, data, 1);

      surf = create_clear_surface(pctx, pres, level, box);
      util_blitter_save_framebuffer(ctx->blitter, &ctx->fb_state);
      set_clear_fb(pctx, surf, NULL);
      pctx->clear(pctx, PIPE_CLEAR_COLOR0, &scissor, &color, 0, 0);
      util_blitter_restore_fb_state(ctx->blitter);
   } else {
      float depth = 0.0;
      uint8_t stencil = 0;

      if (res->aspect & VK_IMAGE_ASPECT_DEPTH_BIT)
         util_format_unpack_z_float(pres->format, &depth, data, 1);

      if (res->aspect & VK_IMAGE_ASPECT_STENCIL_BIT)
         util_format_unpack_s_8uint(pres->format, &stencil, data, 1);

      unsigned flags = 0;
      if (res->aspect & VK_IMAGE_ASPECT_DEPTH_BIT)
         flags |= PIPE_CLEAR_DEPTH;
      if (res->aspect & VK_IMAGE_ASPECT_STENCIL_BIT)
         flags |= PIPE_CLEAR_STENCIL;
      surf = create_clear_surface(pctx, pres, level, box);
      util_blitter_save_framebuffer(ctx->blitter, &ctx->fb_state);
      set_clear_fb(pctx, NULL, surf);
      pctx->clear(pctx, flags, &scissor, NULL, depth, stencil);
      util_blitter_restore_fb_state(ctx->blitter);
   }
   /* this will never destroy the surface */
   pipe_surface_reference(&surf, NULL);
}
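/* buffer clears: when the offset and size are multiples of 4 and the clear value can be
 * lowered to a single dword, vkCmdFillBuffer is used directly; otherwise the range is
 * mapped and filled on the CPU
 */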
void
zink_clear_buffer(struct pipe_context *pctx,
                  struct pipe_resource *pres,
                  unsigned offset,
                  unsigned size,
                  const void *clear_value,
                  int clear_value_size)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_resource *res = zink_resource(pres);

   uint32_t clamped;
   if (util_lower_clearsize_to_dword(clear_value, &clear_value_size, &clamped))
      clear_value = &clamped;
   if (offset % 4 == 0 && size % 4 == 0 && clear_value_size == sizeof(uint32_t)) {
      /*
         - dstOffset is the byte offset into the buffer at which to start filling,
           and must be a multiple of 4.

         - size is the number of bytes to fill, and must be either a multiple of 4,
           or VK_WHOLE_SIZE to fill the range from offset to the end of the buffer
       */
      struct zink_batch *batch = &ctx->batch;
      zink_batch_no_rp(ctx);
      zink_batch_reference_resource_rw(batch, res, true);
      util_range_add(&res->base.b, &res->valid_buffer_range, offset, offset + size);
      zink_resource_buffer_barrier(ctx, res, VK_ACCESS_TRANSFER_WRITE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);
      res->obj->unordered_read = res->obj->unordered_write = false;
      VKCTX(CmdFillBuffer)(batch->state->cmdbuf, res->obj->buffer, offset, size, *(uint32_t*)clear_value);
      return;
   }
   struct pipe_transfer *xfer;
   uint8_t *map = pipe_buffer_map_range(pctx, pres, offset, size,
                                        PIPE_MAP_WRITE | PIPE_MAP_ONCE | PIPE_MAP_DISCARD_RANGE, &xfer);
   if (!map)
      return;
   unsigned rem = size % clear_value_size;
   uint8_t *ptr = map;
   for (unsigned i = 0; i < (size - rem) / clear_value_size; i++) {
      memcpy(ptr, clear_value, clear_value_size);
      ptr += clear_value_size;
   }
   if (rem)
      memcpy(map + size - rem, clear_value, rem);
   pipe_buffer_unmap(pctx, xfer);
}

void
zink_clear_render_target(struct pipe_context *pctx, struct pipe_surface *dst,
                         const union pipe_color_union *color, unsigned dstx,
                         unsigned dsty, unsigned width, unsigned height,
                         bool render_condition_enabled)
{
   struct zink_context *ctx = zink_context(pctx);
   bool render_condition_active = ctx->render_condition_active;
   if (!render_condition_enabled && render_condition_active) {
      zink_stop_conditional_render(ctx);
      ctx->render_condition_active = false;
   }
   util_blitter_save_framebuffer(ctx->blitter, &ctx->fb_state);
   set_clear_fb(pctx, dst, NULL);
   struct pipe_scissor_state scissor = {dstx, dsty, dstx + width, dsty + height};
   pctx->clear(pctx, PIPE_CLEAR_COLOR0, &scissor, color, 0, 0);
   util_blitter_restore_fb_state(ctx->blitter);
   if (!render_condition_enabled && render_condition_active)
      zink_start_conditional_render(ctx);
   ctx->render_condition_active = render_condition_active;
}

void
zink_clear_depth_stencil(struct pipe_context *pctx, struct pipe_surface *dst,
                         unsigned clear_flags, double depth, unsigned stencil,
                         unsigned dstx, unsigned dsty, unsigned width, unsigned height,
                         bool render_condition_enabled)
{
   struct zink_context *ctx = zink_context(pctx);
   bool render_condition_active = ctx->render_condition_active;
   if (!render_condition_enabled && render_condition_active) {
      zink_stop_conditional_render(ctx);
      ctx->render_condition_active = false;
   }
   bool cur_attachment = zink_csurface(ctx->fb_state.zsbuf) == zink_csurface(dst);
   if (dstx > ctx->fb_state.width || dsty > ctx->fb_state.height ||
       dstx + width > ctx->fb_state.width ||
       dsty + height > ctx->fb_state.height)
      cur_attachment = false;
   if (!cur_attachment) {
      util_blitter_save_framebuffer(ctx->blitter, &ctx->fb_state);
      set_clear_fb(pctx, NULL, dst);
   }
   struct pipe_scissor_state scissor = {dstx, dsty, dstx + width, dsty + height};
   pctx->clear(pctx, clear_flags, &scissor, NULL, depth, stencil);
   if (!cur_attachment)
      util_blitter_restore_fb_state(ctx->blitter);
   if (!render_condition_enabled && render_condition_active)
      zink_start_conditional_render(ctx);
   ctx->render_condition_active = render_condition_active;
}
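/* a pending clear "needs explicit" handling when (as used here) it can't be folded
 * into the renderpass loadOp and must instead be replayed as an in-renderpass clear
 */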
bool
zink_fb_clear_needs_explicit(struct zink_framebuffer_clear *fb_clear)
{
   if (zink_fb_clear_count(fb_clear) != 1)
      return true;
   return zink_fb_clear_element_needs_explicit(zink_fb_clear_element(fb_clear, 0));
}

bool
zink_fb_clear_first_needs_explicit(struct zink_framebuffer_clear *fb_clear)
{
   if (!zink_fb_clear_count(fb_clear))
      return false;
   return zink_fb_clear_element_needs_explicit(zink_fb_clear_element(fb_clear, 0));
}

static void
fb_clears_apply_internal(struct zink_context *ctx, struct pipe_resource *pres, int i)
{
   if (!zink_fb_clear_enabled(ctx, i))
      return;
   if (ctx->batch.in_rp)
      zink_clear_framebuffer(ctx, BITFIELD_BIT(i));
   else
      /* this will automatically trigger all the clears */
      zink_batch_rp(ctx);
   zink_fb_clear_reset(ctx, i);
}

void
zink_fb_clear_reset(struct zink_context *ctx, unsigned i)
{
   unsigned rp_clears_enabled = ctx->rp_clears_enabled;
   util_dynarray_clear(&ctx->fb_clears[i].clears);
   if (i == PIPE_MAX_COLOR_BUFS) {
      ctx->clears_enabled &= ~PIPE_CLEAR_DEPTHSTENCIL;
      ctx->rp_clears_enabled &= ~PIPE_CLEAR_DEPTHSTENCIL;
   } else {
      ctx->clears_enabled &= ~(PIPE_CLEAR_COLOR0 << i);
      ctx->rp_clears_enabled &= ~(PIPE_CLEAR_COLOR0 << i);
   }
   if (ctx->rp_clears_enabled != rp_clears_enabled)
      ctx->rp_loadop_changed = true;
}

void
zink_fb_clears_apply(struct zink_context *ctx, struct pipe_resource *pres)
{
   if (zink_resource(pres)->aspect == VK_IMAGE_ASPECT_COLOR_BIT) {
      for (int i = 0; i < ctx->fb_state.nr_cbufs; i++) {
         if (ctx->fb_state.cbufs[i] && ctx->fb_state.cbufs[i]->texture == pres) {
            fb_clears_apply_internal(ctx, pres, i);
         }
      }
   } else {
      if (ctx->fb_state.zsbuf && ctx->fb_state.zsbuf->texture == pres) {
         fb_clears_apply_internal(ctx, pres, PIPE_MAX_COLOR_BUFS);
      }
   }
}

void
zink_fb_clears_discard(struct zink_context *ctx, struct pipe_resource *pres)
{
   if (zink_resource(pres)->aspect == VK_IMAGE_ASPECT_COLOR_BIT) {
      for (int i = 0; i < ctx->fb_state.nr_cbufs; i++) {
         if (ctx->fb_state.cbufs[i] && ctx->fb_state.cbufs[i]->texture == pres) {
            if (zink_fb_clear_enabled(ctx, i)) {
               zink_fb_clear_reset(ctx, i);
            }
         }
      }
   } else {
      if (zink_fb_clear_enabled(ctx, PIPE_MAX_COLOR_BUFS) && ctx->fb_state.zsbuf && ctx->fb_state.zsbuf->texture == pres) {
         int i = PIPE_MAX_COLOR_BUFS;
         zink_fb_clear_reset(ctx, i);
      }
   }
}

void
zink_clear_apply_conditionals(struct zink_context *ctx)
{
   for (int i = 0; i < ARRAY_SIZE(ctx->fb_clears); i++) {
      struct zink_framebuffer_clear *fb_clear = &ctx->fb_clears[i];
      if (!zink_fb_clear_enabled(ctx, i))
         continue;
      for (int j = 0; j < zink_fb_clear_count(fb_clear); j++) {
         struct zink_framebuffer_clear_data *clear = zink_fb_clear_element(fb_clear, j);
         if (clear->conditional) {
            struct pipe_surface *surf;
            if (i < PIPE_MAX_COLOR_BUFS)
               surf = ctx->fb_state.cbufs[i];
            else
               surf = ctx->fb_state.zsbuf;
            if (surf)
               fb_clears_apply_internal(ctx, surf->texture, i);
            else
               zink_fb_clear_reset(ctx, i);
            break;
         }
      }
   }
}
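/* given a region of a resource about to be accessed, walk the pending clears and either
 * flush them to the GPU or drop them: 'discard_only' suppresses the apply path, and
 * 'invert' flips the behavior when the region covers the whole resource
 */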
static void
fb_clears_apply_or_discard_internal(struct zink_context *ctx, struct pipe_resource *pres, struct u_rect region, bool discard_only, bool invert, int i)
{
   struct zink_framebuffer_clear *fb_clear = &ctx->fb_clears[i];
   if (zink_fb_clear_enabled(ctx, i)) {
      if (zink_blit_region_fills(region, pres->width0, pres->height0)) {
         if (invert)
            fb_clears_apply_internal(ctx, pres, i);
         else
            /* we know we can skip these */
            zink_fb_clears_discard(ctx, pres);
         return;
      }
      for (int j = 0; j < zink_fb_clear_count(fb_clear); j++) {
         struct zink_framebuffer_clear_data *clear = zink_fb_clear_element(fb_clear, j);
         struct u_rect scissor = {clear->scissor.minx, clear->scissor.maxx,
                                  clear->scissor.miny, clear->scissor.maxy};
         if (!clear->has_scissor || zink_blit_region_covers(region, scissor)) {
            /* this is a clear that isn't fully covered by our pending write */
            if (!discard_only)
               fb_clears_apply_internal(ctx, pres, i);
            return;
         }
      }
      if (!invert)
         /* if we haven't already returned, then we know we can discard */
         zink_fb_clears_discard(ctx, pres);
   }
}

void
zink_fb_clears_apply_or_discard(struct zink_context *ctx, struct pipe_resource *pres, struct u_rect region, bool discard_only)
{
   if (zink_resource(pres)->aspect == VK_IMAGE_ASPECT_COLOR_BIT) {
      for (int i = 0; i < ctx->fb_state.nr_cbufs; i++) {
         if (ctx->fb_state.cbufs[i] && ctx->fb_state.cbufs[i]->texture == pres) {
            fb_clears_apply_or_discard_internal(ctx, pres, region, discard_only, false, i);
         }
      }
   } else {
      if (zink_fb_clear_enabled(ctx, PIPE_MAX_COLOR_BUFS) && ctx->fb_state.zsbuf && ctx->fb_state.zsbuf->texture == pres) {
         fb_clears_apply_or_discard_internal(ctx, pres, region, discard_only, false, PIPE_MAX_COLOR_BUFS);
      }
   }
}

void
zink_fb_clears_apply_region(struct zink_context *ctx, struct pipe_resource *pres, struct u_rect region)
{
   if (zink_resource(pres)->aspect == VK_IMAGE_ASPECT_COLOR_BIT) {
      for (int i = 0; i < ctx->fb_state.nr_cbufs; i++) {
         if (ctx->fb_state.cbufs[i] && ctx->fb_state.cbufs[i]->texture == pres) {
            fb_clears_apply_or_discard_internal(ctx, pres, region, false, true, i);
         }
      }
   } else {
      if (ctx->fb_state.zsbuf && ctx->fb_state.zsbuf->texture == pres) {
         fb_clears_apply_or_discard_internal(ctx, pres, region, false, true, PIPE_MAX_COLOR_BUFS);
      }
   }
}