Lines Matching defs:buffer
568 /* Allocate a buffer for the EOP bug on GFX9. */
820 * This is necessary in case the same cmd buffer is submitted again in the future.
2250 * depth/stencil buffer.
2523 * Update the fast clear color values if the image is bound as a color buffer.
2822 * bind our internal depth buffer that contains the VRS data as part of HTILE.
2825 struct radv_buffer *htile_buffer = cmd_buffer->device->vrs.buffer;
2902 /* With indirectly generated commands, the index buffer bind may be part of the
2903 * indirect command buffer, in which case the app may not have bound any yet. */
3719 struct radv_buffer *buffer = cmd_buffer->vertex_binding_buffers[binding];
3748 if (!buffer) {
3771 va = radv_buffer_get_va(buffer->bo);
3774 va += offset + buffer->offset;
3781 num_records = vk_buffer_range(&buffer->vk, offset, VK_WHOLE_SIZE);
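The num_records computation above clips the binding against the buffer's size. As a rough, hedged sketch of the assumed semantics (not the actual Mesa vk_buffer_range() helper), resolving an (offset, range) pair with VK_WHOLE_SIZE could look like this:

    #include <stdint.h>
    #include <vulkan/vulkan.h> /* for VK_WHOLE_SIZE */

    /* Assumed semantics: with VK_WHOLE_SIZE the binding extends from "offset"
     * to the end of the buffer, otherwise the caller-provided range is used. */
    static uint64_t
    resolve_buffer_range(uint64_t buffer_size, uint64_t offset, uint64_t range)
    {
       return range == VK_WHOLE_SIZE ? buffer_size - offset : range;
    }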
3932 struct radv_buffer *buffer = sb[i].buffer;
3938 va = radv_buffer_get_va(buffer->bo) + buffer->offset;
3945 * the buffer will be considered not bound and store
3950 /* Compute the correct buffer size for NGG streamout
3952 * buffer.
3955 size = buffer->vk.size - sb[i].offset;
4001 /* By default, NGG queries are disabled, but they are enabled if the command buffer has active GDS
4002 * queries or if it's a secondary command buffer that inherits the number of generated
4322 * the image/buffer, it often devolves to just VRAM/GTT anyway.
4873 * successfully created command buffer objects from this
5011 /* If the command buffer has already been reset with
5098 /* We have to defer setting up the vertex buffers since we need the buffer
5110 RADV_FROM_HANDLE(radv_buffer, buffer, pBuffers[i]);
5116 if (!!cmd_buffer->vertex_binding_buffers[idx] != !!buffer ||
5117 (buffer && ((vb[idx].offset & 0x3) != (pOffsets[i] & 0x3) ||
5122 cmd_buffer->vertex_binding_buffers[idx] = buffer;
5128 if (buffer) {
5177 radv_CmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5181 RADV_FROM_HANDLE(radv_buffer, index_buffer, buffer);
5438 * command buffer.
5443 * command buffer.
6240 /* When the secondary command buffer is compute-only, we don't
6247 /* When the secondary command buffer is graphics-only, we don't
6407 * copy the VRS rates to the HTILE buffer of the attachment.
6419 /* HTILE buffer */
6427 /* Copy the VRS rates to the HTILE buffer. */
6433 * to copy the VRS rates to our internal HTILE buffer.
6439 /* HTILE buffer */
6440 struct radv_buffer *htile_buffer = cmd_buffer->device->vrs.buffer;
6447 /* Copy the VRS rates to the HTILE buffer. */
6592 * When the current command buffer is predicating, emit a COND_EXEC packet
6660 * The starting address "index_va" may point anywhere within the index buffer. The number of
6661 * indexes allocated in the index buffer *past that point* is specified by "max_index_count".
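As a hedged sketch of what that comment describes (hypothetical names, not the driver's exact code), the two values for an indexed draw starting at first_index could be derived like this:

    #include <stdint.h>

    static void
    compute_index_args(uint64_t index_buffer_va, uint32_t index_count,
                       uint32_t first_index, uint32_t index_size, /* 1, 2 or 4 bytes */
                       uint64_t *index_va, uint32_t *max_index_count)
    {
       /* index_va may point anywhere inside the bound index buffer... */
       *index_va = index_buffer_va + (uint64_t)first_index * index_size;
       /* ...and max_index_count is the number of indices remaining past that point. */
       *max_index_count = index_count - first_index;
    }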
7236 * When the count buffer contains zero, DISPATCH_TASKMESH_INDIRECT_MULTI_ACE hangs.
7238 * is only executed when the count buffer contains a non-zero value.
7243 * - Reserve a dword in the upload buffer and initialize it to 1 for the workaround
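A hedged, CPU-side sketch of the guard this workaround builds (hypothetical names; the real code expresses the condition with GPU packets rather than a CPU branch):

    #include <stdint.h>

    static void
    dispatch_taskmesh_if_nonzero(uint32_t count, void (*emit_dispatch)(void))
    {
       /* DISPATCH_TASKMESH_INDIRECT_MULTI_ACE hangs for a zero count, so the
        * dispatch is only issued when the count buffer holds a non-zero value. */
       if (count != 0)
          emit_dispatch();
    }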
7823 struct radv_buffer *buffer, VkDeviceSize offset,
7840 /* Fill the buffer with all zeroes except instanceCount = 1.
7849 uint64_t va = radv_buffer_get_va(buffer->bo) + buffer->offset + offset;
7850 radv_cs_add_buffer(ws, cs, buffer->bo);
7893 /* The draw packet can now use this buffer: */
7894 struct radv_buffer buf = *buffer;
7904 radv_nv_task_indirect_bo(struct radv_cmd_buffer *cmd_buffer, struct radv_buffer *buffer,
7924 const uint64_t va = radv_buffer_get_va(buffer->bo) + buffer->offset + offset;
7925 radv_cs_add_buffer(ws, cs, buffer->bo);
7927 /* Fill the buffer with X=0, Y=1, Z=1. */
7942 /* Clamp draw count to fit the actual size of the buffer.
7943 * This is to avoid potential out-of-bounds copies (e.g. for draws with an indirect count buffer).
7946 draw_count = MIN2(draw_count, (buffer->vk.size - buffer->offset - offset) / src_stride);
7966 /* The draw packet can now use this buffer: */
7967 struct radv_buffer buf = *buffer;
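A hedged sketch of the conversion implied by "Fill the buffer with X=0, Y=1, Z=1": the application's VkDrawMeshTasksIndirectCommandNV only carries { taskCount, firstTask }, while the dispatch consumes three dimensions, so Y and Z are pre-filled with 1 and X is filled in from the application's data (assumed layout; "ace_dispatch_args" is a hypothetical name):

    #include <stdint.h>

    struct ace_dispatch_args {
       uint32_t x; /* written as 0 here, later taken from the app's taskCount */
       uint32_t y; /* pre-filled with 1 */
       uint32_t z; /* pre-filled with 1 */
    };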
8068 RADV_FROM_HANDLE(radv_buffer, buffer, _buffer);
8072 info.indirect = buffer;
8091 RADV_FROM_HANDLE(radv_buffer, buffer, _buffer);
8096 info.indirect = buffer;
8115 RADV_FROM_HANDLE(radv_buffer, buffer, _buffer);
8120 info.indirect = buffer;
8142 RADV_FROM_HANDLE(radv_buffer, buffer, _buffer);
8148 info.indirect = buffer;
8195 RADV_FROM_HANDLE(radv_buffer, buffer, _buffer);
8200 info.indirect = buffer;
8219 * reading the NV command's indirect buffer in the shader.
8223 * in the upload buffer and copy the data to it.
8227 uint64_t nv_ib_va = radv_buffer_get_va(buffer->bo) + buffer->offset + offset;
8230 radv_nv_task_indirect_bo(cmd_buffer, buffer, offset, drawCount, stride);
8238 radv_nv_mesh_indirect_bo(cmd_buffer, buffer, offset, drawCount, stride);
8256 RADV_FROM_HANDLE(radv_buffer, buffer, _buffer);
8262 info.indirect = buffer;
8276 uint64_t nv_ib_va = radv_buffer_get_va(buffer->bo) + buffer->offset + offset;
8279 radv_nv_task_indirect_bo(cmd_buffer, buffer, offset, maxDrawCount, stride);
8287 radv_nv_mesh_indirect_bo(cmd_buffer, buffer, offset, maxDrawCount, stride);
8674 RADV_FROM_HANDLE(radv_buffer, buffer, _buffer);
8677 info.indirect = buffer->bo;
8678 info.va = radv_buffer_get_va(buffer->bo) + buffer->offset + offset;
9583 * buffer (or an MSAA image using FMASK). Note that updating a buffer is considered a clear
9840 RADV_FROM_HANDLE(radv_buffer, buffer, pConditionalRenderingBegin->buffer);
9846 va = radv_buffer_get_va(buffer->bo) + buffer->offset + pConditionalRenderingBegin->offset;
9848 /* By default, if the 32-bit value at offset in buffer memory is zero,
9866 * "If the 32-bit value at offset in buffer memory is zero,
9869 * buffer memory changes while conditional rendering is
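For reference, a hedged application-side example of the semantics quoted above (assuming cmd_buf and condition_buf are valid handles): commands recorded between begin/end are discarded when the 32-bit value at the given offset is zero, or the opposite with the inverted flag.

    VkConditionalRenderingBeginInfoEXT cond_info = {
       .sType = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT,
       .buffer = condition_buf,
       .offset = 0,
       .flags = 0, /* or VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT */
    };
    vkCmdBeginConditionalRenderingEXT(cmd_buf, &cond_info);
    /* ... draws/dispatches executed only when the condition value is non-zero ... */
    vkCmdEndConditionalRenderingEXT(cmd_buf);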
9954 sb[idx].buffer = radv_buffer_from_handle(pBuffers[i]);
9958 sb[idx].size = sb[idx].buffer->vk.size - sb[idx].offset;
9963 radv_cs_add_buffer(cmd_buffer->device->ws, cmd_buffer->cs, sb[idx].buffer->bo);
10097 RADV_FROM_HANDLE(radv_buffer, buffer, pCounterBuffers[counter_buffer_idx]);
10098 uint64_t va = radv_buffer_get_va(buffer->bo);
10104 va += buffer->offset + counter_buffer_offset;
10115 radv_cs_add_buffer(cmd_buffer->device->ws, cs, buffer->bo);
10162 RADV_FROM_HANDLE(radv_buffer, buffer, pCounterBuffers[counter_buffer_idx]);
10168 va += radv_buffer_get_va(buffer->bo);
10169 va += buffer->offset + counter_buffer_offset;
10171 radv_cs_add_buffer(cmd_buffer->device->ws, cs, buffer->bo);
10221 /* The array of counter buffers is optional. */
10222 RADV_FROM_HANDLE(radv_buffer, buffer, pCounterBuffers[counter_buffer_idx]);
10223 uint64_t va = radv_buffer_get_va(buffer->bo);
10229 va += buffer->offset + counter_buffer_offset;
10240 radv_cs_add_buffer(cmd_buffer->device->ws, cs, buffer->bo);
10243 /* Deactivate transform feedback by zeroing the buffer size.
10245 * be enabled even if there is no buffer bound. This ensures
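A hedged application-side illustration of the "optional" counter buffers mentioned above (cmd_buf and counter_buf are assumed handles): an element of pCounterBuffers may be VK_NULL_HANDLE, in which case no counter is written for that slot.

    VkBuffer counter_buffers[2] = { counter_buf, VK_NULL_HANDLE };
    VkDeviceSize counter_offsets[2] = { 0, 0 };
    vkCmdEndTransformFeedbackEXT(cmd_buf, 0, 2, counter_buffers, counter_offsets);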
10274 /* The array of counter buffers is optional. */
10275 RADV_FROM_HANDLE(radv_buffer, buffer, pCounterBuffers[counter_buffer_idx]);
10276 uint64_t va = radv_buffer_get_va(buffer->bo);
10282 va += buffer->offset + counter_buffer_offset;
10288 radv_cs_add_buffer(cmd_buffer->device->ws, cs, buffer->bo);
10343 RADV_FROM_HANDLE(radv_buffer, buffer, dstBuffer);
10345 uint64_t va = radv_buffer_get_va(buffer->bo) + buffer->offset + dstOffset;