Lines Matching defs:set
428 struct radv_descriptor_set_header *set = &cmd_buffer->descriptors[i].push_set.set;
429 free(set->mapped_ptr);
430 if (set->layout)
431 vk_descriptor_set_layout_unref(&cmd_buffer->device->vk, &set->layout->vk);
432 vk_object_base_finish(&set->base);
479 vk_object_base_init(&device->vk, &cmd_buffer->descriptors[i].push_set.set.base,
946 struct radv_descriptor_set *set, unsigned idx)
951 descriptors_state->sets[idx] = set;
969 struct radv_descriptor_set *set = descriptors_state->sets[i];
970 data[i * 2] = (uint64_t)(uintptr_t)set;
971 data[i * 2 + 1] = (uint64_t)(uintptr_t)set >> 32;
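
The two stores at 970-971 split each bound set's pointer into two 32-bit halves so it fits a dword-sized trace buffer; a debugger reading the trace can rebuild the pointer from the pair. A minimal sketch of the round trip (only the `data[i * 2]` / `data[i * 2 + 1]` layout is taken from the source; the helper names are illustrative):

#include <stdint.h>

/* Store a 64-bit pointer into two consecutive 32-bit trace slots,
 * mirroring the layout written at lines 970-971. */
static void
trace_store_pointer(uint32_t *data, unsigned i, const void *ptr)
{
   data[i * 2] = (uint64_t)(uintptr_t)ptr;           /* low dword, truncated on store */
   data[i * 2 + 1] = (uint64_t)(uintptr_t)ptr >> 32; /* high dword */
}

/* Rebuild the pointer a debugger would recover from the trace buffer. */
static const void *
trace_load_pointer(const uint32_t *data, unsigned i)
{
   uint64_t va = (uint64_t)data[i * 2] | ((uint64_t)data[i * 2 + 1] << 32);
   return (const void *)(uintptr_t)va;
}
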
1021 struct radv_descriptor_set *set = descriptors_state->sets[start + i];
1023 radv_emit_shader_pointer_body(device, cs, set->header.va, true);
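
At 1023 the set's GPU address is written into user SGPRs with 32-bit pointers enabled (the `true` argument): only the low dword is emitted, and the high 32 bits are implied because descriptor memory sits in a 4 GiB window with fixed upper address bits. A sketch of that emit, assuming a radeon_emit-style dword writer and a fixed high word `address32_hi` (both stand-ins, not the driver's exact API):

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>

/* Hypothetical dword writer standing in for the driver's CS emitter. */
static void
emit_dword(uint32_t **cs, uint32_t dw)
{
   *(*cs)++ = dw;
}

/* Emit a descriptor-set address as either one or two user-SGPR dwords.
 * With 32-bit pointers the high word must equal the fixed upper bits of
 * the 32-bit-addressable window, so only the low dword is written. */
static void
emit_shader_pointer_body(uint32_t **cs, uint64_t va, bool use_32bit_pointers,
                         uint32_t address32_hi)
{
   emit_dword(cs, (uint32_t)va);
   if (use_32bit_pointers)
      assert(va == 0 || (va >> 32) == address32_hi);
   else
      emit_dword(cs, va >> 32);
}
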
1338 /* We don't set the DISABLE bits, because the HW can't have holes,
1339 * so the SPI color format is set to 32-bit 1-component. */
1466 /* Do not set the DISABLE bits for the unused attachments, as that
1887 /* As the result of min(A, 1x1) or replace(A, 1x1) are always 1x1, set the vertex rate
2007 /* If this bit is set, the FMASK decompression operation
2383 /* Conditionally set DB_Z_INFO.ZRANGE_PRECISION to 0 when the last
2576 /* Some default value we can set in the update. */
3414 struct radv_descriptor_set *set = (struct radv_descriptor_set *)&descriptors_state->push_set.set;
3417 if (!radv_cmd_buffer_upload_data(cmd_buffer, set->header.size, set->header.mapped_ptr,
3421 set->header.va = radv_buffer_get_va(cmd_buffer->upload.upload_bo);
3422 set->header.va += bo_offset;
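
Lines 3417-3422 copy the push set's CPU shadow into the per-command-buffer upload buffer, then derive its GPU address from the upload BO's base VA plus the offset the upload returned. The pattern in isolation, with illustrative names:

#include <stdint.h>
#include <string.h>

/* Minimal sketch of the upload at 3417-3422: copy `size` bytes into a
 * host-mapped upload buffer at `bo_offset`, then address the copy on
 * the GPU as base VA + offset. */
static uint64_t
upload_and_get_va(uint8_t *upload_map, uint64_t upload_base_va,
                  uint32_t bo_offset, const void *data, size_t size)
{
   memcpy(upload_map + bo_offset, data, size); /* CPU copy into the upload BO */
   return upload_base_va + bo_offset;          /* GPU address of the copy */
}
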
3441 struct radv_descriptor_set *set = descriptors_state->sets[i];
3443 set_va = set->header.va;
4027 /* Un-set the SGPR index so we know to re-emit it later. */
4349 /* Since the STORAGE bit isn't set we know that this is a meta operation.
4351 * set it here. */
4874 * command, set all entries of the pCommandBuffers array to
5198 struct radv_descriptor_set *set, unsigned idx)
5202 radv_set_descriptor_set(cmd_buffer, bind_point, set, idx);
5204 assert(set);
5205 assert(!(set->header.layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
5208 for (unsigned j = 0; j < set->header.buffer_count; ++j)
5209 if (set->descriptors[j])
5210 radv_cs_add_buffer(ws, cmd_buffer->cs, set->descriptors[j]);
5213 if (set->header.bo)
5214 radv_cs_add_buffer(ws, cmd_buffer->cs, set->header.bo);
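
radv_bind_descriptor_set at 5198-5214 registers every buffer the set references, plus the set's own backing BO, with the command stream; without that residency tracking the kernel would be free to move or evict the memory mid-submission. The shape of the pattern, with a hypothetical add-buffer callback in place of radv_cs_add_buffer:

#include <stddef.h>

struct buffer_object; /* opaque GPU buffer handle (hypothetical) */
typedef void (*add_buffer_fn)(void *cs, struct buffer_object *bo);

/* Keep every BO a descriptor set references resident for the lifetime
 * of the command stream, including the descriptor memory itself. */
static void
track_set_residency(void *cs, add_buffer_fn add_buffer,
                    struct buffer_object *const *descriptors,
                    unsigned buffer_count, struct buffer_object *set_bo)
{
   for (unsigned j = 0; j < buffer_count; ++j)
      if (descriptors[j])              /* NULL slots were never written */
         add_buffer(cs, descriptors[j]);

   if (set_bo)
      add_buffer(cs, set_bo);
}
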
5234 RADV_FROM_HANDLE(radv_descriptor_set, set, pDescriptorSets[i]);
5236 if (!set) {
5247 /* If the set is already bound we only need to update the
5249 if (descriptors_state->sets[set_idx] != set ||
5251 radv_bind_descriptor_set(cmd_buffer, pipelineBindPoint, set, set_idx);
5254 for (unsigned j = 0; j < set->header.layout->dynamic_offset_count; ++j, ++dyn_idx) {
5255 unsigned idx = j + layout->set[i + firstSet].dynamic_offset_start;
5259 struct radv_descriptor_range *range = set->header.dynamic_descriptors + j;
5283 cmd_buffer->push_constant_stages |= set->header.layout->dynamic_shader_stages;
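
In the vkCmdBindDescriptorSets path (5234-5283), a set is only re-bound when the stored pointer actually changed, but dynamic offsets are always consumed: each dynamic uniform/storage buffer in the set takes the next entry of pDynamicOffsets, in declaration order, and adds it to the buffer's captured base address. A sketch of that consumption, with a hypothetical range struct standing in for radv_descriptor_range:

#include <stdint.h>

/* Buffer range captured at descriptor-update time (illustrative fields). */
struct dyn_range {
   uint64_t va;   /* buffer base VA */
   uint32_t size; /* bound range in bytes */
};

/* Each dynamic buffer in the set consumes the next caller-provided
 * offset, shifting its effective GPU address for this bind. */
static void
apply_dynamic_offsets(const struct dyn_range *ranges, unsigned dynamic_count,
                      const uint32_t *pDynamicOffsets, unsigned *dyn_idx,
                      uint64_t *out_va)
{
   for (unsigned j = 0; j < dynamic_count; ++j, ++(*dyn_idx))
      out_va[j] = ranges[j].va + pDynamicOffsets[*dyn_idx];
}
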
5289 radv_init_push_descriptor_set(struct radv_cmd_buffer *cmd_buffer, struct radv_descriptor_set *set,
5295 set->header.size = layout->size;
5297 if (set->header.layout != layout) {
5298 if (set->header.layout)
5299 vk_descriptor_set_layout_unref(&cmd_buffer->device->vk, &set->header.layout->vk);
5301 set->header.layout = layout;
5304 if (descriptors_state->push_set.capacity < set->header.size) {
5305 size_t new_size = MAX2(set->header.size, 1024);
5309 free(set->header.mapped_ptr);
5310 set->header.mapped_ptr = malloc(new_size);
5312 if (!set->header.mapped_ptr) {
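
radv_init_push_descriptor_set (5289-5312) keeps one CPU-side buffer per bind point for push descriptors and grows it only when a layout needs more room, with a 1 KiB floor so small layouts don't trigger repeated reallocation. The growth policy in isolation, as a minimal sketch:

#include <stdbool.h>
#include <stdlib.h>

#define MAX2(a, b) ((a) > (b) ? (a) : (b))

/* Grow-on-demand storage for push descriptors. Contents are fully
 * rewritten on every push, so free + malloc is as good as realloc. */
static bool
ensure_push_capacity(void **mapped_ptr, size_t *capacity, size_t needed)
{
   if (*capacity >= needed)
      return true;

   size_t new_size = MAX2(needed, 1024); /* 1 KiB floor, as in the driver */
   free(*mapped_ptr);
   *mapped_ptr = malloc(new_size);
   if (!*mapped_ptr) {
      *capacity = 0;
      return false;
   }
   *capacity = new_size;
   return true;
}
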
5327 uint32_t set, uint32_t descriptorWriteCount,
5335 assert(set == 0);
5336 assert(layout->set[set].layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
5338 push_set->header.size = layout->set[set].layout->size;
5339 push_set->header.layout = layout->set[set].layout;
5352 radv_set_descriptor_set(cmd_buffer, pipelineBindPoint, push_set, set);
5357 VkPipelineLayout _layout, uint32_t set, uint32_t descriptorWriteCount,
5365 (struct radv_descriptor_set *)&descriptors_state->push_set.set;
5367 assert(layout->set[set].layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
5369 if (!radv_init_push_descriptor_set(cmd_buffer, push_set, layout->set[set].layout,
5385 radv_set_descriptor_set(cmd_buffer, pipelineBindPoint, push_set, set);
5392 VkPipelineLayout _layout, uint32_t set, const void *pData)
5400 (struct radv_descriptor_set *)&descriptors_state->push_set.set;
5402 assert(layout->set[set].layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
5404 if (!radv_init_push_descriptor_set(cmd_buffer, push_set, layout->set[set].layout,
5411 radv_set_descriptor_set(cmd_buffer, templ->bind_point, push_set, set);
5602 * GS ring pointers are set.
7362 * there is only ever 1 active set of scissor values at the same time.
7364 * 2) Whenever the hardware switches contexts we have to set the scissor
7481 * No further steps needed, just remember the SGPR's location is not set.
7609 * cmd_buffer->state.context_roll_without_scissor_emitted is set. */
7678 /* Need to apply this workaround early as it can set flush flags. */
7688 /* If we have to wait for idle, set all states first, so that
7690 * calls. Then upload descriptors, set shader pointers, and
7701 /* If we don't wait for idle, start prefetches first, then set
8386 * A starting offset for the grid. If unaligned is set, the offset
8582 /* If we have to wait for idle, set all states first, so that
8584 * calls. Then upload descriptors, set shader pointers, and
8606 /* If we don't wait for idle, start prefetches first, then set
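
The twin comments at 7688-7701 (draw) and 8582-8606 (dispatch) describe the same scheduling trade-off: when a wait-for-idle is pending, emit all state and descriptor pointers before the wait so those packets overlap with earlier work and the shader cores idle only briefly; when no wait is needed, start prefetches first so they overlap with state emission instead. A skeleton of the two orderings inferred from those comments (all function names are placeholders, not driver API):

#include <stdbool.h>

/* Empty stubs standing in for the real emission steps; only the
 * ordering matters here. */
static void emit_all_states(void) {}
static void upload_descriptors_and_set_pointers(void) {}
static void emit_wait_for_idle(void) {}
static void start_prefetches(void) {}
static void emit_draw_or_dispatch(void) {}

static void
flush_and_emit(bool need_wait_for_idle)
{
   if (need_wait_for_idle) {
      /* States first, so their packets are processed in parallel with
       * previous work and the CUs idle only briefly before the draw. */
      emit_all_states();
      upload_descriptors_and_set_pointers();
      emit_wait_for_idle();
      emit_draw_or_dispatch();
      start_prefetches(); /* last; the CUs are already busy again */
   } else {
      /* No wait needed: prefetches first so they overlap with state
       * emission, draw at the end. */
      start_prefetches();
      emit_all_states();
      upload_descriptors_and_set_pointers();
      emit_draw_or_dispatch();
   }
}
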
8623 * So if we set compute userdata & shader registers we should dirty the raytracing
9252 * default DB_Z_INFO.ZRANGE_PRECISION is set to 1, and we only
9850 * executed as normal. If the inverted flag is set, all commands are