/*
 * Copyright © 2019 Raspberry Pi Ltd
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "vk_descriptors.h"
#include "vk_util.h"

#include "v3dv_private.h"
/*
 * For a given descriptor, defined by the descriptor_set it belongs to, its
 * binding layout, and its array index, returns the map region assigned to
 * it from the descriptor pool bo.
 */
static void *
descriptor_bo_map(struct v3dv_device *device,
                  struct v3dv_descriptor_set *set,
                  const struct v3dv_descriptor_set_binding_layout *binding_layout,
                  uint32_t array_index)
{
   /* Inline uniform blocks use BO memory to store UBO contents, not
    * descriptor data, so their descriptor BO size is 0 even though they
    * do use BO memory.
    */
   uint32_t bo_size = v3dv_X(device, descriptor_bo_size)(binding_layout->type);
   assert(bo_size > 0 ||
          binding_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK);

   return set->pool->bo->map +
      set->base_offset + binding_layout->descriptor_offset +
      array_index * bo_size;
}
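
/* Offset arithmetic sketch (hypothetical numbers, not driver code): if a
 * set was suballocated at base_offset 256, a binding of type
 * VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE has descriptor_offset 64, and the
 * per-descriptor BO size for that type is 32 bytes, then array element 3
 * of the binding maps at:
 *
 *    map + 256 + 64 + 3 * 32 = map + 416
 */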

static bool
descriptor_type_is_dynamic(VkDescriptorType type)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      return true;
   default:
      return false;
   }
}

/*
 * Tries to get a real descriptor using a descriptor map index from the
 * descriptor_state + pipeline_layout.
 */
struct v3dv_descriptor *
v3dv_descriptor_map_get_descriptor(struct v3dv_descriptor_state *descriptor_state,
                                   struct v3dv_descriptor_map *map,
                                   struct v3dv_pipeline_layout *pipeline_layout,
                                   uint32_t index,
                                   uint32_t *dynamic_offset)
{
   assert(index < map->num_desc);

   uint32_t set_number = map->set[index];
   assert((descriptor_state->valid & 1 << set_number));

   struct v3dv_descriptor_set *set =
      descriptor_state->descriptor_sets[set_number];
   assert(set);

   uint32_t binding_number = map->binding[index];
   assert(binding_number < set->layout->binding_count);

   const struct v3dv_descriptor_set_binding_layout *binding_layout =
      &set->layout->binding[binding_number];

   uint32_t array_index = map->array_index[index];
   assert(array_index < binding_layout->array_size);

   if (descriptor_type_is_dynamic(binding_layout->type)) {
      uint32_t dynamic_offset_index =
         pipeline_layout->set[set_number].dynamic_offset_start +
         binding_layout->dynamic_offset_index + array_index;

      *dynamic_offset = descriptor_state->dynamic_offsets[dynamic_offset_index];
   }

   return &set->descriptors[binding_layout->descriptor_index + array_index];
}
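
/* Indexing sketch (hypothetical layout, not driver code): with two sets
 * where set 0 has one dynamic UBO and set 1 has a binding with two dynamic
 * SSBOs, pipeline_layout->set[1].dynamic_offset_start is 1, so array
 * element 1 of that binding reads dynamic_offsets[1 + 0 + 1], i.e. the
 * third offset passed to vkCmdBindDescriptorSets().
 */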

/* Equivalent to map_get_descriptor but it returns a reloc with the bo
 * associated with that descriptor (a suballocation of the descriptor pool
 * bo).
 *
 * It also returns the descriptor type, so the caller can do extra
 * validation or add extra offsets if the bo contains more than one field.
 */
struct v3dv_cl_reloc
v3dv_descriptor_map_get_descriptor_bo(struct v3dv_device *device,
                                      struct v3dv_descriptor_state *descriptor_state,
                                      struct v3dv_descriptor_map *map,
                                      struct v3dv_pipeline_layout *pipeline_layout,
                                      uint32_t index,
                                      VkDescriptorType *out_type)
{
   assert(index < map->num_desc);

   uint32_t set_number = map->set[index];
   assert(descriptor_state->valid & 1 << set_number);

   struct v3dv_descriptor_set *set =
      descriptor_state->descriptor_sets[set_number];
   assert(set);

   uint32_t binding_number = map->binding[index];
   assert(binding_number < set->layout->binding_count);

   const struct v3dv_descriptor_set_binding_layout *binding_layout =
      &set->layout->binding[binding_number];

   assert(binding_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK ||
          v3dv_X(device, descriptor_bo_size)(binding_layout->type) > 0);
   if (out_type)
      *out_type = binding_layout->type;

   uint32_t array_index = map->array_index[index];
   assert(array_index < binding_layout->array_size);

   struct v3dv_cl_reloc reloc = {
      .bo = set->pool->bo,
      .offset = set->base_offset + binding_layout->descriptor_offset +
      array_index * v3dv_X(device, descriptor_bo_size)(binding_layout->type),
   };

   return reloc;
}

/*
 * The difference between this method and v3dv_descriptor_map_get_descriptor
 * is that if the samplers were added as immutable when creating the set
 * layout, they are bound to the set layout and are not part of the
 * descriptor per se. This method returns early in that case.
 */
const struct v3dv_sampler *
v3dv_descriptor_map_get_sampler(struct v3dv_descriptor_state *descriptor_state,
                                struct v3dv_descriptor_map *map,
                                struct v3dv_pipeline_layout *pipeline_layout,
                                uint32_t index)
{
   assert(index < map->num_desc);

   uint32_t set_number = map->set[index];
   assert(descriptor_state->valid & 1 << set_number);

   struct v3dv_descriptor_set *set =
      descriptor_state->descriptor_sets[set_number];
   assert(set);

   uint32_t binding_number = map->binding[index];
   assert(binding_number < set->layout->binding_count);

   const struct v3dv_descriptor_set_binding_layout *binding_layout =
      &set->layout->binding[binding_number];

   uint32_t array_index = map->array_index[index];
   assert(array_index < binding_layout->array_size);

   if (binding_layout->immutable_samplers_offset != 0) {
      assert(binding_layout->type == VK_DESCRIPTOR_TYPE_SAMPLER ||
             binding_layout->type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);

      const struct v3dv_sampler *immutable_samplers =
         v3dv_immutable_samplers(set->layout, binding_layout);

      assert(immutable_samplers);
      const struct v3dv_sampler *sampler = &immutable_samplers[array_index];
      assert(sampler);

      return sampler;
   }

   struct v3dv_descriptor *descriptor =
      &set->descriptors[binding_layout->descriptor_index + array_index];

   assert(descriptor->type == VK_DESCRIPTOR_TYPE_SAMPLER ||
          descriptor->type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);

   assert(descriptor->sampler);

   return descriptor->sampler;
}


struct v3dv_cl_reloc
v3dv_descriptor_map_get_sampler_state(struct v3dv_device *device,
                                      struct v3dv_descriptor_state *descriptor_state,
                                      struct v3dv_descriptor_map *map,
                                      struct v3dv_pipeline_layout *pipeline_layout,
                                      uint32_t index)
{
   VkDescriptorType type;
   struct v3dv_cl_reloc reloc =
      v3dv_descriptor_map_get_descriptor_bo(device, descriptor_state, map,
                                            pipeline_layout,
                                            index, &type);

   assert(type == VK_DESCRIPTOR_TYPE_SAMPLER ||
          type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);

   if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
      reloc.offset += v3dv_X(device, combined_image_sampler_sampler_state_offset)();

   return reloc;
}
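
/* Layout sketch for the combined image+sampler descriptor region this
 * relies on (offsets are hypothetical, the real ones come from the
 * per-version v3dv_X() hooks): the region holds the texture shader state
 * first, followed by the sampler state:
 *
 *    [0 .. texture_state_size)            texture shader state
 *    [sampler_state_offset .. bo_size)    sampler state
 *
 * so the sampler-state reloc above adds
 * combined_image_sampler_sampler_state_offset() to the descriptor reloc,
 * and the texture-state reloc (below) adds
 * combined_image_sampler_texture_state_offset().
 */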

struct v3dv_bo*
v3dv_descriptor_map_get_texture_bo(struct v3dv_descriptor_state *descriptor_state,
                                   struct v3dv_descriptor_map *map,
                                   struct v3dv_pipeline_layout *pipeline_layout,
                                   uint32_t index)
{
   struct v3dv_descriptor *descriptor =
      v3dv_descriptor_map_get_descriptor(descriptor_state, map,
                                         pipeline_layout, index, NULL);

   switch (descriptor->type) {
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      assert(descriptor->buffer_view);
      return descriptor->buffer_view->buffer->mem->bo;
   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
      assert(descriptor->image_view);
      struct v3dv_image *image =
         (struct v3dv_image *) descriptor->image_view->vk.image;
      return image->mem->bo;
   }
   default:
      unreachable("descriptor type doesn't have a texture bo");
   }
}

struct v3dv_cl_reloc
v3dv_descriptor_map_get_texture_shader_state(struct v3dv_device *device,
                                             struct v3dv_descriptor_state *descriptor_state,
                                             struct v3dv_descriptor_map *map,
                                             struct v3dv_pipeline_layout *pipeline_layout,
                                             uint32_t index)
{
   VkDescriptorType type;
   struct v3dv_cl_reloc reloc =
      v3dv_descriptor_map_get_descriptor_bo(device,
                                            descriptor_state, map,
                                            pipeline_layout,
                                            index, &type);

   assert(type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
          type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
          type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT ||
          type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ||
          type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER ||
          type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);

   if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
      reloc.offset += v3dv_X(device, combined_image_sampler_texture_state_offset)();

   return reloc;
}

#define SHA1_UPDATE_VALUE(ctx, x) _mesa_sha1_update(ctx, &(x), sizeof(x));

static void
sha1_update_descriptor_set_binding_layout(struct mesa_sha1 *ctx,
                                          const struct v3dv_descriptor_set_binding_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->type);
   SHA1_UPDATE_VALUE(ctx, layout->array_size);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_index);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_count);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_index);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_offset);
   SHA1_UPDATE_VALUE(ctx, layout->immutable_samplers_offset);
}

static void
sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
                                  const struct v3dv_descriptor_set_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->flags);
   SHA1_UPDATE_VALUE(ctx, layout->binding_count);
   SHA1_UPDATE_VALUE(ctx, layout->shader_stages);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_count);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_count);

   for (uint16_t i = 0; i < layout->binding_count; i++)
      sha1_update_descriptor_set_binding_layout(ctx, &layout->binding[i]);
}


/*
 * As anv and tu already point out:
 *
 * "Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together."
 */

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_CreatePipelineLayout(VkDevice _device,
                         const VkPipelineLayoutCreateInfo *pCreateInfo,
                         const VkAllocationCallbacks *pAllocator,
                         VkPipelineLayout *pPipelineLayout)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   struct v3dv_pipeline_layout *layout;

   assert(pCreateInfo->sType ==
          VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_object_zalloc(&device->vk, pAllocator, sizeof(*layout),
                             VK_OBJECT_TYPE_PIPELINE_LAYOUT);
   if (layout == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   uint32_t dynamic_offset_count = 0;
   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      V3DV_FROM_HANDLE(v3dv_descriptor_set_layout, set_layout,
                     pCreateInfo->pSetLayouts[set]);
      v3dv_descriptor_set_layout_ref(set_layout);
      layout->set[set].layout = set_layout;
      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         dynamic_offset_count += set_layout->binding[b].array_size *
            set_layout->binding[b].dynamic_offset_count;
      }

      layout->shader_stages |= set_layout->shader_stages;
   }

   layout->push_constant_size = 0;
   for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
      const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
      layout->push_constant_size =
         MAX2(layout->push_constant_size, range->offset + range->size);
   }

   layout->push_constant_size = align(layout->push_constant_size, 4096);

   layout->dynamic_offset_count = dynamic_offset_count;

   struct mesa_sha1 ctx;
   _mesa_sha1_init(&ctx);
   for (unsigned s = 0; s < layout->num_sets; s++) {
      sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
      _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
                        sizeof(layout->set[s].dynamic_offset_start));
   }
   _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
   _mesa_sha1_final(&ctx, layout->sha1);

   *pPipelineLayout = v3dv_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}
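
/* The sha1 above captures everything that defines the layout's interface,
 * so two pipeline layouts built from identical set layouts produce the
 * same hash. An illustrative (not driver) equivalence check:
 *
 *    memcmp(layout_a->sha1, layout_b->sha1, sizeof(layout_a->sha1)) == 0
 */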

VKAPI_ATTR void VKAPI_CALL
v3dv_DestroyPipelineLayout(VkDevice _device,
                          VkPipelineLayout _pipelineLayout,
                          const VkAllocationCallbacks *pAllocator)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;

   for (uint32_t i = 0; i < pipeline_layout->num_sets; i++)
      v3dv_descriptor_set_layout_unref(device, pipeline_layout->set[i].layout);

   vk_object_free(&device->vk, pAllocator, pipeline_layout);
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_CreateDescriptorPool(VkDevice _device,
                          const VkDescriptorPoolCreateInfo *pCreateInfo,
                          const VkAllocationCallbacks *pAllocator,
                          VkDescriptorPool *pDescriptorPool)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   struct v3dv_descriptor_pool *pool;
   /* size is for the Vulkan object descriptor pool. The final size depends
    * on whether the FREE_DESCRIPTOR_SET flag is used.
    */
   uint64_t size = sizeof(struct v3dv_descriptor_pool);
   /* bo_size is for the descriptor-related info that we need to have at a
    * GPU address (i.e. in memory allocated with v3dv_bo_alloc), such as the
    * texture sampler state. Note that not all descriptor types use it.
    */
   uint32_t bo_size = 0;
   uint32_t descriptor_count = 0;

   const VkDescriptorPoolInlineUniformBlockCreateInfo *inline_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO);

   for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
      /* Verify supported descriptor type */
      switch (pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         break;
      default:
         unreachable("Unimplemented descriptor type");
         break;
      }

      assert(pCreateInfo->pPoolSizes[i].descriptorCount > 0);
      if (pCreateInfo->pPoolSizes[i].type ==
          VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         /* Inline uniform blocks are specified to use the descriptor array
          * size as the size in bytes of the block.
          */
         assert(inline_info);
         descriptor_count += inline_info->maxInlineUniformBlockBindings;
         bo_size += pCreateInfo->pPoolSizes[i].descriptorCount;
      } else {
         descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         bo_size += v3dv_X(device, descriptor_bo_size)(pCreateInfo->pPoolSizes[i].type) *
            pCreateInfo->pPoolSizes[i].descriptorCount;
      }
   }

   /* We align all our buffers to V3D_NON_COHERENT_ATOM_SIZE, so make sure
    * we allocate enough memory to honor that requirement for all our inline
    * buffers too.
    */
   if (inline_info) {
      bo_size += V3D_NON_COHERENT_ATOM_SIZE *
                 inline_info->maxInlineUniformBlockBindings;
   }

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      uint64_t host_size =
         pCreateInfo->maxSets * sizeof(struct v3dv_descriptor_set);
      host_size += sizeof(struct v3dv_descriptor) * descriptor_count;
      size += host_size;
   } else {
      size += sizeof(struct v3dv_descriptor_pool_entry) * pCreateInfo->maxSets;
   }
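
   /* Sizing sketch (hypothetical numbers, not driver code): a pool created
    * without FREE_DESCRIPTOR_SET, with maxSets = 2 and a single pool size
    * of 4 SAMPLED_IMAGE descriptors, needs host memory for:
    *
    *    2 * sizeof(struct v3dv_descriptor_set) +
    *    4 * sizeof(struct v3dv_descriptor)
    *
    * on top of the pool object itself, plus a BO of
    * 4 * descriptor_bo_size(SAMPLED_IMAGE) bytes for the shader state.
    */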

   pool = vk_object_zalloc(&device->vk, pAllocator, size,
                           VK_OBJECT_TYPE_DESCRIPTOR_POOL);

   if (!pool)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      pool->host_memory_base = (uint8_t*)pool + sizeof(struct v3dv_descriptor_pool);
      pool->host_memory_ptr = pool->host_memory_base;
      pool->host_memory_end = (uint8_t*)pool + size;
   }

   pool->max_entry_count = pCreateInfo->maxSets;

   if (bo_size > 0) {
      pool->bo = v3dv_bo_alloc(device, bo_size, "descriptor pool bo", true);
      if (!pool->bo)
         goto out_of_device_memory;

      bool ok = v3dv_bo_map(device, pool->bo, pool->bo->size);
      if (!ok)
         goto out_of_device_memory;

      pool->current_offset = 0;
   } else {
      pool->bo = NULL;
   }

   list_inithead(&pool->set_list);

   *pDescriptorPool = v3dv_descriptor_pool_to_handle(pool);

   return VK_SUCCESS;

 out_of_device_memory:
   vk_object_free(&device->vk, pAllocator, pool);
   return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
}

static void
descriptor_set_destroy(struct v3dv_device *device,
                       struct v3dv_descriptor_pool *pool,
                       struct v3dv_descriptor_set *set,
                       bool free_bo)
{
   assert(!pool->host_memory_base);

   if (free_bo && !pool->host_memory_base) {
      for (uint32_t i = 0; i < pool->entry_count; i++) {
         if (pool->entries[i].set == set) {
            memmove(&pool->entries[i], &pool->entries[i+1],
                    sizeof(pool->entries[i]) * (pool->entry_count - i - 1));
            --pool->entry_count;
            break;
         }
      }
   }
   vk_object_free(&device->vk, NULL, set);
}

VKAPI_ATTR void VKAPI_CALL
v3dv_DestroyDescriptorPool(VkDevice _device,
                           VkDescriptorPool _pool,
                           const VkAllocationCallbacks *pAllocator)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   list_for_each_entry_safe(struct v3dv_descriptor_set, set,
                            &pool->set_list, pool_link) {
      v3dv_descriptor_set_layout_unref(device, set->layout);
   }

   if (!pool->host_memory_base) {
      for (int i = 0; i < pool->entry_count; ++i) {
         descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
   }

   if (pool->bo) {
      v3dv_bo_free(device, pool->bo);
      pool->bo = NULL;
   }

   vk_object_free(&device->vk, pAllocator, pool);
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_ResetDescriptorPool(VkDevice _device,
                         VkDescriptorPool descriptorPool,
                         VkDescriptorPoolResetFlags flags)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, descriptorPool);

   list_for_each_entry_safe(struct v3dv_descriptor_set, set,
                            &pool->set_list, pool_link) {
      v3dv_descriptor_set_layout_unref(device, set->layout);
   }
   list_inithead(&pool->set_list);

   if (!pool->host_memory_base) {
      for (int i = 0; i < pool->entry_count; ++i) {
         descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
   } else {
      /* We clean up the host memory so that sets subsequently allocated
       * from the pool are already zeroed.
       */
      uint32_t host_size = pool->host_memory_end - pool->host_memory_base;
      memset(pool->host_memory_base, 0, host_size);
   }

   pool->entry_count = 0;
   pool->host_memory_ptr = pool->host_memory_base;
   pool->current_offset = 0;

   return VK_SUCCESS;
}

void
v3dv_descriptor_set_layout_destroy(struct v3dv_device *device,
                                   struct v3dv_descriptor_set_layout *set_layout)
{
   assert(set_layout->ref_cnt == 0);
   vk_object_base_finish(&set_layout->base);
   vk_free2(&device->vk.alloc, NULL, set_layout);
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_CreateDescriptorSetLayout(VkDevice _device,
                               const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                               const VkAllocationCallbacks *pAllocator,
                               VkDescriptorSetLayout *pSetLayout)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   struct v3dv_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t num_bindings = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);

      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
       *
       *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
       *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
       *    pImmutableSamplers can be used to initialize a set of immutable
       *    samplers. [...]  If descriptorType is not one of these descriptor
       *    types, then pImmutableSamplers is ignored."
       *
       * We need to be careful here and only parse pImmutableSamplers if we
       * have one of the right descriptor types.
       */
      VkDescriptorType desc_type = pCreateInfo->pBindings[j].descriptorType;
      if ((desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
           desc_type == VK_DESCRIPTOR_TYPE_SAMPLER) &&
           pCreateInfo->pBindings[j].pImmutableSamplers) {
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
      }
   }

   /* We place immutable samplers after the binding data. We want to use
    * offsetof instead of any sizeof(struct v3dv_descriptor_set_layout)
    * because the latter may include padding at the end of the struct.
    */
   uint32_t samplers_offset =
      offsetof(struct v3dv_descriptor_set_layout, binding[num_bindings]);

   uint32_t size = samplers_offset +
      immutable_sampler_count * sizeof(struct v3dv_sampler);

   /* Descriptor set layouts are reference counted and therefore can survive
    * vkDestroyDescriptorSetLayout, so they need to be allocated with a
    * device scope.
    */
   set_layout =
      vk_zalloc(&device->vk.alloc, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
   if (!set_layout)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &set_layout->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT);

   struct v3dv_sampler *samplers = (void*) &set_layout->binding[num_bindings];

   assert(pCreateInfo->bindingCount == 0 || num_bindings > 0);

   VkDescriptorSetLayoutBinding *bindings = NULL;
   VkResult result = vk_create_sorted_bindings(pCreateInfo->pBindings,
                                               pCreateInfo->bindingCount, &bindings);
   if (result != VK_SUCCESS) {
      v3dv_descriptor_set_layout_destroy(device, set_layout);
      return vk_error(device, result);
   }

   set_layout->binding_count = num_bindings;
   set_layout->flags = pCreateInfo->flags;
   set_layout->shader_stages = 0;
   set_layout->bo_size = 0;
   set_layout->ref_cnt = 1;

   uint32_t descriptor_count = 0;
   uint32_t dynamic_offset_count = 0;

   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + i;
      uint32_t binding_number = binding->binding;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[binding_number].dynamic_offset_count = 1;
         break;
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         /* Nothing to do here, these cases are listed only so the default
          * case below filters out unsupported descriptor types.
          */
         break;
      default:
         unreachable("Unknown descriptor type\n");
         break;
      }

      set_layout->binding[binding_number].type = binding->descriptorType;
      set_layout->binding[binding_number].array_size = binding->descriptorCount;
      set_layout->binding[binding_number].descriptor_index = descriptor_count;
      set_layout->binding[binding_number].dynamic_offset_index = dynamic_offset_count;

      if ((binding->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
           binding->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) &&
          binding->pImmutableSamplers) {

         set_layout->binding[binding_number].immutable_samplers_offset = samplers_offset;

         for (uint32_t i = 0; i < binding->descriptorCount; i++)
            samplers[i] = *v3dv_sampler_from_handle(binding->pImmutableSamplers[i]);

         samplers += binding->descriptorCount;
         samplers_offset += sizeof(struct v3dv_sampler) * binding->descriptorCount;
      }

      set_layout->shader_stages |= binding->stageFlags;

      if (binding->descriptorType != VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         dynamic_offset_count += binding->descriptorCount *
            set_layout->binding[binding_number].dynamic_offset_count;

         descriptor_count += binding->descriptorCount;

         set_layout->binding[binding_number].descriptor_offset =
            set_layout->bo_size;
         set_layout->bo_size +=
            v3dv_X(device, descriptor_bo_size)(set_layout->binding[binding_number].type) *
            binding->descriptorCount;
      } else {
         /* We align all our buffers, inline buffers too. We made sure to
          * take this into account when calculating total BO size
          * requirements at pool creation time.
          */
         set_layout->bo_size = align(set_layout->bo_size,
                                     V3D_NON_COHERENT_ATOM_SIZE);

         set_layout->binding[binding_number].descriptor_offset =
            set_layout->bo_size;

         /* Inline uniform blocks are not arrayed, instead descriptorCount
          * specifies the size of the buffer in bytes.
          */
         set_layout->bo_size += binding->descriptorCount;
         descriptor_count++;
      }
   }

   free(bindings);

   set_layout->descriptor_count = descriptor_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = v3dv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
v3dv_DestroyDescriptorSetLayout(VkDevice _device,
                                VkDescriptorSetLayout _set_layout,
                                const VkAllocationCallbacks *pAllocator)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   v3dv_descriptor_set_layout_unref(device, set_layout);
}

static inline VkResult
out_of_pool_memory(const struct v3dv_device *device,
                   const struct v3dv_descriptor_pool *pool)
{
   /* Don't log OOPM errors for internal driver pools; we handle these
    * properly by allocating a new pool, so they don't point to real issues.
    */
   if (!pool->is_driver_internal)
      return vk_error(device, VK_ERROR_OUT_OF_POOL_MEMORY);
   else
      return VK_ERROR_OUT_OF_POOL_MEMORY;
}

static VkResult
descriptor_set_create(struct v3dv_device *device,
                      struct v3dv_descriptor_pool *pool,
                      struct v3dv_descriptor_set_layout *layout,
                      struct v3dv_descriptor_set **out_set)
{
   struct v3dv_descriptor_set *set;
   uint32_t descriptor_count = layout->descriptor_count;
   unsigned mem_size = sizeof(struct v3dv_descriptor_set) +
      sizeof(struct v3dv_descriptor) * descriptor_count;

   if (pool->host_memory_base) {
      if (pool->host_memory_end - pool->host_memory_ptr < mem_size)
         return out_of_pool_memory(device, pool);

      set = (struct v3dv_descriptor_set*)pool->host_memory_ptr;
      pool->host_memory_ptr += mem_size;

      vk_object_base_init(&device->vk, &set->base, VK_OBJECT_TYPE_DESCRIPTOR_SET);
   } else {
      set = vk_object_zalloc(&device->vk, NULL, mem_size,
                             VK_OBJECT_TYPE_DESCRIPTOR_SET);

      if (!set)
         return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   set->pool = pool;

   set->layout = layout;

   /* FIXME: VK_EXT_descriptor_indexing introduces
    * VARIABLE_DESCRIPTOR_LAYOUT_COUNT. That would affect the layout_size used
    * below for bo allocation
    */

   uint32_t offset = 0;
   uint32_t index = pool->entry_count;

   if (layout->bo_size) {
      if (!pool->host_memory_base && pool->entry_count == pool->max_entry_count) {
         vk_object_free(&device->vk, NULL, set);
         return out_of_pool_memory(device, pool);
      }

      /* We first try to allocate linearly, so that we don't spend time
       * looking for gaps if the app only allocates & resets via the pool.
       *
       * If that fails, we try to find a gap from previously freed subregions
       * iterating through the descriptor pool entries. Note that we are not
       * doing that if we have a pool->host_memory_base. We only have that if
       * VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT is not set, so in
       * that case the user can't free subregions, so it doesn't make sense to
       * even try (or track those subregions).
       */
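      /* Gap-search sketch (hypothetical offsets, not driver code): with a
       * 256-byte pool BO holding live entries at [0,64) and [128,192), a
       * request for a 64-byte set first fails the linear check when
       * current_offset is already 256, then the loop below finds the
       * [64,128) gap and inserts the new entry between the two existing
       * ones.
       */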
      if (pool->current_offset + layout->bo_size <= pool->bo->size) {
         offset = pool->current_offset;
         pool->current_offset += layout->bo_size;
      } else if (!pool->host_memory_base) {
         for (index = 0; index < pool->entry_count; index++) {
            if (pool->entries[index].offset - offset >= layout->bo_size)
               break;
            offset = pool->entries[index].offset + pool->entries[index].size;
         }
         if (pool->bo->size - offset < layout->bo_size) {
            vk_object_free(&device->vk, NULL, set);
            return out_of_pool_memory(device, pool);
         }
         memmove(&pool->entries[index + 1], &pool->entries[index],
                 sizeof(pool->entries[0]) * (pool->entry_count - index));
      } else {
         assert(pool->host_memory_base);
         return out_of_pool_memory(device, pool);
      }

      set->base_offset = offset;
   }

   if (!pool->host_memory_base) {
      pool->entries[index].set = set;
      pool->entries[index].offset = offset;
      pool->entries[index].size = layout->bo_size;
      pool->entry_count++;
   }

   /* Go through and fill out immutable samplers if we have any */
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].immutable_samplers_offset == 0)
         continue;

      const struct v3dv_sampler *samplers =
         (const struct v3dv_sampler *)((const char *)layout +
                                       layout->binding[b].immutable_samplers_offset);

      for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
         uint32_t combined_offset =
            layout->binding[b].type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ?
            v3dv_X(device, combined_image_sampler_sampler_state_offset)() : 0;

         void *desc_map = descriptor_bo_map(device, set, &layout->binding[b], i);
         desc_map += combined_offset;

         memcpy(desc_map,
                samplers[i].sampler_state,
                sizeof(samplers[i].sampler_state));
      }
   }

   v3dv_descriptor_set_layout_ref(layout);
   list_addtail(&set->pool_link, &pool->set_list);

   *out_set = set;

   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_AllocateDescriptorSets(VkDevice _device,
                            const VkDescriptorSetAllocateInfo *pAllocateInfo,
                            VkDescriptorSet *pDescriptorSets)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   struct v3dv_descriptor_set *set = NULL;
   uint32_t i = 0;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      V3DV_FROM_HANDLE(v3dv_descriptor_set_layout, layout,
                       pAllocateInfo->pSetLayouts[i]);

      result = descriptor_set_create(device, pool, layout, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = v3dv_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS) {
      v3dv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                              i, pDescriptorSets);
      for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
         pDescriptorSets[i] = VK_NULL_HANDLE;
      }
   }

   return result;
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_FreeDescriptorSets(VkDevice _device,
                        VkDescriptorPool descriptorPool,
                        uint32_t count,
                        const VkDescriptorSet *pDescriptorSets)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      V3DV_FROM_HANDLE(v3dv_descriptor_set, set, pDescriptorSets[i]);

      if (set) {
         v3dv_descriptor_set_layout_unref(device, set->layout);
         list_del(&set->pool_link);
         if (!pool->host_memory_base)
            descriptor_set_destroy(device, pool, set, true);
      }
   }

   return VK_SUCCESS;
}

static void
descriptor_bo_copy(struct v3dv_device *device,
                   struct v3dv_descriptor_set *dst_set,
                   const struct v3dv_descriptor_set_binding_layout *dst_binding_layout,
                   uint32_t dst_array_index,
                   struct v3dv_descriptor_set *src_set,
                   const struct v3dv_descriptor_set_binding_layout *src_binding_layout,
                   uint32_t src_array_index)
{
   assert(dst_binding_layout->type == src_binding_layout->type);

   void *dst_map = descriptor_bo_map(device, dst_set, dst_binding_layout, dst_array_index);
   void *src_map = descriptor_bo_map(device, src_set, src_binding_layout, src_array_index);

   memcpy(dst_map, src_map, v3dv_X(device, descriptor_bo_size)(src_binding_layout->type));
}

static void
write_buffer_descriptor(struct v3dv_descriptor *descriptor,
                        VkDescriptorType desc_type,
                        const VkDescriptorBufferInfo *buffer_info)
{
   V3DV_FROM_HANDLE(v3dv_buffer, buffer, buffer_info->buffer);

   descriptor->type = desc_type;
   descriptor->buffer = buffer;
   descriptor->offset = buffer_info->offset;
   if (buffer_info->range == VK_WHOLE_SIZE) {
      descriptor->range = buffer->size - buffer_info->offset;
   } else {
      assert(buffer_info->range <= UINT32_MAX);
      descriptor->range = buffer_info->range;
   }
}

static void
write_image_descriptor(struct v3dv_device *device,
                       struct v3dv_descriptor *descriptor,
                       VkDescriptorType desc_type,
                       struct v3dv_descriptor_set *set,
                       const struct v3dv_descriptor_set_binding_layout *binding_layout,
                       struct v3dv_image_view *iview,
                       struct v3dv_sampler *sampler,
                       uint32_t array_index)
{
   descriptor->type = desc_type;
   descriptor->sampler = sampler;
   descriptor->image_view = iview;

   void *desc_map = descriptor_bo_map(device, set,
                                      binding_layout, array_index);

   if (iview) {
      const uint32_t tex_state_index =
         iview->vk.view_type != VK_IMAGE_VIEW_TYPE_CUBE_ARRAY ||
         desc_type != VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ? 0 : 1;
      memcpy(desc_map,
             iview->texture_shader_state[tex_state_index],
             sizeof(iview->texture_shader_state[0]));
      desc_map += v3dv_X(device, combined_image_sampler_sampler_state_offset)();
   }

   if (sampler && !binding_layout->immutable_samplers_offset) {
      /* For immutable samplers this was already done as part of the
       * descriptor set create, as that info can't change later
       */
      memcpy(desc_map,
             sampler->sampler_state,
             sizeof(sampler->sampler_state));
   }
}


static void
write_buffer_view_descriptor(struct v3dv_device *device,
                             struct v3dv_descriptor *descriptor,
                             VkDescriptorType desc_type,
                             struct v3dv_descriptor_set *set,
                             const struct v3dv_descriptor_set_binding_layout *binding_layout,
                             struct v3dv_buffer_view *bview,
                             uint32_t array_index)
{
   assert(bview);
   descriptor->type = desc_type;
   descriptor->buffer_view = bview;

   void *desc_map = descriptor_bo_map(device, set, binding_layout, array_index);

   memcpy(desc_map,
          bview->texture_shader_state,
          sizeof(bview->texture_shader_state));
}

static void
write_inline_uniform_descriptor(struct v3dv_device *device,
                                struct v3dv_descriptor *descriptor,
                                struct v3dv_descriptor_set *set,
                                const struct v3dv_descriptor_set_binding_layout *binding_layout,
                                const void *data,
                                size_t offset,
                                size_t size)
{
   assert(binding_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK);
   descriptor->type = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK;
   descriptor->buffer = NULL;

   void *desc_map = descriptor_bo_map(device, set, binding_layout, 0);
   memcpy(desc_map + offset, data, size);

   /* The pool allocates BO space up front for all the inline buffers it may
    * hold, and that space is assigned to individual descriptors as they are
    * written, so we define the range of an inline buffer as the largest
    * range of data that the client has written to it.
    */
   descriptor->offset = 0;
   descriptor->range = MAX2(descriptor->range, offset + size);
}
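
/* Semantics sketch (hypothetical values, not driver code): for inline
 * uniform blocks, dstArrayElement and descriptorCount in a write are byte
 * quantities. Writing 16 bytes at offset 32 and later 8 bytes at offset 0
 * leaves descriptor->range at MAX2(48, 8) = 48.
 */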

VKAPI_ATTR void VKAPI_CALL
v3dv_UpdateDescriptorSets(VkDevice _device,
                          uint32_t descriptorWriteCount,
                          const VkWriteDescriptorSet *pDescriptorWrites,
                          uint32_t descriptorCopyCount,
                          const VkCopyDescriptorSet *pDescriptorCopies)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *writeset = &pDescriptorWrites[i];
      V3DV_FROM_HANDLE(v3dv_descriptor_set, set, writeset->dstSet);

      const struct v3dv_descriptor_set_binding_layout *binding_layout =
         set->layout->binding + writeset->dstBinding;

      struct v3dv_descriptor *descriptor = set->descriptors;

      descriptor += binding_layout->descriptor_index;

      /* Inline uniform blocks are not arrayed, instead they use dstArrayElement
       * to specify the byte offset of the uniform update and descriptorCount
       * to specify the size (in bytes) of the update.
       */
      uint32_t descriptor_count;
      if (writeset->descriptorType != VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         descriptor += writeset->dstArrayElement;
         descriptor_count = writeset->descriptorCount;
      } else {
         descriptor_count = 1;
      }

      for (uint32_t j = 0; j < descriptor_count; ++j) {
         switch (writeset->descriptorType) {

         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: {
            const VkDescriptorBufferInfo *buffer_info = writeset->pBufferInfo + j;
            write_buffer_descriptor(descriptor, writeset->descriptorType,
                                    buffer_info);
            break;
         }
         case VK_DESCRIPTOR_TYPE_SAMPLER: {
            /* If we get here we shouldn't be modifying an immutable sampler,
             * so we don't check that this would work or not crash. Let the
             * validation layers catch that.
             */
            const VkDescriptorImageInfo *image_info = writeset->pImageInfo + j;
            V3DV_FROM_HANDLE(v3dv_sampler, sampler, image_info->sampler);
            write_image_descriptor(device, descriptor, writeset->descriptorType,
                                   set, binding_layout, NULL, sampler,
                                   writeset->dstArrayElement + j);

            break;
         }
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: {
            const VkDescriptorImageInfo *image_info = writeset->pImageInfo + j;
            V3DV_FROM_HANDLE(v3dv_image_view, iview, image_info->imageView);
            write_image_descriptor(device, descriptor, writeset->descriptorType,
                                   set, binding_layout, iview, NULL,
                                   writeset->dstArrayElement + j);

            break;
         }
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
            const VkDescriptorImageInfo *image_info = writeset->pImageInfo + j;
            V3DV_FROM_HANDLE(v3dv_image_view, iview, image_info->imageView);
            V3DV_FROM_HANDLE(v3dv_sampler, sampler, image_info->sampler);
            write_image_descriptor(device, descriptor, writeset->descriptorType,
                                   set, binding_layout, iview, sampler,
                                   writeset->dstArrayElement + j);

            break;
         }
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: {
            V3DV_FROM_HANDLE(v3dv_buffer_view, buffer_view,
                             writeset->pTexelBufferView[j]);
            write_buffer_view_descriptor(device, descriptor, writeset->descriptorType,
                                         set, binding_layout, buffer_view,
                                         writeset->dstArrayElement + j);
            break;
         }
         case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK: {
            const VkWriteDescriptorSetInlineUniformBlock *inline_write =
               vk_find_struct_const(writeset->pNext,
                                    WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK);
            assert(inline_write->dataSize == writeset->descriptorCount);
            write_inline_uniform_descriptor(device, descriptor, set,
                                            binding_layout,
                                            inline_write->pData,
                                            writeset->dstArrayElement, /* offset */
                                            inline_write->dataSize);
            break;
         }
         default:
            unreachable("unimplemented descriptor type");
            break;
         }
         descriptor++;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copyset = &pDescriptorCopies[i];
      V3DV_FROM_HANDLE(v3dv_descriptor_set, src_set,
                       copyset->srcSet);
      V3DV_FROM_HANDLE(v3dv_descriptor_set, dst_set,
                       copyset->dstSet);

      const struct v3dv_descriptor_set_binding_layout *src_binding_layout =
         src_set->layout->binding + copyset->srcBinding;
      const struct v3dv_descriptor_set_binding_layout *dst_binding_layout =
         dst_set->layout->binding + copyset->dstBinding;

      assert(src_binding_layout->type == dst_binding_layout->type);

      struct v3dv_descriptor *src_descriptor = src_set->descriptors;
      struct v3dv_descriptor *dst_descriptor = dst_set->descriptors;

      src_descriptor += src_binding_layout->descriptor_index;
      dst_descriptor += dst_binding_layout->descriptor_index;

      if (src_binding_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         /* {src,dst}ArrayElement specifies src/dst start offset and
          * descriptorCount specifies size (in bytes) to copy.
          */
         const void *src_data = src_set->pool->bo->map +
                                src_set->base_offset +
                                src_binding_layout->descriptor_offset +
                                copyset->srcArrayElement;
         write_inline_uniform_descriptor(device, dst_descriptor, dst_set,
                                         dst_binding_layout,
                                         src_data,
                                         copyset->dstArrayElement,
                                         copyset->descriptorCount);
         continue;
      }

      src_descriptor += copyset->srcArrayElement;
      dst_descriptor += copyset->dstArrayElement;

      for (uint32_t j = 0; j < copyset->descriptorCount; j++) {
         *dst_descriptor = *src_descriptor;
         dst_descriptor++;
         src_descriptor++;

         if (v3dv_X(device, descriptor_bo_size)(src_binding_layout->type) > 0) {
            descriptor_bo_copy(device,
                               dst_set, dst_binding_layout,
                               j + copyset->dstArrayElement,
                               src_set, src_binding_layout,
                               j + copyset->srcArrayElement);
         }
      }
   }
}

VKAPI_ATTR void VKAPI_CALL
v3dv_GetDescriptorSetLayoutSupport(
   VkDevice _device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   VkDescriptorSetLayoutSupport *pSupport)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   VkDescriptorSetLayoutBinding *bindings = NULL;
   VkResult result = vk_create_sorted_bindings(
      pCreateInfo->pBindings, pCreateInfo->bindingCount, &bindings);
   if (result != VK_SUCCESS) {
      pSupport->supported = false;
      return;
   }

   bool supported = true;

   uint32_t desc_host_size = sizeof(struct v3dv_descriptor);
   uint32_t host_size = sizeof(struct v3dv_descriptor_set);
   uint32_t bo_size = 0;
   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + i;

      if ((UINT32_MAX - host_size) / desc_host_size < binding->descriptorCount) {
         supported = false;
         break;
      }

      uint32_t desc_bo_size = v3dv_X(device, descriptor_bo_size)(binding->descriptorType);
      if (desc_bo_size > 0 &&
          (UINT32_MAX - bo_size) / desc_bo_size < binding->descriptorCount) {
         supported = false;
         break;
      }

      host_size += binding->descriptorCount * desc_host_size;
      bo_size += binding->descriptorCount * desc_bo_size;
   }

   free(bindings);

   pSupport->supported = supported;
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_CreateDescriptorUpdateTemplate(
   VkDevice _device,
   const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   struct v3dv_descriptor_update_template *template;

   size_t size = sizeof(*template) +
      pCreateInfo->descriptorUpdateEntryCount * sizeof(template->entries[0]);
   template = vk_object_alloc(&device->vk, pAllocator, size,
                              VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE);
   if (template == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   template->bind_point = pCreateInfo->pipelineBindPoint;

   assert(pCreateInfo->templateType ==
          VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET);
   template->set = pCreateInfo->set;

   template->entry_count = pCreateInfo->descriptorUpdateEntryCount;
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const VkDescriptorUpdateTemplateEntry *pEntry =
         &pCreateInfo->pDescriptorUpdateEntries[i];

      template->entries[i] = (struct v3dv_descriptor_template_entry) {
         .type = pEntry->descriptorType,
         .binding = pEntry->dstBinding,
         .array_element = pEntry->dstArrayElement,
         .array_count = pEntry->descriptorCount,
         .offset = pEntry->offset,
         .stride = pEntry->stride,
      };
   }

   *pDescriptorUpdateTemplate =
      v3dv_descriptor_update_template_to_handle(template);

   return VK_SUCCESS;
}
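
/* Usage sketch (client-side, hypothetical names, not driver code): a
 * template with one UNIFORM_BUFFER entry at binding 0, array_element 0,
 * array_count 1, offset 0 and stride sizeof(VkDescriptorBufferInfo) lets
 * the app update the set straight from a struct it owns:
 *
 *    VkDescriptorBufferInfo info = { buf, 0, VK_WHOLE_SIZE };
 *    vkUpdateDescriptorSetWithTemplate(dev, set, template, &info);
 */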

VKAPI_ATTR void VKAPI_CALL
v3dv_DestroyDescriptorUpdateTemplate(
   VkDevice _device,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const VkAllocationCallbacks *pAllocator)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_update_template, template,
                    descriptorUpdateTemplate);

   if (!template)
      return;

   vk_object_free(&device->vk, pAllocator, template);
}

VKAPI_ATTR void VKAPI_CALL
v3dv_UpdateDescriptorSetWithTemplate(
   VkDevice _device,
   VkDescriptorSet descriptorSet,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_set, set, descriptorSet);
   V3DV_FROM_HANDLE(v3dv_descriptor_update_template, template,
                    descriptorUpdateTemplate);

   for (int i = 0; i < template->entry_count; i++) {
      const struct v3dv_descriptor_template_entry *entry =
         &template->entries[i];

      const struct v3dv_descriptor_set_binding_layout *binding_layout =
         set->layout->binding + entry->binding;

      struct v3dv_descriptor *descriptor =
         set->descriptors +
         binding_layout->descriptor_index;

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               pData + entry->offset + j * entry->stride;
            write_buffer_descriptor(descriptor + entry->array_element + j,
                                    entry->type, info);
         }
         break;

      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               pData + entry->offset + j * entry->stride;
            V3DV_FROM_HANDLE(v3dv_image_view, iview, info->imageView);
            V3DV_FROM_HANDLE(v3dv_sampler, sampler, info->sampler);
            write_image_descriptor(device, descriptor + entry->array_element + j,
                                   entry->type, set, binding_layout, iview,
                                   sampler, entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *_bview =
               pData + entry->offset + j * entry->stride;
            V3DV_FROM_HANDLE(v3dv_buffer_view, bview, *_bview);
            write_buffer_view_descriptor(device,
                                         descriptor + entry->array_element + j,
                                         entry->type, set, binding_layout, bview,
                                         entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK: {
         write_inline_uniform_descriptor(device, descriptor, set,
                                         binding_layout,
                                         pData + entry->offset,
                                         entry->array_element, /* offset */
                                         entry->array_count);  /* size */
         break;
      }

      default:
         unreachable("Unsupported descriptor type");
      }
   }
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_CreateSamplerYcbcrConversion(
    VkDevice _device,
    const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
    const VkAllocationCallbacks *pAllocator,
    VkSamplerYcbcrConversion *pYcbcrConversion)
{
   unreachable("Ycbcr sampler conversion is not supported");
   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
v3dv_DestroySamplerYcbcrConversion(
    VkDevice _device,
    VkSamplerYcbcrConversion YcbcrConversion,
    const VkAllocationCallbacks *pAllocator)
{
   unreachable("Ycbcr sampler conversion is not supported");
}