/*
 * Copyright © Microsoft Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "dzn_private.h"

#include "vk_alloc.h"
#include "vk_descriptors.h"
#include "vk_util.h"

#include "util/mesa-sha1.h"

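/* Convert a VkShaderStageFlags mask into a bitmask of mesa shader stages. */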
static uint32_t
translate_desc_stages(VkShaderStageFlags in)
{
   if (in == VK_SHADER_STAGE_ALL)
      in = VK_SHADER_STAGE_ALL_GRAPHICS | VK_SHADER_STAGE_COMPUTE_BIT;

   uint32_t out = 0;

   u_foreach_bit(s, in)
      out |= BITFIELD_BIT(vk_to_mesa_shader_stage(BITFIELD_BIT(s)));

   return out;
}

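/* D3D12 root parameters have a single visibility field, so anything that
 * isn't exactly one graphics stage degrades to D3D12_SHADER_VISIBILITY_ALL.
 */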
static D3D12_SHADER_VISIBILITY
translate_desc_visibility(VkShaderStageFlags in)
{
   switch (in) {
   case VK_SHADER_STAGE_VERTEX_BIT: return D3D12_SHADER_VISIBILITY_VERTEX;
   case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT: return D3D12_SHADER_VISIBILITY_HULL;
   case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT: return D3D12_SHADER_VISIBILITY_DOMAIN;
   case VK_SHADER_STAGE_GEOMETRY_BIT: return D3D12_SHADER_VISIBILITY_GEOMETRY;
   case VK_SHADER_STAGE_FRAGMENT_BIT: return D3D12_SHADER_VISIBILITY_PIXEL;
   default: return D3D12_SHADER_VISIBILITY_ALL;
   }
}

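/* Map a Vulkan descriptor type to the D3D12 descriptor range type it lives
 * in. Storage descriptors land in an SRV range when the shader only reads
 * them, and in a UAV range when it may write them.
 */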
static D3D12_DESCRIPTOR_RANGE_TYPE
desc_type_to_range_type(VkDescriptorType in, bool writeable)
{
   switch (in) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      return D3D12_DESCRIPTOR_RANGE_TYPE_SAMPLER;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      return D3D12_DESCRIPTOR_RANGE_TYPE_SRV;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      return D3D12_DESCRIPTOR_RANGE_TYPE_CBV;

   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      return writeable ? D3D12_DESCRIPTOR_RANGE_TYPE_UAV : D3D12_DESCRIPTOR_RANGE_TYPE_SRV;
   default:
      unreachable("Unsupported desc type");
   }
}

static bool
is_dynamic_desc_type(VkDescriptorType desc_type)
{
   return (desc_type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
           desc_type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
}

static bool
dzn_descriptor_type_depends_on_shader_usage(VkDescriptorType type)
{
   return type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER ||
          type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ||
          type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ||
          type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
}

static inline bool
dzn_desc_type_has_sampler(VkDescriptorType type)
{
   return type == VK_DESCRIPTOR_TYPE_SAMPLER ||
          type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
}

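/* Number of descriptor-heap slots one descriptor of this type occupies,
 * accounting for SRV+UAV pairs and for the extra sampler slot needed by
 * combined image+sampler descriptors.
 */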
static uint32_t
num_descs_for_type(VkDescriptorType type, bool static_sampler)
{
   unsigned num_descs = 1;

   /* Some types map to an SRV or UAV depending on how the shader uses the
    * resource (NONWRITEABLE flag set or not); in that case we need to reserve
    * slots for both the UAV and SRV descs.
    */
   if (dzn_descriptor_type_depends_on_shader_usage(type))
      num_descs++;

   /* There's no combined SRV+SAMPLER type in D3D12, so we need a separate
    * descriptor for the sampler.
    */
   if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
      num_descs++;

   /* Don't count immutable samplers, they have their own descriptor. */
   if (static_sampler && dzn_desc_type_has_sampler(type))
      num_descs--;

   return num_descs;
}

static VkResult
dzn_descriptor_set_layout_create(struct dzn_device *device,
                                 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                                 const VkAllocationCallbacks *pAllocator,
                                 VkDescriptorSetLayout *out)
{
   const VkDescriptorSetLayoutBinding *bindings = pCreateInfo->pBindings;
   uint32_t binding_count = 0, static_sampler_count = 0, total_ranges = 0;
   uint32_t dynamic_ranges_offset = 0, immutable_sampler_count = 0;
   uint32_t range_count[MAX_SHADER_VISIBILITIES][NUM_POOL_TYPES] = { 0 };

   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      binding_count = MAX2(binding_count, bindings[i].binding + 1);

      if (!bindings[i].descriptorCount)
         continue;

      D3D12_SHADER_VISIBILITY visibility =
         translate_desc_visibility(bindings[i].stageFlags);
      VkDescriptorType desc_type = bindings[i].descriptorType;
      bool has_sampler = dzn_desc_type_has_sampler(desc_type);

      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
       *
       *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
       *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
       *    pImmutableSamplers can be used to initialize a set of immutable
       *    samplers. [...]  If descriptorType is not one of these descriptor
       *    types, then pImmutableSamplers is ignored."
       *
       * We need to be careful here and only parse pImmutableSamplers if we
       * have one of the right descriptor types.
       */
      bool immutable_samplers =
         has_sampler &&
         bindings[i].pImmutableSamplers != NULL;
      bool static_sampler = false;

      if (immutable_samplers && bindings[i].descriptorCount == 1) {
         VK_FROM_HANDLE(dzn_sampler, sampler, bindings[i].pImmutableSamplers[0]);

         if (sampler->static_border_color != -1)
            static_sampler = true;
      }

      if (static_sampler) {
         static_sampler_count += bindings[i].descriptorCount;
      } else if (has_sampler) {
         range_count[visibility][D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER]++;
         total_ranges++;

         if (immutable_samplers)
            immutable_sampler_count += bindings[i].descriptorCount;
      }

      if (desc_type != VK_DESCRIPTOR_TYPE_SAMPLER) {
         range_count[visibility][D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV]++;
         total_ranges++;

         if (dzn_descriptor_type_depends_on_shader_usage(desc_type)) {
            range_count[visibility][D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV]++;
            total_ranges++;
         }

         if (!is_dynamic_desc_type(desc_type)) {
            uint32_t factor =
               dzn_descriptor_type_depends_on_shader_usage(desc_type) ? 2 : 1;
            dynamic_ranges_offset += bindings[i].descriptorCount * factor;
         }
      }
   }

   /* We need to allocate descriptor set layouts off the device allocator
    * with DEVICE scope because they are reference counted and may not be
    * destroyed when vkDestroyDescriptorSetLayout is called.
    */
   VK_MULTIALLOC(ma);
   VK_MULTIALLOC_DECL(&ma, struct dzn_descriptor_set_layout, set_layout, 1);
   VK_MULTIALLOC_DECL(&ma, D3D12_DESCRIPTOR_RANGE1,
                      ranges, total_ranges);
   VK_MULTIALLOC_DECL(&ma, D3D12_STATIC_SAMPLER_DESC, static_samplers,
                      static_sampler_count);
   VK_MULTIALLOC_DECL(&ma, const struct dzn_sampler *, immutable_samplers,
                      immutable_sampler_count);
   VK_MULTIALLOC_DECL(&ma, struct dzn_descriptor_set_layout_binding, binfos,
                      binding_count);

   if (!vk_descriptor_set_layout_multizalloc(&device->vk, &ma))
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->static_samplers = static_samplers;
   set_layout->static_sampler_count = static_sampler_count;
   set_layout->immutable_samplers = immutable_samplers;
   set_layout->immutable_sampler_count = immutable_sampler_count;
   set_layout->bindings = binfos;
   set_layout->binding_count = binding_count;
   set_layout->dynamic_buffers.range_offset = dynamic_ranges_offset;

   for (uint32_t i = 0; i < MAX_SHADER_VISIBILITIES; i++) {
      dzn_foreach_pool_type (type) {
         if (range_count[i][type]) {
            set_layout->ranges[i][type] = ranges;
            set_layout->range_count[i][type] = range_count[i][type];
            ranges += range_count[i][type];
         }
      }
   }

   VkDescriptorSetLayoutBinding *ordered_bindings;
   VkResult ret =
      vk_create_sorted_bindings(pCreateInfo->pBindings,
                                pCreateInfo->bindingCount,
                                &ordered_bindings);
   if (ret != VK_SUCCESS)
      return ret;

   assert(binding_count ==
          (pCreateInfo->bindingCount ?
           (ordered_bindings[pCreateInfo->bindingCount - 1].binding + 1) : 0));

   uint32_t range_idx[MAX_SHADER_VISIBILITIES][NUM_POOL_TYPES] = { 0 };
   uint32_t static_sampler_idx = 0, immutable_sampler_idx = 0;
   uint32_t dynamic_buffer_idx = 0;
   uint32_t base_register = 0;

   for (uint32_t i = 0; i < binding_count; i++) {
      binfos[i].static_sampler_idx = ~0;
      binfos[i].immutable_sampler_idx = ~0;
      binfos[i].dynamic_buffer_idx = ~0;
      dzn_foreach_pool_type (type)
         binfos[i].range_idx[type] = ~0;
   }

   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      VkDescriptorType desc_type = ordered_bindings[i].descriptorType;
      uint32_t binding = ordered_bindings[i].binding;
      uint32_t desc_count = ordered_bindings[i].descriptorCount;
      bool has_sampler = dzn_desc_type_has_sampler(desc_type);
      bool has_immutable_samplers =
         has_sampler &&
         ordered_bindings[i].pImmutableSamplers != NULL;
      bool has_static_sampler = has_immutable_samplers && desc_count == 1;
      bool is_dynamic = is_dynamic_desc_type(desc_type);

      D3D12_SHADER_VISIBILITY visibility =
         translate_desc_visibility(ordered_bindings[i].stageFlags);
      binfos[binding].type = desc_type;
      binfos[binding].stages =
         translate_desc_stages(ordered_bindings[i].stageFlags);
      set_layout->stages |= binfos[binding].stages;
      binfos[binding].visibility = visibility;
      binfos[binding].base_shader_register = base_register;
      assert(base_register + desc_count >= base_register);
      base_register += desc_count;

      if (has_static_sampler) {
         VK_FROM_HANDLE(dzn_sampler, sampler, ordered_bindings[i].pImmutableSamplers[0]);

         /* Not all border colors are supported. */
         if (sampler->static_border_color != -1) {
            binfos[binding].static_sampler_idx = static_sampler_idx;
            D3D12_STATIC_SAMPLER_DESC *desc = (D3D12_STATIC_SAMPLER_DESC *)
               &static_samplers[static_sampler_idx];

            desc->Filter = sampler->desc.Filter;
            desc->AddressU = sampler->desc.AddressU;
            desc->AddressV = sampler->desc.AddressV;
            desc->AddressW = sampler->desc.AddressW;
            desc->MipLODBias = sampler->desc.MipLODBias;
            desc->MaxAnisotropy = sampler->desc.MaxAnisotropy;
            desc->ComparisonFunc = sampler->desc.ComparisonFunc;
            desc->BorderColor = sampler->static_border_color;
            desc->MinLOD = sampler->desc.MinLOD;
            desc->MaxLOD = sampler->desc.MaxLOD;
            desc->ShaderRegister = binfos[binding].base_shader_register;
            desc->ShaderVisibility = translate_desc_visibility(ordered_bindings[i].stageFlags);
            static_sampler_idx++;
         } else {
            has_static_sampler = false;
         }
      }

      if (has_immutable_samplers && !has_static_sampler) {
         binfos[binding].immutable_sampler_idx = immutable_sampler_idx;
         for (uint32_t s = 0; s < desc_count; s++) {
            VK_FROM_HANDLE(dzn_sampler, sampler, ordered_bindings[i].pImmutableSamplers[s]);

            immutable_samplers[immutable_sampler_idx++] = sampler;
         }
      }

      if (is_dynamic) {
         binfos[binding].dynamic_buffer_idx = dynamic_buffer_idx;
         for (uint32_t d = 0; d < desc_count; d++)
            set_layout->dynamic_buffers.bindings[dynamic_buffer_idx + d] = binding;
         dynamic_buffer_idx += desc_count;
         assert(dynamic_buffer_idx <= MAX_DYNAMIC_BUFFERS);
      }

      if (!ordered_bindings[i].descriptorCount)
         continue;

      unsigned num_descs =
         num_descs_for_type(desc_type, has_static_sampler);
      if (!num_descs)
         continue;

      assert(visibility < ARRAY_SIZE(set_layout->ranges));

      bool has_range[NUM_POOL_TYPES] = { 0 };
      has_range[D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER] =
         has_sampler && !has_static_sampler;
      has_range[D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV] =
         desc_type != VK_DESCRIPTOR_TYPE_SAMPLER;

      dzn_foreach_pool_type (type) {
         if (!has_range[type])
            continue;

         uint32_t idx = range_idx[visibility][type]++;
         assert(idx < range_count[visibility][type]);

         binfos[binding].range_idx[type] = idx;
         D3D12_DESCRIPTOR_RANGE1 *range = (D3D12_DESCRIPTOR_RANGE1 *)
            &set_layout->ranges[visibility][type][idx];
         VkDescriptorType range_type = desc_type;
         if (desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
            range_type = type == D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER ?
                         VK_DESCRIPTOR_TYPE_SAMPLER :
                         VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
         }
         range->RangeType = desc_type_to_range_type(range_type, false);
         range->NumDescriptors = desc_count;
         range->BaseShaderRegister = binfos[binding].base_shader_register;
         range->Flags = type == D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER ?
            D3D12_DESCRIPTOR_RANGE_FLAG_NONE :
            D3D12_DESCRIPTOR_RANGE_FLAG_DESCRIPTORS_STATIC_KEEPING_BUFFER_BOUNDS_CHECKS;
         if (is_dynamic) {
            range->OffsetInDescriptorsFromTableStart =
               set_layout->dynamic_buffers.range_offset +
               set_layout->dynamic_buffers.desc_count;
            set_layout->dynamic_buffers.count += range->NumDescriptors;
            set_layout->dynamic_buffers.desc_count += range->NumDescriptors;
         } else {
            range->OffsetInDescriptorsFromTableStart = set_layout->range_desc_count[type];
            set_layout->range_desc_count[type] += range->NumDescriptors;
         }

         if (!dzn_descriptor_type_depends_on_shader_usage(desc_type))
            continue;

         assert(idx + 1 < range_count[visibility][type]);
         range_idx[visibility][type]++;
         range[1] = range[0];
         range++;
         range->RangeType = desc_type_to_range_type(range_type, true);
         if (is_dynamic) {
            range->OffsetInDescriptorsFromTableStart =
               set_layout->dynamic_buffers.range_offset +
               set_layout->dynamic_buffers.desc_count;
            set_layout->dynamic_buffers.desc_count += range->NumDescriptors;
         } else {
            range->OffsetInDescriptorsFromTableStart = set_layout->range_desc_count[type];
            set_layout->range_desc_count[type] += range->NumDescriptors;
         }
      }
   }

   free(ordered_bindings);

   *out = dzn_descriptor_set_layout_to_handle(set_layout);
   return VK_SUCCESS;
}

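/* Offset (in descriptors, from the set's first descriptor) of binding b in
 * the given heap. Returns ~0 if the binding has no range in that heap, or if
 * the writeable (UAV) variant is requested for a type that doesn't need one.
 */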
static uint32_t
dzn_descriptor_set_layout_get_heap_offset(const struct dzn_descriptor_set_layout *layout,
                                          uint32_t b,
                                          D3D12_DESCRIPTOR_HEAP_TYPE type,
                                          bool writeable)
{
   assert(b < layout->binding_count);
   D3D12_SHADER_VISIBILITY visibility = layout->bindings[b].visibility;
   assert(visibility < ARRAY_SIZE(layout->ranges));
   assert(type < NUM_POOL_TYPES);

   uint32_t range_idx = layout->bindings[b].range_idx[type];

   if (range_idx == ~0)
      return ~0;

   if (writeable &&
       !dzn_descriptor_type_depends_on_shader_usage(layout->bindings[b].type))
      return ~0;

   if (writeable)
      range_idx++;

   assert(range_idx < layout->range_count[visibility][type]);
   return layout->ranges[visibility][type][range_idx].OffsetInDescriptorsFromTableStart;
}

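/* Number of descriptors in binding b, taken from whichever heap range the
 * binding occupies.
 */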
static uint32_t
dzn_descriptor_set_layout_get_desc_count(const struct dzn_descriptor_set_layout *layout,
                                         uint32_t b)
{
   D3D12_SHADER_VISIBILITY visibility = layout->bindings[b].visibility;
   assert(visibility < ARRAY_SIZE(layout->ranges));

   dzn_foreach_pool_type (type) {
      uint32_t range_idx = layout->bindings[b].range_idx[type];
      assert(range_idx == ~0 || range_idx < layout->range_count[visibility][type]);

      if (range_idx != ~0)
         return layout->ranges[visibility][type][range_idx].NumDescriptors;
   }

   return 0;
}

VKAPI_ATTR VkResult VKAPI_CALL
dzn_CreateDescriptorSetLayout(VkDevice device,
                              const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                              const VkAllocationCallbacks *pAllocator,
                              VkDescriptorSetLayout *pSetLayout)
{
   return dzn_descriptor_set_layout_create(dzn_device_from_handle(device),
                                           pCreateInfo, pAllocator, pSetLayout);
}

VKAPI_ATTR void VKAPI_CALL
dzn_GetDescriptorSetLayoutSupport(VkDevice device,
                                  const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                                  VkDescriptorSetLayoutSupport *pSupport)
{
   const VkDescriptorSetLayoutBinding *bindings = pCreateInfo->pBindings;
   uint32_t sampler_count = 0, other_desc_count = 0;

   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      VkDescriptorType desc_type = bindings[i].descriptorType;
      bool has_sampler = dzn_desc_type_has_sampler(desc_type);

      if (has_sampler)
         sampler_count += bindings[i].descriptorCount;
      if (desc_type != VK_DESCRIPTOR_TYPE_SAMPLER)
         other_desc_count += bindings[i].descriptorCount;
      if (dzn_descriptor_type_depends_on_shader_usage(desc_type))
         other_desc_count += bindings[i].descriptorCount;
   }

   pSupport->supported =
      sampler_count <= (MAX_DESCS_PER_SAMPLER_HEAP / MAX_SETS) &&
      other_desc_count <= (MAX_DESCS_PER_CBV_SRV_UAV_HEAP / MAX_SETS);
}

static void
dzn_pipeline_layout_destroy(struct vk_device *vk_device,
                            struct vk_pipeline_layout *vk_layout)
{
   struct dzn_pipeline_layout *layout =
      container_of(vk_layout, struct dzn_pipeline_layout, vk);

   if (layout->root.sig)
      ID3D12RootSignature_Release(layout->root.sig);

   vk_pipeline_layout_destroy(vk_device, &layout->vk);
}

// Reserve two root parameters for the push constants and sysvals CBVs.
#define MAX_INTERNAL_ROOT_PARAMS 2

// One root parameter for samplers and the other one for views, multiplied by
// the number of visibility combinations, plus the internal root parameters.
#define MAX_ROOT_PARAMS ((MAX_SHADER_VISIBILITIES * 2) + MAX_INTERNAL_ROOT_PARAMS)

// Maximum number of DWORDS (32-bit words) that can be used for a root signature
#define MAX_ROOT_DWORDS 64

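/* Hash, for each shader stage, the (binding, base shader register) pairs
 * that are visible to this stage across all set layouts.
 */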
static void
dzn_pipeline_layout_hash_stages(struct dzn_pipeline_layout *layout,
                                const VkPipelineLayoutCreateInfo *info)
{
   uint32_t stages = 0;
   for (uint32_t set = 0; set < info->setLayoutCount; set++) {
      VK_FROM_HANDLE(dzn_descriptor_set_layout, set_layout, info->pSetLayouts[set]);

      stages |= set_layout->stages;
   }

   for (uint32_t stage = 0; stage < ARRAY_SIZE(layout->stages); stage++) {
      if (!(stages & BITFIELD_BIT(stage)))
         continue;

      struct mesa_sha1 ctx;

      _mesa_sha1_init(&ctx);
      for (uint32_t set = 0; set < info->setLayoutCount; set++) {
         VK_FROM_HANDLE(dzn_descriptor_set_layout, set_layout, info->pSetLayouts[set]);
         if (!(BITFIELD_BIT(stage) & set_layout->stages))
            continue;

         for (uint32_t b = 0; b < set_layout->binding_count; b++) {
            if (!(BITFIELD_BIT(stage) & set_layout->bindings[b].stages))
               continue;

            _mesa_sha1_update(&ctx, &b, sizeof(b));
            _mesa_sha1_update(&ctx, &set_layout->bindings[b].base_shader_register,
                              sizeof(set_layout->bindings[b].base_shader_register));
         }
      }
      _mesa_sha1_final(&ctx, layout->stages[stage].hash);
   }
}

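/* Build a dzn_pipeline_layout: flatten the per-set heap offsets and binding
 * translations, then bake everything into a D3D12 root signature with one
 * descriptor table per (visibility, heap type) pair, plus the internal
 * sysval and push constant root parameters.
 */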
static VkResult
dzn_pipeline_layout_create(struct dzn_device *device,
                           const VkPipelineLayoutCreateInfo *pCreateInfo,
                           const VkAllocationCallbacks *pAllocator,
                           VkPipelineLayout *out)
{
   uint32_t binding_count = 0;

   for (uint32_t s = 0; s < pCreateInfo->setLayoutCount; s++) {
      VK_FROM_HANDLE(dzn_descriptor_set_layout, set_layout, pCreateInfo->pSetLayouts[s]);

      if (!set_layout)
         continue;

      binding_count += set_layout->binding_count;
   }

   VK_MULTIALLOC(ma);
   VK_MULTIALLOC_DECL(&ma, struct dzn_pipeline_layout, layout, 1);
   VK_MULTIALLOC_DECL(&ma, uint32_t, binding_translation, binding_count);

   if (!vk_pipeline_layout_multizalloc(&device->vk, &ma, pCreateInfo))
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->vk.destroy = dzn_pipeline_layout_destroy;

   for (uint32_t s = 0; s < pCreateInfo->setLayoutCount; s++) {
      VK_FROM_HANDLE(dzn_descriptor_set_layout, set_layout, pCreateInfo->pSetLayouts[s]);

      if (!set_layout || !set_layout->binding_count)
         continue;

      layout->binding_translation[s].base_reg = binding_translation;
      binding_translation += set_layout->binding_count;
   }

   uint32_t range_count = 0, static_sampler_count = 0;

   layout->root.param_count = 0;
   dzn_foreach_pool_type (type)
      layout->desc_count[type] = 0;

   layout->set_count = pCreateInfo->setLayoutCount;
   for (uint32_t j = 0; j < layout->set_count; j++) {
      VK_FROM_HANDLE(dzn_descriptor_set_layout, set_layout, pCreateInfo->pSetLayouts[j]);
      uint32_t *binding_trans = layout->binding_translation[j].base_reg;

      layout->sets[j].dynamic_buffer_count = set_layout->dynamic_buffers.count;
      memcpy(layout->sets[j].range_desc_count, set_layout->range_desc_count,
             sizeof(layout->sets[j].range_desc_count));
      layout->binding_translation[j].binding_count = set_layout->binding_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++)
         binding_trans[b] = set_layout->bindings[b].base_shader_register;

      static_sampler_count += set_layout->static_sampler_count;
      dzn_foreach_pool_type (type) {
         layout->sets[j].heap_offsets[type] = layout->desc_count[type];
         layout->desc_count[type] += set_layout->range_desc_count[type];
         for (uint32_t i = 0; i < MAX_SHADER_VISIBILITIES; i++)
            range_count += set_layout->range_count[i][type];
      }

      layout->desc_count[D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV] +=
         set_layout->dynamic_buffers.desc_count;
      for (uint32_t o = 0, elem = 0; o < set_layout->dynamic_buffers.count; o++, elem++) {
         uint32_t b = set_layout->dynamic_buffers.bindings[o];

         if (o > 0 && set_layout->dynamic_buffers.bindings[o - 1] != b)
            elem = 0;

         uint32_t srv =
            dzn_descriptor_set_layout_get_heap_offset(set_layout, b, D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV, false);
         uint32_t uav =
            dzn_descriptor_set_layout_get_heap_offset(set_layout, b, D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV, true);

         layout->sets[j].dynamic_buffer_heap_offsets[o].srv = srv != ~0 ? srv + elem : ~0;
         layout->sets[j].dynamic_buffer_heap_offsets[o].uav = uav != ~0 ? uav + elem : ~0;
      }
   }

   D3D12_DESCRIPTOR_RANGE1 *ranges =
      vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*ranges) * range_count, 8,
                VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
   if (range_count && !ranges) {
      vk_pipeline_layout_unref(&device->vk, &layout->vk);
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   D3D12_STATIC_SAMPLER_DESC *static_sampler_descs =
      vk_alloc2(&device->vk.alloc, pAllocator,
                sizeof(*static_sampler_descs) * static_sampler_count, 8,
                VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
   if (static_sampler_count && !static_sampler_descs) {
      vk_free2(&device->vk.alloc, pAllocator, ranges);
      vk_pipeline_layout_unref(&device->vk, &layout->vk);
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   D3D12_ROOT_PARAMETER1 root_params[MAX_ROOT_PARAMS] = { 0 };
   D3D12_DESCRIPTOR_RANGE1 *range_ptr = ranges;
   D3D12_ROOT_PARAMETER1 *root_param;
   uint32_t root_dwords = 0;

   for (uint32_t i = 0; i < MAX_SHADER_VISIBILITIES; i++) {
      dzn_foreach_pool_type (type) {
         root_param = &root_params[layout->root.param_count];
         root_param->ParameterType = D3D12_ROOT_PARAMETER_TYPE_DESCRIPTOR_TABLE;
         root_param->DescriptorTable.pDescriptorRanges = range_ptr;
         root_param->DescriptorTable.NumDescriptorRanges = 0;
         root_param->ShaderVisibility = (D3D12_SHADER_VISIBILITY)i;

         for (uint32_t j = 0; j < pCreateInfo->setLayoutCount; j++) {
            VK_FROM_HANDLE(dzn_descriptor_set_layout, set_layout, pCreateInfo->pSetLayouts[j]);
            uint32_t range_count = set_layout->range_count[i][type];

            memcpy(range_ptr, set_layout->ranges[i][type],
                   range_count * sizeof(D3D12_DESCRIPTOR_RANGE1));
            for (uint32_t k = 0; k < range_count; k++) {
               range_ptr[k].RegisterSpace = j;
               range_ptr[k].OffsetInDescriptorsFromTableStart +=
                  layout->sets[j].heap_offsets[type];
            }
            root_param->DescriptorTable.NumDescriptorRanges += range_count;
            range_ptr += range_count;
         }

         if (root_param->DescriptorTable.NumDescriptorRanges) {
            layout->root.type[layout->root.param_count++] = (D3D12_DESCRIPTOR_HEAP_TYPE)type;
            root_dwords++;
         }
      }
   }

   layout->root.sets_param_count = layout->root.param_count;

   /* Add our sysval root constants, and make them visible to all shaders */
   layout->root.sysval_cbv_param_idx = layout->root.param_count;
   root_param = &root_params[layout->root.param_count++];
   root_param->ParameterType = D3D12_ROOT_PARAMETER_TYPE_32BIT_CONSTANTS;
   root_param->Constants.RegisterSpace = DZN_REGISTER_SPACE_SYSVALS;
   root_param->Constants.ShaderRegister = 0;
   root_param->Constants.Num32BitValues =
       DIV_ROUND_UP(MAX2(sizeof(struct dxil_spirv_vertex_runtime_data),
                         sizeof(struct dxil_spirv_compute_runtime_data)),
                    4);
   root_param->ShaderVisibility = D3D12_SHADER_VISIBILITY_ALL;
   root_dwords += root_param->Constants.Num32BitValues;

   D3D12_STATIC_SAMPLER_DESC *static_sampler_ptr = static_sampler_descs;
   for (uint32_t j = 0; j < pCreateInfo->setLayoutCount; j++) {
      VK_FROM_HANDLE(dzn_descriptor_set_layout, set_layout, pCreateInfo->pSetLayouts[j]);

      memcpy(static_sampler_ptr, set_layout->static_samplers,
             set_layout->static_sampler_count * sizeof(*set_layout->static_samplers));
      if (j > 0) {
         for (uint32_t k = 0; k < set_layout->static_sampler_count; k++)
            static_sampler_ptr[k].RegisterSpace = j;
      }
      static_sampler_ptr += set_layout->static_sampler_count;
   }

   uint32_t push_constant_size = 0;
   uint32_t push_constant_flags = 0;
   for (uint32_t j = 0; j < pCreateInfo->pushConstantRangeCount; j++) {
      const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + j;
      push_constant_size = MAX2(push_constant_size, range->offset + range->size);
      push_constant_flags |= range->stageFlags;
   }

   if (push_constant_size > 0) {
      layout->root.push_constant_cbv_param_idx = layout->root.param_count;
      D3D12_ROOT_PARAMETER1 *root_param = &root_params[layout->root.param_count++];

      root_param->ParameterType = D3D12_ROOT_PARAMETER_TYPE_32BIT_CONSTANTS;
      root_param->Constants.ShaderRegister = 0;
      root_param->Constants.Num32BitValues = ALIGN(push_constant_size, 4) / 4;
      root_param->Constants.RegisterSpace = DZN_REGISTER_SPACE_PUSH_CONSTANT;
      root_param->ShaderVisibility = translate_desc_visibility(push_constant_flags);
      root_dwords += root_param->Constants.Num32BitValues;
   }

   assert(layout->root.param_count <= ARRAY_SIZE(root_params));
   assert(root_dwords <= MAX_ROOT_DWORDS);

   D3D12_VERSIONED_ROOT_SIGNATURE_DESC root_sig_desc = {
      .Version = D3D_ROOT_SIGNATURE_VERSION_1_1,
      .Desc_1_1 = {
         .NumParameters = layout->root.param_count,
         .pParameters = layout->root.param_count ? root_params : NULL,
         .NumStaticSamplers = static_sampler_count,
         .pStaticSamplers = static_sampler_descs,
         /* TODO Only enable this flag when needed (optimization) */
         .Flags = D3D12_ROOT_SIGNATURE_FLAG_ALLOW_INPUT_ASSEMBLER_INPUT_LAYOUT,
      },
   };

   layout->root.sig = dzn_device_create_root_sig(device, &root_sig_desc);
   vk_free2(&device->vk.alloc, pAllocator, ranges);
   vk_free2(&device->vk.alloc, pAllocator, static_sampler_descs);

   if (!layout->root.sig) {
      vk_pipeline_layout_unref(&device->vk, &layout->vk);
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   dzn_pipeline_layout_hash_stages(layout, pCreateInfo);
   *out = dzn_pipeline_layout_to_handle(layout);
   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL
dzn_CreatePipelineLayout(VkDevice device,
                         const VkPipelineLayoutCreateInfo *pCreateInfo,
                         const VkAllocationCallbacks *pAllocator,
                         VkPipelineLayout *pPipelineLayout)
{
   return dzn_pipeline_layout_create(dzn_device_from_handle(device),
                                     pCreateInfo, pAllocator, pPipelineLayout);
}

static D3D12_DESCRIPTOR_HEAP_TYPE
desc_type_to_heap_type(VkDescriptorType in)
{
   switch (in) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      return D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER;
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      return D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV;
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
   default:
      unreachable("Unsupported desc type");
   }
}

static void
dzn_descriptor_heap_finish(struct dzn_descriptor_heap *heap)
{
   if (heap->heap)
      ID3D12DescriptorHeap_Release(heap->heap);

   if (heap->dev)
      ID3D12Device_Release(heap->dev);
}

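/* Create the backing ID3D12DescriptorHeap. CPU-only heaps (descriptor pools
 * below create theirs that way) serve as staging storage; shader-visible
 * heaps additionally get a GPU base address.
 */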
static VkResult
dzn_descriptor_heap_init(struct dzn_descriptor_heap *heap,
                         struct dzn_device *device,
                         D3D12_DESCRIPTOR_HEAP_TYPE type,
                         uint32_t desc_count,
                         bool shader_visible)
{
   heap->desc_count = desc_count;
   heap->type = type;
   heap->dev = device->dev;
   ID3D12Device1_AddRef(heap->dev);
   heap->desc_sz = ID3D12Device1_GetDescriptorHandleIncrementSize(device->dev, type);

   D3D12_DESCRIPTOR_HEAP_DESC desc = {
      .Type = type,
      .NumDescriptors = desc_count,
      .Flags = shader_visible ?
               D3D12_DESCRIPTOR_HEAP_FLAG_SHADER_VISIBLE :
               D3D12_DESCRIPTOR_HEAP_FLAG_NONE,
   };

   if (FAILED(ID3D12Device1_CreateDescriptorHeap(device->dev, &desc,
                                                 &IID_ID3D12DescriptorHeap,
                                                 (void **)&heap->heap))) {
      return vk_error(device,
                      shader_visible ?
                      VK_ERROR_OUT_OF_DEVICE_MEMORY : VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   D3D12_CPU_DESCRIPTOR_HANDLE cpu_handle = dzn_ID3D12DescriptorHeap_GetCPUDescriptorHandleForHeapStart(heap->heap);
   heap->cpu_base = cpu_handle.ptr;
   if (shader_visible) {
      D3D12_GPU_DESCRIPTOR_HANDLE gpu_handle = dzn_ID3D12DescriptorHeap_GetGPUDescriptorHandleForHeapStart(heap->heap);
      heap->gpu_base = gpu_handle.ptr;
   }

   return VK_SUCCESS;
}

D3D12_CPU_DESCRIPTOR_HANDLE
dzn_descriptor_heap_get_cpu_handle(const struct dzn_descriptor_heap *heap, uint32_t desc_offset)
{
   return (D3D12_CPU_DESCRIPTOR_HANDLE) {
      .ptr = heap->cpu_base + (desc_offset * heap->desc_sz),
   };
}

D3D12_GPU_DESCRIPTOR_HANDLE
dzn_descriptor_heap_get_gpu_handle(const struct dzn_descriptor_heap *heap, uint32_t desc_offset)
{
   return (D3D12_GPU_DESCRIPTOR_HANDLE) {
      .ptr = heap->gpu_base ? heap->gpu_base + (desc_offset * heap->desc_sz) : 0,
   };
}

static void
dzn_descriptor_heap_write_sampler_desc(struct dzn_descriptor_heap *heap,
                                       uint32_t desc_offset,
                                       const struct dzn_sampler *sampler)
{
   ID3D12Device1_CreateSampler(heap->dev, &sampler->desc,
                               dzn_descriptor_heap_get_cpu_handle(heap, desc_offset));
}

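/* Write an image view descriptor. When cube_as_2darray is set, cube and
 * cube-array SRVs are demoted to a 2D-array view covering the same mip
 * range, each cube contributing six array slices.
 */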
void
dzn_descriptor_heap_write_image_view_desc(struct dzn_descriptor_heap *heap,
                                          uint32_t desc_offset,
                                          bool writeable, bool cube_as_2darray,
                                          const struct dzn_image_view *iview)
{
   D3D12_CPU_DESCRIPTOR_HANDLE view_handle =
      dzn_descriptor_heap_get_cpu_handle(heap, desc_offset);
   struct dzn_image *image = container_of(iview->vk.image, struct dzn_image, vk);

   if (writeable) {
      ID3D12Device1_CreateUnorderedAccessView(heap->dev, image->res, NULL, &iview->uav_desc, view_handle);
   } else if (cube_as_2darray &&
              (iview->srv_desc.ViewDimension == D3D12_SRV_DIMENSION_TEXTURECUBEARRAY ||
               iview->srv_desc.ViewDimension == D3D12_SRV_DIMENSION_TEXTURECUBE)) {
      D3D12_SHADER_RESOURCE_VIEW_DESC srv_desc = iview->srv_desc;
      srv_desc.ViewDimension = D3D12_SRV_DIMENSION_TEXTURE2DARRAY;
      srv_desc.Texture2DArray.PlaneSlice = 0;
      if (iview->srv_desc.ViewDimension == D3D12_SRV_DIMENSION_TEXTURECUBEARRAY) {
         srv_desc.Texture2DArray.MostDetailedMip =
            iview->srv_desc.TextureCubeArray.MostDetailedMip;
         srv_desc.Texture2DArray.MipLevels =
            iview->srv_desc.TextureCubeArray.MipLevels;
         srv_desc.Texture2DArray.FirstArraySlice =
            iview->srv_desc.TextureCubeArray.First2DArrayFace;
         srv_desc.Texture2DArray.ArraySize =
            iview->srv_desc.TextureCubeArray.NumCubes * 6;
      } else {
         srv_desc.Texture2DArray.MostDetailedMip =
            iview->srv_desc.TextureCube.MostDetailedMip;
         srv_desc.Texture2DArray.MipLevels =
            iview->srv_desc.TextureCube.MipLevels;
         srv_desc.Texture2DArray.FirstArraySlice = 0;
         srv_desc.Texture2DArray.ArraySize = 6;
      }

      ID3D12Device1_CreateShaderResourceView(heap->dev, image->res, &srv_desc, view_handle);
   } else {
      ID3D12Device1_CreateShaderResourceView(heap->dev, image->res, &iview->srv_desc, view_handle);
   }
}

static void
dzn_descriptor_heap_write_buffer_view_desc(struct dzn_descriptor_heap *heap,
                                           uint32_t desc_offset,
                                           bool writeable,
                                           const struct dzn_buffer_view *bview)
{
   D3D12_CPU_DESCRIPTOR_HANDLE view_handle =
      dzn_descriptor_heap_get_cpu_handle(heap, desc_offset);

   if (writeable)
      ID3D12Device1_CreateUnorderedAccessView(heap->dev, bview->buffer->res, NULL, &bview->uav_desc, view_handle);
   else
      ID3D12Device1_CreateShaderResourceView(heap->dev, bview->buffer->res, &bview->srv_desc, view_handle);
}

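/* Write a buffer descriptor. Uniform buffers become CBVs (D3D12 requires
 * CBV sizes to be 256-byte aligned, hence the ALIGN_POT), while storage
 * buffers become raw R32_TYPELESS UAVs or SRVs depending on writeability.
 */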
void
dzn_descriptor_heap_write_buffer_desc(struct dzn_descriptor_heap *heap,
                                      uint32_t desc_offset,
                                      bool writeable,
                                      const struct dzn_buffer_desc *info)
{
   D3D12_CPU_DESCRIPTOR_HANDLE view_handle =
      dzn_descriptor_heap_get_cpu_handle(heap, desc_offset);

   VkDeviceSize size =
      info->range == VK_WHOLE_SIZE ?
      info->buffer->size - info->offset :
      info->range;

   if (info->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
       info->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) {
      assert(!writeable);
      D3D12_CONSTANT_BUFFER_VIEW_DESC cbv_desc = {
         .BufferLocation = ID3D12Resource_GetGPUVirtualAddress(info->buffer->res) + info->offset,
         .SizeInBytes = ALIGN_POT(size, 256),
      };
      ID3D12Device1_CreateConstantBufferView(heap->dev, &cbv_desc, view_handle);
   } else if (writeable) {
      D3D12_UNORDERED_ACCESS_VIEW_DESC uav_desc = {
         .Format = DXGI_FORMAT_R32_TYPELESS,
         .ViewDimension = D3D12_UAV_DIMENSION_BUFFER,
         .Buffer = {
            .FirstElement = info->offset / sizeof(uint32_t),
            .NumElements = (UINT)size / sizeof(uint32_t),
            .Flags = D3D12_BUFFER_UAV_FLAG_RAW,
         },
      };
      ID3D12Device1_CreateUnorderedAccessView(heap->dev, info->buffer->res, NULL, &uav_desc, view_handle);
   } else {
      D3D12_SHADER_RESOURCE_VIEW_DESC srv_desc = {
         .Format = DXGI_FORMAT_R32_TYPELESS,
         .ViewDimension = D3D12_SRV_DIMENSION_BUFFER,
         .Shader4ComponentMapping = D3D12_DEFAULT_SHADER_4_COMPONENT_MAPPING,
         .Buffer = {
            .FirstElement = info->offset / sizeof(uint32_t),
            .NumElements = (UINT)size / sizeof(uint32_t),
            .Flags = D3D12_BUFFER_SRV_FLAG_RAW,
         },
      };
      ID3D12Device1_CreateShaderResourceView(heap->dev, info->buffer->res, &srv_desc, view_handle);
   }
}

void
dzn_descriptor_heap_copy(struct dzn_descriptor_heap *dst_heap,
                         uint32_t dst_offset,
                         const struct dzn_descriptor_heap *src_heap,
                         uint32_t src_offset,
                         uint32_t desc_count)
{
   D3D12_CPU_DESCRIPTOR_HANDLE dst_handle =
      dzn_descriptor_heap_get_cpu_handle(dst_heap, dst_offset);
   D3D12_CPU_DESCRIPTOR_HANDLE src_handle =
      dzn_descriptor_heap_get_cpu_handle(src_heap, src_offset);

   ID3D12Device1_CopyDescriptorsSimple(dst_heap->dev, desc_count,
                                       dst_handle,
                                       src_handle,
                                       dst_heap->type);
}

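/* A (binding, element) cursor over the descriptors of a set.
 * binding == ~0 marks an invalid/end-of-set position.
 */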
struct dzn_descriptor_set_ptr {
   uint32_t binding, elem;
};

static void
dzn_descriptor_set_ptr_validate(const struct dzn_descriptor_set_layout *layout,
                                struct dzn_descriptor_set_ptr *ptr)
{
   if (ptr->binding >= layout->binding_count) {
      ptr->binding = ~0;
      ptr->elem = ~0;
      return;
   }

   uint32_t desc_count =
      dzn_descriptor_set_layout_get_desc_count(layout, ptr->binding);
   if (ptr->elem >= desc_count) {
      ptr->binding = ~0;
      ptr->elem = ~0;
   }
}

static void
dzn_descriptor_set_ptr_init(const struct dzn_descriptor_set_layout *layout,
                            struct dzn_descriptor_set_ptr *ptr,
                            uint32_t binding, uint32_t elem)
{
   ptr->binding = binding;
   ptr->elem = elem;
   dzn_descriptor_set_ptr_validate(layout, ptr);
}

static void
dzn_descriptor_set_ptr_move(const struct dzn_descriptor_set_layout *layout,
                            struct dzn_descriptor_set_ptr *ptr,
                            uint32_t count)
{
   if (ptr->binding == ~0)
      return;

   while (count) {
      uint32_t desc_count =
         dzn_descriptor_set_layout_get_desc_count(layout, ptr->binding);

      if (count >= desc_count - ptr->elem) {
         count -= desc_count - ptr->elem;
         ptr->binding++;
         ptr->elem = 0;
      } else {
         ptr->elem += count;
         count = 0;
      }
   }

   dzn_descriptor_set_ptr_validate(layout, ptr);
}

static bool
dzn_descriptor_set_ptr_is_valid(const struct dzn_descriptor_set_ptr *ptr)
{
   return ptr->binding != ~0 && ptr->elem != ~0;
}

static uint32_t
dzn_descriptor_set_remaining_descs_in_binding(const struct dzn_descriptor_set_layout *layout,
                                              const struct dzn_descriptor_set_ptr *ptr)
{
   if (ptr->binding >= layout->binding_count)
      return 0;

   uint32_t desc_count =
      dzn_descriptor_set_layout_get_desc_count(layout, ptr->binding);

   return desc_count >= ptr->elem ? desc_count - ptr->elem : 0;
}

static uint32_t
dzn_descriptor_set_ptr_get_heap_offset(const struct dzn_descriptor_set_layout *layout,
                                       D3D12_DESCRIPTOR_HEAP_TYPE type,
                                       const struct dzn_descriptor_set_ptr *ptr,
                                       bool writeable)
{
   if (ptr->binding == ~0)
      return ~0;

   uint32_t base =
      dzn_descriptor_set_layout_get_heap_offset(layout, ptr->binding, type, writeable);
   if (base == ~0)
      return ~0;

   return base + ptr->elem;
}

static void
dzn_descriptor_set_write_sampler_desc(struct dzn_descriptor_set *set,
                                      uint32_t heap_offset,
                                      const struct dzn_sampler *sampler)
{
   if (heap_offset == ~0)
      return;

   D3D12_DESCRIPTOR_HEAP_TYPE type = D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER;

   mtx_lock(&set->pool->defragment_lock);
   dzn_descriptor_heap_write_sampler_desc(&set->pool->heaps[type],
                                          set->heap_offsets[type] + heap_offset,
                                          sampler);
   mtx_unlock(&set->pool->defragment_lock);
}

static void
dzn_descriptor_set_ptr_write_sampler_desc(struct dzn_descriptor_set *set,
                                          const struct dzn_descriptor_set_ptr *ptr,
                                          const struct dzn_sampler *sampler)
{
   D3D12_DESCRIPTOR_HEAP_TYPE type = D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER;
   uint32_t heap_offset =
      dzn_descriptor_set_ptr_get_heap_offset(set->layout, type, ptr, false);

   dzn_descriptor_set_write_sampler_desc(set, heap_offset, sampler);
}

static uint32_t
dzn_descriptor_set_ptr_get_dynamic_buffer_idx(const struct dzn_descriptor_set_layout *layout,
                                              const struct dzn_descriptor_set_ptr *ptr)
{
   if (ptr->binding == ~0)
      return ~0;

   uint32_t base = layout->bindings[ptr->binding].dynamic_buffer_idx;

   if (base == ~0)
      return ~0;

   return base + ptr->elem;
}

static void
dzn_descriptor_set_write_dynamic_buffer_desc(struct dzn_descriptor_set *set,
                                             uint32_t dynamic_buffer_idx,
                                             const struct dzn_buffer_desc *info)
{
   if (dynamic_buffer_idx == ~0)
      return;

   assert(dynamic_buffer_idx < set->layout->dynamic_buffers.count);
   set->dynamic_buffers[dynamic_buffer_idx] = *info;
}

static void
dzn_descriptor_set_ptr_write_dynamic_buffer_desc(struct dzn_descriptor_set *set,
                                                 const struct dzn_descriptor_set_ptr *ptr,
                                                 const struct dzn_buffer_desc *info)
{
   uint32_t dynamic_buffer_idx =
      dzn_descriptor_set_ptr_get_dynamic_buffer_idx(set->layout, ptr);

   dzn_descriptor_set_write_dynamic_buffer_desc(set, dynamic_buffer_idx, info);
}

static VkDescriptorType
dzn_descriptor_set_ptr_get_vk_type(const struct dzn_descriptor_set_layout *layout,
                                   const struct dzn_descriptor_set_ptr *ptr)
{
   if (ptr->binding >= layout->binding_count)
      return (VkDescriptorType)~0;

   return layout->bindings[ptr->binding].type;
}

static void
dzn_descriptor_set_write_image_view_desc(struct dzn_descriptor_set *set,
                                         uint32_t heap_offset,
                                         uint32_t alt_heap_offset,
                                         bool cube_as_2darray,
                                         const struct dzn_image_view *iview)
{
   D3D12_DESCRIPTOR_HEAP_TYPE type = D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV;

   if (heap_offset == ~0)
      return;

   mtx_lock(&set->pool->defragment_lock);
   dzn_descriptor_heap_write_image_view_desc(&set->pool->heaps[type],
                                             set->heap_offsets[type] + heap_offset,
                                             false, cube_as_2darray,
                                             iview);

   if (alt_heap_offset != ~0) {
      dzn_descriptor_heap_write_image_view_desc(&set->pool->heaps[type],
                                                set->heap_offsets[type] + alt_heap_offset,
                                                true, cube_as_2darray,
                                                iview);
   }
   mtx_unlock(&set->pool->defragment_lock);
}

static void
dzn_descriptor_set_ptr_write_image_view_desc(struct dzn_descriptor_set *set,
                                             const struct dzn_descriptor_set_ptr *ptr,
                                             bool cube_as_2darray,
                                             const struct dzn_image_view *iview)
{
   D3D12_DESCRIPTOR_HEAP_TYPE type = D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV;
   uint32_t heap_offset =
      dzn_descriptor_set_ptr_get_heap_offset(set->layout, type, ptr, false);
   uint32_t alt_heap_offset =
      dzn_descriptor_set_ptr_get_heap_offset(set->layout, type, ptr, true);

   dzn_descriptor_set_write_image_view_desc(set, heap_offset, alt_heap_offset,
                                            cube_as_2darray, iview);
}

static void
dzn_descriptor_set_write_buffer_view_desc(struct dzn_descriptor_set *set,
                                          uint32_t heap_offset,
                                          uint32_t alt_heap_offset,
                                          const struct dzn_buffer_view *bview)
{
   if (heap_offset == ~0)
      return;

   D3D12_DESCRIPTOR_HEAP_TYPE type = D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV;

   mtx_lock(&set->pool->defragment_lock);
   dzn_descriptor_heap_write_buffer_view_desc(&set->pool->heaps[type],
                                              set->heap_offsets[type] +
                                              heap_offset,
                                              false, bview);

   if (alt_heap_offset != ~0) {
      dzn_descriptor_heap_write_buffer_view_desc(&set->pool->heaps[type],
                                                 set->heap_offsets[type] +
                                                 alt_heap_offset,
                                                 true, bview);
   }
   mtx_unlock(&set->pool->defragment_lock);
}

static void
dzn_descriptor_set_ptr_write_buffer_view_desc(struct dzn_descriptor_set *set,
                                              const struct dzn_descriptor_set_ptr *ptr,
                                              const struct dzn_buffer_view *bview)
{
   D3D12_DESCRIPTOR_HEAP_TYPE type = D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV;
   uint32_t heap_offset =
      dzn_descriptor_set_ptr_get_heap_offset(set->layout, type, ptr, false);
   uint32_t alt_heap_offset =
      dzn_descriptor_set_ptr_get_heap_offset(set->layout, type, ptr, true);

   dzn_descriptor_set_write_buffer_view_desc(set, heap_offset, alt_heap_offset, bview);
}

static void
dzn_descriptor_set_write_buffer_desc(struct dzn_descriptor_set *set,
                                     uint32_t heap_offset,
                                     uint32_t alt_heap_offset,
                                     const struct dzn_buffer_desc *bdesc)
{
   D3D12_DESCRIPTOR_HEAP_TYPE type = D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV;
   if (heap_offset == ~0)
      return;

   mtx_lock(&set->pool->defragment_lock);
   dzn_descriptor_heap_write_buffer_desc(&set->pool->heaps[type],
                                         set->heap_offsets[type] + heap_offset,
                                         false, bdesc);

   if (alt_heap_offset != ~0) {
      dzn_descriptor_heap_write_buffer_desc(&set->pool->heaps[type],
                                            set->heap_offsets[type] +
                                            alt_heap_offset,
                                            true, bdesc);
   }
   mtx_unlock(&set->pool->defragment_lock);
}

static void
dzn_descriptor_set_ptr_write_buffer_desc(struct dzn_descriptor_set *set,
                                         const struct dzn_descriptor_set_ptr *ptr,
                                         const struct dzn_buffer_desc *bdesc)
{
   D3D12_DESCRIPTOR_HEAP_TYPE type = D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV;
   uint32_t heap_offset =
      dzn_descriptor_set_ptr_get_heap_offset(set->layout, type, ptr, false);
   uint32_t alt_heap_offset =
      dzn_descriptor_set_ptr_get_heap_offset(set->layout, type, ptr, true);

   dzn_descriptor_set_write_buffer_desc(set, heap_offset, alt_heap_offset, bdesc);
}

static void
dzn_descriptor_set_init(struct dzn_descriptor_set *set,
                        struct dzn_device *device,
                        struct dzn_descriptor_pool *pool,
                        struct dzn_descriptor_set_layout *layout)
{
   vk_object_base_init(&device->vk, &set->base, VK_OBJECT_TYPE_DESCRIPTOR_SET);

   set->pool = pool;
   set->layout = layout;

   mtx_lock(&pool->defragment_lock);
   dzn_foreach_pool_type(type) {
      set->heap_offsets[type] = pool->free_offset[type];
      set->heap_sizes[type] = layout->range_desc_count[type];
      set->pool->free_offset[type] += layout->range_desc_count[type];
   }
   mtx_unlock(&pool->defragment_lock);

   /* Pre-fill the immutable samplers */
   if (layout->immutable_sampler_count) {
      for (uint32_t b = 0; b < layout->binding_count; b++) {
         bool has_samplers =
            dzn_desc_type_has_sampler(layout->bindings[b].type);

         if (!has_samplers || layout->bindings[b].immutable_sampler_idx == ~0)
            continue;

         struct dzn_descriptor_set_ptr ptr;
         const struct dzn_sampler **sampler =
            &layout->immutable_samplers[layout->bindings[b].immutable_sampler_idx];
         for (dzn_descriptor_set_ptr_init(set->layout, &ptr, b, 0);
              dzn_descriptor_set_ptr_is_valid(&ptr);
              dzn_descriptor_set_ptr_move(set->layout, &ptr, 1)) {
            dzn_descriptor_set_ptr_write_sampler_desc(set, &ptr, *sampler);
            sampler++;
         }
      }
   }
}

static void
dzn_descriptor_set_finish(struct dzn_descriptor_set *set)
{
   vk_object_base_finish(&set->base);
   set->pool = NULL;
   set->layout = NULL;
}

static void
dzn_descriptor_pool_destroy(struct dzn_descriptor_pool *pool,
                            const VkAllocationCallbacks *pAllocator)
{
   if (!pool)
      return;

   struct dzn_device *device = container_of(pool->base.device, struct dzn_device, vk);

   dzn_foreach_pool_type (type) {
      if (pool->desc_count[type])
         dzn_descriptor_heap_finish(&pool->heaps[type]);
   }

   vk_object_base_finish(&pool->base);
   vk_free2(&device->vk.alloc, pAllocator, pool);
}

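/* Pools are backed by one CPU-visible staging heap per D3D12 heap type,
 * sized from the pool sizes. Storage descriptors reserve two slots each
 * (SRV + UAV); dynamic buffers are stored on the set itself and consume no
 * heap slot at the pool level.
 */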
static VkResult
dzn_descriptor_pool_create(struct dzn_device *device,
                           const VkDescriptorPoolCreateInfo *pCreateInfo,
                           const VkAllocationCallbacks *pAllocator,
                           VkDescriptorPool *out)
{
   VK_MULTIALLOC(ma);
   VK_MULTIALLOC_DECL(&ma, struct dzn_descriptor_pool, pool, 1);
   VK_MULTIALLOC_DECL(&ma, struct dzn_descriptor_set, sets, pCreateInfo->maxSets);

   if (!vk_multialloc_zalloc2(&ma, &device->vk.alloc, pAllocator,
                              VK_SYSTEM_ALLOCATION_SCOPE_OBJECT))
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   pool->alloc = pAllocator ? *pAllocator : device->vk.alloc;
   pool->sets = sets;
   pool->set_count = pCreateInfo->maxSets;
   mtx_init(&pool->defragment_lock, mtx_plain);

   vk_object_base_init(&device->vk, &pool->base, VK_OBJECT_TYPE_DESCRIPTOR_POOL);

   for (uint32_t p = 0; p < pCreateInfo->poolSizeCount; p++) {
      VkDescriptorType type = pCreateInfo->pPoolSizes[p].type;
      uint32_t num_desc = pCreateInfo->pPoolSizes[p].descriptorCount;

      switch (type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         pool->desc_count[D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER] += num_desc;
         break;
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         pool->desc_count[D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV] += num_desc;
         pool->desc_count[D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER] += num_desc;
         break;
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         pool->desc_count[D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV] += num_desc;
         break;
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         /* Reserve one UAV and one SRV slot for those. */
         pool->desc_count[D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV] += num_desc * 2;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         break;
      default:
         unreachable("Unsupported desc type");
      }
   }

   dzn_foreach_pool_type (type) {
      if (!pool->desc_count[type])
         continue;

      VkResult result =
         dzn_descriptor_heap_init(&pool->heaps[type], device, type, pool->desc_count[type], false);
      if (result != VK_SUCCESS) {
         dzn_descriptor_pool_destroy(pool, pAllocator);
         return result;
      }
   }

   *out = dzn_descriptor_pool_to_handle(pool);
   return VK_SUCCESS;
}

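/*
 * Defragment one of the pool's heaps: allocate a fresh heap of the same
 * size, pack all live sets back-to-back at the front, and patch their heap
 * offsets. This is called when free_offset has hit the end of the heap even
 * though enough descriptors have been freed overall.
 */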
static VkResult
dzn_descriptor_pool_defragment_heap(struct dzn_descriptor_pool *pool,
                                    D3D12_DESCRIPTOR_HEAP_TYPE type)
{
   struct dzn_device *device = container_of(pool->base.device, struct dzn_device, vk);
   struct dzn_descriptor_heap new_heap;

   VkResult result =
      dzn_descriptor_heap_init(&new_heap, device, type,
                               pool->heaps[type].desc_count,
                               false);
   if (result != VK_SUCCESS)
      return result;

   mtx_lock(&pool->defragment_lock);
   uint32_t heap_offset = 0;
   for (uint32_t s = 0; s < pool->set_count; s++) {
      if (!pool->sets[s].layout)
         continue;

      dzn_descriptor_heap_copy(&new_heap, heap_offset,
                               &pool->heaps[type],
                               pool->sets[s].heap_offsets[type],
                               pool->sets[s].heap_sizes[type]);
      pool->sets[s].heap_offsets[type] = heap_offset;
      heap_offset += pool->sets[s].heap_sizes[type];
   }

   /* Swap the compacted heap in before dropping the lock, so the set
    * offsets we just rewrote never point into the old heap.
    */
   dzn_descriptor_heap_finish(&pool->heaps[type]);
   pool->heaps[type] = new_heap;
   mtx_unlock(&pool->defragment_lock);

   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL
dzn_CreateDescriptorPool(VkDevice device,
                         const VkDescriptorPoolCreateInfo *pCreateInfo,
                         const VkAllocationCallbacks *pAllocator,
                         VkDescriptorPool *pDescriptorPool)
{
   return dzn_descriptor_pool_create(dzn_device_from_handle(device),
                                     pCreateInfo, pAllocator, pDescriptorPool);
}

VKAPI_ATTR void VKAPI_CALL
dzn_DestroyDescriptorPool(VkDevice device,
                          VkDescriptorPool descriptorPool,
                          const VkAllocationCallbacks *pAllocator)
{
   dzn_descriptor_pool_destroy(dzn_descriptor_pool_from_handle(descriptorPool),
                               pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL
dzn_ResetDescriptorPool(VkDevice device,
                        VkDescriptorPool descriptorPool,
                        VkDescriptorPoolResetFlags flags)
{
   VK_FROM_HANDLE(dzn_descriptor_pool, pool, descriptorPool);

   for (uint32_t s = 0; s < pool->set_count; s++)
      dzn_descriptor_set_finish(&pool->sets[s]);

   dzn_foreach_pool_type(type)
      pool->free_offset[type] = 0;

   return VK_SUCCESS;
}

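/*
 * dzn_descriptor_heap_pool is a simple bump allocator sitting on top of a
 * list of D3D12 descriptor heaps: slots are allocated linearly from the last
 * active heap, and resetting the pool moves every active heap to a free list
 * so later allocations can recycle them instead of creating new heaps.
 */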
void
dzn_descriptor_heap_pool_finish(struct dzn_descriptor_heap_pool *pool)
{
   list_splicetail(&pool->active_heaps, &pool->free_heaps);
   list_for_each_entry_safe(struct dzn_descriptor_heap_pool_entry, entry, &pool->free_heaps, link) {
      list_del(&entry->link);
      dzn_descriptor_heap_finish(&entry->heap);
      vk_free(pool->alloc, entry);
   }
}

void
dzn_descriptor_heap_pool_init(struct dzn_descriptor_heap_pool *pool,
                              struct dzn_device *device,
                              D3D12_DESCRIPTOR_HEAP_TYPE type,
                              bool shader_visible,
                              const VkAllocationCallbacks *alloc)
{
   assert(!shader_visible ||
          type == D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV ||
          type == D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER);

   pool->alloc = alloc;
   pool->type = type;
   pool->shader_visible = shader_visible;
   list_inithead(&pool->active_heaps);
   list_inithead(&pool->free_heaps);
   pool->offset = 0;
   pool->desc_sz = ID3D12Device1_GetDescriptorHandleIncrementSize(device->dev, type);
}

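/*
 * Allocate desc_count contiguous slots. If the current heap can't fit them,
 * grab a big-enough heap from the free list, or create a new one whose byte
 * size is rounded up to an allocation granularity (64k for CBV_SRV_UAV and
 * SAMPLER heaps, 4k for the others), with a 16-descriptor minimum. D3D12
 * caps shader-visible sampler heaps at MAX_DESCS_PER_SAMPLER_HEAP entries,
 * hence the extra clamp below.
 */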
VkResult
dzn_descriptor_heap_pool_alloc_slots(struct dzn_descriptor_heap_pool *pool,
                                     struct dzn_device *device, uint32_t desc_count,
                                     struct dzn_descriptor_heap **heap,
                                     uint32_t *first_slot)
{
   struct dzn_descriptor_heap *last_heap =
      list_is_empty(&pool->active_heaps) ?
      NULL :
      &(list_last_entry(&pool->active_heaps, struct dzn_descriptor_heap_pool_entry, link)->heap);
   uint32_t last_heap_desc_count =
      last_heap ? last_heap->desc_count : 0;

   if (pool->offset + desc_count > last_heap_desc_count) {
      uint32_t granularity =
         (pool->type == D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV ||
          pool->type == D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER) ?
         64 * 1024 : 4 * 1024;
      uint32_t alloc_step = ALIGN_POT(desc_count * pool->desc_sz, granularity);
      uint32_t heap_desc_count = MAX2(alloc_step / pool->desc_sz, 16);

      /* Maximum of 2048 samplers per heap when shader_visible is true. */
      if (pool->shader_visible &&
          pool->type == D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER) {
         assert(desc_count <= MAX_DESCS_PER_SAMPLER_HEAP);
         heap_desc_count = MIN2(heap_desc_count, MAX_DESCS_PER_SAMPLER_HEAP);
      }

      struct dzn_descriptor_heap_pool_entry *new_heap = NULL;

      list_for_each_entry_safe(struct dzn_descriptor_heap_pool_entry, entry, &pool->free_heaps, link) {
         if (entry->heap.desc_count >= heap_desc_count) {
            new_heap = entry;
            list_del(&entry->link);
            break;
         }
      }

      if (!new_heap) {
         new_heap =
            vk_zalloc(pool->alloc, sizeof(*new_heap), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
         if (!new_heap)
            return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

         VkResult result =
            dzn_descriptor_heap_init(&new_heap->heap, device, pool->type,
                                     heap_desc_count, pool->shader_visible);
         if (result != VK_SUCCESS) {
            /* Free with the allocator the entry was allocated from. */
            vk_free(pool->alloc, new_heap);
            return result;
         }
      }

      list_addtail(&new_heap->link, &pool->active_heaps);
      pool->offset = 0;
      last_heap = &new_heap->heap;
   }

   *heap = last_heap;
   *first_slot = pool->offset;
   pool->offset += desc_count;
   return VK_SUCCESS;
}

void
dzn_descriptor_heap_pool_reset(struct dzn_descriptor_heap_pool *pool)
{
   pool->offset = 0;
   list_splicetail(&pool->active_heaps, &pool->free_heaps);
   list_inithead(&pool->active_heaps);
}

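/*
 * vkAllocateDescriptorSets(): after checking that enough sets and
 * descriptors remain, defragment any heap whose tail space is too small
 * (free space exists but is scattered), then recycle the first set slot
 * whose layout is NULL. Sets allocated before a failure are released
 * through dzn_FreeDescriptorSets().
 */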
VKAPI_ATTR VkResult VKAPI_CALL
dzn_AllocateDescriptorSets(VkDevice dev,
                           const VkDescriptorSetAllocateInfo *pAllocateInfo,
                           VkDescriptorSet *pDescriptorSets)
{
   VK_FROM_HANDLE(dzn_descriptor_pool, pool, pAllocateInfo->descriptorPool);
   VK_FROM_HANDLE(dzn_device, device, dev);
   VkResult result;
   unsigned i;

   if (pAllocateInfo->descriptorSetCount > (pool->set_count - pool->used_set_count))
      return VK_ERROR_OUT_OF_POOL_MEMORY;

   uint32_t set_idx = 0;
   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      VK_FROM_HANDLE(dzn_descriptor_set_layout, layout, pAllocateInfo->pSetLayouts[i]);

      dzn_foreach_pool_type(type) {
         if (pool->used_desc_count[type] + layout->range_desc_count[type] > pool->desc_count[type]) {
            dzn_FreeDescriptorSets(dev, pAllocateInfo->descriptorPool, i, pDescriptorSets);
            return vk_error(device, VK_ERROR_OUT_OF_POOL_MEMORY);
         }

         if (pool->free_offset[type] + layout->range_desc_count[type] > pool->desc_count[type]) {
            result = dzn_descriptor_pool_defragment_heap(pool, type);
            if (result != VK_SUCCESS) {
               dzn_FreeDescriptorSets(dev, pAllocateInfo->descriptorPool, i, pDescriptorSets);
               return vk_error(device, VK_ERROR_FRAGMENTED_POOL);
            }
         }
      }

      struct dzn_descriptor_set *set = NULL;
      for (; set_idx < pool->set_count; set_idx++) {
         if (!pool->sets[set_idx].layout) {
            set = &pool->sets[set_idx];
            break;
         }
      }

      /* The set-count check above guarantees a free slot exists. */
      assert(set);
      dzn_descriptor_set_init(set, device, pool, layout);
      pDescriptorSets[i] = dzn_descriptor_set_to_handle(set);
   }

   return VK_SUCCESS;
}

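/*
 * Freeing sets doesn't return descriptors individually: each heap's
 * pool-wide free_offset is recomputed as the highest offset still used by a
 * live set, and defragmentation reclaims the holes in between on a later
 * allocation.
 */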
VKAPI_ATTR VkResult VKAPI_CALL
dzn_FreeDescriptorSets(VkDevice dev,
                       VkDescriptorPool descriptorPool,
                       uint32_t count,
                       const VkDescriptorSet *pDescriptorSets)
{
   VK_FROM_HANDLE(dzn_descriptor_pool, pool, descriptorPool);

   for (uint32_t s = 0; s < count; s++) {
      VK_FROM_HANDLE(dzn_descriptor_set, set, pDescriptorSets[s]);

      if (!set)
         continue;

      assert(set->pool == pool);

      dzn_descriptor_set_finish(set);
   }

   mtx_lock(&pool->defragment_lock);
   dzn_foreach_pool_type(type)
      pool->free_offset[type] = 0;

   for (uint32_t s = 0; s < pool->set_count; s++) {
      const struct dzn_descriptor_set *set = &pool->sets[s];

      if (set->layout) {
         dzn_foreach_pool_type (type) {
            pool->free_offset[type] =
               MAX2(pool->free_offset[type],
                    set->heap_offsets[type] +
                    set->layout->range_desc_count[type]);
         }
      }
   }
   mtx_unlock(&pool->defragment_lock);

   return VK_SUCCESS;
}

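/*
 * Apply one VkWriteDescriptorSet. A dzn_descriptor_set_ptr walks the
 * destination range (descriptor writes may spill over into consecutive
 * bindings), and each descriptor type is routed to the matching
 * sampler/image-view/buffer/buffer-view write helper.
 */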
static void
dzn_descriptor_set_write(const VkWriteDescriptorSet *pDescriptorWrite)
{
   VK_FROM_HANDLE(dzn_descriptor_set, set, pDescriptorWrite->dstSet);

   struct dzn_descriptor_set_ptr ptr;

   dzn_descriptor_set_ptr_init(set->layout, &ptr,
                               pDescriptorWrite->dstBinding,
                               pDescriptorWrite->dstArrayElement);
   uint32_t desc_count = pDescriptorWrite->descriptorCount;

   uint32_t d = 0;
   bool cube_as_2darray =
      pDescriptorWrite->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;

   switch (pDescriptorWrite->descriptorType) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      for (; dzn_descriptor_set_ptr_is_valid(&ptr) && d < desc_count;
           dzn_descriptor_set_ptr_move(set->layout, &ptr, 1)) {
         assert(dzn_descriptor_set_ptr_get_vk_type(set->layout, &ptr) == pDescriptorWrite->descriptorType);
         const VkDescriptorImageInfo *pImageInfo = pDescriptorWrite->pImageInfo + d;
         VK_FROM_HANDLE(dzn_sampler, sampler, pImageInfo->sampler);

         if (sampler)
            dzn_descriptor_set_ptr_write_sampler_desc(set, &ptr, sampler);

         d++;
      }
      break;
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      for (; dzn_descriptor_set_ptr_is_valid(&ptr) && d < desc_count;
           dzn_descriptor_set_ptr_move(set->layout, &ptr, 1)) {
         assert(dzn_descriptor_set_ptr_get_vk_type(set->layout, &ptr) == pDescriptorWrite->descriptorType);
         const VkDescriptorImageInfo *pImageInfo = pDescriptorWrite->pImageInfo + d;
         VK_FROM_HANDLE(dzn_sampler, sampler, pImageInfo->sampler);
         VK_FROM_HANDLE(dzn_image_view, iview, pImageInfo->imageView);

         if (sampler)
            dzn_descriptor_set_ptr_write_sampler_desc(set, &ptr, sampler);

         if (iview)
            dzn_descriptor_set_ptr_write_image_view_desc(set, &ptr, cube_as_2darray, iview);

         d++;
      }
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      for (; dzn_descriptor_set_ptr_is_valid(&ptr) && d < desc_count;
           dzn_descriptor_set_ptr_move(set->layout, &ptr, 1)) {
         assert(dzn_descriptor_set_ptr_get_vk_type(set->layout, &ptr) == pDescriptorWrite->descriptorType);
         const VkDescriptorImageInfo *pImageInfo = pDescriptorWrite->pImageInfo + d;
         VK_FROM_HANDLE(dzn_image_view, iview, pImageInfo->imageView);

         if (iview)
            dzn_descriptor_set_ptr_write_image_view_desc(set, &ptr, cube_as_2darray, iview);

         d++;
      }
      break;
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      for (; dzn_descriptor_set_ptr_is_valid(&ptr) && d < desc_count;
           dzn_descriptor_set_ptr_move(set->layout, &ptr, 1)) {
         assert(dzn_descriptor_set_ptr_get_vk_type(set->layout, &ptr) == pDescriptorWrite->descriptorType);
         const VkDescriptorBufferInfo *binfo = &pDescriptorWrite->pBufferInfo[d];
         struct dzn_buffer_desc desc = {
            pDescriptorWrite->descriptorType,
            dzn_buffer_from_handle(binfo->buffer),
            binfo->range, binfo->offset
         };

         if (desc.buffer)
            dzn_descriptor_set_ptr_write_buffer_desc(set, &ptr, &desc);

         d++;
      }
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      for (; dzn_descriptor_set_ptr_is_valid(&ptr) && d < desc_count;
           dzn_descriptor_set_ptr_move(set->layout, &ptr, 1)) {
         assert(dzn_descriptor_set_ptr_get_vk_type(set->layout, &ptr) == pDescriptorWrite->descriptorType);
         const VkDescriptorBufferInfo *binfo = &pDescriptorWrite->pBufferInfo[d];
         struct dzn_buffer_desc desc = {
            pDescriptorWrite->descriptorType,
            dzn_buffer_from_handle(binfo->buffer),
            binfo->range, binfo->offset
         };

         if (desc.buffer)
            dzn_descriptor_set_ptr_write_dynamic_buffer_desc(set, &ptr, &desc);

         d++;
      }
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      for (; dzn_descriptor_set_ptr_is_valid(&ptr) && d < desc_count;
           dzn_descriptor_set_ptr_move(set->layout, &ptr, 1)) {
         assert(dzn_descriptor_set_ptr_get_vk_type(set->layout, &ptr) == pDescriptorWrite->descriptorType);
         VK_FROM_HANDLE(dzn_buffer_view, bview, pDescriptorWrite->pTexelBufferView[d]);

         if (bview)
            dzn_descriptor_set_ptr_write_buffer_view_desc(set, &ptr, bview);

         d++;
      }
      break;

   default:
      unreachable("invalid descriptor type");
      break;
   }

   assert(d == pDescriptorWrite->descriptorCount);
}

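/*
 * Apply one VkCopyDescriptorSet. Dynamic buffer descriptors live in a
 * CPU-side array and are memcpy()d; everything else is copied between the
 * source and destination pool heaps, including the extra UAV slot for
 * descriptor types whose range type depends on shader usage.
 */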
static void
dzn_descriptor_set_copy(const VkCopyDescriptorSet *pDescriptorCopy)
{
   VK_FROM_HANDLE(dzn_descriptor_set, src_set, pDescriptorCopy->srcSet);
   VK_FROM_HANDLE(dzn_descriptor_set, dst_set, pDescriptorCopy->dstSet);
   struct dzn_descriptor_set_ptr src_ptr, dst_ptr;

   dzn_descriptor_set_ptr_init(src_set->layout, &src_ptr,
                               pDescriptorCopy->srcBinding,
                               pDescriptorCopy->srcArrayElement);
   dzn_descriptor_set_ptr_init(dst_set->layout, &dst_ptr,
                               pDescriptorCopy->dstBinding,
                               pDescriptorCopy->dstArrayElement);

   uint32_t copied_count = 0;

   while (dzn_descriptor_set_ptr_is_valid(&src_ptr) &&
          dzn_descriptor_set_ptr_is_valid(&dst_ptr) &&
          copied_count < pDescriptorCopy->descriptorCount) {
      VkDescriptorType src_type =
         dzn_descriptor_set_ptr_get_vk_type(src_set->layout, &src_ptr);
      ASSERTED VkDescriptorType dst_type =
         dzn_descriptor_set_ptr_get_vk_type(dst_set->layout, &dst_ptr);

      assert(src_type == dst_type);
      uint32_t count =
         MIN2(dzn_descriptor_set_remaining_descs_in_binding(src_set->layout, &src_ptr),
              dzn_descriptor_set_remaining_descs_in_binding(dst_set->layout, &dst_ptr));

      if (src_type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
          src_type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
         uint32_t src_idx =
            dzn_descriptor_set_ptr_get_dynamic_buffer_idx(src_set->layout, &src_ptr);
         uint32_t dst_idx =
            dzn_descriptor_set_ptr_get_dynamic_buffer_idx(dst_set->layout, &dst_ptr);

         memcpy(&dst_set->dynamic_buffers[dst_idx],
                &src_set->dynamic_buffers[src_idx],
                sizeof(*dst_set->dynamic_buffers) * count);
      } else {
         dzn_foreach_pool_type(type) {
            uint32_t src_heap_offset =
               dzn_descriptor_set_ptr_get_heap_offset(src_set->layout, type, &src_ptr, false);
            uint32_t dst_heap_offset =
               dzn_descriptor_set_ptr_get_heap_offset(dst_set->layout, type, &dst_ptr, false);

            if (src_heap_offset == ~0) {
               assert(dst_heap_offset == ~0);
               continue;
            }

            /* Source and destination sets may come from the same pool, and
             * the defragment lock is a plain (non-recursive) mutex, so only
             * take the second lock when the pools are distinct.
             */
            mtx_lock(&src_set->pool->defragment_lock);
            if (dst_set->pool != src_set->pool)
               mtx_lock(&dst_set->pool->defragment_lock);
            dzn_descriptor_heap_copy(&dst_set->pool->heaps[type],
                                     dst_set->heap_offsets[type] + dst_heap_offset,
                                     &src_set->pool->heaps[type],
                                     src_set->heap_offsets[type] + src_heap_offset,
                                     count);

            if (dzn_descriptor_type_depends_on_shader_usage(src_type)) {
               src_heap_offset =
                  dzn_descriptor_set_ptr_get_heap_offset(src_set->layout, type, &src_ptr, true);
               dst_heap_offset =
                  dzn_descriptor_set_ptr_get_heap_offset(dst_set->layout, type, &dst_ptr, true);
               assert(src_heap_offset != ~0);
               assert(dst_heap_offset != ~0);
               dzn_descriptor_heap_copy(&dst_set->pool->heaps[type],
                                        dst_set->heap_offsets[type] + dst_heap_offset,
                                        &src_set->pool->heaps[type],
                                        src_set->heap_offsets[type] + src_heap_offset,
                                        count);
            }
            if (dst_set->pool != src_set->pool)
               mtx_unlock(&dst_set->pool->defragment_lock);
            mtx_unlock(&src_set->pool->defragment_lock);
         }
      }

      dzn_descriptor_set_ptr_move(src_set->layout, &src_ptr, count);
      dzn_descriptor_set_ptr_move(dst_set->layout, &dst_ptr, count);
      copied_count += count;
   }

   assert(copied_count == pDescriptorCopy->descriptorCount);
}

VKAPI_ATTR void VKAPI_CALL
dzn_UpdateDescriptorSets(VkDevice _device,
                         uint32_t descriptorWriteCount,
                         const VkWriteDescriptorSet *pDescriptorWrites,
                         uint32_t descriptorCopyCount,
                         const VkCopyDescriptorSet *pDescriptorCopies)
{
   for (unsigned i = 0; i < descriptorWriteCount; i++)
      dzn_descriptor_set_write(&pDescriptorWrites[i]);

   for (unsigned i = 0; i < descriptorCopyCount; i++)
      dzn_descriptor_set_copy(&pDescriptorCopies[i]);
}

static void
dzn_descriptor_update_template_destroy(struct dzn_descriptor_update_template *templ,
                                       const VkAllocationCallbacks *alloc)
{
   if (!templ)
      return;

   struct dzn_device *device =
      container_of(templ->base.device, struct dzn_device, vk);

   vk_object_base_finish(&templ->base);
   vk_free2(&device->vk.alloc, alloc, templ);
}

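/*
 * Descriptor update templates are baked in two passes: a first walk over the
 * update entries counts how many internal entries are needed (one per
 * contiguous run within a binding), then a second walk records, for each
 * run, the descriptor type, heap offsets, dynamic-buffer index, and the
 * offset/stride used to fetch descriptor data from the application blob.
 */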
static VkResult
dzn_descriptor_update_template_create(struct dzn_device *device,
                                      const VkDescriptorUpdateTemplateCreateInfo *info,
                                      const VkAllocationCallbacks *alloc,
                                      VkDescriptorUpdateTemplate *out)
{
   assert(info->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET);

   VK_FROM_HANDLE(dzn_descriptor_set_layout, set_layout, info->descriptorSetLayout);

   uint32_t entry_count = 0;
   for (uint32_t e = 0; e < info->descriptorUpdateEntryCount; e++) {
      struct dzn_descriptor_set_ptr ptr;
      dzn_descriptor_set_ptr_init(set_layout, &ptr,
                                  info->pDescriptorUpdateEntries[e].dstBinding,
                                  info->pDescriptorUpdateEntries[e].dstArrayElement);
      uint32_t desc_count = info->pDescriptorUpdateEntries[e].descriptorCount;
      ASSERTED VkDescriptorType type = info->pDescriptorUpdateEntries[e].descriptorType;
      uint32_t d = 0;

      while (dzn_descriptor_set_ptr_is_valid(&ptr) && d < desc_count) {
         uint32_t ndescs = dzn_descriptor_set_remaining_descs_in_binding(set_layout, &ptr);

         assert(dzn_descriptor_set_ptr_get_vk_type(set_layout, &ptr) == type);
         d += ndescs;
         dzn_descriptor_set_ptr_move(set_layout, &ptr, ndescs);
         entry_count++;
      }

      assert(d >= desc_count);
   }

   VK_MULTIALLOC(ma);
   VK_MULTIALLOC_DECL(&ma, struct dzn_descriptor_update_template, templ, 1);
   VK_MULTIALLOC_DECL(&ma, struct dzn_descriptor_update_template_entry, entries, entry_count);

   if (!vk_multialloc_zalloc2(&ma, &device->vk.alloc, alloc,
                              VK_SYSTEM_ALLOCATION_SCOPE_OBJECT))
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &templ->base, VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE);
   templ->entry_count = entry_count;
   templ->entries = entries;

   struct dzn_descriptor_update_template_entry *entry = entries;

   for (uint32_t e = 0; e < info->descriptorUpdateEntryCount; e++) {
      struct dzn_descriptor_set_ptr ptr;
      dzn_descriptor_set_ptr_init(set_layout, &ptr,
                                  info->pDescriptorUpdateEntries[e].dstBinding,
                                  info->pDescriptorUpdateEntries[e].dstArrayElement);
      uint32_t desc_count = info->pDescriptorUpdateEntries[e].descriptorCount;
      VkDescriptorType type = info->pDescriptorUpdateEntries[e].descriptorType;
      size_t user_data_offset = info->pDescriptorUpdateEntries[e].offset;
      size_t user_data_stride = info->pDescriptorUpdateEntries[e].stride;
      uint32_t d = 0;

      while (dzn_descriptor_set_ptr_is_valid(&ptr) && d < desc_count) {
         uint32_t ndescs = dzn_descriptor_set_remaining_descs_in_binding(set_layout, &ptr);

         entry->type = type;
         entry->desc_count = MIN2(desc_count - d, ndescs);
         entry->user_data.stride = user_data_stride;
         entry->user_data.offset = user_data_offset;
         memset(&entry->heap_offsets, ~0, sizeof(entry->heap_offsets));

         assert(dzn_descriptor_set_ptr_get_vk_type(set_layout, &ptr) == type);
         if (dzn_desc_type_has_sampler(type)) {
            entry->heap_offsets.sampler =
               dzn_descriptor_set_ptr_get_heap_offset(set_layout,
                                                      D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER,
                                                      &ptr, false);
         }

         if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
             type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
            entry->dynamic_buffer_idx =
               dzn_descriptor_set_ptr_get_dynamic_buffer_idx(set_layout, &ptr);
         } else if (type != VK_DESCRIPTOR_TYPE_SAMPLER) {
            entry->heap_offsets.cbv_srv_uav =
               dzn_descriptor_set_ptr_get_heap_offset(set_layout,
                                                      D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV,
                                                      &ptr, false);
            if (dzn_descriptor_type_depends_on_shader_usage(type)) {
               entry->heap_offsets.extra_uav =
                  dzn_descriptor_set_ptr_get_heap_offset(set_layout,
                                                         D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV,
                                                         &ptr, true);
            }
         }

         d += ndescs;
         dzn_descriptor_set_ptr_move(set_layout, &ptr, ndescs);
         user_data_offset += user_data_stride * ndescs;
         ++entry;
      }
   }

   *out = dzn_descriptor_update_template_to_handle(templ);
   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL
dzn_CreateDescriptorUpdateTemplate(VkDevice device,
                                   const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
                                   const VkAllocationCallbacks *pAllocator,
                                   VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
{
   return dzn_descriptor_update_template_create(dzn_device_from_handle(device),
                                                pCreateInfo, pAllocator,
                                                pDescriptorUpdateTemplate);
}

VKAPI_ATTR void VKAPI_CALL
dzn_DestroyDescriptorUpdateTemplate(VkDevice device,
                                    VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                    const VkAllocationCallbacks *pAllocator)
{
   dzn_descriptor_update_template_destroy(dzn_descriptor_update_template_from_handle(descriptorUpdateTemplate),
                                          pAllocator);
}

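/* Return a pointer to the application-provided data for the d-th descriptor
 * of entry e, using the offset/stride captured at template creation time.
 */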
static const void *
dzn_descriptor_update_template_get_desc_data(const struct dzn_descriptor_update_template *templ,
                                             uint32_t e, uint32_t d,
                                             const void *user_data)
{
   return (const void *)((const uint8_t *)user_data +
                         templ->entries[e].user_data.offset +
                         (d * templ->entries[e].user_data.stride));
}

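/*
 * vkUpdateDescriptorSetWithTemplate(): replay the baked entries against
 * pData. This mirrors dzn_descriptor_set_write(), except heap offsets were
 * precomputed at template creation, so the write helpers are called directly
 * instead of going through a dzn_descriptor_set_ptr.
 */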
VKAPI_ATTR void VKAPI_CALL
dzn_UpdateDescriptorSetWithTemplate(VkDevice device,
                                    VkDescriptorSet descriptorSet,
                                    VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                    const void *pData)
{
   VK_FROM_HANDLE(dzn_descriptor_set, set, descriptorSet);
   VK_FROM_HANDLE(dzn_descriptor_update_template, templ, descriptorUpdateTemplate);

   for (uint32_t e = 0; e < templ->entry_count; e++) {
      const struct dzn_descriptor_update_template_entry *entry = &templ->entries[e];
      bool cube_as_2darray =
         entry->type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         for (uint32_t d = 0; d < entry->desc_count; d++) {
            const VkDescriptorImageInfo *info = (const VkDescriptorImageInfo *)
               dzn_descriptor_update_template_get_desc_data(templ, e, d, pData);
            VK_FROM_HANDLE(dzn_sampler, sampler, info->sampler);

            if (sampler)
               dzn_descriptor_set_write_sampler_desc(set, entry->heap_offsets.sampler + d, sampler);
         }
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for (uint32_t d = 0; d < entry->desc_count; d++) {
            const VkDescriptorImageInfo *info = (const VkDescriptorImageInfo *)
               dzn_descriptor_update_template_get_desc_data(templ, e, d, pData);
            VK_FROM_HANDLE(dzn_sampler, sampler, info->sampler);
            VK_FROM_HANDLE(dzn_image_view, iview, info->imageView);

            if (sampler)
               dzn_descriptor_set_write_sampler_desc(set, entry->heap_offsets.sampler + d, sampler);

            if (iview)
               dzn_descriptor_set_write_image_view_desc(set, entry->heap_offsets.cbv_srv_uav + d, ~0, cube_as_2darray, iview);
         }
         break;

      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
         for (uint32_t d = 0; d < entry->desc_count; d++) {
            const VkDescriptorImageInfo *info = (const VkDescriptorImageInfo *)
               dzn_descriptor_update_template_get_desc_data(templ, e, d, pData);
            uint32_t srv_heap_offset = entry->heap_offsets.cbv_srv_uav + d;
            uint32_t uav_heap_offset =
               entry->type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ?
               entry->heap_offsets.extra_uav + d : ~0;
            VK_FROM_HANDLE(dzn_image_view, iview, info->imageView);

            if (iview)
               dzn_descriptor_set_write_image_view_desc(set, srv_heap_offset, uav_heap_offset, cube_as_2darray, iview);
         }
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         for (uint32_t d = 0; d < entry->desc_count; d++) {
            const VkDescriptorBufferInfo *info = (const VkDescriptorBufferInfo *)
               dzn_descriptor_update_template_get_desc_data(templ, e, d, pData);
            uint32_t cbv_srv_heap_offset = entry->heap_offsets.cbv_srv_uav + d;
            uint32_t uav_heap_offset =
               entry->type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ?
               entry->heap_offsets.extra_uav + d : ~0;

            struct dzn_buffer_desc desc = {
               entry->type,
               dzn_buffer_from_handle(info->buffer),
               info->range, info->offset
            };

            if (desc.buffer)
               dzn_descriptor_set_write_buffer_desc(set, cbv_srv_heap_offset, uav_heap_offset, &desc);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t d = 0; d < entry->desc_count; d++) {
            const VkDescriptorBufferInfo *info = (const VkDescriptorBufferInfo *)
               dzn_descriptor_update_template_get_desc_data(templ, e, d, pData);
            uint32_t dyn_buf_idx = entry->dynamic_buffer_idx + d;

            struct dzn_buffer_desc desc = {
               entry->type,
               dzn_buffer_from_handle(info->buffer),
               info->range, info->offset
            };

            if (desc.buffer)
               dzn_descriptor_set_write_dynamic_buffer_desc(set, dyn_buf_idx, &desc);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t d = 0; d < entry->desc_count; d++) {
            const VkBufferView *info = (const VkBufferView *)
               dzn_descriptor_update_template_get_desc_data(templ, e, d, pData);
            VK_FROM_HANDLE(dzn_buffer_view, bview, *info);
            uint32_t srv_heap_offset = entry->heap_offsets.cbv_srv_uav + d;
            uint32_t uav_heap_offset =
               entry->type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER ?
               entry->heap_offsets.extra_uav + d : ~0;

            if (bview)
               dzn_descriptor_set_write_buffer_view_desc(set, srv_heap_offset, uav_heap_offset, bview);
         }
         break;

      default:
         unreachable("invalid descriptor type");
      }
   }
}
