/*-------------------------------------------------------------------------
 * Vulkan Conformance Tests
 * ------------------------
 *
 * Copyright (c) 2021 The Khronos Group Inc.
 * Copyright (c) 2021 Valve Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *//*!
 * \file
 * \brief Tests for VK_VALVE_mutable_descriptor_type.
 *//*--------------------------------------------------------------------*/

#include "vktBindingValveMutableTests.hpp"
#include "vktTestCase.hpp"

#include "vkDefs.hpp"
#include "vkRefUtil.hpp"
#include "vkQueryUtil.hpp"
#include "vkImageWithMemory.hpp"
#include "vkBufferWithMemory.hpp"
#include "vkTypeUtil.hpp"
#include "vkObjUtil.hpp"
#include "vkBarrierUtil.hpp"
#include "vkCmdUtil.hpp"
#include "vkBuilderUtil.hpp"
#include "vkRayTracingUtil.hpp"

#include "deUniquePtr.hpp"
#include "deSTLUtil.hpp"
#include "deStringUtil.hpp"

#include <vector>
#include <algorithm>
#include <iterator>
#include <set>
#include <sstream>
#include <limits>

namespace vkt
{
namespace BindingModel
{

namespace
{

using namespace vk;

deUint32 getDescriptorNumericValue (deUint32 iteration, deUint32 bindingIdx, deUint32 descriptorIdx = 0u)
{
    // When assigning numeric values for the descriptor contents, each descriptor will get 0x5aIIBBDD. II is an octet containing the
    // iteration index. BB is an octet containing the binding index and DD is the descriptor index inside that binding.
    constexpr deUint32 kNumericValueBase = 0x5a000000u;

    return (kNumericValueBase | ((iteration & 0xFFu) << 16) | ((bindingIdx & 0xFFu) << 8) | (descriptorIdx & 0xFFu));
}

deUint16 getAccelerationStructureOffsetX (deUint32 descriptorNumericValue)
{
    // Keep the lowest 16 bits (binding and descriptor idx) as the offset.
    return static_cast<deUint16>(descriptorNumericValue);
}

// Value that will be stored in the output buffer to signal success reading values.
deUint32 getExpectedOutputBufferValue ()
{
    return 2u;
}

// This value will be stored in an image to be sampled when checking descriptors containing samplers alone.
deUint32 getExternalSampledImageValue ()
{
    return 0x41322314u;
}

// Value that will be ORed with the descriptor value before writing.
deUint32 getStoredValueMask ()
{
    return 0xFF000000u;
}

VkFormat getDescriptorImageFormat ()
{
    return VK_FORMAT_R32_UINT;
}

VkExtent3D getDefaultExtent ()
{
    return makeExtent3D(1u, 1u, 1u);
}

// Convert value to hexadecimal.
std::string toHex (deUint32 val)
{
    std::ostringstream s;
    s << "0x" << std::hex << val << "u";
    return s.str();
}
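// For example, getDescriptorNumericValue(1u, 2u, 3u) above yields 0x5a010203u, which
// toHex() renders as the string "0x5a010203u" when emitted into generated GLSL.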
// Returns the list of descriptor types that cannot be part of a mutable descriptor.
std::vector<VkDescriptorType> getForbiddenMutableTypes ()
{
    return std::vector<VkDescriptorType>
    {
        VK_DESCRIPTOR_TYPE_MUTABLE_VALVE,
        VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
        VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
        VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT,
    };
}

// Returns the list of descriptor types that are mandatory for the extension.
std::vector<VkDescriptorType> getMandatoryMutableTypes ()
{
    return std::vector<VkDescriptorType>
    {
        VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
        VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
        VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
        VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
        VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
        VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
    };
}

// This helps quickly transform a vector of descriptor types into a bitmask, which makes it easier to check some conditions.
enum DescriptorTypeFlagBits
{
    DTFB_SAMPLER                    = (1 << 0),
    DTFB_COMBINED_IMAGE_SAMPLER     = (1 << 1),
    DTFB_SAMPLED_IMAGE              = (1 << 2),
    DTFB_STORAGE_IMAGE              = (1 << 3),
    DTFB_UNIFORM_TEXEL_BUFFER       = (1 << 4),
    DTFB_STORAGE_TEXEL_BUFFER       = (1 << 5),
    DTFB_UNIFORM_BUFFER             = (1 << 6),
    DTFB_STORAGE_BUFFER             = (1 << 7),
    DTFB_UNIFORM_BUFFER_DYNAMIC     = (1 << 8),
    DTFB_STORAGE_BUFFER_DYNAMIC     = (1 << 9),
    DTFB_INPUT_ATTACHMENT           = (1 << 10),
    DTFB_INLINE_UNIFORM_BLOCK_EXT   = (1 << 11),
    DTFB_ACCELERATION_STRUCTURE_KHR = (1 << 12),
    DTFB_ACCELERATION_STRUCTURE_NV  = (1 << 13),
    DTFB_MUTABLE_VALVE              = (1 << 14),
};

using DescriptorTypeFlags = deUint32;

// Convert type to its corresponding flag bit.
DescriptorTypeFlagBits toDescriptorTypeFlagBit (VkDescriptorType descriptorType)
{
    switch (descriptorType)
    {
    case VK_DESCRIPTOR_TYPE_SAMPLER:                    return DTFB_SAMPLER;
    case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:     return DTFB_COMBINED_IMAGE_SAMPLER;
    case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:              return DTFB_SAMPLED_IMAGE;
    case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:              return DTFB_STORAGE_IMAGE;
    case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:       return DTFB_UNIFORM_TEXEL_BUFFER;
    case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:       return DTFB_STORAGE_TEXEL_BUFFER;
    case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:             return DTFB_UNIFORM_BUFFER;
    case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:             return DTFB_STORAGE_BUFFER;
    case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:     return DTFB_UNIFORM_BUFFER_DYNAMIC;
    case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:     return DTFB_STORAGE_BUFFER_DYNAMIC;
    case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:           return DTFB_INPUT_ATTACHMENT;
    case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:   return DTFB_INLINE_UNIFORM_BLOCK_EXT;
    case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: return DTFB_ACCELERATION_STRUCTURE_KHR;
    case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV:  return DTFB_ACCELERATION_STRUCTURE_NV;
    case VK_DESCRIPTOR_TYPE_MUTABLE_VALVE:              return DTFB_MUTABLE_VALVE;
    default: break;
    }

    // Unreachable.
    DE_ASSERT(false);
    return DTFB_SAMPLER;
}

// Convert vector of descriptor types to a bitfield.
DescriptorTypeFlags toDescriptorTypeFlags (const std::vector<VkDescriptorType>& types)
{
    DescriptorTypeFlags result = 0u;
    for (const auto& t : types)
        result |= toDescriptorTypeFlagBit(t);
    return result;
}
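// For example, toDescriptorTypeFlags({VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER})
// yields (DTFB_SAMPLED_IMAGE | DTFB_STORAGE_BUFFER); toDescriptorTypeVector() below performs the inverse
// conversion, returning the types in flag-bit order with any duplicates collapsed.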
// Convert bitfield to vector of descriptor types.
std::vector<VkDescriptorType> toDescriptorTypeVector (DescriptorTypeFlags bitfield)
{
    std::vector<VkDescriptorType> result;

    if (bitfield & DTFB_SAMPLER)                    result.push_back(VK_DESCRIPTOR_TYPE_SAMPLER);
    if (bitfield & DTFB_COMBINED_IMAGE_SAMPLER)     result.push_back(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    if (bitfield & DTFB_SAMPLED_IMAGE)              result.push_back(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
    if (bitfield & DTFB_STORAGE_IMAGE)              result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
    if (bitfield & DTFB_UNIFORM_TEXEL_BUFFER)       result.push_back(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
    if (bitfield & DTFB_STORAGE_TEXEL_BUFFER)       result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
    if (bitfield & DTFB_UNIFORM_BUFFER)             result.push_back(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
    if (bitfield & DTFB_STORAGE_BUFFER)             result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
    if (bitfield & DTFB_UNIFORM_BUFFER_DYNAMIC)     result.push_back(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
    if (bitfield & DTFB_STORAGE_BUFFER_DYNAMIC)     result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
    if (bitfield & DTFB_INPUT_ATTACHMENT)           result.push_back(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
    if (bitfield & DTFB_INLINE_UNIFORM_BLOCK_EXT)   result.push_back(VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT);
    if (bitfield & DTFB_ACCELERATION_STRUCTURE_KHR) result.push_back(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
    if (bitfield & DTFB_ACCELERATION_STRUCTURE_NV)  result.push_back(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV);
    if (bitfield & DTFB_MUTABLE_VALVE)              result.push_back(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE);

    return result;
}

// How to create the source set when copying descriptors from another set.
// * MUTABLE means to transform bindings into mutable bindings.
// * NONMUTABLE means to transform bindings into non-mutable bindings.
enum class SourceSetStrategy
{
    MUTABLE = 0,
    NONMUTABLE,
    NO_SOURCE,
};

enum class PoolMutableStrategy
{
    KEEP_TYPES = 0,
    EXPAND_TYPES,
    NO_TYPES,
};

// Type of information that's present in VkWriteDescriptorSet.
enum class WriteType
{
    IMAGE_INFO = 0,
    BUFFER_INFO,
    BUFFER_VIEW,
    ACCELERATION_STRUCTURE_INFO,
};

struct WriteInfo
{
    WriteType writeType;
    union
    {
        VkDescriptorImageInfo                        imageInfo;
        VkDescriptorBufferInfo                       bufferInfo;
        VkBufferView                                 bufferView;
        VkWriteDescriptorSetAccelerationStructureKHR asInfo;
    };

    explicit WriteInfo (const VkDescriptorImageInfo& info_)
        : writeType(WriteType::IMAGE_INFO)
        , imageInfo(info_)
    {}

    explicit WriteInfo (const VkDescriptorBufferInfo& info_)
        : writeType(WriteType::BUFFER_INFO)
        , bufferInfo(info_)
    {}

    explicit WriteInfo (VkBufferView view_)
        : writeType(WriteType::BUFFER_VIEW)
        , bufferView(view_)
    {}

    explicit WriteInfo (const VkWriteDescriptorSetAccelerationStructureKHR& asInfo_)
        : writeType(WriteType::ACCELERATION_STRUCTURE_INFO)
        , asInfo(asInfo_)
    {}
};

// Resource backing up a single binding.
enum class ResourceType
{
    SAMPLER = 0,
    IMAGE,
    COMBINED_IMAGE_SAMPLER,
    BUFFER,
    BUFFER_VIEW,
    ACCELERATION_STRUCTURE,
};
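// For reference, the mapping implemented by toResourceType() below: sampler descriptors are backed
// by a VkSampler; sampled images, storage images and input attachments by an image; texel buffers
// by a buffer plus a buffer view; uniform and storage buffers by a plain buffer; and acceleration
// structure descriptors by a TLAS/BLAS pair.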
// Type of resource backing up a particular descriptor type.
ResourceType toResourceType (VkDescriptorType descriptorType)
{
    ResourceType resourceType = ResourceType::SAMPLER;
    switch (descriptorType)
    {
    case VK_DESCRIPTOR_TYPE_SAMPLER:
        resourceType = ResourceType::SAMPLER;
        break;

    case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
        resourceType = ResourceType::COMBINED_IMAGE_SAMPLER;
        break;

    case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
    case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
    case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
        resourceType = ResourceType::IMAGE;
        break;

    case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
    case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
        resourceType = ResourceType::BUFFER_VIEW;
        break;

    case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
    case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
        resourceType = ResourceType::BUFFER;
        break;

    case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
        resourceType = ResourceType::ACCELERATION_STRUCTURE;
        break;

    default:
        DE_ASSERT(false);
        break;
    }

    return resourceType;
}

bool isShaderWritable (VkDescriptorType descriptorType)
{
    return (descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER || descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ||
            descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
}

Move<VkSampler> makeDefaultSampler (const DeviceInterface& vkd, VkDevice device)
{
    const VkSamplerCreateInfo samplerCreateInfo = {
        VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, // VkStructureType      sType;
        nullptr,                               // const void*          pNext;
        0u,                                    // VkSamplerCreateFlags flags;
        VK_FILTER_NEAREST,                     // VkFilter             magFilter;
        VK_FILTER_NEAREST,                     // VkFilter             minFilter;
        VK_SAMPLER_MIPMAP_MODE_NEAREST,        // VkSamplerMipmapMode  mipmapMode;
        VK_SAMPLER_ADDRESS_MODE_REPEAT,        // VkSamplerAddressMode addressModeU;
        VK_SAMPLER_ADDRESS_MODE_REPEAT,        // VkSamplerAddressMode addressModeV;
        VK_SAMPLER_ADDRESS_MODE_REPEAT,        // VkSamplerAddressMode addressModeW;
        0.f,                                   // float                mipLodBias;
        VK_FALSE,                              // VkBool32             anisotropyEnable;
        1.f,                                   // float                maxAnisotropy;
        VK_FALSE,                              // VkBool32             compareEnable;
        VK_COMPARE_OP_ALWAYS,                  // VkCompareOp          compareOp;
        0.f,                                   // float                minLod;
        0.f,                                   // float                maxLod;
        VK_BORDER_COLOR_INT_TRANSPARENT_BLACK, // VkBorderColor        borderColor;
        VK_FALSE,                              // VkBool32             unnormalizedCoordinates;
    };

    return createSampler(vkd, device, &samplerCreateInfo);
}

de::MovePtr<ImageWithMemory> makeDefaultImage (const DeviceInterface& vkd, VkDevice device, Allocator& alloc)
{
    const auto extent = makeExtent3D(1u, 1u, 1u);
    const VkImageUsageFlags usageFlags = (
        VK_IMAGE_USAGE_SAMPLED_BIT
        | VK_IMAGE_USAGE_STORAGE_BIT
        | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
        | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
        | VK_IMAGE_USAGE_TRANSFER_SRC_BIT
        | VK_IMAGE_USAGE_TRANSFER_DST_BIT);

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType       sType;
        nullptr,                             // const void*           pNext;
        0u,                                  // VkImageCreateFlags    flags;
        VK_IMAGE_TYPE_2D,                    // VkImageType           imageType;
        getDescriptorImageFormat(),          // VkFormat              format;
        extent,                              // VkExtent3D            extent;
        1u,                                  // deUint32              mipLevels;
        1u,                                  // deUint32              arrayLayers;
        VK_SAMPLE_COUNT_1_BIT,               // VkSampleCountFlagBits samples;
        VK_IMAGE_TILING_OPTIMAL,             // VkImageTiling         tiling;
        usageFlags,                          // VkImageUsageFlags     usage;
        VK_SHARING_MODE_EXCLUSIVE,           // VkSharingMode         sharingMode;
        0u,                                  // deUint32              queueFamilyIndexCount;
        nullptr,                             // const deUint32*       pQueueFamilyIndices;
        VK_IMAGE_LAYOUT_UNDEFINED,           // VkImageLayout         initialLayout;
    };
    return de::MovePtr<ImageWithMemory>(new ImageWithMemory(vkd, device, alloc, imageCreateInfo, MemoryRequirement::Any));
}

Move<VkImageView> makeDefaultImageView (const DeviceInterface& vkd, VkDevice device, VkImage image)
{
    const auto subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
    return makeImageView(vkd, device, image, VK_IMAGE_VIEW_TYPE_2D, getDescriptorImageFormat(), subresourceRange);
}

de::MovePtr<BufferWithMemory> makeDefaultBuffer (const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 numElements = 1u)
{
    const VkBufferUsageFlags bufferUsage = (
        VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT
        | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT
        | VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT
        | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT
        | VK_BUFFER_USAGE_TRANSFER_SRC_BIT
        | VK_BUFFER_USAGE_TRANSFER_DST_BIT);

    const auto bufferSize = static_cast<VkDeviceSize>(sizeof(deUint32) * static_cast<size_t>(numElements));

    const auto bufferCreateInfo = makeBufferCreateInfo(bufferSize, bufferUsage);

    return de::MovePtr<BufferWithMemory>(new BufferWithMemory(vkd, device, alloc, bufferCreateInfo, MemoryRequirement::HostVisible));
}

Move<VkBufferView> makeDefaultBufferView (const DeviceInterface& vkd, VkDevice device, VkBuffer buffer)
{
    const auto bufferOffset = static_cast<VkDeviceSize>(0);
    const auto bufferSize   = static_cast<VkDeviceSize>(sizeof(deUint32));

    return makeBufferView(vkd, device, buffer, getDescriptorImageFormat(), bufferOffset, bufferSize);
}

struct AccelerationStructureData
{
    using TLASPtr = de::MovePtr<TopLevelAccelerationStructure>;
    using BLASPtr = de::MovePtr<BottomLevelAccelerationStructure>;

    TLASPtr tlas;
    BLASPtr blas;

    void swap (AccelerationStructureData& other)
    {
        auto myTlasPtr = tlas.release();
        auto myBlasPtr = blas.release();

        auto otherTlasPtr = other.tlas.release();
        auto otherBlasPtr = other.blas.release();

        tlas = TLASPtr(otherTlasPtr);
        blas = BLASPtr(otherBlasPtr);

        other.tlas = TLASPtr(myTlasPtr);
        other.blas = BLASPtr(myBlasPtr);
    }

    AccelerationStructureData () : tlas() , blas() {}

    AccelerationStructureData (AccelerationStructureData&& other)
        : AccelerationStructureData()
    {
        swap(other);
    }

    AccelerationStructureData& operator= (AccelerationStructureData&& other)
    {
        swap(other);
        return *this;
    }
};
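// Note: AccelerationStructureData is move-only. swap() above releases and re-wraps the MovePtr
// members, so moving an instance transfers ownership of both the TLAS and the BLAS and leaves
// the moved-from object empty.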
AccelerationStructureData makeDefaultAccelerationStructure (const DeviceInterface& vkd, VkDevice device, VkCommandBuffer cmdBuffer, Allocator& alloc, bool triangles, deUint16 offsetX)
{
    AccelerationStructureData data;

    // Triangle around (offsetX, 0) with depth 5.0.
    const float middleX = static_cast<float>(offsetX);
    const float leftX   = middleX - 0.5f;
    const float rightX  = middleX + 0.5f;
    const float topY    = 0.5f;
    const float bottomY = -0.5f;
    const float depth   = 5.0f;

    std::vector<tcu::Vec3> vertices;

    if (triangles)
    {
        vertices.reserve(3u);
        vertices.emplace_back(middleX, topY, depth);
        vertices.emplace_back(rightX, bottomY, depth);
        vertices.emplace_back(leftX, bottomY, depth);
    }
    else
    {
        vertices.reserve(2u);
        vertices.emplace_back(leftX, bottomY, depth);
        vertices.emplace_back(rightX, topY, depth);
    }

    data.tlas = makeTopLevelAccelerationStructure();
    data.blas = makeBottomLevelAccelerationStructure();

    VkGeometryInstanceFlagsKHR instanceFlags = 0u;
    if (triangles)
        instanceFlags |= VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR;

    data.blas->addGeometry(vertices, triangles, VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR);
    data.blas->createAndBuild(vkd, device, cmdBuffer, alloc);

    de::SharedPtr<BottomLevelAccelerationStructure> blasSharedPtr (data.blas.release());
    data.tlas->setInstanceCount(1u);
    data.tlas->addInstance(blasSharedPtr, identityMatrix3x4, 0u, 0xFFu, 0u, instanceFlags);
    data.tlas->createAndBuild(vkd, device, cmdBuffer, alloc);

    return data;
}

const auto kShaderAccess = (VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT);

struct Resource
{
    VkDescriptorType              descriptorType;
    ResourceType                  resourceType;
    Move<VkSampler>               sampler;
    de::MovePtr<ImageWithMemory>  imageWithMemory;
    Move<VkImageView>             imageView;
    de::MovePtr<BufferWithMemory> bufferWithMemory;
    Move<VkBufferView>            bufferView;
    AccelerationStructureData     asData;
    deUint32                      initialValue;

    Resource (VkDescriptorType descriptorType_, const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue, bool useAABBs, deUint32 initialValue_, deUint32 numElements = 1u)
        : descriptorType   (descriptorType_)
        , resourceType     (toResourceType(descriptorType))
        , sampler          ()
        , imageWithMemory  ()
        , imageView        ()
        , bufferWithMemory ()
        , bufferView       ()
        , asData           ()
        , initialValue     (initialValue_)
    {
        if (numElements != 1u)
            DE_ASSERT(resourceType == ResourceType::BUFFER);

        switch (resourceType)
        {
        case ResourceType::SAMPLER:
            sampler = makeDefaultSampler(vkd, device);
            break;

        case ResourceType::IMAGE:
            imageWithMemory = makeDefaultImage(vkd, device, alloc);
            imageView       = makeDefaultImageView(vkd, device, imageWithMemory->get());
            break;

        case ResourceType::COMBINED_IMAGE_SAMPLER:
            sampler         = makeDefaultSampler(vkd, device);
            imageWithMemory = makeDefaultImage(vkd, device, alloc);
            imageView       = makeDefaultImageView(vkd, device, imageWithMemory->get());
            break;

        case ResourceType::BUFFER:
            bufferWithMemory = makeDefaultBuffer(vkd, device, alloc, numElements);
            break;

        case ResourceType::BUFFER_VIEW:
            bufferWithMemory = makeDefaultBuffer(vkd, device, alloc);
            bufferView       = makeDefaultBufferView(vkd, device, bufferWithMemory->get());
            break;

        case ResourceType::ACCELERATION_STRUCTURE:
            {
                const auto cmdPool      = makeCommandPool(vkd, device, qIndex);
                const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
                const auto cmdBuffer    = cmdBufferPtr.get();
                const bool triangles    = !useAABBs;

                beginCommandBuffer(vkd, cmdBuffer);
                asData = makeDefaultAccelerationStructure(vkd, device, cmdBuffer, alloc, triangles, getAccelerationStructureOffsetX(initialValue));
                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);
            }
            break;

        default:
            DE_ASSERT(false);
            break;
        }

        if (imageWithMemory || bufferWithMemory)
        {
            const auto cmdPool      = makeCommandPool(vkd, device, qIndex);
            const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
            const auto cmdBuffer    = cmdBufferPtr.get();

            if (imageWithMemory)
            {
                // Prepare staging buffer.
                const auto bufferSize                = static_cast<VkDeviceSize>(sizeof(initialValue));
                const VkBufferUsageFlags bufferUsage = (VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
                const auto stagingBufferInfo         = makeBufferCreateInfo(bufferSize, bufferUsage);

                BufferWithMemory stagingBuffer(vkd, device, alloc, stagingBufferInfo, MemoryRequirement::HostVisible);
                auto& bufferAlloc = stagingBuffer.getAllocation();
                void* bufferData  = bufferAlloc.getHostPtr();

                deMemcpy(bufferData, &initialValue, sizeof(initialValue));
                flushAlloc(vkd, device, bufferAlloc);

                beginCommandBuffer(vkd, cmdBuffer);

                // Transition and copy image.
                const auto copyRegion = makeBufferImageCopy(makeExtent3D(1u, 1u, 1u),
                                                            makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u));

                // Switch image to TRANSFER_DST_OPTIMAL before copying data to it.
                const auto subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);

                const auto preTransferBarrier = makeImageMemoryBarrier(
                    0u, VK_ACCESS_TRANSFER_WRITE_BIT,
                    VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                    imageWithMemory->get(), subresourceRange);

                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u,
                    0u, nullptr, 0u, nullptr, 1u, &preTransferBarrier);

                // Copy data to image.
                vkd.cmdCopyBufferToImage(cmdBuffer, stagingBuffer.get(), imageWithMemory->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);

                // Switch image to the GENERAL layout before reading or writing to it from shaders.
                const auto postTransferBarrier = makeImageMemoryBarrier(
                    VK_ACCESS_TRANSFER_WRITE_BIT, kShaderAccess,
                    VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
                    imageWithMemory->get(), subresourceRange);

                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0u,
                    0u, nullptr, 0u, nullptr, 1u, &postTransferBarrier);

                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);
            }
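            // The image case above needs a staging buffer plus two layout transitions (UNDEFINED to
            // TRANSFER_DST_OPTIMAL for the copy, then to GENERAL for shader access); the buffer case
            // below fills the host-visible allocation directly and only needs a host-to-shader barrier.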
            if (bufferWithMemory)
            {
                auto& bufferAlloc = bufferWithMemory->getAllocation();
                void* bufferData  = bufferAlloc.getHostPtr();

                const std::vector<deUint32> bufferValues(numElements, initialValue);
                deMemcpy(bufferData, bufferValues.data(), de::dataSize(bufferValues));
                flushAlloc(vkd, device, bufferAlloc);

                beginCommandBuffer(vkd, cmdBuffer);

                // Make sure host writes happen before shader reads/writes. Note: this barrier is not needed in theory.
                const auto hostToShaderBarrier = makeMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, kShaderAccess);

                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0u,
                    1u, &hostToShaderBarrier, 0u, nullptr, 0u, nullptr);

                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);
            }
        }
    }

    // Remove problematic copy constructor.
    Resource (const Resource&) = delete;

    // Make it movable.
    Resource (Resource&& other) noexcept
        : descriptorType   (other.descriptorType)
        , resourceType     (other.resourceType)
        , sampler          (other.sampler)
        , imageWithMemory  (other.imageWithMemory.release())
        , imageView        (other.imageView)
        , bufferWithMemory (other.bufferWithMemory.release())
        , bufferView       (other.bufferView)
        , asData           (std::move(other.asData))
        , initialValue     (other.initialValue)
    {}

    ~Resource ()
    {}

    WriteInfo makeWriteInfo () const
    {
        using WriteInfoPtr = de::MovePtr<WriteInfo>;

        WriteInfoPtr writeInfo;

        switch (resourceType)
        {
        case ResourceType::SAMPLER:
            {
                const VkDescriptorImageInfo imageInfo = { sampler.get(), DE_NULL, VK_IMAGE_LAYOUT_UNDEFINED };
                writeInfo = WriteInfoPtr (new WriteInfo(imageInfo));
            }
            break;

        case ResourceType::IMAGE:
            {
                const VkDescriptorImageInfo imageInfo = { DE_NULL, imageView.get(), VK_IMAGE_LAYOUT_GENERAL };
                writeInfo = WriteInfoPtr (new WriteInfo(imageInfo));
            }
            break;

        case ResourceType::COMBINED_IMAGE_SAMPLER:
            {
                const VkDescriptorImageInfo imageInfo = { sampler.get(), imageView.get(), VK_IMAGE_LAYOUT_GENERAL };
                writeInfo = WriteInfoPtr (new WriteInfo(imageInfo));
            }
            break;

        case ResourceType::BUFFER:
            {
                const VkDescriptorBufferInfo bufferInfo = { bufferWithMemory->get(), 0ull, static_cast<VkDeviceSize>(sizeof(deUint32)) };
                writeInfo = WriteInfoPtr (new WriteInfo(bufferInfo));
            }
            break;

        case ResourceType::BUFFER_VIEW:
            writeInfo = WriteInfoPtr (new WriteInfo(bufferView.get()));
            break;

        case ResourceType::ACCELERATION_STRUCTURE:
            {
                VkWriteDescriptorSetAccelerationStructureKHR asWrite = initVulkanStructure();
                asWrite.accelerationStructureCount = 1u;
                asWrite.pAccelerationStructures    = asData.tlas.get()->getPtr();
                writeInfo = WriteInfoPtr (new WriteInfo(asWrite));
            }
            break;

        default:
            DE_ASSERT(false);
            break;
        }

        return *writeInfo;
    }
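    // Reads back the value currently stored in the resource (an image texel or a buffer element)
    // through a one-shot command buffer, so the host can verify what the shaders wrote. Returns
    // nothing for resources without image or buffer backing (samplers, acceleration structures).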
    tcu::Maybe<deUint32> getStoredValue (const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue, deUint32 position = 0u) const
    {
        if (position != 0u)
            DE_ASSERT(static_cast<bool>(bufferWithMemory));

        if (imageWithMemory || bufferWithMemory)
        {
            // Command pool and buffer.
            const auto cmdPool      = makeCommandPool(vkd, device, qIndex);
            const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
            const auto cmdBuffer    = cmdBufferPtr.get();

            if (imageWithMemory)
            {
                // Prepare staging buffer.
                deUint32 result;
                const auto bufferSize                = static_cast<VkDeviceSize>(sizeof(result));
                const VkBufferUsageFlags bufferUsage = (VK_BUFFER_USAGE_TRANSFER_DST_BIT);
                const auto stagingBufferInfo         = makeBufferCreateInfo(bufferSize, bufferUsage);

                BufferWithMemory stagingBuffer(vkd, device, alloc, stagingBufferInfo, MemoryRequirement::HostVisible);
                auto& bufferAlloc = stagingBuffer.getAllocation();
                void* bufferData  = bufferAlloc.getHostPtr();

                // Copy image value to staging buffer.
                beginCommandBuffer(vkd, cmdBuffer);

                // Make sure shader accesses happen before transfers and prepare image for transfer.
                const auto colorResourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);

                const auto preTransferBarrier = makeImageMemoryBarrier(
                    kShaderAccess, VK_ACCESS_TRANSFER_READ_BIT,
                    VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                    imageWithMemory->get(), colorResourceRange);

                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u,
                    0u, nullptr, 0u, nullptr, 1u, &preTransferBarrier);

                // Copy image contents to staging buffer.
                const auto copyRegion = makeBufferImageCopy(makeExtent3D(1u, 1u, 1u),
                                                            makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u));
                vkd.cmdCopyImageToBuffer(cmdBuffer, imageWithMemory->get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, stagingBuffer.get(), 1u, &copyRegion);

                // Make sure writes are visible from the host.
                const auto postTransferBarrier = makeMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT);
                vkd.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0u, 1u, &postTransferBarrier, 0u, nullptr, 0u, nullptr);

                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);

                // Get value from staging buffer.
                invalidateAlloc(vkd, device, bufferAlloc);
                deMemcpy(&result, bufferData, sizeof(result));
                return tcu::just(result);
            }

            if (bufferWithMemory)
            {
                auto& bufferAlloc = bufferWithMemory->getAllocation();
                auto bufferData   = reinterpret_cast<const char*>(bufferAlloc.getHostPtr());
                deUint32 result;

                // Make sure shader writes are visible from the host.
                beginCommandBuffer(vkd, cmdBuffer);

                const auto shaderToHostBarrier = makeMemoryBarrier(kShaderAccess, VK_ACCESS_HOST_READ_BIT);
                vkd.cmdPipelineBarrier(
                    cmdBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0u,
                    1u, &shaderToHostBarrier, 0u, nullptr, 0u, nullptr);

                endCommandBuffer(vkd, cmdBuffer);
                submitCommandsAndWait(vkd, device, queue, cmdBuffer);

                invalidateAlloc(vkd, device, bufferAlloc);
                deMemcpy(&result, bufferData + sizeof(deUint32) * static_cast<size_t>(position), sizeof(result));
                return tcu::just(result);
            }
        }

        return tcu::Nothing;
    }
};

struct BindingInterface
{
    // Minimum number of iterations to test all mutable types.
    virtual deUint32 maxTypes () const = 0;

    // Types that will be used by the binding at a given iteration.
    virtual std::vector<VkDescriptorType> typesAtIteration (deUint32 iteration) const = 0;

    // Binding's main type.
    virtual VkDescriptorType mainType () const = 0;

    // Binding's list of mutable types, if present.
    virtual std::vector<VkDescriptorType> mutableTypes () const = 0;

    // Descriptor count in the binding.
    virtual size_t size () const = 0;

    // Is the binding an array binding?
    virtual bool isArray () const = 0;

    // Is the binding an unbounded array?
    virtual bool isUnbounded () const = 0;

    // Will the binding use different descriptor types in a given iteration?
    virtual bool needsAliasing (deUint32 iteration) const
    {
        const auto typesVec = typesAtIteration(iteration);
        std::set<VkDescriptorType> descTypes(begin(typesVec), end(typesVec));
        return (descTypes.size() > 1u);
    }

    // Will the binding need aliasing on any iteration up to a given number?
    virtual bool needsAliasingUpTo (deUint32 numIterations) const
    {
        std::vector<bool> needsAliasingFlags;
        needsAliasingFlags.reserve(numIterations);

        for (deUint32 iter = 0u; iter < numIterations; ++iter)
            needsAliasingFlags.push_back(needsAliasing(iter));

        return std::any_of(begin(needsAliasingFlags), end(needsAliasingFlags), [] (bool f) { return f; });
    }

private:
    virtual bool hasDescriptorType (deUint32 iteration, VkDescriptorType descriptorType) const
    {
        const auto typesVec = typesAtIteration(iteration);
        return (std::find(begin(typesVec), end(typesVec), descriptorType) != end(typesVec));
    }

public:
    // Convert one particular binding to a mutable or non-mutable equivalent binding, returning the equivalent binding.
    virtual de::MovePtr<BindingInterface> toMutable (deUint32 iteration) const = 0;
    virtual de::MovePtr<BindingInterface> toNonMutable (deUint32 iteration) const = 0;

    // Create resources needed to back up this binding.
    virtual std::vector<Resource> createResources (
        const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue,
        deUint32 iteration, bool useAABBs, deUint32 baseValue) const = 0;

    // Get GLSL binding declarations. Note: no array size means no array, if size is < 0 it means unbounded array.
    virtual std::string glslDeclarations (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 inputAttachmentIdx, tcu::Maybe<deInt32> arraySize) const = 0;

    // Get GLSL statements to check this binding.
    virtual std::string glslCheckStatements (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 baseValue, tcu::Maybe<deUint32> arrayIndex, bool usePushConstants) const = 0;
};

// Represents a single binding that will be used in a test.
class SingleBinding : public BindingInterface
{
private:
    VkDescriptorType              type;            // The descriptor type.
    std::vector<VkDescriptorType> mutableTypesVec; // The types that will be used for each iteration of a test if mutable.

public:
    SingleBinding (VkDescriptorType type_, std::vector<VkDescriptorType> mutableTypes_)
        : type            (type_)
        , mutableTypesVec (std::move(mutableTypes_))
    {
        static const auto kForbiddenMutableTypes = getForbiddenMutableTypes();
        const auto kBeginForbidden = begin(kForbiddenMutableTypes);
        const auto kEndForbidden   = end(kForbiddenMutableTypes);

        // For release builds.
        DE_UNREF(kBeginForbidden);
        DE_UNREF(kEndForbidden);

        if (type != VK_DESCRIPTOR_TYPE_MUTABLE_VALVE)
        {
            DE_ASSERT(mutableTypesVec.empty());
        }
        else
        {
            DE_ASSERT(!mutableTypesVec.empty());
            DE_ASSERT(std::none_of(begin(mutableTypesVec), end(mutableTypesVec),
                [&kBeginForbidden, &kEndForbidden] (VkDescriptorType t) -> bool {
                    return std::find(kBeginForbidden, kEndForbidden, t) != kEndForbidden;
                }));
        }
    }

    deUint32 maxTypes () const override
    {
        if (type != VK_DESCRIPTOR_TYPE_MUTABLE_VALVE)
            return 1u;
        const auto vecSize = mutableTypesVec.size();
        DE_ASSERT(vecSize <= std::numeric_limits<deUint32>::max());
        return static_cast<deUint32>(vecSize);
    }

    VkDescriptorType typeAtIteration (deUint32 iteration) const
    {
        return typesAtIteration(iteration)[0];
    }

    std::vector<VkDescriptorType> usedTypes () const
    {
        if (type != VK_DESCRIPTOR_TYPE_MUTABLE_VALVE)
            return std::vector<VkDescriptorType>(1u, type);
        return mutableTypesVec;
    }

    std::vector<VkDescriptorType> typesAtIteration (deUint32 iteration) const override
    {
        const auto typesVec = usedTypes();
        return std::vector<VkDescriptorType>(1u, typesVec[static_cast<size_t>(iteration) % typesVec.size()]);
    }

    VkDescriptorType mainType () const override
    {
        return type;
    }

    std::vector<VkDescriptorType> mutableTypes () const override
    {
        return mutableTypesVec;
    }

    size_t size () const override
    {
        return size_t{1u};
    }

    bool isArray () const override
    {
        return false;
    }

    bool isUnbounded () const override
    {
        return false;
    }

    de::MovePtr<BindingInterface> toMutable (deUint32 iteration) const override
    {
        DE_UNREF(iteration);

        static const auto kMandatoryMutableTypeFlags = toDescriptorTypeFlags(getMandatoryMutableTypes());
        if (type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE)
        {
            const auto descFlags = (toDescriptorTypeFlags(mutableTypesVec) | kMandatoryMutableTypeFlags);
            return de::MovePtr<BindingInterface>(new SingleBinding(type, toDescriptorTypeVector(descFlags)));
        }

        // Make sure it's not a forbidden mutable type.
        static const auto kForbiddenMutableTypes = getForbiddenMutableTypes();
        DE_ASSERT(std::find(begin(kForbiddenMutableTypes), end(kForbiddenMutableTypes), type) == end(kForbiddenMutableTypes));

        // Convert the binding to mutable using a wider set of descriptor types if possible, including the binding type.
        const auto descFlags = (kMandatoryMutableTypeFlags | toDescriptorTypeFlagBit(type));

        return de::MovePtr<BindingInterface>(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, toDescriptorTypeVector(descFlags)));
    }

    de::MovePtr<BindingInterface> toNonMutable (deUint32 iteration) const override
    {
        return de::MovePtr<BindingInterface>(new SingleBinding(typeAtIteration(iteration), std::vector<VkDescriptorType>()));
    }

    std::vector<Resource> createResources (
        const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue,
        deUint32 iteration, bool useAABBs, deUint32 baseValue) const override
    {
        const auto descriptorType = typeAtIteration(iteration);

        std::vector<Resource> resources;
        resources.emplace_back(descriptorType, vkd, device, alloc, qIndex, queue, useAABBs, baseValue);
        return resources;
    }

    std::string glslDeclarations (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 inputAttachmentIdx, tcu::Maybe<deInt32> arraySize) const override
    {
        const auto descriptorType       = typeAtIteration(iteration);
        const std::string arraySuffix   = ((static_cast<bool>(arraySize)) ? ((arraySize.get() < 0) ? "[]" : ("[" + de::toString(arraySize.get()) + "]")) : "");
        const std::string layoutAttribs = "set=" + de::toString(setNum) + ", binding=" + de::toString(bindingNum);
        const std::string bindingSuffix = "_" + de::toString(setNum) + "_" + de::toString(bindingNum);
        const std::string nameSuffix    = bindingSuffix + arraySuffix;
        std::ostringstream declarations;

        declarations << "layout (";

        switch (descriptorType)
        {
        case VK_DESCRIPTOR_TYPE_SAMPLER:
            declarations << layoutAttribs << ") uniform sampler sampler" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            declarations << layoutAttribs << ") uniform usampler2D combinedSampler" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
            declarations << layoutAttribs << ") uniform utexture2D sampledImage" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
            declarations << layoutAttribs << ") uniform uboBlock" << bindingSuffix << " { uint val; } ubo" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
            declarations << layoutAttribs << ") buffer sboBlock" << bindingSuffix << " { uint val; } ssbo" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
            declarations << layoutAttribs << ") uniform utextureBuffer uniformTexel" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            declarations << layoutAttribs << ", r32ui) uniform uimageBuffer storageTexel" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
            declarations << layoutAttribs << ", r32ui) uniform uimage2D storageImage" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            declarations << layoutAttribs << ", input_attachment_index=" << inputAttachmentIdx << ") uniform usubpassInput inputAttachment" << nameSuffix;
            break;

        case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
            declarations << layoutAttribs << ") uniform accelerationStructureEXT accelerationStructure" << nameSuffix;
            break;

        default:
            DE_ASSERT(false);
            break;
        }

        declarations << ";\n";

        return declarations.str();
    }

    std::string glslCheckStatements (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 baseValue_, tcu::Maybe<deUint32> arrayIndex, bool usePushConstants) const override
    {
        const auto descriptorType       = typeAtIteration(iteration);
        const std::string bindingSuffix = "_" + de::toString(setNum) + "_" + de::toString(bindingNum);

        std::string indexSuffix;
        if (arrayIndex)
        {
            indexSuffix = de::toString(arrayIndex.get());
            if (usePushConstants)
                indexSuffix += " + pc.zero";
            indexSuffix = "[" + indexSuffix + "]";
        }

        const std::string nameSuffix         = bindingSuffix + indexSuffix;
        const std::string baseValue          = toHex(baseValue_);
        const std::string externalImageValue = toHex(getExternalSampledImageValue());
        const std::string mask               = toHex(getStoredValueMask());

        std::ostringstream checks;

        // Note: all of these depend on an external anyError uint variable.
        switch (descriptorType)
        {
        case VK_DESCRIPTOR_TYPE_SAMPLER:
            // Note this depends on an "externalSampledImage" binding.
            checks << " {\n";
            checks << " uint readValue = texture(usampler2D(externalSampledImage, sampler" << nameSuffix << "), vec2(0, 0)).r;\n";
            checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << " anyError |= ((readValue == " << externalImageValue << ") ? 0u : 1u);\n";
            //checks << " anyError = readValue;\n";
            checks << " }\n";
            break;

        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            checks << " {\n";
            checks << " uint readValue = texture(combinedSampler" << nameSuffix << ", vec2(0, 0)).r;\n";
            checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << " anyError = readValue;\n";
            checks << " }\n";
            break;

        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
            // Note this depends on an "externalSampler" binding.
            checks << " {\n";
            checks << " uint readValue = texture(usampler2D(sampledImage" << nameSuffix << ", externalSampler), vec2(0, 0)).r;\n";
            checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << " anyError = readValue;\n";
            checks << " }\n";
            break;

        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
            checks << " {\n";
            checks << " uint readValue = ubo" << nameSuffix << ".val;\n";
            checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << " anyError = readValue;\n";
            checks << " }\n";
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
            checks << " {\n";
            checks << " uint readValue = ssbo" << nameSuffix << ".val;\n";
            checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << " anyError = readValue;\n";
            // Check writes.
            checks << " ssbo" << nameSuffix << ".val = (readValue | " << mask << ");\n";
            checks << " }\n";
            break;

        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
            checks << " {\n";
            checks << " uint readValue = texelFetch(uniformTexel" << nameSuffix << ", 0).x;\n";
            checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << " anyError = readValue;\n";
            checks << " }\n";
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            checks << " {\n";
            checks << " uint readValue = imageLoad(storageTexel" << nameSuffix << ", 0).x;\n";
            checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << " anyError = readValue;\n";
            checks << " readValue |= " << mask << ";\n";
            // Check writes.
            checks << " imageStore(storageTexel" << nameSuffix << ", 0, uvec4(readValue, 0, 0, 0));\n";
            checks << " }\n";
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
            checks << " {\n";
            checks << " uint readValue = imageLoad(storageImage" << nameSuffix << ", ivec2(0, 0)).x;\n";
            checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << " anyError = readValue;\n";
            checks << " readValue |= " << mask << ";\n";
            // Check writes.
            checks << " imageStore(storageImage" << nameSuffix << ", ivec2(0, 0), uvec4(readValue, 0, 0, 0));\n";
            checks << " }\n";
            break;

        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            checks << " {\n";
            checks << " uint readValue = subpassLoad(inputAttachment" << nameSuffix << ").x;\n";
            checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
            checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
            //checks << " anyError = readValue;\n";
            checks << " }\n";
            break;

        case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
            checks << " {\n";
            checks << " const uint cullMask = 0xFF;\n";
            checks << " const vec3 origin = vec3(" << getAccelerationStructureOffsetX(baseValue_) << ".0, 0.0, 0.0);\n";
            checks << " const vec3 direction = vec3(0.0, 0.0, 1.0);\n";
            checks << " const float tmin = 1.0;\n";
            checks << " const float tmax = 10.0;\n";
            checks << " uint candidateFound = 0u;\n";
            checks << " rayQueryEXT rq;\n";
            checks << " rayQueryInitializeEXT(rq, accelerationStructure" << nameSuffix << ", gl_RayFlagsNoneEXT, cullMask, origin, tmin, direction, tmax);\n";
            checks << " while (rayQueryProceedEXT(rq)) {\n";
            checks << " const uint candidateType = rayQueryGetIntersectionTypeEXT(rq, false);\n";
            checks << " if (candidateType == gl_RayQueryCandidateIntersectionTriangleEXT || candidateType == gl_RayQueryCandidateIntersectionAABBEXT) {\n";
            checks << " candidateFound = 1u;\n";
            checks << " }\n";
            checks << " }\n";
            checks << " anyError |= ((candidateFound == 1u) ? 0u : 1u);\n";
            checks << " }\n";
            break;

        default:
            DE_ASSERT(false);
            break;
        }

        return checks.str();
    }
};
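// Note on iteration behavior: for a mutable SingleBinding with types {SAMPLED_IMAGE, STORAGE_IMAGE},
// typeAtIteration(0) is SAMPLED_IMAGE, typeAtIteration(1) is STORAGE_IMAGE and typeAtIteration(2)
// wraps around to SAMPLED_IMAGE again, since typesAtIteration() indexes usedTypes() modulo its size.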
// Represents an array of bindings. Individual bindings are stored as SingleBindings because each one of them may take a different
// type in each iteration (i.e. they can all have different descriptor type vectors).
class ArrayBinding : public BindingInterface
{
private:
    bool                       unbounded;
    std::vector<SingleBinding> bindings;

public:
    ArrayBinding (bool unbounded_, std::vector<SingleBinding> bindings_)
        : unbounded (unbounded_)
        , bindings  (std::move(bindings_))
    {
        // We need to check all single bindings have the same effective type, even if mutable descriptors have different orders.
        DE_ASSERT(!bindings.empty());

        std::set<VkDescriptorType>    basicTypes;
        std::set<DescriptorTypeFlags> bindingTypes;

        for (const auto& b : bindings)
        {
            basicTypes.insert(b.mainType());
            bindingTypes.insert(toDescriptorTypeFlags(b.usedTypes()));
        }

        DE_ASSERT(basicTypes.size() == 1u);
        DE_ASSERT(bindingTypes.size() == 1u);

        // For release builds.
        DE_UNREF(basicTypes);
        DE_UNREF(bindingTypes);
    }

    deUint32 maxTypes () const override
    {
        // Each binding may have the same effective type but a different number of iterations due to repeated types.
        std::vector<size_t> bindingSizes;
        bindingSizes.reserve(bindings.size());

        std::transform(begin(bindings), end(bindings), std::back_inserter(bindingSizes),
            [] (const SingleBinding& b) { return b.usedTypes().size(); });

        const auto maxElement = std::max_element(begin(bindingSizes), end(bindingSizes));
        DE_ASSERT(maxElement != end(bindingSizes));
        DE_ASSERT(*maxElement <= std::numeric_limits<deUint32>::max());
        return static_cast<deUint32>(*maxElement);
    }

    std::vector<VkDescriptorType> typesAtIteration (deUint32 iteration) const override
    {
        std::vector<VkDescriptorType> result;
        result.reserve(bindings.size());

        for (const auto& b : bindings)
            result.push_back(b.typeAtIteration(iteration));

        return result;
    }

    VkDescriptorType mainType () const override
    {
        return bindings[0].mainType();
    }

    std::vector<VkDescriptorType> mutableTypes () const override
    {
        return bindings[0].mutableTypes();
    }

    size_t size () const override
    {
        return bindings.size();
    }

    bool isArray () const override
    {
        return true;
    }

    bool isUnbounded () const override
    {
        return unbounded;
    }

    de::MovePtr<BindingInterface> toMutable (deUint32 iteration) const override
    {
        // Replicate the first binding once converted, as all are equivalent.
        const auto firstBindingPtr = bindings[0].toMutable(iteration);
        const auto firstBinding    = *dynamic_cast<SingleBinding*>(firstBindingPtr.get());
        const std::vector<SingleBinding> newBindings (bindings.size(), firstBinding);

        return de::MovePtr<BindingInterface>(new ArrayBinding(unbounded, newBindings));
    }

    de::MovePtr<BindingInterface> toNonMutable (deUint32 iteration) const override
    {
        // Make sure this binding can be converted to nonmutable for a given iteration.
        DE_ASSERT(!needsAliasing(iteration));

        // We could use each SingleBinding's toNonMutable(), but this is the same.
        const auto descType = bindings[0].typeAtIteration(iteration);
        const SingleBinding firstBinding (descType, std::vector<VkDescriptorType>());
        const std::vector<SingleBinding> newBindings (bindings.size(), firstBinding);

        return de::MovePtr<BindingInterface>(new ArrayBinding(unbounded, newBindings));
    }

    std::vector<Resource> createResources (
        const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue,
        deUint32 iteration, bool useAABBs, deUint32 baseValue) const override
    {
        std::vector<Resource> resources;
        const auto numBindings = static_cast<deUint32>(bindings.size());

        for (deUint32 i = 0u; i < numBindings; ++i)
        {
            auto resourceVec = bindings[i].createResources(vkd, device, alloc, qIndex, queue, iteration, useAABBs, baseValue + i);
            resources.emplace_back(std::move(resourceVec[0]));
        }

        return resources;
    }

    // We will ignore the array size parameter.
    std::string glslDeclarations (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 inputAttachmentIdx, tcu::Maybe<deInt32> arraySize) const override
    {
        const auto descriptorCount = bindings.size();
        const auto arraySizeVal    = (isUnbounded() ? tcu::just(deInt32{-1}) : tcu::just(static_cast<deInt32>(descriptorCount)));

        DE_UNREF(arraySize);
        DE_ASSERT(descriptorCount < static_cast<size_t>(std::numeric_limits<deInt32>::max()));

        // Maybe a single declaration is enough.
        if (!needsAliasing(iteration))
            return bindings[0].glslDeclarations(iteration, setNum, bindingNum, inputAttachmentIdx, arraySizeVal);

        // Aliasing needed. Avoid reusing types.
        const auto descriptorTypes = typesAtIteration(iteration);
        std::set<VkDescriptorType> usedTypes;
        std::ostringstream declarations;

        for (size_t descriptorIdx = 0u; descriptorIdx < descriptorCount; ++descriptorIdx)
        {
            const auto& descriptorType = descriptorTypes[descriptorIdx];
            if (usedTypes.count(descriptorType) > 0)
                continue;

            usedTypes.insert(descriptorType);
            declarations << bindings[descriptorIdx].glslDeclarations(iteration, setNum, bindingNum, inputAttachmentIdx, arraySizeVal);
        }

        return declarations.str();
    }

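    // Each array element below is checked against baseValue_ + its index which, for indices below
    // 256, equals getDescriptorNumericValue() with that descriptor index, matching the values used
    // by createResources() above.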
    std::string glslCheckStatements (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 baseValue_, tcu::Maybe<deUint32> arrayIndex, bool usePushConstants) const override
    {
        DE_ASSERT(!arrayIndex);
        DE_UNREF(arrayIndex); // For release builds.

        std::ostringstream checks;
        const auto numDescriptors = static_cast<deUint32>(bindings.size());

        for (deUint32 descriptorIdx = 0u; descriptorIdx < numDescriptors; ++descriptorIdx)
        {
            const auto& binding = bindings[descriptorIdx];
            checks << binding.glslCheckStatements(iteration, setNum, bindingNum, baseValue_ + descriptorIdx, tcu::just(descriptorIdx), usePushConstants);
        }

        return checks.str();
    }
};

class DescriptorSet;

using DescriptorSetPtr = de::SharedPtr<DescriptorSet>;

class DescriptorSet
{
public:
    using BindingInterfacePtr = de::MovePtr<BindingInterface>;
    using BindingPtrVector    = std::vector<BindingInterfacePtr>;

private:
    BindingPtrVector bindings;

public:
    explicit DescriptorSet (BindingPtrVector& bindings_)
        : bindings(std::move(bindings_))
    {
        DE_ASSERT(!bindings.empty());
    }

    size_t numBindings () const
    {
        return bindings.size();
    }

    const BindingInterface* getBinding (size_t bindingIdx) const
    {
        return bindings.at(bindingIdx).get();
    }

    // Maximum number of descriptor types used by any binding in the set.
    deUint32 maxTypes () const
    {
        std::vector<deUint32> maxSizes;
        maxSizes.reserve(bindings.size());

        std::transform(begin(bindings), end(bindings), std::back_inserter(maxSizes),
            [] (const BindingInterfacePtr& b) { return b->maxTypes(); });

        const auto maxElement = std::max_element(begin(maxSizes), end(maxSizes));
        DE_ASSERT(maxElement != end(maxSizes));
        return *maxElement;
    }

    // Create another descriptor set that can be the source for copies when setting descriptor values.
    DescriptorSetPtr genSourceSet (SourceSetStrategy strategy, deUint32 iteration) const
    {
        BindingPtrVector newBindings;
        for (const auto& b : bindings)
        {
            if (strategy == SourceSetStrategy::MUTABLE)
                newBindings.push_back(b->toMutable(iteration));
            else
                newBindings.push_back(b->toNonMutable(iteration));
        }

        return DescriptorSetPtr(new DescriptorSet(newBindings));
    }

    // Makes a descriptor pool that can be used when allocating descriptors for this set.
    Move<VkDescriptorPool> makeDescriptorPool (const DeviceInterface& vkd, VkDevice device, PoolMutableStrategy strategy, VkDescriptorPoolCreateFlags flags) const
    {
        std::vector<VkDescriptorPoolSize>             poolSizes;
        std::vector<std::vector<VkDescriptorType>>    mutableTypesVec;
        std::vector<VkMutableDescriptorTypeListVALVE> mutableTypeLists;

        // Make vector element addresses stable.
        const auto bindingCount = numBindings();
        poolSizes.reserve(bindingCount);
        mutableTypesVec.reserve(bindingCount);
        mutableTypeLists.reserve(bindingCount);

        for (const auto& b : bindings)
        {
            const auto mainType = b->mainType();
            const VkDescriptorPoolSize poolSize = {
                mainType,
                static_cast<deUint32>(b->size()),
            };
            poolSizes.push_back(poolSize);

            if (strategy == PoolMutableStrategy::KEEP_TYPES || strategy == PoolMutableStrategy::EXPAND_TYPES)
            {
                if (mainType == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE)
                {
                    if (strategy == PoolMutableStrategy::KEEP_TYPES)
                    {
                        mutableTypesVec.emplace_back(b->mutableTypes());
                    }
                    else
                    {
                        // Expand the type list with the mandatory types.
                        static const auto mandatoryTypesFlags = toDescriptorTypeFlags(getMandatoryMutableTypes());
                        const auto bindingTypes = toDescriptorTypeVector(mandatoryTypesFlags | toDescriptorTypeFlags(b->mutableTypes()));

                        mutableTypesVec.emplace_back(bindingTypes);
                    }

                    const auto& lastVec = mutableTypesVec.back();
                    const VkMutableDescriptorTypeListVALVE typeList = { static_cast<deUint32>(lastVec.size()), de::dataOrNull(lastVec) };
                    mutableTypeLists.push_back(typeList);
                }
                else
                {
                    const VkMutableDescriptorTypeListVALVE typeList = { 0u, nullptr };
                    mutableTypeLists.push_back(typeList);
                }
            }
            else if (strategy == PoolMutableStrategy::NO_TYPES)
                ; // Do nothing, we will not use any type list.
            else
                DE_ASSERT(false);
        }

        VkDescriptorPoolCreateInfo poolCreateInfo = initVulkanStructure();

        poolCreateInfo.maxSets       = 1u;
        poolCreateInfo.flags         = flags;
        poolCreateInfo.poolSizeCount = static_cast<deUint32>(poolSizes.size());
        poolCreateInfo.pPoolSizes    = de::dataOrNull(poolSizes);

        VkMutableDescriptorTypeCreateInfoVALVE mutableInfo = initVulkanStructure();

        if (strategy == PoolMutableStrategy::KEEP_TYPES || strategy == PoolMutableStrategy::EXPAND_TYPES)
        {
            mutableInfo.mutableDescriptorTypeListCount = static_cast<deUint32>(mutableTypeLists.size());
            mutableInfo.pMutableDescriptorTypeLists    = de::dataOrNull(mutableTypeLists);
            poolCreateInfo.pNext = &mutableInfo;
        }

        return createDescriptorPool(vkd, device, &poolCreateInfo);
    }

private:
    // Building the descriptor set layout create info structure is cumbersome, so we'll reuse the same procedure to check support
    // and create the layout. This structure contains the result. "supported" is created as an enum to avoid the Move<> to bool
    // conversion cast in the constructors.
    struct DescriptorSetLayoutResult
    {
        enum class LayoutSupported { NO = 0, YES };

        LayoutSupported             supported;
        Move<VkDescriptorSetLayout> layout;

        explicit DescriptorSetLayoutResult (Move<VkDescriptorSetLayout>&& layout_)
            : supported (LayoutSupported::YES)
            , layout    (layout_)
        {}

        explicit DescriptorSetLayoutResult (LayoutSupported supported_)
            : supported (supported_)
            , layout    ()
        {}
    };

    DescriptorSetLayoutResult makeOrCheckDescriptorSetLayout (bool checkOnly, const DeviceInterface& vkd, VkDevice device, VkShaderStageFlags stageFlags, VkDescriptorSetLayoutCreateFlags createFlags) const
    {
        const auto numIterations = maxTypes();
        std::vector<VkDescriptorSetLayoutBinding>     bindingsVec;
        std::vector<std::vector<VkDescriptorType>>    mutableTypesVec;
        std::vector<VkMutableDescriptorTypeListVALVE> mutableTypeLists;

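        // Note: each VkMutableDescriptorTypeListVALVE built below stores a pointer into an element
        // of mutableTypesVec, so the vectors are reserved up front to guarantee no reallocation
        // invalidates those pointers.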
1553 const auto bindingCount = numBindings();
1554 bindingsVec.reserve(bindingCount);
1555 mutableTypesVec.reserve(bindingCount);
1556 mutableTypeLists.reserve(bindingCount);
1557
1558 for (size_t bindingIdx = 0u; bindingIdx < bindings.size(); ++bindingIdx)
1559 {
1560 const auto& binding = bindings[bindingIdx];
1561 const auto mainType = binding->mainType();
1562
1563 const VkDescriptorSetLayoutBinding layoutBinding = {
1564 static_cast<deUint32>(bindingIdx), // deUint32 binding;
1565 mainType, // VkDescriptorType descriptorType;
1566 static_cast<deUint32>(binding->size()), // deUint32 descriptorCount;
1567 stageFlags, // VkShaderStageFlags stageFlags;
1568 nullptr, // const VkSampler* pImmutableSamplers;
1569 };
1570 bindingsVec.push_back(layoutBinding);
1571
1572 // This list may be empty for non-mutable types, which is fine.
1573 mutableTypesVec.push_back(binding->mutableTypes());
1574 const auto& lastVec = mutableTypesVec.back();
1575
1576 const VkMutableDescriptorTypeListVALVE typeList = {
1577 static_cast<deUint32>(lastVec.size()), // deUint32 descriptorTypeCount;
1578 de::dataOrNull(lastVec), // const VkDescriptorType* pDescriptorTypes;
1579 };
1580 mutableTypeLists.push_back(typeList);
1581 }
1582
1583 // Make sure to include the variable descriptor count and/or update after bind binding flags.
1584 const bool updateAfterBind = ((createFlags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT) != 0u);
1585 bool lastIsUnbounded = false;
1586 bool aliasingNeeded = false;
1587 std::vector<bool> bindingNeedsAliasing(bindings.size(), false);
1588
1589 for (size_t bindingIdx = 0; bindingIdx < bindings.size(); ++bindingIdx)
1590 {
1591 if (bindingIdx < bindings.size() - 1)
1592 DE_ASSERT(!bindings[bindingIdx]->isUnbounded());
1593 else
1594 lastIsUnbounded = bindings[bindingIdx]->isUnbounded();
1595
1596 if (bindings[bindingIdx]->needsAliasingUpTo(numIterations))
1597 {
1598 bindingNeedsAliasing[bindingIdx] = true;
1599 aliasingNeeded = true;
1600 }
1601 }
1602
1603 using FlagsCreateInfoPtr = de::MovePtr<VkDescriptorSetLayoutBindingFlagsCreateInfo>;
1604 using BindingFlagsVecPtr = de::MovePtr<std::vector<VkDescriptorBindingFlags>>;
1605
1606 FlagsCreateInfoPtr flagsCreateInfo;
1607 BindingFlagsVecPtr bindingFlagsVec;
1608
1609 if (updateAfterBind || lastIsUnbounded || aliasingNeeded)
1610 {
1611 flagsCreateInfo = FlagsCreateInfoPtr(new VkDescriptorSetLayoutBindingFlagsCreateInfo);
1612 *flagsCreateInfo = initVulkanStructure();
1613
1614 bindingFlagsVec = BindingFlagsVecPtr(new std::vector<VkDescriptorBindingFlags>(bindingsVec.size(), (updateAfterBind ?
VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT : 0))); 1615 if (lastIsUnbounded) 1616 bindingFlagsVec->back() |= VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT; 1617 1618 for (size_t bindingIdx = 0; bindingIdx < bindings.size(); ++bindingIdx) 1619 { 1620 if (bindingNeedsAliasing[bindingIdx]) 1621 bindingFlagsVec->at(bindingIdx) |= VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT; 1622 } 1623 1624 flagsCreateInfo->bindingCount = static_cast<deUint32>(bindingFlagsVec->size()); 1625 flagsCreateInfo->pBindingFlags = de::dataOrNull(*bindingFlagsVec); 1626 } 1627 1628 const VkMutableDescriptorTypeCreateInfoVALVE createInfoValve = { 1629 VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE, 1630 flagsCreateInfo.get(), 1631 static_cast<deUint32>(mutableTypeLists.size()), 1632 de::dataOrNull(mutableTypeLists), 1633 }; 1634 1635 const VkDescriptorSetLayoutCreateInfo layoutCreateInfo = { 1636 VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, // VkStructureType sType; 1637 &createInfoValve, // const void* pNext; 1638 createFlags, // VkDescriptorSetLayoutCreateFlags flags; 1639 static_cast<deUint32>(bindingsVec.size()), // deUint32 bindingCount; 1640 de::dataOrNull(bindingsVec), // const VkDescriptorSetLayoutBinding* pBindings; 1641 }; 1642 1643 if (checkOnly) 1644 { 1645 VkDescriptorSetLayoutSupport support = initVulkanStructure(); 1646 vkd.getDescriptorSetLayoutSupport(device, &layoutCreateInfo, &support); 1647 DescriptorSetLayoutResult result((support.supported == VK_TRUE) ? DescriptorSetLayoutResult::LayoutSupported::YES 1648 : DescriptorSetLayoutResult::LayoutSupported::NO); 1649 return result; 1650 } 1651 else 1652 { 1653 DescriptorSetLayoutResult result(createDescriptorSetLayout(vkd, device, &layoutCreateInfo)); 1654 return result; 1655 } 1656 } 1657 1658public: 1659 Move<VkDescriptorSetLayout> makeDescriptorSetLayout (const DeviceInterface& vkd, VkDevice device, VkShaderStageFlags stageFlags, VkDescriptorSetLayoutCreateFlags createFlags) const 1660 { 1661 return makeOrCheckDescriptorSetLayout(false /*checkOnly*/, vkd, device, stageFlags, createFlags).layout; 1662 } 1663 1664 bool checkDescriptorSetLayout (const DeviceInterface& vkd, VkDevice device, VkShaderStageFlags stageFlags, VkDescriptorSetLayoutCreateFlags createFlags) const 1665 { 1666 return (makeOrCheckDescriptorSetLayout(true /*checkOnly*/, vkd, device, stageFlags, createFlags).supported == DescriptorSetLayoutResult::LayoutSupported::YES); 1667 } 1668 1669 size_t numDescriptors () const 1670 { 1671 size_t total = 0; 1672 for (const auto& b : bindings) 1673 total += b->size(); 1674 return total; 1675 } 1676 1677 std::vector<Resource> createResources (const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue, deUint32 iteration, bool useAABBs) const 1678 { 1679 // Create resources for each binding. 1680 std::vector<Resource> result; 1681 result.reserve(numDescriptors()); 1682 1683 const auto bindingsCount = static_cast<deUint32>(bindings.size()); 1684 1685 for (deUint32 bindingIdx = 0u; bindingIdx < bindingsCount; ++bindingIdx) 1686 { 1687 const auto& binding = bindings[bindingIdx]; 1688 auto bindingResources = binding->createResources(vkd, device, alloc, qIndex, queue, iteration, useAABBs, getDescriptorNumericValue(iteration, bindingIdx)); 1689 1690 for (auto& resource : bindingResources) 1691 result.emplace_back(std::move(resource)); 1692 } 1693 1694 return result; 1695 } 1696 1697 // Updates a descriptor set with the given resources. 
Note: the set must have been created with a layout that's compatible with this object. 1698 void updateDescriptorSet (const DeviceInterface& vkd, VkDevice device, VkDescriptorSet set, deUint32 iteration, const std::vector<Resource>& resources) const 1699 { 1700 // Make sure the number of resources is correct. 1701 const auto numResources = resources.size(); 1702 DE_ASSERT(numDescriptors() == numResources); 1703 1704 std::vector<VkWriteDescriptorSet> descriptorWrites; 1705 descriptorWrites.reserve(numResources); 1706 1707 std::vector<VkDescriptorImageInfo> imageInfoVec; 1708 std::vector<VkDescriptorBufferInfo> bufferInfoVec; 1709 std::vector<VkBufferView> bufferViewVec; 1710 std::vector<VkWriteDescriptorSetAccelerationStructureKHR> asWriteVec; 1711 size_t resourceIdx = 0; 1712 1713 // We'll be storing pointers to elements of these vectors as we're appending elements, so we need their addresses to be stable. 1714 imageInfoVec.reserve(numResources); 1715 bufferInfoVec.reserve(numResources); 1716 bufferViewVec.reserve(numResources); 1717 asWriteVec.reserve(numResources); 1718 1719 for (size_t bindingIdx = 0; bindingIdx < bindings.size(); ++bindingIdx) 1720 { 1721 const auto& binding = bindings[bindingIdx]; 1722 const auto descriptorTypes = binding->typesAtIteration(iteration); 1723 1724 for (size_t descriptorIdx = 0; descriptorIdx < binding->size(); ++descriptorIdx) 1725 { 1726 // Make sure the resource type matches the expected value. 1727 const auto& resource = resources[resourceIdx]; 1728 const auto& descriptorType = descriptorTypes[descriptorIdx]; 1729 1730 DE_ASSERT(resource.descriptorType == descriptorType); 1731 1732 // Obtain the descriptor write info for the resource. 1733 const auto writeInfo = resource.makeWriteInfo(); 1734 1735 switch (writeInfo.writeType) 1736 { 1737 case WriteType::IMAGE_INFO: imageInfoVec.push_back(writeInfo.imageInfo); break; 1738 case WriteType::BUFFER_INFO: bufferInfoVec.push_back(writeInfo.bufferInfo); break; 1739 case WriteType::BUFFER_VIEW: bufferViewVec.push_back(writeInfo.bufferView); break; 1740 case WriteType::ACCELERATION_STRUCTURE_INFO: asWriteVec.push_back(writeInfo.asInfo); break; 1741 default: DE_ASSERT(false); break; 1742 } 1743 1744 // Add a new VkWriteDescriptorSet struct or extend the last one with more info. This helps us exercise different implementation code paths. 1745 bool extended = false; 1746 1747 if (!descriptorWrites.empty() && descriptorIdx > 0) 1748 { 1749 auto& last = descriptorWrites.back(); 1750 if (last.dstSet == set /* this should always be true */ && 1751 last.dstBinding == bindingIdx && (last.dstArrayElement + last.descriptorCount) == descriptorIdx && 1752 last.descriptorType == descriptorType && 1753 writeInfo.writeType != WriteType::ACCELERATION_STRUCTURE_INFO) 1754 { 1755 // The new write should be in the same vector (imageInfoVec, bufferInfoVec or bufferViewVec) so increasing the count works. 1756 ++last.descriptorCount; 1757 extended = true; 1758 } 1759 } 1760 1761 if (!extended) 1762 { 1763 const VkWriteDescriptorSet write = { 1764 VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, 1765 ((writeInfo.writeType == WriteType::ACCELERATION_STRUCTURE_INFO) ? &asWriteVec.back() : nullptr), 1766 set, 1767 static_cast<deUint32>(bindingIdx), 1768 static_cast<deUint32>(descriptorIdx), 1769 1u, 1770 descriptorType, 1771 (writeInfo.writeType == WriteType::IMAGE_INFO ? &imageInfoVec.back() : nullptr), 1772 (writeInfo.writeType == WriteType::BUFFER_INFO ? &bufferInfoVec.back() : nullptr), 1773 (writeInfo.writeType == WriteType::BUFFER_VIEW ? 
&bufferViewVec.back() : nullptr), 1774 }; 1775 descriptorWrites.push_back(write); 1776 } 1777 1778 ++resourceIdx; 1779 } 1780 } 1781 1782 // Finally, update descriptor set with all the writes. 1783 vkd.updateDescriptorSets(device, static_cast<deUint32>(descriptorWrites.size()), de::dataOrNull(descriptorWrites), 0u, nullptr); 1784 } 1785 1786 // Copies between descriptor sets. They must be compatible and related to this set. 1787 void copyDescriptorSet (const DeviceInterface& vkd, VkDevice device, VkDescriptorSet srcSet, VkDescriptorSet dstSet) const 1788 { 1789 std::vector<VkCopyDescriptorSet> copies; 1790 1791 for (size_t bindingIdx = 0; bindingIdx < numBindings(); ++bindingIdx) 1792 { 1793 const auto& binding = getBinding(bindingIdx); 1794 const auto bindingNumber = static_cast<deUint32>(bindingIdx); 1795 const auto descriptorCount = static_cast<deUint32>(binding->size()); 1796 1797 const VkCopyDescriptorSet copy = 1798 { 1799 VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET, 1800 nullptr, 1801 // set, binding, array element. 1802 srcSet, bindingNumber, 0u, 1803 dstSet, bindingNumber, 0u, 1804 descriptorCount, 1805 }; 1806 1807 copies.push_back(copy); 1808 } 1809 1810 vkd.updateDescriptorSets(device, 0u, nullptr, static_cast<deUint32>(copies.size()), de::dataOrNull(copies)); 1811 } 1812 1813 // Does any binding in the set need aliasing in a given iteration? 1814 bool needsAliasing (deUint32 iteration) const 1815 { 1816 std::vector<bool> aliasingNeededFlags; 1817 aliasingNeededFlags.reserve(bindings.size()); 1818 1819 std::transform(begin(bindings), end(bindings), std::back_inserter(aliasingNeededFlags), 1820 [iteration] (const BindingInterfacePtr& b) { return b->needsAliasing(iteration); }); 1821 return std::any_of(begin(aliasingNeededFlags), end(aliasingNeededFlags), [] (bool f) { return f; }); 1822 } 1823 1824 // Does any binding in the set need aliasing in any iteration? 1825 bool needsAnyAliasing () const 1826 { 1827 const auto numIterations = maxTypes(); 1828 std::vector<bool> aliasingNeededFlags (numIterations, false); 1829 1830 for (deUint32 iteration = 0; iteration < numIterations; ++iteration) 1831 aliasingNeededFlags[iteration] = needsAliasing(iteration); 1832 1833 return std::any_of(begin(aliasingNeededFlags), end(aliasingNeededFlags), [] (bool f) { return f; }); 1834 } 1835 1836 // Is the last binding an unbounded array? 1837 bool lastBindingIsUnbounded () const 1838 { 1839 if (bindings.empty()) 1840 return false; 1841 return bindings.back()->isUnbounded(); 1842 } 1843 1844 // Get the variable descriptor count for the last binding if any. 1845 tcu::Maybe<deUint32> getVariableDescriptorCount () const 1846 { 1847 if (lastBindingIsUnbounded()) 1848 return tcu::just(static_cast<deUint32>(bindings.back()->size())); 1849 return tcu::Nothing; 1850 } 1851 1852 // Check if the set contains a descriptor type of the given type at the given iteration. 1853 bool containsTypeAtIteration (VkDescriptorType descriptorType, deUint32 iteration) const 1854 { 1855 return std::any_of(begin(bindings), end(bindings), 1856 [descriptorType, iteration] (const BindingInterfacePtr& b) { 1857 const auto types = b->typesAtIteration(iteration); 1858 return de::contains(begin(types), end(types), descriptorType); 1859 }); 1860 } 1861 1862 // Is any binding an array? 
1863 bool hasArrays () const 1864 { 1865 return std::any_of(begin(bindings), end(bindings), [] (const BindingInterfacePtr& b) { return b->isArray(); }); 1866 } 1867}; 1868 1869enum class UpdateType 1870{ 1871 WRITE = 0, 1872 COPY, 1873}; 1874 1875enum class SourceSetType 1876{ 1877 NORMAL = 0, 1878 HOST_ONLY, 1879 NO_SOURCE, 1880}; 1881 1882enum class UpdateMoment 1883{ 1884 NORMAL = 0, 1885 UPDATE_AFTER_BIND, 1886}; 1887 1888enum class TestingStage 1889{ 1890 COMPUTE = 0, 1891 VERTEX, 1892 TESS_EVAL, 1893 TESS_CONTROL, 1894 GEOMETRY, 1895 FRAGMENT, 1896 RAY_GEN, 1897 INTERSECTION, 1898 ANY_HIT, 1899 CLOSEST_HIT, 1900 MISS, 1901 CALLABLE, 1902}; 1903 1904enum class ArrayAccessType 1905{ 1906 CONSTANT = 0, 1907 PUSH_CONSTANT, 1908 NO_ARRAY, 1909}; 1910 1911// Are we testing a ray tracing pipeline stage? 1912bool isRayTracingStage (TestingStage stage) 1913{ 1914 switch (stage) 1915 { 1916 case TestingStage::RAY_GEN: 1917 case TestingStage::INTERSECTION: 1918 case TestingStage::ANY_HIT: 1919 case TestingStage::CLOSEST_HIT: 1920 case TestingStage::MISS: 1921 case TestingStage::CALLABLE: 1922 return true; 1923 default: 1924 break; 1925 } 1926 1927 return false; 1928} 1929 1930struct TestParams 1931{ 1932 DescriptorSetPtr descriptorSet; 1933 UpdateType updateType; 1934 SourceSetStrategy sourceSetStrategy; 1935 SourceSetType sourceSetType; 1936 PoolMutableStrategy poolMutableStrategy; 1937 UpdateMoment updateMoment; 1938 ArrayAccessType arrayAccessType; 1939 TestingStage testingStage; 1940 1941 VkShaderStageFlags getStageFlags () const 1942 { 1943 VkShaderStageFlags flags = 0u; 1944 1945 switch (testingStage) 1946 { 1947 case TestingStage::COMPUTE: flags |= VK_SHADER_STAGE_COMPUTE_BIT; break; 1948 case TestingStage::VERTEX: flags |= VK_SHADER_STAGE_VERTEX_BIT; break; 1949 case TestingStage::TESS_EVAL: flags |= VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT; break; 1950 case TestingStage::TESS_CONTROL: flags |= VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT; break; 1951 case TestingStage::GEOMETRY: flags |= VK_SHADER_STAGE_GEOMETRY_BIT; break; 1952 case TestingStage::FRAGMENT: flags |= VK_SHADER_STAGE_FRAGMENT_BIT; break; 1953 case TestingStage::RAY_GEN: flags |= VK_SHADER_STAGE_RAYGEN_BIT_KHR; break; 1954 case TestingStage::INTERSECTION: flags |= VK_SHADER_STAGE_INTERSECTION_BIT_KHR; break; 1955 case TestingStage::ANY_HIT: flags |= VK_SHADER_STAGE_ANY_HIT_BIT_KHR; break; 1956 case TestingStage::CLOSEST_HIT: flags |= VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR; break; 1957 case TestingStage::MISS: flags |= VK_SHADER_STAGE_MISS_BIT_KHR; break; 1958 case TestingStage::CALLABLE: flags |= VK_SHADER_STAGE_CALLABLE_BIT_KHR; break; 1959 default: 1960 DE_ASSERT(false); 1961 break; 1962 } 1963 1964 return flags; 1965 } 1966 1967 VkPipelineStageFlags getPipelineWriteStage () const 1968 { 1969 VkPipelineStageFlags flags = 0u; 1970 1971 switch (testingStage) 1972 { 1973 case TestingStage::COMPUTE: flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break; 1974 case TestingStage::VERTEX: flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT; break; 1975 case TestingStage::TESS_EVAL: flags |= VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT; break; 1976 case TestingStage::TESS_CONTROL: flags |= VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT; break; 1977 case TestingStage::GEOMETRY: flags |= VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT; break; 1978 case TestingStage::FRAGMENT: flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; break; 1979 case TestingStage::RAY_GEN: // fallthrough 1980 case TestingStage::INTERSECTION: // fallthrough 1981 case 
TestingStage::ANY_HIT: // fallthrough 1982 case TestingStage::CLOSEST_HIT: // fallthrough 1983 case TestingStage::MISS: // fallthrough 1984 case TestingStage::CALLABLE: flags |= VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR; break; 1985 default: 1986 DE_ASSERT(false); 1987 break; 1988 } 1989 1990 return flags; 1991 } 1992 1993private: 1994 VkDescriptorSetLayoutCreateFlags getLayoutCreateFlags (bool isSourceSet) const 1995 { 1996 // UPDATE_AFTER_BIND cannot be used with HOST_ONLY sets. 1997 //DE_ASSERT(!(updateMoment == UpdateMoment::UPDATE_AFTER_BIND && sourceSetType == SourceSetType::HOST_ONLY)); 1998 1999 VkDescriptorSetLayoutCreateFlags createFlags = 0u; 2000 2001 if ((!isSourceSet || sourceSetType != SourceSetType::HOST_ONLY) && updateMoment == UpdateMoment::UPDATE_AFTER_BIND) 2002 createFlags |= VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT; 2003 2004 if (isSourceSet && sourceSetType == SourceSetType::HOST_ONLY) 2005 createFlags |= VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE; 2006 2007 return createFlags; 2008 } 2009 2010public: 2011 VkDescriptorSetLayoutCreateFlags getSrcLayoutCreateFlags () const 2012 { 2013 return getLayoutCreateFlags(true); 2014 } 2015 2016 VkDescriptorSetLayoutCreateFlags getDstLayoutCreateFlags () const 2017 { 2018 return getLayoutCreateFlags(false); 2019 } 2020 2021private: 2022 VkDescriptorPoolCreateFlags getPoolCreateFlags (bool isSourceSet) const 2023 { 2024 // UPDATE_AFTER_BIND cannot be used with HOST_ONLY sets. 2025 //DE_ASSERT(!(updateMoment == UpdateMoment::UPDATE_AFTER_BIND && sourceSetType == SourceSetType::HOST_ONLY)); 2026 2027 VkDescriptorPoolCreateFlags poolCreateFlags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT; 2028 2029 if ((!isSourceSet || sourceSetType != SourceSetType::HOST_ONLY) && updateMoment == UpdateMoment::UPDATE_AFTER_BIND) 2030 poolCreateFlags |= VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT; 2031 2032 if (isSourceSet && sourceSetType == SourceSetType::HOST_ONLY) 2033 poolCreateFlags |= VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE; 2034 2035 return poolCreateFlags; 2036 } 2037 2038public: 2039 VkDescriptorPoolCreateFlags getSrcPoolCreateFlags () const 2040 { 2041 return getPoolCreateFlags(true); 2042 } 2043 2044 VkDescriptorPoolCreateFlags getDstPoolCreateFlags () const 2045 { 2046 return getPoolCreateFlags(false); 2047 } 2048 2049 VkPipelineBindPoint getBindPoint () const 2050 { 2051 if (testingStage == TestingStage::COMPUTE) 2052 return VK_PIPELINE_BIND_POINT_COMPUTE; 2053 if (isRayTracingStage(testingStage)) 2054 return VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR; 2055 return VK_PIPELINE_BIND_POINT_GRAPHICS; 2056 } 2057}; 2058 2059class MutableTypesTest : public TestCase 2060{ 2061public: 2062 MutableTypesTest (tcu::TestContext& testCtx, const std::string& name, const std::string& description, const TestParams& params) 2063 : TestCase(testCtx, name, description) 2064 , m_params(params) 2065 {} 2066 2067 ~MutableTypesTest () override = default; 2068 2069 void initPrograms (vk::SourceCollections& programCollection) const override; 2070 TestInstance* createInstance (Context& context) const override; 2071 void checkSupport (Context& context) const override; 2072 2073private: 2074 TestParams m_params; 2075}; 2076 2077class MutableTypesInstance : public TestInstance 2078{ 2079public: 2080 MutableTypesInstance (Context& context, const TestParams& params) 2081 : TestInstance (context) 2082 , m_params (params) 2083 {} 2084 2085 ~MutableTypesInstance () override = default; 2086 2087 tcu::TestStatus iterate () 
override;
2088
2089private:
2090 TestParams m_params;
2091};
2092
2093// Check if a descriptor set contains a given descriptor type in any iteration up to maxTypes().
2094bool containsAnyDescriptorType (const DescriptorSet& descriptorSet, VkDescriptorType descriptorType)
2095{
2096 const auto numIterations = descriptorSet.maxTypes();
2097
2098 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2099 {
2100 if (descriptorSet.containsTypeAtIteration(descriptorType, iter))
2101 return true;
2102 }
2103
2104 return false;
2105}
2106
2107// Check if testing this descriptor set needs an external image (for sampler descriptors).
2108bool needsExternalImage (const DescriptorSet& descriptorSet)
2109{
2110 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_SAMPLER);
2111}
2112
2113// Check if testing this descriptor set needs an external sampler (for sampled images).
2114bool needsExternalSampler (const DescriptorSet& descriptorSet)
2115{
2116 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
2117}
2118
2119// Check if this descriptor set contains input attachments.
2120bool usesInputAttachments (const DescriptorSet& descriptorSet)
2121{
2122 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
2123}
2124
2125// Check if this descriptor set contains acceleration structures.
2126bool usesAccelerationStructures (const DescriptorSet& descriptorSet)
2127{
2128 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
2129}
2130
2131std::string shaderName (deUint32 iteration)
2132{
2133 return ("iteration-" + de::toString(iteration));
2134}
2135
2136void MutableTypesTest::initPrograms (vk::SourceCollections& programCollection) const
2137{
2138 const bool usePushConstants = (m_params.arrayAccessType == ArrayAccessType::PUSH_CONSTANT);
2139 const bool useExternalImage = needsExternalImage(*m_params.descriptorSet);
2140 const bool useExternalSampler = needsExternalSampler(*m_params.descriptorSet);
2141 const bool rayQueries = usesAccelerationStructures(*m_params.descriptorSet);
2142 const bool rayTracing = isRayTracingStage(m_params.testingStage);
2143 const auto numIterations = m_params.descriptorSet->maxTypes();
2144 const auto numBindings = m_params.descriptorSet->numBindings();
2145 const vk::ShaderBuildOptions rtBuildOptions (programCollection.usedVulkanVersion, vk::SPIRV_VERSION_1_4, 0u, true);
2146
2147 // Extra set and bindings for external resources.
2148 std::ostringstream extraSet;
2149 deUint32 extraBindings = 0u;
2150
2151 extraSet << "layout (set=1, binding=" << extraBindings++ << ") buffer OutputBufferBlock { uint value[" << numIterations << "]; } outputBuffer;\n";
2152 if (useExternalImage)
2153 extraSet << "layout (set=1, binding=" << extraBindings++ << ") uniform utexture2D externalSampledImage;\n";
2154 if (useExternalSampler)
2155 extraSet << "layout (set=1, binding=" << extraBindings++ << ") uniform sampler externalSampler;\n";
2156 // The extra binding below will be declared in the "passthrough" ray generation shader.
2157#if 0
2158 if (rayTracing)
2159 extraSet << "layout (set=1, binding=" << extraBindings++ << ") uniform accelerationStructureEXT externalAS;\n";
2160#endif
2161
2162 // Common vertex preamble.
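// (A single triangle around the viewport center; the same preamble is reused by the
// passthrough vertex shader added near the end of initPrograms.)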
2163 std::ostringstream vertexPreamble; 2164 vertexPreamble 2165 << "vec2 vertexPositions[3] = vec2[](\n" 2166 << " vec2(0.0, -0.5),\n" 2167 << " vec2(0.5, 0.5),\n" 2168 << " vec2(-0.5, 0.5)\n" 2169 << ");\n" 2170 ; 2171 2172 // Vertex shader body common statements. 2173 std::ostringstream vertexBodyCommon; 2174 vertexBodyCommon << " gl_Position = vec4(vertexPositions[gl_VertexIndex], 0.0, 1.0);\n"; 2175 2176 // Common tessellation control preamble. 2177 std::ostringstream tescPreamble; 2178 tescPreamble 2179 << "layout (vertices=3) out;\n" 2180 << "in gl_PerVertex\n" 2181 << "{\n" 2182 << " vec4 gl_Position;\n" 2183 << "} gl_in[gl_MaxPatchVertices];\n" 2184 << "out gl_PerVertex\n" 2185 << "{\n" 2186 << " vec4 gl_Position;\n" 2187 << "} gl_out[];\n" 2188 ; 2189 2190 // Common tessellation control body. 2191 std::ostringstream tescBodyCommon; 2192 tescBodyCommon 2193 << " gl_TessLevelInner[0] = 1.0;\n" 2194 << " gl_TessLevelInner[1] = 1.0;\n" 2195 << " gl_TessLevelOuter[0] = 1.0;\n" 2196 << " gl_TessLevelOuter[1] = 1.0;\n" 2197 << " gl_TessLevelOuter[2] = 1.0;\n" 2198 << " gl_TessLevelOuter[3] = 1.0;\n" 2199 << " gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n" 2200 ; 2201 2202 // Common tessellation evaluation preamble. 2203 std::ostringstream tesePreamble; 2204 tesePreamble 2205 << "layout (triangles, fractional_odd_spacing, cw) in;\n" 2206 << "in gl_PerVertex\n" 2207 << "{\n" 2208 << " vec4 gl_Position;\n" 2209 << "} gl_in[gl_MaxPatchVertices];\n" 2210 << "out gl_PerVertex\n" 2211 << "{\n" 2212 << " vec4 gl_Position;\n" 2213 << "};\n" 2214 ; 2215 2216 // Common tessellation evaluation body. 2217 std::ostringstream teseBodyCommon; 2218 teseBodyCommon 2219 << " gl_Position = (gl_TessCoord.x * gl_in[0].gl_Position) +\n" 2220 << " (gl_TessCoord.y * gl_in[1].gl_Position) +\n" 2221 << " (gl_TessCoord.z * gl_in[2].gl_Position);\n" 2222 ; 2223 2224 // Shader preamble. 2225 std::ostringstream preamble; 2226 2227 preamble 2228 << "#version 460\n" 2229 << "#extension GL_EXT_nonuniform_qualifier : enable\n" 2230 << "#extension GL_EXT_debug_printf : enable\n" 2231 << (rayTracing ? "#extension GL_EXT_ray_tracing : enable\n" : "") 2232 << (rayQueries ? 
"#extension GL_EXT_ray_query : enable\n" : "") 2233 << "\n" 2234 ; 2235 2236 if (m_params.testingStage == TestingStage::VERTEX) 2237 { 2238 preamble << vertexPreamble.str(); 2239 } 2240 else if (m_params.testingStage == TestingStage::COMPUTE) 2241 { 2242 preamble 2243 << "layout (local_size_x=1, local_size_y=1, local_size_z=1) in;\n" 2244 << "\n" 2245 ; 2246 } 2247 else if (m_params.testingStage == TestingStage::GEOMETRY) 2248 { 2249 preamble 2250 << "layout (triangles) in;\n" 2251 << "layout (triangle_strip, max_vertices=3) out;\n" 2252 << "in gl_PerVertex\n" 2253 << "{\n" 2254 << " vec4 gl_Position;\n" 2255 << "} gl_in[3];\n" 2256 << "out gl_PerVertex\n" 2257 << "{\n" 2258 << " vec4 gl_Position;\n" 2259 << "};\n" 2260 ; 2261 } 2262 else if (m_params.testingStage == TestingStage::TESS_CONTROL) 2263 { 2264 preamble << tescPreamble.str(); 2265 } 2266 else if (m_params.testingStage == TestingStage::TESS_EVAL) 2267 { 2268 preamble << tesePreamble.str(); 2269 } 2270 else if (m_params.testingStage == TestingStage::CALLABLE) 2271 { 2272 preamble << "layout (location=0) callableDataInEXT float unusedCallableData;\n"; 2273 } 2274 else if (m_params.testingStage == TestingStage::CLOSEST_HIT || 2275 m_params.testingStage == TestingStage::ANY_HIT || 2276 m_params.testingStage == TestingStage::MISS) 2277 { 2278 preamble << "layout (location=0) rayPayloadInEXT float unusedRayPayload;\n"; 2279 } 2280 else if (m_params.testingStage == TestingStage::INTERSECTION) 2281 { 2282 preamble << "hitAttributeEXT vec3 hitAttribute;\n"; 2283 } 2284 2285 preamble << extraSet.str(); 2286 if (usePushConstants) 2287 preamble << "layout (push_constant, std430) uniform PushConstantBlock { uint zero; } pc;\n"; 2288 preamble << "\n"; 2289 2290 // We need to create a shader per iteration. 2291 for (deUint32 iter = 0u; iter < numIterations; ++iter) 2292 { 2293 // Shader preamble. 2294 std::ostringstream shader; 2295 shader << preamble.str(); 2296 2297 deUint32 inputAttachmentCount = 0u; 2298 2299 // Descriptor declarations for this iteration. 2300 for (size_t bindingIdx = 0; bindingIdx < numBindings; ++bindingIdx) 2301 { 2302 DE_ASSERT(bindingIdx <= std::numeric_limits<deUint32>::max()); 2303 2304 const auto binding = m_params.descriptorSet->getBinding(bindingIdx); 2305 const auto bindingTypes = binding->typesAtIteration(iter); 2306 const auto hasInputAttachment = de::contains(begin(bindingTypes), end(bindingTypes), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT); 2307 const auto isArray = binding->isArray(); 2308 const auto isUnbounded = binding->isUnbounded(); 2309 const auto bindingSize = binding->size(); 2310 2311 // If the binding is an input attachment, make sure it's not an array. 2312 DE_ASSERT(!hasInputAttachment || !isArray); 2313 2314 // Make sure the descriptor count fits a deInt32 if needed. 2315 DE_ASSERT(!isArray || isUnbounded || bindingSize <= static_cast<size_t>(std::numeric_limits<deInt32>::max())); 2316 2317 const auto arraySize = (isArray ? (isUnbounded ? tcu::just(deInt32{-1}) : tcu::just(static_cast<deInt32>(bindingSize))) 2318 : tcu::Nothing); 2319 2320 shader << binding->glslDeclarations(iter, 0u, static_cast<deUint32>(bindingIdx), inputAttachmentCount, arraySize); 2321 2322 if (hasInputAttachment) 2323 ++inputAttachmentCount; 2324 } 2325 2326 // Main body. 2327 shader 2328 << "\n" 2329 << "void main() {\n" 2330 // This checks if we are the first invocation to arrive here, so the checks are executed only once. 
2331 << " const uint flag = atomicCompSwap(outputBuffer.value[" << iter << "], 0u, 1u);\n" 2332 << " if (flag == 0u) {\n" 2333 << " uint anyError = 0u;\n" 2334 ; 2335 2336 for (size_t bindingIdx = 0; bindingIdx < numBindings; ++bindingIdx) 2337 { 2338 const auto binding = m_params.descriptorSet->getBinding(bindingIdx); 2339 const auto idx32 = static_cast<deUint32>(bindingIdx); 2340 shader << binding->glslCheckStatements(iter, 0u, idx32, getDescriptorNumericValue(iter, idx32), tcu::Nothing, usePushConstants); 2341 } 2342 2343 shader 2344 << " if (anyError == 0u) {\n" 2345 << " atomicAdd(outputBuffer.value[" << iter << "], 1u);\n" 2346 << " }\n" 2347 << " }\n" // Closes if (flag == 0u). 2348 ; 2349 2350 if (m_params.testingStage == TestingStage::VERTEX) 2351 { 2352 shader << vertexBodyCommon.str(); 2353 } 2354 else if (m_params.testingStage == TestingStage::GEOMETRY) 2355 { 2356 shader 2357 << " gl_Position = gl_in[0].gl_Position; EmitVertex();\n" 2358 << " gl_Position = gl_in[1].gl_Position; EmitVertex();\n" 2359 << " gl_Position = gl_in[2].gl_Position; EmitVertex();\n" 2360 ; 2361 } 2362 else if (m_params.testingStage == TestingStage::TESS_CONTROL) 2363 { 2364 shader << tescBodyCommon.str(); 2365 } 2366 else if (m_params.testingStage == TestingStage::TESS_EVAL) 2367 { 2368 shader << teseBodyCommon.str(); 2369 } 2370 2371 shader 2372 << "}\n" // End of main(). 2373 ; 2374 2375 { 2376 const auto shaderNameStr = shaderName(iter); 2377 const auto shaderStr = shader.str(); 2378 auto& glslSource = programCollection.glslSources.add(shaderNameStr); 2379 2380 if (m_params.testingStage == TestingStage::COMPUTE) 2381 glslSource << glu::ComputeSource(shaderStr); 2382 else if (m_params.testingStage == TestingStage::VERTEX) 2383 glslSource << glu::VertexSource(shaderStr); 2384 else if (m_params.testingStage == TestingStage::FRAGMENT) 2385 glslSource << glu::FragmentSource(shaderStr); 2386 else if (m_params.testingStage == TestingStage::GEOMETRY) 2387 glslSource << glu::GeometrySource(shaderStr); 2388 else if (m_params.testingStage == TestingStage::TESS_CONTROL) 2389 glslSource << glu::TessellationControlSource(shaderStr); 2390 else if (m_params.testingStage == TestingStage::TESS_EVAL) 2391 glslSource << glu::TessellationEvaluationSource(shaderStr); 2392 else if (m_params.testingStage == TestingStage::RAY_GEN) 2393 glslSource << glu::RaygenSource(updateRayTracingGLSL(shaderStr)); 2394 else if (m_params.testingStage == TestingStage::INTERSECTION) 2395 glslSource << glu::IntersectionSource(updateRayTracingGLSL(shaderStr)); 2396 else if (m_params.testingStage == TestingStage::ANY_HIT) 2397 glslSource << glu::AnyHitSource(updateRayTracingGLSL(shaderStr)); 2398 else if (m_params.testingStage == TestingStage::CLOSEST_HIT) 2399 glslSource << glu::ClosestHitSource(updateRayTracingGLSL(shaderStr)); 2400 else if (m_params.testingStage == TestingStage::MISS) 2401 glslSource << glu::MissSource(updateRayTracingGLSL(shaderStr)); 2402 else if (m_params.testingStage == TestingStage::CALLABLE) 2403 glslSource << glu::CallableSource(updateRayTracingGLSL(shaderStr)); 2404 else 2405 DE_ASSERT(false); 2406 2407 if (rayTracing || rayQueries) 2408 glslSource << rtBuildOptions; 2409 } 2410 } 2411 2412 if (m_params.testingStage == TestingStage::FRAGMENT 2413 || m_params.testingStage == TestingStage::GEOMETRY 2414 || m_params.testingStage == TestingStage::TESS_CONTROL 2415 || m_params.testingStage == TestingStage::TESS_EVAL) 2416 { 2417 // Add passthrough vertex shader that works for points. 
2418 std::ostringstream vertPassthrough; 2419 vertPassthrough 2420 << "#version 460\n" 2421 << "out gl_PerVertex\n" 2422 << "{\n" 2423 << " vec4 gl_Position;\n" 2424 << "};\n" 2425 << vertexPreamble.str() 2426 << "void main() {\n" 2427 << vertexBodyCommon.str() 2428 << "}\n" 2429 ; 2430 programCollection.glslSources.add("vert") << glu::VertexSource(vertPassthrough.str()); 2431 } 2432 2433 if (m_params.testingStage == TestingStage::TESS_CONTROL) 2434 { 2435 // Add passthrough tessellation evaluation shader. 2436 std::ostringstream tesePassthrough; 2437 tesePassthrough 2438 << "#version 460\n" 2439 << tesePreamble.str() 2440 << "void main (void)\n" 2441 << "{\n" 2442 << teseBodyCommon.str() 2443 << "}\n" 2444 ; 2445 2446 programCollection.glslSources.add("tese") << glu::TessellationEvaluationSource(tesePassthrough.str()); 2447 } 2448 2449 if (m_params.testingStage == TestingStage::TESS_EVAL) 2450 { 2451 // Add passthrough tessellation control shader. 2452 std::ostringstream tescPassthrough; 2453 tescPassthrough 2454 << "#version 460\n" 2455 << tescPreamble.str() 2456 << "void main (void)\n" 2457 << "{\n" 2458 << tescBodyCommon.str() 2459 << "}\n" 2460 ; 2461 2462 programCollection.glslSources.add("tesc") << glu::TessellationControlSource(tescPassthrough.str()); 2463 } 2464 2465 if (rayTracing && m_params.testingStage != TestingStage::RAY_GEN) 2466 { 2467 // Add a "passthrough" ray generation shader. 2468 std::ostringstream rgen; 2469 rgen 2470 << "#version 460 core\n" 2471 << "#extension GL_EXT_ray_tracing : require\n" 2472 << "layout (set=1, binding=" << extraBindings << ") uniform accelerationStructureEXT externalAS;\n" 2473 << ((m_params.testingStage == TestingStage::CALLABLE) 2474 ? "layout (location=0) callableDataEXT float unusedCallableData;\n" 2475 : "layout (location=0) rayPayloadEXT float unusedRayPayload;\n") 2476 << "\n" 2477 << "void main()\n" 2478 << "{\n" 2479 ; 2480 2481 if (m_params.testingStage == TestingStage::INTERSECTION 2482 || m_params.testingStage == TestingStage::ANY_HIT 2483 || m_params.testingStage == TestingStage::CLOSEST_HIT 2484 || m_params.testingStage == TestingStage::MISS) 2485 { 2486 // We need to trace rays in this case to get hits or misses. 2487 const auto zDir = ((m_params.testingStage == TestingStage::MISS) ? "-1.0" : "1.0"); 2488 2489 rgen 2490 << " const uint cullMask = 0xFF;\n" 2491 << " const float tMin = 1.0;\n" 2492 << " const float tMax = 10.0;\n" 2493 << " const vec3 origin = vec3(0.0, 0.0, 0.0);\n" 2494 << " const vec3 direction = vec3(0.0, 0.0, " << zDir << ");\n" 2495 << " traceRayEXT(externalAS, gl_RayFlagsNoneEXT, cullMask, 0, 0, 0, origin, tMin, direction, tMax, 0);\n" 2496 ; 2497 2498 } 2499 else if (m_params.testingStage == TestingStage::CALLABLE) 2500 { 2501 rgen << " executeCallableEXT(0, 0);\n"; 2502 } 2503 2504 // End of main(). 2505 rgen << "}\n"; 2506 2507 programCollection.glslSources.add("rgen") << glu::RaygenSource(updateRayTracingGLSL(rgen.str())) << rtBuildOptions; 2508 2509 // Intersection shaders will ignore the intersection, so we need a passthrough miss shader. 
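// (The traceRayEXT call in the generated rgen shader above uses miss index 0, so this empty
// miss shader keeps the ray tracing pipeline complete even though no hit is ever confirmed.)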
2510 if (m_params.testingStage == TestingStage::INTERSECTION) 2511 { 2512 std::ostringstream miss; 2513 miss 2514 << "#version 460 core\n" 2515 << "#extension GL_EXT_ray_tracing : require\n" 2516 << "layout (location=0) rayPayloadEXT float unusedRayPayload;\n" 2517 << "\n" 2518 << "void main()\n" 2519 << "{\n" 2520 << "}\n" 2521 ; 2522 2523 programCollection.glslSources.add("miss") << glu::MissSource(updateRayTracingGLSL(miss.str())) << rtBuildOptions; 2524 } 2525 } 2526} 2527 2528TestInstance* MutableTypesTest::createInstance (Context& context) const 2529{ 2530 return new MutableTypesInstance(context, m_params); 2531} 2532 2533void requirePartiallyBound (Context& context) 2534{ 2535 context.requireDeviceFunctionality("VK_EXT_descriptor_indexing"); 2536 const auto& indexingFeatures = context.getDescriptorIndexingFeatures(); 2537 if (!indexingFeatures.descriptorBindingPartiallyBound) 2538 TCU_THROW(NotSupportedError, "Partially bound bindings not supported"); 2539} 2540 2541void requireVariableDescriptorCount (Context& context) 2542{ 2543 context.requireDeviceFunctionality("VK_EXT_descriptor_indexing"); 2544 const auto& indexingFeatures = context.getDescriptorIndexingFeatures(); 2545 if (!indexingFeatures.descriptorBindingVariableDescriptorCount) 2546 TCU_THROW(NotSupportedError, "Variable descriptor count not supported"); 2547} 2548 2549// Calculates the set of used descriptor types for a given set and iteration count, for bindings matching a predicate. 2550std::set<VkDescriptorType> getUsedDescriptorTypes (const DescriptorSet& descriptorSet, deUint32 numIterations, bool (*predicate)(const BindingInterface* binding)) 2551{ 2552 std::set<VkDescriptorType> usedDescriptorTypes; 2553 2554 for (size_t bindingIdx = 0; bindingIdx < descriptorSet.numBindings(); ++bindingIdx) 2555 { 2556 const auto bindingPtr = descriptorSet.getBinding(bindingIdx); 2557 if (predicate(bindingPtr)) 2558 { 2559 for (deUint32 iter = 0u; iter < numIterations; ++iter) 2560 { 2561 const auto descTypes = bindingPtr->typesAtIteration(iter); 2562 usedDescriptorTypes.insert(begin(descTypes), end(descTypes)); 2563 } 2564 } 2565 } 2566 2567 return usedDescriptorTypes; 2568} 2569 2570std::set<VkDescriptorType> getAllUsedDescriptorTypes (const DescriptorSet& descriptorSet, deUint32 numIterations) 2571{ 2572 return getUsedDescriptorTypes(descriptorSet, numIterations, [] (const BindingInterface*) { return true; }); 2573} 2574 2575std::set<VkDescriptorType> getUsedArrayDescriptorTypes (const DescriptorSet& descriptorSet, deUint32 numIterations) 2576{ 2577 return getUsedDescriptorTypes(descriptorSet, numIterations, [] (const BindingInterface* b) { return b->isArray(); }); 2578} 2579 2580// Are we testing a vertex pipeline stage? 2581bool isVertexStage (TestingStage stage) 2582{ 2583 switch (stage) 2584 { 2585 case TestingStage::VERTEX: 2586 case TestingStage::TESS_CONTROL: 2587 case TestingStage::TESS_EVAL: 2588 case TestingStage::GEOMETRY: 2589 return true; 2590 default: 2591 break; 2592 } 2593 2594 return false; 2595} 2596 2597void MutableTypesTest::checkSupport (Context& context) const 2598{ 2599 context.requireDeviceFunctionality("VK_VALVE_mutable_descriptor_type"); 2600 2601 // Check ray tracing if needed. 2602 const bool rayTracing = isRayTracingStage(m_params.testingStage); 2603 2604 if (rayTracing) 2605 { 2606 context.requireDeviceFunctionality("VK_KHR_acceleration_structure"); 2607 context.requireDeviceFunctionality("VK_KHR_ray_tracing_pipeline"); 2608 } 2609 2610 // Check if ray queries are needed. 
Ray queries are used to verify acceleration structure descriptors. 2611 const bool rayQueriesNeeded = usesAccelerationStructures(*m_params.descriptorSet); 2612 if (rayQueriesNeeded) 2613 { 2614 context.requireDeviceFunctionality("VK_KHR_acceleration_structure"); 2615 context.requireDeviceFunctionality("VK_KHR_ray_query"); 2616 } 2617 2618 // We'll use iterations to check each mutable type, as needed. 2619 const auto numIterations = m_params.descriptorSet->maxTypes(); 2620 2621 if (m_params.descriptorSet->lastBindingIsUnbounded()) 2622 requireVariableDescriptorCount(context); 2623 2624 for (deUint32 iter = 0u; iter < numIterations; ++iter) 2625 { 2626 if (m_params.descriptorSet->needsAliasing(iter)) 2627 { 2628 requirePartiallyBound(context); 2629 break; 2630 } 2631 } 2632 2633 if (m_params.updateMoment == UpdateMoment::UPDATE_AFTER_BIND) 2634 { 2635 // Check update after bind for each used descriptor type. 2636 const auto& usedDescriptorTypes = getAllUsedDescriptorTypes(*m_params.descriptorSet, numIterations); 2637 const auto& indexingFeatures = context.getDescriptorIndexingFeatures(); 2638 2639 for (const auto& descType : usedDescriptorTypes) 2640 { 2641 switch (descType) 2642 { 2643 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: 2644 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: 2645 if (!indexingFeatures.descriptorBindingUniformBufferUpdateAfterBind) 2646 TCU_THROW(NotSupportedError, "Update-after-bind not supported for uniform buffers"); 2647 break; 2648 2649 case VK_DESCRIPTOR_TYPE_SAMPLER: 2650 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: 2651 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: 2652 if (!indexingFeatures.descriptorBindingSampledImageUpdateAfterBind) 2653 TCU_THROW(NotSupportedError, "Update-after-bind not supported for samplers and sampled images"); 2654 break; 2655 2656 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: 2657 if (!indexingFeatures.descriptorBindingStorageImageUpdateAfterBind) 2658 TCU_THROW(NotSupportedError, "Update-after-bind not supported for storage images"); 2659 break; 2660 2661 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: 2662 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: 2663 if (!indexingFeatures.descriptorBindingStorageBufferUpdateAfterBind) 2664 TCU_THROW(NotSupportedError, "Update-after-bind not supported for storage buffers"); 2665 break; 2666 2667 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: 2668 if (!indexingFeatures.descriptorBindingUniformTexelBufferUpdateAfterBind) 2669 TCU_THROW(NotSupportedError, "Update-after-bind not supported for uniform texel buffers"); 2670 break; 2671 2672 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: 2673 if (!indexingFeatures.descriptorBindingStorageTexelBufferUpdateAfterBind) 2674 TCU_THROW(NotSupportedError, "Update-after-bind not supported for storage texel buffers"); 2675 break; 2676 2677 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: 2678 TCU_THROW(InternalError, "Tests do not support update-after-bind with input attachments"); 2679 2680 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: 2681 { 2682 // Just in case we ever mix some of these in. 2683 context.requireDeviceFunctionality("VK_EXT_inline_uniform_block"); 2684 const auto& iubFeatures = context.getInlineUniformBlockFeaturesEXT(); 2685 if (!iubFeatures.descriptorBindingInlineUniformBlockUpdateAfterBind) 2686 TCU_THROW(NotSupportedError, "Update-after-bind not supported for inline uniform blocks"); 2687 } 2688 break; 2689 2690 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: 2691 { 2692 // Just in case we ever mix some of these in. 
2693 context.requireDeviceFunctionality("VK_KHR_acceleration_structure"); 2694 const auto& asFeatures = context.getAccelerationStructureFeatures(); 2695 if (!asFeatures.descriptorBindingAccelerationStructureUpdateAfterBind) 2696 TCU_THROW(NotSupportedError, "Update-after-bind not supported for acceleration structures"); 2697 } 2698 break; 2699 2700 case VK_DESCRIPTOR_TYPE_MUTABLE_VALVE: 2701 TCU_THROW(InternalError, "Found VK_DESCRIPTOR_TYPE_MUTABLE_VALVE in list of used descriptor types"); 2702 2703 default: 2704 TCU_THROW(InternalError, "Unexpected descriptor type found in list of used descriptor types: " + de::toString(descType)); 2705 } 2706 } 2707 } 2708 2709 if (m_params.arrayAccessType == ArrayAccessType::PUSH_CONSTANT) 2710 { 2711 // These require dynamically uniform indices. 2712 const auto& usedDescriptorTypes = getUsedArrayDescriptorTypes(*m_params.descriptorSet, numIterations); 2713 const auto& features = context.getDeviceFeatures(); 2714 const auto descriptorIndexingSupported = context.isDeviceFunctionalitySupported("VK_EXT_descriptor_indexing"); 2715 const auto& indexingFeatures = context.getDescriptorIndexingFeatures(); 2716 2717 for (const auto& descType : usedDescriptorTypes) 2718 { 2719 switch (descType) 2720 { 2721 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: 2722 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: 2723 if (!features.shaderUniformBufferArrayDynamicIndexing) 2724 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for uniform buffers"); 2725 break; 2726 2727 case VK_DESCRIPTOR_TYPE_SAMPLER: 2728 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: 2729 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: 2730 if (!features.shaderSampledImageArrayDynamicIndexing) 2731 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for samplers and sampled images"); 2732 break; 2733 2734 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: 2735 if (!features.shaderStorageImageArrayDynamicIndexing) 2736 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for storage images"); 2737 break; 2738 2739 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: 2740 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: 2741 if (!features.shaderStorageBufferArrayDynamicIndexing) 2742 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for storage buffers"); 2743 break; 2744 2745 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: 2746 if (!descriptorIndexingSupported || !indexingFeatures.shaderUniformTexelBufferArrayDynamicIndexing) 2747 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for uniform texel buffers"); 2748 break; 2749 2750 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: 2751 if (!descriptorIndexingSupported || !indexingFeatures.shaderStorageTexelBufferArrayDynamicIndexing) 2752 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for storage texel buffers"); 2753 break; 2754 2755 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: 2756 if (!descriptorIndexingSupported || !indexingFeatures.shaderInputAttachmentArrayDynamicIndexing) 2757 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for input attachments"); 2758 break; 2759 2760 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: 2761 context.requireDeviceFunctionality("VK_KHR_acceleration_structure"); 2762 break; 2763 2764 case VK_DESCRIPTOR_TYPE_MUTABLE_VALVE: 2765 TCU_THROW(InternalError, "Found VK_DESCRIPTOR_TYPE_MUTABLE_VALVE in list of used array descriptor types"); 2766 2767 default: 2768 TCU_THROW(InternalError, "Unexpected descriptor type found in list of used descriptor types: " + de::toString(descType)); 2769 } 2770 } 
2771 } 2772 2773 // Check layout support. 2774 { 2775 const auto& vkd = context.getDeviceInterface(); 2776 const auto device = context.getDevice(); 2777 const auto stageFlags = m_params.getStageFlags(); 2778 2779 { 2780 const auto layoutCreateFlags = m_params.getDstLayoutCreateFlags(); 2781 const auto supported = m_params.descriptorSet->checkDescriptorSetLayout(vkd, device, stageFlags, layoutCreateFlags); 2782 2783 if (!supported) 2784 TCU_THROW(NotSupportedError, "Required descriptor set layout not supported"); 2785 } 2786 2787 if (m_params.updateType == UpdateType::COPY) 2788 { 2789 const auto layoutCreateFlags = m_params.getSrcLayoutCreateFlags(); 2790 const auto supported = m_params.descriptorSet->checkDescriptorSetLayout(vkd, device, stageFlags, layoutCreateFlags); 2791 2792 if (!supported) 2793 TCU_THROW(NotSupportedError, "Required descriptor set layout for source set not supported"); 2794 2795 // Check specific layouts for the different source sets are supported. 2796 for (deUint32 iter = 0u; iter < numIterations; ++iter) 2797 { 2798 const auto srcSet = m_params.descriptorSet->genSourceSet(m_params.sourceSetStrategy, iter); 2799 const auto srcLayoutSupported = srcSet->checkDescriptorSetLayout(vkd, device, stageFlags, layoutCreateFlags); 2800 2801 if (!srcLayoutSupported) 2802 TCU_THROW(NotSupportedError, "Descriptor set layout for source set at iteration " + de::toString(iter) + " not supported"); 2803 } 2804 } 2805 } 2806 2807 // Check supported stores and stages. 2808 const bool vertexStage = isVertexStage(m_params.testingStage); 2809 const bool fragmentStage = (m_params.testingStage == TestingStage::FRAGMENT); 2810 const bool geometryStage = (m_params.testingStage == TestingStage::GEOMETRY); 2811 const bool tessellation = (m_params.testingStage == TestingStage::TESS_CONTROL || m_params.testingStage == TestingStage::TESS_EVAL); 2812 2813 const auto& features = context.getDeviceFeatures(); 2814 2815 if (vertexStage && !features.vertexPipelineStoresAndAtomics) 2816 TCU_THROW(NotSupportedError, "Vertex pipeline stores and atomics not supported"); 2817 2818 if (fragmentStage && !features.fragmentStoresAndAtomics) 2819 TCU_THROW(NotSupportedError, "Fragment shader stores and atomics not supported"); 2820 2821 if (geometryStage && !features.geometryShader) 2822 TCU_THROW(NotSupportedError, "Geometry shader not supported"); 2823 2824 if (tessellation && !features.tessellationShader) 2825 TCU_THROW(NotSupportedError, "Tessellation shaders not supported"); 2826} 2827 2828// What to do at each iteration step. Used to apply UPDATE_AFTER_BIND or not. 2829enum class Step 2830{ 2831 UPDATE = 0, 2832 BIND, 2833}; 2834 2835// Create render pass. 
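// (Builds a single-subpass render pass whose only attachments are the INPUT_ATTACHMENT
// resources, kept in VK_IMAGE_LAYOUT_GENERAL and loaded with VK_ATTACHMENT_LOAD_OP_LOAD so
// previously written descriptor values remain readable from the shader.)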
2836Move<VkRenderPass> buildRenderPass (const DeviceInterface& vkd, VkDevice device, const std::vector<Resource>& resources)
2837{
2838 const auto imageFormat = getDescriptorImageFormat();
2839
2840 std::vector<VkAttachmentDescription> attachmentDescriptions;
2841 std::vector<VkAttachmentReference> attachmentReferences;
2842 std::vector<deUint32> attachmentIndices;
2843
2844 for (const auto& resource : resources)
2845 {
2846 if (resource.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
2847 {
2848 const auto nextIndex = static_cast<deUint32>(attachmentDescriptions.size());
2849
2850 const VkAttachmentDescription description = {
2851 0u, // VkAttachmentDescriptionFlags flags;
2852 imageFormat, // VkFormat format;
2853 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
2854 VK_ATTACHMENT_LOAD_OP_LOAD, // VkAttachmentLoadOp loadOp;
2855 VK_ATTACHMENT_STORE_OP_DONT_CARE, // VkAttachmentStoreOp storeOp;
2856 VK_ATTACHMENT_LOAD_OP_DONT_CARE, // VkAttachmentLoadOp stencilLoadOp;
2857 VK_ATTACHMENT_STORE_OP_DONT_CARE, // VkAttachmentStoreOp stencilStoreOp;
2858 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout initialLayout;
2859 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout finalLayout;
2860 };
2861
2862 const VkAttachmentReference reference = { nextIndex, VK_IMAGE_LAYOUT_GENERAL };
2863
2864 attachmentIndices.push_back(nextIndex);
2865 attachmentDescriptions.push_back(description);
2866 attachmentReferences.push_back(reference);
2867 }
2868 }
2869
2870 const auto attachmentCount = static_cast<deUint32>(attachmentDescriptions.size());
2871 DE_ASSERT(attachmentCount == static_cast<deUint32>(attachmentIndices.size()));
2872 DE_ASSERT(attachmentCount == static_cast<deUint32>(attachmentReferences.size()));
2873
2874 const VkSubpassDescription subpassDescription =
2875 {
2876 0u, // VkSubpassDescriptionFlags flags;
2877 VK_PIPELINE_BIND_POINT_GRAPHICS, // VkPipelineBindPoint pipelineBindPoint;
2878 attachmentCount, // deUint32 inputAttachmentCount;
2879 de::dataOrNull(attachmentReferences), // const VkAttachmentReference* pInputAttachments;
2880 0u, // deUint32 colorAttachmentCount;
2881 nullptr, // const VkAttachmentReference* pColorAttachments;
2882 nullptr, // const VkAttachmentReference* pResolveAttachments;
2883 nullptr, // const VkAttachmentReference* pDepthStencilAttachment;
2884 0u, // deUint32 preserveAttachmentCount;
2885 nullptr, // const deUint32* pPreserveAttachments;
2886 };
2887
2888 const VkRenderPassCreateInfo renderPassCreateInfo =
2889 {
2890 VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, // VkStructureType sType;
2891 nullptr, // const void* pNext;
2892 0u, // VkRenderPassCreateFlags flags;
2893 static_cast<deUint32>(attachmentDescriptions.size()), // deUint32 attachmentCount;
2894 de::dataOrNull(attachmentDescriptions), // const VkAttachmentDescription* pAttachments;
2895 1u, // deUint32 subpassCount;
2896 &subpassDescription, // const VkSubpassDescription* pSubpasses;
2897 0u, // deUint32 dependencyCount;
2898 nullptr, // const VkSubpassDependency* pDependencies;
2899 };
2900
2901 return createRenderPass(vkd, device, &renderPassCreateInfo);
2902}
2903
2904// Create a graphics pipeline.
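// (Uses a fixed 1x1 viewport and scissor from getDefaultExtent(); patch-list topology is
// selected when tessellation modules are present, and rasterizer discard is enabled when no
// fragment shader is supplied, as set up in rasterizationStateCreateInfo below.)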
2905Move<VkPipeline> buildGraphicsPipeline (const DeviceInterface& vkd, VkDevice device, VkPipelineLayout pipelineLayout, 2906 VkShaderModule vertModule, 2907 VkShaderModule tescModule, 2908 VkShaderModule teseModule, 2909 VkShaderModule geomModule, 2910 VkShaderModule fragModule, 2911 VkRenderPass renderPass) 2912{ 2913 const auto extent = getDefaultExtent(); 2914 const std::vector<VkViewport> viewports (1u, makeViewport(extent)); 2915 const std::vector<VkRect2D> scissors (1u, makeRect2D(extent)); 2916 const auto hasTess = (tescModule != DE_NULL || teseModule != DE_NULL); 2917 const auto topology = (hasTess ? VK_PRIMITIVE_TOPOLOGY_PATCH_LIST : VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST); 2918 2919 2920 const VkPipelineVertexInputStateCreateInfo vertexInputStateCreateInfo = initVulkanStructure(); 2921 2922 const VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateCreateInfo = { 2923 VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, // VkStructureType sType; 2924 nullptr, // const void* pNext; 2925 0u, // VkPipelineInputAssemblyStateCreateFlags flags; 2926 topology, // VkPrimitiveTopology topology; 2927 VK_FALSE, // VkBool32 primitiveRestartEnable; 2928 }; 2929 2930 const VkPipelineTessellationStateCreateInfo tessellationStateCreateInfo = { 2931 VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, // VkStructureType sType; 2932 nullptr, // const void* pNext; 2933 0u, // VkPipelineTessellationStateCreateFlags flags; 2934 (hasTess ? 3u : 0u), // deUint32 patchControlPoints; 2935 }; 2936 2937 const VkPipelineViewportStateCreateInfo viewportStateCreateInfo = { 2938 VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO, // VkStructureType sType; 2939 nullptr, // const void* pNext; 2940 0u, // VkPipelineViewportStateCreateFlags flags; 2941 static_cast<deUint32>(viewports.size()), // deUint32 viewportCount; 2942 de::dataOrNull(viewports), // const VkViewport* pViewports; 2943 static_cast<deUint32>(scissors.size()), // deUint32 scissorCount; 2944 de::dataOrNull(scissors), // const VkRect2D* pScissors; 2945 }; 2946 2947 const VkPipelineRasterizationStateCreateInfo rasterizationStateCreateInfo = { 2948 VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, // VkStructureType sType; 2949 nullptr, // const void* pNext; 2950 0u, // VkPipelineRasterizationStateCreateFlags flags; 2951 VK_FALSE, // VkBool32 depthClampEnable; 2952 (fragModule == DE_NULL ? 
VK_TRUE : VK_FALSE), // VkBool32 rasterizerDiscardEnable;
2953 VK_POLYGON_MODE_FILL, // VkPolygonMode polygonMode;
2954 VK_CULL_MODE_NONE, // VkCullModeFlags cullMode;
2955 VK_FRONT_FACE_CLOCKWISE, // VkFrontFace frontFace;
2956 VK_FALSE, // VkBool32 depthBiasEnable;
2957 0.0f, // float depthBiasConstantFactor;
2958 0.0f, // float depthBiasClamp;
2959 0.0f, // float depthBiasSlopeFactor;
2960 1.0f, // float lineWidth;
2961 };
2962
2963 const VkPipelineMultisampleStateCreateInfo multisampleStateCreateInfo = {
2964 VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO, // VkStructureType sType;
2965 nullptr, // const void* pNext;
2966 0u, // VkPipelineMultisampleStateCreateFlags flags;
2967 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits rasterizationSamples;
2968 VK_FALSE, // VkBool32 sampleShadingEnable;
2969 1.0f, // float minSampleShading;
2970 nullptr, // const VkSampleMask* pSampleMask;
2971 VK_FALSE, // VkBool32 alphaToCoverageEnable;
2972 VK_FALSE, // VkBool32 alphaToOneEnable;
2973 };
2974
2975 const VkPipelineDepthStencilStateCreateInfo depthStencilStateCreateInfo = initVulkanStructure();
2976
2977 const VkPipelineColorBlendStateCreateInfo colorBlendStateCreateInfo = initVulkanStructure();
2978
2979 return makeGraphicsPipeline(vkd, device, pipelineLayout,
2980 vertModule, tescModule, teseModule, geomModule, fragModule,
2981 renderPass, 0u, &vertexInputStateCreateInfo, &inputAssemblyStateCreateInfo,
2982 (hasTess ? &tessellationStateCreateInfo : nullptr), &viewportStateCreateInfo,
2983 &rasterizationStateCreateInfo, &multisampleStateCreateInfo,
2984 &depthStencilStateCreateInfo, &colorBlendStateCreateInfo, nullptr);
2985}
2986
2987Move<VkFramebuffer> buildFramebuffer (const DeviceInterface& vkd, VkDevice device, VkRenderPass renderPass, const std::vector<Resource>& resources)
2988{
2989 const auto extent = getDefaultExtent();
2990
2991 std::vector<VkImageView> inputAttachments;
2992 for (const auto& resource : resources)
2993 {
2994 if (resource.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
2995 inputAttachments.push_back(resource.imageView.get());
2996 }
2997
2998 const VkFramebufferCreateInfo framebufferCreateInfo =
2999 {
3000 VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, // VkStructureType sType;
3001 nullptr, // const void* pNext;
3002 0u, // VkFramebufferCreateFlags flags;
3003 renderPass, // VkRenderPass renderPass;
3004 static_cast<deUint32>(inputAttachments.size()), // deUint32 attachmentCount;
3005 de::dataOrNull(inputAttachments), // const VkImageView* pAttachments;
3006 extent.width, // deUint32 width;
3007 extent.height, // deUint32 height;
3008 extent.depth, // deUint32 layers;
3009 };
3010
3011 return createFramebuffer(vkd, device, &framebufferCreateInfo);
3012}
3013
3014tcu::TestStatus MutableTypesInstance::iterate ()
3015{
3016 const auto device = m_context.getDevice();
3017 const auto physDev = m_context.getPhysicalDevice();
3018 const auto qIndex = m_context.getUniversalQueueFamilyIndex();
3019 const auto queue = m_context.getUniversalQueue();
3020
3021 const auto& vki = m_context.getInstanceInterface();
3022 const auto& vkd = m_context.getDeviceInterface();
3023 auto& alloc = m_context.getDefaultAllocator();
3024 const auto& paramSet = m_params.descriptorSet;
3025
3026 const auto numIterations = paramSet->maxTypes();
3027 const bool useExternalImage = needsExternalImage(*m_params.descriptorSet);
3028 const bool useExternalSampler = needsExternalSampler(*m_params.descriptorSet);
3029 const auto stageFlags = m_params.getStageFlags();
3030 const bool
srcSetNeeded = (m_params.updateType == UpdateType::COPY); 3031 const bool updateAfterBind = (m_params.updateMoment == UpdateMoment::UPDATE_AFTER_BIND); 3032 const auto bindPoint = m_params.getBindPoint(); 3033 const bool rayTracing = isRayTracingStage(m_params.testingStage); 3034 const bool useAABBs = (m_params.testingStage == TestingStage::INTERSECTION); 3035 3036 // Resources for each iteration. 3037 std::vector<std::vector<Resource>> allResources; 3038 allResources.reserve(numIterations); 3039 3040 // Command pool. 3041 const auto cmdPool = makeCommandPool(vkd, device, qIndex); 3042 3043 // Descriptor pool and set for the active (dst) descriptor set. 3044 const auto dstPoolFlags = m_params.getDstPoolCreateFlags(); 3045 const auto dstLayoutFlags = m_params.getDstLayoutCreateFlags(); 3046 3047 const auto dstPool = paramSet->makeDescriptorPool(vkd, device, m_params.poolMutableStrategy, dstPoolFlags); 3048 const auto dstLayout = paramSet->makeDescriptorSetLayout(vkd, device, stageFlags, dstLayoutFlags); 3049 const auto varCount = paramSet->getVariableDescriptorCount(); 3050 3051 using VariableCountInfoPtr = de::MovePtr<VkDescriptorSetVariableDescriptorCountAllocateInfo>; 3052 3053 VariableCountInfoPtr dstVariableCountInfo; 3054 if (varCount) 3055 { 3056 dstVariableCountInfo = VariableCountInfoPtr(new VkDescriptorSetVariableDescriptorCountAllocateInfo); 3057 *dstVariableCountInfo = initVulkanStructure(); 3058 3059 dstVariableCountInfo->descriptorSetCount = 1u; 3060 dstVariableCountInfo->pDescriptorCounts = &(varCount.get()); 3061 } 3062 const auto dstSet = makeDescriptorSet(vkd, device, dstPool.get(), dstLayout.get(), dstVariableCountInfo.get()); 3063 3064 // Source pool and set (optional). 3065 const auto srcPoolFlags = m_params.getSrcPoolCreateFlags(); 3066 const auto srcLayoutFlags = m_params.getSrcLayoutCreateFlags(); 3067 DescriptorSetPtr iterationSrcSet; 3068 Move<VkDescriptorPool> srcPool; 3069 Move<VkDescriptorSetLayout> srcLayout; 3070 Move<VkDescriptorSet> srcSet; 3071 3072 // Extra set for external resources and output buffer. 
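    // The extra set is bound at set index 1. Binding 0 always holds the output storage buffer; depending on the
    // variant, the set may also hold an externally-created sampled image, a standalone sampler and/or a top-level
    // acceleration structure, in that binding order.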
    std::vector<Resource> extraResources;
    extraResources.emplace_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, vkd, device, alloc, qIndex, queue, useAABBs, 0u, numIterations);
    if (useExternalImage)
        extraResources.emplace_back(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, vkd, device, alloc, qIndex, queue, useAABBs, getExternalSampledImageValue());
    if (useExternalSampler)
        extraResources.emplace_back(VK_DESCRIPTOR_TYPE_SAMPLER, vkd, device, alloc, qIndex, queue, useAABBs, 0u);
    if (rayTracing)
        extraResources.emplace_back(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, vkd, device, alloc, qIndex, queue, useAABBs, 0u);

    Move<VkDescriptorPool> extraPool;
    {
        DescriptorPoolBuilder poolBuilder;
        poolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
        if (useExternalImage)
            poolBuilder.addType(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
        if (useExternalSampler)
            poolBuilder.addType(VK_DESCRIPTOR_TYPE_SAMPLER);
        if (rayTracing)
            poolBuilder.addType(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
        extraPool = poolBuilder.build(vkd, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
    }

    Move<VkDescriptorSetLayout> extraLayout;
    {
        DescriptorSetLayoutBuilder layoutBuilder;
        layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1u, stageFlags, nullptr);
        if (useExternalImage)
            layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1u, stageFlags, nullptr);
        if (useExternalSampler)
            layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_SAMPLER, 1u, stageFlags, nullptr);
        if (rayTracing)
        {
            // The extra acceleration structure is used from the ray generation shader only.
            layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, 1u, VK_SHADER_STAGE_RAYGEN_BIT_KHR, nullptr);
        }
        extraLayout = layoutBuilder.build(vkd, device);
    }

    const auto extraSet = makeDescriptorSet(vkd, device, extraPool.get(), extraLayout.get());

    // Update extra set.
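    // The descriptor info structures below are heap-allocated so the pointers handed to the update builder remain
    // valid until update() is called; bindingCount advances through the same binding order used when creating
    // extraResources above.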
    using DescriptorBufferInfoPtr = de::MovePtr<VkDescriptorBufferInfo>;
    using DescriptorImageInfoPtr  = de::MovePtr<VkDescriptorImageInfo>;
    using DescriptorASInfoPtr     = de::MovePtr<VkWriteDescriptorSetAccelerationStructureKHR>;

    deUint32                bindingCount = 0u;
    DescriptorBufferInfoPtr bufferInfoPtr;
    DescriptorImageInfoPtr  imageInfoPtr;
    DescriptorImageInfoPtr  samplerInfoPtr;
    DescriptorASInfoPtr     asWriteInfoPtr;

    const auto outputBufferSize = static_cast<VkDeviceSize>(sizeof(deUint32) * static_cast<size_t>(numIterations));
    bufferInfoPtr = DescriptorBufferInfoPtr(new VkDescriptorBufferInfo(makeDescriptorBufferInfo(extraResources[bindingCount++].bufferWithMemory->get(), 0ull, outputBufferSize)));
    if (useExternalImage)
        imageInfoPtr = DescriptorImageInfoPtr(new VkDescriptorImageInfo(makeDescriptorImageInfo(DE_NULL, extraResources[bindingCount++].imageView.get(), VK_IMAGE_LAYOUT_GENERAL)));
    if (useExternalSampler)
        samplerInfoPtr = DescriptorImageInfoPtr(new VkDescriptorImageInfo(makeDescriptorImageInfo(extraResources[bindingCount++].sampler.get(), DE_NULL, VK_IMAGE_LAYOUT_GENERAL)));
    if (rayTracing)
    {
        asWriteInfoPtr  = DescriptorASInfoPtr(new VkWriteDescriptorSetAccelerationStructureKHR);
        *asWriteInfoPtr = initVulkanStructure();
        asWriteInfoPtr->accelerationStructureCount = 1u;
        asWriteInfoPtr->pAccelerationStructures    = extraResources[bindingCount++].asData.tlas.get()->getPtr();
    }

    {
        bindingCount = 0u;
        DescriptorSetUpdateBuilder updateBuilder;
        updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, bufferInfoPtr.get());
        if (useExternalImage)
            updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, imageInfoPtr.get());
        if (useExternalSampler)
            updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_SAMPLER, samplerInfoPtr.get());
        if (rayTracing)
            updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, asWriteInfoPtr.get());
        updateBuilder.update(vkd, device);
    }

    // Push constants.
    const deUint32 zero = 0u;
    const VkPushConstantRange pcRange = {stageFlags, 0u /*offset*/, static_cast<deUint32>(sizeof(zero)) /*size*/};

    // Needed for some test variants.
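    // Stages under test that cannot form a complete pipeline on their own get passthrough companions: a passthrough
    // vertex shader for the fragment, geometry and tessellation variants; the complementary tessellation shader for
    // the tesc/tese variants; and passthrough ray generation (plus miss, for intersection) shaders for the ray
    // tracing variants.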
    Move<VkShaderModule> vertPassthrough;
    Move<VkShaderModule> tesePassthrough;
    Move<VkShaderModule> tescPassthrough;
    Move<VkShaderModule> rgenPassthrough;
    Move<VkShaderModule> missPassthrough;

    if (m_params.testingStage == TestingStage::FRAGMENT
        || m_params.testingStage == TestingStage::GEOMETRY
        || m_params.testingStage == TestingStage::TESS_CONTROL
        || m_params.testingStage == TestingStage::TESS_EVAL)
    {
        vertPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("vert"), 0u);
    }

    if (m_params.testingStage == TestingStage::TESS_CONTROL)
    {
        tesePassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("tese"), 0u);
    }

    if (m_params.testingStage == TestingStage::TESS_EVAL)
    {
        tescPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("tesc"), 0u);
    }

    if (m_params.testingStage == TestingStage::CLOSEST_HIT
        || m_params.testingStage == TestingStage::ANY_HIT
        || m_params.testingStage == TestingStage::INTERSECTION
        || m_params.testingStage == TestingStage::MISS
        || m_params.testingStage == TestingStage::CALLABLE)
    {
        rgenPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("rgen"), 0u);
    }

    if (m_params.testingStage == TestingStage::INTERSECTION)
    {
        missPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("miss"), 0u);
    }

    for (deUint32 iteration = 0u; iteration < numIterations; ++iteration)
    {
        // Generate source set for the current iteration.
        if (srcSetNeeded)
        {
            // Free previous descriptor set before rebuilding the pool.
            srcSet          = Move<VkDescriptorSet>();
            iterationSrcSet = paramSet->genSourceSet(m_params.sourceSetStrategy, iteration);
            srcPool         = iterationSrcSet->makeDescriptorPool(vkd, device, m_params.poolMutableStrategy, srcPoolFlags);
            srcLayout       = iterationSrcSet->makeDescriptorSetLayout(vkd, device, stageFlags, srcLayoutFlags);

            const auto srcVarCount = iterationSrcSet->getVariableDescriptorCount();
            VariableCountInfoPtr srcVariableCountInfo;

            if (srcVarCount)
            {
                srcVariableCountInfo  = VariableCountInfoPtr(new VkDescriptorSetVariableDescriptorCountAllocateInfo);
                *srcVariableCountInfo = initVulkanStructure();

                srcVariableCountInfo->descriptorSetCount = 1u;
                srcVariableCountInfo->pDescriptorCounts  = &(srcVarCount.get());
            }

            srcSet = makeDescriptorSet(vkd, device, srcPool.get(), srcLayout.get(), srcVariableCountInfo.get());
        }

        // Set layouts and sets used in the pipeline.
        const std::vector<VkDescriptorSetLayout> setLayouts = {dstLayout.get(), extraLayout.get()};
        const std::vector<VkDescriptorSet>       usedSets   = {dstSet.get(), extraSet.get()};

        // Create resources.
        allResources.emplace_back(paramSet->createResources(vkd, device, alloc, qIndex, queue, iteration, useAABBs));
        const auto& resources = allResources.back();

        // Make pipeline for the current iteration.
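        // Each iteration uses a dedicated shader module, generated to match the descriptor types the mutable
        // bindings take on in this iteration, so the pipeline is rebuilt from scratch every time.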
        const auto pipelineLayout = makePipelineLayout(vkd, device, static_cast<deUint32>(setLayouts.size()), de::dataOrNull(setLayouts), 1u, &pcRange);
        const auto moduleName     = shaderName(iteration);
        const auto shaderModule   = createShaderModule(vkd, device, m_context.getBinaryCollection().get(moduleName), 0u);

        Move<VkPipeline>    pipeline;
        Move<VkRenderPass>  renderPass;
        Move<VkFramebuffer> framebuffer;

        deUint32 shaderGroupHandleSize    = 0u;
        deUint32 shaderGroupBaseAlignment = 1u;

        de::MovePtr<BufferWithMemory> raygenSBT;
        de::MovePtr<BufferWithMemory> missSBT;
        de::MovePtr<BufferWithMemory> hitSBT;
        de::MovePtr<BufferWithMemory> callableSBT;

        VkStridedDeviceAddressRegionKHR raygenSBTRegion   = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
        VkStridedDeviceAddressRegionKHR missSBTRegion     = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
        VkStridedDeviceAddressRegionKHR hitSBTRegion      = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
        VkStridedDeviceAddressRegionKHR callableSBTRegion = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);

        if (bindPoint == VK_PIPELINE_BIND_POINT_COMPUTE)
            pipeline = makeComputePipeline(vkd, device, pipelineLayout.get(), 0u, shaderModule.get(), 0u, nullptr);
        else if (bindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS)
        {
            VkShaderModule vertModule = DE_NULL;
            VkShaderModule teseModule = DE_NULL;
            VkShaderModule tescModule = DE_NULL;
            VkShaderModule geomModule = DE_NULL;
            VkShaderModule fragModule = DE_NULL;

            if (m_params.testingStage == TestingStage::VERTEX)
                vertModule = shaderModule.get();
            else if (m_params.testingStage == TestingStage::FRAGMENT)
            {
                vertModule = vertPassthrough.get();
                fragModule = shaderModule.get();
            }
            else if (m_params.testingStage == TestingStage::GEOMETRY)
            {
                vertModule = vertPassthrough.get();
                geomModule = shaderModule.get();
            }
            else if (m_params.testingStage == TestingStage::TESS_CONTROL)
            {
                vertModule = vertPassthrough.get();
                teseModule = tesePassthrough.get();
                tescModule = shaderModule.get();
            }
            else if (m_params.testingStage == TestingStage::TESS_EVAL)
            {
                vertModule = vertPassthrough.get();
                tescModule = tescPassthrough.get();
                teseModule = shaderModule.get();
            }
            else
                DE_ASSERT(false);

            renderPass  = buildRenderPass(vkd, device, resources);
            pipeline    = buildGraphicsPipeline(vkd, device, pipelineLayout.get(), vertModule, tescModule, teseModule, geomModule, fragModule, renderPass.get());
            framebuffer = buildFramebuffer(vkd, device, renderPass.get(), resources);
        }
        else if (bindPoint == VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR)
        {
            const auto rayTracingPipeline      = de::newMovePtr<RayTracingPipeline>();
            const auto rayTracingPropertiesKHR = makeRayTracingProperties(vki, physDev);
            shaderGroupHandleSize    = rayTracingPropertiesKHR->getShaderGroupHandleSize();
            shaderGroupBaseAlignment = rayTracingPropertiesKHR->getShaderGroupBaseAlignment();

            VkShaderModule rgenModule = DE_NULL;
            VkShaderModule isecModule = DE_NULL;
            VkShaderModule ahitModule = DE_NULL;
            VkShaderModule chitModule = DE_NULL;
            VkShaderModule missModule = DE_NULL;
            VkShaderModule callModule = DE_NULL;

            const deUint32 rgenGroup = 0u;
            deUint32       hitGroup  = 0u;
            deUint32       missGroup = 0u;
            deUint32       callGroup = 0u;

            if (m_params.testingStage == TestingStage::RAY_GEN)
            {
                rgenModule = shaderModule.get();
                rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
            }
            else if (m_params.testingStage == TestingStage::INTERSECTION)
            {
                hitGroup   = 1u;
                missGroup  = 2u;
                rgenModule = rgenPassthrough.get();
                missModule = missPassthrough.get();
                isecModule = shaderModule.get();
                rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
                rayTracingPipeline->addShader(VK_SHADER_STAGE_INTERSECTION_BIT_KHR, isecModule, hitGroup);
                rayTracingPipeline->addShader(VK_SHADER_STAGE_MISS_BIT_KHR, missModule, missGroup);
            }
            else if (m_params.testingStage == TestingStage::ANY_HIT)
            {
                hitGroup   = 1u;
                rgenModule = rgenPassthrough.get();
                ahitModule = shaderModule.get();
                rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
                rayTracingPipeline->addShader(VK_SHADER_STAGE_ANY_HIT_BIT_KHR, ahitModule, hitGroup);
            }
            else if (m_params.testingStage == TestingStage::CLOSEST_HIT)
            {
                hitGroup   = 1u;
                rgenModule = rgenPassthrough.get();
                chitModule = shaderModule.get();
                rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
                rayTracingPipeline->addShader(VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, chitModule, hitGroup);
            }
            else if (m_params.testingStage == TestingStage::MISS)
            {
                missGroup  = 1u;
                rgenModule = rgenPassthrough.get();
                missModule = shaderModule.get();
                rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
                rayTracingPipeline->addShader(VK_SHADER_STAGE_MISS_BIT_KHR, missModule, missGroup);
            }
            else if (m_params.testingStage == TestingStage::CALLABLE)
            {
                callGroup  = 1u;
                rgenModule = rgenPassthrough.get();
                callModule = shaderModule.get();
                rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
                rayTracingPipeline->addShader(VK_SHADER_STAGE_CALLABLE_BIT_KHR, callModule, callGroup);
            }
            else
                DE_ASSERT(false);

            pipeline = rayTracingPipeline->createPipeline(vkd, device, pipelineLayout.get());

            raygenSBT       = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, rgenGroup, 1u);
            raygenSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, raygenSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);

            if (missGroup > 0u)
            {
                missSBT       = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, missGroup, 1u);
                missSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, missSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
            }

            if (hitGroup > 0u)
            {
                hitSBT       = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, hitGroup, 1u);
                hitSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, hitSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
            }

            if (callGroup > 0u)
            {
                callableSBT       = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, callGroup, 1u);
                callableSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, callableSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
            }
        }
        else
            DE_ASSERT(false);

        // Command buffer for the current iteration.
        const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
        const auto cmdBuffer    = cmdBufferPtr.get();

        beginCommandBuffer(vkd, cmdBuffer);

        const Step steps[] = {
            (updateAfterBind ? Step::BIND : Step::UPDATE),
            (updateAfterBind ? Step::UPDATE : Step::BIND)
        };

        for (const auto& step : steps)
        {
            if (step == Step::BIND)
            {
                vkd.cmdBindPipeline(cmdBuffer, bindPoint, pipeline.get());
                vkd.cmdBindDescriptorSets(cmdBuffer, bindPoint, pipelineLayout.get(), 0u, static_cast<deUint32>(usedSets.size()), de::dataOrNull(usedSets), 0u, nullptr);
            }
            else // Step::UPDATE
            {
                if (srcSetNeeded)
                {
                    // Note: these operations must be applied to paramSet and not iterationSrcSet. The latter is a
                    // compatible set with correct, compatible bindings, but when a binding has been changed from
                    // non-mutable to mutable, or to an extended mutable type, the descriptor type lists of the
                    // mutable bindings in iterationSrcSet are not in iteration order like they are in the original
                    // set, so they must not be used to update or copy sets.
                    paramSet->updateDescriptorSet(vkd, device, srcSet.get(), iteration, resources);
                    paramSet->copyDescriptorSet(vkd, device, srcSet.get(), dstSet.get());
                }
                else
                {
                    paramSet->updateDescriptorSet(vkd, device, dstSet.get(), iteration, resources);
                }
            }
        }

        // Run shader.
        vkd.cmdPushConstants(cmdBuffer, pipelineLayout.get(), stageFlags, 0u, static_cast<deUint32>(sizeof(zero)), &zero);

        if (bindPoint == VK_PIPELINE_BIND_POINT_COMPUTE)
            vkd.cmdDispatch(cmdBuffer, 1u, 1u, 1u);
        else if (bindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS)
        {
            const auto extent     = getDefaultExtent();
            const auto renderArea = makeRect2D(extent);

            beginRenderPass(vkd, cmdBuffer, renderPass.get(), framebuffer.get(), renderArea);
            vkd.cmdDraw(cmdBuffer, 3u, 1u, 0u, 0u);
            endRenderPass(vkd, cmdBuffer);
        }
        else if (bindPoint == VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR)
        {
            vkd.cmdTraceRaysKHR(cmdBuffer, &raygenSBTRegion, &missSBTRegion, &hitSBTRegion, &callableSBTRegion, 1u, 1u, 1u);
        }
        else
            DE_ASSERT(false);

        endCommandBuffer(vkd, cmdBuffer);
        submitCommandsAndWait(vkd, device, queue, cmdBuffer);

        // Verify output buffer.
        {
            const auto outputBufferVal = extraResources[0].getStoredValue(vkd, device, alloc, qIndex, queue, iteration);
            DE_ASSERT(static_cast<bool>(outputBufferVal));

            const auto expectedValue = getExpectedOutputBufferValue();
            if (outputBufferVal.get() != expectedValue)
            {
                std::ostringstream msg;
                msg << "Iteration " << iteration << ": unexpected value found in output buffer (expected " << expectedValue << " and found " << outputBufferVal.get() << ")";
                TCU_FAIL(msg.str());
            }
        }

        // Verify descriptor writes.
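        // For each shader-writable descriptor, the shader is expected to have stored the descriptor's initial value
        // ORed with the stored-value mask, so the expected result is recomputed on the host below and compared per
        // descriptor.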
        {
            size_t     resourcesOffset = 0;
            const auto writeMask       = getStoredValueMask();
            const auto numBindings     = paramSet->numBindings();

            for (deUint32 bindingIdx = 0u; bindingIdx < numBindings; ++bindingIdx)
            {
                const auto binding      = paramSet->getBinding(bindingIdx);
                const auto bindingTypes = binding->typesAtIteration(iteration);

                for (size_t descriptorIdx = 0; descriptorIdx < bindingTypes.size(); ++descriptorIdx)
                {
                    const auto& descriptorType = bindingTypes[descriptorIdx];
                    if (!isShaderWritable(descriptorType))
                        continue;

                    const auto& resource       = resources[resourcesOffset + descriptorIdx];
                    const auto  initialValue   = resource.initialValue;
                    const auto  storedValuePtr = resource.getStoredValue(vkd, device, alloc, qIndex, queue);

                    DE_ASSERT(static_cast<bool>(storedValuePtr));
                    const auto storedValue   = storedValuePtr.get();
                    const auto expectedValue = (initialValue | writeMask);
                    if (expectedValue != storedValue)
                    {
                        std::ostringstream msg;
                        msg << "Iteration " << iteration << ": descriptor at binding " << bindingIdx << " index " << descriptorIdx
                            << " with type " << de::toString(descriptorType) << " contains unexpected value " << std::hex
                            << storedValue << " (expected " << expectedValue << ")";
                        TCU_FAIL(msg.str());
                    }
                }

                resourcesOffset += bindingTypes.size();
            }
        }
    }

    return tcu::TestStatus::pass("Pass");
}

using GroupPtr = de::MovePtr<tcu::TestCaseGroup>;

void createMutableTestVariants (tcu::TestContext& testCtx, tcu::TestCaseGroup* parentGroup, const DescriptorSetPtr& descriptorSet, const std::vector<TestingStage>& stagesToTest)
{
    const struct
    {
        UpdateType  updateType;
        const char* name;
    } updateTypes[] = {
        {UpdateType::WRITE, "update_write"},
        {UpdateType::COPY,  "update_copy"},
    };

    const struct
    {
        SourceSetStrategy sourceSetStrategy;
        const char*       name;
    } sourceStrategies[] = {
        {SourceSetStrategy::MUTABLE,    "mutable_source"},
        {SourceSetStrategy::NONMUTABLE, "nonmutable_source"},
        {SourceSetStrategy::NO_SOURCE,  "no_source"},
    };

    const struct
    {
        SourceSetType sourceSetType;
        const char*   name;
    } sourceTypes[] = {
        {SourceSetType::NORMAL,    "normal_source"},
        {SourceSetType::HOST_ONLY, "host_only_source"},
        {SourceSetType::NO_SOURCE, "no_source"},
    };

    const struct
    {
        PoolMutableStrategy poolMutableStrategy;
        const char*         name;
    } poolStrategies[] = {
        {PoolMutableStrategy::KEEP_TYPES,   "pool_same_types"},
        {PoolMutableStrategy::NO_TYPES,     "pool_no_types"},
        {PoolMutableStrategy::EXPAND_TYPES, "pool_expand_types"},
    };

    const struct
    {
        UpdateMoment updateMoment;
        const char*  name;
    } updateMoments[] = {
        {UpdateMoment::NORMAL,            "pre_update"},
        {UpdateMoment::UPDATE_AFTER_BIND, "update_after_bind"},
    };

    const struct
    {
        ArrayAccessType arrayAccessType;
        const char*     name;
    } arrayAccessTypes[] = {
        {ArrayAccessType::CONSTANT,      "index_constant"},
        {ArrayAccessType::PUSH_CONSTANT, "index_push_constant"},
        {ArrayAccessType::NO_ARRAY,      "no_array"},
    };

    const struct StageAndName
    {
        TestingStage testingStage;
        const char*  name;
    } testStageList[] = {
        {TestingStage::COMPUTE,      "comp"},
        {TestingStage::VERTEX,       "vert"},
        {TestingStage::TESS_CONTROL, "tesc"},
        {TestingStage::TESS_EVAL,    "tese"},
        {TestingStage::GEOMETRY,     "geom"},
        {TestingStage::FRAGMENT,     "frag"},
        {TestingStage::RAY_GEN,      "rgen"},
        {TestingStage::INTERSECTION, "isec"},
        {TestingStage::ANY_HIT,      "ahit"},
        {TestingStage::CLOSEST_HIT,  "chit"},
        {TestingStage::MISS,         "miss"},
        {TestingStage::CALLABLE,     "call"},
    };

    const bool hasArrays           = descriptorSet->hasArrays();
    const bool hasInputAttachments = usesInputAttachments(*descriptorSet);

    for (const auto& ut : updateTypes)
    {
        GroupPtr updateGroup(new tcu::TestCaseGroup(testCtx, ut.name, ""));

        for (const auto& srcStrategy : sourceStrategies)
        {
            // Skip combinations that make no sense.
            if (ut.updateType == UpdateType::WRITE && srcStrategy.sourceSetStrategy != SourceSetStrategy::NO_SOURCE)
                continue;

            if (ut.updateType == UpdateType::COPY && srcStrategy.sourceSetStrategy == SourceSetStrategy::NO_SOURCE)
                continue;

            if (srcStrategy.sourceSetStrategy == SourceSetStrategy::NONMUTABLE && descriptorSet->needsAnyAliasing())
                continue;

            GroupPtr srcStrategyGroup(new tcu::TestCaseGroup(testCtx, srcStrategy.name, ""));

            for (const auto& srcType : sourceTypes)
            {
                // Skip combinations that make no sense.
                if (ut.updateType == UpdateType::WRITE && srcType.sourceSetType != SourceSetType::NO_SOURCE)
                    continue;

                if (ut.updateType == UpdateType::COPY && srcType.sourceSetType == SourceSetType::NO_SOURCE)
                    continue;

                GroupPtr srcTypeGroup(new tcu::TestCaseGroup(testCtx, srcType.name, ""));

                for (const auto& poolStrategy : poolStrategies)
                {
                    GroupPtr poolStrategyGroup(new tcu::TestCaseGroup(testCtx, poolStrategy.name, ""));

                    for (const auto& moment : updateMoments)
                    {
                        //if (moment.updateMoment == UpdateMoment::UPDATE_AFTER_BIND && srcType.sourceSetType == SourceSetType::HOST_ONLY)
                        //    continue;

                        if (moment.updateMoment == UpdateMoment::UPDATE_AFTER_BIND && hasInputAttachments)
                            continue;

                        GroupPtr momentGroup(new tcu::TestCaseGroup(testCtx, moment.name, ""));

                        for (const auto& accessType : arrayAccessTypes)
                        {
                            // Skip combinations that make no sense.
                            if (hasArrays && accessType.arrayAccessType == ArrayAccessType::NO_ARRAY)
                                continue;

                            if (!hasArrays && accessType.arrayAccessType != ArrayAccessType::NO_ARRAY)
                                continue;

                            GroupPtr accessTypeGroup(new tcu::TestCaseGroup(testCtx, accessType.name, ""));

                            for (const auto& testStage : stagesToTest)
                            {
                                const auto beginItr = std::begin(testStageList);
                                const auto endItr   = std::end(testStageList);
                                const auto iter     = std::find_if(beginItr, endItr, [testStage] (const StageAndName& ts) { return ts.testingStage == testStage; });

                                DE_ASSERT(iter != endItr);
                                const auto& stage = *iter;

                                if (hasInputAttachments && stage.testingStage != TestingStage::FRAGMENT)
                                    continue;

                                TestParams params = {
                                    descriptorSet,
                                    ut.updateType,
                                    srcStrategy.sourceSetStrategy,
                                    srcType.sourceSetType,
                                    poolStrategy.poolMutableStrategy,
                                    moment.updateMoment,
                                    accessType.arrayAccessType,
                                    stage.testingStage,
                                };

                                accessTypeGroup->addChild(new MutableTypesTest(testCtx, stage.name, "", params));
                            }

                            momentGroup->addChild(accessTypeGroup.release());
                        }

                        poolStrategyGroup->addChild(momentGroup.release());
                    }

                    srcTypeGroup->addChild(poolStrategyGroup.release());
                }

                srcStrategyGroup->addChild(srcTypeGroup.release());
            }

            updateGroup->addChild(srcStrategyGroup.release());
        }

        parentGroup->addChild(updateGroup.release());
    }
}

} // anonymous namespace

std::string descriptorTypeStr (VkDescriptorType descriptorType)
{
    static const auto prefixLen = std::string("VK_DESCRIPTOR_TYPE_").size();
    return de::toLower(de::toString(descriptorType).substr(prefixLen));
}

tcu::TestCaseGroup* createDescriptorValveMutableTests (tcu::TestContext& testCtx)
{
    GroupPtr mainGroup(new tcu::TestCaseGroup(testCtx, "mutable_descriptor", "Tests for VK_VALVE_mutable_descriptor_type"));

    const VkDescriptorType basicDescriptorTypes[] = {
        VK_DESCRIPTOR_TYPE_SAMPLER,
        VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
        VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
        VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
        VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
        VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
        VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
        VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
        VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
        VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
    };

    static const auto mandatoryTypes = getMandatoryMutableTypes();

    using StageVec = std::vector<TestingStage>;

    const StageVec allStages =
    {
        TestingStage::COMPUTE,
        TestingStage::VERTEX,
        TestingStage::TESS_CONTROL,
        TestingStage::TESS_EVAL,
        TestingStage::GEOMETRY,
        TestingStage::FRAGMENT,
        TestingStage::RAY_GEN,
        TestingStage::INTERSECTION,
        TestingStage::ANY_HIT,
        TestingStage::CLOSEST_HIT,
        TestingStage::MISS,
        TestingStage::CALLABLE,
    };

    const StageVec reducedStages =
    {
        TestingStage::COMPUTE,
        TestingStage::VERTEX,
        TestingStage::FRAGMENT,
        TestingStage::RAY_GEN,
    };

    const StageVec computeOnly =
    {
        TestingStage::COMPUTE,
    };

    // Basic tests with a single mutable descriptor.
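    // Each case declares one VK_DESCRIPTOR_TYPE_MUTABLE_VALVE binding restricted to a single concrete descriptor
    // type, and is exercised in every supported shader stage.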
    {
        GroupPtr singleCases(new tcu::TestCaseGroup(testCtx, "single", "Basic mutable descriptor tests with a single mutable descriptor"));

        for (const auto& descriptorType : basicDescriptorTypes)
        {
            const auto groupName = descriptorTypeStr(descriptorType);
            const std::vector<VkDescriptorType> actualTypes(1u, descriptorType);

            DescriptorSetPtr setPtr;
            {
                DescriptorSet::BindingPtrVector setBindings;
                setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, actualTypes));
                setPtr = DescriptorSetPtr(new DescriptorSet(setBindings));
            }

            GroupPtr subGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));
            createMutableTestVariants(testCtx, subGroup.get(), setPtr, allStages);

            singleCases->addChild(subGroup.release());
        }

        // Case with a single descriptor that iterates several types.
        {
            DescriptorSetPtr setPtr;
            {
                DescriptorSet::BindingPtrVector setBindings;
                setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, mandatoryTypes));
                setPtr = DescriptorSetPtr(new DescriptorSet(setBindings));
            }

            GroupPtr subGroup(new tcu::TestCaseGroup(testCtx, "all_mandatory", ""));
            createMutableTestVariants(testCtx, subGroup.get(), setPtr, reducedStages);

            singleCases->addChild(subGroup.release());
        }

        // Cases verifying that switching from any descriptor type to any other type is possible.
        {
            GroupPtr subGroup(new tcu::TestCaseGroup(testCtx, "switches", "Test switching from one to another descriptor type works as expected"));

            for (const auto& initialDescriptorType : basicDescriptorTypes)
            {
                for (const auto& finalDescriptorType : basicDescriptorTypes)
                {
                    if (initialDescriptorType == finalDescriptorType)
                        continue;

                    const std::vector<VkDescriptorType> mutableTypes { initialDescriptorType, finalDescriptorType };
                    DescriptorSet::BindingPtrVector setBindings;
                    setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, mutableTypes));

                    DescriptorSetPtr setPtr = DescriptorSetPtr(new DescriptorSet(setBindings));

                    const auto groupName = descriptorTypeStr(initialDescriptorType) + "_" + descriptorTypeStr(finalDescriptorType);
                    GroupPtr combinationGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));
                    createMutableTestVariants(testCtx, combinationGroup.get(), setPtr, reducedStages);
                    subGroup->addChild(combinationGroup.release());
                }
            }

            singleCases->addChild(subGroup.release());
        }

        mainGroup->addChild(singleCases.release());
    }

    // Cases with a single non-mutable descriptor. This provides some basic checks to verify copying to non-mutable bindings works.
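    // These bindings use a regular descriptor type with an empty mutable type list, acting as a baseline for the
    // write and copy update paths.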
    {
        GroupPtr singleNonMutableGroup(new tcu::TestCaseGroup(testCtx, "single_nonmutable", "Tests using a single non-mutable descriptor"));

        for (const auto& descriptorType : basicDescriptorTypes)
        {
            DescriptorSet::BindingPtrVector bindings;
            bindings.emplace_back(new SingleBinding(descriptorType, std::vector<VkDescriptorType>()));
            DescriptorSetPtr descriptorSet(new DescriptorSet(bindings));

            const auto groupName = descriptorTypeStr(descriptorType);
            GroupPtr descGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));

            createMutableTestVariants(testCtx, descGroup.get(), descriptorSet, reducedStages);
            singleNonMutableGroup->addChild(descGroup.release());
        }

        mainGroup->addChild(singleNonMutableGroup.release());
    }

    const struct {
        bool        unbounded;
        const char* name;
    } unboundedCases[] = {
        {false, "constant_size"},
        {true,  "unbounded"},
    };

    const struct {
        bool        aliasing;
        const char* name;
    } aliasingCases[] = {
        {false, "noaliasing"},
        {true,  "aliasing"},
    };

    const struct {
        bool        oneArrayOnly;
        bool        mixNonMutable;
        const char* groupName;
        const char* groupDesc;
    } arrayCountGroups[] = {
        {true,  false, "one_array",             "Tests using an array of mutable descriptors"},
        {false, false, "multiple_arrays",       "Tests using multiple arrays of mutable descriptors"},
        {false, true,  "multiple_arrays_mixed", "Tests using multiple arrays of mutable descriptors mixed with arrays of nonmutable ones"},
    };

    for (const auto& variant : arrayCountGroups)
    {
        GroupPtr arrayGroup(new tcu::TestCaseGroup(testCtx, variant.groupName, variant.groupDesc));

        for (const auto& unboundedCase : unboundedCases)
        {
            GroupPtr unboundedGroup(new tcu::TestCaseGroup(testCtx, unboundedCase.name, ""));

            for (const auto& aliasingCase : aliasingCases)
            {
                GroupPtr aliasingGroup(new tcu::TestCaseGroup(testCtx, aliasingCase.name, ""));

                DescriptorSet::BindingPtrVector setBindings;

                // Prepare descriptors for this test variant.
                for (size_t mandatoryTypesRotation = 0; mandatoryTypesRotation < mandatoryTypes.size(); ++mandatoryTypesRotation)
                {
                    const bool isLastBinding = (variant.oneArrayOnly || mandatoryTypesRotation == mandatoryTypes.size() - 1u);
                    const bool isUnbounded   = (unboundedCase.unbounded && isLastBinding);

                    // Create a rotation of the mandatory types for each mutable array binding.
                    auto mandatoryTypesVector = mandatoryTypes;
                    {
                        const auto beginPtr = &mandatoryTypesVector[0];
                        const auto endPtr   = beginPtr + mandatoryTypesVector.size();
                        std::rotate(beginPtr, &mandatoryTypesVector[mandatoryTypesRotation], endPtr);
                    }

                    std::vector<SingleBinding> arrayBindings;

                    if (aliasingCase.aliasing)
                    {
                        // With aliasing, the descriptor types rotate in each descriptor.
                        for (size_t typeIdx = 0; typeIdx < mandatoryTypesVector.size(); ++typeIdx)
                        {
                            auto rotatedTypes   = mandatoryTypesVector;
                            const auto beginPtr = &rotatedTypes[0];
                            const auto endPtr   = beginPtr + rotatedTypes.size();

                            std::rotate(beginPtr, &rotatedTypes[typeIdx], endPtr);

                            arrayBindings.emplace_back(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, rotatedTypes);
                        }
                    }
                    else
                    {
                        // Without aliasing, all descriptors use the same type at the same time.
                        const SingleBinding noAliasingBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, mandatoryTypesVector);
                        arrayBindings.resize(mandatoryTypesVector.size(), noAliasingBinding);
                    }

                    setBindings.emplace_back(new ArrayBinding(isUnbounded, arrayBindings));

                    if (variant.mixNonMutable && !isUnbounded)
                    {
                        // Create a non-mutable array binding interleaved with the other ones.
                        const SingleBinding nonMutableBinding(mandatoryTypes[mandatoryTypesRotation], std::vector<VkDescriptorType>());
                        std::vector<SingleBinding> nonMutableBindings(mandatoryTypes.size(), nonMutableBinding);
                        setBindings.emplace_back(new ArrayBinding(false, nonMutableBindings));
                    }

                    if (variant.oneArrayOnly)
                        break;
                }

                DescriptorSetPtr descriptorSet(new DescriptorSet(setBindings));
                createMutableTestVariants(testCtx, aliasingGroup.get(), descriptorSet, computeOnly);

                unboundedGroup->addChild(aliasingGroup.release());
            }

            arrayGroup->addChild(unboundedGroup.release());
        }

        mainGroup->addChild(arrayGroup.release());
    }

    // Cases with a single mutable binding followed by an array of mutable bindings.
    // The array will use a single type beyond the mandatory ones.
    {
        GroupPtr singleAndArrayGroup(new tcu::TestCaseGroup(testCtx, "single_and_array", "Tests using a single mutable binding followed by a mutable array binding"));

        for (const auto& descriptorType : basicDescriptorTypes)
        {
            // Input attachments will not use arrays.
            if (descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
                continue;

            if (de::contains(begin(mandatoryTypes), end(mandatoryTypes), descriptorType))
                continue;

            const auto groupName = descriptorTypeStr(descriptorType);
            GroupPtr descTypeGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));

            for (const auto& aliasingCase : aliasingCases)
            {
                GroupPtr aliasingGroup(new tcu::TestCaseGroup(testCtx, aliasingCase.name, ""));

                DescriptorSet::BindingPtrVector setBindings;
                std::vector<SingleBinding>      arrayBindings;

                // Single mutable descriptor as the first binding.
                setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, mandatoryTypes));

                // Descriptor array as the second binding.
                auto arrayBindingDescTypes = mandatoryTypes;
                arrayBindingDescTypes.push_back(descriptorType);

                if (aliasingCase.aliasing)
                {
                    // With aliasing, the descriptor types rotate in each descriptor.
                    for (size_t typeIdx = 0; typeIdx < arrayBindingDescTypes.size(); ++typeIdx)
                    {
                        auto rotatedTypes   = arrayBindingDescTypes;
                        const auto beginPtr = &rotatedTypes[0];
                        const auto endPtr   = beginPtr + rotatedTypes.size();

                        std::rotate(beginPtr, &rotatedTypes[typeIdx], endPtr);

                        arrayBindings.emplace_back(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, rotatedTypes);
                    }
                }
                else
                {
                    // Without aliasing, all descriptors use the same type at the same time.
                    const SingleBinding noAliasingBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, arrayBindingDescTypes);
                    arrayBindings.resize(arrayBindingDescTypes.size(), noAliasingBinding);
                }

                // Second binding: array binding.
                setBindings.emplace_back(new ArrayBinding(false /*unbounded*/, arrayBindings));

                // Create set and test variants.
                DescriptorSetPtr descriptorSet(new DescriptorSet(setBindings));
                createMutableTestVariants(testCtx, aliasingGroup.get(), descriptorSet, computeOnly);

                descTypeGroup->addChild(aliasingGroup.release());
            }

            singleAndArrayGroup->addChild(descTypeGroup.release());
        }

        mainGroup->addChild(singleAndArrayGroup.release());
    }

    // Cases with several mutable non-array bindings.
    {
        GroupPtr multipleGroup    (new tcu::TestCaseGroup(testCtx, "multiple", "Tests using multiple mutable bindings"));
        GroupPtr mutableOnlyGroup (new tcu::TestCaseGroup(testCtx, "mutable_only", "Tests using only mutable descriptors"));
        GroupPtr mixedGroup       (new tcu::TestCaseGroup(testCtx, "mixed", "Tests mixing mutable descriptors and non-mutable descriptors"));

        // Each descriptor will have a different type in every iteration, like in the one_array aliasing case.
        for (int groupIdx = 0; groupIdx < 2; ++groupIdx)
        {
            const bool mixed = (groupIdx == 1);
            DescriptorSet::BindingPtrVector setBindings;

            for (size_t typeIdx = 0; typeIdx < mandatoryTypes.size(); ++typeIdx)
            {
                auto rotatedTypes   = mandatoryTypes;
                const auto beginPtr = &rotatedTypes[0];
                const auto endPtr   = beginPtr + rotatedTypes.size();

                std::rotate(beginPtr, &rotatedTypes[typeIdx], endPtr);
                setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, rotatedTypes));

                // Additional non-mutable binding interleaved with the mutable ones.
                if (mixed)
                    setBindings.emplace_back(new SingleBinding(rotatedTypes[0], std::vector<VkDescriptorType>()));
            }
            DescriptorSetPtr descriptorSet(new DescriptorSet(setBindings));

            const auto dstGroup = (mixed ? mixedGroup.get() : mutableOnlyGroup.get());
            createMutableTestVariants(testCtx, dstGroup, descriptorSet, computeOnly);
        }

        multipleGroup->addChild(mutableOnlyGroup.release());
        multipleGroup->addChild(mixedGroup.release());
        mainGroup->addChild(multipleGroup.release());
    }

    return mainGroup.release();
}

} // BindingModel
} // vkt