1 /*------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
4 *
5 * Copyright (c) 2016 The Khronos Group Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 *//*!
20 * \file
21 * \brief Synchronization operation abstraction
22 *//*--------------------------------------------------------------------*/
23
24 #include "vktSynchronizationOperation.hpp"
25 #include "vkDefs.hpp"
26 #include "vktTestCase.hpp"
27 #include "vktTestCaseUtil.hpp"
28 #include "vkRef.hpp"
29 #include "vkRefUtil.hpp"
30 #include "vkMemUtil.hpp"
31 #include "vkBarrierUtil.hpp"
32 #include "vkQueryUtil.hpp"
33 #include "vkTypeUtil.hpp"
34 #include "vkImageUtil.hpp"
35 #include "vkBuilderUtil.hpp"
36 #include "vkCmdUtil.hpp"
37 #include "vkObjUtil.hpp"
38 #include "deUniquePtr.hpp"
39 #include "tcuTestLog.hpp"
40 #include "tcuTextureUtil.hpp"
41 #include <vector>
42 #include <sstream>
43
44 namespace vkt
45 {
46 namespace synchronization
47 {
48 namespace
49 {
50 using namespace vk;
51
52 enum Constants
53 {
54 MAX_IMAGE_DIMENSION_2D = 0x1000u,
55 MAX_UBO_RANGE = 0x4000u,
56 MAX_UPDATE_BUFFER_SIZE = 0x10000u,
57 };
58
59 enum BufferType
60 {
61 BUFFER_TYPE_UNIFORM,
62 BUFFER_TYPE_STORAGE,
63 BUFFER_TYPE_UNIFORM_TEXEL,
64 };
65
66 enum AccessMode
67 {
68 ACCESS_MODE_READ,
69 ACCESS_MODE_WRITE,
70 };
71
72 enum PipelineType
73 {
74 PIPELINE_TYPE_GRAPHICS,
75 PIPELINE_TYPE_COMPUTE,
76 };
77
78 static const char* const s_perVertexBlock = "gl_PerVertex {\n"
79 " vec4 gl_Position;\n"
80 "}";
81
82 static const SyncInfo emptySyncInfo =
83 {
84 0, // VkPipelineStageFlags stageMask;
85 0, // VkAccessFlags accessMask;
86 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
87 };
88
89 std::string getShaderStageName(VkShaderStageFlagBits stage)
90 {
91 switch (stage)
92 {
93 default:
94 DE_FATAL("Unhandled stage!");
95 return "";
96 case VK_SHADER_STAGE_COMPUTE_BIT:
97 return "compute";
98 case VK_SHADER_STAGE_FRAGMENT_BIT:
99 return "fragment";
100 case VK_SHADER_STAGE_VERTEX_BIT:
101 return "vertex";
102 case VK_SHADER_STAGE_GEOMETRY_BIT:
103 return "geometry";
104 case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
105 return "tess_control";
106 case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
107 return "tess_eval";
108 }
109 }
110
111 //! A pipeline that can be embedded inside an operation.
112 class Pipeline
113 {
114 public:
115 virtual ~Pipeline (void) {}
116 virtual void recordCommands (OperationContext& context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet) = 0;
117 };
118
119 //! Vertex data that covers the whole viewport with two triangles.
120 class VertexGrid
121 {
122 public:
123 VertexGrid (OperationContext& context)
124 : m_vertexFormat (VK_FORMAT_R32G32B32A32_SFLOAT)
125 , m_vertexStride (tcu::getPixelSize(mapVkFormat(m_vertexFormat)))
126 {
127 const DeviceInterface& vk = context.getDeviceInterface();
128 const VkDevice device = context.getDevice();
129 Allocator& allocator = context.getAllocator();
130
131 // Vertex positions
132 {
133 m_vertexData.push_back(tcu::Vec4( 1.0f, 1.0f, 0.0f, 1.0f));
134 m_vertexData.push_back(tcu::Vec4(-1.0f, 1.0f, 0.0f, 1.0f));
135 m_vertexData.push_back(tcu::Vec4(-1.0f, -1.0f, 0.0f, 1.0f));
136
137 m_vertexData.push_back(tcu::Vec4(-1.0f, -1.0f, 0.0f, 1.0f));
138 m_vertexData.push_back(tcu::Vec4( 1.0f, -1.0f, 0.0f, 1.0f));
139 m_vertexData.push_back(tcu::Vec4( 1.0f, 1.0f, 0.0f, 1.0f));
140 }
141
142 {
143 const VkDeviceSize vertexDataSizeBytes = m_vertexData.size() * sizeof(m_vertexData[0]);
144
145 m_vertexBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator, makeBufferCreateInfo(vertexDataSizeBytes, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT), MemoryRequirement::HostVisible));
146 DE_ASSERT(sizeof(m_vertexData[0]) == m_vertexStride);
147
148 {
149 const Allocation& alloc = m_vertexBuffer->getAllocation();
150
151 deMemcpy(alloc.getHostPtr(), &m_vertexData[0], static_cast<std::size_t>(vertexDataSizeBytes));
152 flushAlloc(vk, device, alloc);
153 }
154 }
155
156 // Indices
157 {
158 const VkDeviceSize indexBufferSizeBytes = sizeof(deUint32) * m_vertexData.size();
159 const deUint32 numIndices = static_cast<deUint32>(m_vertexData.size());
160
161 m_indexBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator, makeBufferCreateInfo(indexBufferSizeBytes, VK_BUFFER_USAGE_INDEX_BUFFER_BIT), MemoryRequirement::HostVisible));
162
163 {
164 const Allocation& alloc = m_indexBuffer->getAllocation();
165 deUint32* const pData = static_cast<deUint32*>(alloc.getHostPtr());
166
167 for (deUint32 i = 0; i < numIndices; ++i)
168 pData[i] = i;
169
170 flushAlloc(vk, device, alloc);
171 }
172 }
173 }
174
175 VkFormat getVertexFormat (void) const { return m_vertexFormat; }
176 deUint32 getVertexStride (void) const { return m_vertexStride; }
177 VkIndexType getIndexType (void) const { return VK_INDEX_TYPE_UINT32; }
178 deUint32 getNumVertices (void) const { return static_cast<deUint32>(m_vertexData.size()); }
179 deUint32 getNumIndices (void) const { return getNumVertices(); }
180 VkBuffer getVertexBuffer (void) const { return **m_vertexBuffer; }
181 VkBuffer getIndexBuffer (void) const { return **m_indexBuffer; }
182
183 private:
184 const VkFormat m_vertexFormat;
185 const deUint32 m_vertexStride;
186 std::vector<tcu::Vec4> m_vertexData;
187 de::MovePtr<Buffer> m_vertexBuffer;
188 de::MovePtr<Buffer> m_indexBuffer;
189 };
190
191 //! Add flags for all shader stages required to support a particular stage (e.g. fragment requires vertex as well).
192 VkShaderStageFlags getRequiredStages (const VkShaderStageFlagBits stage)
193 {
194 VkShaderStageFlags flags = 0;
195
196 DE_ASSERT(stage == VK_SHADER_STAGE_COMPUTE_BIT || (stage & VK_SHADER_STAGE_COMPUTE_BIT) == 0);
197
198 if (stage & VK_SHADER_STAGE_ALL_GRAPHICS)
199 flags |= VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
200
201 if (stage & (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))
202 flags |= VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
203
204 if (stage & VK_SHADER_STAGE_GEOMETRY_BIT)
205 flags |= VK_SHADER_STAGE_GEOMETRY_BIT;
206
207 if (stage & VK_SHADER_STAGE_COMPUTE_BIT)
208 flags |= VK_SHADER_STAGE_COMPUTE_BIT;
209
210 return flags;
211 }
212
213 //! Check that SSBO read/write is available and that all shader stages are supported.
214 void requireFeaturesForSSBOAccess (OperationContext& context, const VkShaderStageFlags usedStages)
215 {
216 const InstanceInterface& vki = context.getInstanceInterface();
217 const VkPhysicalDevice physDevice = context.getPhysicalDevice();
218 FeatureFlags flags = (FeatureFlags)0;
219
220 if (usedStages & VK_SHADER_STAGE_FRAGMENT_BIT)
221 flags |= FEATURE_FRAGMENT_STORES_AND_ATOMICS;
222
223 if (usedStages & (VK_SHADER_STAGE_ALL_GRAPHICS & (~VK_SHADER_STAGE_FRAGMENT_BIT)))
224 flags |= FEATURE_VERTEX_PIPELINE_STORES_AND_ATOMICS;
225
226 if (usedStages & VK_SHADER_STAGE_GEOMETRY_BIT)
227 flags |= FEATURE_GEOMETRY_SHADER;
228
229 if (usedStages & (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))
230 flags |= FEATURE_TESSELLATION_SHADER;
231
232 requireFeatures(vki, physDevice, flags);
233 }
234
235 Data getHostBufferData (const OperationContext& context, const Buffer& hostBuffer, const VkDeviceSize size)
236 {
237 const DeviceInterface& vk = context.getDeviceInterface();
238 const VkDevice device = context.getDevice();
239 const Allocation& alloc = hostBuffer.getAllocation();
240 const Data data =
241 {
242 static_cast<std::size_t>(size), // std::size_t size;
243 static_cast<deUint8*>(alloc.getHostPtr()), // const deUint8* data;
244 };
245
246 invalidateAlloc(vk, device, alloc);
247
248 return data;
249 }
250
251 void setHostBufferData (const OperationContext& context, const Buffer& hostBuffer, const Data& data)
252 {
253 const DeviceInterface& vk = context.getDeviceInterface();
254 const VkDevice device = context.getDevice();
255 const Allocation& alloc = hostBuffer.getAllocation();
256
257 deMemcpy(alloc.getHostPtr(), data.data, data.size);
258 flushAlloc(vk, device, alloc);
259 }
260
261 void assertValidShaderStage (const VkShaderStageFlagBits stage)
262 {
263 switch (stage)
264 {
265 case VK_SHADER_STAGE_VERTEX_BIT:
266 case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
267 case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
268 case VK_SHADER_STAGE_GEOMETRY_BIT:
269 case VK_SHADER_STAGE_FRAGMENT_BIT:
270 case VK_SHADER_STAGE_COMPUTE_BIT:
271 // OK
272 break;
273
274 default:
275 DE_FATAL("Invalid shader stage");
276 break;
277 }
278 }
279
280 VkPipelineStageFlags pipelineStageFlagsFromShaderStageFlagBits (const VkShaderStageFlagBits shaderStage)
281 {
282 switch (shaderStage)
283 {
284 case VK_SHADER_STAGE_VERTEX_BIT: return VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR;
285 case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT: return VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR;
286 case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT: return VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR;
287 case VK_SHADER_STAGE_GEOMETRY_BIT: return VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR;
288 case VK_SHADER_STAGE_FRAGMENT_BIT: return VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR;
289 case VK_SHADER_STAGE_COMPUTE_BIT: return VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR;
290
291 // Other usages are probably an error, so flag that.
292 default:
293 DE_FATAL("Invalid shader stage");
294 return (VkPipelineStageFlags)0;
295 }
296 }
297
298 //! Fill destination buffer with a repeating pattern.
299 void fillPattern (void* const pData, const VkDeviceSize size, bool useIndexPattern = false)
300 {
301 // There are two pattern options - most operations use primePattern,
302 // indexPattern is only needed for testing the vertex index buffer.
303 static const deUint8 primePattern[] = { 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31 };
304 static const deUint32 indexPattern[] = { 0, 1, 2, 3, 4 };
305
306 const deUint8* pattern = (useIndexPattern ? reinterpret_cast<const deUint8*>(indexPattern)
307 : primePattern);
308 const deUint32 patternSize = static_cast<deUint32>(useIndexPattern
309 ? DE_LENGTH_OF_ARRAY(indexPattern)*sizeof(deUint32)
310 : DE_LENGTH_OF_ARRAY(primePattern));
311 deUint8* const pBytes = static_cast<deUint8*>(pData);
312
313 for (deUint32 i = 0; i < size; ++i)
314 pBytes[i] = pattern[i % patternSize];
315 }
316
317 //! Get size in bytes of a pixel buffer with given extent.
318 VkDeviceSize getPixelBufferSize (const VkFormat format, const VkExtent3D& extent)
319 {
320 const int pixelSize = tcu::getPixelSize(mapVkFormat(format));
321 return (pixelSize * extent.width * extent.height * extent.depth);
322 }
323
324 //! Determine the size of a 2D image that can hold sizeBytes data.
325 VkExtent3D get2DImageExtentWithSize (const VkDeviceSize sizeBytes, const deUint32 pixelSize)
326 {
327 const deUint32 size = static_cast<deUint32>(sizeBytes / pixelSize);
328
329 DE_ASSERT(size <= MAX_IMAGE_DIMENSION_2D * MAX_IMAGE_DIMENSION_2D);
330
331 return makeExtent3D(
332 std::min(size, static_cast<deUint32>(MAX_IMAGE_DIMENSION_2D)),
333 (size / MAX_IMAGE_DIMENSION_2D) + (size % MAX_IMAGE_DIMENSION_2D != 0 ? 1u : 0u),
334 1u);
335 }
336
337 VkClearValue makeClearValue (const VkFormat format)
338 {
339 if (isDepthStencilFormat(format))
340 return makeClearValueDepthStencil(0.4f, 21u);
341 else
342 {
343 if (isIntFormat(format) || isUintFormat(format))
344 return makeClearValueColorU32(8u, 16u, 24u, 32u);
345 else
346 return makeClearValueColorF32(0.25f, 0.49f, 0.75f, 1.0f);
347 }
348 }
349
350 void clearPixelBuffer (tcu::PixelBufferAccess& pixels, const VkClearValue& clearValue)
351 {
352 const tcu::TextureFormat format = pixels.getFormat();
353 const tcu::TextureChannelClass channelClass = tcu::getTextureChannelClass(format.type);
354
355 if (format.order == tcu::TextureFormat::D)
356 {
357 for (int z = 0; z < pixels.getDepth(); z++)
358 for (int y = 0; y < pixels.getHeight(); y++)
359 for (int x = 0; x < pixels.getWidth(); x++)
360 pixels.setPixDepth(clearValue.depthStencil.depth, x, y, z);
361 }
362 else if (format.order == tcu::TextureFormat::S)
363 {
364 for (int z = 0; z < pixels.getDepth(); z++)
365 for (int y = 0; y < pixels.getHeight(); y++)
366 for (int x = 0; x < pixels.getWidth(); x++)
367 pixels.setPixStencil(clearValue.depthStencil.stencil, x, y, z);
368 }
369 else if (format.order == tcu::TextureFormat::DS)
370 {
371 for (int z = 0; z < pixels.getDepth(); z++)
372 for (int y = 0; y < pixels.getHeight(); y++)
373 for (int x = 0; x < pixels.getWidth(); x++)
374 {
375 pixels.setPixDepth(clearValue.depthStencil.depth, x, y, z);
376 pixels.setPixStencil(clearValue.depthStencil.stencil, x, y, z);
377 }
378 }
379 else if (channelClass == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER || channelClass == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER)
380 {
381 const tcu::UVec4 color (clearValue.color.uint32);
382
383 for (int z = 0; z < pixels.getDepth(); z++)
384 for (int y = 0; y < pixels.getHeight(); y++)
385 for (int x = 0; x < pixels.getWidth(); x++)
386 pixels.setPixel(color, x, y, z);
387 }
388 else
389 {
390 const tcu::Vec4 color (clearValue.color.float32);
391
392 for (int z = 0; z < pixels.getDepth(); z++)
393 for (int y = 0; y < pixels.getHeight(); y++)
394 for (int x = 0; x < pixels.getWidth(); x++)
395 pixels.setPixel(color, x, y, z);
396 }
397 }
398
399 VkImageViewType getImageViewType (const VkImageType imageType)
400 {
401 switch (imageType)
402 {
403 case VK_IMAGE_TYPE_1D: return VK_IMAGE_VIEW_TYPE_1D;
404 case VK_IMAGE_TYPE_2D: return VK_IMAGE_VIEW_TYPE_2D;
405 case VK_IMAGE_TYPE_3D: return VK_IMAGE_VIEW_TYPE_3D;
406
407 default:
408 DE_FATAL("Unknown image type");
409 return VK_IMAGE_VIEW_TYPE_LAST;
410 }
411 }
412
413 std::string getShaderImageType (const VkFormat format, const VkImageType imageType)
414 {
415 const tcu::TextureFormat texFormat = mapVkFormat(format);
416 const std::string formatPart = tcu::getTextureChannelClass(texFormat.type) == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER ? "u" :
417 tcu::getTextureChannelClass(texFormat.type) == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER ? "i" : "";
418 switch (imageType)
419 {
420 case VK_IMAGE_TYPE_1D: return formatPart + "image1D";
421 case VK_IMAGE_TYPE_2D: return formatPart + "image2D";
422 case VK_IMAGE_TYPE_3D: return formatPart + "image3D";
423
424 default:
425 DE_FATAL("Unknown image type");
426 return "";
427 }
428 }
429
430 std::string getShaderImageFormatQualifier (const VkFormat format)
431 {
432 const tcu::TextureFormat texFormat = mapVkFormat(format);
433 const char* orderPart = DE_NULL;
434 const char* typePart = DE_NULL;
435
436 switch (texFormat.order)
437 {
438 case tcu::TextureFormat::R: orderPart = "r"; break;
439 case tcu::TextureFormat::RG: orderPart = "rg"; break;
440 case tcu::TextureFormat::RGB: orderPart = "rgb"; break;
441 case tcu::TextureFormat::RGBA: orderPart = "rgba"; break;
442
443 default:
444 DE_FATAL("Unksupported texture channel order");
445 break;
446 }
447
448 switch (texFormat.type)
449 {
450 case tcu::TextureFormat::FLOAT: typePart = "32f"; break;
451 case tcu::TextureFormat::HALF_FLOAT: typePart = "16f"; break;
452
453 case tcu::TextureFormat::UNSIGNED_INT32: typePart = "32ui"; break;
454 case tcu::TextureFormat::UNSIGNED_INT16: typePart = "16ui"; break;
455 case tcu::TextureFormat::UNSIGNED_INT8: typePart = "8ui"; break;
456
457 case tcu::TextureFormat::SIGNED_INT32: typePart = "32i"; break;
458 case tcu::TextureFormat::SIGNED_INT16: typePart = "16i"; break;
459 case tcu::TextureFormat::SIGNED_INT8: typePart = "8i"; break;
460
461 case tcu::TextureFormat::UNORM_INT16: typePart = "16"; break;
462 case tcu::TextureFormat::UNORM_INT8: typePart = "8"; break;
463
464 case tcu::TextureFormat::SNORM_INT16: typePart = "16_snorm"; break;
465 case tcu::TextureFormat::SNORM_INT8: typePart = "8_snorm"; break;
466
467 default:
468 DE_FATAL("Unksupported texture channel type");
469 break;
470 }
471
472 return std::string(orderPart) + typePart;
473 }
474
475 namespace FillUpdateBuffer
476 {
477
478 enum BufferOp
479 {
480 BUFFER_OP_FILL,
481 BUFFER_OP_UPDATE,
482 BUFFER_OP_UPDATE_WITH_INDEX_PATTERN,
483 };
484
485 class Implementation : public Operation
486 {
487 public:
488 Implementation (OperationContext& context, Resource& resource, const BufferOp bufferOp)
489 : m_context (context)
490 , m_resource (resource)
491 , m_fillValue (0x13)
492 , m_bufferOp (bufferOp)
493 {
494 DE_ASSERT((m_resource.getBuffer().size % sizeof(deUint32)) == 0);
495 DE_ASSERT(m_bufferOp == BUFFER_OP_FILL || m_resource.getBuffer().size <= MAX_UPDATE_BUFFER_SIZE);
496
497 m_data.resize(static_cast<size_t>(m_resource.getBuffer().size));
498
499 if (m_bufferOp == BUFFER_OP_FILL)
500 {
501 const std::size_t size = m_data.size() / sizeof(m_fillValue);
502 deUint32* const pData = reinterpret_cast<deUint32*>(&m_data[0]);
503
504 for (deUint32 i = 0; i < size; ++i)
505 pData[i] = m_fillValue;
506 }
507 else if (m_bufferOp == BUFFER_OP_UPDATE)
508 {
509 fillPattern(&m_data[0], m_data.size());
510 }
511 else if(m_bufferOp == BUFFER_OP_UPDATE_WITH_INDEX_PATTERN)
512 {
513 fillPattern(&m_data[0], m_data.size(), true);
514 }
515 }
516
517 void recordCommands (const VkCommandBuffer cmdBuffer)
518 {
519 const DeviceInterface& vk = m_context.getDeviceInterface();
520
521 if (m_bufferOp == BUFFER_OP_FILL)
522 {
523 vk.cmdFillBuffer(cmdBuffer, m_resource.getBuffer().handle, m_resource.getBuffer().offset, m_resource.getBuffer().size, m_fillValue);
524
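// Make the fill result available to later transfer reads of this buffer (matches getOutSyncInfo below).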
525 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
526 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
527 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
528 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
529 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
530 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
531 m_resource.getBuffer().handle, // VkBuffer buffer
532 0u, // VkDeviceSize offset
533 m_resource.getBuffer().size // VkDeviceSize size
534 );
535 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
536 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
537 }
538 else
539 vk.cmdUpdateBuffer(cmdBuffer, m_resource.getBuffer().handle, m_resource.getBuffer().offset, m_resource.getBuffer().size, reinterpret_cast<deUint32*>(&m_data[0]));
540 }
541
542 SyncInfo getInSyncInfo (void) const
543 {
544 return emptySyncInfo;
545 }
546
547 SyncInfo getOutSyncInfo (void) const
548 {
549 const SyncInfo syncInfo =
550 {
551 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
552 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
553 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
554 };
555
556 return syncInfo;
557 }
558
559 Data getData (void) const
560 {
561 const Data data =
562 {
563 m_data.size(), // std::size_t size;
564 &m_data[0], // const deUint8* data;
565 };
566 return data;
567 }
568
569 void setData (const Data& data)
570 {
571 deMemcpy(&m_data[0], data.data, data.size);
572 }
573
574 private:
575 OperationContext& m_context;
576 Resource& m_resource;
577 std::vector<deUint8> m_data;
578 const deUint32 m_fillValue;
579 const BufferOp m_bufferOp;
580 };
581
582 class Support : public OperationSupport
583 {
584 public:
585 Support (const ResourceDescription& resourceDesc, const BufferOp bufferOp)
586 : m_resourceDesc (resourceDesc)
587 , m_bufferOp (bufferOp)
588 {
589 DE_ASSERT(m_bufferOp == BUFFER_OP_FILL || m_bufferOp == BUFFER_OP_UPDATE || m_bufferOp == BUFFER_OP_UPDATE_WITH_INDEX_PATTERN);
590 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER || m_resourceDesc.type == RESOURCE_TYPE_INDEX_BUFFER);
591 }
592
593 deUint32 getInResourceUsageFlags (void) const
594 {
595 return 0;
596 }
597
598 deUint32 getOutResourceUsageFlags (void) const
599 {
600 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
601 }
602
603 VkQueueFlags getQueueFlags (const OperationContext& context) const
604 {
605 if (m_bufferOp == BUFFER_OP_FILL && !context.isDeviceFunctionalitySupported("VK_KHR_maintenance1"))
606 {
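// Without VK_KHR_maintenance1, vkCmdFillBuffer is not supported on transfer-only queues, so require graphics or compute.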
607 return VK_QUEUE_COMPUTE_BIT | VK_QUEUE_GRAPHICS_BIT;
608 }
609
610 return VK_QUEUE_TRANSFER_BIT;
611 }
612
613 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
614 {
615 return de::MovePtr<Operation>(new Implementation(context, resource, m_bufferOp));
616 }
617
618 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
619 {
620 DE_ASSERT(0);
621 return de::MovePtr<Operation>();
622 }
623
624 private:
625 const ResourceDescription m_resourceDesc;
626 const BufferOp m_bufferOp;
627 };
628
629 } // FillUpdateBuffer ns
630
631 namespace CopyBuffer
632 {
633
634 class Implementation : public Operation
635 {
636 public:
637 Implementation (OperationContext& context, Resource& resource, const AccessMode mode)
638 : m_context (context)
639 , m_resource (resource)
640 , m_mode (mode)
641 {
642 const DeviceInterface& vk = m_context.getDeviceInterface();
643 const VkDevice device = m_context.getDevice();
644 Allocator& allocator = m_context.getAllocator();
645 const VkBufferUsageFlags hostBufferUsage = (m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_DST_BIT : VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
646
647 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator, makeBufferCreateInfo(m_resource.getBuffer().size, hostBufferUsage), MemoryRequirement::HostVisible));
648
649 const Allocation& alloc = m_hostBuffer->getAllocation();
650
651 if (m_mode == ACCESS_MODE_READ)
652 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_resource.getBuffer().size));
653 else
654 fillPattern(alloc.getHostPtr(), m_resource.getBuffer().size);
655
656 flushAlloc(vk, device, alloc);
657 }
658
659 void recordCommands (const VkCommandBuffer cmdBuffer)
660 {
661 const DeviceInterface& vk = m_context.getDeviceInterface();
662 const VkBufferCopy copyRegion = makeBufferCopy(0u, 0u, m_resource.getBuffer().size);
663 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
664
665 if (m_mode == ACCESS_MODE_READ)
666 {
667 vk.cmdCopyBuffer(cmdBuffer, m_resource.getBuffer().handle, **m_hostBuffer, 1u, &copyRegion);
668
669 // Insert a barrier so copied data is available to the host
670 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
671 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
672 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
673 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
674 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
675 **m_hostBuffer, // VkBuffer buffer
676 0u, // VkDeviceSize offset
677 m_resource.getBuffer().size // VkDeviceSize size
678 );
679 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
680 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
681 }
682 else
683 {
684 // Insert a barrier so buffer data is available to the device
685 //const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
686 // VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
687 // VK_ACCESS_2_HOST_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
688 // VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
689 // VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
690 // **m_hostBuffer, // VkBuffer buffer
691 // 0u, // VkDeviceSize offset
692 // m_resource.getBuffer().size // VkDeviceSize size
693 //);
694 //VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
695 //synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
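// Note: the explicit host-write barrier above is left out; flushed host writes are made available to the device implicitly at queue submission.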
696
697 vk.cmdCopyBuffer(cmdBuffer, **m_hostBuffer, m_resource.getBuffer().handle, 1u, &copyRegion);
698 }
699 }
700
701 SyncInfo getInSyncInfo (void) const
702 {
703 const VkAccessFlags access = (m_mode == ACCESS_MODE_READ ? VK_ACCESS_2_TRANSFER_READ_BIT_KHR : 0);
704 const SyncInfo syncInfo =
705 {
706 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
707 access, // VkAccessFlags accessMask;
708 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
709 };
710 return syncInfo;
711 }
712
713 SyncInfo getOutSyncInfo (void) const
714 {
715 const VkAccessFlags access = (m_mode == ACCESS_MODE_WRITE ? VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR : 0);
716 const SyncInfo syncInfo =
717 {
718 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
719 access, // VkAccessFlags accessMask;
720 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
721 };
722 return syncInfo;
723 }
724
725 Data getData (void) const
726 {
727 return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
728 }
729
730 void setData (const Data& data)
731 {
732 DE_ASSERT(m_mode == ACCESS_MODE_WRITE);
733 setHostBufferData(m_context, *m_hostBuffer, data);
734 }
735
736 private:
737 OperationContext& m_context;
738 Resource& m_resource;
739 const AccessMode m_mode;
740 de::MovePtr<Buffer> m_hostBuffer;
741 };
742
743 class Support : public OperationSupport
744 {
745 public:
746 Support (const ResourceDescription& resourceDesc, const AccessMode mode)
747 : m_mode (mode)
748 {
749 DE_ASSERT(resourceDesc.type == RESOURCE_TYPE_BUFFER);
750 DE_UNREF(resourceDesc);
751 }
752
753 deUint32 getInResourceUsageFlags (void) const
754 {
755 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_SRC_BIT : 0;
756 }
757
758 deUint32 getOutResourceUsageFlags (void) const
759 {
760 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_TRANSFER_DST_BIT : 0;
761 }
762
763 VkQueueFlags getQueueFlags (const OperationContext& context) const
764 {
765 DE_UNREF(context);
766 return VK_QUEUE_TRANSFER_BIT;
767 }
768
769 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
770 {
771 return de::MovePtr<Operation>(new Implementation(context, resource, m_mode));
772 }
773
774 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
775 {
776 DE_ASSERT(0);
777 return de::MovePtr<Operation>();
778 }
779
780 private:
781 const AccessMode m_mode;
782 };
783
784 class CopyImplementation : public Operation
785 {
786 public:
787 CopyImplementation (OperationContext& context, Resource& inResource, Resource& outResource)
788 : m_context (context)
789 , m_inResource (inResource)
790 , m_outResource (outResource)
791 {
792 }
793
794 void recordCommands (const VkCommandBuffer cmdBuffer)
795 {
796 const DeviceInterface& vk = m_context.getDeviceInterface();
797 const VkBufferCopy copyRegion = makeBufferCopy(0u, 0u, m_inResource.getBuffer().size);
798
799 vk.cmdCopyBuffer(cmdBuffer, m_inResource.getBuffer().handle, m_outResource.getBuffer().handle, 1u, &copyRegion);
800 }
801
802 SyncInfo getInSyncInfo (void) const
803 {
804 const SyncInfo syncInfo =
805 {
806 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
807 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
808 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
809 };
810 return syncInfo;
811 }
812
813 SyncInfo getOutSyncInfo (void) const
814 {
815 const SyncInfo syncInfo =
816 {
817 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
818 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
819 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
820 };
821 return syncInfo;
822 }
823
824 Data getData (void) const
825 {
826 Data data = { 0, DE_NULL };
827 return data;
828 }
829
830 void setData (const Data&)
831 {
832 DE_ASSERT(0);
833 }
834
835 private:
836 OperationContext& m_context;
837 Resource& m_inResource;
838 Resource& m_outResource;
839 de::MovePtr<Buffer> m_hostBuffer;
840 };
841
842 class CopySupport : public OperationSupport
843 {
844 public:
845 CopySupport (const ResourceDescription& resourceDesc)
846 {
847 DE_ASSERT(resourceDesc.type == RESOURCE_TYPE_BUFFER);
848 DE_UNREF(resourceDesc);
849 }
850
851 deUint32 getInResourceUsageFlags (void) const
852 {
853 return VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
854 }
855
856 deUint32 getOutResourceUsageFlags (void) const
857 {
858 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
859 }
860
861 VkQueueFlags getQueueFlags (const OperationContext& context) const
862 {
863 DE_UNREF(context);
864 return VK_QUEUE_TRANSFER_BIT;
865 }
866
867 de::MovePtr<Operation> build (OperationContext&, Resource&) const
868 {
869 DE_ASSERT(0);
870 return de::MovePtr<Operation>();
871 }
872
873 de::MovePtr<Operation> build (OperationContext& context, Resource& inResource, Resource& outResource) const
874 {
875 return de::MovePtr<Operation>(new CopyImplementation(context, inResource, outResource));
876 }
877 };
878
879 } // CopyBuffer ns
880
881 namespace CopyBlitResolveImage
882 {
883
884 class ImplementationBase : public Operation
885 {
886 public:
887 //! Copy/Blit/Resolve etc. operation
888 virtual void recordCopyCommand (const VkCommandBuffer cmdBuffer) = 0;
889
890 //! Get the source stage mask used during reads; added to exercise the new synchronization2 stage masks.
891 virtual VkPipelineStageFlags2KHR getReadSrcStageMask() const = 0;
892
893 ImplementationBase (OperationContext& context, Resource& resource, const AccessMode mode)
894 : m_context (context)
895 , m_resource (resource)
896 , m_mode (mode)
897 , m_bufferSize (getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
898 {
899 const DeviceInterface& vk = m_context.getDeviceInterface();
900 const VkDevice device = m_context.getDevice();
901 Allocator& allocator = m_context.getAllocator();
902
903 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
904 vk, device, allocator, makeBufferCreateInfo(m_bufferSize, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT),
905 MemoryRequirement::HostVisible));
906
907 const Allocation& alloc = m_hostBuffer->getAllocation();
908 if (m_mode == ACCESS_MODE_READ)
909 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_bufferSize));
910 else
911 fillPattern(alloc.getHostPtr(), m_bufferSize);
912 flushAlloc(vk, device, alloc);
913
914 // Staging image
915 const auto& imgResource = m_resource.getImage();
916 m_image = de::MovePtr<Image>(new Image(
917 vk, device, allocator,
918 makeImageCreateInfo(imgResource.imageType, imgResource.extent, imgResource.format,
919 (VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT), VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
920 MemoryRequirement::Any));
921 }
922
923 void recordCommands (const VkCommandBuffer cmdBuffer)
924 {
925 const DeviceInterface& vk = m_context.getDeviceInterface();
926 const VkBufferImageCopy bufferCopyRegion = makeBufferImageCopy(m_resource.getImage().extent, m_resource.getImage().subresourceLayers);
927 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
928
929 // Staging image layout
930 {
931 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
932 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
933 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
934 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
935 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
936 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
937 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
938 **m_image, // VkImage image
939 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
940 );
941 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
942 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
943 }
944
945 if (m_mode == ACCESS_MODE_READ)
946 {
947 // Resource Image -> Staging image
948 recordCopyCommand(cmdBuffer);
949
950 // Staging image layout
951 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
952 getReadSrcStageMask(), // VkPipelineStageFlags2KHR srcStageMask
953 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
954 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
955 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
956 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
957 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
958 **m_image, // VkImage image
959 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
960 );
961 VkDependencyInfoKHR imageDependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
962 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &imageDependencyInfo);
963
964 // Image -> Host buffer
965 vk.cmdCopyImageToBuffer(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_hostBuffer, 1u, &bufferCopyRegion);
966
967 // Insert a barrier so copied data is available to the host
968 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
969 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
970 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
971 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
972 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
973 **m_hostBuffer, // VkBuffer buffer
974 0u, // VkDeviceSize offset
975 m_bufferSize // VkDeviceSize size
976 );
977 VkDependencyInfoKHR bufferDependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
978 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &bufferDependencyInfo);
979 }
980 else
981 {
982 // Host buffer -> Staging image
983 vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &bufferCopyRegion);
984
985 // Staging image layout
986 {
987 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
988 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
989 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
990 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
991 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
992 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
993 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
994 **m_image, // VkImage image
995 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
996 );
997 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
998 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
999 }
1000
1001 // Resource image layout
1002 {
1003 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1004 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
1005 (VkAccessFlags2KHR)0, // VkAccessFlags2KHR srcAccessMask
1006 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
1007 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1008 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1009 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
1010 m_resource.getImage().handle, // VkImage image
1011 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1012 );
1013 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1014 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1015 }
1016
1017 // Staging image -> Resource Image
1018 recordCopyCommand(cmdBuffer);
1019 }
1020 }
1021
1022 SyncInfo getInSyncInfo (void) const
1023 {
1024 const VkAccessFlags2KHR access = (m_mode == ACCESS_MODE_READ ? VK_ACCESS_2_TRANSFER_READ_BIT_KHR : VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR);
1025 const VkImageLayout layout = (m_mode == ACCESS_MODE_READ ? VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL : VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
1026 const SyncInfo syncInfo =
1027 {
1028 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
1029 access, // VkAccessFlags accessMask;
1030 layout, // VkImageLayout imageLayout;
1031 };
1032 return syncInfo;
1033 }
1034
1035 SyncInfo getOutSyncInfo (void) const
1036 {
1037 const VkAccessFlags2KHR access = (m_mode == ACCESS_MODE_READ ? VK_ACCESS_2_TRANSFER_READ_BIT_KHR : VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR);
1038 const VkImageLayout layout = (m_mode == ACCESS_MODE_READ ? VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL : VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
1039 const SyncInfo syncInfo =
1040 {
1041 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
1042 access, // VkAccessFlags accessMask;
1043 layout, // VkImageLayout imageLayout;
1044 };
1045 return syncInfo;
1046 }
1047
1048 Data getData (void) const
1049 {
1050 return getHostBufferData(m_context, *m_hostBuffer, m_bufferSize);
1051 }
1052
1053 void setData (const Data& data)
1054 {
1055 DE_ASSERT(m_mode == ACCESS_MODE_WRITE);
1056 setHostBufferData(m_context, *m_hostBuffer, data);
1057 }
1058
1059 protected:
1060 OperationContext& m_context;
1061 Resource& m_resource;
1062 const AccessMode m_mode;
1063 const VkDeviceSize m_bufferSize;
1064 de::MovePtr<Buffer> m_hostBuffer;
1065 de::MovePtr<Image> m_image;
1066 };
1067
1068 VkOffset3D makeExtentOffset (const Resource& resource)
1069 {
1070 DE_ASSERT(resource.getType() == RESOURCE_TYPE_IMAGE);
1071 const VkExtent3D extent = resource.getImage().extent;
1072
1073 switch (resource.getImage().imageType)
1074 {
1075 case VK_IMAGE_TYPE_1D: return makeOffset3D(extent.width, 1, 1);
1076 case VK_IMAGE_TYPE_2D: return makeOffset3D(extent.width, extent.height, 1);
1077 case VK_IMAGE_TYPE_3D: return makeOffset3D(extent.width, extent.height, extent.depth);
1078 default:
1079 DE_ASSERT(0);
1080 return VkOffset3D();
1081 }
1082 }
1083
1084 VkImageBlit makeBlitRegion (const Resource& resource)
1085 {
1086 const VkImageBlit blitRegion =
1087 {
1088 resource.getImage().subresourceLayers, // VkImageSubresourceLayers srcSubresource;
1089 { makeOffset3D(0, 0, 0), makeExtentOffset(resource) }, // VkOffset3D srcOffsets[2];
1090 resource.getImage().subresourceLayers, // VkImageSubresourceLayers dstSubresource;
1091 { makeOffset3D(0, 0, 0), makeExtentOffset(resource) }, // VkOffset3D dstOffsets[2];
1092 };
1093 return blitRegion;
1094 }
1095
1096 class BlitImplementation : public ImplementationBase
1097 {
1098 public:
1099 BlitImplementation (OperationContext& context, Resource& resource, const AccessMode mode)
1100 : ImplementationBase (context, resource, mode)
1101 , m_blitRegion (makeBlitRegion(m_resource))
1102 {
1103 const InstanceInterface& vki = m_context.getInstanceInterface();
1104 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
1105 const auto& imgResource = m_resource.getImage();
1106 const VkFormatProperties formatProps = getPhysicalDeviceFormatProperties(vki, physDevice, imgResource.format);
1107 const auto& features = ((imgResource.tiling == VK_IMAGE_TILING_LINEAR) ? formatProps.linearTilingFeatures : formatProps.optimalTilingFeatures);
1108 const VkFormatFeatureFlags requiredFlags = (VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT);
1109
1110 // Both BLIT_SRC and BLIT_DST support are required because both images use the same format.
1111 if ((features & requiredFlags) != requiredFlags)
1112 TCU_THROW(NotSupportedError, "Format doesn't support blits");
1113 }
1114
1115 void recordCopyCommand (const VkCommandBuffer cmdBuffer)
1116 {
1117 const DeviceInterface& vk = m_context.getDeviceInterface();
1118
1119 if (m_mode == ACCESS_MODE_READ)
1120 {
1121 // Resource Image -> Staging image
1122 vk.cmdBlitImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1123 1u, &m_blitRegion, VK_FILTER_NEAREST);
1124 }
1125 else
1126 {
1127 // Staging image -> Resource Image
1128 vk.cmdBlitImage(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1129 1u, &m_blitRegion, VK_FILTER_NEAREST);
1130 }
1131 }
1132
1133 VkPipelineStageFlags2KHR getReadSrcStageMask() const
1134 {
1135 return (m_context.getSynchronizationType() == SynchronizationType::LEGACY) ? VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR : VK_PIPELINE_STAGE_2_BLIT_BIT_KHR;
1136 }
1137
1138
1139 private:
1140 const VkImageBlit m_blitRegion;
1141 };
1142
1143 template <typename ImageCopyOrResolve>
1144 ImageCopyOrResolve makeImageRegion (const Resource& resource)
1145 {
1146 return
1147 {
1148 resource.getImage().subresourceLayers, // VkImageSubresourceLayers srcSubresource;
1149 makeOffset3D(0, 0, 0), // VkOffset3D srcOffset;
1150 resource.getImage().subresourceLayers, // VkImageSubresourceLayers dstSubresource;
1151 makeOffset3D(0, 0, 0), // VkOffset3D dstOffset;
1152 resource.getImage().extent, // VkExtent3D extent;
1153 };
1154 }
1155
1156 class CopyImplementation : public ImplementationBase
1157 {
1158 public:
1159 CopyImplementation (OperationContext& context, Resource& resource, const AccessMode mode)
1160 : ImplementationBase (context, resource, mode)
1161 , m_imageCopyRegion (makeImageRegion<VkImageCopy>(m_resource))
1162 {
1163 }
1164
1165 void recordCopyCommand (const VkCommandBuffer cmdBuffer)
1166 {
1167 const DeviceInterface& vk = m_context.getDeviceInterface();
1168
1169 if (m_mode == ACCESS_MODE_READ)
1170 {
1171 // Resource Image -> Staging image
1172 vk.cmdCopyImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_imageCopyRegion);
1173 }
1174 else
1175 {
1176 // Staging image -> Resource Image
1177 vk.cmdCopyImage(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_imageCopyRegion);
1178 }
1179 }
1180
1181 VkPipelineStageFlags2KHR getReadSrcStageMask() const
1182 {
1183 return (m_context.getSynchronizationType() == SynchronizationType::LEGACY) ? VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR : VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
1184 }
1185
1186 private:
1187 const VkImageCopy m_imageCopyRegion;
1188 };
1189
1190 class ResolveImplementation : public ImplementationBase
1191 {
1192 public:
1193 ResolveImplementation(OperationContext& context, Resource& resource, const AccessMode mode)
1194 : ImplementationBase (context, resource, mode)
1195 , m_imageResolveRegion (makeImageRegion<VkImageResolve>(resource))
1196 {
1197 DE_ASSERT(m_mode == ACCESS_MODE_READ);
1198 }
1199
1200 void recordCopyCommand(const VkCommandBuffer cmdBuffer)
1201 {
1202 const DeviceInterface& vk = m_context.getDeviceInterface();
1203
1204 // Resource Image -> Staging image
1205 vk.cmdResolveImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_imageResolveRegion);
1206 }
1207
1208 VkPipelineStageFlags2KHR getReadSrcStageMask() const
1209 {
1210 return (m_context.getSynchronizationType() == SynchronizationType::LEGACY) ? VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR : VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR;
1211 }
1212
1213 private:
1214 VkImageResolve m_imageResolveRegion;
1215 };
1216
1217 enum Type
1218 {
1219 TYPE_COPY,
1220 TYPE_BLIT,
1221 TYPE_RESOLVE,
1222 };
1223
1224 class Support : public OperationSupport
1225 {
1226 public:
1227 Support (const ResourceDescription& resourceDesc, const Type type, const AccessMode mode)
1228 : m_type (type)
1229 , m_mode (mode)
1230 {
1231 DE_ASSERT(resourceDesc.type == RESOURCE_TYPE_IMAGE);
1232
1233 const bool isDepthStencil = isDepthStencilFormat(resourceDesc.imageFormat);
1234 m_requiredQueueFlags = (isDepthStencil || m_type != TYPE_COPY ? VK_QUEUE_GRAPHICS_BIT : VK_QUEUE_TRANSFER_BIT);
1235
1236 // Don't blit depth/stencil images.
1237 DE_ASSERT(m_type != TYPE_BLIT || !isDepthStencil);
1238 }
1239
1240 deUint32 getInResourceUsageFlags (void) const
1241 {
1242 return (m_mode == ACCESS_MODE_READ ? VK_IMAGE_USAGE_TRANSFER_SRC_BIT : 0);
1243 }
1244
1245 deUint32 getOutResourceUsageFlags (void) const
1246 {
1247 return (m_mode == ACCESS_MODE_WRITE ? VK_IMAGE_USAGE_TRANSFER_DST_BIT : 0);
1248 }
1249
1250 VkQueueFlags getQueueFlags (const OperationContext& context) const
1251 {
1252 DE_UNREF(context);
1253 return m_requiredQueueFlags;
1254 }
1255
1256 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
1257 {
1258 if (m_type == TYPE_COPY)
1259 return de::MovePtr<Operation>(new CopyImplementation(context, resource, m_mode));
1260 else if (m_type == TYPE_BLIT)
1261 return de::MovePtr<Operation>(new BlitImplementation(context, resource, m_mode));
1262 else
1263 return de::MovePtr<Operation>(new ResolveImplementation(context, resource, m_mode));
1264 }
1265
1266 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
1267 {
1268 DE_ASSERT(0);
1269 return de::MovePtr<Operation>();
1270 }
1271
1272 private:
1273 const Type m_type;
1274 const AccessMode m_mode;
1275 VkQueueFlags m_requiredQueueFlags;
1276 };
1277
1278 class BlitCopyImplementation : public Operation
1279 {
1280 public:
1281 BlitCopyImplementation (OperationContext& context, Resource& inResource, Resource& outResource)
1282 : m_context (context)
1283 , m_inResource (inResource)
1284 , m_outResource (outResource)
1285 , m_blitRegion (makeBlitRegion(m_inResource))
1286 {
1287 DE_ASSERT(m_inResource.getType() == RESOURCE_TYPE_IMAGE);
1288 DE_ASSERT(m_outResource.getType() == RESOURCE_TYPE_IMAGE);
1289
1290 const InstanceInterface& vki = m_context.getInstanceInterface();
1291 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
1292 const auto& imgResource = m_inResource.getImage();
1293 const VkFormatProperties formatProps = getPhysicalDeviceFormatProperties(vki, physDevice, imgResource.format);
1294 const auto& features = ((imgResource.tiling == VK_IMAGE_TILING_LINEAR) ? formatProps.linearTilingFeatures : formatProps.optimalTilingFeatures);
1295 const VkFormatFeatureFlags requiredFlags = (VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT);
1296
1297 // Both BLIT_SRC and BLIT_DST support are required because both images use the same format.
1298 if ((features & requiredFlags) != requiredFlags)
1299 TCU_THROW(NotSupportedError, "Format doesn't support blits");
1300 }
1301
1302 void recordCommands (const VkCommandBuffer cmdBuffer)
1303 {
1304 const DeviceInterface& vk = m_context.getDeviceInterface();
1305 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
1306
1307 {
1308 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1309 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
1310 (VkAccessFlags2KHR)0, // VkAccessFlags2KHR srcAccessMask
1311 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR dstStageMask
1312 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1313 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1314 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
1315 m_outResource.getImage().handle, // VkImage image
1316 m_outResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1317 );
1318 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1319 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1320 }
1321
1322 vk.cmdBlitImage(cmdBuffer,
1323 m_inResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1324 m_outResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1325 1u, &m_blitRegion, VK_FILTER_NEAREST);
1326 }
1327
1328 SyncInfo getInSyncInfo (void) const
1329 {
1330 const SyncInfo syncInfo =
1331 {
1332 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags stageMask;
1333 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
1334 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout imageLayout;
1335 };
1336 return syncInfo;
1337 }
1338
1339 SyncInfo getOutSyncInfo (void) const
1340 {
1341 const SyncInfo syncInfo =
1342 {
1343 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags stageMask;
1344 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
1345 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
1346 };
1347 return syncInfo;
1348 }
1349
1350 Data getData (void) const
1351 {
1352 Data data = { 0, DE_NULL };
1353 return data;
1354 }
1355
1356 void setData (const Data&)
1357 {
1358 DE_ASSERT(0);
1359 }
1360
1361 private:
1362 OperationContext& m_context;
1363 Resource& m_inResource;
1364 Resource& m_outResource;
1365 const VkImageBlit m_blitRegion;
1366 };
1367
1368 class CopyCopyImplementation : public Operation
1369 {
1370 public:
1371 CopyCopyImplementation (OperationContext& context, Resource& inResource, Resource& outResource)
1372 : m_context (context)
1373 , m_inResource (inResource)
1374 , m_outResource (outResource)
1375 , m_imageCopyRegion (makeImageRegion<VkImageCopy>(m_inResource))
1376 {
1377 DE_ASSERT(m_inResource.getType() == RESOURCE_TYPE_IMAGE);
1378 DE_ASSERT(m_outResource.getType() == RESOURCE_TYPE_IMAGE);
1379 }
1380
1381 void recordCommands (const VkCommandBuffer cmdBuffer)
1382 {
1383 const DeviceInterface& vk = m_context.getDeviceInterface();
1384 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
1385
1386 {
1387 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1388 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
1389 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
1390 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR dstStageMask
1391 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1392 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1393 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
1394 m_outResource.getImage().handle, // VkImage image
1395 m_outResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1396 );
1397 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1398 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1399 }
1400
1401 vk.cmdCopyImage(cmdBuffer,
1402 m_inResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1403 m_outResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1404 1u, &m_imageCopyRegion);
1405 }
1406
1407 SyncInfo getInSyncInfo (void) const
1408 {
1409 const SyncInfo syncInfo =
1410 {
1411 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags stageMask;
1412 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
1413 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout imageLayout;
1414 };
1415 return syncInfo;
1416 }
1417
1418 SyncInfo getOutSyncInfo (void) const
1419 {
1420 const SyncInfo syncInfo =
1421 {
1422 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags stageMask;
1423 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
1424 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
1425 };
1426 return syncInfo;
1427 }
1428
1429 Data getData (void) const
1430 {
1431 Data data = { 0, DE_NULL };
1432 return data;
1433 }
1434
1435 void setData (const Data&)
1436 {
1437 DE_ASSERT(0);
1438 }
1439
1440 private:
1441 OperationContext& m_context;
1442 Resource& m_inResource;
1443 Resource& m_outResource;
1444 const VkImageCopy m_imageCopyRegion;
1445 };
1446
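//! Support class for image copy and blit operations. Blits require a graphics queue and are not used with depth/stencil formats.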
1447 class CopySupport : public OperationSupport
1448 {
1449 public:
1450 CopySupport (const ResourceDescription& resourceDesc, const Type type)
1451 : m_type (type)
1452 {
1453 DE_ASSERT(resourceDesc.type == RESOURCE_TYPE_IMAGE);
1454
1455 const bool isDepthStencil = isDepthStencilFormat(resourceDesc.imageFormat);
1456 m_requiredQueueFlags = (isDepthStencil || m_type == TYPE_BLIT ? VK_QUEUE_GRAPHICS_BIT : VK_QUEUE_TRANSFER_BIT);
1457
1458 // Don't blit depth/stencil images.
1459 DE_ASSERT(m_type != TYPE_BLIT || !isDepthStencil);
1460 }
1461
1462 deUint32 getInResourceUsageFlags (void) const
1463 {
1464 return VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1465 }
1466
1467 deUint32 getOutResourceUsageFlags (void) const
1468 {
1469 return VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1470 }
1471
1472 VkQueueFlags getQueueFlags (const OperationContext& context) const
1473 {
1474 DE_UNREF(context);
1475 return m_requiredQueueFlags;
1476 }
1477
1478 de::MovePtr<Operation> build (OperationContext&, Resource&) const
1479 {
1480 DE_ASSERT(0);
1481 return de::MovePtr<Operation>();
1482 }
1483
1484 de::MovePtr<Operation> build (OperationContext& context, Resource& inResource, Resource& outResource) const
1485 {
1486 if (m_type == TYPE_COPY)
1487 return de::MovePtr<Operation>(new CopyCopyImplementation(context, inResource, outResource));
1488 else
1489 return de::MovePtr<Operation>(new BlitCopyImplementation(context, inResource, outResource));
1490 }
1491
1492 private:
1493 const Type m_type;
1494 VkQueueFlags m_requiredQueueFlags;
1495 };
1496
1497 } // namespace CopyBlitImage
1498
1499 namespace ShaderAccess
1500 {
1501
1502 enum DispatchCall
1503 {
1504 DISPATCH_CALL_DISPATCH,
1505 DISPATCH_CALL_DISPATCH_INDIRECT,
1506 };
1507
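//! Graphics pipeline that draws the embedded vertex grid into a small color attachment, so the tested shader stage gets executed.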
1508 class GraphicsPipeline : public Pipeline
1509 {
1510 public:
1511 GraphicsPipeline (OperationContext& context, const VkShaderStageFlagBits stage, const std::string& shaderPrefix, const VkDescriptorSetLayout descriptorSetLayout)
1512 : m_vertices (context)
1513 {
1514 const DeviceInterface& vk = context.getDeviceInterface();
1515 const VkDevice device = context.getDevice();
1516 Allocator& allocator = context.getAllocator();
1517 const VkShaderStageFlags requiredStages = getRequiredStages(stage);
1518
1519 // Color attachment
1520
1521 m_colorFormat = VK_FORMAT_R8G8B8A8_UNORM;
1522 m_colorImageSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
1523 m_colorImageExtent = makeExtent3D(16u, 16u, 1u);
1524 m_colorAttachmentImage = de::MovePtr<Image>(new Image(vk, device, allocator,
1525 makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_colorImageExtent, m_colorFormat, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
1526 VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
1527 MemoryRequirement::Any));
1528
1529 // Pipeline
1530
1531 m_colorAttachmentView = makeImageView (vk, device, **m_colorAttachmentImage, VK_IMAGE_VIEW_TYPE_2D, m_colorFormat, m_colorImageSubresourceRange);
1532 m_renderPass = makeRenderPass (vk, device, m_colorFormat);
1533 m_framebuffer = makeFramebuffer (vk, device, *m_renderPass, *m_colorAttachmentView, m_colorImageExtent.width, m_colorImageExtent.height);
1534 m_pipelineLayout = makePipelineLayout(vk, device, descriptorSetLayout);
1535
1536 GraphicsPipelineBuilder pipelineBuilder;
1537 pipelineBuilder
1538 .setRenderSize (tcu::IVec2(m_colorImageExtent.width, m_colorImageExtent.height))
1539 .setVertexInputSingleAttribute (m_vertices.getVertexFormat(), m_vertices.getVertexStride())
1540 .setShader (vk, device, VK_SHADER_STAGE_VERTEX_BIT, context.getBinaryCollection().get(shaderPrefix + "vert"), DE_NULL)
1541 .setShader (vk, device, VK_SHADER_STAGE_FRAGMENT_BIT, context.getBinaryCollection().get(shaderPrefix + "frag"), DE_NULL);
1542
1543 if (requiredStages & (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))
1544 pipelineBuilder
1545 .setPatchControlPoints (m_vertices.getNumVertices())
1546 .setShader (vk, device, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, context.getBinaryCollection().get(shaderPrefix + "tesc"), DE_NULL)
1547 .setShader (vk, device, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, context.getBinaryCollection().get(shaderPrefix + "tese"), DE_NULL);
1548
1549 if (requiredStages & VK_SHADER_STAGE_GEOMETRY_BIT)
1550 pipelineBuilder
1551 .setShader (vk, device, VK_SHADER_STAGE_GEOMETRY_BIT, context.getBinaryCollection().get(shaderPrefix + "geom"), DE_NULL);
1552
1553 m_pipeline = pipelineBuilder.build(vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData(), context.getResourceInterface());
1554 }
1555
1556 void recordCommands (OperationContext& context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
1557 {
1558 const DeviceInterface& vk = context.getDeviceInterface();
1559 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(context.getSynchronizationType(), vk, DE_FALSE);
1560
1561 // Change color attachment image layout
1562 {
1563 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1564 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
1565 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
1566 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, // VkPipelineStageFlags2KHR dstStageMask
1567 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1568 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1569 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout newLayout
1570 **m_colorAttachmentImage, // VkImage image
1571 m_colorImageSubresourceRange // VkImageSubresourceRange subresourceRange
1572 );
1573 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1574 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1575 }
1576
1577 {
1578 const VkRect2D renderArea = makeRect2D(m_colorImageExtent);
1579 const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
1580
1581 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
1582 }
1583
1584 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
1585 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1u, &descriptorSet, 0u, DE_NULL);
1586 {
1587 const VkDeviceSize vertexBufferOffset = 0ull;
1588 const VkBuffer vertexBuffer = m_vertices.getVertexBuffer();
1589 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &vertexBufferOffset);
1590 }
1591
1592 vk.cmdDraw(cmdBuffer, m_vertices.getNumVertices(), 1u, 0u, 0u);
1593 endRenderPass(vk, cmdBuffer);
1594 }
1595
1596 private:
1597 const VertexGrid m_vertices;
1598 VkFormat m_colorFormat;
1599 de::MovePtr<Image> m_colorAttachmentImage;
1600 Move<VkImageView> m_colorAttachmentView;
1601 VkExtent3D m_colorImageExtent;
1602 VkImageSubresourceRange m_colorImageSubresourceRange;
1603 Move<VkRenderPass> m_renderPass;
1604 Move<VkFramebuffer> m_framebuffer;
1605 Move<VkPipelineLayout> m_pipelineLayout;
1606 Move<VkPipeline> m_pipeline;
1607 };
1608
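//! Compute pipeline that records a single-workgroup dispatch, either direct or indirect.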
1609 class ComputePipeline : public Pipeline
1610 {
1611 public:
1612 ComputePipeline (OperationContext& context, const DispatchCall dispatchCall, const std::string& shaderPrefix, const VkDescriptorSetLayout descriptorSetLayout)
1613 : m_dispatchCall (dispatchCall)
1614 {
1615 const DeviceInterface& vk = context.getDeviceInterface();
1616 const VkDevice device = context.getDevice();
1617 Allocator& allocator = context.getAllocator();
1618
1619 if (m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT)
1620 {
1621 m_indirectBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator,
1622 makeBufferCreateInfo(sizeof(VkDispatchIndirectCommand), VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT), MemoryRequirement::HostVisible));
1623
1624 const Allocation& alloc = m_indirectBuffer->getAllocation();
1625 VkDispatchIndirectCommand* const pIndirectCommand = static_cast<VkDispatchIndirectCommand*>(alloc.getHostPtr());
1626
1627 pIndirectCommand->x = 1u;
1628 pIndirectCommand->y = 1u;
1629 pIndirectCommand->z = 1u;
1630
1631 flushAlloc(vk, device, alloc);
1632 }
1633
1634 const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, context.getBinaryCollection().get(shaderPrefix + "comp"), (VkShaderModuleCreateFlags)0));
1635
1636 m_pipelineLayout = makePipelineLayout(vk, device, descriptorSetLayout);
1637 m_pipeline = makeComputePipeline(vk, device, *m_pipelineLayout, *shaderModule, DE_NULL, context.getPipelineCacheData(), context.getResourceInterface());
1638 }
1639
1640 void recordCommands (OperationContext& context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
1641 {
1642 const DeviceInterface& vk = context.getDeviceInterface();
1643
1644 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipeline);
1645 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0u, 1u, &descriptorSet, 0u, DE_NULL);
1646
1647 if (m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT)
1648 vk.cmdDispatchIndirect(cmdBuffer, **m_indirectBuffer, 0u);
1649 else
1650 vk.cmdDispatch(cmdBuffer, 1u, 1u, 1u);
1651 }
1652
1653 private:
1654 const DispatchCall m_dispatchCall;
1655 de::MovePtr<Buffer> m_indirectBuffer;
1656 Move<VkPipelineLayout> m_pipelineLayout;
1657 Move<VkPipeline> m_pipeline;
1658 };
1659
1660 //! Read/write operation on a UBO/SSBO in graphics/compute pipeline.
1661 class BufferImplementation : public Operation
1662 {
1663 public:
1664 BufferImplementation (OperationContext& context,
1665 Resource& resource,
1666 const VkShaderStageFlagBits stage,
1667 const BufferType bufferType,
1668 const std::string& shaderPrefix,
1669 const AccessMode mode,
1670 const bool specializedAccess,
1671 const PipelineType pipelineType,
1672 const DispatchCall dispatchCall)
1673 : Operation (specializedAccess)
1674 , m_context (context)
1675 , m_resource (resource)
1676 , m_stage (stage)
1677 , m_pipelineStage (pipelineStageFlagsFromShaderStageFlagBits(m_stage))
1678 , m_bufferType (bufferType)
1679 , m_mode (mode)
1680 , m_dispatchCall (dispatchCall)
1681 {
1682 requireFeaturesForSSBOAccess (m_context, m_stage);
1683
1684 const DeviceInterface& vk = m_context.getDeviceInterface();
1685 const VkDevice device = m_context.getDevice();
1686 Allocator& allocator = m_context.getAllocator();
1687
1688 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
1689 vk, device, allocator, makeBufferCreateInfo(m_resource.getBuffer().size, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible));
1690
1691 // Init host buffer data
1692 {
1693 const Allocation& alloc = m_hostBuffer->getAllocation();
1694 if (m_mode == ACCESS_MODE_READ)
1695 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_resource.getBuffer().size));
1696 else
1697 fillPattern(alloc.getHostPtr(), m_resource.getBuffer().size);
1698 flushAlloc(vk, device, alloc);
1699 }
1700 // Prepare descriptors
1701 {
1702 VkDescriptorType bufferDescriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
1703
1704 if (m_bufferType == BUFFER_TYPE_UNIFORM)
1705 bufferDescriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
1706 else if (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL)
1707 bufferDescriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
1708
1709 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
1710 .addSingleBinding(bufferDescriptorType, m_stage)
1711 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, m_stage)
1712 .build(vk, device);
1713
1714 m_descriptorPool = DescriptorPoolBuilder()
1715 .addType(bufferDescriptorType)
1716 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
1717 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
1718
1719 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
1720
1721 if (m_mode == ACCESS_MODE_READ)
1722 {
1723 if ((m_bufferType == BUFFER_TYPE_UNIFORM) || (m_bufferType == BUFFER_TYPE_STORAGE))
1724 {
1725 const VkDescriptorBufferInfo bufferInfo = makeDescriptorBufferInfo(m_resource.getBuffer().handle, m_resource.getBuffer().offset, m_resource.getBuffer().size);
1726 const VkDescriptorBufferInfo hostBufferInfo = makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_resource.getBuffer().size);
1727 DescriptorSetUpdateBuilder()
1728 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), bufferDescriptorType, &bufferInfo)
1729 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &hostBufferInfo)
1730 .update(vk, device);
1731 }
1732 else
1733 {
1734 m_pBufferView = vk::makeBufferView(vk, device, m_resource.getBuffer().handle, VK_FORMAT_R32G32B32A32_UINT, m_resource.getBuffer().offset, m_resource.getBuffer().size);
1735 const VkDescriptorBufferInfo hostBufferInfo = makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_resource.getBuffer().size);
1736 DescriptorSetUpdateBuilder()
1737 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), bufferDescriptorType, &m_pBufferView.get())
1738 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &hostBufferInfo)
1739 .update(vk, device);
1740 }
1741 }
1742 else
1743 {
1744 const VkDescriptorBufferInfo bufferInfo = makeDescriptorBufferInfo(m_resource.getBuffer().handle, m_resource.getBuffer().offset, m_resource.getBuffer().size);
1745 const VkDescriptorBufferInfo hostBufferInfo = makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_resource.getBuffer().size);
1746 DescriptorSetUpdateBuilder()
1747 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &hostBufferInfo)
1748 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &bufferInfo)
1749 .update(vk, device);
1750 }
1751 }
1752
1753 // Create pipeline
1754 m_pipeline = (pipelineType == PIPELINE_TYPE_GRAPHICS ? de::MovePtr<Pipeline>(new GraphicsPipeline(context, stage, shaderPrefix, *m_descriptorSetLayout))
1755 : de::MovePtr<Pipeline>(new ComputePipeline(context, m_dispatchCall, shaderPrefix, *m_descriptorSetLayout)));
1756 }
1757
1758 void recordCommands (const VkCommandBuffer cmdBuffer)
1759 {
1760 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
1761
1762 // Post draw/dispatch commands
1763
1764 if (m_mode == ACCESS_MODE_READ)
1765 {
1766 const DeviceInterface& vk = m_context.getDeviceInterface();
1767 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
1768
1769 // Insert a barrier so data written by the shader is available to the host
1770 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
1771 m_pipelineStage, // VkPipelineStageFlags2KHR srcStageMask
1772 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
1773 VK_PIPELINE_STAGE_HOST_BIT, // VkPipelineStageFlags2KHR dstStageMask
1774 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1775 **m_hostBuffer, // VkBuffer buffer
1776 0u, // VkDeviceSize offset
1777 m_resource.getBuffer().size // VkDeviceSize size
1778 );
1779 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
1780 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1781 }
1782 }
1783
1784 SyncInfo getInSyncInfo (void) const
1785 {
1786 VkAccessFlags2KHR accessFlags = VK_ACCESS_2_NONE_KHR;
1787
1788 if (m_mode == ACCESS_MODE_READ)
1789 {
1790 if (m_bufferType == BUFFER_TYPE_UNIFORM)
1791 accessFlags = VK_ACCESS_2_UNIFORM_READ_BIT_KHR;
1792
1793 else if (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL)
1794 {
1795 if (m_specializedAccess)
1796 accessFlags = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR;
1797 else
1798 accessFlags = VK_ACCESS_2_SHADER_READ_BIT_KHR;
1799 }
1800 else
1801 {
1802 if (m_specializedAccess)
1803 accessFlags = VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR;
1804 else
1805 accessFlags = VK_ACCESS_2_SHADER_READ_BIT_KHR;
1806 }
1807 }
1808 else
1809 {
1810 if (m_specializedAccess)
1811 accessFlags = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR;
1812 else
1813 accessFlags = VK_ACCESS_2_SHADER_WRITE_BIT_KHR;
1814 }
1815
1816 const SyncInfo syncInfo =
1817 {
1818 m_pipelineStage, // VkPipelineStageFlags stageMask;
1819 accessFlags, // VkAccessFlags accessMask;
1820 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
1821 };
1822 return syncInfo;
1823 }
1824
1825 SyncInfo getOutSyncInfo (void) const
1826 {
1827 VkAccessFlags2KHR accessFlags = VK_ACCESS_2_NONE_KHR;
1828
1829 if (m_mode == ACCESS_MODE_WRITE)
1830 {
1831 if (m_specializedAccess)
1832 accessFlags = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR;
1833 else
1834 accessFlags = VK_ACCESS_2_SHADER_WRITE_BIT_KHR;
1835 }
1836
1837 const SyncInfo syncInfo =
1838 {
1839 m_pipelineStage, // VkPipelineStageFlags stageMask;
1840 accessFlags, // VkAccessFlags accessMask;
1841 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
1842 };
1843 return syncInfo;
1844 }
1845
1846 Data getData (void) const
1847 {
1848 return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
1849 }
1850
1851 void setData (const Data& data)
1852 {
1853 DE_ASSERT(m_mode == ACCESS_MODE_WRITE);
1854 setHostBufferData(m_context, *m_hostBuffer, data);
1855 }
1856
1857 vk::VkShaderStageFlagBits getShaderStage (void) { return m_stage; }
1858
1859 private:
1860 OperationContext& m_context;
1861 Resource& m_resource;
1862 const VkShaderStageFlagBits m_stage;
1863 const VkPipelineStageFlags m_pipelineStage;
1864 const BufferType m_bufferType;
1865 const AccessMode m_mode;
1866 const DispatchCall m_dispatchCall;
1867 de::MovePtr<Buffer> m_hostBuffer;
1868 Move<VkDescriptorPool> m_descriptorPool;
1869 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
1870 Move<VkDescriptorSet> m_descriptorSet;
1871 de::MovePtr<Pipeline> m_pipeline;
1872 Move<VkBufferView> m_pBufferView;
1873 };
1874
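//! Read/write operation on a storage image in graphics/compute pipeline.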
1875 class ImageImplementation : public Operation
1876 {
1877 public:
1878 ImageImplementation (OperationContext& context,
1879 Resource& resource,
1880 const VkShaderStageFlagBits stage,
1881 const std::string& shaderPrefix,
1882 const AccessMode mode,
1883 const bool specializedAccess,
1884 const PipelineType pipelineType,
1885 const DispatchCall dispatchCall)
1886 : Operation (specializedAccess)
1887 , m_context (context)
1888 , m_resource (resource)
1889 , m_stage (stage)
1890 , m_pipelineStage (pipelineStageFlagsFromShaderStageFlagBits(m_stage))
1891 , m_mode (mode)
1892 , m_dispatchCall (dispatchCall)
1893 , m_hostBufferSizeBytes (getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
1894 {
1895 const DeviceInterface& vk = m_context.getDeviceInterface();
1896 const InstanceInterface& vki = m_context.getInstanceInterface();
1897 const VkDevice device = m_context.getDevice();
1898 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
1899 Allocator& allocator = m_context.getAllocator();
1900
1901 // Image stores are always required, in either access mode.
1902 requireFeaturesForSSBOAccess(m_context, m_stage);
1903
1904 // Some storage image formats may not be supported
1905 const auto& imgResource = m_resource.getImage();
1906 requireStorageImageSupport(vki, physDevice, imgResource.format, imgResource.tiling);
1907
1908 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
1909 vk, device, allocator, makeBufferCreateInfo(m_hostBufferSizeBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT),
1910 MemoryRequirement::HostVisible));
1911
1912 // Init host buffer data
1913 {
1914 const Allocation& alloc = m_hostBuffer->getAllocation();
1915 if (m_mode == ACCESS_MODE_READ)
1916 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_hostBufferSizeBytes));
1917 else
1918 fillPattern(alloc.getHostPtr(), m_hostBufferSizeBytes);
1919 flushAlloc(vk, device, alloc);
1920 }
1921
1922 // Image resources
1923 {
1924 m_image = de::MovePtr<Image>(new Image(vk, device, allocator,
1925 makeImageCreateInfo(m_resource.getImage().imageType, m_resource.getImage().extent, m_resource.getImage().format,
1926 (VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_STORAGE_BIT),
1927 VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
1928 MemoryRequirement::Any));
1929
1930 if (m_mode == ACCESS_MODE_READ)
1931 {
1932 m_srcImage = &m_resource.getImage().handle;
1933 m_dstImage = &(**m_image);
1934 }
1935 else
1936 {
1937 m_srcImage = &(**m_image);
1938 m_dstImage = &m_resource.getImage().handle;
1939 }
1940
1941 const VkImageViewType viewType = getImageViewType(m_resource.getImage().imageType);
1942
1943 m_srcImageView = makeImageView(vk, device, *m_srcImage, viewType, m_resource.getImage().format, m_resource.getImage().subresourceRange);
1944 m_dstImageView = makeImageView(vk, device, *m_dstImage, viewType, m_resource.getImage().format, m_resource.getImage().subresourceRange);
1945 }
1946
1947 // Prepare descriptors
1948 {
1949 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
1950 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, m_stage)
1951 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, m_stage)
1952 .build(vk, device);
1953
1954 m_descriptorPool = DescriptorPoolBuilder()
1955 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
1956 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
1957 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
1958
1959 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
1960
1961 const VkDescriptorImageInfo srcImageInfo = makeDescriptorImageInfo(DE_NULL, *m_srcImageView, VK_IMAGE_LAYOUT_GENERAL);
1962 const VkDescriptorImageInfo dstImageInfo = makeDescriptorImageInfo(DE_NULL, *m_dstImageView, VK_IMAGE_LAYOUT_GENERAL);
1963
1964 DescriptorSetUpdateBuilder()
1965 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &srcImageInfo)
1966 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &dstImageInfo)
1967 .update(vk, device);
1968 }
1969
1970 // Create pipeline
1971 m_pipeline = (pipelineType == PIPELINE_TYPE_GRAPHICS ? de::MovePtr<Pipeline>(new GraphicsPipeline(context, stage, shaderPrefix, *m_descriptorSetLayout))
1972 : de::MovePtr<Pipeline>(new ComputePipeline(context, m_dispatchCall, shaderPrefix, *m_descriptorSetLayout)));
1973 }
1974
1975 void recordCommands (const VkCommandBuffer cmdBuffer)
1976 {
1977 const DeviceInterface& vk = m_context.getDeviceInterface();
1978 const VkBufferImageCopy bufferCopyRegion = makeBufferImageCopy(m_resource.getImage().extent, m_resource.getImage().subresourceLayers);
1979 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
1980
1981 // Destination image layout
1982 {
1983 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1984 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
1985 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
1986 m_pipelineStage, // VkPipelineStageFlags2KHR dstStageMask
1987 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1988 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1989 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout newLayout
1990 *m_dstImage, // VkImage image
1991 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1992 );
1993 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1994 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1995 }
1996
1997 // In write mode, source image must be filled with data.
1998 if (m_mode == ACCESS_MODE_WRITE)
1999 {
2000 // Layout for transfer
2001 {
2002 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
2003 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
2004 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
2005 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR dstStageMask
2006 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2007 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
2008 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
2009 *m_srcImage, // VkImage image
2010 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
2011 );
2012 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
2013 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2014 }
2015
2016 // Host buffer -> Src image
2017 vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, *m_srcImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &bufferCopyRegion);
2018
2019 // Layout for shader reading
2020 {
2021 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
2022 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR srcStageMask
2023 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
2024 m_pipelineStage, // VkPipelineStageFlags2KHR dstStageMask
2025 VK_ACCESS_2_SHADER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2026 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
2027 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout newLayout
2028 *m_srcImage, // VkImage image
2029 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
2030 );
2031 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
2032 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2033 }
2034 }
2035
2036 // Execute shaders
2037
2038 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
2039
2040 // Post draw/dispatch commands
2041
2042 if (m_mode == ACCESS_MODE_READ)
2043 {
2044 // Layout for transfer
2045 {
2046 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
2047 m_pipelineStage, // VkPipelineStageFlags2KHR srcStageMask
2048 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
2049 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR dstStageMask
2050 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2051 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout oldLayout
2052 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
2053 *m_dstImage, // VkImage image
2054 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
2055 );
2056 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
2057 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2058 }
2059
2060 // Dst image -> Host buffer
2061 vk.cmdCopyImageToBuffer(cmdBuffer, *m_dstImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_hostBuffer, 1u, &bufferCopyRegion);
2062
2063 // Insert a barrier so the data just copied into the host buffer is visible to the host
2064 {
2065 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
2066 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR srcStageMask
2067 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
2068 VK_PIPELINE_STAGE_HOST_BIT, // VkPipelineStageFlags2KHR dstStageMask
2069 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2070 **m_hostBuffer, // VkBuffer buffer
2071 0u, // VkDeviceSize offset
2072 m_hostBufferSizeBytes // VkDeviceSize size
2073 );
2074 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
2075 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2076 }
2077 }
2078 }
2079
2080 SyncInfo getInSyncInfo (void) const
2081 {
2082 VkAccessFlags2KHR accessFlags = VK_ACCESS_2_NONE_KHR;
2083
2084 if (m_mode == ACCESS_MODE_READ)
2085 {
2086 if (m_specializedAccess)
2087 accessFlags = VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR;
2088 else
2089 accessFlags = VK_ACCESS_2_SHADER_READ_BIT_KHR;
2090 }
2091
2092 const SyncInfo syncInfo =
2093 {
2094 m_pipelineStage, // VkPipelineStageFlags stageMask;
2095 accessFlags, // VkAccessFlags accessMask;
2096 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
2097 };
2098 return syncInfo;
2099 }
2100
2101 SyncInfo getOutSyncInfo (void) const
2102 {
2103 VkAccessFlags2KHR accessFlags = VK_ACCESS_2_NONE_KHR;
2104
2105 if (m_mode == ACCESS_MODE_WRITE)
2106 {
2107 if (m_specializedAccess)
2108 accessFlags = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR;
2109 else
2110 accessFlags = VK_ACCESS_2_SHADER_WRITE_BIT_KHR;
2111 }
2112
2113 const SyncInfo syncInfo =
2114 {
2115 m_pipelineStage, // VkPipelineStageFlags stageMask;
2116 accessFlags, // VkAccessFlags accessMask;
2117 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
2118 };
2119 return syncInfo;
2120 }
2121
2122 Data getData (void) const
2123 {
2124 return getHostBufferData(m_context, *m_hostBuffer, m_hostBufferSizeBytes);
2125 }
2126
2127 void setData (const Data& data)
2128 {
2129 DE_ASSERT(m_mode == ACCESS_MODE_WRITE);
2130 setHostBufferData(m_context, *m_hostBuffer, data);
2131 }
2132
2133 vk::VkShaderStageFlagBits getShaderStage (void) { return m_stage; }
2134
2135 private:
2136 OperationContext& m_context;
2137 Resource& m_resource;
2138 const VkShaderStageFlagBits m_stage;
2139 const VkPipelineStageFlags m_pipelineStage;
2140 const AccessMode m_mode;
2141 const DispatchCall m_dispatchCall;
2142 const VkDeviceSize m_hostBufferSizeBytes;
2143 de::MovePtr<Buffer> m_hostBuffer;
2144 de::MovePtr<Image> m_image; //! Additional image used as src or dst depending on operation mode.
2145 const VkImage* m_srcImage;
2146 const VkImage* m_dstImage;
2147 Move<VkImageView> m_srcImageView;
2148 Move<VkImageView> m_dstImageView;
2149 Move<VkDescriptorPool> m_descriptorPool;
2150 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
2151 Move<VkDescriptorSet> m_descriptorSet;
2152 de::MovePtr<Pipeline> m_pipeline;
2153
2154 };
2155
2156 //! Create generic passthrough shaders with bits of custom code inserted in a specific shader stage.
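//! Program names are formed as shaderPrefix + stage suffix ("vert", "tesc", "tese", "geom", "frag", "comp").
//! The declCode/mainCode snippets are spliced only into the stage selected by 'stage'; all other required stages
//! get plain passthrough code.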
2157 void initPassthroughPrograms (SourceCollections& programCollection,
2158 const std::string& shaderPrefix,
2159 const std::string& declCode,
2160 const std::string& mainCode,
2161 const VkShaderStageFlagBits stage)
2162 {
2163 const VkShaderStageFlags requiredStages = getRequiredStages(stage);
2164
2165 if (requiredStages & VK_SHADER_STAGE_VERTEX_BIT)
2166 {
2167 std::ostringstream src;
2168 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2169 << "\n"
2170 << "layout(location = 0) in vec4 v_in_position;\n"
2171 << "\n"
2172 << "out " << s_perVertexBlock << ";\n"
2173 << "\n"
2174 << (stage & VK_SHADER_STAGE_VERTEX_BIT ? declCode + "\n" : "")
2175 << "void main (void)\n"
2176 << "{\n"
2177 << " gl_Position = v_in_position;\n"
2178 << (stage & VK_SHADER_STAGE_VERTEX_BIT ? mainCode : "")
2179 << "}\n";
2180
2181 if (!programCollection.glslSources.contains(shaderPrefix + "vert"))
2182 programCollection.glslSources.add(shaderPrefix + "vert") << glu::VertexSource(src.str());
2183 }
2184
2185 if (requiredStages & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT)
2186 {
2187 std::ostringstream src;
2188 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2189 << "\n"
2190 << "layout(vertices = 3) out;\n"
2191 << "\n"
2192 << "in " << s_perVertexBlock << " gl_in[gl_MaxPatchVertices];\n"
2193 << "\n"
2194 << "out " << s_perVertexBlock << " gl_out[];\n"
2195 << "\n"
2196 << (stage & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT ? declCode + "\n" : "")
2197 << "void main (void)\n"
2198 << "{\n"
2199 << " gl_TessLevelInner[0] = 1.0;\n"
2200 << " gl_TessLevelInner[1] = 1.0;\n"
2201 << "\n"
2202 << " gl_TessLevelOuter[0] = 1.0;\n"
2203 << " gl_TessLevelOuter[1] = 1.0;\n"
2204 << " gl_TessLevelOuter[2] = 1.0;\n"
2205 << " gl_TessLevelOuter[3] = 1.0;\n"
2206 << "\n"
2207 << " gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n"
2208 << (stage & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT ? "\n" + mainCode : "")
2209 << "}\n";
2210
2211 if (!programCollection.glslSources.contains(shaderPrefix + "tesc"))
2212 programCollection.glslSources.add(shaderPrefix + "tesc") << glu::TessellationControlSource(src.str());
2213 }
2214
2215 if (requiredStages & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
2216 {
2217 std::ostringstream src;
2218 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2219 << "\n"
2220 << "layout(triangles, equal_spacing, ccw) in;\n"
2221 << "\n"
2222 << "in " << s_perVertexBlock << " gl_in[gl_MaxPatchVertices];\n"
2223 << "\n"
2224 << "out " << s_perVertexBlock << ";\n"
2225 << "\n"
2226 << (stage & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT ? declCode + "\n" : "")
2227 << "void main (void)\n"
2228 << "{\n"
2229 << " vec3 px = gl_TessCoord.x * gl_in[0].gl_Position.xyz;\n"
2230 << " vec3 py = gl_TessCoord.y * gl_in[1].gl_Position.xyz;\n"
2231 << " vec3 pz = gl_TessCoord.z * gl_in[2].gl_Position.xyz;\n"
2232 << " gl_Position = vec4(px + py + pz, 1.0);\n"
2233 << (stage & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT ? mainCode : "")
2234 << "}\n";
2235
2236 if (!programCollection.glslSources.contains(shaderPrefix + "tese"))
2237 programCollection.glslSources.add(shaderPrefix + "tese") << glu::TessellationEvaluationSource(src.str());
2238 }
2239
2240 if (requiredStages & VK_SHADER_STAGE_GEOMETRY_BIT)
2241 {
2242 std::ostringstream src;
2243 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2244 << "\n"
2245 << "layout(triangles) in;\n"
2246 << "layout(triangle_strip, max_vertices = 3) out;\n"
2247 << "\n"
2248 << "in " << s_perVertexBlock << " gl_in[];\n"
2249 << "\n"
2250 << "out " << s_perVertexBlock << ";\n"
2251 << "\n"
2252 << (stage & VK_SHADER_STAGE_GEOMETRY_BIT ? declCode + "\n" : "")
2253 << "void main (void)\n"
2254 << "{\n"
2255 << " gl_Position = gl_in[0].gl_Position;\n"
2256 << " EmitVertex();\n"
2257 << "\n"
2258 << " gl_Position = gl_in[1].gl_Position;\n"
2259 << " EmitVertex();\n"
2260 << "\n"
2261 << " gl_Position = gl_in[2].gl_Position;\n"
2262 << " EmitVertex();\n"
2263 << (stage & VK_SHADER_STAGE_GEOMETRY_BIT ? "\n" + mainCode : "")
2264 << "}\n";
2265
2266 if (!programCollection.glslSources.contains(shaderPrefix + "geom"))
2267 programCollection.glslSources.add(shaderPrefix + "geom") << glu::GeometrySource(src.str());
2268 }
2269
2270 if (requiredStages & VK_SHADER_STAGE_FRAGMENT_BIT)
2271 {
2272 std::ostringstream src;
2273 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2274 << "\n"
2275 << "layout(location = 0) out vec4 o_color;\n"
2276 << "\n"
2277 << (stage & VK_SHADER_STAGE_FRAGMENT_BIT ? declCode + "\n" : "")
2278 << "void main (void)\n"
2279 << "{\n"
2280 << " o_color = vec4(1.0);\n"
2281 << (stage & VK_SHADER_STAGE_FRAGMENT_BIT ? "\n" + mainCode : "")
2282 << "}\n";
2283
2284 if (!programCollection.glslSources.contains(shaderPrefix + "frag"))
2285 programCollection.glslSources.add(shaderPrefix + "frag") << glu::FragmentSource(src.str());
2286 }
2287
2288 if (requiredStages & VK_SHADER_STAGE_COMPUTE_BIT)
2289 {
2290 std::ostringstream src;
2291 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2292 << "\n"
2293 << "layout(local_size_x = 1) in;\n"
2294 << "\n"
2295 << (stage & VK_SHADER_STAGE_COMPUTE_BIT ? declCode + "\n" : "")
2296 << "void main (void)\n"
2297 << "{\n"
2298 << (stage & VK_SHADER_STAGE_COMPUTE_BIT ? mainCode : "")
2299 << "}\n";
2300
2301 if (!programCollection.glslSources.contains(shaderPrefix + "comp"))
2302 programCollection.glslSources.add(shaderPrefix + "comp") << glu::ComputeSource(src.str());
2303 }
2304 }
2305
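//! Support class for shader read/write access to a uniform, storage or uniform texel buffer.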
2306 class BufferSupport : public OperationSupport
2307 {
2308 public:
2309 BufferSupport (const ResourceDescription& resourceDesc,
2310 const BufferType bufferType,
2311 const AccessMode mode,
2312 const bool specializedAccess,
2313 const VkShaderStageFlagBits stage,
2314 const DispatchCall dispatchCall = DISPATCH_CALL_DISPATCH)
2315 : OperationSupport (specializedAccess)
2316 , m_resourceDesc (resourceDesc)
2317 , m_bufferType (bufferType)
2318 , m_mode (mode)
2319 , m_stage (stage)
2320 , m_shaderPrefix (std::string(m_mode == ACCESS_MODE_READ ? "read_" : "write_") + (m_bufferType == BUFFER_TYPE_UNIFORM ? "ubo_" : (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL ? "ubo_texel_" : "ssbo_")))
2321 , m_dispatchCall (dispatchCall)
2322 {
2323 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER);
2324 DE_ASSERT(m_bufferType == BUFFER_TYPE_UNIFORM || m_bufferType == BUFFER_TYPE_STORAGE || m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL);
2325 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
2326 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_bufferType == BUFFER_TYPE_STORAGE);
2327 DE_ASSERT(m_bufferType != BUFFER_TYPE_UNIFORM || m_resourceDesc.size.x() <= MAX_UBO_RANGE);
2328 DE_ASSERT(m_dispatchCall == DISPATCH_CALL_DISPATCH || m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT);
2329
2330 assertValidShaderStage(m_stage);
2331 }
2332
2333 void initPrograms (SourceCollections& programCollection) const
2334 {
2335 DE_ASSERT((m_resourceDesc.size.x() % sizeof(tcu::UVec4)) == 0);
2336
2337 std::string bufferTypeStr = "";
2338 if (m_bufferType == BUFFER_TYPE_UNIFORM)
2339 bufferTypeStr = "uniform";
2340 else
2341 {
2342 if (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL)
2343 bufferTypeStr = "uniform utextureBuffer";
2344 else
2345 bufferTypeStr = "buffer";
2346 }
2347
2348 const int numVecElements = static_cast<int>(m_resourceDesc.size.x() / sizeof(tcu::UVec4)); // std140 must be aligned to a multiple of 16
2349 std::ostringstream declSrc;
2350 std::ostringstream copySrc;
2351 std::string outputBuff = "layout(set = 0, binding = 1, std140) writeonly buffer Output {\n"
2352 " uvec4 data[" + std::to_string(numVecElements) + "];\n"
2353 "} b_out;\n";
2354 if ((m_bufferType == BUFFER_TYPE_UNIFORM) || (m_bufferType == BUFFER_TYPE_STORAGE))
2355 {
2356 declSrc << "layout(set = 0, binding = 0, std140) readonly " << bufferTypeStr << " Input {\n"
2357 << " uvec4 data[" << numVecElements << "];\n"
2358 << "} b_in;\n"
2359 << "\n"
2360 << outputBuff;
2361
2362 copySrc << " for (int i = 0; i < " << numVecElements << "; ++i) {\n"
2363 << " b_out.data[i] = b_in.data[i];\n"
2364 << " }\n";
2365 }
2366 else if (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL)
2367 {
2368 declSrc << "layout(set = 0, binding = 0) " << bufferTypeStr << " Input;\n"
2369 << "\n"
2370 << outputBuff;
2371
2372 copySrc << " for (int i = 0; i < " << numVecElements << "; ++i) {\n"
2373 << " b_out.data[i] = texelFetch(Input, i);\n"
2374 << " }\n";
2375 }
2376
2377 initPassthroughPrograms(programCollection, m_shaderPrefix, declSrc.str(), copySrc.str(), m_stage);
2378 }
2379
2380 deUint32 getInResourceUsageFlags (void) const
2381 {
2382 if (m_bufferType == BUFFER_TYPE_UNIFORM)
2383 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT : 0;
2384 else if (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL)
2385 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT : 0;
2386 else
2387 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_STORAGE_BUFFER_BIT : 0;
2388 }
2389
2390 deUint32 getOutResourceUsageFlags (void) const
2391 {
2392 if (m_bufferType == BUFFER_TYPE_UNIFORM)
2393 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT : 0;
2394 else if (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL)
2395 return m_mode == ACCESS_MODE_WRITE ? 0 : VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
2396 else
2397 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_STORAGE_BUFFER_BIT : 0;
2398 }
2399
2400 VkQueueFlags getQueueFlags (const OperationContext& context) const
2401 {
2402 DE_UNREF(context);
2403 return (m_stage == VK_SHADER_STAGE_COMPUTE_BIT ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
2404 }
2405
2406 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
2407 {
2408 if (m_stage & VK_SHADER_STAGE_COMPUTE_BIT)
2409 return de::MovePtr<Operation>(new BufferImplementation(context, resource, m_stage, m_bufferType, m_shaderPrefix, m_mode, m_specializedAccess, PIPELINE_TYPE_COMPUTE, m_dispatchCall));
2410 else
2411 return de::MovePtr<Operation>(new BufferImplementation(context, resource, m_stage, m_bufferType, m_shaderPrefix, m_mode, m_specializedAccess, PIPELINE_TYPE_GRAPHICS, m_dispatchCall));
2412 }
2413
2414 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
2415 {
2416 DE_ASSERT(0);
2417 return de::MovePtr<Operation>();
2418 }
2419
2420 vk::VkShaderStageFlagBits getShaderStage (void) { return m_stage; }
2421
2422 private:
2423 const ResourceDescription m_resourceDesc;
2424 const BufferType m_bufferType;
2425 const AccessMode m_mode;
2426 const VkShaderStageFlagBits m_stage;
2427 const std::string m_shaderPrefix;
2428 const DispatchCall m_dispatchCall;
2429 };
2430
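//! Support class for shader read/write access to a storage image.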
2431 class ImageSupport : public OperationSupport
2432 {
2433 public:
2434 ImageSupport (const ResourceDescription& resourceDesc,
2435 const AccessMode mode,
2436 const bool specializedAccess,
2437 const VkShaderStageFlagBits stage,
2438 const DispatchCall dispatchCall = DISPATCH_CALL_DISPATCH)
2439 : OperationSupport (specializedAccess)
2440 , m_resourceDesc (resourceDesc)
2441 , m_mode (mode)
2442 , m_stage (stage)
2443 , m_shaderPrefix (m_mode == ACCESS_MODE_READ ? "read_image_" : "write_image_")
2444 , m_dispatchCall (dispatchCall)
2445 {
2446 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
2447 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
2448 DE_ASSERT(m_dispatchCall == DISPATCH_CALL_DISPATCH || m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT);
2449
2450 assertValidShaderStage(m_stage);
2451 }
2452
2453 void initPrograms (SourceCollections& programCollection) const
2454 {
2455 const std::string imageFormat = getShaderImageFormatQualifier(m_resourceDesc.imageFormat);
2456 const std::string imageType = getShaderImageType(m_resourceDesc.imageFormat, m_resourceDesc.imageType);
2457
2458 std::ostringstream declSrc;
2459 declSrc << "layout(set = 0, binding = 0, " << imageFormat << ") readonly uniform " << imageType << " srcImg;\n"
2460 << "layout(set = 0, binding = 1, " << imageFormat << ") writeonly uniform " << imageType << " dstImg;\n";
2461
2462 std::ostringstream mainSrc;
2463 if (m_resourceDesc.imageType == VK_IMAGE_TYPE_1D)
2464 mainSrc << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
2465 << " imageStore(dstImg, x, imageLoad(srcImg, x));\n";
2466 else if (m_resourceDesc.imageType == VK_IMAGE_TYPE_2D)
2467 mainSrc << " for (int y = 0; y < " << m_resourceDesc.size.y() << "; ++y)\n"
2468 << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
2469 << " imageStore(dstImg, ivec2(x, y), imageLoad(srcImg, ivec2(x, y)));\n";
2470 else if (m_resourceDesc.imageType == VK_IMAGE_TYPE_3D)
2471 mainSrc << " for (int z = 0; z < " << m_resourceDesc.size.z() << "; ++z)\n"
2472 << " for (int y = 0; y < " << m_resourceDesc.size.y() << "; ++y)\n"
2473 << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
2474 << " imageStore(dstImg, ivec3(x, y, z), imageLoad(srcImg, ivec3(x, y, z)));\n";
2475 else
2476 DE_ASSERT(0);
2477
2478 initPassthroughPrograms(programCollection, m_shaderPrefix, declSrc.str(), mainSrc.str(), m_stage);
2479 }
2480
2481 deUint32 getInResourceUsageFlags (void) const
2482 {
2483 return VK_IMAGE_USAGE_STORAGE_BIT;
2484 }
2485
2486 deUint32 getOutResourceUsageFlags (void) const
2487 {
2488 return VK_IMAGE_USAGE_STORAGE_BIT;
2489 }
2490
2491 VkQueueFlags getQueueFlags (const OperationContext& context) const
2492 {
2493 DE_UNREF(context);
2494 return (m_stage == VK_SHADER_STAGE_COMPUTE_BIT ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
2495 }
2496
2497 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
2498 {
2499 if (m_stage & VK_SHADER_STAGE_COMPUTE_BIT)
2500 return de::MovePtr<Operation>(new ImageImplementation(context, resource, m_stage, m_shaderPrefix, m_mode, m_specializedAccess, PIPELINE_TYPE_COMPUTE, m_dispatchCall));
2501 else
2502 return de::MovePtr<Operation>(new ImageImplementation(context, resource, m_stage, m_shaderPrefix, m_mode, m_specializedAccess, PIPELINE_TYPE_GRAPHICS, m_dispatchCall));
2503 }
2504
2505 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
2506 {
2507 DE_ASSERT(0);
2508 return de::MovePtr<Operation>();
2509 }
2510
2511 vk::VkShaderStageFlagBits getShaderStage (void) { return m_stage; }
2512
2513 private:
2514 const ResourceDescription m_resourceDesc;
2515 const AccessMode m_mode;
2516 const VkShaderStageFlagBits m_stage;
2517 const std::string m_shaderPrefix;
2518 const DispatchCall m_dispatchCall;
2519 };
2520
2521 //! Copy operation on a UBO/SSBO in graphics/compute pipeline.
2522 class BufferCopyImplementation : public Operation
2523 {
2524 public:
2525 BufferCopyImplementation (OperationContext& context,
2526 Resource& inResource,
2527 Resource& outResource,
2528 const VkShaderStageFlagBits stage,
2529 const BufferType bufferType,
2530 const std::string& shaderPrefix,
2531 const bool specializedAccess,
2532 const PipelineType pipelineType,
2533 const DispatchCall dispatchCall)
2534 : Operation (specializedAccess)
2535 , m_context (context)
2536 , m_inResource (inResource)
2537 , m_outResource (outResource)
2538 , m_stage (stage)
2539 , m_pipelineStage (pipelineStageFlagsFromShaderStageFlagBits(m_stage))
2540 , m_bufferType (bufferType)
2541 , m_dispatchCall (dispatchCall)
2542 {
2543 requireFeaturesForSSBOAccess (m_context, m_stage);
2544
2545 const DeviceInterface& vk = m_context.getDeviceInterface();
2546 const VkDevice device = m_context.getDevice();
2547
2548 // Prepare descriptors
2549 {
2550 const VkDescriptorType bufferDescriptorType = (m_bufferType == BUFFER_TYPE_UNIFORM ? VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER : VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
2551
2552 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
2553 .addSingleBinding(bufferDescriptorType, m_stage)
2554 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, m_stage)
2555 .build(vk, device);
2556
2557 m_descriptorPool = DescriptorPoolBuilder()
2558 .addType(bufferDescriptorType)
2559 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
2560 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
2561
2562 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
2563
2564 const VkDescriptorBufferInfo inBufferInfo = makeDescriptorBufferInfo(m_inResource.getBuffer().handle, m_inResource.getBuffer().offset, m_inResource.getBuffer().size);
2565 const VkDescriptorBufferInfo outBufferInfo = makeDescriptorBufferInfo(m_outResource.getBuffer().handle, m_outResource.getBuffer().offset, m_outResource.getBuffer().size);
2566
2567 DescriptorSetUpdateBuilder()
2568 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &inBufferInfo)
2569 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &outBufferInfo)
2570 .update(vk, device);
2571 }
2572
2573 // Create pipeline
2574 m_pipeline = (pipelineType == PIPELINE_TYPE_GRAPHICS ? de::MovePtr<Pipeline>(new GraphicsPipeline(context, stage, shaderPrefix, *m_descriptorSetLayout))
2575 : de::MovePtr<Pipeline>(new ComputePipeline(context, m_dispatchCall, shaderPrefix, *m_descriptorSetLayout)));
2576 }
2577
2578 void recordCommands (const VkCommandBuffer cmdBuffer)
2579 {
2580 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
2581 }
2582
2583 SyncInfo getInSyncInfo (void) const
2584 {
2585 VkAccessFlags2KHR accessFlags = (m_specializedAccess ? VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR : VK_ACCESS_2_SHADER_READ_BIT_KHR);
2586 const SyncInfo syncInfo =
2587 {
2588 m_pipelineStage, // VkPipelineStageFlags stageMask;
2589 accessFlags, // VkAccessFlags accessMask;
2590 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
2591 };
2592 return syncInfo;
2593 }
2594
2595 SyncInfo getOutSyncInfo (void) const
2596 {
2597 VkAccessFlags2KHR accessFlags = (m_specializedAccess ? VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR : VK_ACCESS_2_SHADER_WRITE_BIT_KHR);
2598 const SyncInfo syncInfo =
2599 {
2600 m_pipelineStage, // VkPipelineStageFlags stageMask;
2601 accessFlags, // VkAccessFlags accessMask;
2602 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
2603 };
2604 return syncInfo;
2605 }
2606
2607 Data getData (void) const
2608 {
2609 Data data = { 0, DE_NULL };
2610 return data;
2611 }
2612
2613 void setData (const Data&)
2614 {
2615 DE_ASSERT(0);
2616 }
2617
2618 vk::VkShaderStageFlagBits getShaderStage (void) { return m_stage; }
2619
2620 private:
2621 OperationContext& m_context;
2622 Resource& m_inResource;
2623 Resource& m_outResource;
2624 const VkShaderStageFlagBits m_stage;
2625 const VkPipelineStageFlags m_pipelineStage;
2626 const BufferType m_bufferType;
2627 const DispatchCall m_dispatchCall;
2628 Move<VkDescriptorPool> m_descriptorPool;
2629 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
2630 Move<VkDescriptorSet> m_descriptorSet;
2631 de::MovePtr<Pipeline> m_pipeline;
2632 };
2633
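//! Support class for the shader-based buffer copy (UBO/SSBO input, SSBO output).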
2634 class CopyBufferSupport : public OperationSupport
2635 {
2636 public:
2637 CopyBufferSupport (const ResourceDescription& resourceDesc,
2638 const BufferType bufferType,
2639 const bool specializedAccess,
2640 const VkShaderStageFlagBits stage,
2641 const DispatchCall dispatchCall = DISPATCH_CALL_DISPATCH)
2642 : OperationSupport (specializedAccess)
2643 , m_resourceDesc (resourceDesc)
2644 , m_bufferType (bufferType)
2645 , m_stage (stage)
2646 , m_shaderPrefix (std::string("copy_") + getShaderStageName(stage) + (m_bufferType == BUFFER_TYPE_UNIFORM ? "_ubo_" : "_ssbo_"))
2647 , m_dispatchCall (dispatchCall)
2648 {
2649 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER);
2650 DE_ASSERT(m_bufferType == BUFFER_TYPE_UNIFORM || m_bufferType == BUFFER_TYPE_STORAGE);
2651 DE_ASSERT(m_bufferType != BUFFER_TYPE_UNIFORM || m_resourceDesc.size.x() <= MAX_UBO_RANGE);
2652 DE_ASSERT(m_dispatchCall == DISPATCH_CALL_DISPATCH || m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT);
2653
2654 assertValidShaderStage(m_stage);
2655 }
2656
2657 void initPrograms (SourceCollections& programCollection) const
2658 {
2659 DE_ASSERT((m_resourceDesc.size.x() % sizeof(tcu::UVec4)) == 0);
2660
2661 const std::string bufferTypeStr = (m_bufferType == BUFFER_TYPE_UNIFORM ? "uniform" : "buffer");
2662 const int numVecElements = static_cast<int>(m_resourceDesc.size.x() / sizeof(tcu::UVec4)); // std140 must be aligned to a multiple of 16
2663
2664 std::ostringstream declSrc;
2665 declSrc << "layout(set = 0, binding = 0, std140) readonly " << bufferTypeStr << " Input {\n"
2666 << " uvec4 data[" << numVecElements << "];\n"
2667 << "} b_in;\n"
2668 << "\n"
2669 << "layout(set = 0, binding = 1, std140) writeonly buffer Output {\n"
2670 << " uvec4 data[" << numVecElements << "];\n"
2671 << "} b_out;\n";
2672
2673 std::ostringstream copySrc;
2674 copySrc << " for (int i = 0; i < " << numVecElements << "; ++i) {\n"
2675 << " b_out.data[i] = b_in.data[i];\n"
2676 << " }\n";
2677
2678 initPassthroughPrograms(programCollection, m_shaderPrefix, declSrc.str(), copySrc.str(), m_stage);
2679 }
2680
2681 deUint32 getInResourceUsageFlags (void) const
2682 {
2683 return (m_bufferType == BUFFER_TYPE_UNIFORM ? VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT : VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
2684 }
2685
2686 deUint32 getOutResourceUsageFlags (void) const
2687 {
2688 return VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
2689 }
2690
2691 VkQueueFlags getQueueFlags (const OperationContext& context) const
2692 {
2693 DE_UNREF(context);
2694 return (m_stage == VK_SHADER_STAGE_COMPUTE_BIT ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
2695 }
2696
2697 de::MovePtr<Operation> build (OperationContext&, Resource&) const
2698 {
2699 DE_ASSERT(0);
2700 return de::MovePtr<Operation>();
2701 }
2702
2703 de::MovePtr<Operation> build (OperationContext& context, Resource& inResource, Resource& outResource) const
2704 {
2705 if (m_stage & VK_SHADER_STAGE_COMPUTE_BIT)
2706 return de::MovePtr<Operation>(new BufferCopyImplementation(context, inResource, outResource, m_stage, m_bufferType, m_shaderPrefix, m_specializedAccess, PIPELINE_TYPE_COMPUTE, m_dispatchCall));
2707 else
2708 return de::MovePtr<Operation>(new BufferCopyImplementation(context, inResource, outResource, m_stage, m_bufferType, m_shaderPrefix, m_specializedAccess, PIPELINE_TYPE_GRAPHICS, m_dispatchCall));
2709 }
2710
2711 vk::VkShaderStageFlagBits getShaderStage (void) { return m_stage; }
2712
2713 private:
2714 const ResourceDescription m_resourceDesc;
2715 const BufferType m_bufferType;
2716 const VkShaderStageFlagBits m_stage;
2717 const std::string m_shaderPrefix;
2718 const DispatchCall m_dispatchCall;
2719 };
2720
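//! Copy operation between two images, performed through storage image bindings in the selected shader stage.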
2721 class CopyImageImplementation : public Operation
2722 {
2723 public:
2724 CopyImageImplementation (OperationContext& context,
2725 Resource& inResource,
2726 Resource& outResource,
2727 const VkShaderStageFlagBits stage,
2728 const std::string& shaderPrefix,
2729 const bool specializedAccess,
2730 const PipelineType pipelineType,
2731 const DispatchCall dispatchCall)
2732 : Operation (specializedAccess)
2733 , m_context (context)
2734 , m_inResource (inResource)
2735 , m_outResource (outResource)
2736 , m_stage (stage)
2737 , m_pipelineStage (pipelineStageFlagsFromShaderStageFlagBits(m_stage))
2738 , m_dispatchCall (dispatchCall)
2739 {
2740 const DeviceInterface& vk = m_context.getDeviceInterface();
2741 const InstanceInterface& vki = m_context.getInstanceInterface();
2742 const VkDevice device = m_context.getDevice();
2743 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
2744
2745 // Image stores are always required, in either access mode.
2746 requireFeaturesForSSBOAccess(m_context, m_stage);
2747
2748 // Some storage image formats may not be supported
2749 const auto& imgResource = m_inResource.getImage();
2750 requireStorageImageSupport(vki, physDevice, imgResource.format, imgResource.tiling);
2751
2752 // Image resources
2753 {
2754 const VkImageViewType viewType = getImageViewType(m_inResource.getImage().imageType);
2755
2756 m_srcImageView = makeImageView(vk, device, m_inResource.getImage().handle, viewType, m_inResource.getImage().format, m_inResource.getImage().subresourceRange);
2757 m_dstImageView = makeImageView(vk, device, m_outResource.getImage().handle, viewType, m_outResource.getImage().format, m_outResource.getImage().subresourceRange);
2758 }
2759
2760 // Prepare descriptors
2761 {
2762 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
2763 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, m_stage)
2764 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, m_stage)
2765 .build(vk, device);
2766
2767 m_descriptorPool = DescriptorPoolBuilder()
2768 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2769 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2770 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
2771
2772 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
2773
2774 const VkDescriptorImageInfo srcImageInfo = makeDescriptorImageInfo(DE_NULL, *m_srcImageView, VK_IMAGE_LAYOUT_GENERAL);
2775 const VkDescriptorImageInfo dstImageInfo = makeDescriptorImageInfo(DE_NULL, *m_dstImageView, VK_IMAGE_LAYOUT_GENERAL);
2776
2777 DescriptorSetUpdateBuilder()
2778 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &srcImageInfo)
2779 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &dstImageInfo)
2780 .update(vk, device);
2781 }
2782
2783 // Create pipeline
2784 m_pipeline = (pipelineType == PIPELINE_TYPE_GRAPHICS ? de::MovePtr<Pipeline>(new GraphicsPipeline(context, stage, shaderPrefix, *m_descriptorSetLayout))
2785 : de::MovePtr<Pipeline>(new ComputePipeline(context, m_dispatchCall, shaderPrefix, *m_descriptorSetLayout)));
2786 }
2787
2788 void recordCommands (const VkCommandBuffer cmdBuffer)
2789 {
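// Transition the destination image to the GENERAL layout so the copy shader can write to it as a storage image.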
2790 {
2791 const DeviceInterface& vk = m_context.getDeviceInterface();
2792 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
2793
2794 const VkImageMemoryBarrier2KHR imageMemoryBarriers2 = makeImageMemoryBarrier2(
2795 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
2796 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
2797 m_pipelineStage, // VkPipelineStageFlags2KHR dstStageMask
2798 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2799 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
2800 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout newLayout
2801 m_outResource.getImage().handle, // VkImage image
2802 m_outResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
2803 );
2804 VkDependencyInfoKHR dependencyInfo
2805 {
2806 VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR, // VkStructureType sType
2807 DE_NULL, // const void* pNext
2808 VK_DEPENDENCY_BY_REGION_BIT, // VkDependencyFlags dependencyFlags
2809 0u, // deUint32 memoryBarrierCount
2810 DE_NULL, // const VkMemoryBarrier2KHR* pMemoryBarriers
2811 0u, // deUint32 bufferMemoryBarrierCount
2812 DE_NULL, // const VkBufferMemoryBarrier2KHR* pBufferMemoryBarriers
2813 1, // deUint32 imageMemoryBarrierCount
2814 &imageMemoryBarriers2 // const VkImageMemoryBarrier2KHR* pImageMemoryBarriers
2815 };
2816 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2817 }
2818
2819 // Execute shaders
2820 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
2821 }
2822
2823 SyncInfo getInSyncInfo (void) const
2824 {
2825 VkAccessFlags2KHR accessFlags = (m_specializedAccess ? VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR : VK_ACCESS_2_SHADER_READ_BIT_KHR);
2826 const SyncInfo syncInfo =
2827 {
2828 m_pipelineStage, // VkPipelineStageFlags stageMask;
2829 accessFlags, // VkAccessFlags accessMask;
2830 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
2831 };
2832 return syncInfo;
2833 }
2834
2835 SyncInfo getOutSyncInfo (void) const
2836 {
2837 VkAccessFlags2KHR accessFlags = (m_specializedAccess ? VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR : VK_ACCESS_2_SHADER_WRITE_BIT_KHR);
2838 const SyncInfo syncInfo =
2839 {
2840 m_pipelineStage, // VkPipelineStageFlags stageMask;
2841 accessFlags, // VkAccessFlags accessMask;
2842 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
2843 };
2844 return syncInfo;
2845 }
2846
2847 Data getData (void) const
2848 {
2849 Data data = { 0, DE_NULL };
2850 return data;
2851 }
2852
2853 void setData (const Data&)
2854 {
2855 DE_ASSERT(0);
2856 }
2857
2858 vk::VkShaderStageFlagBits getShaderStage (void) { return m_stage; }
2859
2860 private:
2861 OperationContext& m_context;
2862 Resource& m_inResource;
2863 Resource& m_outResource;
2864 const VkShaderStageFlagBits m_stage;
2865 const VkPipelineStageFlags m_pipelineStage;
2866 const DispatchCall m_dispatchCall;
2867 Move<VkImageView> m_srcImageView;
2868 Move<VkImageView> m_dstImageView;
2869 Move<VkDescriptorPool> m_descriptorPool;
2870 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
2871 Move<VkDescriptorSet> m_descriptorSet;
2872 de::MovePtr<Pipeline> m_pipeline;
2873 };
2874
2875 class CopyImageSupport : public OperationSupport
2876 {
2877 public:
2878 CopyImageSupport (const ResourceDescription& resourceDesc,
2879 const VkShaderStageFlagBits stage,
2880 const bool specializedAccess,
2881 const DispatchCall dispatchCall = DISPATCH_CALL_DISPATCH)
2882 : OperationSupport (specializedAccess)
2883 , m_resourceDesc (resourceDesc)
2884 , m_stage (stage)
2885 , m_shaderPrefix (std::string("copy_image_") + getShaderStageName(stage) + "_")
2886 , m_dispatchCall (dispatchCall)
2887 {
2888 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
2889 DE_ASSERT(m_dispatchCall == DISPATCH_CALL_DISPATCH || m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT);
2890
2891 assertValidShaderStage(m_stage);
2892 }
2893
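// Generates a passthrough program whose body copies every texel from srcImg to dstImg with
// imageLoad/imageStore loops matching the dimensionality of the image.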
2894 void initPrograms (SourceCollections& programCollection) const
2895 {
2896 const std::string imageFormat = getShaderImageFormatQualifier(m_resourceDesc.imageFormat);
2897 const std::string imageType = getShaderImageType(m_resourceDesc.imageFormat, m_resourceDesc.imageType);
2898
2899 std::ostringstream declSrc;
2900 declSrc << "layout(set = 0, binding = 0, " << imageFormat << ") readonly uniform " << imageType << " srcImg;\n"
2901 << "layout(set = 0, binding = 1, " << imageFormat << ") writeonly uniform " << imageType << " dstImg;\n";
2902
2903 std::ostringstream mainSrc;
2904 if (m_resourceDesc.imageType == VK_IMAGE_TYPE_1D)
2905 mainSrc << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
2906 << " imageStore(dstImg, x, imageLoad(srcImg, x));\n";
2907 else if (m_resourceDesc.imageType == VK_IMAGE_TYPE_2D)
2908 mainSrc << " for (int y = 0; y < " << m_resourceDesc.size.y() << "; ++y)\n"
2909 << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
2910 << " imageStore(dstImg, ivec2(x, y), imageLoad(srcImg, ivec2(x, y)));\n";
2911 else if (m_resourceDesc.imageType == VK_IMAGE_TYPE_3D)
2912 mainSrc << " for (int z = 0; z < " << m_resourceDesc.size.z() << "; ++z)\n"
2913 << " for (int y = 0; y < " << m_resourceDesc.size.y() << "; ++y)\n"
2914 << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
2915 << " imageStore(dstImg, ivec3(x, y, z), imageLoad(srcImg, ivec3(x, y, z)));\n";
2916 else
2917 DE_ASSERT(0);
2918
2919 initPassthroughPrograms(programCollection, m_shaderPrefix, declSrc.str(), mainSrc.str(), m_stage);
2920 }
2921
2922 deUint32 getInResourceUsageFlags (void) const
2923 {
2924 return VK_IMAGE_USAGE_STORAGE_BIT;
2925 }
2926
2927 deUint32 getOutResourceUsageFlags (void) const
2928 {
2929 return VK_IMAGE_USAGE_STORAGE_BIT;
2930 }
2931
2932 VkQueueFlags getQueueFlags (const OperationContext& context) const
2933 {
2934 DE_UNREF(context);
2935 return (m_stage == VK_SHADER_STAGE_COMPUTE_BIT ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
2936 }
2937
2938 de::MovePtr<Operation> build (OperationContext&, Resource&) const
2939 {
2940 DE_ASSERT(0);
2941 return de::MovePtr<Operation>();
2942 }
2943
2944 de::MovePtr<Operation> build (OperationContext& context, Resource& inResource, Resource& outResource) const
2945 {
2946 if (m_stage & VK_SHADER_STAGE_COMPUTE_BIT)
2947 return de::MovePtr<Operation>(new CopyImageImplementation(context, inResource, outResource, m_stage, m_shaderPrefix, m_specializedAccess, PIPELINE_TYPE_COMPUTE, m_dispatchCall));
2948 else
2949 return de::MovePtr<Operation>(new CopyImageImplementation(context, inResource, outResource, m_stage, m_shaderPrefix, m_specializedAccess, PIPELINE_TYPE_GRAPHICS, m_dispatchCall));
2950 }
2951
2952 vk::VkShaderStageFlagBits getShaderStage (void) { return m_stage; }
2953
2954 private:
2955 const ResourceDescription m_resourceDesc;
2956 const VkShaderStageFlagBits m_stage;
2957 const std::string m_shaderPrefix;
2958 const DispatchCall m_dispatchCall;
2959 };
2960
2961 class MSImageImplementation : public Operation
2962 {
2963 public:
2964 MSImageImplementation(OperationContext& context,
2965 Resource& resource)
2966 : m_context (context)
2967 , m_resource (resource)
2968 , m_hostBufferSizeBytes(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
2969 {
2970 const DeviceInterface& vk = m_context.getDeviceInterface();
2971 const InstanceInterface& vki = m_context.getInstanceInterface();
2972 const VkDevice device = m_context.getDevice();
2973 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
2974 const VkPhysicalDeviceFeatures features = getPhysicalDeviceFeatures(vki, physDevice);
2975 Allocator& allocator = m_context.getAllocator();
2976
2977 const auto& imgResource = m_resource.getImage();
2978 requireStorageImageSupport(vki, physDevice, imgResource.format, imgResource.tiling);
2979 if (!features.shaderStorageImageMultisample)
2980 TCU_THROW(NotSupportedError, "Using multisample images as storage is not supported");
2981
2982 VkBufferCreateInfo bufferCreateInfo = makeBufferCreateInfo(m_hostBufferSizeBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
2983 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator, bufferCreateInfo, MemoryRequirement::HostVisible));
2984 const Allocation& alloc = m_hostBuffer->getAllocation();
2985 fillPattern(alloc.getHostPtr(), m_hostBufferSizeBytes);
2986 flushAlloc(vk, device, alloc);
2987
2988 const ImageResource& image = m_resource.getImage();
2989 const VkImageViewType viewType = getImageViewType(image.imageType);
2990 m_imageView = makeImageView(vk, device, image.handle, viewType, image.format, image.subresourceRange);
2991
2992 // Prepare descriptors
2993 {
2994 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
2995 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
2996 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
2997 .build(vk, device);
2998
2999 m_descriptorPool = DescriptorPoolBuilder()
3000 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
3001 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
3002 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
3003
3004 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
3005
3006 const VkDescriptorBufferInfo bufferInfo = makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_hostBufferSizeBytes);
3007 const VkDescriptorImageInfo imageInfo = makeDescriptorImageInfo(DE_NULL, *m_imageView, VK_IMAGE_LAYOUT_GENERAL);
3008
3009 DescriptorSetUpdateBuilder()
3010 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &bufferInfo)
3011 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &imageInfo)
3012 .update(vk, device);
3013 }
3014
3015 // Create pipeline
3016 const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, context.getBinaryCollection().get("comp"), (VkShaderModuleCreateFlags)0));
3017 m_pipelineLayout = makePipelineLayout (vk, device, *m_descriptorSetLayout);
3018 m_pipeline = makeComputePipeline(vk, device, *m_pipelineLayout, *shaderModule, DE_NULL, context.getPipelineCacheData(), context.getResourceInterface());
3019 }
3020
3021 void recordCommands(const VkCommandBuffer cmdBuffer)
3022 {
3023 const DeviceInterface& vk = m_context.getDeviceInterface();
3024 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
3025
3026 // change image layout
3027 {
3028 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3029 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3030 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3031 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3032 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3033 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3034 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout newLayout
3035 m_resource.getImage().handle, // VkImage image
3036 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
3037 );
3038 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
3039 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3040 }
3041
3042 // execute shader
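// (one 1x1x1 workgroup per texel; the shader writes every sample of that texel)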
3043 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipeline);
3044 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0u, 1u, &*m_descriptorSet, 0u, DE_NULL);
3045 vk.cmdDispatch(cmdBuffer, m_resource.getImage().extent.width, m_resource.getImage().extent.height, 1u);
3046 }
3047
3048 SyncInfo getInSyncInfo(void) const
3049 {
3050 DE_ASSERT(false);
3051 return emptySyncInfo;
3052 }
3053
3054 SyncInfo getOutSyncInfo(void) const
3055 {
3056 return
3057 {
3058 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR, // VkPipelineStageFlags stageMask;
3059 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3060 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
3061 };
3062 }
3063
3064 Data getData(void) const
3065 {
3066 return getHostBufferData(m_context, *m_hostBuffer, m_hostBufferSizeBytes);
3067 }
3068
3069 void setData(const Data&)
3070 {
3071 DE_ASSERT(false);
3072 }
3073
3074 private:
3075 OperationContext& m_context;
3076 Resource& m_resource;
3077 Move<VkImageView> m_imageView;
3078
3079 const VkDeviceSize m_hostBufferSizeBytes;
3080 de::MovePtr<Buffer> m_hostBuffer;
3081
3082 Move<VkDescriptorPool> m_descriptorPool;
3083 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
3084 Move<VkDescriptorSet> m_descriptorSet;
3085 Move<VkPipelineLayout> m_pipelineLayout;
3086 Move<VkPipeline> m_pipeline;
3087 };
3088
3089 class MSImageSupport : public OperationSupport
3090 {
3091 public:
3092 MSImageSupport(const ResourceDescription& resourceDesc)
3093 : m_resourceDesc (resourceDesc)
3094 {
3095 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
3096 }
3097
3098 void initPrograms (SourceCollections& programCollection) const
3099 {
3100 std::stringstream source;
3101 source <<
3102 "#version 440\n"
3103 "\n"
3104 "layout(local_size_x = 1) in;\n"
3105 "layout(set = 0, binding = 0, std430) readonly buffer Input {\n"
3106 " uint data[];\n"
3107 "} inData;\n"
3108 "layout(set = 0, binding = 1, r32ui) writeonly uniform uimage2DMS msImage;\n"
3109 "\n"
3110 "void main (void)\n"
3111 "{\n"
3112 " int gx = int(gl_GlobalInvocationID.x);\n"
3113 " int gy = int(gl_GlobalInvocationID.y);\n"
3114 " uint value = inData.data[gy * " << m_resourceDesc.size.x() << " + gx];\n"
3115 " for (int sampleNdx = 0; sampleNdx < " << m_resourceDesc.imageSamples << "; ++sampleNdx)\n"
3116 " imageStore(msImage, ivec2(gx, gy), sampleNdx, uvec4(value));\n"
3117 "}\n";
3118 programCollection.glslSources.add("comp") << glu::ComputeSource(source.str().c_str());
3119 }
3120
3121 deUint32 getInResourceUsageFlags (void) const
3122 {
3123 return 0;
3124 }
3125
3126 deUint32 getOutResourceUsageFlags (void) const
3127 {
3128 return VK_IMAGE_USAGE_STORAGE_BIT;
3129 }
3130
3131 VkQueueFlags getQueueFlags (const OperationContext&) const
3132 {
3133 return VK_QUEUE_COMPUTE_BIT;
3134 }
3135
3136 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
3137 {
3138 return de::MovePtr<Operation>(new MSImageImplementation(context, resource));
3139 }
3140
3141 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
3142 {
3143 DE_ASSERT(0);
3144 return de::MovePtr<Operation>();
3145 }
3146
3147 private:
3148 const ResourceDescription m_resourceDesc;
3149 };
3150
3151 } // ShaderAccess ns
3152
3153 namespace CopyBufferToImage
3154 {
3155
3156 class WriteImplementation : public Operation
3157 {
3158 public:
3159 WriteImplementation (OperationContext& context, Resource& resource)
3160 : m_context (context)
3161 , m_resource (resource)
3162 , m_bufferSize (getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
3163 {
3164 DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_IMAGE);
3165
3166 const DeviceInterface& vk = m_context.getDeviceInterface();
3167 const VkDevice device = m_context.getDevice();
3168 Allocator& allocator = m_context.getAllocator();
3169
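// Host-visible staging buffer, filled with a reproducible pattern; recordCommands() uploads it to the image.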
3170 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
3171 vk, device, allocator, makeBufferCreateInfo(m_bufferSize, VK_BUFFER_USAGE_TRANSFER_SRC_BIT), MemoryRequirement::HostVisible));
3172
3173 const Allocation& alloc = m_hostBuffer->getAllocation();
3174 fillPattern(alloc.getHostPtr(), m_bufferSize);
3175 flushAlloc(vk, device, alloc);
3176 }
3177
3178 void recordCommands (const VkCommandBuffer cmdBuffer)
3179 {
3180 const DeviceInterface& vk = m_context.getDeviceInterface();
3181 const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_resource.getImage().extent, m_resource.getImage().subresourceLayers);
3182 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
3183
3184 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3185 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3186 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3187 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3188 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3189 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3190 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3191 m_resource.getImage().handle, // VkImage image
3192 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
3193 );
3194 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
3195 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3196
3197 vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
3198 }
3199
3200 SyncInfo getInSyncInfo (void) const
3201 {
3202 return emptySyncInfo;
3203 }
3204
3205 SyncInfo getOutSyncInfo (void) const
3206 {
3207 const SyncInfo syncInfo =
3208 {
3209 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3210 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3211 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
3212 };
3213 return syncInfo;
3214 }
3215
3216 Data getData (void) const
3217 {
3218 return getHostBufferData(m_context, *m_hostBuffer, m_bufferSize);
3219 }
3220
3221 void setData (const Data& data)
3222 {
3223 setHostBufferData(m_context, *m_hostBuffer, data);
3224 }
3225
3226 private:
3227 OperationContext& m_context;
3228 Resource& m_resource;
3229 de::MovePtr<Buffer> m_hostBuffer;
3230 const VkDeviceSize m_bufferSize;
3231 };
3232
3233 class ReadImplementation : public Operation
3234 {
3235 public:
3236 ReadImplementation (OperationContext& context, Resource& resource)
3237 : m_context (context)
3238 , m_resource (resource)
3239 , m_subresourceRange (makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u))
3240 , m_subresourceLayers (makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u))
3241 {
3242 DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_BUFFER);
3243
3244 const DeviceInterface& vk = m_context.getDeviceInterface();
3245 const VkDevice device = m_context.getDevice();
3246 Allocator& allocator = m_context.getAllocator();
3247 const VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
3248 const deUint32 pixelSize = tcu::getPixelSize(mapVkFormat(format));
3249
3250 DE_ASSERT((m_resource.getBuffer().size % pixelSize) == 0);
3251 m_imageExtent = get2DImageExtentWithSize(m_resource.getBuffer().size, pixelSize); // there may be some unused space at the end
3252
3253 // Copy destination image.
3254 m_image = de::MovePtr<Image>(new Image(
3255 vk, device, allocator,
3256 makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_imageExtent, format,
3257 (VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
3258 VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
3259 MemoryRequirement::Any));
3260
3261 // Image data will be copied here, so it can be read on the host.
3262 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
3263 vk, device, allocator, makeBufferCreateInfo(m_resource.getBuffer().size, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible));
3264 }
3265
3266 void recordCommands (const VkCommandBuffer cmdBuffer)
3267 {
3268 const DeviceInterface& vk = m_context.getDeviceInterface();
3269 const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_imageExtent, m_subresourceLayers);
3270 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
3271
3272 // Resource -> Image
3273 {
3274 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3275 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3276 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3277 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3278 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3279 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3280 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3281 **m_image, // VkImage image
3282 m_subresourceRange // VkImageSubresourceRange subresourceRange
3283 );
3284 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
3285 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3286
3287 vk.cmdCopyBufferToImage(cmdBuffer, m_resource.getBuffer().handle, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
3288 }
3289 // Image -> Host buffer
3290 {
3291 const VkImageMemoryBarrier2KHR imageLayoutBarrier2 = makeImageMemoryBarrier2(
3292 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3293 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
3294 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3295 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3296 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
3297 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
3298 **m_image, // VkImage image
3299 m_subresourceRange // VkImageSubresourceRange subresourceRange
3300 );
3301 VkDependencyInfoKHR layoutDependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageLayoutBarrier2);
3302 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &layoutDependencyInfo);
3303
3304 vk.cmdCopyImageToBuffer(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_hostBuffer, 1u, &copyRegion);
3305
3306 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
3307 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3308 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
3309 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3310 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3311 **m_hostBuffer, // VkBuffer buffer
3312 0u, // VkDeviceSize offset
3313 m_resource.getBuffer().size // VkDeviceSize size
3314 );
3315 VkDependencyInfoKHR bufferDependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
3316 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &bufferDependencyInfo);
3317 }
3318 }
3319
3320 SyncInfo getInSyncInfo (void) const
3321 {
3322 const SyncInfo syncInfo =
3323 {
3324 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3325 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
3326 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
3327 };
3328 return syncInfo;
3329 }
3330
3331 SyncInfo getOutSyncInfo (void) const
3332 {
3333 return emptySyncInfo;
3334 }
3335
3336 Data getData (void) const
3337 {
3338 return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
3339 }
3340
3341 void setData (const Data& data)
3342 {
3343 setHostBufferData(m_context, *m_hostBuffer, data);
3344 }
3345
3346 private:
3347 OperationContext& m_context;
3348 Resource& m_resource;
3349 const VkImageSubresourceRange m_subresourceRange;
3350 const VkImageSubresourceLayers m_subresourceLayers;
3351 de::MovePtr<Buffer> m_hostBuffer;
3352 de::MovePtr<Image> m_image;
3353 VkExtent3D m_imageExtent;
3354 };
3355
3356 class Support : public OperationSupport
3357 {
3358 public:
3359 Support (const ResourceDescription& resourceDesc, const AccessMode mode)
3360 : m_mode (mode)
3361 , m_resourceType (resourceDesc.type)
3362 , m_requiredQueueFlags (resourceDesc.type == RESOURCE_TYPE_IMAGE && isDepthStencilFormat(resourceDesc.imageFormat) ? VK_QUEUE_GRAPHICS_BIT : VK_QUEUE_TRANSFER_BIT)
3363 {
3364 // From spec:
3365 // Because depth or stencil aspect buffer to image copies may require format conversions on some implementations,
3366 // they are not supported on queues that do not support graphics.
3367
3368 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
3369 DE_ASSERT(m_mode == ACCESS_MODE_READ || resourceDesc.type != RESOURCE_TYPE_BUFFER);
3370 DE_ASSERT(m_mode == ACCESS_MODE_WRITE || resourceDesc.type != RESOURCE_TYPE_IMAGE);
3371 }
3372
3373 deUint32 getInResourceUsageFlags (void) const
3374 {
3375 if (m_resourceType == RESOURCE_TYPE_IMAGE)
3376 return m_mode == ACCESS_MODE_READ ? VK_IMAGE_USAGE_TRANSFER_SRC_BIT : 0;
3377 else
3378 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_SRC_BIT : 0;
3379 }
3380
3381 deUint32 getOutResourceUsageFlags (void) const
3382 {
3383 if (m_resourceType == RESOURCE_TYPE_IMAGE)
3384 return m_mode == ACCESS_MODE_WRITE ? VK_IMAGE_USAGE_TRANSFER_DST_BIT : 0;
3385 else
3386 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_TRANSFER_DST_BIT : 0;
3387 }
3388
3389 VkQueueFlags getQueueFlags (const OperationContext& context) const
3390 {
3391 DE_UNREF(context);
3392 return m_requiredQueueFlags;
3393 }
3394
3395 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
3396 {
3397 if (m_mode == ACCESS_MODE_READ)
3398 return de::MovePtr<Operation>(new ReadImplementation(context, resource));
3399 else
3400 return de::MovePtr<Operation>(new WriteImplementation(context, resource));
3401 }
3402
3403 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
3404 {
3405 DE_ASSERT(0);
3406 return de::MovePtr<Operation>();
3407 }
3408
3409 private:
3410 const AccessMode m_mode;
3411 const enum ResourceType m_resourceType;
3412 const VkQueueFlags m_requiredQueueFlags;
3413 };
3414
3415 class CopyImplementation : public Operation
3416 {
3417 public:
3418 CopyImplementation (OperationContext& context, Resource& inResource, Resource& outResource)
3419 : m_context (context)
3420 , m_inResource (inResource)
3421 , m_outResource (outResource)
3422 {
3423 DE_ASSERT(m_inResource.getType() == RESOURCE_TYPE_BUFFER);
3424 DE_ASSERT(m_outResource.getType() == RESOURCE_TYPE_IMAGE);
3425 }
3426
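// Upload the input buffer into the output image: make both resources available to the transfer
// stage, move the image to TRANSFER_DST_OPTIMAL, then record the copy.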
3427 void recordCommands (const VkCommandBuffer cmdBuffer)
3428 {
3429 const DeviceInterface& vk = m_context.getDeviceInterface();
3430 const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_outResource.getImage().extent, m_outResource.getImage().subresourceLayers);
3431 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
3432
3433 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
3434 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3435 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3436 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3437 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3438 m_inResource.getBuffer().handle, // VkBuffer buffer
3439 0u, // VkDeviceSize offset
3440 m_inResource.getBuffer().size // VkDeviceSize size
3441 );
3442 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3443 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3444 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3445 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3446 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3447 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3448 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3449 m_outResource.getImage().handle, // VkImage image
3450 m_outResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
3451 );
3452 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2, &imageMemoryBarrier2);
3453 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3454
3455 vk.cmdCopyBufferToImage(cmdBuffer, m_inResource.getBuffer().handle, m_outResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
3456 }
3457
3458 SyncInfo getInSyncInfo (void) const
3459 {
3460 const SyncInfo syncInfo =
3461 {
3462 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3463 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
3464 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout imageLayout;
3465 };
3466 return syncInfo;
3467 }
3468
3469 SyncInfo getOutSyncInfo (void) const
3470 {
3471 const SyncInfo syncInfo =
3472 {
3473 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3474 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3475 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
3476 };
3477 return syncInfo;
3478 }
3479
3480 Data getData (void) const
3481 {
3482 Data data = { 0, DE_NULL };
3483 return data;
3484 }
3485
3486 void setData (const Data&)
3487 {
3488 DE_ASSERT(0);
3489 }
3490
3491 private:
3492 OperationContext& m_context;
3493 Resource& m_inResource;
3494 Resource& m_outResource;
3495 };
3496
3497 class CopySupport : public OperationSupport
3498 {
3499 public:
3500 CopySupport (const ResourceDescription& resourceDesc)
3501 : m_resourceType (resourceDesc.type)
3502 , m_requiredQueueFlags (resourceDesc.type == RESOURCE_TYPE_IMAGE && isDepthStencilFormat(resourceDesc.imageFormat) ? VK_QUEUE_GRAPHICS_BIT : VK_QUEUE_TRANSFER_BIT)
3503 {
3504 }
3505
3506 deUint32 getInResourceUsageFlags (void) const
3507 {
3508 if (m_resourceType == RESOURCE_TYPE_IMAGE)
3509 return VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
3510 else
3511 return VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3512 }
3513
3514 deUint32 getOutResourceUsageFlags (void) const
3515 {
3516 if (m_resourceType == RESOURCE_TYPE_IMAGE)
3517 return VK_IMAGE_USAGE_TRANSFER_DST_BIT;
3518 else
3519 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3520 }
3521
3522 VkQueueFlags getQueueFlags (const OperationContext& context) const
3523 {
3524 DE_UNREF(context);
3525 return m_requiredQueueFlags;
3526 }
3527
3528 de::MovePtr<Operation> build (OperationContext&, Resource&) const
3529 {
3530 DE_ASSERT(0);
3531 return de::MovePtr<Operation>();
3532 }
3533
3534 de::MovePtr<Operation> build (OperationContext& context, Resource& inResource, Resource& outResource) const
3535 {
3536 return de::MovePtr<Operation>(new CopyImplementation(context, inResource, outResource));
3537 }
3538
3539 private:
3540 const enum ResourceType m_resourceType;
3541 const VkQueueFlags m_requiredQueueFlags;
3542 };
3543
3544 } // CopyBufferToImage ns
3545
3546 namespace CopyImageToBuffer
3547 {
3548
3549 class WriteImplementation : public Operation
3550 {
3551 public:
3552 WriteImplementation (OperationContext& context, Resource& resource)
3553 : m_context (context)
3554 , m_resource (resource)
3555 , m_subresourceRange (makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u))
3556 , m_subresourceLayers (makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u))
3557 {
3558 DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_BUFFER);
3559
3560 const DeviceInterface& vk = m_context.getDeviceInterface();
3561 const VkDevice device = m_context.getDevice();
3562 Allocator& allocator = m_context.getAllocator();
3563 const VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
3564 const deUint32 pixelSize = tcu::getPixelSize(mapVkFormat(format));
3565
3566 DE_ASSERT((m_resource.getBuffer().size % pixelSize) == 0);
3567 m_imageExtent = get2DImageExtentWithSize(m_resource.getBuffer().size, pixelSize);
3568
3569 // Source data staging buffer
3570 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
3571 vk, device, allocator, makeBufferCreateInfo(m_resource.getBuffer().size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT), MemoryRequirement::HostVisible));
3572
3573 const Allocation& alloc = m_hostBuffer->getAllocation();
3574 fillPattern(alloc.getHostPtr(), m_resource.getBuffer().size);
3575 flushAlloc(vk, device, alloc);
3576
3577 // Source data image
3578 m_image = de::MovePtr<Image>(new Image(
3579 vk, device, allocator,
3580 makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_imageExtent, format, (VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
3581 VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
3582 MemoryRequirement::Any));
3583 }
3584
3585 void recordCommands (const VkCommandBuffer cmdBuffer)
3586 {
3587 const DeviceInterface& vk = m_context.getDeviceInterface();
3588 const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_imageExtent, m_subresourceLayers);
3589 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
3590
3591 // Host buffer -> Image
3592 {
3593 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3594 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3595 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3596 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3597 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3598 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3599 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3600 **m_image, // VkImage image
3601 m_subresourceRange // VkImageSubresourceRange subresourceRange
3602 );
3603 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
3604 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3605
3606 vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
3607 }
3608 // Image -> Resource
3609 {
3610 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3611 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3612 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
3613 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3614 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3615 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
3616 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
3617 **m_image, // VkImage image
3618 m_subresourceRange // VkImageSubresourceRange subresourceRange
3619 );
3620 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
3621 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3622
3623 vk.cmdCopyImageToBuffer(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_resource.getBuffer().handle, 1u, &copyRegion);
3624 }
3625 }
3626
3627 SyncInfo getInSyncInfo (void) const
3628 {
3629 return emptySyncInfo;
3630 }
3631
3632 SyncInfo getOutSyncInfo (void) const
3633 {
3634 const SyncInfo syncInfo =
3635 {
3636 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3637 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3638 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
3639 };
3640 return syncInfo;
3641 }
3642
3643 Data getData (void) const
3644 {
3645 return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
3646 }
3647
3648 void setData (const Data& data)
3649 {
3650 setHostBufferData(m_context, *m_hostBuffer, data);
3651 }
3652
3653 private:
3654 OperationContext& m_context;
3655 Resource& m_resource;
3656 const VkImageSubresourceRange m_subresourceRange;
3657 const VkImageSubresourceLayers m_subresourceLayers;
3658 de::MovePtr<Buffer> m_hostBuffer;
3659 de::MovePtr<Image> m_image;
3660 VkExtent3D m_imageExtent;
3661 };
3662
3663 class ReadImplementation : public Operation
3664 {
3665 public:
3666 ReadImplementation (OperationContext& context, Resource& resource)
3667 : m_context (context)
3668 , m_resource (resource)
3669 , m_bufferSize (getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
3670 {
3671 DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_IMAGE);
3672
3673 const DeviceInterface& vk = m_context.getDeviceInterface();
3674 const VkDevice device = m_context.getDevice();
3675 Allocator& allocator = m_context.getAllocator();
3676
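// Host-visible readback buffer, zero-initialized before the device writes into it.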
3677 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
3678 vk, device, allocator, makeBufferCreateInfo(m_bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible));
3679
3680 const Allocation& alloc = m_hostBuffer->getAllocation();
3681 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_bufferSize));
3682 flushAlloc(vk, device, alloc);
3683 }
3684
3685 void recordCommands (const VkCommandBuffer cmdBuffer)
3686 {
3687 const DeviceInterface& vk = m_context.getDeviceInterface();
3688 const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_resource.getImage().extent, m_resource.getImage().subresourceLayers);
3689 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
3690
3691 vk.cmdCopyImageToBuffer(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_hostBuffer, 1u, &copyRegion);
3692
3693 // Insert a barrier so data written by the transfer is available to the host
3694 {
3695 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
3696 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3697 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
3698 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3699 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3700 **m_hostBuffer, // VkBuffer buffer
3701 0u, // VkDeviceSize offset
3702 VK_WHOLE_SIZE // VkDeviceSize size
3703 );
3704 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
3705 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3706 }
3707 }
3708
3709 SyncInfo getInSyncInfo (void) const
3710 {
3711 const SyncInfo syncInfo =
3712 {
3713 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3714 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
3715 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout imageLayout;
3716 };
3717 return syncInfo;
3718 }
3719
3720 SyncInfo getOutSyncInfo (void) const
3721 {
3722 return emptySyncInfo;
3723 }
3724
3725 Data getData (void) const
3726 {
3727 return getHostBufferData(m_context, *m_hostBuffer, m_bufferSize);
3728 }
3729
3730 void setData (const Data&)
3731 {
3732 DE_ASSERT(0);
3733 }
3734
3735 private:
3736 OperationContext& m_context;
3737 Resource& m_resource;
3738 de::MovePtr<Buffer> m_hostBuffer;
3739 const VkDeviceSize m_bufferSize;
3740 };
3741
3742 class CopyImplementation : public Operation
3743 {
3744 public:
3745 CopyImplementation (OperationContext& context, Resource& inResource, Resource& outResource)
3746 : m_context (context)
3747 , m_inResource (inResource)
3748 , m_outResource (outResource)
3749 , m_subresourceRange (makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u))
3750 , m_subresourceLayers (makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u))
3751 {
3752 DE_ASSERT(m_inResource.getType() == RESOURCE_TYPE_IMAGE);
3753 DE_ASSERT(m_outResource.getType() == RESOURCE_TYPE_BUFFER);
3754 }
3755
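// Read the input image back into the output buffer: make both resources available to the
// transfer stage before recording the image-to-buffer copy.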
3756 void recordCommands (const VkCommandBuffer cmdBuffer)
3757 {
3758 const DeviceInterface& vk = m_context.getDeviceInterface();
3759 const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_inResource.getImage().extent, m_subresourceLayers);
3760 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
3761
3762 {
3763 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3764 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3765 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3766 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3767 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3768 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3769 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3770 m_inResource.getImage().handle, // VkImage image
3771 m_inResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
3772 );
3773 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
3774 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3775 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3776 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3777 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3778 m_outResource.getBuffer().handle, // VkBuffer buffer
3779 0u, // VkDeviceSize offset
3780 m_outResource.getBuffer().size // VkDeviceSize size
3781 );
3782 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2, &imageMemoryBarrier2);
3783 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3784 }
3785
3786 vk.cmdCopyImageToBuffer(cmdBuffer, m_inResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_outResource.getBuffer().handle, 1u, &copyRegion);
3787 }
3788
3789 SyncInfo getInSyncInfo (void) const
3790 {
3791 const SyncInfo syncInfo =
3792 {
3793 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3794 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
3795 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
3796 };
3797 return syncInfo;
3798 }
3799
3800 SyncInfo getOutSyncInfo (void) const
3801 {
3802 const SyncInfo syncInfo =
3803 {
3804 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3805 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3806 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
3807 };
3808 return syncInfo;
3809 }
3810
3811 Data getData (void) const
3812 {
3813 Data data = { 0, DE_NULL };
3814 return data;
3815 }
3816
3817 void setData (const Data&)
3818 {
3819 DE_ASSERT(0);
3820 }
3821
3822 private:
3823 OperationContext& m_context;
3824 Resource& m_inResource;
3825 Resource& m_outResource;
3826 const VkImageSubresourceRange m_subresourceRange;
3827 const VkImageSubresourceLayers m_subresourceLayers;
3828 };
3829
3830 class Support : public OperationSupport
3831 {
3832 public:
3833 Support (const ResourceDescription& resourceDesc, const AccessMode mode)
3834 : m_mode (mode)
3835 , m_requiredQueueFlags (resourceDesc.type == RESOURCE_TYPE_IMAGE && isDepthStencilFormat(resourceDesc.imageFormat) ? VK_QUEUE_GRAPHICS_BIT : VK_QUEUE_TRANSFER_BIT)
3836 {
3837 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
3838 DE_ASSERT(m_mode == ACCESS_MODE_READ || resourceDesc.type != RESOURCE_TYPE_IMAGE);
3839 DE_ASSERT(m_mode == ACCESS_MODE_WRITE || resourceDesc.type != RESOURCE_TYPE_BUFFER);
3840 }
3841
3842 deUint32 getInResourceUsageFlags (void) const
3843 {
3844 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_SRC_BIT : 0;
3845 }
3846
3847 deUint32 getOutResourceUsageFlags (void) const
3848 {
3849 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_TRANSFER_DST_BIT : 0;
3850 }
3851
3852 VkQueueFlags getQueueFlags (const OperationContext& context) const
3853 {
3854 DE_UNREF(context);
3855 return m_requiredQueueFlags;
3856 }
3857
3858 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
3859 {
3860 if (m_mode == ACCESS_MODE_READ)
3861 return de::MovePtr<Operation>(new ReadImplementation(context, resource));
3862 else
3863 return de::MovePtr<Operation>(new WriteImplementation(context, resource));
3864 }
3865
3866 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
3867 {
3868 DE_ASSERT(0);
3869 return de::MovePtr<Operation>();
3870 }
3871
3872 private:
3873 const AccessMode m_mode;
3874 const VkQueueFlags m_requiredQueueFlags;
3875 };
3876
3877 } // CopyImageToBuffer ns
3878
3879 namespace ClearImage
3880 {
3881
3882 enum ClearMode
3883 {
3884 CLEAR_MODE_COLOR,
3885 CLEAR_MODE_DEPTH_STENCIL,
3886 };
3887
3888 class Implementation : public Operation
3889 {
3890 public:
3891 Implementation (OperationContext& context, Resource& resource, const ClearMode mode)
3892 : m_context (context)
3893 , m_resource (resource)
3894 , m_clearValue (makeClearValue(m_resource.getImage().format))
3895 , m_mode (mode)
3896 {
3897 const VkDeviceSize size = getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent);
3898 const VkExtent3D& extent = m_resource.getImage().extent;
3899 const VkFormat format = m_resource.getImage().format;
3900 const tcu::TextureFormat texFormat = mapVkFormat(format);
3901
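// Build the host-side reference data: the same clear value written into a local pixel buffer,
// which getData() later returns.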
3902 m_data.resize(static_cast<std::size_t>(size));
3903 tcu::PixelBufferAccess imagePixels(texFormat, extent.width, extent.height, extent.depth, &m_data[0]);
3904 clearPixelBuffer(imagePixels, m_clearValue);
3905 }
3906
3907 void recordCommands (const VkCommandBuffer cmdBuffer)
3908 {
3909 const DeviceInterface& vk = m_context.getDeviceInterface();
3910 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
3911
3912 VkPipelineStageFlags2KHR dstStageMask = VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR;
3913 if (m_context.getSynchronizationType() == SynchronizationType::SYNCHRONIZATION2)
3914 dstStageMask = VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR;
3915
3916 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3917 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3918 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3919 dstStageMask, // VkPipelineStageFlags2KHR dstStageMask
3920 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3921 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3922 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3923 m_resource.getImage().handle, // VkImage image
3924 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
3925 );
3926 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
3927 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3928
3929 if (m_mode == CLEAR_MODE_COLOR)
3930 vk.cmdClearColorImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &m_clearValue.color, 1u, &m_resource.getImage().subresourceRange);
3931 else
3932 vk.cmdClearDepthStencilImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &m_clearValue.depthStencil, 1u, &m_resource.getImage().subresourceRange);
3933 }
3934
3935 SyncInfo getInSyncInfo (void) const
3936 {
3937 return emptySyncInfo;
3938 }
3939
3940 SyncInfo getOutSyncInfo (void) const
3941 {
3942 VkPipelineStageFlags2KHR stageMask = VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR;
3943 if (m_context.getSynchronizationType() == SynchronizationType::SYNCHRONIZATION2)
3944 stageMask = VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR;
3945
3946 return
3947 {
3948 stageMask, // VkPipelineStageFlags stageMask;
3949 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3950 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
3951 };
3952 }
3953
3954 Data getData (void) const
3955 {
3956 const Data data =
3957 {
3958 m_data.size(), // std::size_t size;
3959 &m_data[0], // const deUint8* data;
3960 };
3961 return data;
3962 }
3963
3964 void setData (const Data&)
3965 {
3966 DE_ASSERT(0);
3967 }
3968
3969 private:
3970 OperationContext& m_context;
3971 Resource& m_resource;
3972 std::vector<deUint8> m_data;
3973 const VkClearValue m_clearValue;
3974 const ClearMode m_mode;
3975 };
3976
3977 class Support : public OperationSupport
3978 {
3979 public:
3980 Support (const ResourceDescription& resourceDesc, const ClearMode mode)
3981 : m_resourceDesc (resourceDesc)
3982 , m_mode (mode)
3983 {
3984 DE_ASSERT(m_mode == CLEAR_MODE_COLOR || m_mode == CLEAR_MODE_DEPTH_STENCIL);
3985 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
3986 DE_ASSERT(m_resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT || (m_mode != CLEAR_MODE_COLOR));
3987 DE_ASSERT((m_resourceDesc.imageAspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) || (m_mode != CLEAR_MODE_DEPTH_STENCIL));
3988 }
3989
3990 deUint32 getInResourceUsageFlags (void) const
3991 {
3992 return 0;
3993 }
3994
3995 deUint32 getOutResourceUsageFlags (void) const
3996 {
3997 return VK_IMAGE_USAGE_TRANSFER_DST_BIT;
3998 }
3999
4000 VkQueueFlags getQueueFlags (const OperationContext& context) const
4001 {
4002 DE_UNREF(context);
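// Color clears are allowed on graphics and compute queues; depth/stencil clears require a graphics queue.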
4003 if (m_mode == CLEAR_MODE_COLOR)
4004 return VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
4005 else
4006 return VK_QUEUE_GRAPHICS_BIT;
4007 }
4008
4009 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
4010 {
4011 return de::MovePtr<Operation>(new Implementation(context, resource, m_mode));
4012 }
4013
4014 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
4015 {
4016 DE_ASSERT(0);
4017 return de::MovePtr<Operation>();
4018 }
4019
4020 private:
4021 const ResourceDescription m_resourceDesc;
4022 const ClearMode m_mode;
4023 };
4024
4025 } // ClearImage ns
4026
4027 namespace Draw
4028 {
4029
4030 enum DrawCall
4031 {
4032 DRAW_CALL_DRAW,
4033 DRAW_CALL_DRAW_INDEXED,
4034 DRAW_CALL_DRAW_INDIRECT,
4035 DRAW_CALL_DRAW_INDEXED_INDIRECT,
4036 };
4037
4038 //! A write operation that is a result of drawing to an image.
4039 //! \todo Add support for depth/stencil too?
4040 class Implementation : public Operation
4041 {
4042 public:
4043 Implementation (OperationContext& context, Resource& resource, const DrawCall drawCall)
4044 : m_context (context)
4045 , m_resource (resource)
4046 , m_drawCall (drawCall)
4047 , m_vertices (context)
4048 {
4049 const DeviceInterface& vk = context.getDeviceInterface();
4050 const VkDevice device = context.getDevice();
4051 Allocator& allocator = context.getAllocator();
4052
4053 // Indirect buffer
4054
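// For indirect draws the draw parameters are written into a small host-visible buffer here and read by the device at draw time.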
4055 if (m_drawCall == DRAW_CALL_DRAW_INDIRECT)
4056 {
4057 m_indirectBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator,
4058 makeBufferCreateInfo(sizeof(VkDrawIndirectCommand), VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT), MemoryRequirement::HostVisible));
4059
4060 const Allocation& alloc = m_indirectBuffer->getAllocation();
4061 VkDrawIndirectCommand* const pIndirectCommand = static_cast<VkDrawIndirectCommand*>(alloc.getHostPtr());
4062
4063 pIndirectCommand->vertexCount = m_vertices.getNumVertices();
4064 pIndirectCommand->instanceCount = 1u;
4065 pIndirectCommand->firstVertex = 0u;
4066 pIndirectCommand->firstInstance = 0u;
4067
4068 flushAlloc(vk, device, alloc);
4069 }
4070 else if (m_drawCall == DRAW_CALL_DRAW_INDEXED_INDIRECT)
4071 {
4072 m_indirectBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator,
4073 makeBufferCreateInfo(sizeof(VkDrawIndexedIndirectCommand), VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT), MemoryRequirement::HostVisible));
4074
4075 const Allocation& alloc = m_indirectBuffer->getAllocation();
4076 VkDrawIndexedIndirectCommand* const pIndirectCommand = static_cast<VkDrawIndexedIndirectCommand*>(alloc.getHostPtr());
4077
4078 pIndirectCommand->indexCount = m_vertices.getNumIndices();
4079 pIndirectCommand->instanceCount = 1u;
4080 pIndirectCommand->firstIndex = 0u;
4081 pIndirectCommand->vertexOffset = 0u;
4082 pIndirectCommand->firstInstance = 0u;
4083
4084 flushAlloc(vk, device, alloc);
4085 }
4086
4087 // Resource image is the color attachment
4088
4089 m_colorFormat = m_resource.getImage().format;
4090 m_colorSubresourceRange = m_resource.getImage().subresourceRange;
4091 m_colorImage = m_resource.getImage().handle;
4092 m_attachmentExtent = m_resource.getImage().extent;
4093
4094 // Pipeline
4095
4096 m_colorAttachmentView = makeImageView (vk, device, m_colorImage, VK_IMAGE_VIEW_TYPE_2D, m_colorFormat, m_colorSubresourceRange);
4097 m_renderPass = makeRenderPass (vk, device, m_colorFormat);
4098 m_framebuffer = makeFramebuffer (vk, device, *m_renderPass, *m_colorAttachmentView, m_attachmentExtent.width, m_attachmentExtent.height);
4099 m_pipelineLayout = makePipelineLayout(vk, device);
4100
4101 GraphicsPipelineBuilder pipelineBuilder;
4102 pipelineBuilder
4103 .setRenderSize (tcu::IVec2(m_attachmentExtent.width, m_attachmentExtent.height))
4104 .setVertexInputSingleAttribute (m_vertices.getVertexFormat(), m_vertices.getVertexStride())
4105 .setShader (vk, device, VK_SHADER_STAGE_VERTEX_BIT, context.getBinaryCollection().get("draw_vert"), DE_NULL)
4106 .setShader (vk, device, VK_SHADER_STAGE_FRAGMENT_BIT, context.getBinaryCollection().get("draw_frag"), DE_NULL);
4107
4108 m_pipeline = pipelineBuilder.build(vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData(), context.getResourceInterface() );
4109
4110 // Set expected draw values
4111
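// The vertex grid covers the full render area and the fragment shader (see Support::initPrograms) outputs this clear value,
// so the reference data is simply the whole image filled with it.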
4112 m_expectedData.resize(static_cast<size_t>(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent)));
4113 tcu::PixelBufferAccess imagePixels(mapVkFormat(m_colorFormat), m_attachmentExtent.width, m_attachmentExtent.height, m_attachmentExtent.depth, &m_expectedData[0]);
4114 clearPixelBuffer(imagePixels, makeClearValue(m_colorFormat));
4115 }
4116
4117 void recordCommands (const VkCommandBuffer cmdBuffer)
4118 {
4119 const DeviceInterface& vk = m_context.getDeviceInterface();
4120 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
4121
4122 // Change color attachment image layout
4123 {
4124 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
4125 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
4126 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
4127 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
4128 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
4129 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
4130 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout newLayout
4131 m_colorImage, // VkImage image
4132 m_colorSubresourceRange // VkImageSubresourceRange subresourceRange
4133 );
4134 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
4135 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
4136 }
4137
4138 {
4139 const VkRect2D renderArea = makeRect2D(m_attachmentExtent);
4140 const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
4141
4142 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
4143 }
4144
4145 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
4146 {
4147 const VkDeviceSize vertexBufferOffset = 0ull;
4148 const VkBuffer vertexBuffer = m_vertices.getVertexBuffer();
4149 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &vertexBufferOffset);
4150 }
4151
4152 if (m_drawCall == DRAW_CALL_DRAW_INDEXED || m_drawCall == DRAW_CALL_DRAW_INDEXED_INDIRECT)
4153 vk.cmdBindIndexBuffer(cmdBuffer, m_vertices.getIndexBuffer(), 0u, m_vertices.getIndexType());
4154
4155 switch (m_drawCall)
4156 {
4157 case DRAW_CALL_DRAW:
4158 vk.cmdDraw(cmdBuffer, m_vertices.getNumVertices(), 1u, 0u, 0u);
4159 break;
4160
4161 case DRAW_CALL_DRAW_INDEXED:
4162 vk.cmdDrawIndexed(cmdBuffer, m_vertices.getNumIndices(), 1u, 0u, 0, 0u);
4163 break;
4164
4165 case DRAW_CALL_DRAW_INDIRECT:
4166 vk.cmdDrawIndirect(cmdBuffer, **m_indirectBuffer, 0u, 1u, 0u);
4167 break;
4168
4169 case DRAW_CALL_DRAW_INDEXED_INDIRECT:
4170 vk.cmdDrawIndexedIndirect(cmdBuffer, **m_indirectBuffer, 0u, 1u, 0u);
4171 break;
4172 }
4173
4174 endRenderPass(vk, cmdBuffer);
4175 }
4176
4177 SyncInfo getInSyncInfo (void) const
4178 {
4179 return emptySyncInfo;
4180 }
4181
4182 SyncInfo getOutSyncInfo (void) const
4183 {
4184 const SyncInfo syncInfo =
4185 {
4186 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, // VkPipelineStageFlags stageMask;
4187 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags accessMask;
4188 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout imageLayout;
4189 };
4190 return syncInfo;
4191 }
4192
4193 Data getData (void) const
4194 {
4195 const Data data =
4196 {
4197 m_expectedData.size(), // std::size_t size;
4198 &m_expectedData[0], // const deUint8* data;
4199 };
4200 return data;
4201 }
4202
4203 void setData (const Data& data)
4204 {
4205 DE_ASSERT(m_expectedData.size() == data.size);
4206 deMemcpy(&m_expectedData[0], data.data, data.size);
4207 }
4208
4209 private:
4210 OperationContext& m_context;
4211 Resource& m_resource;
4212 const DrawCall m_drawCall;
4213 const VertexGrid m_vertices;
4214 std::vector<deUint8> m_expectedData;
4215 de::MovePtr<Buffer> m_indirectBuffer;
4216 VkFormat m_colorFormat;
4217 VkImage m_colorImage;
4218 Move<VkImageView> m_colorAttachmentView;
4219 VkImageSubresourceRange m_colorSubresourceRange;
4220 VkExtent3D m_attachmentExtent;
4221 Move<VkRenderPass> m_renderPass;
4222 Move<VkFramebuffer> m_framebuffer;
4223 Move<VkPipelineLayout> m_pipelineLayout;
4224 Move<VkPipeline> m_pipeline;
4225 };
4226
4227 template<typename T, std::size_t N>
4228 std::string toString (const T (&values)[N])
4229 {
4230 std::ostringstream str;
4231 for (std::size_t i = 0; i < N; ++i)
4232 str << (i != 0 ? ", " : "") << values[i];
4233 return str.str();
4234 }
4235
4236 class Support : public OperationSupport
4237 {
4238 public:
4239 Support (const ResourceDescription& resourceDesc, const DrawCall drawCall)
4240 : m_resourceDesc (resourceDesc)
4241 , m_drawCall (drawCall)
4242 {
4243 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE && m_resourceDesc.imageType == VK_IMAGE_TYPE_2D);
4244 DE_ASSERT(!isDepthStencilFormat(m_resourceDesc.imageFormat));
4245 }
4246
4247 void initPrograms (SourceCollections& programCollection) const
4248 {
4249 // Vertex
4250 {
4251 std::ostringstream src;
4252 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
4253 << "\n"
4254 << "layout(location = 0) in vec4 v_in_position;\n"
4255 << "\n"
4256 << "out " << s_perVertexBlock << ";\n"
4257 << "\n"
4258 << "void main (void)\n"
4259 << "{\n"
4260 << " gl_Position = v_in_position;\n"
4261 << "}\n";
4262
4263 programCollection.glslSources.add("draw_vert") << glu::VertexSource(src.str());
4264 }
4265
4266 // Fragment
4267 {
4268 const VkClearValue clearValue = makeClearValue(m_resourceDesc.imageFormat);
4269 const bool isIntegerFormat = isIntFormat(m_resourceDesc.imageFormat) || isUintFormat(m_resourceDesc.imageFormat);
4270 const std::string colorType = (isIntegerFormat ? "uvec4" : "vec4");
4271
4272 std::ostringstream src;
4273 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
4274 << "\n"
4275 << "layout(location = 0) out " << colorType << " o_color;\n"
4276 << "\n"
4277 << "void main (void)\n"
4278 << "{\n"
4279 << " o_color = " << colorType << "(" << (isIntegerFormat ? toString(clearValue.color.uint32) : toString(clearValue.color.float32)) << ");\n"
4280 << "}\n";
4281
4282 programCollection.glslSources.add("draw_frag") << glu::FragmentSource(src.str());
4283 }
4284 }
4285
4286 deUint32 getInResourceUsageFlags (void) const
4287 {
4288 return 0;
4289 }
4290
4291 deUint32 getOutResourceUsageFlags (void) const
4292 {
4293 return VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
4294 }
4295
4296 VkQueueFlags getQueueFlags (const OperationContext& context) const
4297 {
4298 DE_UNREF(context);
4299 return VK_QUEUE_GRAPHICS_BIT;
4300 }
4301
4302 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
4303 {
4304 return de::MovePtr<Operation>(new Implementation(context, resource, m_drawCall));
4305 }
4306
4307 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
4308 {
4309 DE_ASSERT(0);
4310 return de::MovePtr<Operation>();
4311 }
4312
4313 private:
4314 const ResourceDescription m_resourceDesc;
4315 const DrawCall m_drawCall;
4316 };
4317
4318 } // Draw ns
4319
4320 namespace ClearAttachments
4321 {
4322
4323 class Implementation : public Operation
4324 {
4325 public:
4326 Implementation (OperationContext& context, Resource& resource)
4327 : m_context (context)
4328 , m_resource (resource)
4329 , m_clearValue (makeClearValue(m_resource.getImage().format))
4330 {
4331 const DeviceInterface& vk = context.getDeviceInterface();
4332 const VkDevice device = context.getDevice();
4333
4334 const VkDeviceSize size = getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent);
4335 const VkExtent3D& extent = m_resource.getImage().extent;
4336 const VkFormat format = m_resource.getImage().format;
4337 const tcu::TextureFormat texFormat = mapVkFormat(format);
4338 const SyncInfo syncInfo = getOutSyncInfo();
4339
4340 m_data.resize(static_cast<std::size_t>(size));
4341 tcu::PixelBufferAccess imagePixels(texFormat, extent.width, extent.height, extent.depth, &m_data[0]);
4342 clearPixelBuffer(imagePixels, m_clearValue);
4343
4344 m_attachmentView = makeImageView(vk, device, m_resource.getImage().handle, getImageViewType(m_resource.getImage().imageType), m_resource.getImage().format, m_resource.getImage().subresourceRange);
4345
4346 switch (m_resource.getImage().subresourceRange.aspectMask)
4347 {
4348 case VK_IMAGE_ASPECT_COLOR_BIT:
4349 m_renderPass = makeRenderPass(vk, device, m_resource.getImage().format, VK_FORMAT_UNDEFINED, VK_ATTACHMENT_LOAD_OP_DONT_CARE, syncInfo.imageLayout);
4350 break;
4351 case VK_IMAGE_ASPECT_STENCIL_BIT:
4352 case VK_IMAGE_ASPECT_DEPTH_BIT:
4353 m_renderPass = makeRenderPass(vk, device, VK_FORMAT_UNDEFINED, m_resource.getImage().format, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, syncInfo.imageLayout);
4354 break;
4355 default:
4356 DE_ASSERT(0);
4357 break;
4358 }
4359
4360 m_frameBuffer = makeFramebuffer(vk, device, *m_renderPass, *m_attachmentView, m_resource.getImage().extent.width, m_resource.getImage().extent.height);
4361 }
4362
4363 void recordCommands (const VkCommandBuffer cmdBuffer)
4364 {
4365 const DeviceInterface& vk = m_context.getDeviceInterface();
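// Transition the depth/stencil image from UNDEFINED to the attachment layout before the render pass begins.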
4366 if ((m_resource.getImage().subresourceRange.aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) != 0)
4367 {
4368 const VkImageMemoryBarrier imageBarrier =
4369 {
4370 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // sType
4371 DE_NULL, // pNext
4372 0u, // srcAccessMask
4373 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, // dstAccessMask
4374 VK_IMAGE_LAYOUT_UNDEFINED, // oldLayout
4375 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, // newLayout
4376 VK_QUEUE_FAMILY_IGNORED, // srcQueueFamilyIndex
4377 VK_QUEUE_FAMILY_IGNORED, // dstQueueFamilyIndex
4378 m_resource.getImage().handle, // image
4379 m_resource.getImage().subresourceRange // subresourceRange
4380 };
4381 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, 0u, 0u, DE_NULL, 0u, DE_NULL, 1u, &imageBarrier);
4382 }
4383 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_frameBuffer, makeRect2D(0, 0, m_resource.getImage().extent.width, m_resource.getImage().extent.height), m_clearValue);
4384
4385 const VkClearAttachment clearAttachment =
4386 {
4387 m_resource.getImage().subresourceRange.aspectMask, // VkImageAspectFlags aspectMask;
4388 0, // deUint32 colorAttachment;
4389 m_clearValue // VkClearValue clearValue;
4390 };
4391
4392 const VkRect2D rect2D = makeRect2D(m_resource.getImage().extent);
4393
4394 const VkClearRect clearRect =
4395 {
4396 rect2D, // VkRect2D rect;
4397 0u, // deUint32 baseArrayLayer;
4398 m_resource.getImage().subresourceLayers.layerCount // deUint32 layerCount;
4399 };
4400
4401 vk.cmdClearAttachments(cmdBuffer, 1, &clearAttachment, 1, &clearRect);
4402
4403 endRenderPass(vk, cmdBuffer);
4404 }
4405
4406 SyncInfo getInSyncInfo (void) const
4407 {
4408 return emptySyncInfo;
4409 }
4410
4411 SyncInfo getOutSyncInfo (void) const
4412 {
4413 SyncInfo syncInfo;
4414 syncInfo.stageMask = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR;
4415
4416 switch (m_resource.getImage().subresourceRange.aspectMask)
4417 {
4418 case VK_IMAGE_ASPECT_COLOR_BIT:
4419 syncInfo.accessMask = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR;
4420 syncInfo.imageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
4421 break;
4422 case VK_IMAGE_ASPECT_STENCIL_BIT:
4423 case VK_IMAGE_ASPECT_DEPTH_BIT:
4424 syncInfo.accessMask = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR;
4425 syncInfo.imageLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
4426 break;
4427 default:
4428 DE_ASSERT(0);
4429 break;
4430 }
4431
4432 return syncInfo;
4433 }
4434
4435 Data getData (void) const
4436 {
4437 const Data data =
4438 {
4439 m_data.size(), // std::size_t size;
4440 &m_data[0], // const deUint8* data;
4441 };
4442 return data;
4443 }
4444
4445 void setData (const Data&)
4446 {
4447 DE_ASSERT(0);
4448 }
4449
4450 private:
4451 OperationContext& m_context;
4452 Resource& m_resource;
4453 std::vector<deUint8> m_data;
4454 const VkClearValue m_clearValue;
4455 Move<VkImageView> m_attachmentView;
4456 Move<VkRenderPass> m_renderPass;
4457 Move<VkFramebuffer> m_frameBuffer;
4458 };
4459
4460 class Support : public OperationSupport
4461 {
4462 public:
4463 Support (const ResourceDescription& resourceDesc)
4464 : m_resourceDesc (resourceDesc)
4465 {
4466 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
4467 }
4468
4469 deUint32 getInResourceUsageFlags (void) const
4470 {
4471 return 0;
4472 }
4473
4474 deUint32 getOutResourceUsageFlags (void) const
4475 {
4476 switch (m_resourceDesc.imageAspect)
4477 {
4478 case VK_IMAGE_ASPECT_COLOR_BIT:
4479 return VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
4480 case VK_IMAGE_ASPECT_STENCIL_BIT:
4481 case VK_IMAGE_ASPECT_DEPTH_BIT:
4482 return VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
4483 default:
4484 DE_ASSERT(0);
4485 }
4486 return 0u;
4487 }
4488
4489 VkQueueFlags getQueueFlags (const OperationContext& context) const
4490 {
4491 DE_UNREF(context);
4492 return VK_QUEUE_GRAPHICS_BIT;
4493 }
4494
4495 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
4496 {
4497 return de::MovePtr<Operation>(new Implementation(context, resource));
4498 }
4499
4500 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
4501 {
4502 DE_ASSERT(0);
4503 return de::MovePtr<Operation>();
4504 }
4505
4506 private:
4507 const ResourceDescription m_resourceDesc;
4508 };
4509
4510 } // ClearAttachments
4511
4512 namespace IndirectBuffer
4513 {
4514
4515 class GraphicsPipeline : public Pipeline
4516 {
4517 public:
4518 GraphicsPipeline (OperationContext& context,
4519 const ResourceType resourceType,
4520 const VkBuffer indirectBuffer,
4521 const std::string& shaderPrefix,
4522 const VkDescriptorSetLayout descriptorSetLayout)
4523 : m_resourceType (resourceType)
4524 , m_indirectBuffer (indirectBuffer)
4525 , m_vertices (context)
4526 {
4527 const DeviceInterface& vk = context.getDeviceInterface();
4528 const VkDevice device = context.getDevice();
4529 Allocator& allocator = context.getAllocator();
4530
4531 // Color attachment
4532
4533 m_colorFormat = VK_FORMAT_R8G8B8A8_UNORM;
4534 m_colorImageSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
4535 m_colorImageExtent = makeExtent3D(16u, 16u, 1u);
4536 m_colorAttachmentImage = de::MovePtr<Image>(new Image(vk, device, allocator,
4537 makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_colorImageExtent, m_colorFormat, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
4538 VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
4539 MemoryRequirement::Any));
4540
4541 // Pipeline
4542
4543 m_colorAttachmentView = makeImageView (vk, device, **m_colorAttachmentImage, VK_IMAGE_VIEW_TYPE_2D, m_colorFormat, m_colorImageSubresourceRange);
4544 m_renderPass = makeRenderPass (vk, device, m_colorFormat);
4545 m_framebuffer = makeFramebuffer (vk, device, *m_renderPass, *m_colorAttachmentView, m_colorImageExtent.width, m_colorImageExtent.height);
4546 m_pipelineLayout = makePipelineLayout(vk, device, descriptorSetLayout);
4547
4548 GraphicsPipelineBuilder pipelineBuilder;
4549 pipelineBuilder
4550 .setRenderSize (tcu::IVec2(m_colorImageExtent.width, m_colorImageExtent.height))
4551 .setVertexInputSingleAttribute (m_vertices.getVertexFormat(), m_vertices.getVertexStride())
4552 .setShader (vk, device, VK_SHADER_STAGE_VERTEX_BIT, context.getBinaryCollection().get(shaderPrefix + "vert"), DE_NULL)
4553 .setShader (vk, device, VK_SHADER_STAGE_FRAGMENT_BIT, context.getBinaryCollection().get(shaderPrefix + "frag"), DE_NULL);
4554
4555 m_pipeline = pipelineBuilder.build(vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData(), context.getResourceInterface());
4556 }
4557
4558 void recordCommands (OperationContext& context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
4559 {
4560 const DeviceInterface& vk = context.getDeviceInterface();
4561 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(context.getSynchronizationType(), vk, DE_FALSE);
4562
4563 // Change color attachment image layout
4564 {
4565 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
4566 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
4567 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
4568 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
4569 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
4570 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
4571 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout newLayout
4572 **m_colorAttachmentImage, // VkImage image
4573 m_colorImageSubresourceRange // VkImageSubresourceRange subresourceRange
4574 );
4575 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
4576 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
4577 }
4578
4579 {
4580 const VkRect2D renderArea = makeRect2D(m_colorImageExtent);
4581 const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
4582
4583 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
4584 }
4585
4586 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
4587 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1u, &descriptorSet, 0u, DE_NULL);
4588 {
4589 const VkDeviceSize vertexBufferOffset = 0ull;
4590 const VkBuffer vertexBuffer = m_vertices.getVertexBuffer();
4591 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &vertexBufferOffset);
4592 }
4593
4594 switch (m_resourceType)
4595 {
4596 case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW:
4597 vk.cmdDrawIndirect(cmdBuffer, m_indirectBuffer, 0u, 1u, 0u);
4598 break;
4599
4600 case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW_INDEXED:
4601 vk.cmdBindIndexBuffer(cmdBuffer, m_vertices.getIndexBuffer(), 0u, m_vertices.getIndexType());
4602 vk.cmdDrawIndexedIndirect(cmdBuffer, m_indirectBuffer, 0u, 1u, 0u);
4603 break;
4604
4605 default:
4606 DE_ASSERT(0);
4607 break;
4608 }
4609 endRenderPass(vk, cmdBuffer);
4610 }
4611
4612 private:
4613 const ResourceType m_resourceType;
4614 const VkBuffer m_indirectBuffer;
4615 const VertexGrid m_vertices;
4616 VkFormat m_colorFormat;
4617 de::MovePtr<Image> m_colorAttachmentImage;
4618 Move<VkImageView> m_colorAttachmentView;
4619 VkExtent3D m_colorImageExtent;
4620 VkImageSubresourceRange m_colorImageSubresourceRange;
4621 Move<VkRenderPass> m_renderPass;
4622 Move<VkFramebuffer> m_framebuffer;
4623 Move<VkPipelineLayout> m_pipelineLayout;
4624 Move<VkPipeline> m_pipeline;
4625 };
4626
4627 class ComputePipeline : public Pipeline
4628 {
4629 public:
4630 ComputePipeline (OperationContext& context,
4631 const VkBuffer indirectBuffer,
4632 const std::string& shaderPrefix,
4633 const VkDescriptorSetLayout descriptorSetLayout)
4634 : m_indirectBuffer (indirectBuffer)
4635 {
4636 const DeviceInterface& vk = context.getDeviceInterface();
4637 const VkDevice device = context.getDevice();
4638
4639 const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, context.getBinaryCollection().get(shaderPrefix + "comp"), (VkShaderModuleCreateFlags)0));
4640
4641 m_pipelineLayout = makePipelineLayout(vk, device, descriptorSetLayout);
4642 m_pipeline = makeComputePipeline(vk, device, *m_pipelineLayout, *shaderModule, DE_NULL, context.getPipelineCacheData(), context.getResourceInterface());
4643 }
4644
4645 void recordCommands (OperationContext& context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
4646 {
4647 const DeviceInterface& vk = context.getDeviceInterface();
4648
4649 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipeline);
4650 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0u, 1u, &descriptorSet, 0u, DE_NULL);
4651 vk.cmdDispatchIndirect(cmdBuffer, m_indirectBuffer, 0u);
4652 }
4653
4654 private:
4655 const VkBuffer m_indirectBuffer;
4656 Move<VkPipelineLayout> m_pipelineLayout;
4657 Move<VkPipeline> m_pipeline;
4658 };
4659
4660 //! Read indirect buffer by executing an indirect draw or dispatch command.
4661 class ReadImplementation : public Operation
4662 {
4663 public:
4664 ReadImplementation (OperationContext& context, Resource& resource)
4665 : m_context (context)
4666 , m_resource (resource)
4667 , m_stage (resource.getType() == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH ? VK_SHADER_STAGE_COMPUTE_BIT : VK_SHADER_STAGE_VERTEX_BIT)
4668 , m_pipelineStage (pipelineStageFlagsFromShaderStageFlagBits(m_stage))
4669 , m_hostBufferSizeBytes (sizeof(deUint32))
4670 {
4671 requireFeaturesForSSBOAccess (m_context, m_stage);
4672
4673 const DeviceInterface& vk = m_context.getDeviceInterface();
4674 const VkDevice device = m_context.getDevice();
4675 Allocator& allocator = m_context.getAllocator();
4676
4677 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
4678 vk, device, allocator, makeBufferCreateInfo(m_hostBufferSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible));
4679
4680 // Init host buffer data
4681 {
4682 const Allocation& alloc = m_hostBuffer->getAllocation();
4683 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_hostBufferSizeBytes));
4684 flushAlloc(vk, device, alloc);
4685 }
4686
4687 // Prepare descriptors
4688 {
4689 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
4690 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, m_stage)
4691 .build(vk, device);
4692
4693 m_descriptorPool = DescriptorPoolBuilder()
4694 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
4695 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
4696
4697 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
4698
4699 const VkDescriptorBufferInfo hostBufferInfo = makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_hostBufferSizeBytes);
4700
4701 DescriptorSetUpdateBuilder()
4702 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &hostBufferInfo)
4703 .update(vk, device);
4704 }
4705
4706 // Create pipeline
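// A dispatch-indirect resource is consumed with vkCmdDispatchIndirect in a compute pipeline; draw-indirect resources with vkCmdDraw(Indexed)Indirect in a graphics pipeline.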
4707 m_pipeline = (m_resource.getType() == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH
4708 ? de::MovePtr<Pipeline>(new ComputePipeline(context, m_resource.getBuffer().handle, "read_ib_", *m_descriptorSetLayout))
4709 : de::MovePtr<Pipeline>(new GraphicsPipeline(context, m_resource.getType(), m_resource.getBuffer().handle, "read_ib_", *m_descriptorSetLayout)));
4710 }
4711
4712 void recordCommands (const VkCommandBuffer cmdBuffer)
4713 {
4714 const DeviceInterface& vk = m_context.getDeviceInterface();
4715 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
4716
4717 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
4718
4719 // Insert a barrier so data written by the shader is available to the host
4720 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
4721 m_pipelineStage, // VkPipelineStageFlags2KHR srcStageMask
4722 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
4723 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
4724 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
4725 **m_hostBuffer, // VkBuffer buffer
4726 0u, // VkDeviceSize offset
4727 m_hostBufferSizeBytes // VkDeviceSize size
4728 );
4729 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
4730 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
4731 }
4732
4733 SyncInfo getInSyncInfo (void) const
4734 {
4735 const SyncInfo syncInfo =
4736 {
4737 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR, // VkPipelineStageFlags stageMask;
4738 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR, // VkAccessFlags accessMask;
4739 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
4740 };
4741 return syncInfo;
4742 }
4743
4744 SyncInfo getOutSyncInfo (void) const
4745 {
4746 return emptySyncInfo;
4747 }
4748
4749 Data getData (void) const
4750 {
4751 return getHostBufferData(m_context, *m_hostBuffer, m_hostBufferSizeBytes);
4752 }
4753
4754 void setData (const Data&)
4755 {
4756 DE_ASSERT(0);
4757 }
4758
4759 vk::VkShaderStageFlagBits getShaderStage (void) { return m_stage; }
4760
4761 private:
4762 OperationContext& m_context;
4763 Resource& m_resource;
4764 const VkShaderStageFlagBits m_stage;
4765 const VkPipelineStageFlags m_pipelineStage;
4766 const VkDeviceSize m_hostBufferSizeBytes;
4767 de::MovePtr<Buffer> m_hostBuffer;
4768 Move<VkDescriptorPool> m_descriptorPool;
4769 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
4770 Move<VkDescriptorSet> m_descriptorSet;
4771 de::MovePtr<Pipeline> m_pipeline;
4772 };
4773
4774 //! Prepare indirect buffer for a draw/dispatch call.
4775 class WriteImplementation : public Operation
4776 {
4777 public:
4778 WriteImplementation (OperationContext& context, Resource& resource)
4779 : m_context (context)
4780 , m_resource (resource)
4781 {
4782 switch (m_resource.getType())
4783 {
4784 case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW:
4785 {
4786 m_drawIndirect.vertexCount = 6u;
4787 m_drawIndirect.instanceCount = 1u;
4788 m_drawIndirect.firstVertex = 0u;
4789 m_drawIndirect.firstInstance = 0u;
4790
4791 m_indirectData = reinterpret_cast<deUint32*>(&m_drawIndirect);
4792 m_expectedValue = 6u;
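// The read pass draws 6 vertices and its vertex shader does one atomicAdd(+1) per invocation, so 6 is expected.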
4793 }
4794 break;
4795
4796 case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW_INDEXED:
4797 {
4798 m_drawIndexedIndirect.indexCount = 6u;
4799 m_drawIndexedIndirect.instanceCount = 1u;
4800 m_drawIndexedIndirect.firstIndex = 0u;
4801 m_drawIndexedIndirect.vertexOffset = 0u;
4802 m_drawIndexedIndirect.firstInstance = 0u;
4803
4804 m_indirectData = reinterpret_cast<deUint32*>(&m_drawIndexedIndirect);
4805 m_expectedValue = 6u;
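// Likewise, the indexed read pass issues 6 vertices, each adding 1 in the vertex shader.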
4806 }
4807 break;
4808
4809 case RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH:
4810 {
4811 m_dispatchIndirect.x = 7u;
4812 m_dispatchIndirect.y = 2u;
4813 m_dispatchIndirect.z = 1u;
4814
4815 m_indirectData = reinterpret_cast<deUint32*>(&m_dispatchIndirect);
4816 m_expectedValue = 14u;
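// 7 x 2 x 1 = 14 workgroups with local_size_x = 1 in the read shader, each adding 1.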
4817 }
4818 break;
4819
4820 default:
4821 DE_ASSERT(0);
4822 break;
4823 }
4824 }
4825
4826 void recordCommands (const VkCommandBuffer cmdBuffer)
4827 {
4828 const DeviceInterface& vk = m_context.getDeviceInterface();
4829
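// vkCmdUpdateBuffer accepts at most 65536 bytes per update, which easily covers these indirect command structures.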
4830 vk.cmdUpdateBuffer(cmdBuffer, m_resource.getBuffer().handle, m_resource.getBuffer().offset, m_resource.getBuffer().size, m_indirectData);
4831 }
4832
4833 SyncInfo getInSyncInfo (void) const
4834 {
4835 return emptySyncInfo;
4836 }
4837
4838 SyncInfo getOutSyncInfo (void) const
4839 {
4840 const SyncInfo syncInfo =
4841 {
4842 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
4843 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
4844 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
4845 };
4846 return syncInfo;
4847 }
4848
4849 Data getData (void) const
4850 {
4851 const Data data =
4852 {
4853 sizeof(deUint32), // std::size_t size;
4854 reinterpret_cast<const deUint8*>(&m_expectedValue), // const deUint8* data;
4855 };
4856 return data;
4857 }
4858
4859 void setData (const Data&)
4860 {
4861 DE_ASSERT(0);
4862 }
4863
4864 private:
4865 OperationContext& m_context;
4866 Resource& m_resource;
4867 VkDrawIndirectCommand m_drawIndirect;
4868 VkDrawIndexedIndirectCommand m_drawIndexedIndirect;
4869 VkDispatchIndirectCommand m_dispatchIndirect;
4870 deUint32* m_indirectData;
4871 deUint32 m_expectedValue; //! Side-effect value expected to be computed by a read (draw/dispatch) command.
4872 };
4873
4874 class ReadSupport : public OperationSupport
4875 {
4876 public:
4877 ReadSupport (const ResourceDescription& resourceDesc)
4878 : m_resourceDesc (resourceDesc)
4879 {
4880 DE_ASSERT(isIndirectBuffer(m_resourceDesc.type));
4881 }
4882
4883 void initPrograms (SourceCollections& programCollection) const
4884 {
4885 std::ostringstream decl;
4886 decl << "layout(set = 0, binding = 0, std140) coherent buffer Data {\n"
4887 << " uint value;\n"
4888 << "} sb_out;\n";
4889
4890 std::ostringstream main;
4891 main << " atomicAdd(sb_out.value, 1u);\n";
4892
4893 // Vertex
4894 {
4895 std::ostringstream src;
4896 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
4897 << "\n"
4898 << "layout(location = 0) in vec4 v_in_position;\n"
4899 << "\n"
4900 << "out " << s_perVertexBlock << ";\n"
4901 << "\n"
4902 << decl.str()
4903 << "\n"
4904 << "void main (void)\n"
4905 << "{\n"
4906 << " gl_Position = v_in_position;\n"
4907 << main.str()
4908 << "}\n";
4909
4910 programCollection.glslSources.add("read_ib_vert") << glu::VertexSource(src.str());
4911 }
4912
4913 // Fragment
4914 {
4915 std::ostringstream src;
4916 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
4917 << "\n"
4918 << "layout(location = 0) out vec4 o_color;\n"
4919 << "\n"
4920 << "void main (void)\n"
4921 << "{\n"
4922 << " o_color = vec4(1.0);\n"
4923 << "}\n";
4924
4925 programCollection.glslSources.add("read_ib_frag") << glu::FragmentSource(src.str());
4926 }
4927
4928 // Compute
4929 {
4930 std::ostringstream src;
4931 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
4932 << "\n"
4933 << "layout(local_size_x = 1) in;\n"
4934 << "\n"
4935 << decl.str()
4936 << "\n"
4937 << "void main (void)\n"
4938 << "{\n"
4939 << main.str()
4940 << "}\n";
4941
4942 programCollection.glslSources.add("read_ib_comp") << glu::ComputeSource(src.str());
4943 }
4944 }
4945
4946 deUint32 getInResourceUsageFlags (void) const
4947 {
4948 return VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
4949 }
4950
4951 deUint32 getOutResourceUsageFlags (void) const
4952 {
4953 return 0;
4954 }
4955
4956 VkQueueFlags getQueueFlags (const OperationContext& context) const
4957 {
4958 DE_UNREF(context);
4959 return (m_resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
4960 }
4961
4962 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
4963 {
4964 return de::MovePtr<Operation>(new ReadImplementation(context, resource));
4965 }
4966
4967 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
4968 {
4969 DE_ASSERT(0);
4970 return de::MovePtr<Operation>();
4971 }
4972
4973 private:
4974 const ResourceDescription m_resourceDesc;
4975 };
4976
4977
4978 class WriteSupport : public OperationSupport
4979 {
4980 public:
4981 WriteSupport (const ResourceDescription& resourceDesc)
4982 {
4983 DE_ASSERT(isIndirectBuffer(resourceDesc.type));
4984 DE_UNREF(resourceDesc);
4985 }
4986
4987 deUint32 getInResourceUsageFlags (void) const
4988 {
4989 return 0;
4990 }
4991
4992 deUint32 getOutResourceUsageFlags (void) const
4993 {
4994 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4995 }
4996
4997 VkQueueFlags getQueueFlags (const OperationContext& context) const
4998 {
4999 DE_UNREF(context);
5000 return VK_QUEUE_TRANSFER_BIT;
5001 }
5002
5003 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
5004 {
5005 return de::MovePtr<Operation>(new WriteImplementation(context, resource));
5006 }
5007
5008 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
5009 {
5010 DE_ASSERT(0);
5011 return de::MovePtr<Operation>();
5012 }
5013 };
5014
5015 } // IndirectBuffer ns
5016
5017 namespace VertexInput
5018 {
5019
5020 enum DrawMode
5021 {
5022 DRAW_MODE_VERTEX = 0,
5023 DRAW_MODE_INDEXED,
5024 };
5025
5026 class Implementation : public Operation
5027 {
5028 public:
5029 Implementation (OperationContext& context, Resource& resource, DrawMode drawMode)
5030 : m_context (context)
5031 , m_resource (resource)
5032 , m_drawMode (drawMode)
5033 {
5034 requireFeaturesForSSBOAccess (m_context, VK_SHADER_STAGE_VERTEX_BIT);
5035
5036 const DeviceInterface& vk = context.getDeviceInterface();
5037 const VkDevice device = context.getDevice();
5038 Allocator& allocator = context.getAllocator();
5039 VkFormat attributeFormat = VK_FORMAT_R32G32B32A32_UINT;
5040 const VkDeviceSize dataSizeBytes = m_resource.getBuffer().size;
5041
5042 // allocate ssbo that will store data used for verification
5043 {
5044 m_outputBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator,
5045 makeBufferCreateInfo(dataSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible));
5046
5047 const Allocation& alloc = m_outputBuffer->getAllocation();
5048 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(dataSizeBytes));
5049 flushAlloc(vk, device, alloc);
5050 }
5051
5052 // allocate buffer that will be used for vertex attributes when we use resource for indices
5053 if (m_drawMode == DRAW_MODE_INDEXED)
5054 {
5055 attributeFormat = VK_FORMAT_R32_UINT;
5056
5057 m_inputBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator,
5058 makeBufferCreateInfo(dataSizeBytes, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT), MemoryRequirement::HostVisible));
5059
5060 const Allocation& alloc = m_inputBuffer->getAllocation();
5061 fillPattern(alloc.getHostPtr(), dataSizeBytes, true);
5062 flushAlloc(vk, device, alloc);
5063 }
5064
5065 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
5066 .addSingleBinding (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_VERTEX_BIT)
5067 .build (vk, device);
5068
5069 m_descriptorPool = DescriptorPoolBuilder()
5070 .addType (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
5071 .build (vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
5072
5073 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
5074
5075 const VkDescriptorBufferInfo outputBufferDescriptorInfo = makeDescriptorBufferInfo(m_outputBuffer->get(), 0ull, dataSizeBytes);
5076 DescriptorSetUpdateBuilder()
5077 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &outputBufferDescriptorInfo)
5078 .update (vk, device);
5079
5080 // Color attachment
5081 m_colorFormat = VK_FORMAT_R8G8B8A8_UNORM;
5082 m_colorImageSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
5083 m_colorImageExtent = makeExtent3D(16u, 16u, 1u);
5084 m_colorAttachmentImage = de::MovePtr<Image>(new Image(vk, device, allocator,
5085 makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_colorImageExtent, m_colorFormat, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
5086 VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
5087 MemoryRequirement::Any));
5088
5089 // Pipeline
5090 m_colorAttachmentView = makeImageView (vk, device, **m_colorAttachmentImage, VK_IMAGE_VIEW_TYPE_2D, m_colorFormat, m_colorImageSubresourceRange);
5091 m_renderPass = makeRenderPass (vk, device, m_colorFormat);
5092 m_framebuffer = makeFramebuffer (vk, device, *m_renderPass, *m_colorAttachmentView, m_colorImageExtent.width, m_colorImageExtent.height);
5093 m_pipelineLayout = makePipelineLayout(vk, device, *m_descriptorSetLayout);
5094
5095 m_pipeline = GraphicsPipelineBuilder()
5096 .setPrimitiveTopology (VK_PRIMITIVE_TOPOLOGY_POINT_LIST)
5097 .setRenderSize (tcu::IVec2(static_cast<int>(m_colorImageExtent.width), static_cast<int>(m_colorImageExtent.height)))
5098 .setVertexInputSingleAttribute (attributeFormat, tcu::getPixelSize(mapVkFormat(attributeFormat)))
5099 .setShader (vk, device, VK_SHADER_STAGE_VERTEX_BIT, context.getBinaryCollection().get("input_vert"), DE_NULL)
5100 .setShader (vk, device, VK_SHADER_STAGE_FRAGMENT_BIT, context.getBinaryCollection().get("input_frag"), DE_NULL)
5101 .build (vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData(), context.getResourceInterface());
5102 }
5103
5104 void recordCommands (const VkCommandBuffer cmdBuffer)
5105 {
5106 const DeviceInterface& vk = m_context.getDeviceInterface();
5107 const VkDeviceSize dataSizeBytes = m_resource.getBuffer().size;
5108 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
5109
5110 // Change color attachment image layout
5111 {
5112 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
5113 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
5114 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
5115 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
5116 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
5117 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
5118 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout newLayout
5119 **m_colorAttachmentImage, // VkImage image
5120 m_colorImageSubresourceRange // VkImageSubresourceRange subresourceRange
5121 );
5122 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
5123 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
5124 }
5125
5126 {
5127 const VkRect2D renderArea = makeRect2D(m_colorImageExtent);
5128 const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
5129
5130 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
5131 }
5132
5133 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
5134 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1u, &m_descriptorSet.get(), 0u, DE_NULL);
5135
5136 const VkDeviceSize vertexBufferOffset = 0ull;
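// In vertex mode every 16-byte uvec4 of the resource is one vertex; in indexed mode every 32-bit value is one index and attributes come from m_inputBuffer.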
5137 if (m_drawMode == DRAW_MODE_VERTEX)
5138 {
5139 const deUint32 count = static_cast<deUint32>(dataSizeBytes / sizeof(tcu::UVec4));
5140 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &m_resource.getBuffer().handle, &vertexBufferOffset);
5141 vk.cmdDraw(cmdBuffer, count, 1u, 0u, 0u);
5142 }
5143 else // (m_drawMode == DRAW_MODE_INDEXED)
5144 {
5145 const deUint32 count = static_cast<deUint32>(dataSizeBytes / sizeof(deUint32));
5146 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &**m_inputBuffer, &vertexBufferOffset);
5147 vk.cmdBindIndexBuffer(cmdBuffer, m_resource.getBuffer().handle, 0u, VK_INDEX_TYPE_UINT32);
5148 vk.cmdDrawIndexed(cmdBuffer, count, 1, 0, 0, 0);
5149 }
5150
5151 endRenderPass(vk, cmdBuffer);
5152
5153 // Insert a barrier so data written by the shader is available to the host
5154 {
5155 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
5156 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
5157 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
5158 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
5159 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
5160 **m_outputBuffer, // VkBuffer buffer
5161 0u, // VkDeviceSize offset
5162 m_resource.getBuffer().size // VkDeviceSize size
5163 );
5164 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
5165 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
5166 }
5167 }
5168
5169 SyncInfo getInSyncInfo (void) const
5170 {
5171 const bool usingIndexedDraw = (m_drawMode == DRAW_MODE_INDEXED);
5172 VkPipelineStageFlags2KHR stageMask = VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR;
5173 VkAccessFlags2KHR accessMask = usingIndexedDraw ? VK_ACCESS_2_INDEX_READ_BIT_KHR
5174 : VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR;
5175
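// synchronization2 splits vertex input into separate INDEX_INPUT and VERTEX_ATTRIBUTE_INPUT stages; legacy synchronization only has the combined VERTEX_INPUT stage.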
5176 if (m_context.getSynchronizationType() == SynchronizationType::SYNCHRONIZATION2)
5177 {
5178 stageMask = usingIndexedDraw ? VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR
5179 : VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR;
5180 }
5181
5182 const SyncInfo syncInfo =
5183 {
5184 stageMask, // VkPipelineStageFlags stageMask;
5185 accessMask, // VkAccessFlags accessMask;
5186 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
5187 };
5188 return syncInfo;
5189 }
5190
5191 SyncInfo getOutSyncInfo (void) const
5192 {
5193 return emptySyncInfo;
5194 }
5195
5196 Data getData (void) const
5197 {
5198 return getHostBufferData(m_context, *m_outputBuffer, m_resource.getBuffer().size);
5199 }
5200
5201 void setData (const Data& data)
5202 {
5203 setHostBufferData(m_context, *m_outputBuffer, data);
5204 }
5205
5206 private:
5207 OperationContext& m_context;
5208 Resource& m_resource;
5209 DrawMode m_drawMode;
5210 de::MovePtr<Buffer> m_inputBuffer;
5211 de::MovePtr<Buffer> m_outputBuffer;
5212 Move<VkRenderPass> m_renderPass;
5213 Move<VkFramebuffer> m_framebuffer;
5214 Move<VkPipelineLayout> m_pipelineLayout;
5215 Move<VkPipeline> m_pipeline;
5216 VkFormat m_colorFormat;
5217 de::MovePtr<Image> m_colorAttachmentImage;
5218 Move<VkImageView> m_colorAttachmentView;
5219 VkExtent3D m_colorImageExtent;
5220 VkImageSubresourceRange m_colorImageSubresourceRange;
5221 Move<VkDescriptorPool> m_descriptorPool;
5222 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
5223 Move<VkDescriptorSet> m_descriptorSet;
5224 };
5225
5226 class Support : public OperationSupport
5227 {
5228 public:
5229 Support (const ResourceDescription& resourceDesc, DrawMode drawMode)
5230 : m_resourceDesc (resourceDesc)
5231 , m_drawMode (drawMode)
5232 {
5233 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER || m_resourceDesc.type == RESOURCE_TYPE_INDEX_BUFFER);
5234 }
5235
5236 void initPrograms (SourceCollections& programCollection) const
5237 {
5238 // Vertex
5239 {
5240 std::ostringstream src;
5241 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n";
5242 if (m_drawMode == DRAW_MODE_VERTEX)
5243 {
5244 src << "layout(location = 0) in uvec4 v_in_data;\n"
5245 << "layout(set = 0, binding = 0, std140) writeonly buffer Output {\n"
5246 << " uvec4 data[" << m_resourceDesc.size.x() / sizeof(tcu::UVec4) << "];\n"
5247 << "} b_out;\n"
5248 << "\n"
5249 << "void main (void)\n"
5250 << "{\n"
5251 << " b_out.data[gl_VertexIndex] = v_in_data;\n"
5252 << " gl_PointSize = 1.0f;\n"
5253 << "}\n";
5254 }
5255 else // DRAW_MODE_INDEXED
5256 {
5257 src << "layout(location = 0) in uint v_in_data;\n"
5258 << "layout(set = 0, binding = 0, std430) writeonly buffer Output {\n"
5259 << " uint data[" << m_resourceDesc.size.x() / sizeof(deUint32) << "];\n"
5260 << "} b_out;\n"
5261 << "\n"
5262 << "void main (void)\n"
5263 << "{\n"
5264 << " b_out.data[gl_VertexIndex] = v_in_data;\n"
5265 << " gl_PointSize = 1.0f;\n"
5266 << "}\n";
5267 }
5268 programCollection.glslSources.add("input_vert") << glu::VertexSource(src.str());
5269 }
5270
5271 // Fragment
5272 {
5273 std::ostringstream src;
5274 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
5275 << "\n"
5276 << "layout(location = 0) out vec4 o_color;\n"
5277 << "\n"
5278 << "void main (void)\n"
5279 << "{\n"
5280 << " o_color = vec4(1.0);\n"
5281 << "}\n";
5282 programCollection.glslSources.add("input_frag") << glu::FragmentSource(src.str());
5283 }
5284 }
5285
5286 deUint32 getInResourceUsageFlags (void) const
5287 {
5288 return (m_drawMode == DRAW_MODE_VERTEX) ? VK_BUFFER_USAGE_VERTEX_BUFFER_BIT : VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
5289 }
5290
5291 deUint32 getOutResourceUsageFlags (void) const
5292 {
5293 return VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5294 }
5295
5296 VkQueueFlags getQueueFlags (const OperationContext&) const
5297 {
5298 return VK_QUEUE_GRAPHICS_BIT;
5299 }
5300
5301 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
5302 {
5303 return de::MovePtr<Operation>(new Implementation(context, resource, m_drawMode));
5304 }
5305
5306 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
5307 {
5308 DE_ASSERT(0);
5309 return de::MovePtr<Operation>();
5310 }
5311
5312 private:
5313 const ResourceDescription m_resourceDesc;
5314 const DrawMode m_drawMode;
5315 };
5316
5317 } // VertexInput
5318
5319 } // anonymous ns
5320
5321 OperationContext::OperationContext (Context& context, SynchronizationType syncType, PipelineCacheData& pipelineCacheData)
5322 : m_context (context)
5323 , m_syncType (syncType)
5324 , m_vki (context.getInstanceInterface())
5325 , m_vk (context.getDeviceInterface())
5326 , m_physicalDevice (context.getPhysicalDevice())
5327 , m_device (context.getDevice())
5328 , m_allocator (context.getDefaultAllocator())
5329 , m_progCollection (context.getBinaryCollection())
5330 , m_pipelineCacheData (pipelineCacheData)
5331 {
5332 }
5333
5334 OperationContext::OperationContext (Context& context,
5335 SynchronizationType syncType,
5336 const DeviceInterface& vk,
5337 const VkDevice device,
5338 vk::Allocator& allocator,
5339 PipelineCacheData& pipelineCacheData)
5340 : m_context (context)
5341 , m_syncType (syncType)
5342 , m_vki (context.getInstanceInterface())
5343 , m_vk (vk)
5344 , m_physicalDevice (context.getPhysicalDevice())
5345 , m_device (device)
5346 , m_allocator (allocator)
5347 , m_progCollection (context.getBinaryCollection())
5348 , m_pipelineCacheData (pipelineCacheData)
5349 {
5350 }
5351
5352 OperationContext::OperationContext (Context& context,
5353 SynchronizationType syncType,
5354 const vk::InstanceInterface& vki,
5355 const vk::DeviceInterface& vkd,
5356 vk::VkPhysicalDevice physicalDevice,
5357 vk::VkDevice device,
5358 vk::Allocator& allocator,
5359 vk::BinaryCollection& programCollection,
5360 PipelineCacheData& pipelineCacheData)
5361 : m_context (context)
5362 , m_syncType (syncType)
5363 , m_vki (vki)
5364 , m_vk (vkd)
5365 , m_physicalDevice (physicalDevice)
5366 , m_device (device)
5367 , m_allocator (allocator)
5368 , m_progCollection (programCollection)
5369 , m_pipelineCacheData (pipelineCacheData)
5370 {
5371 }
5372
5373 Resource::Resource (OperationContext& context, const ResourceDescription& desc, const deUint32 usage, const vk::VkSharingMode sharingMode, const std::vector<deUint32>& queueFamilyIndex)
5374 : m_type (desc.type)
5375 {
5376 const DeviceInterface& vk = context.getDeviceInterface();
5377 const InstanceInterface& vki = context.getInstanceInterface();
5378 const VkDevice device = context.getDevice();
5379 const VkPhysicalDevice physDevice = context.getPhysicalDevice();
5380 Allocator& allocator = context.getAllocator();
5381
5382 if (m_type == RESOURCE_TYPE_BUFFER || m_type == RESOURCE_TYPE_INDEX_BUFFER || isIndirectBuffer(m_type))
5383 {
5384 m_bufferData = de::MovePtr<BufferResource>(new BufferResource(DE_NULL, 0u, static_cast<VkDeviceSize>(desc.size.x())));
5385 VkBufferCreateInfo bufferCreateInfo = makeBufferCreateInfo(m_bufferData->size, usage);
5386 bufferCreateInfo.sharingMode = sharingMode;
5387 if (queueFamilyIndex.size() > 0)
5388 {
5389 bufferCreateInfo.queueFamilyIndexCount = static_cast<deUint32>(queueFamilyIndex.size());
5390 bufferCreateInfo.pQueueFamilyIndices = &queueFamilyIndex[0];
5391 }
5392 m_buffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator, bufferCreateInfo, MemoryRequirement::Any));
5393 m_bufferData->handle = **m_buffer;
5394 }
5395 else if (m_type == RESOURCE_TYPE_IMAGE)
5396 {
5397 m_imageData = de::MovePtr<ImageResource>(new ImageResource(
5398 DE_NULL,
5399 makeExtent3D(desc.size.x(), std::max(1, desc.size.y()), std::max(1, desc.size.z())),
5400 desc.imageType,
5401 desc.imageFormat,
5402 makeImageSubresourceRange(desc.imageAspect, 0u, 1u, 0u, 1u),
5403 makeImageSubresourceLayers(desc.imageAspect, 0u, 0u, 1u),
5404 vk::VK_IMAGE_TILING_OPTIMAL
5405 ));
5406 VkImageCreateInfo imageInfo = makeImageCreateInfo(m_imageData->imageType, m_imageData->extent, m_imageData->format, usage, desc.imageSamples, m_imageData->tiling);
5407 imageInfo.sharingMode = sharingMode;
5408 if (queueFamilyIndex.size() > 0)
5409 {
5410 imageInfo.queueFamilyIndexCount = static_cast<deUint32>(queueFamilyIndex.size());
5411 imageInfo.pQueueFamilyIndices = &queueFamilyIndex[0];
5412 }
5413
5414 VkImageFormatProperties imageFormatProperties;
5415 const VkResult formatResult = vki.getPhysicalDeviceImageFormatProperties(physDevice, imageInfo.format, imageInfo.imageType, imageInfo.tiling, imageInfo.usage, imageInfo.flags, &imageFormatProperties);
5416
5417 if (formatResult != VK_SUCCESS)
5418 TCU_THROW(NotSupportedError, "Image format is not supported");
5419
5420 if ((imageFormatProperties.sampleCounts & desc.imageSamples) != desc.imageSamples)
5421 TCU_THROW(NotSupportedError, "Requested sample count is not supported");
5422
5423 m_image = de::MovePtr<Image>(new Image(vk, device, allocator, imageInfo, MemoryRequirement::Any));
5424 m_imageData->handle = **m_image;
5425 }
5426 else
5427 DE_ASSERT(0);
5428 }
5429
5430 Resource::Resource (ResourceType type,
5431 vk::Move<vk::VkBuffer> buffer,
5432 de::MovePtr<vk::Allocation> allocation,
5433 vk::VkDeviceSize offset,
5434 vk::VkDeviceSize size)
5435 : m_type (type)
5436 , m_buffer (new Buffer(buffer, allocation))
5437 , m_bufferData (de::MovePtr<BufferResource>(new BufferResource(m_buffer->get(), offset, size)))
5438 {
5439 DE_ASSERT(type != RESOURCE_TYPE_IMAGE);
5440 }
5441
5442 Resource::Resource (vk::Move<vk::VkImage> image,
5443 de::MovePtr<vk::Allocation> allocation,
5444 const vk::VkExtent3D& extent,
5445 vk::VkImageType imageType,
5446 vk::VkFormat format,
5447 vk::VkImageSubresourceRange subresourceRange,
5448 vk::VkImageSubresourceLayers subresourceLayers,
5449 vk::VkImageTiling tiling)
5450 : m_type (RESOURCE_TYPE_IMAGE)
5451 , m_image (new Image(image, allocation))
5452 , m_imageData (de::MovePtr<ImageResource>(new ImageResource(m_image->get(), extent, imageType, format, subresourceRange, subresourceLayers, tiling)))
5453 {
5454 }
5455
5456 vk::VkDeviceMemory Resource::getMemory (void) const
5457 {
5458 if (m_type == RESOURCE_TYPE_IMAGE)
5459 return m_image->getAllocation().getMemory();
5460 else
5461 return m_buffer->getAllocation().getMemory();
5462 }
5463
5464 //! \note This function exists for performance reasons. We create a lot of tests, and rejecting unsupported
5465 //! combinations here is faster than constructing an OperationSupport object first (see the sketch after this function).
5466 bool isResourceSupported (const OperationName opName, const ResourceDescription& resourceDesc)
5467 {
5468 switch (opName)
5469 {
5470 case OPERATION_NAME_WRITE_FILL_BUFFER:
5471 case OPERATION_NAME_WRITE_COPY_BUFFER:
5472 case OPERATION_NAME_WRITE_COPY_IMAGE_TO_BUFFER:
5473 case OPERATION_NAME_WRITE_SSBO_VERTEX:
5474 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_CONTROL:
5475 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_EVALUATION:
5476 case OPERATION_NAME_WRITE_SSBO_GEOMETRY:
5477 case OPERATION_NAME_WRITE_SSBO_FRAGMENT:
5478 case OPERATION_NAME_WRITE_SSBO_COMPUTE:
5479 case OPERATION_NAME_WRITE_SSBO_COMPUTE_INDIRECT:
5480 case OPERATION_NAME_READ_COPY_BUFFER:
5481 case OPERATION_NAME_READ_COPY_BUFFER_TO_IMAGE:
5482 case OPERATION_NAME_READ_SSBO_VERTEX:
5483 case OPERATION_NAME_READ_SSBO_TESSELLATION_CONTROL:
5484 case OPERATION_NAME_READ_SSBO_TESSELLATION_EVALUATION:
5485 case OPERATION_NAME_READ_SSBO_GEOMETRY:
5486 case OPERATION_NAME_READ_SSBO_FRAGMENT:
5487 case OPERATION_NAME_READ_SSBO_COMPUTE:
5488 case OPERATION_NAME_READ_SSBO_COMPUTE_INDIRECT:
5489 case OPERATION_NAME_READ_VERTEX_INPUT:
5490 return resourceDesc.type == RESOURCE_TYPE_BUFFER;
5491
5492 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW:
5493 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW:
5494 return resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DRAW;
5495
5496 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW_INDEXED:
5497 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW_INDEXED:
5498 return resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DRAW_INDEXED;
5499
5500 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DISPATCH:
5501 case OPERATION_NAME_READ_INDIRECT_BUFFER_DISPATCH:
5502 return resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH;
5503
5504 case OPERATION_NAME_WRITE_UPDATE_INDEX_BUFFER:
5505 case OPERATION_NAME_READ_INDEX_INPUT:
5506 return resourceDesc.type == RESOURCE_TYPE_INDEX_BUFFER;
5507
5508 case OPERATION_NAME_WRITE_UPDATE_BUFFER:
5509 return resourceDesc.type == RESOURCE_TYPE_BUFFER && resourceDesc.size.x() <= MAX_UPDATE_BUFFER_SIZE;
5510
5511 case OPERATION_NAME_WRITE_COPY_IMAGE:
5512 case OPERATION_NAME_WRITE_COPY_BUFFER_TO_IMAGE:
5513 case OPERATION_NAME_READ_COPY_IMAGE:
5514 case OPERATION_NAME_READ_COPY_IMAGE_TO_BUFFER:
5515 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5516
5517 case OPERATION_NAME_WRITE_CLEAR_ATTACHMENTS:
5518 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageType != VK_IMAGE_TYPE_3D
5519 && resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5520
5521 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_MULTISAMPLE:
5522 case OPERATION_NAME_READ_RESOLVE_IMAGE:
5523 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT
5524 && resourceDesc.imageSamples != VK_SAMPLE_COUNT_1_BIT;
5525
5526 case OPERATION_NAME_WRITE_BLIT_IMAGE:
5527 case OPERATION_NAME_READ_BLIT_IMAGE:
5528 case OPERATION_NAME_WRITE_IMAGE_VERTEX:
5529 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_CONTROL:
5530 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_EVALUATION:
5531 case OPERATION_NAME_WRITE_IMAGE_GEOMETRY:
5532 case OPERATION_NAME_WRITE_IMAGE_FRAGMENT:
5533 case OPERATION_NAME_WRITE_IMAGE_COMPUTE:
5534 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_INDIRECT:
5535 case OPERATION_NAME_READ_IMAGE_VERTEX:
5536 case OPERATION_NAME_READ_IMAGE_TESSELLATION_CONTROL:
5537 case OPERATION_NAME_READ_IMAGE_TESSELLATION_EVALUATION:
5538 case OPERATION_NAME_READ_IMAGE_GEOMETRY:
5539 case OPERATION_NAME_READ_IMAGE_FRAGMENT:
5540 case OPERATION_NAME_READ_IMAGE_COMPUTE:
5541 case OPERATION_NAME_READ_IMAGE_COMPUTE_INDIRECT:
5542 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT
5543 && resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5544
5545 case OPERATION_NAME_READ_UBO_VERTEX:
5546 case OPERATION_NAME_READ_UBO_TESSELLATION_CONTROL:
5547 case OPERATION_NAME_READ_UBO_TESSELLATION_EVALUATION:
5548 case OPERATION_NAME_READ_UBO_GEOMETRY:
5549 case OPERATION_NAME_READ_UBO_FRAGMENT:
5550 case OPERATION_NAME_READ_UBO_COMPUTE:
5551 case OPERATION_NAME_READ_UBO_COMPUTE_INDIRECT:
5552 case OPERATION_NAME_READ_UBO_TEXEL_VERTEX:
5553 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_CONTROL:
5554 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_EVALUATION:
5555 case OPERATION_NAME_READ_UBO_TEXEL_GEOMETRY:
5556 case OPERATION_NAME_READ_UBO_TEXEL_FRAGMENT:
5557 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE:
5558 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE_INDIRECT:
5559 return resourceDesc.type == RESOURCE_TYPE_BUFFER && resourceDesc.size.x() <= MAX_UBO_RANGE;
5560
5561 case OPERATION_NAME_WRITE_CLEAR_COLOR_IMAGE:
5562 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT
5563 && resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5564
5565 case OPERATION_NAME_WRITE_CLEAR_DEPTH_STENCIL_IMAGE:
5566 return resourceDesc.type == RESOURCE_TYPE_IMAGE && (resourceDesc.imageAspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT))
5567 && resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5568
5569 case OPERATION_NAME_WRITE_DRAW:
5570 case OPERATION_NAME_WRITE_DRAW_INDEXED:
5571 case OPERATION_NAME_WRITE_DRAW_INDIRECT:
5572 case OPERATION_NAME_WRITE_DRAW_INDEXED_INDIRECT:
5573 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageType == VK_IMAGE_TYPE_2D
5574 && (resourceDesc.imageAspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) == 0
5575 && resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5576
5577 case OPERATION_NAME_COPY_BUFFER:
5578 case OPERATION_NAME_COPY_SSBO_VERTEX:
5579 case OPERATION_NAME_COPY_SSBO_TESSELLATION_CONTROL:
5580 case OPERATION_NAME_COPY_SSBO_TESSELLATION_EVALUATION:
5581 case OPERATION_NAME_COPY_SSBO_GEOMETRY:
5582 case OPERATION_NAME_COPY_SSBO_FRAGMENT:
5583 case OPERATION_NAME_COPY_SSBO_COMPUTE:
5584 case OPERATION_NAME_COPY_SSBO_COMPUTE_INDIRECT:
5585 return resourceDesc.type == RESOURCE_TYPE_BUFFER;
5586
5587 case OPERATION_NAME_COPY_IMAGE:
5588 case OPERATION_NAME_BLIT_IMAGE:
5589 case OPERATION_NAME_COPY_IMAGE_VERTEX:
5590 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_CONTROL:
5591 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_EVALUATION:
5592 case OPERATION_NAME_COPY_IMAGE_GEOMETRY:
5593 case OPERATION_NAME_COPY_IMAGE_FRAGMENT:
5594 case OPERATION_NAME_COPY_IMAGE_COMPUTE:
5595 case OPERATION_NAME_COPY_IMAGE_COMPUTE_INDIRECT:
5596 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT
5597 && resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5598
5599 default:
5600 DE_ASSERT(0);
5601 return false;
5602 }
5603 }
5604
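//! \note Illustrative sketch only (not used by the tests, helper name is hypothetical): the kind of
//!       cheap pre-filtering the note above isResourceSupported() refers to, rejecting a write/read
//!       pair before any OperationSupport objects are constructed.
static bool isCaseCandidate (const OperationName writeOp, const OperationName readOp, const ResourceDescription& resourceDesc)
{
	return isResourceSupported(writeOp, resourceDesc) && isResourceSupported(readOp, resourceDesc);
}
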
5605 std::string getOperationName (const OperationName opName)
5606 {
5607 switch (opName)
5608 {
5609 case OPERATION_NAME_WRITE_FILL_BUFFER: return "write_fill_buffer";
5610 case OPERATION_NAME_WRITE_UPDATE_BUFFER: return "write_update_buffer";
5611 case OPERATION_NAME_WRITE_COPY_BUFFER: return "write_copy_buffer";
5612 case OPERATION_NAME_WRITE_COPY_BUFFER_TO_IMAGE: return "write_copy_buffer_to_image";
5613 case OPERATION_NAME_WRITE_COPY_IMAGE_TO_BUFFER: return "write_copy_image_to_buffer";
5614 case OPERATION_NAME_WRITE_COPY_IMAGE: return "write_copy_image";
5615 case OPERATION_NAME_WRITE_BLIT_IMAGE: return "write_blit_image";
5616 case OPERATION_NAME_WRITE_SSBO_VERTEX: return "write_ssbo_vertex";
5617 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_CONTROL: return "write_ssbo_tess_control";
5618 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_EVALUATION: return "write_ssbo_tess_eval";
5619 case OPERATION_NAME_WRITE_SSBO_GEOMETRY: return "write_ssbo_geometry";
5620 case OPERATION_NAME_WRITE_SSBO_FRAGMENT: return "write_ssbo_fragment";
5621 case OPERATION_NAME_WRITE_SSBO_COMPUTE: return "write_ssbo_compute";
5622 case OPERATION_NAME_WRITE_SSBO_COMPUTE_INDIRECT: return "write_ssbo_compute_indirect";
5623 case OPERATION_NAME_WRITE_IMAGE_VERTEX: return "write_image_vertex";
5624 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_CONTROL: return "write_image_tess_control";
5625 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_EVALUATION: return "write_image_tess_eval";
5626 case OPERATION_NAME_WRITE_IMAGE_GEOMETRY: return "write_image_geometry";
5627 case OPERATION_NAME_WRITE_IMAGE_FRAGMENT: return "write_image_fragment";
5628 case OPERATION_NAME_WRITE_IMAGE_COMPUTE: return "write_image_compute";
5629 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_MULTISAMPLE: return "write_image_compute_multisample";
5630 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_INDIRECT: return "write_image_compute_indirect";
5631 case OPERATION_NAME_WRITE_CLEAR_COLOR_IMAGE: return "write_clear_color_image";
5632 case OPERATION_NAME_WRITE_CLEAR_DEPTH_STENCIL_IMAGE: return "write_clear_depth_stencil_image";
5633 case OPERATION_NAME_WRITE_DRAW: return "write_draw";
5634 case OPERATION_NAME_WRITE_DRAW_INDEXED: return "write_draw_indexed";
5635 case OPERATION_NAME_WRITE_DRAW_INDIRECT: return "write_draw_indirect";
5636 case OPERATION_NAME_WRITE_DRAW_INDEXED_INDIRECT: return "write_draw_indexed_indirect";
5637 case OPERATION_NAME_WRITE_CLEAR_ATTACHMENTS: return "write_clear_attachments";
5638 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW: return "write_indirect_buffer_draw";
5639 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW_INDEXED: return "write_indirect_buffer_draw_indexed";
5640 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DISPATCH: return "write_indirect_buffer_dispatch";
5641 case OPERATION_NAME_WRITE_UPDATE_INDEX_BUFFER: return "write_update_index_buffer";
5642
5643 case OPERATION_NAME_READ_COPY_BUFFER: return "read_copy_buffer";
5644 case OPERATION_NAME_READ_COPY_BUFFER_TO_IMAGE: return "read_copy_buffer_to_image";
5645 case OPERATION_NAME_READ_COPY_IMAGE_TO_BUFFER: return "read_copy_image_to_buffer";
5646 case OPERATION_NAME_READ_COPY_IMAGE: return "read_copy_image";
5647 case OPERATION_NAME_READ_BLIT_IMAGE: return "read_blit_image";
5648 case OPERATION_NAME_READ_RESOLVE_IMAGE: return "read_resolve_image";
5649 case OPERATION_NAME_READ_UBO_VERTEX: return "read_ubo_vertex";
5650 case OPERATION_NAME_READ_UBO_TESSELLATION_CONTROL: return "read_ubo_tess_control";
5651 case OPERATION_NAME_READ_UBO_TESSELLATION_EVALUATION: return "read_ubo_tess_eval";
5652 case OPERATION_NAME_READ_UBO_GEOMETRY: return "read_ubo_geometry";
5653 case OPERATION_NAME_READ_UBO_FRAGMENT: return "read_ubo_fragment";
5654 case OPERATION_NAME_READ_UBO_COMPUTE: return "read_ubo_compute";
5655 case OPERATION_NAME_READ_UBO_COMPUTE_INDIRECT: return "read_ubo_compute_indirect";
5656 case OPERATION_NAME_READ_UBO_TEXEL_VERTEX: return "read_ubo_texel_vertex";
5657 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_CONTROL: return "read_ubo_texel_tess_control";
5658 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_EVALUATION: return "read_ubo_texel_tess_eval";
5659 case OPERATION_NAME_READ_UBO_TEXEL_GEOMETRY: return "read_ubo_texel_geometry";
5660 case OPERATION_NAME_READ_UBO_TEXEL_FRAGMENT: return "read_ubo_texel_fragment";
5661 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE: return "read_ubo_texel_compute";
5662 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE_INDIRECT: return "read_ubo_texel_compute_indirect";
5663 case OPERATION_NAME_READ_SSBO_VERTEX: return "read_ssbo_vertex";
5664 case OPERATION_NAME_READ_SSBO_TESSELLATION_CONTROL: return "read_ssbo_tess_control";
5665 case OPERATION_NAME_READ_SSBO_TESSELLATION_EVALUATION: return "read_ssbo_tess_eval";
5666 case OPERATION_NAME_READ_SSBO_GEOMETRY: return "read_ssbo_geometry";
5667 case OPERATION_NAME_READ_SSBO_FRAGMENT: return "read_ssbo_fragment";
5668 case OPERATION_NAME_READ_SSBO_COMPUTE: return "read_ssbo_compute";
5669 case OPERATION_NAME_READ_SSBO_COMPUTE_INDIRECT: return "read_ssbo_compute_indirect";
5670 case OPERATION_NAME_READ_IMAGE_VERTEX: return "read_image_vertex";
5671 case OPERATION_NAME_READ_IMAGE_TESSELLATION_CONTROL: return "read_image_tess_control";
5672 case OPERATION_NAME_READ_IMAGE_TESSELLATION_EVALUATION: return "read_image_tess_eval";
5673 case OPERATION_NAME_READ_IMAGE_GEOMETRY: return "read_image_geometry";
5674 case OPERATION_NAME_READ_IMAGE_FRAGMENT: return "read_image_fragment";
5675 case OPERATION_NAME_READ_IMAGE_COMPUTE: return "read_image_compute";
5676 case OPERATION_NAME_READ_IMAGE_COMPUTE_INDIRECT: return "read_image_compute_indirect";
5677 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW: return "read_indirect_buffer_draw";
5678 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW_INDEXED: return "read_indirect_buffer_draw_indexed";
5679 case OPERATION_NAME_READ_INDIRECT_BUFFER_DISPATCH: return "read_indirect_buffer_dispatch";
5680 case OPERATION_NAME_READ_VERTEX_INPUT: return "read_vertex_input";
5681 case OPERATION_NAME_READ_INDEX_INPUT: return "read_index_input";
5682
5683 case OPERATION_NAME_COPY_BUFFER: return "copy_buffer";
5684 case OPERATION_NAME_COPY_IMAGE: return "copy_image";
5685 case OPERATION_NAME_BLIT_IMAGE: return "blit_image";
5686 case OPERATION_NAME_COPY_SSBO_VERTEX: return "copy_buffer_vertex";
5687 case OPERATION_NAME_COPY_SSBO_TESSELLATION_CONTROL: return "copy_ssbo_tess_control";
5688 case OPERATION_NAME_COPY_SSBO_TESSELLATION_EVALUATION: return "copy_ssbo_tess_eval";
5689 case OPERATION_NAME_COPY_SSBO_GEOMETRY: return "copy_ssbo_geometry";
5690 case OPERATION_NAME_COPY_SSBO_FRAGMENT: return "copy_ssbo_fragment";
5691 case OPERATION_NAME_COPY_SSBO_COMPUTE: return "copy_ssbo_compute";
5692 case OPERATION_NAME_COPY_SSBO_COMPUTE_INDIRECT: return "copy_ssbo_compute_indirect";
5693 case OPERATION_NAME_COPY_IMAGE_VERTEX: return "copy_image_vertex";
5694 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_CONTROL: return "copy_image_tess_control";
5695 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_EVALUATION: return "copy_image_tess_eval";
5696 case OPERATION_NAME_COPY_IMAGE_GEOMETRY: return "copy_image_geometry";
5697 case OPERATION_NAME_COPY_IMAGE_FRAGMENT: return "copy_image_fragment";
5698 case OPERATION_NAME_COPY_IMAGE_COMPUTE: return "copy_image_compute";
5699 case OPERATION_NAME_COPY_IMAGE_COMPUTE_INDIRECT: return "copy_image_compute_indirect";
5700 default:
5701 DE_ASSERT(0);
5702 return "";
5703 }
5704 }
5705
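//! \note Illustrative sketch only (helper name and naming scheme are assumptions): operation names
//!       are plain strings, so a write/read pair can be folded into a single test case name.
static std::string makeCombinedCaseName (const OperationName writeOp, const OperationName readOp)
{
	return getOperationName(writeOp) + "_" + getOperationName(readOp);
}
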
5706 bool isSpecializedAccessFlagSupported (const OperationName opName)
5707 {
5708 switch (opName)
5709 {
5710 case OPERATION_NAME_WRITE_SSBO_VERTEX:
5711 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_CONTROL:
5712 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_EVALUATION:
5713 case OPERATION_NAME_WRITE_SSBO_GEOMETRY:
5714 case OPERATION_NAME_WRITE_SSBO_FRAGMENT:
5715 case OPERATION_NAME_WRITE_SSBO_COMPUTE:
5716 case OPERATION_NAME_WRITE_SSBO_COMPUTE_INDIRECT:
5717 case OPERATION_NAME_WRITE_IMAGE_VERTEX:
5718 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_CONTROL:
5719 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_EVALUATION:
5720 case OPERATION_NAME_WRITE_IMAGE_GEOMETRY:
5721 case OPERATION_NAME_WRITE_IMAGE_FRAGMENT:
5722 case OPERATION_NAME_WRITE_IMAGE_COMPUTE:
5723 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_INDIRECT:
5724 case OPERATION_NAME_READ_UBO_VERTEX:
5725 case OPERATION_NAME_READ_UBO_TESSELLATION_CONTROL:
5726 case OPERATION_NAME_READ_UBO_TESSELLATION_EVALUATION:
5727 case OPERATION_NAME_READ_UBO_GEOMETRY:
5728 case OPERATION_NAME_READ_UBO_FRAGMENT:
5729 case OPERATION_NAME_READ_UBO_COMPUTE:
5730 case OPERATION_NAME_READ_UBO_COMPUTE_INDIRECT:
5731 case OPERATION_NAME_READ_UBO_TEXEL_VERTEX:
5732 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_CONTROL:
5733 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_EVALUATION:
5734 case OPERATION_NAME_READ_UBO_TEXEL_GEOMETRY:
5735 case OPERATION_NAME_READ_UBO_TEXEL_FRAGMENT:
5736 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE:
5737 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE_INDIRECT:
5738 case OPERATION_NAME_READ_SSBO_VERTEX:
5739 case OPERATION_NAME_READ_SSBO_TESSELLATION_CONTROL:
5740 case OPERATION_NAME_READ_SSBO_TESSELLATION_EVALUATION:
5741 case OPERATION_NAME_READ_SSBO_GEOMETRY:
5742 case OPERATION_NAME_READ_SSBO_FRAGMENT:
5743 case OPERATION_NAME_READ_SSBO_COMPUTE:
5744 case OPERATION_NAME_READ_SSBO_COMPUTE_INDIRECT:
5745 case OPERATION_NAME_READ_IMAGE_VERTEX:
5746 case OPERATION_NAME_READ_IMAGE_TESSELLATION_CONTROL:
5747 case OPERATION_NAME_READ_IMAGE_TESSELLATION_EVALUATION:
5748 case OPERATION_NAME_READ_IMAGE_GEOMETRY:
5749 case OPERATION_NAME_READ_IMAGE_FRAGMENT:
5750 case OPERATION_NAME_READ_IMAGE_COMPUTE:
5751 case OPERATION_NAME_READ_IMAGE_COMPUTE_INDIRECT:
5752 case OPERATION_NAME_COPY_SSBO_VERTEX:
5753 case OPERATION_NAME_COPY_SSBO_TESSELLATION_CONTROL:
case OPERATION_NAME_COPY_SSBO_TESSELLATION_EVALUATION:
5754 case OPERATION_NAME_COPY_SSBO_GEOMETRY:
5755 case OPERATION_NAME_COPY_SSBO_FRAGMENT:
5756 case OPERATION_NAME_COPY_SSBO_COMPUTE:
5757 case OPERATION_NAME_COPY_SSBO_COMPUTE_INDIRECT:
5758 case OPERATION_NAME_COPY_IMAGE_VERTEX:
5759 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_CONTROL:
5760 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_EVALUATION:
5761 case OPERATION_NAME_COPY_IMAGE_GEOMETRY:
5762 case OPERATION_NAME_COPY_IMAGE_FRAGMENT:
5763 case OPERATION_NAME_COPY_IMAGE_COMPUTE:
5764 case OPERATION_NAME_COPY_IMAGE_COMPUTE_INDIRECT:
5765 return true;
5766 default:
5767 return false;
5769 }
5770 }
5771 de::MovePtr<OperationSupport> makeOperationSupport (const OperationName opName, const ResourceDescription& resourceDesc, const bool specializedAccess)
5772 {
5773 switch (opName)
5774 {
5775 case OPERATION_NAME_WRITE_FILL_BUFFER: return de::MovePtr<OperationSupport>(new FillUpdateBuffer ::Support (resourceDesc, FillUpdateBuffer::BUFFER_OP_FILL));
5776 case OPERATION_NAME_WRITE_UPDATE_BUFFER: return de::MovePtr<OperationSupport>(new FillUpdateBuffer ::Support (resourceDesc, FillUpdateBuffer::BUFFER_OP_UPDATE));
5777 case OPERATION_NAME_WRITE_COPY_BUFFER: return de::MovePtr<OperationSupport>(new CopyBuffer ::Support (resourceDesc, ACCESS_MODE_WRITE));
5778 case OPERATION_NAME_WRITE_COPY_BUFFER_TO_IMAGE: return de::MovePtr<OperationSupport>(new CopyBufferToImage ::Support (resourceDesc, ACCESS_MODE_WRITE));
5779 case OPERATION_NAME_WRITE_COPY_IMAGE_TO_BUFFER: return de::MovePtr<OperationSupport>(new CopyImageToBuffer ::Support (resourceDesc, ACCESS_MODE_WRITE));
5780 case OPERATION_NAME_WRITE_COPY_IMAGE: return de::MovePtr<OperationSupport>(new CopyBlitResolveImage ::Support (resourceDesc, CopyBlitResolveImage::TYPE_COPY, ACCESS_MODE_WRITE));
5781 case OPERATION_NAME_WRITE_BLIT_IMAGE: return de::MovePtr<OperationSupport>(new CopyBlitResolveImage ::Support (resourceDesc, CopyBlitResolveImage::TYPE_BLIT, ACCESS_MODE_WRITE));
5782 case OPERATION_NAME_WRITE_SSBO_VERTEX: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
5783 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_CONTROL: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
5784 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_EVALUATION: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
5785 case OPERATION_NAME_WRITE_SSBO_GEOMETRY: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
5786 case OPERATION_NAME_WRITE_SSBO_FRAGMENT: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
5787 case OPERATION_NAME_WRITE_SSBO_COMPUTE: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
5788 case OPERATION_NAME_WRITE_SSBO_COMPUTE_INDIRECT: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
5789 case OPERATION_NAME_WRITE_IMAGE_VERTEX: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
5790 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_CONTROL: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
5791 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_EVALUATION: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
5792 case OPERATION_NAME_WRITE_IMAGE_GEOMETRY: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
5793 case OPERATION_NAME_WRITE_IMAGE_FRAGMENT: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
5794 case OPERATION_NAME_WRITE_IMAGE_COMPUTE: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
5795 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_INDIRECT: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
5796 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_MULTISAMPLE: return de::MovePtr<OperationSupport>(new ShaderAccess ::MSImageSupport(resourceDesc));
5797 case OPERATION_NAME_WRITE_CLEAR_COLOR_IMAGE: return de::MovePtr<OperationSupport>(new ClearImage ::Support (resourceDesc, ClearImage::CLEAR_MODE_COLOR));
5798 case OPERATION_NAME_WRITE_CLEAR_DEPTH_STENCIL_IMAGE: return de::MovePtr<OperationSupport>(new ClearImage ::Support (resourceDesc, ClearImage::CLEAR_MODE_DEPTH_STENCIL));
5799 case OPERATION_NAME_WRITE_DRAW: return de::MovePtr<OperationSupport>(new Draw ::Support (resourceDesc, Draw::DRAW_CALL_DRAW));
5800 case OPERATION_NAME_WRITE_DRAW_INDEXED: return de::MovePtr<OperationSupport>(new Draw ::Support (resourceDesc, Draw::DRAW_CALL_DRAW_INDEXED));
5801 case OPERATION_NAME_WRITE_DRAW_INDIRECT: return de::MovePtr<OperationSupport>(new Draw ::Support (resourceDesc, Draw::DRAW_CALL_DRAW_INDIRECT));
5802 case OPERATION_NAME_WRITE_DRAW_INDEXED_INDIRECT: return de::MovePtr<OperationSupport>(new Draw ::Support (resourceDesc, Draw::DRAW_CALL_DRAW_INDEXED_INDIRECT));
5803 case OPERATION_NAME_WRITE_CLEAR_ATTACHMENTS: return de::MovePtr<OperationSupport>(new ClearAttachments ::Support (resourceDesc));
5804 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW: return de::MovePtr<OperationSupport>(new IndirectBuffer ::WriteSupport (resourceDesc));
5805 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW_INDEXED: return de::MovePtr<OperationSupport>(new IndirectBuffer ::WriteSupport (resourceDesc));
5806 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DISPATCH: return de::MovePtr<OperationSupport>(new IndirectBuffer ::WriteSupport (resourceDesc));
5807 case OPERATION_NAME_WRITE_UPDATE_INDEX_BUFFER: return de::MovePtr<OperationSupport>(new FillUpdateBuffer ::Support (resourceDesc, FillUpdateBuffer::BUFFER_OP_UPDATE_WITH_INDEX_PATTERN));
5808
5809 case OPERATION_NAME_READ_COPY_BUFFER: return de::MovePtr<OperationSupport>(new CopyBuffer ::Support (resourceDesc, ACCESS_MODE_READ));
5810 case OPERATION_NAME_READ_COPY_BUFFER_TO_IMAGE: return de::MovePtr<OperationSupport>(new CopyBufferToImage ::Support (resourceDesc, ACCESS_MODE_READ));
5811 case OPERATION_NAME_READ_COPY_IMAGE_TO_BUFFER: return de::MovePtr<OperationSupport>(new CopyImageToBuffer ::Support (resourceDesc, ACCESS_MODE_READ));
5812 case OPERATION_NAME_READ_COPY_IMAGE: return de::MovePtr<OperationSupport>(new CopyBlitResolveImage::Support (resourceDesc, CopyBlitResolveImage::TYPE_COPY, ACCESS_MODE_READ));
5813 case OPERATION_NAME_READ_BLIT_IMAGE: return de::MovePtr<OperationSupport>(new CopyBlitResolveImage::Support (resourceDesc, CopyBlitResolveImage::TYPE_BLIT, ACCESS_MODE_READ));
5814 case OPERATION_NAME_READ_RESOLVE_IMAGE: return de::MovePtr<OperationSupport>(new CopyBlitResolveImage::Support (resourceDesc, CopyBlitResolveImage::TYPE_RESOLVE, ACCESS_MODE_READ));
5815 case OPERATION_NAME_READ_UBO_VERTEX: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
5816 case OPERATION_NAME_READ_UBO_TESSELLATION_CONTROL: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
5817 case OPERATION_NAME_READ_UBO_TESSELLATION_EVALUATION: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
5818 case OPERATION_NAME_READ_UBO_GEOMETRY: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
5819 case OPERATION_NAME_READ_UBO_FRAGMENT: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
5820 case OPERATION_NAME_READ_UBO_COMPUTE: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
5821 case OPERATION_NAME_READ_UBO_COMPUTE_INDIRECT: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
5822 case OPERATION_NAME_READ_UBO_TEXEL_VERTEX: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
5823 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_CONTROL: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
5824 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_EVALUATION: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
5825 case OPERATION_NAME_READ_UBO_TEXEL_GEOMETRY: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
5826 case OPERATION_NAME_READ_UBO_TEXEL_FRAGMENT: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
5827 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
5828 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE_INDIRECT: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
5829 case OPERATION_NAME_READ_SSBO_VERTEX: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
5830 case OPERATION_NAME_READ_SSBO_TESSELLATION_CONTROL: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
5831 case OPERATION_NAME_READ_SSBO_TESSELLATION_EVALUATION: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
5832 case OPERATION_NAME_READ_SSBO_GEOMETRY: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
5833 case OPERATION_NAME_READ_SSBO_FRAGMENT: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
5834 case OPERATION_NAME_READ_SSBO_COMPUTE: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
5835 case OPERATION_NAME_READ_SSBO_COMPUTE_INDIRECT: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
5836 case OPERATION_NAME_READ_IMAGE_VERTEX: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
5837 case OPERATION_NAME_READ_IMAGE_TESSELLATION_CONTROL: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
5838 case OPERATION_NAME_READ_IMAGE_TESSELLATION_EVALUATION: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
5839 case OPERATION_NAME_READ_IMAGE_GEOMETRY: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
5840 case OPERATION_NAME_READ_IMAGE_FRAGMENT: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
5841 case OPERATION_NAME_READ_IMAGE_COMPUTE: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
5842 case OPERATION_NAME_READ_IMAGE_COMPUTE_INDIRECT: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
5843 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW: return de::MovePtr<OperationSupport>(new IndirectBuffer ::ReadSupport (resourceDesc));
5844 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW_INDEXED: return de::MovePtr<OperationSupport>(new IndirectBuffer ::ReadSupport (resourceDesc));
5845 case OPERATION_NAME_READ_INDIRECT_BUFFER_DISPATCH: return de::MovePtr<OperationSupport>(new IndirectBuffer ::ReadSupport (resourceDesc));
5846 case OPERATION_NAME_READ_VERTEX_INPUT: return de::MovePtr<OperationSupport>(new VertexInput ::Support (resourceDesc, VertexInput::DRAW_MODE_VERTEX));
5847 case OPERATION_NAME_READ_INDEX_INPUT: return de::MovePtr<OperationSupport>(new VertexInput ::Support (resourceDesc, VertexInput::DRAW_MODE_INDEXED));
5848
5849 case OPERATION_NAME_COPY_BUFFER: return de::MovePtr<OperationSupport>(new CopyBuffer ::CopySupport (resourceDesc));
5850 case OPERATION_NAME_COPY_IMAGE: return de::MovePtr<OperationSupport>(new CopyBlitResolveImage::CopySupport (resourceDesc, CopyBlitResolveImage::TYPE_COPY));
5851 case OPERATION_NAME_BLIT_IMAGE: return de::MovePtr<OperationSupport>(new CopyBlitResolveImage::CopySupport (resourceDesc, CopyBlitResolveImage::TYPE_BLIT));
5852 case OPERATION_NAME_COPY_SSBO_VERTEX: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
5853 case OPERATION_NAME_COPY_SSBO_TESSELLATION_CONTROL: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
5854 case OPERATION_NAME_COPY_SSBO_TESSELLATION_EVALUATION: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
5855 case OPERATION_NAME_COPY_SSBO_GEOMETRY: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
5856 case OPERATION_NAME_COPY_SSBO_FRAGMENT: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
5857 case OPERATION_NAME_COPY_SSBO_COMPUTE: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
5858 case OPERATION_NAME_COPY_SSBO_COMPUTE_INDIRECT: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
5859 case OPERATION_NAME_COPY_IMAGE_VERTEX: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyImageSupport (resourceDesc, VK_SHADER_STAGE_VERTEX_BIT, specializedAccess));
5860 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_CONTROL: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyImageSupport (resourceDesc, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, specializedAccess));
5861 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_EVALUATION: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyImageSupport (resourceDesc, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, specializedAccess));
5862 case OPERATION_NAME_COPY_IMAGE_GEOMETRY: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyImageSupport (resourceDesc, VK_SHADER_STAGE_GEOMETRY_BIT, specializedAccess));
5863 case OPERATION_NAME_COPY_IMAGE_FRAGMENT: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyImageSupport (resourceDesc, VK_SHADER_STAGE_FRAGMENT_BIT, specializedAccess));
5864 case OPERATION_NAME_COPY_IMAGE_COMPUTE: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyImageSupport (resourceDesc, VK_SHADER_STAGE_COMPUTE_BIT, specializedAccess));
5865 case OPERATION_NAME_COPY_IMAGE_COMPUTE_INDIRECT: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyImageSupport (resourceDesc, VK_SHADER_STAGE_COMPUTE_BIT, specializedAccess, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
5866
5867 default:
5868 DE_ASSERT(0);
5869 return de::MovePtr<OperationSupport>();
5870 }
5871 }
5872
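//! \note Illustrative sketch only (not used by the tests, helper name is hypothetical): creating an
//!       OperationSupport for a name/resource pair, requesting specialized access flags only when the
//!       operation can honour them.
static de::MovePtr<OperationSupport> makeDefaultOperationSupport (const OperationName opName, const ResourceDescription& resourceDesc)
{
	// Unsupported combinations are rejected up front, without constructing a support object.
	if (!isResourceSupported(opName, resourceDesc))
		return de::MovePtr<OperationSupport>();

	return makeOperationSupport(opName, resourceDesc, isSpecializedAccessFlagSupported(opName));
}
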
5873 bool isStageSupported (const vk::VkShaderStageFlagBits stage, const vk::VkQueueFlags queueFlags) {
5874 switch (stage) {
5875 case vk::VK_SHADER_STAGE_VERTEX_BIT:
5876 case vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
5877 case vk::VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
5878 case vk::VK_SHADER_STAGE_GEOMETRY_BIT:
5879 case vk::VK_SHADER_STAGE_FRAGMENT_BIT:
5880 if ((queueFlags & (vk::VK_QUEUE_GRAPHICS_BIT)) == 0)
5881 return false;
5882 break;
5883 case vk::VK_SHADER_STAGE_COMPUTE_BIT:
5884 if ((queueFlags & (vk::VK_QUEUE_COMPUTE_BIT)) == 0)
5885 return false;
5886 break;
5887 default:
5888 break;
5889 }
5890 return true;
5891 }
5892
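//! \note Illustrative sketch only (not used by the tests, helper name is hypothetical): selecting the
//!       first queue family whose capabilities allow a given shader stage, using isStageSupported() above.
static deUint32 findQueueFamilySupportingStage (const vk::InstanceInterface& vki, const vk::VkPhysicalDevice physDevice, const vk::VkShaderStageFlagBits stage)
{
	const std::vector<vk::VkQueueFamilyProperties> queueFamilies = vk::getPhysicalDeviceQueueFamilyProperties(vki, physDevice);

	for (deUint32 familyNdx = 0; familyNdx < static_cast<deUint32>(queueFamilies.size()); ++familyNdx)
		if (isStageSupported(stage, queueFamilies[familyNdx].queueFlags))
			return familyNdx;

	TCU_THROW(NotSupportedError, "No queue family supports the requested shader stage");
}
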
5893 } // synchronization
5894 } // vkt
5895