/*------------------------------------------------------------------------
 * Vulkan Conformance Tests
 * ------------------------
 *
 * Copyright (c) 2019 The Khronos Group Inc.
 * Copyright (c) 2019 Google Inc.
 * Copyright (c) 2017 Codeplay Software Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */ /*!
 * \file
 * \brief Subgroups Tests
 */ /*--------------------------------------------------------------------*/

#include "vktSubgroupsQuadTests.hpp"
#include "vktSubgroupsTestsUtils.hpp"

#include <string>
#include <vector>

using namespace tcu;
using namespace std;
using namespace vk;
using namespace vkt;

namespace
{
enum OpType
{
    OPTYPE_QUAD_BROADCAST = 0,
    OPTYPE_QUAD_BROADCAST_NONCONST,
    OPTYPE_QUAD_SWAP_HORIZONTAL,
    OPTYPE_QUAD_SWAP_VERTICAL,
    OPTYPE_QUAD_SWAP_DIAGONAL,
    OPTYPE_LAST
};

struct CaseDefinition
{
    OpType              opType;
    VkShaderStageFlags  shaderStage;
    VkFormat            format;
    de::SharedPtr<bool> geometryPointSizeSupported;
    deBool              requiredSubgroupSize;
    deBool              requires8BitUniformBuffer;
    deBool              requires16BitUniformBuffer;
};

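// Result checker for the vertex-pipeline (framebuffer) variants: every invocation
// must have written tempRes = 1, which subgroups::check() compares against.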
static bool checkVertexPipelineStages (const void* internalData,
                                       vector<const void*> datas,
                                       deUint32 width,
                                       deUint32)
{
    DE_UNREF(internalData);

    return subgroups::check(datas, width, 1);
}

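// Result checker for the compute and mesh variants: the same tempRes == 1 expectation,
// evaluated over the dispatched workgroups and local invocations.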
static bool checkComputeOrMesh (const void* internalData,
                                vector<const void*> datas,
                                const deUint32 numWorkgroups[3],
                                const deUint32 localSize[3],
                                deUint32)
{
    DE_UNREF(internalData);

    return subgroups::checkComputeOrMesh(datas, numWorkgroups, localSize, 1);
}

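// GLSL built-in exercised by each op type. Both broadcast variants map to
// subgroupQuadBroadcast(); they differ only in whether the index argument is a
// compile-time constant (see getTestSrc and the SPIR-V version selection below).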
string getOpTypeName (OpType opType)
{
    switch (opType)
    {
        case OPTYPE_QUAD_BROADCAST:          return "subgroupQuadBroadcast";
        case OPTYPE_QUAD_BROADCAST_NONCONST: return "subgroupQuadBroadcast";
        case OPTYPE_QUAD_SWAP_HORIZONTAL:    return "subgroupQuadSwapHorizontal";
        case OPTYPE_QUAD_SWAP_VERTICAL:      return "subgroupQuadSwapVertical";
        case OPTYPE_QUAD_SWAP_DIAGONAL:      return "subgroupQuadSwapDiagonal";
        default:                             TCU_THROW(InternalError, "Unsupported op type");
    }
}

string getOpTypeCaseName (OpType opType)
{
    switch (opType)
    {
        case OPTYPE_QUAD_BROADCAST:          return "subgroupquadbroadcast";
        case OPTYPE_QUAD_BROADCAST_NONCONST: return "subgroupquadbroadcast_nonconst";
        case OPTYPE_QUAD_SWAP_HORIZONTAL:    return "subgroupquadswaphorizontal";
        case OPTYPE_QUAD_SWAP_VERTICAL:      return "subgroupquadswapvertical";
        case OPTYPE_QUAD_SWAP_DIAGONAL:      return "subgroupquadswapdiagonal";
        default:                             TCU_THROW(InternalError, "Unsupported op type");
    }
}

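// Both the quad and ballot subgroup extensions are required: the generated checks use
// subgroupBallot()/subgroupBallotBitExtract() to skip comparisons against inactive invocations.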
string getExtHeader (VkFormat format)
{
    return "#extension GL_KHR_shader_subgroup_quad: enable\n"
           "#extension GL_KHR_shader_subgroup_ballot: enable\n"
           + subgroups::getAdditionalExtensionForFormat(format);
}

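// Builds the GLSL body shared by all shader stages. Each invocation performs the quad
// operation, computes the invocation index it expects to have exchanged data with, and
// compares the result against that invocation's input (only when the partner is active,
// according to the ballot mask). As an illustration, for OPTYPE_QUAD_SWAP_HORIZONTAL and
// a float format the returned source is roughly:
//
//   uvec4 mask = subgroupBallot(true);
//   const uint swapTable[4] = {1, 0, 3, 2};
//   tempRes = 1;
//   float op = subgroupQuadSwapHorizontal(data[gl_SubgroupInvocationID]);
//   uint otherID = (gl_SubgroupInvocationID & ~0x3) + swapTable[gl_SubgroupInvocationID & 0x3];
//   if (subgroupBallotBitExtract(mask, otherID) && op != data[otherID])
//     tempRes = 0;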
string getTestSrc (const CaseDefinition& caseDef)
{
    const string swapTable[OPTYPE_LAST] =
    {
        "",
        "",
        "  const uint swapTable[4] = {1, 0, 3, 2};\n",
        "  const uint swapTable[4] = {2, 3, 0, 1};\n",
        "  const uint swapTable[4] = {3, 2, 1, 0};\n",
    };
    const string validate =
        "  if (subgroupBallotBitExtract(mask, otherID) && op != data[otherID])\n"
        "    tempRes = 0;\n";
    const string fmt = subgroups::getFormatNameForGLSL(caseDef.format);
    const string op  = getOpTypeName(caseDef.opType);
    ostringstream testSrc;

    testSrc << "  uvec4 mask = subgroupBallot(true);\n"
            << swapTable[caseDef.opType]
            << "  tempRes = 1;\n";

    if (caseDef.opType == OPTYPE_QUAD_BROADCAST)
    {
        for (int i = 0; i < 4; i++)
        {
            testSrc << "  {\n"
                    << "    " << fmt << " op = " << op << "(data[gl_SubgroupInvocationID], " << i << ");\n"
                    << "    uint otherID = (gl_SubgroupInvocationID & ~0x3) + " << i << ";\n"
                    << validate
                    << "  }\n";
        }
    }
    else if (caseDef.opType == OPTYPE_QUAD_BROADCAST_NONCONST)
    {
        testSrc << "  for (int i=0; i<4; i++)"
                << "  {\n"
                << "    " << fmt << " op = " << op << "(data[gl_SubgroupInvocationID], i);\n"
                << "    uint otherID = (gl_SubgroupInvocationID & ~0x3) + i;\n"
                << validate
                << "  }\n"
                << "  uint quadID = gl_SubgroupInvocationID >> 2;\n"
                << "  uint quadInvocation = gl_SubgroupInvocationID & 0x3;\n"
                << "  // Test lane ID that is only uniform in active lanes\n"
                << "  if (quadInvocation >= 2)\n"
                << "  {\n"
                << "    uint id = quadInvocation & ~1;\n"
                << "    " << fmt << " op = " << op << "(data[gl_SubgroupInvocationID], id);\n"
                << "    uint otherID = 4*quadID + id;\n"
                << validate
                << "  }\n"
                << "  // Test lane ID that is only quad uniform, not subgroup uniform\n"
                << "  {\n"
                << "    uint id = quadID & 0x3;\n"
                << "    " << fmt << " op = " << op << "(data[gl_SubgroupInvocationID], id);\n"
                << "    uint otherID = 4*quadID + id;\n"
                << validate
                << "  }\n";
    }
    else
    {
        testSrc << "  " << fmt << " op = " << op << "(data[gl_SubgroupInvocationID]);\n"
                << "  uint otherID = (gl_SubgroupInvocationID & ~0x3) + swapTable[gl_SubgroupInvocationID & 0x3];\n"
                << validate;
    }

    return testSrc.str();
}

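// Shader sources for the single-stage framebuffer tests. The non-constant broadcast
// variant is built as SPIR-V 1.5, since earlier SPIR-V versions require the quad
// broadcast index to be a constant; all other cases use SPIR-V 1.3.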
void initFrameBufferPrograms (SourceCollections& programCollection, CaseDefinition caseDef)
{
    const SpirvVersion       spirvVersion = (caseDef.opType == OPTYPE_QUAD_BROADCAST_NONCONST) ? SPIRV_VERSION_1_5 : SPIRV_VERSION_1_3;
    const ShaderBuildOptions buildOptions (programCollection.usedVulkanVersion, spirvVersion, 0u);

    subgroups::initStdFrameBufferPrograms(programCollection, buildOptions, caseDef.shaderStage, caseDef.format, *caseDef.geometryPointSizeSupported, getExtHeader(caseDef.format), getTestSrc(caseDef), "");
}

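// Shader sources for the SSBO-based tests. Ray tracing and mesh shading stage sets need
// at least SPIR-V 1.4; the non-constant broadcast variant again forces SPIR-V 1.5.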
void initPrograms (SourceCollections& programCollection, CaseDefinition caseDef)
{
    const bool spirv15required = caseDef.opType == OPTYPE_QUAD_BROADCAST_NONCONST;
#ifndef CTS_USES_VULKANSC
    const bool spirv14required = (isAllRayTracingStages(caseDef.shaderStage) || isAllMeshShadingStages(caseDef.shaderStage));
#else
    const bool spirv14required = false;
#endif // CTS_USES_VULKANSC
    const SpirvVersion spirvVersion = spirv15required ? SPIRV_VERSION_1_5
                                    : spirv14required ? SPIRV_VERSION_1_4
                                    : SPIRV_VERSION_1_3;
    const ShaderBuildOptions buildOptions (programCollection.usedVulkanVersion, spirvVersion, 0u, (spirv14required && !spirv15required));
    const string extHeader = getExtHeader(caseDef.format);
    const string testSrc   = getTestSrc(caseDef);

    subgroups::initStdPrograms(programCollection, buildOptions, caseDef.shaderStage, caseDef.format, *caseDef.geometryPointSizeSupported, extHeader, testSrc, "");
}

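// Checks every capability the selected case relies on and throws NotSupportedError
// otherwise: quad operations in the requested stage(s), format support, 8/16-bit UBO
// storage where needed, SubgroupBroadcastDynamicId for the non-constant broadcast case,
// and the subgroup size control feature when a required subgroup size is requested.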
void supportedCheck (Context& context, CaseDefinition caseDef)
{
    if (!subgroups::isSubgroupSupported(context))
        TCU_THROW(NotSupportedError, "Subgroup operations are not supported");

    if (!subgroups::areQuadOperationsSupportedForStages(context, caseDef.shaderStage))
        TCU_THROW(NotSupportedError, "Device does not support subgroup quad operations in this shader stage");

    if (!subgroups::isFormatSupportedForDevice(context, caseDef.format))
        TCU_THROW(NotSupportedError, "Device does not support the specified format in subgroup operations");

    if (caseDef.requires16BitUniformBuffer)
    {
        if (!subgroups::is16BitUBOStorageSupported(context))
        {
            TCU_THROW(NotSupportedError, "Device does not support 16-bit uniform buffer storage");
        }
    }

    if (caseDef.requires8BitUniformBuffer)
    {
        if (!subgroups::is8BitUBOStorageSupported(context))
        {
            TCU_THROW(NotSupportedError, "Device does not support 8-bit uniform buffer storage");
        }
    }

    if ((caseDef.opType == OPTYPE_QUAD_BROADCAST_NONCONST) && !subgroups::isSubgroupBroadcastDynamicIdSupported(context))
        TCU_THROW(NotSupportedError, "Device does not support SubgroupBroadcastDynamicId");

    if (caseDef.requiredSubgroupSize)
    {
        context.requireDeviceFunctionality("VK_EXT_subgroup_size_control");

#ifndef CTS_USES_VULKANSC
        const VkPhysicalDeviceSubgroupSizeControlFeatures&   subgroupSizeControlFeatures   = context.getSubgroupSizeControlFeatures();
        const VkPhysicalDeviceSubgroupSizeControlProperties& subgroupSizeControlProperties = context.getSubgroupSizeControlProperties();
#else
        const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT&   subgroupSizeControlFeatures   = context.getSubgroupSizeControlFeaturesEXT();
        const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT& subgroupSizeControlProperties = context.getSubgroupSizeControlPropertiesEXT();
#endif // CTS_USES_VULKANSC

        if (subgroupSizeControlFeatures.subgroupSizeControl == DE_FALSE)
            TCU_THROW(NotSupportedError, "Device does not support varying subgroup sizes or a required subgroup size");

        if (subgroupSizeControlFeatures.computeFullSubgroups == DE_FALSE)
            TCU_THROW(NotSupportedError, "Device does not support full subgroups in compute shaders");

        if ((subgroupSizeControlProperties.requiredSubgroupSizeStages & caseDef.shaderStage) != caseDef.shaderStage)
            TCU_THROW(NotSupportedError, "Required subgroup size is not supported for this shader stage");
    }

    *caseDef.geometryPointSizeSupported = subgroups::isTessellationAndGeometryPointSizeSupported(context);

#ifndef CTS_USES_VULKANSC
    if (isAllRayTracingStages(caseDef.shaderStage))
    {
        context.requireDeviceFunctionality("VK_KHR_ray_tracing_pipeline");
    }
    else if (isAllMeshShadingStages(caseDef.shaderStage))
    {
        context.requireDeviceCoreFeature(DEVICE_CORE_FEATURE_VERTEX_PIPELINE_STORES_AND_ATOMICS);
        context.requireDeviceFunctionality("VK_EXT_mesh_shader");

        if ((caseDef.shaderStage & VK_SHADER_STAGE_TASK_BIT_EXT) != 0u)
        {
            const auto& features = context.getMeshShaderFeaturesEXT();
            if (!features.taskShader)
                TCU_THROW(NotSupportedError, "Task shaders not supported");
        }
    }
#endif // CTS_USES_VULKANSC

    subgroups::supportedCheckShader(context, caseDef.shaderStage);
}

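// Framebuffer variant used for single vertex-pipeline stages: the input data is
// provided through a UBO and the per-invocation results are verified with
// checkVertexPipelineStages.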
TestStatus noSSBOtest (Context& context, const CaseDefinition caseDef)
{
    subgroups::SSBOData inputData;
    inputData.format         = caseDef.format;
    inputData.layout         = subgroups::SSBOData::LayoutStd140;
    inputData.numElements    = subgroups::maxSupportedSubgroupSize();
    inputData.initializeType = subgroups::SSBOData::InitializeNonZero;
    inputData.bindingType    = subgroups::SSBOData::BindingUBO;

    switch (caseDef.shaderStage)
    {
        case VK_SHADER_STAGE_VERTEX_BIT:                  return subgroups::makeVertexFrameBufferTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkVertexPipelineStages);
        case VK_SHADER_STAGE_GEOMETRY_BIT:                return subgroups::makeGeometryFrameBufferTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkVertexPipelineStages);
        case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:    return subgroups::makeTessellationEvaluationFrameBufferTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkVertexPipelineStages, caseDef.shaderStage);
        case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT: return subgroups::makeTessellationEvaluationFrameBufferTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkVertexPipelineStages, caseDef.shaderStage);
        default:                                          TCU_THROW(InternalError, "Unhandled shader stage");
    }
}

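// SSBO-based variant covering the compute, mesh, all-graphics and ray tracing stage
// sets. For compute/mesh, when a required subgroup size is requested the test is
// repeated for every power-of-two size in the device's supported [min, max] range.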
TestStatus test (Context& context, const CaseDefinition caseDef)
{
    const bool isCompute = isAllComputeStages(caseDef.shaderStage);
#ifndef CTS_USES_VULKANSC
    const bool isMesh    = isAllMeshShadingStages(caseDef.shaderStage);
#else
    const bool isMesh    = false;
#endif // CTS_USES_VULKANSC
    DE_ASSERT(!(isCompute && isMesh));

    if (isCompute || isMesh)
    {
#ifndef CTS_USES_VULKANSC
        const VkPhysicalDeviceSubgroupSizeControlProperties&    subgroupSizeControlProperties = context.getSubgroupSizeControlProperties();
#else
        const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT& subgroupSizeControlProperties = context.getSubgroupSizeControlPropertiesEXT();
#endif // CTS_USES_VULKANSC
        TestLog& log = context.getTestContext().getLog();
        const subgroups::SSBOData inputData
        {
            subgroups::SSBOData::InitializeNonZero,  // InputDataInitializeType initializeType;
            subgroups::SSBOData::LayoutStd430,       // InputDataLayoutType     layout;
            caseDef.format,                          // vk::VkFormat            format;
            subgroups::maxSupportedSubgroupSize(),   // vk::VkDeviceSize        numElements;
        };

        if (caseDef.requiredSubgroupSize == DE_FALSE)
        {
            if (isCompute)
                return subgroups::makeComputeTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkComputeOrMesh);
            else
                return subgroups::makeMeshTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkComputeOrMesh);
        }

        log << TestLog::Message << "Testing required subgroup size range [" << subgroupSizeControlProperties.minSubgroupSize << ", "
            << subgroupSizeControlProperties.maxSubgroupSize << "]" << TestLog::EndMessage;

        // According to the spec, requiredSubgroupSize must be a power-of-two integer.
        for (deUint32 size = subgroupSizeControlProperties.minSubgroupSize; size <= subgroupSizeControlProperties.maxSubgroupSize; size *= 2)
        {
            TestStatus result (QP_TEST_RESULT_INTERNAL_ERROR, "Internal Error");

            if (isCompute)
                result = subgroups::makeComputeTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkComputeOrMesh, size);
            else
                result = subgroups::makeMeshTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkComputeOrMesh, size);

            if (result.getCode() != QP_TEST_RESULT_PASS)
            {
                log << TestLog::Message << "subgroupSize " << size << " failed" << TestLog::EndMessage;
                return result;
            }
        }

        return TestStatus::pass("OK");
    }
    else if (isAllGraphicsStages(caseDef.shaderStage))
    {
        const VkShaderStageFlags stages = subgroups::getPossibleGraphicsSubgroupStages(context, caseDef.shaderStage);
        subgroups::SSBOData      inputData;

        inputData.format         = caseDef.format;
        inputData.layout         = subgroups::SSBOData::LayoutStd430;
        inputData.numElements    = subgroups::maxSupportedSubgroupSize();
        inputData.initializeType = subgroups::SSBOData::InitializeNonZero;
        inputData.binding        = 4u;
        inputData.stages         = stages;

        return subgroups::allStages(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkVertexPipelineStages, stages);
    }
#ifndef CTS_USES_VULKANSC
    else if (isAllRayTracingStages(caseDef.shaderStage))
    {
        const VkShaderStageFlags stages = subgroups::getPossibleRayTracingSubgroupStages(context, caseDef.shaderStage);
        const subgroups::SSBOData inputData =
        {
            subgroups::SSBOData::InitializeNonZero,  // InputDataInitializeType initializeType;
            subgroups::SSBOData::LayoutStd430,       // InputDataLayoutType     layout;
            caseDef.format,                          // vk::VkFormat            format;
            subgroups::maxSupportedSubgroupSize(),   // vk::VkDeviceSize        numElements;
            subgroups::SSBOData::BindingSSBO,        // BindingType             bindingType;
            6u,                                      // deUint32                binding;
            stages,                                  // vk::VkShaderStageFlags  stages;
        };

        return subgroups::allRayTracingStages(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkVertexPipelineStages, stages);
    }
#endif // CTS_USES_VULKANSC
    else
        TCU_THROW(InternalError, "Unknown stage or invalid stage set");
}
} // namespace

namespace vkt
{
namespace subgroups
{
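// Creates the "quad" test group: graphics, compute and framebuffer subtrees (plus
// ray_tracing and mesh when not building for Vulkan SC), covering every supported
// format and quad operation.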
TestCaseGroup* createSubgroupsQuadTests (TestContext& testCtx)
{
    de::MovePtr<TestCaseGroup> group            (new TestCaseGroup(testCtx, "quad", "Subgroup quad category tests"));
    de::MovePtr<TestCaseGroup> graphicGroup     (new TestCaseGroup(testCtx, "graphics", "Subgroup quad category tests: graphics"));
    de::MovePtr<TestCaseGroup> computeGroup     (new TestCaseGroup(testCtx, "compute", "Subgroup quad category tests: compute"));
    de::MovePtr<TestCaseGroup> framebufferGroup (new TestCaseGroup(testCtx, "framebuffer", "Subgroup quad category tests: framebuffer"));
#ifndef CTS_USES_VULKANSC
    de::MovePtr<TestCaseGroup> raytracingGroup  (new TestCaseGroup(testCtx, "ray_tracing", "Subgroup quad category tests: ray tracing"));
    de::MovePtr<TestCaseGroup> meshGroup        (new TestCaseGroup(testCtx, "mesh", "Subgroup quad category tests: mesh shading"));
#endif // CTS_USES_VULKANSC
    const VkShaderStageFlags fbStages[] =
    {
        VK_SHADER_STAGE_VERTEX_BIT,
        VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
        VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
        VK_SHADER_STAGE_GEOMETRY_BIT,
    };
#ifndef CTS_USES_VULKANSC
    const VkShaderStageFlags meshStages[] =
    {
        VK_SHADER_STAGE_MESH_BIT_EXT,
        VK_SHADER_STAGE_TASK_BIT_EXT,
    };
#endif // CTS_USES_VULKANSC
    const deBool boolValues[] =
    {
        DE_FALSE,
        DE_TRUE
    };

    {
        const vector<VkFormat> formats = subgroups::getAllFormats();

        for (size_t formatIndex = 0; formatIndex < formats.size(); ++formatIndex)
        {
            const VkFormat format               = formats[formatIndex];
            const string   formatName           = subgroups::getFormatNameForGLSL(format);
            const bool     needs8BitUBOStorage  = isFormat8bitTy(format);
            const bool     needs16BitUBOStorage = isFormat16BitTy(format);

            for (int opTypeIndex = 0; opTypeIndex < OPTYPE_LAST; ++opTypeIndex)
            {
                const OpType opType = static_cast<OpType>(opTypeIndex);
                const string name   = getOpTypeCaseName(opType) + "_" + formatName;

                // Compute cases, with and without a required subgroup size.
                for (size_t groupSizeNdx = 0; groupSizeNdx < DE_LENGTH_OF_ARRAY(boolValues); ++groupSizeNdx)
                {
                    const deBool requiredSubgroupSize = boolValues[groupSizeNdx];
                    const string testNameSuffix       = requiredSubgroupSize ? "_requiredsubgroupsize" : "";
                    const string testName             = name + testNameSuffix;
                    const CaseDefinition caseDef =
                    {
                        opType,                        // OpType              opType;
                        VK_SHADER_STAGE_COMPUTE_BIT,   // VkShaderStageFlags  shaderStage;
                        format,                        // VkFormat            format;
                        de::SharedPtr<bool>(new bool), // de::SharedPtr<bool> geometryPointSizeSupported;
                        requiredSubgroupSize,          // deBool              requiredSubgroupSize;
                        DE_FALSE,                      // deBool              requires8BitUniformBuffer;
                        DE_FALSE                       // deBool              requires16BitUniformBuffer;
                    };

                    addFunctionCaseWithPrograms(computeGroup.get(), testName, supportedCheck, initPrograms, test, caseDef);
                }

#ifndef CTS_USES_VULKANSC
                // Mesh and task shader cases, with and without a required subgroup size.
                for (size_t groupSizeNdx = 0; groupSizeNdx < DE_LENGTH_OF_ARRAY(boolValues); ++groupSizeNdx)
                {
                    for (const auto& stage : meshStages)
                    {
                        const deBool requiredSubgroupSize = boolValues[groupSizeNdx];
                        const string testNameSuffix       = requiredSubgroupSize ? "_requiredsubgroupsize" : "";
                        const string testName             = name + testNameSuffix + "_" + getShaderStageName(stage);
                        const CaseDefinition caseDef =
                        {
                            opType,                        // OpType              opType;
                            stage,                         // VkShaderStageFlags  shaderStage;
                            format,                        // VkFormat            format;
                            de::SharedPtr<bool>(new bool), // de::SharedPtr<bool> geometryPointSizeSupported;
                            requiredSubgroupSize,          // deBool              requiredSubgroupSize;
                            DE_FALSE,                      // deBool              requires8BitUniformBuffer;
                            DE_FALSE                       // deBool              requires16BitUniformBuffer;
                        };

                        addFunctionCaseWithPrograms(meshGroup.get(), testName, supportedCheck, initPrograms, test, caseDef);
                    }
                }
#endif // CTS_USES_VULKANSC

                // Graphics case covering all graphics stages at once.
                {
                    const CaseDefinition caseDef =
                    {
                        opType,                        // OpType              opType;
                        VK_SHADER_STAGE_ALL_GRAPHICS,  // VkShaderStageFlags  shaderStage;
                        format,                        // VkFormat            format;
                        de::SharedPtr<bool>(new bool), // de::SharedPtr<bool> geometryPointSizeSupported;
                        DE_FALSE,                      // deBool              requiredSubgroupSize;
                        DE_FALSE,                      // deBool              requires8BitUniformBuffer;
                        DE_FALSE                       // deBool              requires16BitUniformBuffer;
                    };

                    addFunctionCaseWithPrograms(graphicGroup.get(), name, supportedCheck, initPrograms, test, caseDef);
                }

                // Framebuffer cases, one per single vertex-pipeline stage.
                for (int stageIndex = 0; stageIndex < DE_LENGTH_OF_ARRAY(fbStages); ++stageIndex)
                {
                    const CaseDefinition caseDef =
                    {
                        opType,                        // OpType              opType;
                        fbStages[stageIndex],          // VkShaderStageFlags  shaderStage;
                        format,                        // VkFormat            format;
                        de::SharedPtr<bool>(new bool), // de::SharedPtr<bool> geometryPointSizeSupported;
                        DE_FALSE,                      // deBool              requiredSubgroupSize;
                        deBool(needs8BitUBOStorage),   // deBool              requires8BitUniformBuffer;
                        deBool(needs16BitUBOStorage)   // deBool              requires16BitUniformBuffer;
                    };
                    const string testName = name + "_" + getShaderStageName(caseDef.shaderStage);

                    addFunctionCaseWithPrograms(framebufferGroup.get(), testName, supportedCheck, initFrameBufferPrograms, noSSBOtest, caseDef);
                }
            }
        }
    }

#ifndef CTS_USES_VULKANSC
    {
        const vector<VkFormat> formats = subgroups::getAllRayTracingFormats();

        for (size_t formatIndex = 0; formatIndex < formats.size(); ++formatIndex)
        {
            const VkFormat format     = formats[formatIndex];
            const string   formatName = subgroups::getFormatNameForGLSL(format);

            for (int opTypeIndex = 0; opTypeIndex < OPTYPE_LAST; ++opTypeIndex)
            {
                const OpType opType   = static_cast<OpType>(opTypeIndex);
                const string testName = getOpTypeCaseName(opType) + "_" + formatName;
                const CaseDefinition caseDef =
                {
                    opType,                        // OpType              opType;
                    SHADER_STAGE_ALL_RAY_TRACING,  // VkShaderStageFlags  shaderStage;
                    format,                        // VkFormat            format;
                    de::SharedPtr<bool>(new bool), // de::SharedPtr<bool> geometryPointSizeSupported;
                    DE_FALSE,                      // deBool              requiredSubgroupSize;
                    DE_FALSE,                      // deBool              requires8BitUniformBuffer;
                    DE_FALSE                       // deBool              requires16BitUniformBuffer;
                };

                addFunctionCaseWithPrograms(raytracingGroup.get(), testName, supportedCheck, initPrograms, test, caseDef);
            }
        }
    }
#endif // CTS_USES_VULKANSC

    group->addChild(graphicGroup.release());
    group->addChild(computeGroup.release());
    group->addChild(framebufferGroup.release());
#ifndef CTS_USES_VULKANSC
    group->addChild(raytracingGroup.release());
    group->addChild(meshGroup.release());
#endif // CTS_USES_VULKANSC

    return group.release();
}
} // subgroups
} // vkt
565