1 /*------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
4 *
5 * Copyright (c) 2019 The Khronos Group Inc.
6 * Copyright (c) 2019 Google Inc.
7 * Copyright (c) 2017 Codeplay Software Ltd.
8 *
9 * Licensed under the Apache License, Version 2.0 (the "License");
10 * you may not use this file except in compliance with the License.
11 * You may obtain a copy of the License at
12 *
13 * http://www.apache.org/licenses/LICENSE-2.0
14 *
15 * Unless required by applicable law or agreed to in writing, software
16 * distributed under the License is distributed on an "AS IS" BASIS,
17 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
18 * See the License for the specific language governing permissions and
19 * limitations under the License.
20 *
21 */ /*!
22 * \file
23 * \brief Subgroups Tests
24 */ /*--------------------------------------------------------------------*/
25
26 #include "vktSubgroupsQuadTests.hpp"
27 #include "vktSubgroupsTestsUtils.hpp"
28
29 #include <string>
30 #include <vector>
31
32 using namespace tcu;
33 using namespace std;
34 using namespace vk;
35 using namespace vkt;
36
37 namespace
38 {
// Quad subgroup operations exercised by these tests.
enum OpType
{
    OPTYPE_QUAD_BROADCAST = 0,      // subgroupQuadBroadcast with a compile-time constant lane id
    OPTYPE_QUAD_BROADCAST_NONCONST, // subgroupQuadBroadcast with a dynamic lane id (built as SPIR-V 1.5)
    OPTYPE_QUAD_SWAP_HORIZONTAL,    // subgroupQuadSwapHorizontal
    OPTYPE_QUAD_SWAP_VERTICAL,      // subgroupQuadSwapVertical
    OPTYPE_QUAD_SWAP_DIAGONAL,      // subgroupQuadSwapDiagonal
    OPTYPE_LAST                     // sentinel: number of op types, used to size per-op tables
};
48
// Parameters that fully identify one generated test case.
struct CaseDefinition
{
    OpType opType;                  // quad operation under test
    VkShaderStageFlags shaderStage; // shader stage(s) the test runs in
    VkFormat format;                // operand data format
    // Set by supportedCheck() and read when generating shader sources.
    de::SharedPtr<bool> geometryPointSizeSupported;
    bool requiredSubgroupSize;      // if true, iterate required subgroup sizes (VK_EXT_subgroup_size_control)
    bool requires8BitUniformBuffer; // format needs 8-bit UBO storage support
    bool requires16BitUniformBuffer; // format needs 16-bit UBO storage support
};
59
checkVertexPipelineStages(const void * internalData,vector<const void * > datas,uint32_t width,uint32_t)60 static bool checkVertexPipelineStages(const void *internalData, vector<const void *> datas, uint32_t width, uint32_t)
61 {
62 DE_UNREF(internalData);
63
64 return subgroups::check(datas, width, 1);
65 }
66
checkComputeOrMesh(const void * internalData,vector<const void * > datas,const uint32_t numWorkgroups[3],const uint32_t localSize[3],uint32_t)67 static bool checkComputeOrMesh(const void *internalData, vector<const void *> datas, const uint32_t numWorkgroups[3],
68 const uint32_t localSize[3], uint32_t)
69 {
70 DE_UNREF(internalData);
71
72 return subgroups::checkComputeOrMesh(datas, numWorkgroups, localSize, 1);
73 }
74
getOpTypeName(OpType opType)75 string getOpTypeName(OpType opType)
76 {
77 switch (opType)
78 {
79 case OPTYPE_QUAD_BROADCAST:
80 return "subgroupQuadBroadcast";
81 case OPTYPE_QUAD_BROADCAST_NONCONST:
82 return "subgroupQuadBroadcast";
83 case OPTYPE_QUAD_SWAP_HORIZONTAL:
84 return "subgroupQuadSwapHorizontal";
85 case OPTYPE_QUAD_SWAP_VERTICAL:
86 return "subgroupQuadSwapVertical";
87 case OPTYPE_QUAD_SWAP_DIAGONAL:
88 return "subgroupQuadSwapDiagonal";
89 default:
90 TCU_THROW(InternalError, "Unsupported op type");
91 }
92 }
93
getOpTypeCaseName(OpType opType)94 string getOpTypeCaseName(OpType opType)
95 {
96 switch (opType)
97 {
98 case OPTYPE_QUAD_BROADCAST:
99 return "subgroupquadbroadcast";
100 case OPTYPE_QUAD_BROADCAST_NONCONST:
101 return "subgroupquadbroadcast_nonconst";
102 case OPTYPE_QUAD_SWAP_HORIZONTAL:
103 return "subgroupquadswaphorizontal";
104 case OPTYPE_QUAD_SWAP_VERTICAL:
105 return "subgroupquadswapvertical";
106 case OPTYPE_QUAD_SWAP_DIAGONAL:
107 return "subgroupquadswapdiagonal";
108 default:
109 TCU_THROW(InternalError, "Unsupported op type");
110 }
111 }
112
getExtHeader(VkFormat format)113 string getExtHeader(VkFormat format)
114 {
115 return "#extension GL_KHR_shader_subgroup_quad: enable\n"
116 "#extension GL_KHR_shader_subgroup_ballot: enable\n" +
117 subgroups::getAdditionalExtensionForFormat(format);
118 }
119
// Generates the GLSL test body for the given case. The shader writes 1 to
// tempRes when every quad op result matches the expected source invocation's
// input, and 0 on the first mismatch.
string getTestSrc(const CaseDefinition &caseDef)
{
    // Per-op snippet declaring which quad slot (0..3) each slot exchanges data
    // with; the two broadcast variants use no table.
    const string swapTable[OPTYPE_LAST] = {
        "",
        "",
        " const uint swapTable[4] = {1, 0, 3, 2};\n",
        " const uint swapTable[4] = {2, 3, 0, 1};\n",
        " const uint swapTable[4] = {3, 2, 1, 0};\n",
    };
    // Shared validation: if the source invocation `otherID` is active (per the
    // ballot mask), the op result must equal that invocation's input datum.
    const string validate = " if (subgroupBallotBitExtract(mask, otherID) && op !=data[otherID])\n"
                            " tempRes = 0;\n";
    const string fmt = subgroups::getFormatNameForGLSL(caseDef.format);
    const string op = getOpTypeName(caseDef.opType);
    ostringstream testSrc;

    testSrc << " uvec4 mask = subgroupBallot(true);\n" << swapTable[caseDef.opType] << " tempRes = 1;\n";

    if (caseDef.opType == OPTYPE_QUAD_BROADCAST)
    {
        // Constant-id broadcast: unroll all four quad lane ids at generation
        // time so each broadcast id is a compile-time constant in the shader.
        for (int i = 0; i < 4; i++)
        {
            testSrc << " {\n"
                    << " " << fmt << " op = " << op << "(data[gl_SubgroupInvocationID], " << i << ");\n"
                    << " uint otherID = (gl_SubgroupInvocationID & ~0x3) + " << i << ";\n"
                    << validate << " }\n";
        }
    }
    else if (caseDef.opType == OPTYPE_QUAD_BROADCAST_NONCONST)
    {
        // Dynamic-id broadcast: the lane id is a runtime value. Three flavors
        // are checked: a shader loop variable, an id uniform only among the
        // active lanes, and an id uniform per quad but not per subgroup.
        testSrc << " for (int i=0; i<4; i++)"
                << " {\n"
                << " " << fmt << " op = " << op << "(data[gl_SubgroupInvocationID], i);\n"
                << " uint otherID = (gl_SubgroupInvocationID & ~0x3) + i;\n"
                << validate << " }\n"
                << " uint quadID = gl_SubgroupInvocationID >> 2;\n"
                << " uint quadInvocation = gl_SubgroupInvocationID & 0x3;\n"
                << " // Test lane ID that is only uniform in active lanes\n"
                << " if (quadInvocation >= 2)\n"
                << " {\n"
                << " uint id = quadInvocation & ~1;\n"
                << " " << fmt << " op = " << op << "(data[gl_SubgroupInvocationID], id);\n"
                << " uint otherID = 4*quadID + id;\n"
                << validate << " }\n"
                << " // Test lane ID that is only quad uniform, not subgroup uniform\n"
                << " {\n"
                << " uint id = quadID & 0x3;\n"
                << " " << fmt << " op = " << op << "(data[gl_SubgroupInvocationID], id);\n"
                << " uint otherID = 4*quadID + id;\n"
                << validate << " }\n";
    }
    else
    {
        // Swap ops: the expected partner comes from this op's swapTable entry.
        testSrc << " " << fmt << " op = " << op << "(data[gl_SubgroupInvocationID]);\n"
                << " uint otherID = (gl_SubgroupInvocationID & ~0x3) + swapTable[gl_SubgroupInvocationID & 0x3];\n"
                << validate;
    }

    return testSrc.str();
}
179
initFrameBufferPrograms(SourceCollections & programCollection,CaseDefinition caseDef)180 void initFrameBufferPrograms(SourceCollections &programCollection, CaseDefinition caseDef)
181 {
182 const SpirvVersion spirvVersion =
183 (caseDef.opType == OPTYPE_QUAD_BROADCAST_NONCONST) ? SPIRV_VERSION_1_5 : SPIRV_VERSION_1_3;
184 const ShaderBuildOptions buildOptions(programCollection.usedVulkanVersion, spirvVersion, 0u);
185
186 subgroups::initStdFrameBufferPrograms(programCollection, buildOptions, caseDef.shaderStage, caseDef.format,
187 *caseDef.geometryPointSizeSupported, getExtHeader(caseDef.format),
188 getTestSrc(caseDef), "");
189 }
190
initPrograms(SourceCollections & programCollection,CaseDefinition caseDef)191 void initPrograms(SourceCollections &programCollection, CaseDefinition caseDef)
192 {
193 const bool spirv15required = caseDef.opType == OPTYPE_QUAD_BROADCAST_NONCONST;
194 #ifndef CTS_USES_VULKANSC
195 const bool spirv14required =
196 (isAllRayTracingStages(caseDef.shaderStage) || isAllMeshShadingStages(caseDef.shaderStage));
197 #else
198 const bool spirv14required = false;
199 #endif // CTS_USES_VULKANSC
200 const SpirvVersion spirvVersion = spirv15required ? SPIRV_VERSION_1_5 :
201 spirv14required ? SPIRV_VERSION_1_4 :
202 SPIRV_VERSION_1_3;
203 const ShaderBuildOptions buildOptions(programCollection.usedVulkanVersion, spirvVersion, 0u,
204 (spirv14required && !spirv15required));
205 const string extHeader = getExtHeader(caseDef.format);
206 const string testSrc = getTestSrc(caseDef);
207
208 subgroups::initStdPrograms(programCollection, buildOptions, caseDef.shaderStage, caseDef.format,
209 *caseDef.geometryPointSizeSupported, extHeader, testSrc, "");
210 }
211
// Throws NotSupportedError unless the device supports everything this case
// needs: subgroups, quad ops in the target stage(s), the operand format,
// optional 8/16-bit UBO storage, dynamic broadcast ids, required subgroup
// sizes, and stage-specific extensions. Also records point-size support.
void supportedCheck(Context &context, CaseDefinition caseDef)
{
    if (!subgroups::isSubgroupSupported(context))
        TCU_THROW(NotSupportedError, "Subgroup operations are not supported");

    if (!subgroups::areQuadOperationsSupportedForStages(context, caseDef.shaderStage))
        TCU_THROW(NotSupportedError, "Device does not support subgroup quad operations in this shader stage");

    if (!subgroups::isFormatSupportedForDevice(context, caseDef.format))
        TCU_THROW(NotSupportedError, "Device does not support the specified format in subgroup operations");

    // Small-type formats additionally need UBO storage support for that width.
    if (caseDef.requires16BitUniformBuffer)
    {
        if (!subgroups::is16BitUBOStorageSupported(context))
        {
            TCU_THROW(NotSupportedError, "Device does not support the specified format in subgroup operations");
        }
    }

    if (caseDef.requires8BitUniformBuffer)
    {
        if (!subgroups::is8BitUBOStorageSupported(context))
        {
            TCU_THROW(NotSupportedError, "Device does not support the specified format in subgroup operations");
        }
    }

    // The nonconst broadcast variant relies on the SubgroupBroadcastDynamicId
    // SPIR-V capability (Vulkan 1.2 feature).
    if ((caseDef.opType == OPTYPE_QUAD_BROADCAST_NONCONST) &&
        !subgroups::isSubgroupBroadcastDynamicIdSupported(context))
        TCU_THROW(NotSupportedError, "Device does not support SubgroupBroadcastDynamicId");

    if (caseDef.requiredSubgroupSize)
    {
        context.requireDeviceFunctionality("VK_EXT_subgroup_size_control");

        // Vulkan SC exposes the same data through the EXT-suffixed structs.
#ifndef CTS_USES_VULKANSC
        const VkPhysicalDeviceSubgroupSizeControlFeatures &subgroupSizeControlFeatures =
            context.getSubgroupSizeControlFeatures();
        const VkPhysicalDeviceSubgroupSizeControlProperties &subgroupSizeControlProperties =
            context.getSubgroupSizeControlProperties();
#else
        const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT &subgroupSizeControlFeatures =
            context.getSubgroupSizeControlFeaturesEXT();
        const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT &subgroupSizeControlProperties =
            context.getSubgroupSizeControlPropertiesEXT();
#endif // CTS_USES_VULKANSC

        if (subgroupSizeControlFeatures.subgroupSizeControl == false)
            TCU_THROW(NotSupportedError, "Device does not support varying subgroup sizes nor required subgroup size");

        if (subgroupSizeControlFeatures.computeFullSubgroups == false)
            TCU_THROW(NotSupportedError, "Device does not support full subgroups in compute shaders");

        // The required-size path must be usable for every stage in this case.
        if ((subgroupSizeControlProperties.requiredSubgroupSizeStages & caseDef.shaderStage) != caseDef.shaderStage)
            TCU_THROW(NotSupportedError, "Required subgroup size is not supported for shader stage");
    }

    // Consumed later when generating shader sources (gl_PointSize handling).
    *caseDef.geometryPointSizeSupported = subgroups::isTessellationAndGeometryPointSizeSupported(context);

#ifndef CTS_USES_VULKANSC
    if (isAllRayTracingStages(caseDef.shaderStage))
    {
        context.requireDeviceFunctionality("VK_KHR_ray_tracing_pipeline");
    }
    else if (isAllMeshShadingStages(caseDef.shaderStage))
    {
        context.requireDeviceCoreFeature(DEVICE_CORE_FEATURE_VERTEX_PIPELINE_STORES_AND_ATOMICS);
        context.requireDeviceFunctionality("VK_EXT_mesh_shader");

        // Task shaders are optional within VK_EXT_mesh_shader.
        if ((caseDef.shaderStage & VK_SHADER_STAGE_TASK_BIT_EXT) != 0u)
        {
            const auto &features = context.getMeshShaderFeaturesEXT();
            if (!features.taskShader)
                TCU_THROW(NotSupportedError, "Task shaders not supported");
        }
    }
#endif // CTS_USES_VULKANSC

    subgroups::supportedCheckShader(context, caseDef.shaderStage);
}
292
noSSBOtest(Context & context,const CaseDefinition caseDef)293 TestStatus noSSBOtest(Context &context, const CaseDefinition caseDef)
294 {
295 subgroups::SSBOData inputData;
296 inputData.format = caseDef.format;
297 inputData.layout = subgroups::SSBOData::LayoutStd140;
298 inputData.numElements = subgroups::maxSupportedSubgroupSize();
299 inputData.initializeType = subgroups::SSBOData::InitializeNonZero;
300 inputData.bindingType = subgroups::SSBOData::BindingUBO;
301
302 switch (caseDef.shaderStage)
303 {
304 case VK_SHADER_STAGE_VERTEX_BIT:
305 return subgroups::makeVertexFrameBufferTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL,
306 checkVertexPipelineStages);
307 case VK_SHADER_STAGE_GEOMETRY_BIT:
308 return subgroups::makeGeometryFrameBufferTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL,
309 checkVertexPipelineStages);
310 case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
311 return subgroups::makeTessellationEvaluationFrameBufferTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL,
312 checkVertexPipelineStages, caseDef.shaderStage);
313 case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
314 return subgroups::makeTessellationEvaluationFrameBufferTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL,
315 checkVertexPipelineStages, caseDef.shaderStage);
316 default:
317 TCU_THROW(InternalError, "Unhandled shader stage");
318 }
319 }
320
// Runs the SSBO-based variant: dispatches to compute/mesh, all-graphics, or
// ray-tracing helpers depending on the case's stage mask. For cases with
// requiredSubgroupSize set, loops over every power-of-two size in the
// device's supported range and fails on the first non-passing size.
TestStatus test(Context &context, const CaseDefinition caseDef)
{
    const bool isCompute = isAllComputeStages(caseDef.shaderStage);
#ifndef CTS_USES_VULKANSC
    const bool isMesh = isAllMeshShadingStages(caseDef.shaderStage);
#else
    const bool isMesh = false;
#endif // CTS_USES_VULKANSC
    DE_ASSERT(!(isCompute && isMesh));

    if (isCompute || isMesh)
    {
        // Vulkan SC exposes the same properties through the EXT-suffixed struct.
#ifndef CTS_USES_VULKANSC
        const VkPhysicalDeviceSubgroupSizeControlProperties &subgroupSizeControlProperties =
            context.getSubgroupSizeControlProperties();
#else
        const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT &subgroupSizeControlProperties =
            context.getSubgroupSizeControlPropertiesEXT();
#endif // CTS_USES_VULKANSC
        TestLog &log = context.getTestContext().getLog();
        const subgroups::SSBOData inputData{
            subgroups::SSBOData::InitializeNonZero, // InputDataInitializeType initializeType;
            subgroups::SSBOData::LayoutStd430,      // InputDataLayoutType layout;
            caseDef.format,                         // vk::VkFormat format;
            subgroups::maxSupportedSubgroupSize(),  // vk::VkDeviceSize numElements;
        };

        // Simple path: let the implementation pick the subgroup size.
        if (caseDef.requiredSubgroupSize == false)
        {
            if (isCompute)
                return subgroups::makeComputeTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL,
                                                  checkComputeOrMesh);
            else
                return subgroups::makeMeshTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkComputeOrMesh);
        }

        log << TestLog::Message << "Testing required subgroup size range ["
            << subgroupSizeControlProperties.minSubgroupSize << ", " << subgroupSizeControlProperties.maxSubgroupSize
            << "]" << TestLog::EndMessage;

        // According to the spec, requiredSubgroupSize must be a power-of-two integer.
        for (uint32_t size = subgroupSizeControlProperties.minSubgroupSize;
             size <= subgroupSizeControlProperties.maxSubgroupSize; size *= 2)
        {
            TestStatus result(QP_TEST_RESULT_INTERNAL_ERROR, "Internal Error");

            if (isCompute)
                result = subgroups::makeComputeTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL,
                                                    checkComputeOrMesh, size);
            else
                result = subgroups::makeMeshTest(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL,
                                                 checkComputeOrMesh, size);

            // Stop at the first failing size and report which size failed.
            if (result.getCode() != QP_TEST_RESULT_PASS)
            {
                log << TestLog::Message << "subgroupSize " << size << " failed" << TestLog::EndMessage;
                return result;
            }
        }

        return TestStatus::pass("OK");
    }
    else if (isAllGraphicsStages(caseDef.shaderStage))
    {
        const VkShaderStageFlags stages = subgroups::getPossibleGraphicsSubgroupStages(context, caseDef.shaderStage);
        subgroups::SSBOData inputData;

        inputData.format = caseDef.format;
        inputData.layout = subgroups::SSBOData::LayoutStd430;
        inputData.numElements = subgroups::maxSupportedSubgroupSize();
        inputData.initializeType = subgroups::SSBOData::InitializeNonZero;
        inputData.binding = 4u;
        inputData.stages = stages;

        return subgroups::allStages(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL, checkVertexPipelineStages,
                                    stages);
    }
#ifndef CTS_USES_VULKANSC
    else if (isAllRayTracingStages(caseDef.shaderStage))
    {
        const VkShaderStageFlags stages = subgroups::getPossibleRayTracingSubgroupStages(context, caseDef.shaderStage);
        const subgroups::SSBOData inputData = {
            subgroups::SSBOData::InitializeNonZero, // InputDataInitializeType initializeType;
            subgroups::SSBOData::LayoutStd430,      // InputDataLayoutType layout;
            caseDef.format,                         // vk::VkFormat format;
            subgroups::maxSupportedSubgroupSize(),  // vk::VkDeviceSize numElements;
            subgroups::SSBOData::BindingSSBO,       // bool isImage;
            6u,                                     // uint32_t binding;
            stages,                                 // vk::VkShaderStageFlags stages;
        };

        return subgroups::allRayTracingStages(context, VK_FORMAT_R32_UINT, &inputData, 1, DE_NULL,
                                              checkVertexPipelineStages, stages);
    }
#endif // CTS_USES_VULKANSC
    else
        TCU_THROW(InternalError, "Unknown stage or invalid stage set");
}
419 } // namespace
420
421 namespace vkt
422 {
423 namespace subgroups
424 {
// Builds the "quad" test group: for every (format, op type) combination it
// creates compute cases (with and without required subgroup size), mesh/task
// cases, an all-graphics case, per-stage framebuffer cases, and (non-SC only)
// ray-tracing cases, organized into per-category child groups.
TestCaseGroup *createSubgroupsQuadTests(TestContext &testCtx)
{
    de::MovePtr<TestCaseGroup> group(new TestCaseGroup(testCtx, "quad"));
    de::MovePtr<TestCaseGroup> graphicGroup(new TestCaseGroup(testCtx, "graphics"));
    de::MovePtr<TestCaseGroup> computeGroup(new TestCaseGroup(testCtx, "compute"));
    de::MovePtr<TestCaseGroup> framebufferGroup(new TestCaseGroup(testCtx, "framebuffer"));
#ifndef CTS_USES_VULKANSC
    de::MovePtr<TestCaseGroup> raytracingGroup(new TestCaseGroup(testCtx, "ray_tracing"));
    de::MovePtr<TestCaseGroup> meshGroup(new TestCaseGroup(testCtx, "mesh"));
#endif // CTS_USES_VULKANSC
    // Stages that get individual framebuffer (no-SSBO) cases.
    const VkShaderStageFlags fbStages[] = {
        VK_SHADER_STAGE_VERTEX_BIT,
        VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
        VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
        VK_SHADER_STAGE_GEOMETRY_BIT,
    };
#ifndef CTS_USES_VULKANSC
    const VkShaderStageFlags meshStages[] = {
        VK_SHADER_STAGE_MESH_BIT_EXT,
        VK_SHADER_STAGE_TASK_BIT_EXT,
    };
#endif // CTS_USES_VULKANSC
    // Iterated to produce cases with and without a required subgroup size.
    const bool boolValues[] = {false, true};

    {
        const vector<VkFormat> formats = subgroups::getAllFormats();

        for (size_t formatIndex = 0; formatIndex < formats.size(); ++formatIndex)
        {
            const VkFormat format = formats[formatIndex];
            const string formatName = subgroups::getFormatNameForGLSL(format);
            // 8/16-bit formats need the corresponding UBO storage feature for
            // the framebuffer cases (input data lives in a UBO there).
            const bool needs8BitUBOStorage = isFormat8bitTy(format);
            const bool needs16BitUBOStorage = isFormat16BitTy(format);

            for (int opTypeIndex = 0; opTypeIndex < OPTYPE_LAST; ++opTypeIndex)
            {
                const OpType opType = static_cast<OpType>(opTypeIndex);
                const string name = getOpTypeCaseName(opType) + "_" + formatName;

                // Compute cases: default and "_requiredsubgroupsize" variants.
                for (size_t groupSizeNdx = 0; groupSizeNdx < DE_LENGTH_OF_ARRAY(boolValues); ++groupSizeNdx)
                {
                    const bool requiredSubgroupSize = boolValues[groupSizeNdx];
                    const string testNameSuffix = requiredSubgroupSize ? "_requiredsubgroupsize" : "";
                    const string testName = name + testNameSuffix;
                    const CaseDefinition caseDef = {
                        opType,                        // OpType opType;
                        VK_SHADER_STAGE_COMPUTE_BIT,   // VkShaderStageFlags shaderStage;
                        format,                        // VkFormat format;
                        de::SharedPtr<bool>(new bool), // de::SharedPtr<bool> geometryPointSizeSupported;
                        requiredSubgroupSize,          // bool requiredSubgroupSize;
                        false,                         // bool requires8BitUniformBuffer;
                        false                          // bool requires16BitUniformBuffer;
                    };

                    addFunctionCaseWithPrograms(computeGroup.get(), testName, supportedCheck, initPrograms, test,
                                                caseDef);
                }

#ifndef CTS_USES_VULKANSC
                // Mesh and task cases, also in both subgroup-size variants.
                for (size_t groupSizeNdx = 0; groupSizeNdx < DE_LENGTH_OF_ARRAY(boolValues); ++groupSizeNdx)
                {
                    for (const auto &stage : meshStages)
                    {
                        const bool requiredSubgroupSize = boolValues[groupSizeNdx];
                        const string testNameSuffix = requiredSubgroupSize ? "_requiredsubgroupsize" : "";
                        const string testName = name + testNameSuffix + "_" + getShaderStageName(stage);
                        const CaseDefinition caseDef = {
                            opType,                        // OpType opType;
                            stage,                         // VkShaderStageFlags shaderStage;
                            format,                        // VkFormat format;
                            de::SharedPtr<bool>(new bool), // de::SharedPtr<bool> geometryPointSizeSupported;
                            requiredSubgroupSize,          // bool requiredSubgroupSize;
                            false,                         // bool requires8BitUniformBuffer;
                            false                          // bool requires16BitUniformBuffer;
                        };

                        addFunctionCaseWithPrograms(meshGroup.get(), testName, supportedCheck, initPrograms, test,
                                                    caseDef);
                    }
                }
#endif // CTS_USES_VULKANSC

                // One case covering all graphics stages together.
                {
                    const CaseDefinition caseDef = {
                        opType,                        // OpType opType;
                        VK_SHADER_STAGE_ALL_GRAPHICS,  // VkShaderStageFlags shaderStage;
                        format,                        // VkFormat format;
                        de::SharedPtr<bool>(new bool), // de::SharedPtr<bool> geometryPointSizeSupported;
                        false,                         // bool requiredSubgroupSize;
                        false,                         // bool requires8BitUniformBuffer;
                        false                          // bool requires16BitUniformBuffer;
                    };

                    addFunctionCaseWithPrograms(graphicGroup.get(), name, supportedCheck, initPrograms, test, caseDef);
                }

                // Per-stage framebuffer cases (UBO-fed, no SSBO).
                for (int stageIndex = 0; stageIndex < DE_LENGTH_OF_ARRAY(fbStages); ++stageIndex)
                {
                    const CaseDefinition caseDef = {
                        opType,                        // OpType opType;
                        fbStages[stageIndex],          // VkShaderStageFlags shaderStage;
                        format,                        // VkFormat format;
                        de::SharedPtr<bool>(new bool), // de::SharedPtr<bool> geometryPointSizeSupported;
                        false,                         // bool requiredSubgroupSize;
                        bool(needs8BitUBOStorage),     // bool requires8BitUniformBuffer;
                        bool(needs16BitUBOStorage)     // bool requires16BitUniformBuffer;
                    };
                    const string testName = name + "_" + getShaderStageName(caseDef.shaderStage);

                    addFunctionCaseWithPrograms(framebufferGroup.get(), testName, supportedCheck,
                                                initFrameBufferPrograms, noSSBOtest, caseDef);
                }
            }
        }
    }

#ifndef CTS_USES_VULKANSC
    // Ray-tracing cases use their own (smaller) format list.
    {
        const vector<VkFormat> formats = subgroups::getAllRayTracingFormats();

        for (size_t formatIndex = 0; formatIndex < formats.size(); ++formatIndex)
        {
            const VkFormat format = formats[formatIndex];
            const string formatName = subgroups::getFormatNameForGLSL(format);

            for (int opTypeIndex = 0; opTypeIndex < OPTYPE_LAST; ++opTypeIndex)
            {
                const OpType opType = static_cast<OpType>(opTypeIndex);
                const string testName = getOpTypeCaseName(opType) + "_" + formatName;
                const CaseDefinition caseDef = {
                    opType,                        // OpType opType;
                    SHADER_STAGE_ALL_RAY_TRACING,  // VkShaderStageFlags shaderStage;
                    format,                        // VkFormat format;
                    de::SharedPtr<bool>(new bool), // de::SharedPtr<bool> geometryPointSizeSupported;
                    false,                         // bool requiredSubgroupSize;
                    false,                         // bool requires8BitUniformBuffer;
                    false                          // bool requires16BitUniformBuffer;
                };

                addFunctionCaseWithPrograms(raytracingGroup.get(), testName, supportedCheck, initPrograms, test,
                                            caseDef);
            }
        }
    }
#endif // CTS_USES_VULKANSC

    group->addChild(graphicGroup.release());
    group->addChild(computeGroup.release());
    group->addChild(framebufferGroup.release());
#ifndef CTS_USES_VULKANSC
    group->addChild(raytracingGroup.release());
    group->addChild(meshGroup.release());
#endif // CTS_USES_VULKANSC

    return group.release();
}
581 } // namespace subgroups
582 } // namespace vkt
583