1 /*------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
4 *
5 * Copyright (c) 2021 The Khronos Group Inc.
6 * Copyright (c) 2021 Valve Corporation.
7 * Copyright (c) 2023 LunarG, Inc.
8 * Copyright (c) 2023 Nintendo
9 *
10 * Licensed under the Apache License, Version 2.0 (the "License");
11 * you may not use this file except in compliance with the License.
12 * You may obtain a copy of the License at
13 *
14 * http://www.apache.org/licenses/LICENSE-2.0
15 *
16 * Unless required by applicable law or agreed to in writing, software
17 * distributed under the License is distributed on an "AS IS" BASIS,
18 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
19 * See the License for the specific language governing permissions and
20 * limitations under the License.
21 *
22 *//*!
23 * \file
24  * \brief Dynamic state tests mixed with compute and transfer operations.
25 *//*--------------------------------------------------------------------*/
26 #include "vktDynamicStateComputeTests.hpp"
27 #include "vktCustomInstancesDevices.hpp"
28
29 #include "vkBufferWithMemory.hpp"
30 #include "vkObjUtil.hpp"
31 #include "vkCmdUtil.hpp"
32 #include "vkBarrierUtil.hpp"
33 #include "vkBuilderUtil.hpp"
34 #include "vkTypeUtil.hpp"
35
36 #include "tcuCommandLine.hpp"
37 #include "tcuVector.hpp"
38
39 #include <vector>
40 #include <string>
41 #include <functional>
42 #include <map>
43 #include <sstream>
44 #include <cstring>
45 #include <iterator>
46 #include <numeric>
47 #include <memory>
48
49 namespace vkt
50 {
51 namespace DynamicState
52 {
53
54 namespace
55 {
56
57 using namespace vk;
58
59 // Auxiliary objects needed to set a given dynamic state, which must outlive the state-setting call. Empty by default.
60 struct DynamicStateData
61 {
62     virtual ~DynamicStateData()
63 {
64 }
65 };
66
67 // A vertex buffer and graphics pipeline are needed for vkCmdBindVertexBuffers2EXT().
68 struct BindVertexBuffersData : public DynamicStateData
69 {
70 private:
71 using BufferPtr = de::MovePtr<BufferWithMemory>;
72 using RenderPassPtr = RenderPassWrapper;
73 using LayoutPtr = Move<VkPipelineLayout>;
74 using ModulePtr = Move<VkShaderModule>;
75 using PipelinePtr = Move<VkPipeline>;
76
77 static constexpr uint32_t kWidth = 16u;
78 static constexpr uint32_t kHeight = 16u;
79
80     VkExtent3D getExtent(void)
81 {
82 return makeExtent3D(kWidth, kHeight, 1u);
83 }
84
85 public:
86     BindVertexBuffersData(Context &ctx, VkDevice device, PipelineConstructionType pipelineConstructionType)
87 : m_vertexBuffer()
88 , m_dataSize(0u)
89 , m_vertexBufferSize(0ull)
90 , m_renderPass()
91 , m_pipelineLayout()
92 , m_vertexShader()
93 , m_graphicsPipeline()
94 {
95 const auto &vki = ctx.getInstanceInterface();
96 const auto phyDev = ctx.getPhysicalDevice();
97 const auto &vkd = ctx.getDeviceInterface();
98 auto &alloc = ctx.getDefaultAllocator();
99
100 // Vertex buffer.
101 tcu::Vec4 vertex(0.f, 0.f, 0.f, 1.f);
102 m_dataSize = sizeof(vertex);
103 m_vertexBufferSize = de::roundUp(static_cast<VkDeviceSize>(m_dataSize),
104 getPhysicalDeviceProperties(vki, phyDev).limits.nonCoherentAtomSize);
105 const auto bufferInfo = makeBufferCreateInfo(m_vertexBufferSize, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);
106
107 m_vertexBuffer =
108 BufferPtr(new BufferWithMemory(vkd, device, alloc, bufferInfo, MemoryRequirement::HostVisible));
109 auto &bufferAlloc = m_vertexBuffer->getAllocation();
110
111 deMemcpy(bufferAlloc.getHostPtr(), &vertex, m_dataSize);
112 flushAlloc(vkd, device, bufferAlloc);
113
114 // Empty render pass.
115 m_renderPass = RenderPassWrapper(pipelineConstructionType, vkd, device);
116
117 // Empty pipeline layout.
118 m_pipelineLayout = makePipelineLayout(vkd, device);
119
120 // Passthrough vertex shader.
121 m_vertexShader = createShaderModule(vkd, device, ctx.getBinaryCollection().get("vert"), 0u);
122
123 const auto extent = getExtent();
124 const std::vector<VkViewport> viewports(1, makeViewport(extent));
125 const std::vector<VkRect2D> scissors(1, makeRect2D(extent));
126 const VkDynamicState state = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT;
127
128 const VkPipelineDynamicStateCreateInfo dynamicStateInfo = {
129 VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, // VkStructureType sType;
130 nullptr, // const void* pNext;
131 0u, // VkPipelineDynamicStateCreateFlags flags;
132 1u, // uint32_t dynamicStateCount;
133 &state, // const VkDynamicState* pDynamicStates;
134 };
135
136 // Graphics pipeline.
137 m_graphicsPipeline =
138 makeGraphicsPipeline(vkd, device, m_pipelineLayout.get(), m_vertexShader.get(), DE_NULL, DE_NULL, DE_NULL,
139 DE_NULL, m_renderPass.get(), viewports, scissors, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
140 0u, 0u, nullptr, nullptr, nullptr, nullptr, nullptr, &dynamicStateInfo);
141 }
142
143     const BufferWithMemory *getVertexBuffer() const
144 {
145 return m_vertexBuffer.get();
146 }
147
148     size_t getDataSize() const
149 {
150 return m_dataSize;
151 }
152
153     VkPipeline getPipeline() const
154 {
155 return m_graphicsPipeline.get();
156 }
157
158     virtual ~BindVertexBuffersData()
159 {
160 }
161
162 private:
163 BufferPtr m_vertexBuffer;
164 size_t m_dataSize;
165 VkDeviceSize m_vertexBufferSize;
166 RenderPassPtr m_renderPass;
167 LayoutPtr m_pipelineLayout;
168 ModulePtr m_vertexShader;
169 PipelinePtr m_graphicsPipeline;
170 };
171
172 // Function that records a state-setting command in the given command buffer.
173 using RecordStateFunction = std::function<void(const DeviceInterface *, VkCommandBuffer, const DynamicStateData *)>;
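// Each dynamic state in this file pairs one of these recorder functions with optional auxiliary
// DynamicStateData that must stay alive while the recorded commands are pending. Illustrative use
// (not literal test code):
//
//   const auto &info = getDynamicStateInfo(VK_DYNAMIC_STATE_VIEWPORT);
//   info.recorder(&vkd, cmdBuffer, nullptr); // the viewport state needs no auxiliary data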
174
175 // State-setting functions
176 void setViewport(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
177 {
178 const VkViewport viewport = {
179 0.0f, // float x;
180 0.0f, // float y;
181 1.0f, // float width;
182 1.0f, // float height;
183 0.0f, // float minDepth;
184 1.0f, // float maxDepth;
185 };
186 vkd->cmdSetViewport(cmdBuffer, 0u, 1u, &viewport);
187 }
188
189 void setScissor(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
190 {
191 const VkRect2D scissor = {
192 {0, 0}, // VkOffset2D offset;
193 {1u, 1u}, // VkExtent2D extent;
194 };
195 vkd->cmdSetScissor(cmdBuffer, 0u, 1u, &scissor);
196 }
197
198 void setLineWidth(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
199 {
200 vkd->cmdSetLineWidth(cmdBuffer, 1.0f);
201 }
202
203 void setDepthBias(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
204 {
205 vkd->cmdSetDepthBias(cmdBuffer, 0.0f, 0.0f, 0.0f);
206 }
207
208 void setBlendConstants(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
209 {
210 const float blendConstants[4] = {0.0f, 0.0f, 0.0f, 0.0f};
211 vkd->cmdSetBlendConstants(cmdBuffer, blendConstants);
212 }
213
214 void setDepthBounds(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
215 {
216 vkd->cmdSetDepthBounds(cmdBuffer, 0.0f, 1.0f);
217 }
218
219 void setStencilCompareMask(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
220 {
221 vkd->cmdSetStencilCompareMask(cmdBuffer, VK_STENCIL_FACE_FRONT_AND_BACK, 0xFFu);
222 }
223
224 void setStencilWriteMask(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
225 {
226 vkd->cmdSetStencilWriteMask(cmdBuffer, VK_STENCIL_FACE_FRONT_AND_BACK, 0xFFu);
227 }
228
229 void setStencilReference(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
230 {
231 vkd->cmdSetStencilReference(cmdBuffer, VK_STENCIL_FACE_FRONT_AND_BACK, 0xFFu);
232 }
233
234 void setDiscardRectangle(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
235 {
236 const VkRect2D rectangle = {
237 {0, 0}, // VkOffset2D offset;
238 {1u, 1u}, // VkExtent2D extent;
239 };
240 vkd->cmdSetDiscardRectangleEXT(cmdBuffer, 0u, 1u, &rectangle);
241 }
242
243 void setSampleLocations(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
244 {
245 const VkSampleLocationEXT locations[] = {
246 {0.5f, 0.5f},
247 {0.5f, 1.5f},
248 {1.5f, 0.5f},
249 {1.5f, 1.5f},
250 };
251 const VkSampleLocationsInfoEXT info = {
252 VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, // VkStructureType sType;
253 nullptr, // const void* pNext;
254 VK_SAMPLE_COUNT_4_BIT, // VkSampleCountFlagBits sampleLocationsPerPixel;
255 {1u, 1u}, // VkExtent2D sampleLocationGridSize;
256 4u, // uint32_t sampleLocationsCount;
257 locations, // const VkSampleLocationEXT* pSampleLocations;
258 };
259 vkd->cmdSetSampleLocationsEXT(cmdBuffer, &info);
260 }
261
262 #ifndef CTS_USES_VULKANSC
263 void setRTPipelineStackSize(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
264 {
265 vkd->cmdSetRayTracingPipelineStackSizeKHR(cmdBuffer, 4096u);
266 }
267 #endif // CTS_USES_VULKANSC
268
269 void setFragmentShadingRate(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
270 {
271 const VkExtent2D fragmentSize = {1u, 1u};
272 const VkFragmentShadingRateCombinerOpKHR combinerOps[2] = {
273 VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR,
274 VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR,
275 };
276 vkd->cmdSetFragmentShadingRateKHR(cmdBuffer, &fragmentSize, combinerOps);
277 }
278
279 void setLineStipple(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
280 {
281 vkd->cmdSetLineStippleKHR(cmdBuffer, 1u, 1u);
282 }
283
284 void setCullMode(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
285 {
286 #ifndef CTS_USES_VULKANSC
287 vkd->cmdSetCullMode(cmdBuffer, VK_CULL_MODE_FRONT_AND_BACK);
288 #else
289 vkd->cmdSetCullModeEXT(cmdBuffer, VK_CULL_MODE_FRONT_AND_BACK);
290 #endif // CTS_USES_VULKANSC
291 }
292
293 void setFrontFace(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
294 {
295 #ifndef CTS_USES_VULKANSC
296 vkd->cmdSetFrontFace(cmdBuffer, VK_FRONT_FACE_COUNTER_CLOCKWISE);
297 #else
298 vkd->cmdSetFrontFaceEXT(cmdBuffer, VK_FRONT_FACE_COUNTER_CLOCKWISE);
299 #endif // CTS_USES_VULKANSC
300 }
301
302 void setPrimitiveTopology(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
303 {
304 #ifndef CTS_USES_VULKANSC
305 vkd->cmdSetPrimitiveTopology(cmdBuffer, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP);
306 #else
307 vkd->cmdSetPrimitiveTopologyEXT(cmdBuffer, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP);
308 #endif // CTS_USES_VULKANSC
309 }
310
311 void setViewportWithCount(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
312 {
313 const VkViewport viewport = {
314 0.0f, // float x;
315 0.0f, // float y;
316 1.0f, // float width;
317 1.0f, // float height;
318 0.0f, // float minDepth;
319 1.0f, // float maxDepth;
320 };
321 #ifndef CTS_USES_VULKANSC
322 vkd->cmdSetViewportWithCount(cmdBuffer, 1u, &viewport);
323 #else
324 vkd->cmdSetViewportWithCountEXT(cmdBuffer, 1u, &viewport);
325 #endif // CTS_USES_VULKANSC
326 }
327
328 void setScissorWithCount(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
329 {
330 const VkRect2D scissor = {
331 {0, 0}, // VkOffset2D offset;
332 {1u, 1u}, // VkExtent2D extent;
333 };
334 #ifndef CTS_USES_VULKANSC
335 vkd->cmdSetScissorWithCount(cmdBuffer, 1u, &scissor);
336 #else
337 vkd->cmdSetScissorWithCountEXT(cmdBuffer, 1u, &scissor);
338 #endif // CTS_USES_VULKANSC
339 }
340
341 void bindVertexBuffers(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *data)
342 {
343 const auto bindData = dynamic_cast<const BindVertexBuffersData *>(data);
344 DE_ASSERT(bindData != nullptr);
345 const auto vertexBuffer = bindData->getVertexBuffer();
346 const auto dataSize = static_cast<VkDeviceSize>(bindData->getDataSize());
347 const auto bufferOffset = vertexBuffer->getAllocation().getOffset();
348 const auto stride = static_cast<VkDeviceSize>(0);
349 const auto pipeline = bindData->getPipeline();
350
351 vkd->cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
352 #ifndef CTS_USES_VULKANSC
353 vkd->cmdBindVertexBuffers2(cmdBuffer, 0u, 1u, &vertexBuffer->get(), &bufferOffset, &dataSize, &stride);
354 #else
355 vkd->cmdBindVertexBuffers2EXT(cmdBuffer, 0u, 1u, &vertexBuffer->get(), &bufferOffset, &dataSize, &stride);
356 #endif // CTS_USES_VULKANSC
357 }
358
359 void setDepthTestEnable(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
360 {
361 #ifndef CTS_USES_VULKANSC
362 vkd->cmdSetDepthTestEnable(cmdBuffer, VK_TRUE);
363 #else
364 vkd->cmdSetDepthTestEnableEXT(cmdBuffer, VK_TRUE);
365 #endif // CTS_USES_VULKANSC
366 }
367
368 void setDepthWriteEnable(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
369 {
370 #ifndef CTS_USES_VULKANSC
371 vkd->cmdSetDepthWriteEnable(cmdBuffer, VK_TRUE);
372 #else
373 vkd->cmdSetDepthWriteEnableEXT(cmdBuffer, VK_TRUE);
374 #endif // CTS_USES_VULKANSC
375 }
376
377 void setDepthCompareOp(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
378 {
379 #ifndef CTS_USES_VULKANSC
380 vkd->cmdSetDepthCompareOp(cmdBuffer, VK_COMPARE_OP_LESS);
381 #else
382 vkd->cmdSetDepthCompareOpEXT(cmdBuffer, VK_COMPARE_OP_LESS);
383 #endif // CTS_USES_VULKANSC
384 }
385
386 void setDepthBoundsTestEnable(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
387 {
388 #ifndef CTS_USES_VULKANSC
389 vkd->cmdSetDepthBoundsTestEnable(cmdBuffer, VK_TRUE);
390 #else
391 vkd->cmdSetDepthBoundsTestEnableEXT(cmdBuffer, VK_TRUE);
392 #endif // CTS_USES_VULKANSC
393 }
394
395 void setStencilTestEnable(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
396 {
397 #ifndef CTS_USES_VULKANSC
398 vkd->cmdSetStencilTestEnable(cmdBuffer, VK_TRUE);
399 #else
400 vkd->cmdSetStencilTestEnableEXT(cmdBuffer, VK_TRUE);
401 #endif // CTS_USES_VULKANSC
402 }
403
404 void setStencilOp(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
405 {
406 #ifndef CTS_USES_VULKANSC
407 vkd->cmdSetStencilOp(cmdBuffer, VK_STENCIL_FACE_FRONT_AND_BACK, VK_STENCIL_OP_ZERO,
408 VK_STENCIL_OP_INCREMENT_AND_CLAMP, VK_STENCIL_OP_KEEP, VK_COMPARE_OP_ALWAYS);
409 #else
410 vkd->cmdSetStencilOpEXT(cmdBuffer, VK_STENCIL_FACE_FRONT_AND_BACK, VK_STENCIL_OP_ZERO,
411 VK_STENCIL_OP_INCREMENT_AND_CLAMP, VK_STENCIL_OP_KEEP, VK_COMPARE_OP_ALWAYS);
412 #endif // CTS_USES_VULKANSC
413 }
414
415 #ifndef CTS_USES_VULKANSC
416
417 void setViewportWScaling(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
418 {
419 const VkViewportWScalingNV viewport = {
420 1.0f, // float xcoeff;
421 1.0f, // float ycoeff;
422 };
423 vkd->cmdSetViewportWScalingNV(cmdBuffer, 0u, 1u, &viewport);
424 }
425
426 void setViewportShadingRatePalette(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
427 {
428 const VkShadingRatePaletteEntryNV entry = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV;
429 const VkShadingRatePaletteNV palette = {
430 1u, // uint32_t shadingRatePaletteEntryCount;
431 &entry, // const VkShadingRatePaletteEntryNV* pShadingRatePaletteEntries;
432 };
433 vkd->cmdSetViewportShadingRatePaletteNV(cmdBuffer, 0u, 1u, &palette);
434 }
435
436 void setCoarseSamplingOrder(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
437 {
438 const VkCoarseSampleLocationNV locations[2] = {
439 {
440 0u, // uint32_t pixelX;
441 0u, // uint32_t pixelY;
442 0u, // uint32_t sample;
443 },
444 {
445 0u, // uint32_t pixelX;
446 1u, // uint32_t pixelY;
447 0u, // uint32_t sample;
448 },
449 };
450 const VkCoarseSampleOrderCustomNV order = {
451 VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, // VkShadingRatePaletteEntryNV shadingRate;
452 1u, // uint32_t sampleCount;
453 2u, // uint32_t sampleLocationCount;
454 locations // const VkCoarseSampleLocationNV* pSampleLocations;
455 };
456 vkd->cmdSetCoarseSampleOrderNV(cmdBuffer, VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, 1u, &order);
457 }
458
459 void setExclusiveScissor(const DeviceInterface *vkd, VkCommandBuffer cmdBuffer, const DynamicStateData *)
460 {
461 const VkRect2D scissor = {
462 {0, 0}, // VkOffset2D offset;
463 {1u, 1u}, // VkExtent2D extent;
464 };
465 vkd->cmdSetExclusiveScissorNV(cmdBuffer, 0u, 1u, &scissor);
466 }
467
468 #endif // CTS_USES_VULKANSC
469
470 const VkDynamicState dynamicStateList[] = {
471 VK_DYNAMIC_STATE_VIEWPORT,
472 VK_DYNAMIC_STATE_SCISSOR,
473 VK_DYNAMIC_STATE_LINE_WIDTH,
474 VK_DYNAMIC_STATE_DEPTH_BIAS,
475 VK_DYNAMIC_STATE_BLEND_CONSTANTS,
476 VK_DYNAMIC_STATE_DEPTH_BOUNDS,
477 VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
478 VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
479 VK_DYNAMIC_STATE_STENCIL_REFERENCE,
480 VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT,
481 VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT,
482 #ifndef CTS_USES_VULKANSC
483 VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR,
484 #endif // CTS_USES_VULKANSC
485 VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR,
486 VK_DYNAMIC_STATE_LINE_STIPPLE_EXT,
487 VK_DYNAMIC_STATE_CULL_MODE_EXT,
488 VK_DYNAMIC_STATE_FRONT_FACE_EXT,
489 VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT,
490 VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT,
491 VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT,
492 VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT,
493 VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT,
494 VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT,
495 VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT,
496 VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT,
497 VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT,
498 VK_DYNAMIC_STATE_STENCIL_OP_EXT,
499 #ifndef CTS_USES_VULKANSC
500 VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV,
501 VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV,
502 VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV,
503 VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV,
504 #endif // CTS_USES_VULKANSC
505 };
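// Every state listed above must have a matching entry in getDynamicStateInfo() below; a missing entry
// would trip the DE_ASSERT in that function. A debug-build sanity check could look roughly like:
//
//   for (const auto state : dynamicStateList)
//       (void)getDynamicStateInfo(state);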
506
507 // Information about a dynamic state.
508 struct StateInfo
509 {
510 std::vector<std::string> requirements; // List of required functionalities.
511 RecordStateFunction recorder; // Function that records the state to the command buffer being used.
512 };
513
514 // Returns the state info for a given dynamic state.
515 const StateInfo &getDynamicStateInfo(VkDynamicState state)
516 {
517 // Maps a given state to its state info structure.
518 using StateInfoMap = std::map<VkDynamicState, StateInfo>;
519
520 static const StateInfoMap result = {
521 {VK_DYNAMIC_STATE_VIEWPORT, {{}, setViewport}},
522 {VK_DYNAMIC_STATE_SCISSOR, {{}, setScissor}},
523 {VK_DYNAMIC_STATE_LINE_WIDTH, {{}, setLineWidth}},
524 {VK_DYNAMIC_STATE_DEPTH_BIAS, {{}, setDepthBias}},
525 {VK_DYNAMIC_STATE_BLEND_CONSTANTS, {{}, setBlendConstants}},
526 {VK_DYNAMIC_STATE_DEPTH_BOUNDS, {{}, setDepthBounds}},
527 {VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, {{}, setStencilCompareMask}},
528 {VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, {{}, setStencilWriteMask}},
529 {VK_DYNAMIC_STATE_STENCIL_REFERENCE, {{}, setStencilReference}},
530 {VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, {{"VK_EXT_discard_rectangles"}, setDiscardRectangle}},
531 {VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, {{"VK_EXT_sample_locations"}, setSampleLocations}},
532 #ifndef CTS_USES_VULKANSC
533 {VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR,
534 {{"VK_KHR_ray_tracing_pipeline"}, setRTPipelineStatckSize}},
535 #endif // CTS_USES_VULKANSC
536         {VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR, {{"VK_KHR_fragment_shading_rate"}, setFragmentShadingRate}},
537 {VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, {{"VK_KHR_or_EXT_line_rasterization"}, setLineStipple}},
538 {VK_DYNAMIC_STATE_CULL_MODE_EXT, {{"VK_EXT_extended_dynamic_state"}, setCullMode}},
539 {VK_DYNAMIC_STATE_FRONT_FACE_EXT, {{"VK_EXT_extended_dynamic_state"}, setFrontFace}},
540 {VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT, {{"VK_EXT_extended_dynamic_state"}, setPrimitiveTopology}},
541 {VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT, {{"VK_EXT_extended_dynamic_state"}, setViewportWithCount}},
542 {VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT, {{"VK_EXT_extended_dynamic_state"}, setScissorWithCount}},
543 {VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT, {{"VK_EXT_extended_dynamic_state"}, bindVertexBuffers}},
544 {VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT, {{"VK_EXT_extended_dynamic_state"}, setDepthTestEnable}},
545 {VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT, {{"VK_EXT_extended_dynamic_state"}, setDepthWriteEnable}},
546 {VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT, {{"VK_EXT_extended_dynamic_state"}, setDepthCompareOp}},
547 {VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT, {{"VK_EXT_extended_dynamic_state"}, setDepthBoundsTestEnable}},
548 {VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT, {{"VK_EXT_extended_dynamic_state"}, setStencilTestEnable}},
549 {VK_DYNAMIC_STATE_STENCIL_OP_EXT, {{"VK_EXT_extended_dynamic_state"}, setStencilOp}},
550 #ifndef CTS_USES_VULKANSC
551 {VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, {{"VK_NV_clip_space_w_scaling"}, setViewportWScaling}},
552 {VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV,
553 {{"VK_NV_shading_rate_image"}, setViewportShadingRatePalette}},
554 {VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV, {{"VK_NV_shading_rate_image"}, setCoarseSamplingOrder}},
555 {VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, {{"VK_NV_scissor_exclusive"}, setExclusiveScissor}},
556 #endif // CTS_USES_VULKANSC
557 };
558
559 const auto itr = result.find(state);
560 DE_ASSERT(itr != result.end());
561
562 return itr->second;
563 }
564
565 // Device helper: abstraction over the device, needed because some tests create a custom device.
566 class DeviceHelper
567 {
568 public:
569     virtual ~DeviceHelper()
570 {
571 }
572 virtual const DeviceInterface &getDeviceInterface(void) const = 0;
573 virtual VkDevice getDevice(void) const = 0;
574 virtual uint32_t getQueueFamilyIndex(void) const = 0;
575 virtual VkQueue getQueue(void) const = 0;
576 virtual Allocator &getAllocator(void) const = 0;
577 virtual const std::vector<std::string> &getDeviceExtensions(void) const = 0;
578 };
579
580 // This one just reuses the default device from the context.
581 class ContextDeviceHelper : public DeviceHelper
582 {
583 public:
584     ContextDeviceHelper(Context &context)
585 : m_deviceInterface(context.getDeviceInterface())
586 , m_device(context.getDevice())
587 , m_queueFamilyIndex(context.getUniversalQueueFamilyIndex())
588 , m_queue(context.getUniversalQueue())
589 , m_allocator(context.getDefaultAllocator())
590 , m_extensions(context.getDeviceExtensions())
591 {
592 }
593
594     virtual ~ContextDeviceHelper()
595 {
596 }
597
598     const DeviceInterface &getDeviceInterface(void) const override
599 {
600 return m_deviceInterface;
601 }
602     VkDevice getDevice(void) const override
603 {
604 return m_device;
605 }
606     uint32_t getQueueFamilyIndex(void) const override
607 {
608 return m_queueFamilyIndex;
609 }
610     VkQueue getQueue(void) const override
611 {
612 return m_queue;
613 }
614     Allocator &getAllocator(void) const override
615 {
616 return m_allocator;
617 }
618     const std::vector<std::string> &getDeviceExtensions(void) const override
619 {
620 return m_extensions;
621 }
622
623 protected:
624 const DeviceInterface &m_deviceInterface;
625 const VkDevice m_device;
626 const uint32_t m_queueFamilyIndex;
627 const VkQueue m_queue;
628 Allocator &m_allocator;
629 std::vector<std::string> m_extensions;
630 };
631
632 // This one creates a new device with VK_NV_shading_rate_image.
633 class ShadingRateImageDeviceHelper : public DeviceHelper
634 {
635 public:
636     ShadingRateImageDeviceHelper(Context &context)
637 {
638 const auto &vkp = context.getPlatformInterface();
639 const auto &vki = context.getInstanceInterface();
640 const auto instance = context.getInstance();
641 const auto physicalDevice = context.getPhysicalDevice();
642 const auto queuePriority = 1.0f;
643
644 // Queue index first.
645 m_queueFamilyIndex = context.getUniversalQueueFamilyIndex();
646
647 // Create a universal queue that supports graphics and compute.
648 const VkDeviceQueueCreateInfo queueParams = {
649 VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, // VkStructureType sType;
650 DE_NULL, // const void* pNext;
651 0u, // VkDeviceQueueCreateFlags flags;
652 m_queueFamilyIndex, // uint32_t queueFamilyIndex;
653 1u, // uint32_t queueCount;
654 &queuePriority // const float* pQueuePriorities;
655 };
656
657 const char *extensions[] = {
658 "VK_NV_shading_rate_image",
659 };
660 m_extensions.push_back("VK_NV_shading_rate_image");
661
662 #ifndef CTS_USES_VULKANSC
663 VkPhysicalDeviceShadingRateImageFeaturesNV shadingRateImageFeatures = initVulkanStructure();
664 VkPhysicalDeviceFeatures2 features2 = initVulkanStructure(&shadingRateImageFeatures);
665
666 vki.getPhysicalDeviceFeatures2(physicalDevice, &features2);
667 #endif // CTS_USES_VULKANSC
668
669 const VkDeviceCreateInfo deviceCreateInfo = {
670 VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, //sType;
671 #ifndef CTS_USES_VULKANSC
672 &features2, //pNext;
673 #else
674 DE_NULL,
675 #endif // CTS_USES_VULKANSC
676 0u, //flags
677 1u, //queueRecordCount;
678 &queueParams, //pRequestedQueues;
679 0u, //layerCount;
680 nullptr, //ppEnabledLayerNames;
681 static_cast<uint32_t>(de::arrayLength(extensions)), // uint32_t enabledExtensionCount;
682 extensions, // const char* const* ppEnabledExtensionNames;
683 nullptr, //pEnabledFeatures;
684 };
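// Note: pEnabledFeatures stays null above because, outside Vulkan SC builds, the enabled features are
// passed through the VkPhysicalDeviceFeatures2 structure chained in pNext.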
685
686 m_device = createCustomDevice(context.getTestContext().getCommandLine().isValidationEnabled(), vkp, instance,
687 vki, physicalDevice, &deviceCreateInfo);
688 m_vkd.reset(new DeviceDriver(vkp, instance, m_device.get(), context.getUsedApiVersion(),
689 context.getTestContext().getCommandLine()));
690 m_queue = getDeviceQueue(*m_vkd, *m_device, m_queueFamilyIndex, 0u);
691 m_allocator.reset(
692 new SimpleAllocator(*m_vkd, m_device.get(), getPhysicalDeviceMemoryProperties(vki, physicalDevice)));
693 }
694
695     virtual ~ShadingRateImageDeviceHelper()
696 {
697 }
698
699     const DeviceInterface &getDeviceInterface(void) const override
700 {
701 return *m_vkd;
702 }
703     VkDevice getDevice(void) const override
704 {
705 return m_device.get();
706 }
707     uint32_t getQueueFamilyIndex(void) const override
708 {
709 return m_queueFamilyIndex;
710 }
711     VkQueue getQueue(void) const override
712 {
713 return m_queue;
714 }
715     Allocator &getAllocator(void) const override
716 {
717 return *m_allocator;
718 }
719     const std::vector<std::string> &getDeviceExtensions(void) const override
720 {
721 return m_extensions;
722 }
723
724 protected:
725 Move<VkDevice> m_device;
726 std::unique_ptr<DeviceDriver> m_vkd;
727 uint32_t m_queueFamilyIndex;
728 VkQueue m_queue;
729 std::unique_ptr<SimpleAllocator> m_allocator;
730 std::vector<std::string> m_extensions;
731 };
732
733 std::unique_ptr<DeviceHelper> g_shadingRateDeviceHelper;
734 std::unique_ptr<DeviceHelper> g_contextDeviceHelper;
735
736 DeviceHelper &getDeviceHelper(Context &context, VkDynamicState dynamicState)
737 {
738 const auto &stateInfo = getDynamicStateInfo(dynamicState);
739
740 if (de::contains(stateInfo.requirements.begin(), stateInfo.requirements.end(), "VK_NV_shading_rate_image"))
741 {
742 if (!g_shadingRateDeviceHelper)
743 g_shadingRateDeviceHelper.reset(new ShadingRateImageDeviceHelper(context));
744 return *g_shadingRateDeviceHelper;
745 }
746
747 if (!g_contextDeviceHelper)
748 g_contextDeviceHelper.reset(new ContextDeviceHelper(context));
749 return *g_contextDeviceHelper;
750 }
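// The helpers above are cached in globals so the custom VK_NV_shading_rate_image device is created at
// most once; cleanupDevice() at the end of this file releases both helpers.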
751
752 // Returns the set of auxiliary data needed to set a given state.
753 de::MovePtr<DynamicStateData> getDynamicStateData(Context &ctx, VkDevice device, VkDynamicState state,
754 PipelineConstructionType pipelineConstructionType)
755 {
756 // Create vertex buffer for VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT.
757 if (state == VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT)
758 return de::MovePtr<DynamicStateData>(new BindVertexBuffersData(ctx, device, pipelineConstructionType));
759
760     // Null pointer for every other state.
761 return de::MovePtr<DynamicStateData>();
762 }
763
764 enum class OperType
765 {
766 COMPUTE = 0,
767 TRANSFER
768 };
769 enum class WhenToSet
770 {
771 BEFORE = 0,
772 AFTER
773 };
774
775 // Set dynamic state before or after attempting to run a compute or transfer operation.
776 struct TestParams
777 {
778 OperType operationType;
779 WhenToSet whenToSet;
780 std::vector<VkDynamicState> states;
781 };
782
783 class DynamicStateComputeCase : public vkt::TestCase
784 {
785 public:
786     DynamicStateComputeCase(tcu::TestContext &testCtx, const std::string &name, const TestParams &params,
787 PipelineConstructionType pipelineConstructionType);
788     virtual ~DynamicStateComputeCase(void)
789 {
790 }
791
792 virtual void checkSupport(Context &context) const;
793 virtual void initPrograms(vk::SourceCollections &programCollection) const;
794 virtual TestInstance *createInstance(Context &context) const;
795
796 protected:
797 TestParams m_params;
798 PipelineConstructionType m_pipelineConstructionType;
799 };
800
801 class DynamicStateComputeInstance : public vkt::TestInstance
802 {
803 public:
804     DynamicStateComputeInstance(Context &context, const TestParams &params,
805 PipelineConstructionType pipelineConstructionType);
806     virtual ~DynamicStateComputeInstance(void)
807 {
808 }
809
810 virtual tcu::TestStatus iterate(void);
811
812 protected:
813 tcu::TestStatus iterateTransfer(void);
814 tcu::TestStatus iterateCompute(void);
815
816 TestParams m_params;
817 PipelineConstructionType m_pipelineConstructionType;
818 };
819
820 DynamicStateComputeCase::DynamicStateComputeCase(tcu::TestContext &testCtx, const std::string &name,
821                                                  const TestParams &params,
822 PipelineConstructionType pipelineConstructionType)
823 : vkt::TestCase(testCtx, name)
824 , m_params(params)
825 , m_pipelineConstructionType(pipelineConstructionType)
826 {
827 }
828
829 DynamicStateComputeInstance::DynamicStateComputeInstance(Context &context, const TestParams &params,
830 PipelineConstructionType pipelineConstructionType)
831 : vkt::TestInstance(context)
832 , m_params(params)
833 , m_pipelineConstructionType(pipelineConstructionType)
834 {
835 }
836
837 void DynamicStateComputeCase::checkSupport(Context &context) const
838 {
839 checkPipelineConstructionRequirements(context.getInstanceInterface(), context.getPhysicalDevice(),
840 m_pipelineConstructionType);
841
842 // Check required functionalities.
843 for (const auto &state : m_params.states)
844 {
845 const auto stateInfo = getDynamicStateInfo(state);
846 for (const auto &functionality : stateInfo.requirements)
847 {
848 if (functionality == "VK_KHR_or_EXT_line_rasterization")
849 {
850 if (!context.isDeviceFunctionalitySupported("VK_KHR_line_rasterization") &&
851 !context.isDeviceFunctionalitySupported("VK_EXT_line_rasterization"))
852 {
853 TCU_THROW(NotSupportedError,
854 "VK_KHR_line_rasterization and VK_EXT_line_rasterization are not supported");
855 }
856 }
857 else
858 {
859 context.requireDeviceFunctionality(functionality);
860 }
861 }
862 }
863 }
864
865 void DynamicStateComputeCase::initPrograms(vk::SourceCollections &programCollection) const
866 {
867 if (m_params.operationType == OperType::COMPUTE)
868 {
869 std::ostringstream comp;
870 comp << "#version 450\n"
871 << "\n"
872 << "layout (local_size_x = 1, local_size_y = 1, local_size_z = 1) in;\n"
873 << "\n"
874 << "layout (push_constant, std430) uniform PushConstants {\n"
875 << " uint valueIndex;\n"
876 << "} pc;\n"
877 << "\n"
878 << "layout (set=0, binding=0, std430) buffer OutputBlock {\n"
879 << " uint value[];\n"
880 << "} ob;\n"
881 << "\n"
882 << "void main ()\n"
883 << "{\n"
884 << " ob.value[pc.valueIndex] = 1u;\n"
885 << "}\n";
886
887 programCollection.glslSources.add("comp") << glu::ComputeSource(comp.str());
888 }
889
890 if (de::contains(begin(m_params.states), end(m_params.states), VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT))
891 {
892 // Passthrough vertex shader for stand-in graphics pipeline.
893 std::ostringstream vert;
894 vert << "#version 450\n"
895 << "layout (location=0) in vec4 inVertex;\n"
896 << "void main() {\n"
897 << " gl_Position = inVertex;\n"
898 << "}\n";
899
900 programCollection.glslSources.add("vert") << glu::VertexSource(vert.str());
901 }
902 }
903
904 vkt::TestInstance *DynamicStateComputeCase::createInstance(Context &context) const
905 {
906 return new DynamicStateComputeInstance(context, m_params, m_pipelineConstructionType);
907 }
908
909 tcu::TestStatus DynamicStateComputeInstance::iterate(void)
910 {
911 if (m_params.operationType == OperType::COMPUTE)
912 return iterateCompute();
913 else
914 return iterateTransfer();
915 }
916
917 void fillBuffer(const DeviceInterface &vkd, VkDevice device, BufferWithMemory &buffer,
918 const std::vector<uint32_t> &values)
919 {
920 auto &alloc = buffer.getAllocation();
921
922 deMemcpy(alloc.getHostPtr(), values.data(), de::dataSize(values));
923 flushAlloc(vkd, device, alloc);
924 }
925
926 tcu::TestStatus DynamicStateComputeInstance::iterateTransfer(void)
927 {
928 const auto &vki = m_context.getInstanceInterface();
929 const auto phyDev = m_context.getPhysicalDevice();
930 auto &devHelper = getDeviceHelper(m_context, m_params.states.at(0));
931 const auto &vkd = devHelper.getDeviceInterface();
932 const auto device = devHelper.getDevice();
933 const auto qIndex = devHelper.getQueueFamilyIndex();
934 const auto queue = devHelper.getQueue();
935 auto &alloc = devHelper.getAllocator();
936
937 const auto cmdPool = makeCommandPool(vkd, device, qIndex);
938 const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
939 const auto cmdBuffer = cmdBufferPtr.get();
940
941 // Prepare two host-visible buffers for a transfer operation, with one element per dynamic state.
942 const uint32_t seqStart = 1611747605u;
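    // The start value is arbitrary; it only needs to make every source element distinct and recognizable.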
943
944 DE_ASSERT(!m_params.states.empty());
945 std::vector<uint32_t> srcValues(m_params.states.size());
946 const decltype(srcValues) dstValues(srcValues.size(), 0u);
947 std::iota(begin(srcValues), end(srcValues), seqStart);
948
949 const auto elemSize = static_cast<VkDeviceSize>(sizeof(decltype(srcValues)::value_type));
950 const auto dataSize = static_cast<VkDeviceSize>(de::dataSize(srcValues));
951 const auto bufferSize = de::roundUp(dataSize, getPhysicalDeviceProperties(vki, phyDev).limits.nonCoherentAtomSize);
952 const auto srcInfo = makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
953 const auto dstInfo = makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
954 BufferWithMemory srcBuffer(vkd, device, alloc, srcInfo, MemoryRequirement::HostVisible);
955 BufferWithMemory dstBuffer(vkd, device, alloc, dstInfo, MemoryRequirement::HostVisible);
956
957 // Fill source and destination buffer.
958 fillBuffer(vkd, device, srcBuffer, srcValues);
959 fillBuffer(vkd, device, dstBuffer, dstValues);
960
961 beginCommandBuffer(vkd, cmdBuffer);
962
963 // We need to preserve dynamic state data until the command buffer has run.
964 std::vector<de::MovePtr<DynamicStateData>> statesData;
965
966 for (size_t stateIdx = 0; stateIdx < m_params.states.size(); ++stateIdx)
967 {
968 // Get extra data needed for using the dynamic state.
969 const auto offset = elemSize * stateIdx;
970 const auto &state = m_params.states[stateIdx];
971 const auto stateInfo = getDynamicStateInfo(state);
972 statesData.push_back(getDynamicStateData(m_context, device, state, m_pipelineConstructionType));
973
974 // Record command if before.
975 if (m_params.whenToSet == WhenToSet::BEFORE)
976 stateInfo.recorder(&vkd, cmdBuffer, statesData.back().get());
977
978 // Transfer op (copy one buffer element per dynamic state).
979 const VkBufferCopy region = {offset, offset, elemSize};
980         vkd.cmdCopyBuffer(cmdBuffer, srcBuffer.get(), dstBuffer.get(), 1u, &region);
981
982 // Record command if after.
983 if (m_params.whenToSet == WhenToSet::AFTER)
984 stateInfo.recorder(&vkd, cmdBuffer, statesData.back().get());
985 }
986
987 const auto barrier = makeMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT);
988 vkd.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0u, 1u, &barrier, 0u,
989 nullptr, 0u, nullptr);
990
991 endCommandBuffer(vkd, cmdBuffer);
992 submitCommandsAndWait(vkd, device, queue, cmdBuffer);
993
994 // Invalidate alloc and check destination buffer.
995 auto &dstBufferAlloc = dstBuffer.getAllocation();
996 invalidateAlloc(vkd, device, dstBufferAlloc);
997
998 decltype(srcValues) results(srcValues.size());
999 deMemcpy(results.data(), dstBufferAlloc.getHostPtr(), de::dataSize(srcValues));
1000
1001 for (size_t valueIdx = 0; valueIdx < srcValues.size(); ++valueIdx)
1002 {
1003 const auto &orig = srcValues[valueIdx];
1004 const auto &res = results[valueIdx];
1005
1006 if (orig != res)
1007 {
1008 std::ostringstream msg;
1009 msg << "Unexpected value found in destination buffer at position " << valueIdx << " (found=" << res
1010 << " expected=" << orig << ")";
1011 TCU_FAIL(msg.str());
1012 }
1013 }
1014
1015 return tcu::TestStatus::pass("Pass");
1016 }
1017
1018 tcu::TestStatus DynamicStateComputeInstance::iterateCompute(void)
1019 {
1020 const auto &vki = m_context.getInstanceInterface();
1021 const auto phyDev = m_context.getPhysicalDevice();
1022 auto &devHelper = getDeviceHelper(m_context, m_params.states.at(0));
1023 const auto &vkd = devHelper.getDeviceInterface();
1024 const auto device = devHelper.getDevice();
1025 const auto qIndex = devHelper.getQueueFamilyIndex();
1026 const auto queue = devHelper.getQueue();
1027 auto &alloc = devHelper.getAllocator();
1028
1029 const auto cmdPool = makeCommandPool(vkd, device, qIndex);
1030 const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
1031 const auto cmdBuffer = cmdBufferPtr.get();
1032
1033 DescriptorSetLayoutBuilder setLayoutBuilder;
1034 setLayoutBuilder.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT);
1035 const auto setLayout = setLayoutBuilder.build(vkd, device);
1036
1037 // Push constants.
1038 const uint32_t pcSize = static_cast<uint32_t>(sizeof(uint32_t));
1039 const auto pcRange = makePushConstantRange(VK_SHADER_STAGE_COMPUTE_BIT, 0u, pcSize);
1040
1041 // Pipeline.
1042 const VkPipelineLayoutCreateInfo layoutInfo = {
1043 VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, // VkStructureType sType;
1044 nullptr, // const void* pNext;
1045 0u, // VkPipelineLayoutCreateFlags flags;
1046 1u, // uint32_t setLayoutCount;
1047 &setLayout.get(), // const VkDescriptorSetLayout* pSetLayouts;
1048 1u, // uint32_t pushConstantRangeCount;
1049 &pcRange, // const VkPushConstantRange* pPushConstantRanges;
1050 };
1051 const auto pipelineLayout = createPipelineLayout(vkd, device, &layoutInfo);
1052
1053 const auto shaderModule = createShaderModule(vkd, device, m_context.getBinaryCollection().get("comp"), 0u);
1054
1055 const VkPipelineShaderStageCreateInfo shaderStageInfo = {
1056 VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, // VkStructureType sType;
1057 nullptr, // const void* pNext;
1058 0u, // VkPipelineShaderStageCreateFlags flags;
1059 VK_SHADER_STAGE_COMPUTE_BIT, // VkShaderStageFlagBits stage;
1060 shaderModule.get(), // VkShaderModule module;
1061 "main", // const char* pName;
1062 nullptr, // const VkSpecializationInfo* pSpecializationInfo;
1063 };
1064
1065 const VkComputePipelineCreateInfo pipelineInfo = {
1066 VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, // VkStructureType sType;
1067 nullptr, // const void* pNext;
1068 0u, // VkPipelineCreateFlags flags;
1069 shaderStageInfo, // VkPipelineShaderStageCreateInfo stage;
1070 pipelineLayout.get(), // VkPipelineLayout layout;
1071 DE_NULL, // VkPipeline basePipelineHandle;
1072 0, // int32_t basePipelineIndex;
1073 };
1074 const auto pipeline = createComputePipeline(vkd, device, DE_NULL, &pipelineInfo);
1075
1076 DE_ASSERT(!m_params.states.empty());
1077
1078 // Output buffer with one value per state.
1079 std::vector<uint32_t> bufferData(m_params.states.size(), 0u);
1080 const auto dataSize(de::dataSize(bufferData));
1081 const auto outputBufferSize = de::roundUp(static_cast<VkDeviceSize>(dataSize),
1082 getPhysicalDeviceProperties(vki, phyDev).limits.nonCoherentAtomSize);
1083 const auto bufferCreateInfo = makeBufferCreateInfo(outputBufferSize, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
1084
1085 BufferWithMemory outputBuffer(vkd, device, alloc, bufferCreateInfo, MemoryRequirement::HostVisible);
1086 auto &outputBufferAlloc = outputBuffer.getAllocation();
1087 auto outputBufferPtr = outputBufferAlloc.getHostPtr();
1088
1089 deMemcpy(outputBufferPtr, bufferData.data(), dataSize);
1090 flushAlloc(vkd, device, outputBufferAlloc);
1091
1092 // Descriptor set.
1093 DescriptorPoolBuilder poolBuilder;
1094 poolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
1095 const auto descriptorPool = poolBuilder.build(vkd, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
1096
1097 const auto descriptorSet = makeDescriptorSet(vkd, device, descriptorPool.get(), setLayout.get());
1098
1099 const auto bufferInfo = makeDescriptorBufferInfo(outputBuffer.get(), 0ull, outputBufferSize);
1100 DescriptorSetUpdateBuilder updateBuilder;
1101 updateBuilder.writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(0u),
1102 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &bufferInfo);
1103 updateBuilder.update(vkd, device);
1104
1105 // Record and submit.
1106 beginCommandBuffer(vkd, cmdBuffer);
1107
1108 // We need to preserve dynamic state data until the command buffer has run.
1109 std::vector<de::MovePtr<DynamicStateData>> statesData;
1110
1111 for (size_t stateIdx = 0; stateIdx < m_params.states.size(); ++stateIdx)
1112 {
1113 // Objects needed to set the dynamic state.
1114 auto state = m_params.states[stateIdx];
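        // Shader objects always use the with-count viewport/scissor commands, so remap the classic
        // states to their with-count variants in that case.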
1115 if (vk::isConstructionTypeShaderObject(m_pipelineConstructionType))
1116 {
1117 if (state == vk::VK_DYNAMIC_STATE_VIEWPORT)
1118 state = vk::VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT;
1119 if (state == vk::VK_DYNAMIC_STATE_SCISSOR)
1120 state = vk::VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT;
1121 }
1122
1123 const auto stateInfo = getDynamicStateInfo(state);
1124 statesData.push_back(getDynamicStateData(m_context, device, state, m_pipelineConstructionType));
1125
1126 if (m_params.whenToSet == WhenToSet::BEFORE)
1127 stateInfo.recorder(&vkd, cmdBuffer, statesData.back().get());
1128
1129 vkd.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, pipeline.get());
1130 vkd.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, pipelineLayout.get(), 0u, 1u,
1131 &descriptorSet.get(), 0u, nullptr);
1132 {
1133 // Each state will write to a different buffer position.
1134 const uint32_t pcData = static_cast<uint32_t>(stateIdx);
1135 vkd.cmdPushConstants(cmdBuffer, pipelineLayout.get(), VK_SHADER_STAGE_COMPUTE_BIT, 0u, pcSize, &pcData);
1136 }
1137 vkd.cmdDispatch(cmdBuffer, 1u, 1u, 1u);
1138
1139 if (m_params.whenToSet == WhenToSet::AFTER)
1140 stateInfo.recorder(&vkd, cmdBuffer, statesData.back().get());
1141 }
1142
1143 // Barrier to read buffer contents.
1144 const auto barrier = makeMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT);
1145 vkd.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0u, 1u,
1146 &barrier, 0u, nullptr, 0u, nullptr);
1147
1148 endCommandBuffer(vkd, cmdBuffer);
1149 submitCommandsAndWait(vkd, device, queue, cmdBuffer);
1150
1151 // Read and verify buffer contents.
1152 invalidateAlloc(vkd, device, outputBufferAlloc);
1153 deMemcpy(bufferData.data(), outputBufferPtr, dataSize);
1154
1155 for (size_t idx = 0u; idx < bufferData.size(); ++idx)
1156 {
1157 if (bufferData[idx] != 1u)
1158 {
1159 std::ostringstream msg;
1160 msg << "Unexpected value found at buffer position " << idx << ": " << bufferData[idx];
1161 TCU_FAIL(msg.str());
1162 }
1163 }
1164
1165 return tcu::TestStatus::pass("Pass");
1166 }
1167
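// Returns the lowercase state name without the VK_DYNAMIC_STATE_ prefix, e.g. "line_width" for
// VK_DYNAMIC_STATE_LINE_WIDTH.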
1168 std::string getDynamicStateBriefName(VkDynamicState state)
1169 {
1170 const auto fullName = de::toString(state);
1171 const auto prefixLen = strlen("VK_DYNAMIC_STATE_");
1172
1173 return de::toLower(fullName.substr(prefixLen));
1174 }
1175
1176 } // namespace
1177
1178 tcu::TestCaseGroup *createDynamicStateComputeTests(tcu::TestContext &testCtx,
1179 vk::PipelineConstructionType pipelineConstructionType)
1180 {
1181 using GroupPtr = de::MovePtr<tcu::TestCaseGroup>;
1182
1183 // Dynamic state mixed with compute and transfer operations
1184 GroupPtr mainGroup(new tcu::TestCaseGroup(testCtx, "compute_transfer"));
1185
1186 const struct
1187 {
1188 OperType operationType;
1189 const char *name;
1190 } operations[] = {
1191 {OperType::COMPUTE, "compute"},
1192 {OperType::TRANSFER, "transfer"},
1193 };
1194
1195 const struct
1196 {
1197 WhenToSet when;
1198 const char *name;
1199 } moments[] = {
1200 {WhenToSet::BEFORE, "before"},
1201 {WhenToSet::AFTER, "after"},
1202 };
1203
1204 // Tests with a single dynamic state.
1205 {
1206 GroupPtr singleStateGroup(new tcu::TestCaseGroup(testCtx, "single"));
1207
1208 for (int operIdx = 0; operIdx < DE_LENGTH_OF_ARRAY(operations); ++operIdx)
1209 {
1210 GroupPtr operationGroup(new tcu::TestCaseGroup(testCtx, operations[operIdx].name));
1211
1212 for (int stateIdx = 0; stateIdx < DE_LENGTH_OF_ARRAY(dynamicStateList); ++stateIdx)
1213 {
1214 const auto state = dynamicStateList[stateIdx];
1215 const auto stateName = getDynamicStateBriefName(state);
1216
1217 GroupPtr stateGroup(new tcu::TestCaseGroup(testCtx, stateName.c_str()));
1218
1219 for (int momentIdx = 0; momentIdx < DE_LENGTH_OF_ARRAY(moments); ++momentIdx)
1220 {
1221 const TestParams testParams = {
1222 operations[operIdx].operationType, // OperType operationType;
1223 moments[momentIdx].when, // WhenToSet whenToSet;
1224 std::vector<VkDynamicState>(1, state), // std::vector<VkDynamicState> state;
1225 };
1226
1227 stateGroup->addChild(new DynamicStateComputeCase(testCtx, moments[momentIdx].name, testParams,
1228 pipelineConstructionType));
1229 }
1230
1231 operationGroup->addChild(stateGroup.release());
1232 }
1233
1234 singleStateGroup->addChild(operationGroup.release());
1235 }
1236
1237 mainGroup->addChild(singleStateGroup.release());
1238 }
1239
1240 // A few tests with several dynamic states.
1241 {
1242 GroupPtr multiStateGroup(new tcu::TestCaseGroup(testCtx, "multi"));
1243
1244 for (int operIdx = 0; operIdx < DE_LENGTH_OF_ARRAY(operations); ++operIdx)
1245 {
1246 GroupPtr operationGroup(new tcu::TestCaseGroup(testCtx, operations[operIdx].name));
1247
1248 for (int momentIdx = 0; momentIdx < DE_LENGTH_OF_ARRAY(moments); ++momentIdx)
1249 {
1250 TestParams testParams = {
1251 operations[operIdx].operationType, // OperType operationType;
1252 moments[momentIdx].when, // WhenToSet whenToSet;
1253 std::vector<VkDynamicState>(), // std::vector<VkDynamicState> states;
1254 };
1255
1256 // Use the basic states so as not to introduce extra requirements.
1257 for (int stateIdx = 0; stateIdx < DE_LENGTH_OF_ARRAY(dynamicStateList); ++stateIdx)
1258 {
1259 testParams.states.push_back(dynamicStateList[stateIdx]);
1260 if (dynamicStateList[stateIdx] == VK_DYNAMIC_STATE_STENCIL_REFERENCE)
1261 break;
1262 }
1263
1264 operationGroup->addChild(new DynamicStateComputeCase(testCtx, moments[momentIdx].name, testParams,
1265 pipelineConstructionType));
1266 }
1267
1268 multiStateGroup->addChild(operationGroup.release());
1269 }
1270
1271 mainGroup->addChild(multiStateGroup.release());
1272 }
1273
1274 return mainGroup.release();
1275 }
1276
1277 void cleanupDevice()
1278 {
1279 g_shadingRateDeviceHelper.reset(nullptr);
1280 g_contextDeviceHelper.reset(nullptr);
1281 }
1282
1283 } // namespace DynamicState
1284 } // namespace vkt
1285