1 /*------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
4 *
5 * Copyright (c) 2016 The Khronos Group Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 *//*!
20 * \file
21 * \brief Synchronization operation abstraction
22 *//*--------------------------------------------------------------------*/
23
24 #include "vktSynchronizationOperation.hpp"
25 #include "vkDefs.hpp"
26 #include "vktTestCase.hpp"
27 #include "vktTestCaseUtil.hpp"
28 #include "vkRef.hpp"
29 #include "vkRefUtil.hpp"
30 #include "vkMemUtil.hpp"
31 #include "vkBarrierUtil.hpp"
32 #include "vkQueryUtil.hpp"
33 #include "vkTypeUtil.hpp"
34 #include "vkImageUtil.hpp"
35 #include "vkBuilderUtil.hpp"
36 #include "vkCmdUtil.hpp"
37 #include "vkObjUtil.hpp"
38 #include "deUniquePtr.hpp"
39 #include "tcuTestLog.hpp"
40 #include "tcuTextureUtil.hpp"
41 #include <vector>
42 #include <sstream>
43
44 namespace vkt
45 {
46 namespace synchronization
47 {
48 namespace
49 {
50 using namespace vk;
51
52 enum Constants
53 {
54 MAX_IMAGE_DIMENSION_2D = 0x1000u,
55 MAX_UBO_RANGE = 0x4000u,
56 MAX_UPDATE_BUFFER_SIZE = 0x10000u,
57 };
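// Note (illustrative): these limits appear to mirror the minimum values the Vulkan spec guarantees for
// every implementation - maxImageDimension2D >= 4096 (0x1000), maxUniformBufferRange >= 16384 (0x4000),
// and the 65536-byte (0x10000) upper bound on the dataSize accepted by vkCmdUpdateBuffer - so resources
// sized against them should be creatable everywhere.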
58
59 enum BufferType
60 {
61 BUFFER_TYPE_UNIFORM,
62 BUFFER_TYPE_STORAGE,
63 BUFFER_TYPE_UNIFORM_TEXEL,
64 };
65
66 enum AccessMode
67 {
68 ACCESS_MODE_READ,
69 ACCESS_MODE_WRITE,
70 };
71
72 enum PipelineType
73 {
74 PIPELINE_TYPE_GRAPHICS,
75 PIPELINE_TYPE_COMPUTE,
76 };
77
78 static const char *const s_perVertexBlock = "gl_PerVertex {\n"
79 " vec4 gl_Position;\n"
80 "}";
81
82 static const SyncInfo emptySyncInfo = {
83 0, // VkPipelineStageFlags stageMask;
84 0, // VkAccessFlags accessMask;
85 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
86 };
87
std::string getShaderStageName(VkShaderStageFlagBits stage)
89 {
90 switch (stage)
91 {
92 default:
93 DE_FATAL("Unhandled stage!");
94 return "";
95 case VK_SHADER_STAGE_COMPUTE_BIT:
96 return "compute";
97 case VK_SHADER_STAGE_FRAGMENT_BIT:
98 return "fragment";
99 case VK_SHADER_STAGE_VERTEX_BIT:
100 return "vertex";
101 case VK_SHADER_STAGE_GEOMETRY_BIT:
102 return "geometry";
103 case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
104 return "tess_control";
105 case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
106 return "tess_eval";
107 }
108 }
109
110 //! A pipeline that can be embedded inside an operation.
111 class Pipeline
112 {
113 public:
virtual ~Pipeline(void)
115 {
116 }
117 virtual void recordCommands(OperationContext &context, const VkCommandBuffer cmdBuffer,
118 const VkDescriptorSet descriptorSet) = 0;
119 };
120
121 //! Vertex data that covers the whole viewport with two triangles.
122 class VertexGrid
123 {
124 public:
VertexGrid(OperationContext &context)
126 : m_vertexFormat(VK_FORMAT_R32G32B32A32_SFLOAT)
127 , m_vertexStride(tcu::getPixelSize(mapVkFormat(m_vertexFormat)))
128 {
129 const DeviceInterface &vk = context.getDeviceInterface();
130 const VkDevice device = context.getDevice();
131 Allocator &allocator = context.getAllocator();
132
133 // Vertex positions
134 {
135 m_vertexData.push_back(tcu::Vec4(1.0f, 1.0f, 0.0f, 1.0f));
136 m_vertexData.push_back(tcu::Vec4(-1.0f, 1.0f, 0.0f, 1.0f));
137 m_vertexData.push_back(tcu::Vec4(-1.0f, -1.0f, 0.0f, 1.0f));
138
139 m_vertexData.push_back(tcu::Vec4(-1.0f, -1.0f, 0.0f, 1.0f));
140 m_vertexData.push_back(tcu::Vec4(1.0f, -1.0f, 0.0f, 1.0f));
141 m_vertexData.push_back(tcu::Vec4(1.0f, 1.0f, 0.0f, 1.0f));
142 }
143
144 {
145 const VkDeviceSize vertexDataSizeBytes = m_vertexData.size() * sizeof(m_vertexData[0]);
146
147 m_vertexBuffer = de::MovePtr<Buffer>(new Buffer(
148 vk, device, allocator, makeBufferCreateInfo(vertexDataSizeBytes, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT),
149 MemoryRequirement::HostVisible));
150 DE_ASSERT(sizeof(m_vertexData[0]) == m_vertexStride);
151
152 {
153 const Allocation &alloc = m_vertexBuffer->getAllocation();
154
155 deMemcpy(alloc.getHostPtr(), &m_vertexData[0], static_cast<std::size_t>(vertexDataSizeBytes));
156 flushAlloc(vk, device, alloc);
157 }
158 }
159
160 // Indices
161 {
162 const VkDeviceSize indexBufferSizeBytes = sizeof(uint32_t) * m_vertexData.size();
163 const uint32_t numIndices = static_cast<uint32_t>(m_vertexData.size());
164
165 m_indexBuffer = de::MovePtr<Buffer>(new Buffer(
166 vk, device, allocator, makeBufferCreateInfo(indexBufferSizeBytes, VK_BUFFER_USAGE_INDEX_BUFFER_BIT),
167 MemoryRequirement::HostVisible));
168
169 {
170 const Allocation &alloc = m_indexBuffer->getAllocation();
171 uint32_t *const pData = static_cast<uint32_t *>(alloc.getHostPtr());
172
173 for (uint32_t i = 0; i < numIndices; ++i)
174 pData[i] = i;
175
176 flushAlloc(vk, device, alloc);
177 }
178 }
179 }
180
VkFormat getVertexFormat(void) const
182 {
183 return m_vertexFormat;
184 }
uint32_t getVertexStride(void) const
186 {
187 return m_vertexStride;
188 }
VkIndexType getIndexType(void) const
190 {
191 return VK_INDEX_TYPE_UINT32;
192 }
uint32_t getNumVertices(void) const
194 {
195 return static_cast<uint32_t>(m_vertexData.size());
196 }
uint32_t getNumIndices(void) const
198 {
199 return getNumVertices();
200 }
VkBuffer getVertexBuffer(void) const
202 {
203 return **m_vertexBuffer;
204 }
VkBuffer getIndexBuffer(void) const
206 {
207 return **m_indexBuffer;
208 }
209
210 private:
211 const VkFormat m_vertexFormat;
212 const uint32_t m_vertexStride;
213 std::vector<tcu::Vec4> m_vertexData;
214 de::MovePtr<Buffer> m_vertexBuffer;
215 de::MovePtr<Buffer> m_indexBuffer;
216 };
217
218 //! Add flags for all shader stages required to support a particular stage (e.g. fragment requires vertex as well).
VkShaderStageFlags getRequiredStages(const VkShaderStageFlagBits stage)
220 {
221 VkShaderStageFlags flags = 0;
222
223 DE_ASSERT(stage == VK_SHADER_STAGE_COMPUTE_BIT || (stage & VK_SHADER_STAGE_COMPUTE_BIT) == 0);
224
225 if (stage & VK_SHADER_STAGE_ALL_GRAPHICS)
226 flags |= VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
227
228 if (stage & (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))
229 flags |= VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
230
231 if (stage & VK_SHADER_STAGE_GEOMETRY_BIT)
232 flags |= VK_SHADER_STAGE_GEOMETRY_BIT;
233
234 if (stage & VK_SHADER_STAGE_COMPUTE_BIT)
235 flags |= VK_SHADER_STAGE_COMPUTE_BIT;
236
237 return flags;
238 }
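// For example (illustrative), getRequiredStages(VK_SHADER_STAGE_GEOMETRY_BIT) yields
// VERTEX | FRAGMENT | GEOMETRY: any graphics-stage input pulls in the mandatory vertex and fragment
// stages, and the geometry bit is then added on top. A compute input maps to just
// VK_SHADER_STAGE_COMPUTE_BIT.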
239
240 //! Check that SSBO read/write is available and that all shader stages are supported.
void requireFeaturesForSSBOAccess(OperationContext &context, const VkShaderStageFlags usedStages)
242 {
243 const InstanceInterface &vki = context.getInstanceInterface();
244 const VkPhysicalDevice physDevice = context.getPhysicalDevice();
245 FeatureFlags flags = (FeatureFlags)0;
246
247 if (usedStages & VK_SHADER_STAGE_FRAGMENT_BIT)
248 flags |= FEATURE_FRAGMENT_STORES_AND_ATOMICS;
249
250 if (usedStages & (VK_SHADER_STAGE_ALL_GRAPHICS & (~VK_SHADER_STAGE_FRAGMENT_BIT)))
251 flags |= FEATURE_VERTEX_PIPELINE_STORES_AND_ATOMICS;
252
253 if (usedStages & VK_SHADER_STAGE_GEOMETRY_BIT)
254 flags |= FEATURE_GEOMETRY_SHADER;
255
256 if (usedStages & (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))
257 flags |= FEATURE_TESSELLATION_SHADER;
258
259 requireFeatures(vki, physDevice, flags);
260 }
261
Data getHostBufferData(const OperationContext &context, const Buffer &hostBuffer, const VkDeviceSize size)
263 {
264 const DeviceInterface &vk = context.getDeviceInterface();
265 const VkDevice device = context.getDevice();
266 const Allocation &alloc = hostBuffer.getAllocation();
267 const Data data = {
268 static_cast<std::size_t>(size), // std::size_t size;
269 static_cast<uint8_t *>(alloc.getHostPtr()), // const uint8_t* data;
270 };
271
272 invalidateAlloc(vk, device, alloc);
273
274 return data;
275 }
276
void setHostBufferData(const OperationContext &context, const Buffer &hostBuffer, const Data &data)
278 {
279 const DeviceInterface &vk = context.getDeviceInterface();
280 const VkDevice device = context.getDevice();
281 const Allocation &alloc = hostBuffer.getAllocation();
282
283 deMemcpy(alloc.getHostPtr(), data.data, data.size);
284 flushAlloc(vk, device, alloc);
285 }
286
void assertValidShaderStage(const VkShaderStageFlagBits stage)
288 {
289 switch (stage)
290 {
291 case VK_SHADER_STAGE_VERTEX_BIT:
292 case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
293 case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
294 case VK_SHADER_STAGE_GEOMETRY_BIT:
295 case VK_SHADER_STAGE_FRAGMENT_BIT:
296 case VK_SHADER_STAGE_COMPUTE_BIT:
297 // OK
298 break;
299
300 default:
301 DE_FATAL("Invalid shader stage");
302 break;
303 }
304 }
305
VkPipelineStageFlags pipelineStageFlagsFromShaderStageFlagBits(const VkShaderStageFlagBits shaderStage)
307 {
308 switch (shaderStage)
309 {
310 case VK_SHADER_STAGE_VERTEX_BIT:
311 return VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR;
312 case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
313 return VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR;
314 case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
315 return VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR;
316 case VK_SHADER_STAGE_GEOMETRY_BIT:
317 return VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR;
318 case VK_SHADER_STAGE_FRAGMENT_BIT:
319 return VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR;
320 case VK_SHADER_STAGE_COMPUTE_BIT:
321 return VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR;
322
323 // Other usages are probably an error, so flag that.
324 default:
325 DE_FATAL("Invalid shader stage");
326 return (VkPipelineStageFlags)0;
327 }
328 }
329
330 //! Fill destination buffer with a repeating pattern.
void fillPattern(void *const pData, const VkDeviceSize size, bool useIndexPattern = false)
332 {
// There are two pattern options: most operations use primePattern;
// indexPattern is only needed when testing the vertex index buffer.
335 static const uint8_t primePattern[] = {2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31};
336 static const uint32_t indexPattern[] = {0, 1, 2, 3, 4};
337
338 const uint8_t *pattern = (useIndexPattern ? reinterpret_cast<const uint8_t *>(indexPattern) : primePattern);
339 const uint32_t patternSize = static_cast<uint32_t>(
340 useIndexPattern ? DE_LENGTH_OF_ARRAY(indexPattern) * sizeof(uint32_t) : DE_LENGTH_OF_ARRAY(primePattern));
341 uint8_t *const pBytes = static_cast<uint8_t *>(pData);
342
343 for (uint32_t i = 0; i < size; ++i)
344 pBytes[i] = pattern[i % patternSize];
345 }
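// For illustration: fillPattern(dst, 6) writes the first six primes {2, 3, 5, 7, 11, 13} and then wraps
// around modulo the pattern length for larger sizes, so the expected contents of a destination buffer
// can be recomputed on the host byte-for-byte.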
346
347 //! Get size in bytes of a pixel buffer with given extent.
VkDeviceSize getPixelBufferSize(const VkFormat format, const VkExtent3D &extent)
349 {
350 const int pixelSize = tcu::getPixelSize(mapVkFormat(format));
351 return (pixelSize * extent.width * extent.height * extent.depth);
352 }
353
354 //! Determine the size of a 2D image that can hold sizeBytes data.
VkExtent3D get2DImageExtentWithSize(const VkDeviceSize sizeBytes, const uint32_t pixelSize)
356 {
357 const uint32_t size = static_cast<uint32_t>(sizeBytes / pixelSize);
358
359 DE_ASSERT(size <= MAX_IMAGE_DIMENSION_2D * MAX_IMAGE_DIMENSION_2D);
360
361 return makeExtent3D(std::min(size, static_cast<uint32_t>(MAX_IMAGE_DIMENSION_2D)),
362 (size / MAX_IMAGE_DIMENSION_2D) + (size % MAX_IMAGE_DIMENSION_2D != 0 ? 1u : 0u), 1u);
363 }
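// Worked example (illustrative): with a 4-byte pixel, sizeBytes = 1024 gives 256 pixels and an extent of
// 256x1x1, while 131072 bytes (32768 pixels) gives 4096x8x1 - the width is clamped to
// MAX_IMAGE_DIMENSION_2D and the remaining pixels spill into additional rows.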
364
VkClearValue makeClearValue(const VkFormat format)
366 {
367 if (isDepthStencilFormat(format))
368 return makeClearValueDepthStencil(0.4f, 21u);
369 else
370 {
371 if (isIntFormat(format) || isUintFormat(format))
372 return makeClearValueColorU32(8u, 16u, 24u, 32u);
373 else
374 return makeClearValueColorF32(0.25f, 0.49f, 0.75f, 1.0f);
375 }
376 }
377
void clearPixelBuffer(tcu::PixelBufferAccess &pixels, const VkClearValue &clearValue)
379 {
380 const tcu::TextureFormat format = pixels.getFormat();
381 const tcu::TextureChannelClass channelClass = tcu::getTextureChannelClass(format.type);
382
383 if (format.order == tcu::TextureFormat::D)
384 {
385 for (int z = 0; z < pixels.getDepth(); z++)
386 for (int y = 0; y < pixels.getHeight(); y++)
387 for (int x = 0; x < pixels.getWidth(); x++)
388 pixels.setPixDepth(clearValue.depthStencil.depth, x, y, z);
389 }
390 else if (format.order == tcu::TextureFormat::S)
391 {
392 for (int z = 0; z < pixels.getDepth(); z++)
393 for (int y = 0; y < pixels.getHeight(); y++)
394 for (int x = 0; x < pixels.getWidth(); x++)
395 pixels.setPixStencil(clearValue.depthStencil.stencil, x, y, z);
396 }
397 else if (format.order == tcu::TextureFormat::DS)
398 {
399 for (int z = 0; z < pixels.getDepth(); z++)
400 for (int y = 0; y < pixels.getHeight(); y++)
401 for (int x = 0; x < pixels.getWidth(); x++)
402 {
403 pixels.setPixDepth(clearValue.depthStencil.depth, x, y, z);
404 pixels.setPixStencil(clearValue.depthStencil.stencil, x, y, z);
405 }
406 }
407 else if (channelClass == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER ||
408 channelClass == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER)
409 {
410 const tcu::UVec4 color(clearValue.color.uint32);
411
412 for (int z = 0; z < pixels.getDepth(); z++)
413 for (int y = 0; y < pixels.getHeight(); y++)
414 for (int x = 0; x < pixels.getWidth(); x++)
415 pixels.setPixel(color, x, y, z);
416 }
417 else
418 {
419 const tcu::Vec4 color(clearValue.color.float32);
420
421 for (int z = 0; z < pixels.getDepth(); z++)
422 for (int y = 0; y < pixels.getHeight(); y++)
423 for (int x = 0; x < pixels.getWidth(); x++)
424 pixels.setPixel(color, x, y, z);
425 }
426 }
427
VkImageViewType getImageViewType(const VkImageType imageType)
429 {
430 switch (imageType)
431 {
432 case VK_IMAGE_TYPE_1D:
433 return VK_IMAGE_VIEW_TYPE_1D;
434 case VK_IMAGE_TYPE_2D:
435 return VK_IMAGE_VIEW_TYPE_2D;
436 case VK_IMAGE_TYPE_3D:
437 return VK_IMAGE_VIEW_TYPE_3D;
438
439 default:
440 DE_FATAL("Unknown image type");
441 return VK_IMAGE_VIEW_TYPE_LAST;
442 }
443 }
444
std::string getShaderImageType(const VkFormat format, const VkImageType imageType)
446 {
447 const tcu::TextureFormat texFormat = mapVkFormat(format);
448 const std::string formatPart =
449 tcu::getTextureChannelClass(texFormat.type) == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER ? "u" :
450 tcu::getTextureChannelClass(texFormat.type) == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER ? "i" :
451 "";
452 switch (imageType)
453 {
454 case VK_IMAGE_TYPE_1D:
455 return formatPart + "image1D";
456 case VK_IMAGE_TYPE_2D:
457 return formatPart + "image2D";
458 case VK_IMAGE_TYPE_3D:
459 return formatPart + "image3D";
460
461 default:
462 DE_FATAL("Unknown image type");
463 return "";
464 }
465 }
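// E.g. (illustrative) VK_FORMAT_R32_UINT with VK_IMAGE_TYPE_2D maps to the GLSL type "uimage2D", while a
// normalized/float format such as VK_FORMAT_R8G8B8A8_UNORM maps to plain "image2D".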
466
std::string getShaderImageFormatQualifier(const VkFormat format)
468 {
469 const tcu::TextureFormat texFormat = mapVkFormat(format);
470 const char *orderPart = DE_NULL;
471 const char *typePart = DE_NULL;
472
473 switch (texFormat.order)
474 {
475 case tcu::TextureFormat::R:
476 orderPart = "r";
477 break;
478 case tcu::TextureFormat::RG:
479 orderPart = "rg";
480 break;
481 case tcu::TextureFormat::RGB:
482 orderPart = "rgb";
483 break;
484 case tcu::TextureFormat::RGBA:
485 orderPart = "rgba";
486 break;
487
488 default:
DE_FATAL("Unsupported texture channel order");
490 break;
491 }
492
493 switch (texFormat.type)
494 {
495 case tcu::TextureFormat::FLOAT:
496 typePart = "32f";
497 break;
498 case tcu::TextureFormat::HALF_FLOAT:
499 typePart = "16f";
500 break;
501
502 case tcu::TextureFormat::UNSIGNED_INT32:
503 typePart = "32ui";
504 break;
505 case tcu::TextureFormat::UNSIGNED_INT16:
506 typePart = "16ui";
507 break;
508 case tcu::TextureFormat::UNSIGNED_INT8:
509 typePart = "8ui";
510 break;
511
512 case tcu::TextureFormat::SIGNED_INT32:
513 typePart = "32i";
514 break;
515 case tcu::TextureFormat::SIGNED_INT16:
516 typePart = "16i";
517 break;
518 case tcu::TextureFormat::SIGNED_INT8:
519 typePart = "8i";
520 break;
521
522 case tcu::TextureFormat::UNORM_INT16:
523 typePart = "16";
524 break;
525 case tcu::TextureFormat::UNORM_INT8:
526 typePart = "8";
527 break;
528
529 case tcu::TextureFormat::SNORM_INT16:
530 typePart = "16_snorm";
531 break;
532 case tcu::TextureFormat::SNORM_INT8:
533 typePart = "8_snorm";
534 break;
535
536 default:
DE_FATAL("Unsupported texture channel type");
538 break;
539 }
540
541 return std::string(orderPart) + typePart;
542 }
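// E.g. (illustrative) VK_FORMAT_R32G32B32A32_SFLOAT becomes the layout qualifier "rgba32f" and
// VK_FORMAT_R8_UNORM becomes "r8", matching the GLSL image format qualifier spelling.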
543
544 namespace FillUpdateBuffer
545 {
546
547 enum BufferOp
548 {
549 BUFFER_OP_FILL,
550 BUFFER_OP_UPDATE,
551 BUFFER_OP_UPDATE_WITH_INDEX_PATTERN,
552 };
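// The two flavours map to different transfer commands: BUFFER_OP_FILL uses vkCmdFillBuffer, which
// replicates a single 32-bit value, while the update variants use vkCmdUpdateBuffer, which copies inline
// data from the command buffer and is limited to MAX_UPDATE_BUFFER_SIZE (65536) bytes.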
553
554 class Implementation : public Operation
555 {
556 public:
Implementation(OperationContext &context, Resource &resource, const BufferOp bufferOp)
558 : m_context(context)
559 , m_resource(resource)
560 , m_fillValue(0x13)
561 , m_bufferOp(bufferOp)
562 {
563 DE_ASSERT((m_resource.getBuffer().size % sizeof(uint32_t)) == 0);
564 DE_ASSERT(m_bufferOp == BUFFER_OP_FILL || m_resource.getBuffer().size <= MAX_UPDATE_BUFFER_SIZE);
565
566 m_data.resize(static_cast<size_t>(m_resource.getBuffer().size));
567
568 if (m_bufferOp == BUFFER_OP_FILL)
569 {
570 const std::size_t size = m_data.size() / sizeof(m_fillValue);
571 uint32_t *const pData = reinterpret_cast<uint32_t *>(&m_data[0]);
572
573 for (uint32_t i = 0; i < size; ++i)
574 pData[i] = m_fillValue;
575 }
576 else if (m_bufferOp == BUFFER_OP_UPDATE)
577 {
578 fillPattern(&m_data[0], m_data.size());
579 }
580 else if (m_bufferOp == BUFFER_OP_UPDATE_WITH_INDEX_PATTERN)
581 {
582 fillPattern(&m_data[0], m_data.size(), true);
583 }
584 }
585
void recordCommands(const VkCommandBuffer cmdBuffer)
587 {
588 const DeviceInterface &vk = m_context.getDeviceInterface();
589
590 if (m_bufferOp == BUFFER_OP_FILL)
591 {
592 vk.cmdFillBuffer(cmdBuffer, m_resource.getBuffer().handle, m_resource.getBuffer().offset,
593 m_resource.getBuffer().size, m_fillValue);
594
595 SynchronizationWrapperPtr synchronizationWrapper =
596 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
597 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
598 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
599 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
600 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
601 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
602 m_resource.getBuffer().handle, // VkBuffer buffer
603 0u, // VkDeviceSize offset
604 m_resource.getBuffer().size // VkDeviceSize size
605 );
606 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
607 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
608 }
609 else
610 vk.cmdUpdateBuffer(cmdBuffer, m_resource.getBuffer().handle, m_resource.getBuffer().offset,
611 m_resource.getBuffer().size, reinterpret_cast<uint32_t *>(&m_data[0]));
612 }
613
SyncInfo getInSyncInfo(void) const
615 {
616 return emptySyncInfo;
617 }
618
SyncInfo getOutSyncInfo(void) const
620 {
621 const SyncInfo syncInfo = {
622 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
623 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
624 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
625 };
626
627 return syncInfo;
628 }
629
Data getData(void) const
631 {
632 const Data data = {
633 m_data.size(), // std::size_t size;
634 &m_data[0], // const uint8_t* data;
635 };
636 return data;
637 }
638
void setData(const Data &data)
640 {
641 deMemcpy(&m_data[0], data.data, data.size);
642 }
643
644 private:
645 OperationContext &m_context;
646 Resource &m_resource;
647 std::vector<uint8_t> m_data;
648 const uint32_t m_fillValue;
649 const BufferOp m_bufferOp;
650 };
651
652 class Support : public OperationSupport
653 {
654 public:
Support(const ResourceDescription &resourceDesc, const BufferOp bufferOp)
656 : m_resourceDesc(resourceDesc)
657 , m_bufferOp(bufferOp)
658 {
659 DE_ASSERT(m_bufferOp == BUFFER_OP_FILL || m_bufferOp == BUFFER_OP_UPDATE ||
660 m_bufferOp == BUFFER_OP_UPDATE_WITH_INDEX_PATTERN);
661 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER || m_resourceDesc.type == RESOURCE_TYPE_INDEX_BUFFER);
662 }
663
uint32_t getInResourceUsageFlags(void) const
665 {
666 return 0;
667 }
668
uint32_t getOutResourceUsageFlags(void) const
670 {
671 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
672 }
673
VkQueueFlags getQueueFlags(const OperationContext &context) const
675 {
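// Without VK_KHR_maintenance1, vkCmdFillBuffer is only valid on queues with graphics or compute
// capability; the extension (and Vulkan 1.1) relaxed this so that pure transfer queues may be used.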
676 if (m_bufferOp == BUFFER_OP_FILL && !context.isDeviceFunctionalitySupported("VK_KHR_maintenance1"))
677 {
678 return VK_QUEUE_COMPUTE_BIT | VK_QUEUE_GRAPHICS_BIT;
679 }
680
681 return VK_QUEUE_TRANSFER_BIT;
682 }
683
de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
685 {
686 return de::MovePtr<Operation>(new Implementation(context, resource, m_bufferOp));
687 }
688
de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
690 {
691 DE_ASSERT(0);
692 return de::MovePtr<Operation>();
693 }
694
695 private:
696 const ResourceDescription m_resourceDesc;
697 const BufferOp m_bufferOp;
698 };
699
700 } // namespace FillUpdateBuffer
701
702 namespace CopyBuffer
703 {
704
705 class Implementation : public Operation
706 {
707 public:
Implementation(OperationContext &context, Resource &resource, const AccessMode mode)
709 : m_context(context)
710 , m_resource(resource)
711 , m_mode(mode)
712 {
713 const DeviceInterface &vk = m_context.getDeviceInterface();
714 const VkDevice device = m_context.getDevice();
715 Allocator &allocator = m_context.getAllocator();
716 const VkBufferUsageFlags hostBufferUsage =
717 (m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_DST_BIT : VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
718
719 m_hostBuffer = de::MovePtr<Buffer>(
720 new Buffer(vk, device, allocator, makeBufferCreateInfo(m_resource.getBuffer().size, hostBufferUsage),
721 MemoryRequirement::HostVisible));
722
723 const Allocation &alloc = m_hostBuffer->getAllocation();
724
725 if (m_mode == ACCESS_MODE_READ)
726 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_resource.getBuffer().size));
727 else
728 fillPattern(alloc.getHostPtr(), m_resource.getBuffer().size);
729
730 flushAlloc(vk, device, alloc);
731 }
732
void recordCommands(const VkCommandBuffer cmdBuffer)
734 {
735 const DeviceInterface &vk = m_context.getDeviceInterface();
736 const VkBufferCopy copyRegion = makeBufferCopy(0u, 0u, m_resource.getBuffer().size);
737 SynchronizationWrapperPtr synchronizationWrapper =
738 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
739
740 if (m_mode == ACCESS_MODE_READ)
741 {
vk.cmdCopyBuffer(cmdBuffer, m_resource.getBuffer().handle, **m_hostBuffer, 1u, &copyRegion);
743
744 // Insert a barrier so copied data is available to the host
745 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
746 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
747 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
748 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
749 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
750 **m_hostBuffer, // VkBuffer buffer
751 0u, // VkDeviceSize offset
752 m_resource.getBuffer().size // VkDeviceSize size
753 );
754 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
755 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
756 }
757 else
758 {
759 // Insert a barrier so buffer data is available to the device
760 //const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
761 // VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
762 // VK_ACCESS_2_HOST_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
763 // VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
764 // VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
765 // **m_hostBuffer, // VkBuffer buffer
766 // 0u, // VkDeviceSize offset
767 // m_resource.getBuffer().size // VkDeviceSize size
768 //);
769 //VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
770 //synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
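// Note (interpretation): the explicit host-write barrier above is left commented out, most likely
// because vkQueueSubmit already performs an implicit memory domain operation that makes prior host
// writes visible to the device, so no additional HOST -> TRANSFER barrier is needed here.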
771
vk.cmdCopyBuffer(cmdBuffer, **m_hostBuffer, m_resource.getBuffer().handle, 1u, &copyRegion);
773 }
774 }
775
SyncInfo getInSyncInfo(void) const
777 {
778 const VkAccessFlags access = (m_mode == ACCESS_MODE_READ ? VK_ACCESS_2_TRANSFER_READ_BIT_KHR : 0);
779 const SyncInfo syncInfo = {
780 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
781 access, // VkAccessFlags accessMask;
782 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
783 };
784 return syncInfo;
785 }
786
SyncInfo getOutSyncInfo(void) const
788 {
789 const VkAccessFlags access = (m_mode == ACCESS_MODE_WRITE ? VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR : 0);
790 const SyncInfo syncInfo = {
791 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
792 access, // VkAccessFlags accessMask;
793 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
794 };
795 return syncInfo;
796 }
797
Data getData(void) const
799 {
800 return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
801 }
802
void setData(const Data &data)
804 {
805 DE_ASSERT(m_mode == ACCESS_MODE_WRITE);
806 setHostBufferData(m_context, *m_hostBuffer, data);
807 }
808
809 private:
810 OperationContext &m_context;
811 Resource &m_resource;
812 const AccessMode m_mode;
813 de::MovePtr<Buffer> m_hostBuffer;
814 };
815
816 class Support : public OperationSupport
817 {
818 public:
Support(const ResourceDescription &resourceDesc, const AccessMode mode) : m_mode(mode)
820 {
821 DE_ASSERT(resourceDesc.type == RESOURCE_TYPE_BUFFER);
822 DE_UNREF(resourceDesc);
823 }
824
uint32_t getInResourceUsageFlags(void) const
826 {
827 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_SRC_BIT : 0;
828 }
829
uint32_t getOutResourceUsageFlags(void) const
831 {
832 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_TRANSFER_DST_BIT : 0;
833 }
834
VkQueueFlags getQueueFlags(const OperationContext &context) const
836 {
837 DE_UNREF(context);
838 return VK_QUEUE_TRANSFER_BIT;
839 }
840
de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
842 {
843 return de::MovePtr<Operation>(new Implementation(context, resource, m_mode));
844 }
845
de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
847 {
848 DE_ASSERT(0);
849 return de::MovePtr<Operation>();
850 }
851
852 private:
853 const AccessMode m_mode;
854 };
855
856 class CopyImplementation : public Operation
857 {
858 public:
CopyImplementation(OperationContext &context, Resource &inResource, Resource &outResource)
860 : m_context(context)
861 , m_inResource(inResource)
862 , m_outResource(outResource)
863 {
864 }
865
void recordCommands(const VkCommandBuffer cmdBuffer)
867 {
868 const DeviceInterface &vk = m_context.getDeviceInterface();
869 const VkBufferCopy copyRegion = makeBufferCopy(0u, 0u, m_inResource.getBuffer().size);
870
vk.cmdCopyBuffer(cmdBuffer, m_inResource.getBuffer().handle, m_outResource.getBuffer().handle, 1u, &copyRegion);
872 }
873
SyncInfo getInSyncInfo(void) const
875 {
876 const SyncInfo syncInfo = {
877 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
878 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
879 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
880 };
881 return syncInfo;
882 }
883
SyncInfo getOutSyncInfo(void) const
885 {
886 const SyncInfo syncInfo = {
887 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
888 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
889 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
890 };
891 return syncInfo;
892 }
893
Data getData(void) const
895 {
896 Data data = {0, DE_NULL};
897 return data;
898 }
899
void setData(const Data &)
901 {
902 DE_ASSERT(0);
903 }
904
905 private:
906 OperationContext &m_context;
907 Resource &m_inResource;
908 Resource &m_outResource;
909 de::MovePtr<Buffer> m_hostBuffer;
910 };
911
912 class CopySupport : public OperationSupport
913 {
914 public:
CopySupport(const ResourceDescription &resourceDesc)
916 {
917 DE_ASSERT(resourceDesc.type == RESOURCE_TYPE_BUFFER);
918 DE_UNREF(resourceDesc);
919 }
920
uint32_t getInResourceUsageFlags(void) const
922 {
923 return VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
924 }
925
uint32_t getOutResourceUsageFlags(void) const
927 {
928 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
929 }
930
VkQueueFlags getQueueFlags(const OperationContext &context) const
932 {
933 DE_UNREF(context);
934 return VK_QUEUE_TRANSFER_BIT;
935 }
936
de::MovePtr<Operation> build(OperationContext &, Resource &) const
938 {
939 DE_ASSERT(0);
940 return de::MovePtr<Operation>();
941 }
942
de::MovePtr<Operation> build(OperationContext &context, Resource &inResource, Resource &outResource) const
944 {
945 return de::MovePtr<Operation>(new CopyImplementation(context, inResource, outResource));
946 }
947 };
948
949 } // namespace CopyBuffer
950
951 namespace CopyBlitResolveImage
952 {
953
954 class ImplementationBase : public Operation
955 {
956 public:
957 //! Copy/Blit/Resolve etc. operation
958 virtual void recordCopyCommand(const VkCommandBuffer cmdBuffer) = 0;
959
//! Get the source stage mask used during reads. Added to exercise the new synchronization2 stage masks.
961 virtual VkPipelineStageFlags2KHR getReadSrcStageMask() const = 0;
962
ImplementationBase(OperationContext &context, Resource &resource, const AccessMode mode)
964 : m_context(context)
965 , m_resource(resource)
966 , m_mode(mode)
967 , m_bufferSize(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
968 {
969 const DeviceInterface &vk = m_context.getDeviceInterface();
970 const VkDevice device = m_context.getDevice();
971 Allocator &allocator = m_context.getAllocator();
972
973 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
974 vk, device, allocator,
975 makeBufferCreateInfo(m_bufferSize, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT),
976 MemoryRequirement::HostVisible));
977
978 const Allocation &alloc = m_hostBuffer->getAllocation();
979 if (m_mode == ACCESS_MODE_READ)
980 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_bufferSize));
981 else
982 fillPattern(alloc.getHostPtr(), m_bufferSize);
983 flushAlloc(vk, device, alloc);
984
985 // Staging image
986 const auto &imgResource = m_resource.getImage();
987 m_image = de::MovePtr<Image>(
988 new Image(vk, device, allocator,
989 makeImageCreateInfo(imgResource.imageType, imgResource.extent, imgResource.format,
990 (VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
991 VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
992 MemoryRequirement::Any));
993 }
994
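// Command flow (summary): the staging image is first transitioned to TRANSFER_DST. For reads, the
// derived recordCopyCommand() copies resource -> staging, the staging image moves to TRANSFER_SRC and is
// copied into the host buffer, followed by a transfer -> host barrier. For writes, the host buffer is
// copied into the staging image, both images are transitioned, and recordCopyCommand() then copies
// staging -> resource.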
void recordCommands(const VkCommandBuffer cmdBuffer)
996 {
997 const DeviceInterface &vk = m_context.getDeviceInterface();
998 const VkBufferImageCopy bufferCopyRegion =
999 makeBufferImageCopy(m_resource.getImage().extent, m_resource.getImage().subresourceLayers);
1000 SynchronizationWrapperPtr synchronizationWrapper =
1001 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
1002
1003 // Staging image layout
1004 {
1005 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1006 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
1007 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
1008 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
1009 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1010 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1011 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
1012 **m_image, // VkImage image
1013 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1014 );
1015 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1016 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1017 }
1018
1019 if (m_mode == ACCESS_MODE_READ)
1020 {
1021 // Resource Image -> Staging image
1022 recordCopyCommand(cmdBuffer);
1023
1024 // Staging image layout
1025 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1026 getReadSrcStageMask(), // VkPipelineStageFlags2KHR srcStageMask
1027 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
1028 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
1029 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1030 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
1031 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
1032 **m_image, // VkImage image
1033 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1034 );
1035 VkDependencyInfoKHR imageDependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1036 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &imageDependencyInfo);
1037
1038 // Image -> Host buffer
1039 vk.cmdCopyImageToBuffer(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_hostBuffer, 1u,
1040 &bufferCopyRegion);
1041
1042 // Insert a barrier so copied data is available to the host
1043 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
1044 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
1045 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
1046 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
1047 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1048 **m_hostBuffer, // VkBuffer buffer
1049 0u, // VkDeviceSize offset
1050 m_bufferSize // VkDeviceSize size
1051 );
1052 VkDependencyInfoKHR bufferDependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
1053 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &bufferDependencyInfo);
1054 }
1055 else
1056 {
1057 // Host buffer -> Staging image
1058 vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u,
1059 &bufferCopyRegion);
1060
1061 // Staging image layout
1062 {
1063 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1064 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
1065 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
1066 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
1067 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1068 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
1069 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
1070 **m_image, // VkImage image
1071 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1072 );
1073 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1074 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1075 }
1076
1077 // Resource image layout
1078 {
1079 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1080 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
1081 (VkAccessFlags2KHR)0, // VkAccessFlags2KHR srcAccessMask
1082 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
1083 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1084 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1085 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
1086 m_resource.getImage().handle, // VkImage image
1087 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1088 );
1089 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1090 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1091 }
1092
1093 // Staging image -> Resource Image
1094 recordCopyCommand(cmdBuffer);
1095 }
1096 }
1097
SyncInfo getInSyncInfo(void) const
1099 {
1100 const VkAccessFlags2KHR access =
1101 (m_mode == ACCESS_MODE_READ ? VK_ACCESS_2_TRANSFER_READ_BIT_KHR : VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR);
1102 const VkImageLayout layout =
1103 (m_mode == ACCESS_MODE_READ ? VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL : VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
1104 const SyncInfo syncInfo = {
1105 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
1106 access, // VkAccessFlags accessMask;
1107 layout, // VkImageLayout imageLayout;
1108 };
1109 return syncInfo;
1110 }
1111
SyncInfo getOutSyncInfo(void) const
1113 {
1114 const VkAccessFlags2KHR access =
1115 (m_mode == ACCESS_MODE_READ ? VK_ACCESS_2_TRANSFER_READ_BIT_KHR : VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR);
1116 const VkImageLayout layout =
1117 (m_mode == ACCESS_MODE_READ ? VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL : VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
1118 const SyncInfo syncInfo = {
1119 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
1120 access, // VkAccessFlags accessMask;
1121 layout, // VkImageLayout imageLayout;
1122 };
1123 return syncInfo;
1124 }
1125
Data getData(void) const
1127 {
1128 return getHostBufferData(m_context, *m_hostBuffer, m_bufferSize);
1129 }
1130
void setData(const Data &data)
1132 {
1133 DE_ASSERT(m_mode == ACCESS_MODE_WRITE);
1134 setHostBufferData(m_context, *m_hostBuffer, data);
1135 }
1136
1137 protected:
1138 OperationContext &m_context;
1139 Resource &m_resource;
1140 const AccessMode m_mode;
1141 const VkDeviceSize m_bufferSize;
1142 de::MovePtr<Buffer> m_hostBuffer;
1143 de::MovePtr<Image> m_image;
1144 };
1145
VkOffset3D makeExtentOffset(const Resource &resource)
1147 {
1148 DE_ASSERT(resource.getType() == RESOURCE_TYPE_IMAGE);
1149 const VkExtent3D extent = resource.getImage().extent;
1150
1151 switch (resource.getImage().imageType)
1152 {
1153 case VK_IMAGE_TYPE_1D:
1154 return makeOffset3D(extent.width, 1, 1);
1155 case VK_IMAGE_TYPE_2D:
1156 return makeOffset3D(extent.width, extent.height, 1);
1157 case VK_IMAGE_TYPE_3D:
1158 return makeOffset3D(extent.width, extent.height, extent.depth);
1159 default:
1160 DE_ASSERT(0);
1161 return VkOffset3D();
1162 }
1163 }
1164
VkImageBlit makeBlitRegion(const Resource &resource)
1166 {
1167 const VkImageBlit blitRegion = {
1168 resource.getImage().subresourceLayers, // VkImageSubresourceLayers srcSubresource;
1169 {makeOffset3D(0, 0, 0), makeExtentOffset(resource)}, // VkOffset3D srcOffsets[2];
1170 resource.getImage().subresourceLayers, // VkImageSubresourceLayers dstSubresource;
1171 {makeOffset3D(0, 0, 0), makeExtentOffset(resource)}, // VkOffset3D dstOffsets[2];
1172 };
1173 return blitRegion;
1174 }
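// Because the source and destination offsets span the same full extent, the resulting blit performs no
// scaling, so VK_FILTER_NEAREST reproduces the source texels exactly (a blit used as a copy).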
1175
1176 class BlitImplementation : public ImplementationBase
1177 {
1178 public:
BlitImplementation(OperationContext &context, Resource &resource, const AccessMode mode)
1180 : ImplementationBase(context, resource, mode)
1181 , m_blitRegion(makeBlitRegion(m_resource))
1182 {
1183 const InstanceInterface &vki = m_context.getInstanceInterface();
1184 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
1185 const auto &imgResource = m_resource.getImage();
1186 const VkFormatProperties formatProps = getPhysicalDeviceFormatProperties(vki, physDevice, imgResource.format);
1187 const auto &features = ((imgResource.tiling == VK_IMAGE_TILING_LINEAR) ? formatProps.linearTilingFeatures :
1188 formatProps.optimalTilingFeatures);
1189 const VkFormatFeatureFlags requiredFlags = (VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT);
1190
1191 // Blit image command not allowed when using --deqp-compute-only=enable
1192 if (m_context.isComputeOnly())
1193 THROW_NOT_SUPPORTED_COMPUTE_ONLY();
1194
// Both BLIT_SRC and BLIT_DST support are required because the two images use the same format.
1196 if ((features & requiredFlags) != requiredFlags)
1197 TCU_THROW(NotSupportedError, "Format doesn't support blits");
1198 }
1199
void recordCopyCommand(const VkCommandBuffer cmdBuffer)
1201 {
1202 const DeviceInterface &vk = m_context.getDeviceInterface();
1203
1204 if (m_mode == ACCESS_MODE_READ)
1205 {
1206 // Resource Image -> Staging image
1207 vk.cmdBlitImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_image,
1208 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_blitRegion, VK_FILTER_NEAREST);
1209 }
1210 else
1211 {
1212 // Staging image -> Resource Image
1213 vk.cmdBlitImage(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_resource.getImage().handle,
1214 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_blitRegion, VK_FILTER_NEAREST);
1215 }
1216 }
1217
VkPipelineStageFlags2KHR getReadSrcStageMask() const
1219 {
1220 return (m_context.getSynchronizationType() == SynchronizationType::LEGACY) ?
1221 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR :
1222 VK_PIPELINE_STAGE_2_BLIT_BIT_KHR;
1223 }
1224
1225 private:
1226 const VkImageBlit m_blitRegion;
1227 };
1228
1229 template <typename ImageCopyOrResolve>
ImageCopyOrResolve makeImageRegion(const Resource &resource)
1231 {
1232 return {
1233 resource.getImage().subresourceLayers, // VkImageSubresourceLayers srcSubresource;
1234 makeOffset3D(0, 0, 0), // VkOffset3D srcOffset;
1235 resource.getImage().subresourceLayers, // VkImageSubresourceLayers dstSubresource;
1236 makeOffset3D(0, 0, 0), // VkOffset3D dstOffset;
1237 resource.getImage().extent, // VkExtent3D extent;
1238 };
1239 }
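// A single template works here because VkImageCopy and VkImageResolve share the same member layout
// (srcSubresource, srcOffset, dstSubresource, dstOffset, extent), so aggregate initialization fills
// either struct correctly.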
1240
1241 class CopyImplementation : public ImplementationBase
1242 {
1243 public:
CopyImplementation(OperationContext &context, Resource &resource, const AccessMode mode)
1245 : ImplementationBase(context, resource, mode)
1246 , m_imageCopyRegion(makeImageRegion<VkImageCopy>(m_resource))
1247 {
1248 }
1249
void recordCopyCommand(const VkCommandBuffer cmdBuffer)
1251 {
1252 const DeviceInterface &vk = m_context.getDeviceInterface();
1253
1254 if (m_mode == ACCESS_MODE_READ)
1255 {
1256 // Resource Image -> Staging image
1257 vk.cmdCopyImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_image,
1258 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_imageCopyRegion);
1259 }
1260 else
1261 {
1262 // Staging image -> Resource Image
1263 vk.cmdCopyImage(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_resource.getImage().handle,
1264 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_imageCopyRegion);
1265 }
1266 }
1267
VkPipelineStageFlags2KHR getReadSrcStageMask() const
1269 {
1270 return (m_context.getSynchronizationType() == SynchronizationType::LEGACY) ?
1271 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR :
1272 VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
1273 }
1274
1275 private:
1276 const VkImageCopy m_imageCopyRegion;
1277 };
1278
1279 class ResolveImplementation : public ImplementationBase
1280 {
1281 public:
ResolveImplementation(OperationContext &context, Resource &resource, const AccessMode mode)
1283 : ImplementationBase(context, resource, mode)
1284 , m_imageResolveRegion(makeImageRegion<VkImageResolve>(resource))
1285 {
1286 DE_ASSERT(m_mode == ACCESS_MODE_READ);
1287 }
1288
void recordCopyCommand(const VkCommandBuffer cmdBuffer)
1290 {
1291 const DeviceInterface &vk = m_context.getDeviceInterface();
1292
1293 // Resource Image -> Staging image
1294 vk.cmdResolveImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_image,
1295 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_imageResolveRegion);
1296 }
1297
VkPipelineStageFlags2KHR getReadSrcStageMask() const
1299 {
1300 return (m_context.getSynchronizationType() == SynchronizationType::LEGACY) ?
1301 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR :
1302 VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR;
1303 }
1304
1305 private:
1306 VkImageResolve m_imageResolveRegion;
1307 };
1308
1309 enum Type
1310 {
1311 TYPE_COPY,
1312 TYPE_BLIT,
1313 TYPE_RESOLVE,
1314 };
1315
1316 class Support : public OperationSupport
1317 {
1318 public:
Support(const ResourceDescription &resourceDesc, const Type type, const AccessMode mode)
1320 : m_type(type)
1321 , m_mode(mode)
1322 {
1323 DE_ASSERT(resourceDesc.type == RESOURCE_TYPE_IMAGE);
1324
1325 const bool isDepthStencil = isDepthStencilFormat(resourceDesc.imageFormat);
1326 m_requiredQueueFlags = (isDepthStencil || m_type != TYPE_COPY ? VK_QUEUE_GRAPHICS_BIT : VK_QUEUE_TRANSFER_BIT);
1327
1328 // Don't blit depth/stencil images.
1329 DE_ASSERT(m_type != TYPE_BLIT || !isDepthStencil);
1330 }
1331
uint32_t getInResourceUsageFlags(void) const
1333 {
1334 return (m_mode == ACCESS_MODE_READ ? VK_IMAGE_USAGE_TRANSFER_SRC_BIT : 0);
1335 }
1336
uint32_t getOutResourceUsageFlags(void) const
1338 {
1339 return (m_mode == ACCESS_MODE_WRITE ? VK_IMAGE_USAGE_TRANSFER_DST_BIT : 0);
1340 }
1341
VkQueueFlags getQueueFlags(const OperationContext &context) const
1343 {
1344 DE_UNREF(context);
1345 return m_requiredQueueFlags;
1346 }
1347
de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
1349 {
1350 if (m_type == TYPE_COPY)
1351 return de::MovePtr<Operation>(new CopyImplementation(context, resource, m_mode));
1352 else if (m_type == TYPE_BLIT)
1353 return de::MovePtr<Operation>(new BlitImplementation(context, resource, m_mode));
1354 else
1355 return de::MovePtr<Operation>(new ResolveImplementation(context, resource, m_mode));
1356 }
1357
de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
1359 {
1360 DE_ASSERT(0);
1361 return de::MovePtr<Operation>();
1362 }
1363
1364 private:
1365 const Type m_type;
1366 const AccessMode m_mode;
1367 VkQueueFlags m_requiredQueueFlags;
1368 };
1369
1370 class BlitCopyImplementation : public Operation
1371 {
1372 public:
BlitCopyImplementation(OperationContext &context, Resource &inResource, Resource &outResource)
1374 : m_context(context)
1375 , m_inResource(inResource)
1376 , m_outResource(outResource)
1377 , m_blitRegion(makeBlitRegion(m_inResource))
1378 {
1379 DE_ASSERT(m_inResource.getType() == RESOURCE_TYPE_IMAGE);
1380 DE_ASSERT(m_outResource.getType() == RESOURCE_TYPE_IMAGE);
1381
1382 const InstanceInterface &vki = m_context.getInstanceInterface();
1383 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
1384 const auto &imgResource = m_inResource.getImage();
1385 const VkFormatProperties formatProps = getPhysicalDeviceFormatProperties(vki, physDevice, imgResource.format);
1386 const auto &features = ((imgResource.tiling == VK_IMAGE_TILING_LINEAR) ? formatProps.linearTilingFeatures :
1387 formatProps.optimalTilingFeatures);
1388 const VkFormatFeatureFlags requiredFlags = (VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT);
1389
// Both BLIT_SRC and BLIT_DST support are required because the two images use the same format.
1391 if ((features & requiredFlags) != requiredFlags)
1392 TCU_THROW(NotSupportedError, "Format doesn't support blits");
1393 }
1394
void recordCommands(const VkCommandBuffer cmdBuffer)
1396 {
1397 const DeviceInterface &vk = m_context.getDeviceInterface();
1398 SynchronizationWrapperPtr synchronizationWrapper =
1399 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
1400
1401 {
1402 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1403 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
1404 (VkAccessFlags2KHR)0, // VkAccessFlags2KHR srcAccessMask
1405 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR dstStageMask
1406 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1407 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1408 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
1409 m_outResource.getImage().handle, // VkImage image
1410 m_outResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1411 );
1412 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1413 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1414 }
1415
1416 vk.cmdBlitImage(cmdBuffer, m_inResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1417 m_outResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_blitRegion,
1418 VK_FILTER_NEAREST);
1419 }
1420
SyncInfo getInSyncInfo(void) const
1422 {
1423 const SyncInfo syncInfo = {
1424 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags stageMask;
1425 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
1426 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout imageLayout;
1427 };
1428 return syncInfo;
1429 }
1430
SyncInfo getOutSyncInfo(void) const
1432 {
1433 const SyncInfo syncInfo = {
1434 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags stageMask;
1435 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
1436 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
1437 };
1438 return syncInfo;
1439 }
1440
Data getData(void) const
1442 {
1443 Data data = {0, DE_NULL};
1444 return data;
1445 }
1446
void setData(const Data &)
1448 {
1449 DE_ASSERT(0);
1450 }
1451
1452 private:
1453 OperationContext &m_context;
1454 Resource &m_inResource;
1455 Resource &m_outResource;
1456 const VkImageBlit m_blitRegion;
1457 };
1458
1459 class CopyCopyImplementation : public Operation
1460 {
1461 public:
CopyCopyImplementation(OperationContext &context, Resource &inResource, Resource &outResource)
1463 : m_context(context)
1464 , m_inResource(inResource)
1465 , m_outResource(outResource)
1466 , m_imageCopyRegion(makeImageRegion<VkImageCopy>(m_inResource))
1467 {
1468 DE_ASSERT(m_inResource.getType() == RESOURCE_TYPE_IMAGE);
1469 DE_ASSERT(m_outResource.getType() == RESOURCE_TYPE_IMAGE);
1470 }
1471
void recordCommands(const VkCommandBuffer cmdBuffer)
1473 {
1474 const DeviceInterface &vk = m_context.getDeviceInterface();
1475 SynchronizationWrapperPtr synchronizationWrapper =
1476 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
1477
1478 {
1479 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1480 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
1481 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
1482 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR dstStageMask
1483 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1484 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1485 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
1486 m_outResource.getImage().handle, // VkImage image
1487 m_outResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1488 );
1489 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1490 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1491 }
1492
1493 vk.cmdCopyImage(cmdBuffer, m_inResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1494 m_outResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_imageCopyRegion);
1495 }
1496
SyncInfo getInSyncInfo(void) const
1498 {
1499 const SyncInfo syncInfo = {
1500 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags stageMask;
1501 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
1502 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout imageLayout;
1503 };
1504 return syncInfo;
1505 }
1506
SyncInfo getOutSyncInfo(void) const
1508 {
1509 const SyncInfo syncInfo = {
1510 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags stageMask;
1511 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
1512 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
1513 };
1514 return syncInfo;
1515 }
1516
1517 Data getData(void) const
1518 {
1519 Data data = {0, DE_NULL};
1520 return data;
1521 }
1522
1523 void setData(const Data &)
1524 {
1525 DE_ASSERT(0);
1526 }
1527
1528 private:
1529 OperationContext &m_context;
1530 Resource &m_inResource;
1531 Resource &m_outResource;
1532 const VkImageCopy m_imageCopyRegion;
1533 };
1534
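//! Creates either a copy or a blit operation between two images, depending on the requested type.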
1535 class CopySupport : public OperationSupport
1536 {
1537 public:
1538 CopySupport(const ResourceDescription &resourceDesc, const Type type) : m_type(type)
1539 {
1540 DE_ASSERT(resourceDesc.type == RESOURCE_TYPE_IMAGE);
1541
1542 const bool isDepthStencil = isDepthStencilFormat(resourceDesc.imageFormat);
1543 m_requiredQueueFlags = (isDepthStencil || m_type == TYPE_BLIT ? VK_QUEUE_GRAPHICS_BIT : VK_QUEUE_TRANSFER_BIT);
1544
1545 // Don't blit depth/stencil images.
1546 DE_ASSERT(m_type != TYPE_BLIT || !isDepthStencil);
1547 }
1548
1549 uint32_t getInResourceUsageFlags(void) const
1550 {
1551 return VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1552 }
1553
1554 uint32_t getOutResourceUsageFlags(void) const
1555 {
1556 return VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1557 }
1558
1559 VkQueueFlags getQueueFlags(const OperationContext &context) const
1560 {
1561 DE_UNREF(context);
1562 return m_requiredQueueFlags;
1563 }
1564
1565 de::MovePtr<Operation> build(OperationContext &, Resource &) const
1566 {
1567 DE_ASSERT(0);
1568 return de::MovePtr<Operation>();
1569 }
1570
1571 de::MovePtr<Operation> build(OperationContext &context, Resource &inResource, Resource &outResource) const
1572 {
1573 if (m_type == TYPE_COPY)
1574 return de::MovePtr<Operation>(new CopyCopyImplementation(context, inResource, outResource));
1575 else
1576 return de::MovePtr<Operation>(new BlitCopyImplementation(context, inResource, outResource));
1577 }
1578
1579 private:
1580 const Type m_type;
1581 VkQueueFlags m_requiredQueueFlags;
1582 };
1583
1584 } // namespace CopyBlitResolveImage
1585
1586 namespace ShaderAccess
1587 {
1588
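//! Whether the compute pipeline is launched with vkCmdDispatch or vkCmdDispatchIndirect.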
1589 enum DispatchCall
1590 {
1591 DISPATCH_CALL_DISPATCH,
1592 DISPATCH_CALL_DISPATCH_INDIRECT,
1593 };
1594
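//! Graphics pipeline that draws the viewport-covering vertex grid into a small offscreen color attachment,
//! running the tested shader stage together with any stages it requires.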
1595 class GraphicsPipeline : public Pipeline
1596 {
1597 public:
1598 GraphicsPipeline(OperationContext &context, const VkShaderStageFlagBits stage, const std::string &shaderPrefix,
1599 const VkDescriptorSetLayout descriptorSetLayout)
1600 : m_vertices(context)
1601 {
1602 const DeviceInterface &vk = context.getDeviceInterface();
1603 const VkDevice device = context.getDevice();
1604 Allocator &allocator = context.getAllocator();
1605 const VkShaderStageFlags requiredStages = getRequiredStages(stage);
1606
1607 // Color attachment
1608
1609 m_colorFormat = VK_FORMAT_R8G8B8A8_UNORM;
1610 m_colorImageSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
1611 m_colorImageExtent = makeExtent3D(16u, 16u, 1u);
1612 m_colorAttachmentImage = de::MovePtr<Image>(new Image(
1613 vk, device, allocator,
1614 makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_colorImageExtent, m_colorFormat,
1615 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
1616 MemoryRequirement::Any));
1617
1618 // Pipeline
1619
1620 m_colorAttachmentView = makeImageView(vk, device, **m_colorAttachmentImage, VK_IMAGE_VIEW_TYPE_2D,
1621 m_colorFormat, m_colorImageSubresourceRange);
1622 m_renderPass = makeRenderPass(vk, device, m_colorFormat);
1623 m_framebuffer = makeFramebuffer(vk, device, *m_renderPass, *m_colorAttachmentView, m_colorImageExtent.width,
1624 m_colorImageExtent.height);
1625 m_pipelineLayout = makePipelineLayout(vk, device, descriptorSetLayout);
1626
1627 GraphicsPipelineBuilder pipelineBuilder;
1628 pipelineBuilder.setRenderSize(tcu::IVec2(m_colorImageExtent.width, m_colorImageExtent.height))
1629 .setVertexInputSingleAttribute(m_vertices.getVertexFormat(), m_vertices.getVertexStride())
1630 .setShader(vk, device, VK_SHADER_STAGE_VERTEX_BIT, context.getBinaryCollection().get(shaderPrefix + "vert"),
1631 DE_NULL)
1632 .setShader(vk, device, VK_SHADER_STAGE_FRAGMENT_BIT,
1633 context.getBinaryCollection().get(shaderPrefix + "frag"), DE_NULL);
1634
1635 if (requiredStages & (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))
1636 pipelineBuilder.setPatchControlPoints(m_vertices.getNumVertices())
1637 .setShader(vk, device, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
1638 context.getBinaryCollection().get(shaderPrefix + "tesc"), DE_NULL)
1639 .setShader(vk, device, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
1640 context.getBinaryCollection().get(shaderPrefix + "tese"), DE_NULL);
1641
1642 if (requiredStages & VK_SHADER_STAGE_GEOMETRY_BIT)
1643 pipelineBuilder.setShader(vk, device, VK_SHADER_STAGE_GEOMETRY_BIT,
1644 context.getBinaryCollection().get(shaderPrefix + "geom"), DE_NULL);
1645
1646 m_pipeline = pipelineBuilder.build(vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData(),
1647 context.getResourceInterface());
1648 }
1649
1650 void recordCommands(OperationContext &context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
1651 {
1652 const DeviceInterface &vk = context.getDeviceInterface();
1653 SynchronizationWrapperPtr synchronizationWrapper =
1654 getSynchronizationWrapper(context.getSynchronizationType(), vk, false);
1655
1656 // Change color attachment image layout
1657 {
1658 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1659 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
1660 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
1661 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, // VkPipelineStageFlags2KHR dstStageMask
1662 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1663 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1664 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout newLayout
1665 **m_colorAttachmentImage, // VkImage image
1666 m_colorImageSubresourceRange // VkImageSubresourceRange subresourceRange
1667 );
1668 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1669 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1670 }
1671
1672 {
1673 const VkRect2D renderArea = makeRect2D(m_colorImageExtent);
1674 const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
1675
1676 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
1677 }
1678
1679 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
1680 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1u, &descriptorSet,
1681 0u, DE_NULL);
1682 {
1683 const VkDeviceSize vertexBufferOffset = 0ull;
1684 const VkBuffer vertexBuffer = m_vertices.getVertexBuffer();
1685 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &vertexBufferOffset);
1686 }
1687
1688 vk.cmdDraw(cmdBuffer, m_vertices.getNumVertices(), 1u, 0u, 0u);
1689 endRenderPass(vk, cmdBuffer);
1690 }
1691
1692 private:
1693 const VertexGrid m_vertices;
1694 VkFormat m_colorFormat;
1695 de::MovePtr<Image> m_colorAttachmentImage;
1696 Move<VkImageView> m_colorAttachmentView;
1697 VkExtent3D m_colorImageExtent;
1698 VkImageSubresourceRange m_colorImageSubresourceRange;
1699 Move<VkRenderPass> m_renderPass;
1700 Move<VkFramebuffer> m_framebuffer;
1701 Move<VkPipelineLayout> m_pipelineLayout;
1702 Move<VkPipeline> m_pipeline;
1703 };
1704
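//! Compute pipeline that performs a single workgroup dispatch, either directly or through an indirect buffer.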
1705 class ComputePipeline : public Pipeline
1706 {
1707 public:
1708 ComputePipeline(OperationContext &context, const DispatchCall dispatchCall, const std::string &shaderPrefix,
1709 const VkDescriptorSetLayout descriptorSetLayout)
1710 : m_dispatchCall(dispatchCall)
1711 {
1712 const DeviceInterface &vk = context.getDeviceInterface();
1713 const VkDevice device = context.getDevice();
1714 Allocator &allocator = context.getAllocator();
1715
1716 if (m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT)
1717 {
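// Host-visible indirect buffer holding a single 1x1x1 dispatch command.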
1718 m_indirectBuffer = de::MovePtr<Buffer>(
1719 new Buffer(vk, device, allocator,
1720 makeBufferCreateInfo(sizeof(VkDispatchIndirectCommand), VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT),
1721 MemoryRequirement::HostVisible));
1722
1723 const Allocation &alloc = m_indirectBuffer->getAllocation();
1724 VkDispatchIndirectCommand *const pIndirectCommand =
1725 static_cast<VkDispatchIndirectCommand *>(alloc.getHostPtr());
1726
1727 pIndirectCommand->x = 1u;
1728 pIndirectCommand->y = 1u;
1729 pIndirectCommand->z = 1u;
1730
1731 flushAlloc(vk, device, alloc);
1732 }
1733
1734 const Unique<VkShaderModule> shaderModule(createShaderModule(
1735 vk, device, context.getBinaryCollection().get(shaderPrefix + "comp"), (VkShaderModuleCreateFlags)0));
1736
1737 m_pipelineLayout = makePipelineLayout(vk, device, descriptorSetLayout);
1738 m_pipeline = makeComputePipeline(vk, device, *m_pipelineLayout, *shaderModule, DE_NULL,
1739 context.getPipelineCacheData(), context.getResourceInterface());
1740 }
1741
1742 void recordCommands(OperationContext &context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
1743 {
1744 const DeviceInterface &vk = context.getDeviceInterface();
1745
1746 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipeline);
1747 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0u, 1u, &descriptorSet,
1748 0u, DE_NULL);
1749
1750 if (m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT)
1751 vk.cmdDispatchIndirect(cmdBuffer, **m_indirectBuffer, 0u);
1752 else
1753 vk.cmdDispatch(cmdBuffer, 1u, 1u, 1u);
1754 }
1755
1756 private:
1757 const DispatchCall m_dispatchCall;
1758 de::MovePtr<Buffer> m_indirectBuffer;
1759 Move<VkPipelineLayout> m_pipelineLayout;
1760 Move<VkPipeline> m_pipeline;
1761 };
1762
1763 //! Read/write operation on a UBO/SSBO in graphics/compute pipeline.
1764 class BufferImplementation : public Operation
1765 {
1766 public:
1767 BufferImplementation(OperationContext &context, Resource &resource, const VkShaderStageFlagBits stage,
1768 const BufferType bufferType, const std::string &shaderPrefix, const AccessMode mode,
1769 const bool specializedAccess, const PipelineType pipelineType, const DispatchCall dispatchCall)
1770 : Operation(specializedAccess)
1771 , m_context(context)
1772 , m_resource(resource)
1773 , m_stage(stage)
1774 , m_pipelineStage(pipelineStageFlagsFromShaderStageFlagBits(m_stage))
1775 , m_bufferType(bufferType)
1776 , m_mode(mode)
1777 , m_dispatchCall(dispatchCall)
1778 {
1779 requireFeaturesForSSBOAccess(m_context, m_stage);
1780
1781 const DeviceInterface &vk = m_context.getDeviceInterface();
1782 const VkDevice device = m_context.getDevice();
1783 Allocator &allocator = m_context.getAllocator();
1784
1785 m_hostBuffer = de::MovePtr<Buffer>(
1786 new Buffer(vk, device, allocator,
1787 makeBufferCreateInfo(m_resource.getBuffer().size, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT),
1788 MemoryRequirement::HostVisible));
1789
1790 // Init host buffer data
1791 {
1792 const Allocation &alloc = m_hostBuffer->getAllocation();
1793 if (m_mode == ACCESS_MODE_READ)
1794 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_resource.getBuffer().size));
1795 else
1796 fillPattern(alloc.getHostPtr(), m_resource.getBuffer().size);
1797 flushAlloc(vk, device, alloc);
1798 }
1799 // Prepare descriptors
1800 {
1801 VkDescriptorType bufferDescriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
1802
1803 if (m_bufferType == BUFFER_TYPE_UNIFORM)
1804 bufferDescriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
1805 else if (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL)
1806 bufferDescriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
1807
1808 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
1809 .addSingleBinding(bufferDescriptorType, m_stage)
1810 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, m_stage)
1811 .build(vk, device);
1812
1813 m_descriptorPool = DescriptorPoolBuilder()
1814 .addType(bufferDescriptorType)
1815 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
1816 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
1817
1818 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
1819
1820 if (m_mode == ACCESS_MODE_READ)
1821 {
1822 if ((m_bufferType == BUFFER_TYPE_UNIFORM) || (m_bufferType == BUFFER_TYPE_STORAGE))
1823 {
1824 const VkDescriptorBufferInfo bufferInfo = makeDescriptorBufferInfo(
1825 m_resource.getBuffer().handle, m_resource.getBuffer().offset, m_resource.getBuffer().size);
1826 const VkDescriptorBufferInfo hostBufferInfo =
1827 makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_resource.getBuffer().size);
1828 DescriptorSetUpdateBuilder()
1829 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
1830 bufferDescriptorType, &bufferInfo)
1831 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u),
1832 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &hostBufferInfo)
1833 .update(vk, device);
1834 }
1835 else
1836 {
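// Uniform texel buffer: bind the input through a buffer view instead of a buffer descriptor.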
1837 m_pBufferView =
1838 vk::makeBufferView(vk, device, m_resource.getBuffer().handle, VK_FORMAT_R32G32B32A32_UINT,
1839 m_resource.getBuffer().offset, m_resource.getBuffer().size);
1840 const VkDescriptorBufferInfo hostBufferInfo =
1841 makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_resource.getBuffer().size);
1842 DescriptorSetUpdateBuilder()
1843 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
1844 bufferDescriptorType, &m_pBufferView.get())
1845 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u),
1846 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &hostBufferInfo)
1847 .update(vk, device);
1848 }
1849 }
1850 else
1851 {
1852 const VkDescriptorBufferInfo bufferInfo = makeDescriptorBufferInfo(
1853 m_resource.getBuffer().handle, m_resource.getBuffer().offset, m_resource.getBuffer().size);
1854 const VkDescriptorBufferInfo hostBufferInfo =
1855 makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_resource.getBuffer().size);
1856 DescriptorSetUpdateBuilder()
1857 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
1858 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &hostBufferInfo)
1859 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u),
1860 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &bufferInfo)
1861 .update(vk, device);
1862 }
1863 }
1864
1865 // Create pipeline
1866 m_pipeline =
1867 (pipelineType == PIPELINE_TYPE_GRAPHICS ?
1868 de::MovePtr<Pipeline>(new GraphicsPipeline(context, stage, shaderPrefix, *m_descriptorSetLayout)) :
1869 de::MovePtr<Pipeline>(
1870 new ComputePipeline(context, m_dispatchCall, shaderPrefix, *m_descriptorSetLayout)));
1871 }
1872
1873 void recordCommands(const VkCommandBuffer cmdBuffer)
1874 {
1875 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
1876
1877 // Post draw/dispatch commands
1878
1879 if (m_mode == ACCESS_MODE_READ)
1880 {
1881 const DeviceInterface &vk = m_context.getDeviceInterface();
1882 SynchronizationWrapperPtr synchronizationWrapper =
1883 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
1884
1885 // Insert a barrier so data written by the shader is available to the host
1886 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
1887 m_pipelineStage, // VkPipelineStageFlags2KHR srcStageMask
1888 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
1889 VK_PIPELINE_STAGE_HOST_BIT, // VkPipelineStageFlags2KHR dstStageMask
1890 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1891 **m_hostBuffer, // VkBuffer buffer
1892 0u, // VkDeviceSize offset
1893 m_resource.getBuffer().size // VkDeviceSize size
1894 );
1895 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
1896 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1897 }
1898 }
1899
1900 SyncInfo getInSyncInfo(void) const
1901 {
1902 VkAccessFlags2KHR accessFlags = VK_ACCESS_2_NONE_KHR;
1903
1904 if (m_mode == ACCESS_MODE_READ)
1905 {
1906 if (m_bufferType == BUFFER_TYPE_UNIFORM)
1907 accessFlags = VK_ACCESS_2_UNIFORM_READ_BIT_KHR;
1908
1909 else if (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL)
1910 {
1911 if (m_specializedAccess)
1912 accessFlags = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR;
1913 else
1914 accessFlags = VK_ACCESS_2_SHADER_READ_BIT_KHR;
1915 }
1916 else
1917 {
1918 if (m_specializedAccess)
1919 accessFlags = VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR;
1920 else
1921 accessFlags = VK_ACCESS_2_SHADER_READ_BIT_KHR;
1922 }
1923 }
1924 else
1925 {
1926 if (m_specializedAccess)
1927 accessFlags = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR;
1928 else
1929 accessFlags = VK_ACCESS_2_SHADER_WRITE_BIT_KHR;
1930 }
1931
1932 const SyncInfo syncInfo = {
1933 m_pipelineStage, // VkPipelineStageFlags stageMask;
1934 accessFlags, // VkAccessFlags accessMask;
1935 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
1936 };
1937 return syncInfo;
1938 }
1939
1940 SyncInfo getOutSyncInfo(void) const
1941 {
1942 VkAccessFlags2KHR accessFlags = VK_ACCESS_2_NONE_KHR;
1943
1944 if (m_mode == ACCESS_MODE_WRITE)
1945 {
1946 if (m_specializedAccess)
1947 accessFlags = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR;
1948 else
1949 accessFlags = VK_ACCESS_2_SHADER_WRITE_BIT_KHR;
1950 }
1951
1952 const SyncInfo syncInfo = {
1953 m_pipelineStage, // VkPipelineStageFlags stageMask;
1954 accessFlags, // VkAccessFlags accessMask;
1955 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
1956 };
1957 return syncInfo;
1958 }
1959
1960 Data getData(void) const
1961 {
1962 return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
1963 }
1964
1965 void setData(const Data &data)
1966 {
1967 DE_ASSERT(m_mode == ACCESS_MODE_WRITE);
1968 setHostBufferData(m_context, *m_hostBuffer, data);
1969 }
1970
1971 vk::VkShaderStageFlagBits getShaderStage(void)
1972 {
1973 return m_stage;
1974 }
1975
1976 private:
1977 OperationContext &m_context;
1978 Resource &m_resource;
1979 const VkShaderStageFlagBits m_stage;
1980 const VkPipelineStageFlags m_pipelineStage;
1981 const BufferType m_bufferType;
1982 const AccessMode m_mode;
1983 const DispatchCall m_dispatchCall;
1984 de::MovePtr<Buffer> m_hostBuffer;
1985 Move<VkDescriptorPool> m_descriptorPool;
1986 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
1987 Move<VkDescriptorSet> m_descriptorSet;
1988 de::MovePtr<Pipeline> m_pipeline;
1989 Move<VkBufferView> m_pBufferView;
1990 };
1991
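//! Read/write operation on a storage image in graphics/compute pipeline. A host-visible staging buffer and an
//! additional image are used to move data into and out of the tested resource.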
1992 class ImageImplementation : public Operation
1993 {
1994 public:
1995 ImageImplementation(OperationContext &context, Resource &resource, const VkShaderStageFlagBits stage,
1996 const std::string &shaderPrefix, const AccessMode mode, const bool specializedAccess,
1997 const PipelineType pipelineType, const DispatchCall dispatchCall)
1998 : Operation(specializedAccess)
1999 , m_context(context)
2000 , m_resource(resource)
2001 , m_stage(stage)
2002 , m_pipelineStage(pipelineStageFlagsFromShaderStageFlagBits(m_stage))
2003 , m_mode(mode)
2004 , m_dispatchCall(dispatchCall)
2005 , m_hostBufferSizeBytes(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
2006 {
2007 const DeviceInterface &vk = m_context.getDeviceInterface();
2008 const InstanceInterface &vki = m_context.getInstanceInterface();
2009 const VkDevice device = m_context.getDevice();
2010 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
2011 Allocator &allocator = m_context.getAllocator();
2012
2013 // Image stores are always required, in either access mode.
2014 requireFeaturesForSSBOAccess(m_context, m_stage);
2015
2016 // Some storage image formats may not be supported
2017 const auto &imgResource = m_resource.getImage();
2018 requireStorageImageSupport(vki, physDevice, imgResource.format, imgResource.tiling);
2019
2020 m_hostBuffer = de::MovePtr<Buffer>(
2021 new Buffer(vk, device, allocator,
2022 makeBufferCreateInfo(m_hostBufferSizeBytes,
2023 VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT),
2024 MemoryRequirement::HostVisible));
2025
2026 // Init host buffer data
2027 {
2028 const Allocation &alloc = m_hostBuffer->getAllocation();
2029 if (m_mode == ACCESS_MODE_READ)
2030 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_hostBufferSizeBytes));
2031 else
2032 fillPattern(alloc.getHostPtr(), m_hostBufferSizeBytes);
2033 flushAlloc(vk, device, alloc);
2034 }
2035
2036 // Image resources
2037 {
2038 m_image = de::MovePtr<Image>(new Image(
2039 vk, device, allocator,
2040 makeImageCreateInfo(
2041 m_resource.getImage().imageType, m_resource.getImage().extent, m_resource.getImage().format,
2042 (VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_STORAGE_BIT),
2043 VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
2044 MemoryRequirement::Any));
2045
2046 if (m_mode == ACCESS_MODE_READ)
2047 {
2048 m_srcImage = &m_resource.getImage().handle;
2049 m_dstImage = &(**m_image);
2050 }
2051 else
2052 {
2053 m_srcImage = &(**m_image);
2054 m_dstImage = &m_resource.getImage().handle;
2055 }
2056
2057 const VkImageViewType viewType = getImageViewType(m_resource.getImage().imageType);
2058
2059 m_srcImageView = makeImageView(vk, device, *m_srcImage, viewType, m_resource.getImage().format,
2060 m_resource.getImage().subresourceRange);
2061 m_dstImageView = makeImageView(vk, device, *m_dstImage, viewType, m_resource.getImage().format,
2062 m_resource.getImage().subresourceRange);
2063 }
2064
2065 // Prepare descriptors
2066 {
2067 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
2068 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, m_stage)
2069 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, m_stage)
2070 .build(vk, device);
2071
2072 m_descriptorPool = DescriptorPoolBuilder()
2073 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2074 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2075 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
2076
2077 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
2078
2079 const VkDescriptorImageInfo srcImageInfo =
2080 makeDescriptorImageInfo(DE_NULL, *m_srcImageView, VK_IMAGE_LAYOUT_GENERAL);
2081 const VkDescriptorImageInfo dstImageInfo =
2082 makeDescriptorImageInfo(DE_NULL, *m_dstImageView, VK_IMAGE_LAYOUT_GENERAL);
2083
2084 DescriptorSetUpdateBuilder()
2085 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
2086 VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &srcImageInfo)
2087 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u),
2088 VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &dstImageInfo)
2089 .update(vk, device);
2090 }
2091
2092 // Create pipeline
2093 m_pipeline =
2094 (pipelineType == PIPELINE_TYPE_GRAPHICS ?
2095 de::MovePtr<Pipeline>(new GraphicsPipeline(context, stage, shaderPrefix, *m_descriptorSetLayout)) :
2096 de::MovePtr<Pipeline>(
2097 new ComputePipeline(context, m_dispatchCall, shaderPrefix, *m_descriptorSetLayout)));
2098 }
2099
2100 void recordCommands(const VkCommandBuffer cmdBuffer)
2101 {
2102 const DeviceInterface &vk = m_context.getDeviceInterface();
2103 const VkBufferImageCopy bufferCopyRegion =
2104 makeBufferImageCopy(m_resource.getImage().extent, m_resource.getImage().subresourceLayers);
2105 SynchronizationWrapperPtr synchronizationWrapper =
2106 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
2107
2108 // Destination image layout
2109 {
2110 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
2111 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
2112 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
2113 m_pipelineStage, // VkPipelineStageFlags2KHR dstStageMask
2114 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2115 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
2116 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout newLayout
2117 *m_dstImage, // VkImage image
2118 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
2119 );
2120 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
2121 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2122 }
2123
2124 // In write mode, source image must be filled with data.
2125 if (m_mode == ACCESS_MODE_WRITE)
2126 {
2127 // Layout for transfer
2128 {
2129 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
2130 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
2131 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
2132 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR dstStageMask
2133 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2134 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
2135 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
2136 *m_srcImage, // VkImage image
2137 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
2138 );
2139 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
2140 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2141 }
2142
2143 // Host buffer -> Src image
2144 vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, *m_srcImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u,
2145 &bufferCopyRegion);
2146
2147 // Layout for shader reading
2148 {
2149 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
2150 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR srcStageMask
2151 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
2152 m_pipelineStage, // VkPipelineStageFlags2KHR dstStageMask
2153 VK_ACCESS_2_SHADER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2154 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
2155 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout newLayout
2156 *m_srcImage, // VkImage image
2157 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
2158 );
2159 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
2160 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2161 }
2162 }
2163
2164 // Execute shaders
2165
2166 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
2167
2168 // Post draw/dispatch commands
2169
2170 if (m_mode == ACCESS_MODE_READ)
2171 {
2172 // Layout for transfer
2173 {
2174 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
2175 m_pipelineStage, // VkPipelineStageFlags2KHR srcStageMask
2176 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
2177 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR dstStageMask
2178 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2179 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout oldLayout
2180 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
2181 *m_dstImage, // VkImage image
2182 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
2183 );
2184 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
2185 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2186 }
2187
2188 // Dst image -> Host buffer
2189 vk.cmdCopyImageToBuffer(cmdBuffer, *m_dstImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_hostBuffer, 1u,
2190 &bufferCopyRegion);
2191
2192 // Insert a barrier so the data copied into the host buffer is available to the host
2193 {
2194 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
2195 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR srcStageMask
2196 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
2197 VK_PIPELINE_STAGE_HOST_BIT, // VkPipelineStageFlags2KHR dstStageMask
2198 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2199 **m_hostBuffer, // VkBuffer buffer
2200 0u, // VkDeviceSize offset
2201 m_hostBufferSizeBytes // VkDeviceSize size
2202 );
2203 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
2204 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2205 }
2206 }
2207 }
2208
2209 SyncInfo getInSyncInfo(void) const
2210 {
2211 VkAccessFlags2KHR accessFlags = VK_ACCESS_2_NONE_KHR;
2212
2213 if (m_mode == ACCESS_MODE_READ)
2214 {
2215 if (m_specializedAccess)
2216 accessFlags = VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR;
2217 else
2218 accessFlags = VK_ACCESS_2_SHADER_READ_BIT_KHR;
2219 }
2220
2221 const SyncInfo syncInfo = {
2222 m_pipelineStage, // VkPipelineStageFlags stageMask;
2223 accessFlags, // VkAccessFlags accessMask;
2224 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
2225 };
2226 return syncInfo;
2227 }
2228
2229 SyncInfo getOutSyncInfo(void) const
2230 {
2231 VkAccessFlags2KHR accessFlags = VK_ACCESS_2_NONE_KHR;
2232
2233 if (m_mode == ACCESS_MODE_WRITE)
2234 {
2235 if (m_specializedAccess)
2236 accessFlags = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR;
2237 else
2238 accessFlags = VK_ACCESS_2_SHADER_WRITE_BIT_KHR;
2239 }
2240
2241 const SyncInfo syncInfo = {
2242 m_pipelineStage, // VkPipelineStageFlags stageMask;
2243 accessFlags, // VkAccessFlags accessMask;
2244 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
2245 };
2246 return syncInfo;
2247 }
2248
2249 Data getData(void) const
2250 {
2251 return getHostBufferData(m_context, *m_hostBuffer, m_hostBufferSizeBytes);
2252 }
2253
2254 void setData(const Data &data)
2255 {
2256 DE_ASSERT(m_mode == ACCESS_MODE_WRITE);
2257 setHostBufferData(m_context, *m_hostBuffer, data);
2258 }
2259
2260 vk::VkShaderStageFlagBits getShaderStage(void)
2261 {
2262 return m_stage;
2263 }
2264
2265 private:
2266 OperationContext &m_context;
2267 Resource &m_resource;
2268 const VkShaderStageFlagBits m_stage;
2269 const VkPipelineStageFlags m_pipelineStage;
2270 const AccessMode m_mode;
2271 const DispatchCall m_dispatchCall;
2272 const VkDeviceSize m_hostBufferSizeBytes;
2273 de::MovePtr<Buffer> m_hostBuffer;
2274 de::MovePtr<Image> m_image; //! Additional image used as src or dst depending on operation mode.
2275 const VkImage *m_srcImage;
2276 const VkImage *m_dstImage;
2277 Move<VkImageView> m_srcImageView;
2278 Move<VkImageView> m_dstImageView;
2279 Move<VkDescriptorPool> m_descriptorPool;
2280 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
2281 Move<VkDescriptorSet> m_descriptorSet;
2282 de::MovePtr<Pipeline> m_pipeline;
2283 };
2284
2285 //! Create generic passthrough shaders with bits of custom code inserted in a specific shader stage.
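//! For example, when the tested stage is compute, the generated shader source is approximately:
//!
//!     #version 440
//!     layout(local_size_x = 1) in;
//!     <declCode>
//!     void main (void)
//!     {
//!     <mainCode>
//!     }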
2286 void initPassthroughPrograms(SourceCollections &programCollection, const std::string &shaderPrefix,
2287 const std::string &declCode, const std::string &mainCode,
2288 const VkShaderStageFlagBits stage)
2289 {
2290 const VkShaderStageFlags requiredStages = getRequiredStages(stage);
2291
2292 if (requiredStages & VK_SHADER_STAGE_VERTEX_BIT)
2293 {
2294 std::ostringstream src;
2295 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2296 << "\n"
2297 << "layout(location = 0) in vec4 v_in_position;\n"
2298 << "\n"
2299 << "out " << s_perVertexBlock << ";\n"
2300 << "\n"
2301 << (stage & VK_SHADER_STAGE_VERTEX_BIT ? declCode + "\n" : "") << "void main (void)\n"
2302 << "{\n"
2303 << " gl_Position = v_in_position;\n"
2304 << (stage & VK_SHADER_STAGE_VERTEX_BIT ? mainCode : "") << "}\n";
2305
2306 if (!programCollection.glslSources.contains(shaderPrefix + "vert"))
2307 programCollection.glslSources.add(shaderPrefix + "vert") << glu::VertexSource(src.str());
2308 }
2309
2310 if (requiredStages & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT)
2311 {
2312 std::ostringstream src;
2313 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2314 << "\n"
2315 << "layout(vertices = 3) out;\n"
2316 << "\n"
2317 << "in " << s_perVertexBlock << " gl_in[gl_MaxPatchVertices];\n"
2318 << "\n"
2319 << "out " << s_perVertexBlock << " gl_out[];\n"
2320 << "\n"
2321 << (stage & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT ? declCode + "\n" : "") << "void main (void)\n"
2322 << "{\n"
2323 << " gl_TessLevelInner[0] = 1.0;\n"
2324 << " gl_TessLevelInner[1] = 1.0;\n"
2325 << "\n"
2326 << " gl_TessLevelOuter[0] = 1.0;\n"
2327 << " gl_TessLevelOuter[1] = 1.0;\n"
2328 << " gl_TessLevelOuter[2] = 1.0;\n"
2329 << " gl_TessLevelOuter[3] = 1.0;\n"
2330 << "\n"
2331 << " gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n"
2332 << (stage & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT ? "\n" + mainCode : "") << "}\n";
2333
2334 if (!programCollection.glslSources.contains(shaderPrefix + "tesc"))
2335 programCollection.glslSources.add(shaderPrefix + "tesc") << glu::TessellationControlSource(src.str());
2336 }
2337
2338 if (requiredStages & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
2339 {
2340 std::ostringstream src;
2341 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2342 << "\n"
2343 << "layout(triangles, equal_spacing, ccw) in;\n"
2344 << "\n"
2345 << "in " << s_perVertexBlock << " gl_in[gl_MaxPatchVertices];\n"
2346 << "\n"
2347 << "out " << s_perVertexBlock << ";\n"
2348 << "\n"
2349 << (stage & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT ? declCode + "\n" : "") << "void main (void)\n"
2350 << "{\n"
2351 << " vec3 px = gl_TessCoord.x * gl_in[0].gl_Position.xyz;\n"
2352 << " vec3 py = gl_TessCoord.y * gl_in[1].gl_Position.xyz;\n"
2353 << " vec3 pz = gl_TessCoord.z * gl_in[2].gl_Position.xyz;\n"
2354 << " gl_Position = vec4(px + py + pz, 1.0);\n"
2355 << (stage & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT ? mainCode : "") << "}\n";
2356
2357 if (!programCollection.glslSources.contains(shaderPrefix + "tese"))
2358 programCollection.glslSources.add(shaderPrefix + "tese") << glu::TessellationEvaluationSource(src.str());
2359 }
2360
2361 if (requiredStages & VK_SHADER_STAGE_GEOMETRY_BIT)
2362 {
2363 std::ostringstream src;
2364 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2365 << "\n"
2366 << "layout(triangles) in;\n"
2367 << "layout(triangle_strip, max_vertices = 3) out;\n"
2368 << "\n"
2369 << "in " << s_perVertexBlock << " gl_in[];\n"
2370 << "\n"
2371 << "out " << s_perVertexBlock << ";\n"
2372 << "\n"
2373 << (stage & VK_SHADER_STAGE_GEOMETRY_BIT ? declCode + "\n" : "") << "void main (void)\n"
2374 << "{\n"
2375 << " gl_Position = gl_in[0].gl_Position;\n"
2376 << " EmitVertex();\n"
2377 << "\n"
2378 << " gl_Position = gl_in[1].gl_Position;\n"
2379 << " EmitVertex();\n"
2380 << "\n"
2381 << " gl_Position = gl_in[2].gl_Position;\n"
2382 << " EmitVertex();\n"
2383 << (stage & VK_SHADER_STAGE_GEOMETRY_BIT ? "\n" + mainCode : "") << "}\n";
2384
2385 if (!programCollection.glslSources.contains(shaderPrefix + "geom"))
2386 programCollection.glslSources.add(shaderPrefix + "geom") << glu::GeometrySource(src.str());
2387 }
2388
2389 if (requiredStages & VK_SHADER_STAGE_FRAGMENT_BIT)
2390 {
2391 std::ostringstream src;
2392 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2393 << "\n"
2394 << "layout(location = 0) out vec4 o_color;\n"
2395 << "\n"
2396 << (stage & VK_SHADER_STAGE_FRAGMENT_BIT ? declCode + "\n" : "") << "void main (void)\n"
2397 << "{\n"
2398 << " o_color = vec4(1.0);\n"
2399 << (stage & VK_SHADER_STAGE_FRAGMENT_BIT ? "\n" + mainCode : "") << "}\n";
2400
2401 if (!programCollection.glslSources.contains(shaderPrefix + "frag"))
2402 programCollection.glslSources.add(shaderPrefix + "frag") << glu::FragmentSource(src.str());
2403 }
2404
2405 if (requiredStages & VK_SHADER_STAGE_COMPUTE_BIT)
2406 {
2407 std::ostringstream src;
2408 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2409 << "\n"
2410 << "layout(local_size_x = 1) in;\n"
2411 << "\n"
2412 << (stage & VK_SHADER_STAGE_COMPUTE_BIT ? declCode + "\n" : "") << "void main (void)\n"
2413 << "{\n"
2414 << (stage & VK_SHADER_STAGE_COMPUTE_BIT ? mainCode : "") << "}\n";
2415
2416 if (!programCollection.glslSources.contains(shaderPrefix + "comp"))
2417 programCollection.glslSources.add(shaderPrefix + "comp") << glu::ComputeSource(src.str());
2418 }
2419 }
2420
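//! Creates single-buffer read or write operations that access the buffer from the requested shader stage.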
2421 class BufferSupport : public OperationSupport
2422 {
2423 public:
2424 BufferSupport(const ResourceDescription &resourceDesc, const BufferType bufferType, const AccessMode mode,
2425 const bool specializedAccess, const VkShaderStageFlagBits stage,
2426 const DispatchCall dispatchCall = DISPATCH_CALL_DISPATCH)
2427 : OperationSupport(specializedAccess)
2428 , m_resourceDesc(resourceDesc)
2429 , m_bufferType(bufferType)
2430 , m_mode(mode)
2431 , m_stage(stage)
2432 , m_shaderPrefix(std::string(m_mode == ACCESS_MODE_READ ? "read_" : "write_") +
2433 (m_bufferType == BUFFER_TYPE_UNIFORM ?
2434 "ubo_" :
2435 (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL ? "ubo_texel_" : "ssbo_")))
2436 , m_dispatchCall(dispatchCall)
2437 {
2438 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER);
2439 DE_ASSERT(m_bufferType == BUFFER_TYPE_UNIFORM || m_bufferType == BUFFER_TYPE_STORAGE ||
2440 m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL);
2441 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
2442 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_bufferType == BUFFER_TYPE_STORAGE);
2443 DE_ASSERT(m_bufferType != BUFFER_TYPE_UNIFORM || m_resourceDesc.size.x() <= MAX_UBO_RANGE);
2444 DE_ASSERT(m_dispatchCall == DISPATCH_CALL_DISPATCH || m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT);
2445
2446 assertValidShaderStage(m_stage);
2447 }
2448
2449 void initPrograms(SourceCollections &programCollection) const
2450 {
2451 DE_ASSERT((m_resourceDesc.size.x() % sizeof(tcu::UVec4)) == 0);
2452
2453 std::string bufferTypeStr = "";
2454 if (m_bufferType == BUFFER_TYPE_UNIFORM)
2455 bufferTypeStr = "uniform";
2456 else
2457 {
2458 if (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL)
2459 bufferTypeStr = "uniform utextureBuffer";
2460 else
2461 bufferTypeStr = "buffer";
2462 }
2463
2464 const int numVecElements = static_cast<int>(m_resourceDesc.size.x() /
2465 sizeof(tcu::UVec4)); // buffer size must be a multiple of 16 (std140 uvec4 array stride)
2466 std::ostringstream declSrc;
2467 std::ostringstream copySrc;
2468 std::string outputBuff = "layout(set = 0, binding = 1, std140) writeonly buffer Output {\n"
2469 " uvec4 data[" +
2470 std::to_string(numVecElements) +
2471 "];\n"
2472 "} b_out;\n";
2473 if ((m_bufferType == BUFFER_TYPE_UNIFORM) || (m_bufferType == BUFFER_TYPE_STORAGE))
2474 {
2475 declSrc << "layout(set = 0, binding = 0, std140) readonly " << bufferTypeStr << " Input {\n"
2476 << " uvec4 data[" << numVecElements << "];\n"
2477 << "} b_in;\n"
2478 << "\n"
2479 << outputBuff;
2480
2481 copySrc << " for (int i = 0; i < " << numVecElements << "; ++i) {\n"
2482 << " b_out.data[i] = b_in.data[i];\n"
2483 << " }\n";
2484 }
2485 else if (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL)
2486 {
2487 declSrc << "layout(set = 0, binding = 0) " << bufferTypeStr << " Input;\n"
2488 << "\n"
2489 << outputBuff;
2490
2491 copySrc << " for (int i = 0; i < " << numVecElements << "; ++i) {\n"
2492 << " b_out.data[i] = texelFetch(Input, i);\n"
2493 << " }\n";
2494 }
2495
2496 initPassthroughPrograms(programCollection, m_shaderPrefix, declSrc.str(), copySrc.str(), m_stage);
2497 }
2498
2499 uint32_t getInResourceUsageFlags(void) const
2500 {
2501 if (m_bufferType == BUFFER_TYPE_UNIFORM)
2502 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT : 0;
2503 else if (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL)
2504 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT : 0;
2505 else
2506 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_STORAGE_BUFFER_BIT : 0;
2507 }
2508
2509 uint32_t getOutResourceUsageFlags(void) const
2510 {
2511 if (m_bufferType == BUFFER_TYPE_UNIFORM)
2512 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT : 0;
2513 else if (m_bufferType == BUFFER_TYPE_UNIFORM_TEXEL)
2514 return m_mode == ACCESS_MODE_WRITE ? 0 : VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
2515 else
2516 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_STORAGE_BUFFER_BIT : 0;
2517 }
2518
2519 VkQueueFlags getQueueFlags(const OperationContext &context) const
2520 {
2521 DE_UNREF(context);
2522 return (m_stage == VK_SHADER_STAGE_COMPUTE_BIT ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
2523 }
2524
2525 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
2526 {
2527 if (m_stage & VK_SHADER_STAGE_COMPUTE_BIT)
2528 return de::MovePtr<Operation>(new BufferImplementation(context, resource, m_stage, m_bufferType,
2529 m_shaderPrefix, m_mode, m_specializedAccess,
2530 PIPELINE_TYPE_COMPUTE, m_dispatchCall));
2531 else
2532 return de::MovePtr<Operation>(new BufferImplementation(context, resource, m_stage, m_bufferType,
2533 m_shaderPrefix, m_mode, m_specializedAccess,
2534 PIPELINE_TYPE_GRAPHICS, m_dispatchCall));
2535 }
2536
2537 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
2538 {
2539 DE_ASSERT(0);
2540 return de::MovePtr<Operation>();
2541 }
2542
2543 vk::VkShaderStageFlagBits getShaderStage(void)
2544 {
2545 return m_stage;
2546 }
2547
2548 private:
2549 const ResourceDescription m_resourceDesc;
2550 const BufferType m_bufferType;
2551 const AccessMode m_mode;
2552 const VkShaderStageFlagBits m_stage;
2553 const std::string m_shaderPrefix;
2554 const DispatchCall m_dispatchCall;
2555 };
2556
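//! Creates single-image read or write operations that access the image as a storage image from the requested shader stage.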
2557 class ImageSupport : public OperationSupport
2558 {
2559 public:
2560 ImageSupport(const ResourceDescription &resourceDesc, const AccessMode mode, const bool specializedAccess,
2561 const VkShaderStageFlagBits stage, const DispatchCall dispatchCall = DISPATCH_CALL_DISPATCH)
2562 : OperationSupport(specializedAccess)
2563 , m_resourceDesc(resourceDesc)
2564 , m_mode(mode)
2565 , m_stage(stage)
2566 , m_shaderPrefix(m_mode == ACCESS_MODE_READ ? "read_image_" : "write_image_")
2567 , m_dispatchCall(dispatchCall)
2568 {
2569 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
2570 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
2571 DE_ASSERT(m_dispatchCall == DISPATCH_CALL_DISPATCH || m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT);
2572
2573 assertValidShaderStage(m_stage);
2574 }
2575
2576 void initPrograms(SourceCollections &programCollection) const
2577 {
2578 const std::string imageFormat = getShaderImageFormatQualifier(m_resourceDesc.imageFormat);
2579 const std::string imageType = getShaderImageType(m_resourceDesc.imageFormat, m_resourceDesc.imageType);
2580
2581 std::ostringstream declSrc;
2582 declSrc << "layout(set = 0, binding = 0, " << imageFormat << ") readonly uniform " << imageType << " srcImg;\n"
2583 << "layout(set = 0, binding = 1, " << imageFormat << ") writeonly uniform " << imageType
2584 << " dstImg;\n";
2585
2586 std::ostringstream mainSrc;
2587 if (m_resourceDesc.imageType == VK_IMAGE_TYPE_1D)
2588 mainSrc << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
2589 << " imageStore(dstImg, x, imageLoad(srcImg, x));\n";
2590 else if (m_resourceDesc.imageType == VK_IMAGE_TYPE_2D)
2591 mainSrc << " for (int y = 0; y < " << m_resourceDesc.size.y() << "; ++y)\n"
2592 << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
2593 << " imageStore(dstImg, ivec2(x, y), imageLoad(srcImg, ivec2(x, y)));\n";
2594 else if (m_resourceDesc.imageType == VK_IMAGE_TYPE_3D)
2595 mainSrc << " for (int z = 0; z < " << m_resourceDesc.size.z() << "; ++z)\n"
2596 << " for (int y = 0; y < " << m_resourceDesc.size.y() << "; ++y)\n"
2597 << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
2598 << " imageStore(dstImg, ivec3(x, y, z), imageLoad(srcImg, ivec3(x, y, z)));\n";
2599 else
2600 DE_ASSERT(0);
2601
2602 initPassthroughPrograms(programCollection, m_shaderPrefix, declSrc.str(), mainSrc.str(), m_stage);
2603 }
2604
2605 uint32_t getInResourceUsageFlags(void) const
2606 {
2607 return VK_IMAGE_USAGE_STORAGE_BIT;
2608 }
2609
2610 uint32_t getOutResourceUsageFlags(void) const
2611 {
2612 return VK_IMAGE_USAGE_STORAGE_BIT;
2613 }
2614
2615 VkQueueFlags getQueueFlags(const OperationContext &context) const
2616 {
2617 DE_UNREF(context);
2618 return (m_stage == VK_SHADER_STAGE_COMPUTE_BIT ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
2619 }
2620
2621 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
2622 {
2623 if (m_stage & VK_SHADER_STAGE_COMPUTE_BIT)
2624 return de::MovePtr<Operation>(new ImageImplementation(context, resource, m_stage, m_shaderPrefix, m_mode,
2625 m_specializedAccess, PIPELINE_TYPE_COMPUTE,
2626 m_dispatchCall));
2627 else
2628 return de::MovePtr<Operation>(new ImageImplementation(context, resource, m_stage, m_shaderPrefix, m_mode,
2629 m_specializedAccess, PIPELINE_TYPE_GRAPHICS,
2630 m_dispatchCall));
2631 }
2632
2633 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
2634 {
2635 DE_ASSERT(0);
2636 return de::MovePtr<Operation>();
2637 }
2638
2639 vk::VkShaderStageFlagBits getShaderStage(void)
2640 {
2641 return m_stage;
2642 }
2643
2644 private:
2645 const ResourceDescription m_resourceDesc;
2646 const AccessMode m_mode;
2647 const VkShaderStageFlagBits m_stage;
2648 const std::string m_shaderPrefix;
2649 const DispatchCall m_dispatchCall;
2650 };
2651
2652 //! Copy operation on a UBO/SSBO in graphics/compute pipeline.
2653 class BufferCopyImplementation : public Operation
2654 {
2655 public:
2656 BufferCopyImplementation(OperationContext &context, Resource &inResource, Resource &outResource,
2657 const VkShaderStageFlagBits stage, const BufferType bufferType,
2658 const std::string &shaderPrefix, const bool specializedAccess,
2659 const PipelineType pipelineType, const DispatchCall dispatchCall)
2660 : Operation(specializedAccess)
2661 , m_context(context)
2662 , m_inResource(inResource)
2663 , m_outResource(outResource)
2664 , m_stage(stage)
2665 , m_pipelineStage(pipelineStageFlagsFromShaderStageFlagBits(m_stage))
2666 , m_bufferType(bufferType)
2667 , m_dispatchCall(dispatchCall)
2668 {
2669 requireFeaturesForSSBOAccess(m_context, m_stage);
2670
2671 const DeviceInterface &vk = m_context.getDeviceInterface();
2672 const VkDevice device = m_context.getDevice();
2673
2674 // Prepare descriptors
2675 {
2676 const VkDescriptorType bufferDescriptorType =
2677 (m_bufferType == BUFFER_TYPE_UNIFORM ? VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER :
2678 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
2679
2680 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
2681 .addSingleBinding(bufferDescriptorType, m_stage)
2682 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, m_stage)
2683 .build(vk, device);
2684
2685 m_descriptorPool = DescriptorPoolBuilder()
2686 .addType(bufferDescriptorType)
2687 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
2688 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
2689
2690 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
2691
2692 const VkDescriptorBufferInfo inBufferInfo = makeDescriptorBufferInfo(
2693 m_inResource.getBuffer().handle, m_inResource.getBuffer().offset, m_inResource.getBuffer().size);
2694 const VkDescriptorBufferInfo outBufferInfo = makeDescriptorBufferInfo(
2695 m_outResource.getBuffer().handle, m_outResource.getBuffer().offset, m_outResource.getBuffer().size);
2696
2697 DescriptorSetUpdateBuilder()
2698 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
2699 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &inBufferInfo)
2700 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u),
2701 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &outBufferInfo)
2702 .update(vk, device);
2703 }
2704
2705 // Create pipeline
2706 m_pipeline =
2707 (pipelineType == PIPELINE_TYPE_GRAPHICS ?
2708 de::MovePtr<Pipeline>(new GraphicsPipeline(context, stage, shaderPrefix, *m_descriptorSetLayout)) :
2709 de::MovePtr<Pipeline>(
2710 new ComputePipeline(context, m_dispatchCall, shaderPrefix, *m_descriptorSetLayout)));
2711 }
2712
2713 void recordCommands(const VkCommandBuffer cmdBuffer)
2714 {
2715 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
2716 }
2717
2718 SyncInfo getInSyncInfo(void) const
2719 {
2720 VkAccessFlags2KHR accessFlags =
2721 (m_specializedAccess ? VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR : VK_ACCESS_2_SHADER_READ_BIT_KHR);
2722 const SyncInfo syncInfo = {
2723 m_pipelineStage, // VkPipelineStageFlags stageMask;
2724 accessFlags, // VkAccessFlags accessMask;
2725 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
2726 };
2727 return syncInfo;
2728 }
2729
2730 SyncInfo getOutSyncInfo(void) const
2731 {
2732 VkAccessFlags2KHR accessFlags =
2733 (m_specializedAccess ? VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR : VK_ACCESS_2_SHADER_WRITE_BIT_KHR);
2734 const SyncInfo syncInfo = {
2735 m_pipelineStage, // VkPipelineStageFlags stageMask;
2736 accessFlags, // VkAccessFlags accessMask;
2737 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
2738 };
2739 return syncInfo;
2740 }
2741
2742 Data getData(void) const
2743 {
2744 Data data = {0, DE_NULL};
2745 return data;
2746 }
2747
2748 void setData(const Data &)
2749 {
2750 DE_ASSERT(0);
2751 }
2752
2753 vk::VkShaderStageFlagBits getShaderStage(void)
2754 {
2755 return m_stage;
2756 }
2757
2758 private:
2759 OperationContext &m_context;
2760 Resource &m_inResource;
2761 Resource &m_outResource;
2762 const VkShaderStageFlagBits m_stage;
2763 const VkPipelineStageFlags m_pipelineStage;
2764 const BufferType m_bufferType;
2765 const DispatchCall m_dispatchCall;
2766 Move<VkDescriptorPool> m_descriptorPool;
2767 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
2768 Move<VkDescriptorSet> m_descriptorSet;
2769 de::MovePtr<Pipeline> m_pipeline;
2770 };
2771
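//! Creates buffer-to-buffer copy operations performed in a shader (UBO or SSBO source, SSBO destination).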
2772 class CopyBufferSupport : public OperationSupport
2773 {
2774 public:
2775 CopyBufferSupport(const ResourceDescription &resourceDesc, const BufferType bufferType,
2776 const bool specializedAccess, const VkShaderStageFlagBits stage,
2777 const DispatchCall dispatchCall = DISPATCH_CALL_DISPATCH)
2778 : OperationSupport(specializedAccess)
2779 , m_resourceDesc(resourceDesc)
2780 , m_bufferType(bufferType)
2781 , m_stage(stage)
2782 , m_shaderPrefix(std::string("copy_") + getShaderStageName(stage) +
2783 (m_bufferType == BUFFER_TYPE_UNIFORM ? "_ubo_" : "_ssbo_"))
2784 , m_dispatchCall(dispatchCall)
2785 {
2786 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER);
2787 DE_ASSERT(m_bufferType == BUFFER_TYPE_UNIFORM || m_bufferType == BUFFER_TYPE_STORAGE);
2788 DE_ASSERT(m_bufferType != BUFFER_TYPE_UNIFORM || m_resourceDesc.size.x() <= MAX_UBO_RANGE);
2789 DE_ASSERT(m_dispatchCall == DISPATCH_CALL_DISPATCH || m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT);
2790
2791 assertValidShaderStage(m_stage);
2792 }
2793
2794 void initPrograms(SourceCollections &programCollection) const
2795 {
2796 DE_ASSERT((m_resourceDesc.size.x() % sizeof(tcu::UVec4)) == 0);
2797
2798 const std::string bufferTypeStr = (m_bufferType == BUFFER_TYPE_UNIFORM ? "uniform" : "buffer");
2799 const int numVecElements = static_cast<int>(m_resourceDesc.size.x() /
2800 sizeof(tcu::UVec4)); // buffer size must be a multiple of 16 (std140 uvec4 array stride)
2801
2802 std::ostringstream declSrc;
2803 declSrc << "layout(set = 0, binding = 0, std140) readonly " << bufferTypeStr << " Input {\n"
2804 << " uvec4 data[" << numVecElements << "];\n"
2805 << "} b_in;\n"
2806 << "\n"
2807 << "layout(set = 0, binding = 1, std140) writeonly buffer Output {\n"
2808 << " uvec4 data[" << numVecElements << "];\n"
2809 << "} b_out;\n";
2810
2811 std::ostringstream copySrc;
2812 copySrc << " for (int i = 0; i < " << numVecElements << "; ++i) {\n"
2813 << " b_out.data[i] = b_in.data[i];\n"
2814 << " }\n";
2815
2816 initPassthroughPrograms(programCollection, m_shaderPrefix, declSrc.str(), copySrc.str(), m_stage);
2817 }
2818
2819 uint32_t getInResourceUsageFlags(void) const
2820 {
2821 return (m_bufferType == BUFFER_TYPE_UNIFORM ? VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT :
2822 VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
2823 }
2824
2825 uint32_t getOutResourceUsageFlags(void) const
2826 {
2827 return VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
2828 }
2829
2830 VkQueueFlags getQueueFlags(const OperationContext &context) const
2831 {
2832 DE_UNREF(context);
2833 return (m_stage == VK_SHADER_STAGE_COMPUTE_BIT ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
2834 }
2835
2836 de::MovePtr<Operation> build(OperationContext &, Resource &) const
2837 {
2838 DE_ASSERT(0);
2839 return de::MovePtr<Operation>();
2840 }
2841
2842 de::MovePtr<Operation> build(OperationContext &context, Resource &inResource, Resource &outResource) const
2843 {
2844 if (m_stage & VK_SHADER_STAGE_COMPUTE_BIT)
2845 return de::MovePtr<Operation>(
2846 new BufferCopyImplementation(context, inResource, outResource, m_stage, m_bufferType, m_shaderPrefix,
2847 m_specializedAccess, PIPELINE_TYPE_COMPUTE, m_dispatchCall));
2848 else
2849 return de::MovePtr<Operation>(
2850 new BufferCopyImplementation(context, inResource, outResource, m_stage, m_bufferType, m_shaderPrefix,
2851 m_specializedAccess, PIPELINE_TYPE_GRAPHICS, m_dispatchCall));
2852 }
2853
2854 vk::VkShaderStageFlagBits getShaderStage(void)
2855 {
2856 return m_stage;
2857 }
2858
2859 private:
2860 const ResourceDescription m_resourceDesc;
2861 const BufferType m_bufferType;
2862 const VkShaderStageFlagBits m_stage;
2863 const std::string m_shaderPrefix;
2864 const DispatchCall m_dispatchCall;
2865 };
2866
2867 class CopyImageImplementation : public Operation
2868 {
2869 public:
2870 CopyImageImplementation(OperationContext &context, Resource &inResource, Resource &outResource,
2871 const VkShaderStageFlagBits stage, const std::string &shaderPrefix,
2872 const bool specializedAccess, const PipelineType pipelineType,
2873 const DispatchCall dispatchCall)
2874 : Operation(specializedAccess)
2875 , m_context(context)
2876 , m_inResource(inResource)
2877 , m_outResource(outResource)
2878 , m_stage(stage)
2879 , m_pipelineStage(pipelineStageFlagsFromShaderStageFlagBits(m_stage))
2880 , m_dispatchCall(dispatchCall)
2881 {
2882 const DeviceInterface &vk = m_context.getDeviceInterface();
2883 const InstanceInterface &vki = m_context.getInstanceInterface();
2884 const VkDevice device = m_context.getDevice();
2885 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
2886
2887 // Image stores are always required, in either access mode.
2888 requireFeaturesForSSBOAccess(m_context, m_stage);
2889
2890 // Some storage image formats may not be supported
2891 const auto &imgResource = m_inResource.getImage();
2892 requireStorageImageSupport(vki, physDevice, imgResource.format, imgResource.tiling);
2893
2894 // Image resources
2895 {
2896 const VkImageViewType viewType = getImageViewType(m_inResource.getImage().imageType);
2897
2898 m_srcImageView = makeImageView(vk, device, m_inResource.getImage().handle, viewType,
2899 m_inResource.getImage().format, m_inResource.getImage().subresourceRange);
2900 m_dstImageView = makeImageView(vk, device, m_outResource.getImage().handle, viewType,
2901 m_outResource.getImage().format, m_outResource.getImage().subresourceRange);
2902 }
2903
2904 // Prepare descriptors
2905 {
2906 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
2907 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, m_stage)
2908 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, m_stage)
2909 .build(vk, device);
2910
2911 m_descriptorPool = DescriptorPoolBuilder()
2912 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2913 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2914 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
2915
2916 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
2917
2918 const VkDescriptorImageInfo srcImageInfo =
2919 makeDescriptorImageInfo(DE_NULL, *m_srcImageView, VK_IMAGE_LAYOUT_GENERAL);
2920 const VkDescriptorImageInfo dstImageInfo =
2921 makeDescriptorImageInfo(DE_NULL, *m_dstImageView, VK_IMAGE_LAYOUT_GENERAL);
2922
2923 DescriptorSetUpdateBuilder()
2924 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
2925 VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &srcImageInfo)
2926 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u),
2927 VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &dstImageInfo)
2928 .update(vk, device);
2929 }
2930
2931 // Create pipeline
2932 m_pipeline =
2933 (pipelineType == PIPELINE_TYPE_GRAPHICS ?
2934 de::MovePtr<Pipeline>(new GraphicsPipeline(context, stage, shaderPrefix, *m_descriptorSetLayout)) :
2935 de::MovePtr<Pipeline>(
2936 new ComputePipeline(context, m_dispatchCall, shaderPrefix, *m_descriptorSetLayout)));
2937 }
2938
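// recordCommands() below first transitions the destination image from UNDEFINED to GENERAL
// for this operation's shader stage, then lets the embedded graphics or compute pipeline
// perform the copy with imageLoad/imageStore. The source image is expected to already be in
// VK_IMAGE_LAYOUT_GENERAL, as advertised by getInSyncInfo().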
2939 void recordCommands(const VkCommandBuffer cmdBuffer)
2940 {
2941 {
2942 const DeviceInterface &vk = m_context.getDeviceInterface();
2943 SynchronizationWrapperPtr synchronizationWrapper =
2944 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
2945
2946 const VkImageMemoryBarrier2KHR imageMemoryBarriers2 = makeImageMemoryBarrier2(
2947 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
2948 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
2949 m_pipelineStage, // VkPipelineStageFlags2KHR dstStageMask
2950 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2951 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
2952 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout newLayout
2953 m_outResource.getImage().handle, // VkImage image
2954 m_outResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
2955 );
2956 VkDependencyInfoKHR dependencyInfo{
2957 VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR, // VkStructureType sType
2958 DE_NULL, // const void* pNext
2959 VK_DEPENDENCY_BY_REGION_BIT, // VkDependencyFlags dependencyFlags
2960 0u, // uint32_t memoryBarrierCount
2961 DE_NULL, // const VkMemoryBarrier2KHR* pMemoryBarriers
2962 0u, // uint32_t bufferMemoryBarrierCount
2963 DE_NULL, // const VkBufferMemoryBarrier2KHR* pBufferMemoryBarriers
2964 1, // uint32_t imageMemoryBarrierCount
2965 &imageMemoryBarriers2 // const VkImageMemoryBarrier2KHR* pImageMemoryBarriers
2966 };
2967 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2968 }
2969
2970 // Execute shaders
2971 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
2972 }
2973
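// When m_specializedAccess is set, the more precise synchronization2 storage access bits are
// reported below; otherwise the generic shader read/write bits are used.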
2974 SyncInfo getInSyncInfo(void) const
2975 {
2976 VkAccessFlags2KHR accessFlags =
2977 (m_specializedAccess ? VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR : VK_ACCESS_2_SHADER_READ_BIT_KHR);
2978 const SyncInfo syncInfo = {
2979 m_pipelineStage, // VkPipelineStageFlags stageMask;
2980 accessFlags, // VkAccessFlags accessMask;
2981 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
2982 };
2983 return syncInfo;
2984 }
2985
2986 SyncInfo getOutSyncInfo(void) const
2987 {
2988 VkAccessFlags2KHR accessFlags =
2989 (m_specializedAccess ? VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR : VK_ACCESS_2_SHADER_WRITE_BIT_KHR);
2990 const SyncInfo syncInfo = {
2991 m_pipelineStage, // VkPipelineStageFlags stageMask;
2992 accessFlags, // VkAccessFlags accessMask;
2993 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
2994 };
2995 return syncInfo;
2996 }
2997
2998 Data getData(void) const
2999 {
3000 Data data = {0, DE_NULL};
3001 return data;
3002 }
3003
3004 void setData(const Data &)
3005 {
3006 DE_ASSERT(0);
3007 }
3008
3009 vk::VkShaderStageFlagBits getShaderStage(void)
3010 {
3011 return m_stage;
3012 }
3013
3014 private:
3015 OperationContext &m_context;
3016 Resource &m_inResource;
3017 Resource &m_outResource;
3018 const VkShaderStageFlagBits m_stage;
3019 const VkPipelineStageFlags m_pipelineStage;
3020 const DispatchCall m_dispatchCall;
3021 Move<VkImageView> m_srcImageView;
3022 Move<VkImageView> m_dstImageView;
3023 Move<VkDescriptorPool> m_descriptorPool;
3024 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
3025 Move<VkDescriptorSet> m_descriptorSet;
3026 de::MovePtr<Pipeline> m_pipeline;
3027 };
3028
3029 class CopyImageSupport : public OperationSupport
3030 {
3031 public:
3032 CopyImageSupport(const ResourceDescription &resourceDesc, const VkShaderStageFlagBits stage,
3033 const bool specializedAccess, const DispatchCall dispatchCall = DISPATCH_CALL_DISPATCH)
3034 : OperationSupport(specializedAccess)
3035 , m_resourceDesc(resourceDesc)
3036 , m_stage(stage)
3037 , m_shaderPrefix(std::string("copy_image_") + getShaderStageName(stage) + "_")
3038 , m_dispatchCall(dispatchCall)
3039 {
3040 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
3041 DE_ASSERT(m_dispatchCall == DISPATCH_CALL_DISPATCH || m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT);
3042
3043 assertValidShaderStage(m_stage);
3044 }
3045
3046 void initPrograms(SourceCollections &programCollection) const
3047 {
3048 const std::string imageFormat = getShaderImageFormatQualifier(m_resourceDesc.imageFormat);
3049 const std::string imageType = getShaderImageType(m_resourceDesc.imageFormat, m_resourceDesc.imageType);
3050
3051 std::ostringstream declSrc;
3052 declSrc << "layout(set = 0, binding = 0, " << imageFormat << ") readonly uniform " << imageType << " srcImg;\n"
3053 << "layout(set = 0, binding = 1, " << imageFormat << ") writeonly uniform " << imageType
3054 << " dstImg;\n";
3055
3056 std::ostringstream mainSrc;
3057 if (m_resourceDesc.imageType == VK_IMAGE_TYPE_1D)
3058 mainSrc << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
3059 << " imageStore(dstImg, x, imageLoad(srcImg, x));\n";
3060 else if (m_resourceDesc.imageType == VK_IMAGE_TYPE_2D)
3061 mainSrc << " for (int y = 0; y < " << m_resourceDesc.size.y() << "; ++y)\n"
3062 << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
3063 << " imageStore(dstImg, ivec2(x, y), imageLoad(srcImg, ivec2(x, y)));\n";
3064 else if (m_resourceDesc.imageType == VK_IMAGE_TYPE_3D)
3065 mainSrc << " for (int z = 0; z < " << m_resourceDesc.size.z() << "; ++z)\n"
3066 << " for (int y = 0; y < " << m_resourceDesc.size.y() << "; ++y)\n"
3067 << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
3068 << " imageStore(dstImg, ivec3(x, y, z), imageLoad(srcImg, ivec3(x, y, z)));\n";
3069 else
3070 DE_ASSERT(0);
3071
3072 initPassthroughPrograms(programCollection, m_shaderPrefix, declSrc.str(), mainSrc.str(), m_stage);
3073 }
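// For illustration only: assuming a hypothetical 64x64 2D image whose format maps to the
// "rgba8"/"image2D" qualifiers, the code above would emit roughly:
//
//     layout(set = 0, binding = 0, rgba8) readonly uniform image2D srcImg;
//     layout(set = 0, binding = 1, rgba8) writeonly uniform image2D dstImg;
//
//     for (int y = 0; y < 64; ++y)
//     for (int x = 0; x < 64; ++x)
//         imageStore(dstImg, ivec2(x, y), imageLoad(srcImg, ivec2(x, y)));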
3074
3075 uint32_t getInResourceUsageFlags(void) const
3076 {
3077 return VK_IMAGE_USAGE_STORAGE_BIT;
3078 }
3079
3080 uint32_t getOutResourceUsageFlags(void) const
3081 {
3082 return VK_IMAGE_USAGE_STORAGE_BIT;
3083 }
3084
3085 VkQueueFlags getQueueFlags(const OperationContext &context) const
3086 {
3087 DE_UNREF(context);
3088 return (m_stage == VK_SHADER_STAGE_COMPUTE_BIT ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
3089 }
3090
3091 de::MovePtr<Operation> build(OperationContext &, Resource &) const
3092 {
3093 DE_ASSERT(0);
3094 return de::MovePtr<Operation>();
3095 }
3096
3097 de::MovePtr<Operation> build(OperationContext &context, Resource &inResource, Resource &outResource) const
3098 {
3099 if (m_stage & VK_SHADER_STAGE_COMPUTE_BIT)
3100 return de::MovePtr<Operation>(new CopyImageImplementation(context, inResource, outResource, m_stage,
3101 m_shaderPrefix, m_specializedAccess,
3102 PIPELINE_TYPE_COMPUTE, m_dispatchCall));
3103 else
3104 return de::MovePtr<Operation>(new CopyImageImplementation(context, inResource, outResource, m_stage,
3105 m_shaderPrefix, m_specializedAccess,
3106 PIPELINE_TYPE_GRAPHICS, m_dispatchCall));
3107 }
3108
3109 vk::VkShaderStageFlagBits getShaderStage(void)
3110 {
3111 return m_stage;
3112 }
3113
3114 private:
3115 const ResourceDescription m_resourceDesc;
3116 const VkShaderStageFlagBits m_stage;
3117 const std::string m_shaderPrefix;
3118 const DispatchCall m_dispatchCall;
3119 };
3120
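//! Fills a multisample storage image from a host-visible buffer using a compute shader;
//! every sample of a texel receives the same value, so the host buffer doubles as the
//! reference data returned by getData().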
3121 class MSImageImplementation : public Operation
3122 {
3123 public:
3124 MSImageImplementation(OperationContext &context, Resource &resource)
3125 : m_context(context)
3126 , m_resource(resource)
3127 , m_hostBufferSizeBytes(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
3128 {
3129 const DeviceInterface &vk = m_context.getDeviceInterface();
3130 const InstanceInterface &vki = m_context.getInstanceInterface();
3131 const VkDevice device = m_context.getDevice();
3132 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
3133 const VkPhysicalDeviceFeatures features = getPhysicalDeviceFeatures(vki, physDevice);
3134 Allocator &allocator = m_context.getAllocator();
3135
3136 const auto &imgResource = m_resource.getImage();
3137 requireStorageImageSupport(vki, physDevice, imgResource.format, imgResource.tiling);
3138 if (!features.shaderStorageImageMultisample)
3139 TCU_THROW(NotSupportedError, "Using multisample images as storage is not supported");
3140
3141 VkBufferCreateInfo bufferCreateInfo = makeBufferCreateInfo(
3142 m_hostBufferSizeBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
3143 m_hostBuffer =
3144 de::MovePtr<Buffer>(new Buffer(vk, device, allocator, bufferCreateInfo, MemoryRequirement::HostVisible));
3145 const Allocation &alloc = m_hostBuffer->getAllocation();
3146 fillPattern(alloc.getHostPtr(), m_hostBufferSizeBytes);
3147 flushAlloc(vk, device, alloc);
3148
3149 const ImageResource &image = m_resource.getImage();
3150 const VkImageViewType viewType = getImageViewType(image.imageType);
3151 m_imageView = makeImageView(vk, device, image.handle, viewType, image.format, image.subresourceRange);
3152
3153 // Prepare descriptors
3154 {
3155 m_descriptorSetLayout =
3156 DescriptorSetLayoutBuilder()
3157 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
3158 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
3159 .build(vk, device);
3160
3161 m_descriptorPool = DescriptorPoolBuilder()
3162 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
3163 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
3164 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
3165
3166 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
3167
3168 const VkDescriptorBufferInfo bufferInfo =
3169 makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_hostBufferSizeBytes);
3170 const VkDescriptorImageInfo imageInfo =
3171 makeDescriptorImageInfo(DE_NULL, *m_imageView, VK_IMAGE_LAYOUT_GENERAL);
3172
3173 DescriptorSetUpdateBuilder()
3174 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
3175 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &bufferInfo)
3176 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u),
3177 VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &imageInfo)
3178 .update(vk, device);
3179 }
3180
3181 // Create pipeline
3182 const Unique<VkShaderModule> shaderModule(
3183 createShaderModule(vk, device, context.getBinaryCollection().get("comp"), (VkShaderModuleCreateFlags)0));
3184 m_pipelineLayout = makePipelineLayout(vk, device, *m_descriptorSetLayout);
3185 m_pipeline = makeComputePipeline(vk, device, *m_pipelineLayout, *shaderModule, DE_NULL,
3186 context.getPipelineCacheData(), context.getResourceInterface());
3187 }
3188
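// Transition the whole image to GENERAL for storage writes, then dispatch one invocation per
// texel (local_size_x = 1, so gl_GlobalInvocationID maps directly to the texel coordinate).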
3189 void recordCommands(const VkCommandBuffer cmdBuffer)
3190 {
3191 const DeviceInterface &vk = m_context.getDeviceInterface();
3192 SynchronizationWrapperPtr synchronizationWrapper =
3193 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
3194
3195 // change image layout
3196 {
3197 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3198 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3199 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3200 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3201 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3202 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3203 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout newLayout
3204 m_resource.getImage().handle, // VkImage image
3205 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
3206 );
3207 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
3208 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3209 }
3210
3211 // execute shader
3212 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipeline);
3213 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0u, 1u,
3214 &*m_descriptorSet, 0u, DE_NULL);
3215 vk.cmdDispatch(cmdBuffer, m_resource.getImage().extent.width, m_resource.getImage().extent.height, 1u);
3216 }
3217
3218 SyncInfo getInSyncInfo(void) const
3219 {
3220 DE_ASSERT(false);
3221 return emptySyncInfo;
3222 }
3223
3224 SyncInfo getOutSyncInfo(void) const
3225 {
3226 return {
3227 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR, // VkPipelineStageFlags stageMask;
3228 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3229 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
3230 };
3231 }
3232
3233 Data getData(void) const
3234 {
3235 return getHostBufferData(m_context, *m_hostBuffer, m_hostBufferSizeBytes);
3236 }
3237
3238 void setData(const Data &)
3239 {
3240 DE_ASSERT(false);
3241 }
3242
3243 private:
3244 OperationContext &m_context;
3245 Resource &m_resource;
3246 Move<VkImageView> m_imageView;
3247
3248 const VkDeviceSize m_hostBufferSizeBytes;
3249 de::MovePtr<Buffer> m_hostBuffer;
3250
3251 Move<VkDescriptorPool> m_descriptorPool;
3252 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
3253 Move<VkDescriptorSet> m_descriptorSet;
3254 Move<VkPipelineLayout> m_pipelineLayout;
3255 Move<VkPipeline> m_pipeline;
3256 };
3257
3258 class MSImageSupport : public OperationSupport
3259 {
3260 public:
3261 MSImageSupport(const ResourceDescription &resourceDesc) : m_resourceDesc(resourceDesc)
3262 {
3263 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
3264 }
3265
3266 void initPrograms(SourceCollections &programCollection) const
3267 {
3268 std::stringstream source;
3269 source << "#version 440\n"
3270 "\n"
3271 "layout(local_size_x = 1) in;\n"
3272 "layout(set = 0, binding = 0, std430) readonly buffer Input {\n"
3273 " uint data[];\n"
3274 "} inData;\n"
3275 "layout(set = 0, binding = 1, r32ui) writeonly uniform uimage2DMS msImage;\n"
3276 "\n"
3277 "void main (void)\n"
3278 "{\n"
3279 " int gx = int(gl_GlobalInvocationID.x);\n"
3280 " int gy = int(gl_GlobalInvocationID.y);\n"
3281 " uint value = inData.data[gy * "
3282 << m_resourceDesc.size.x()
3283 << " + gx];\n"
3284 " for (int sampleNdx = 0; sampleNdx < "
3285 << m_resourceDesc.imageSamples
3286 << "; ++sampleNdx)\n"
3287 " imageStore(msImage, ivec2(gx, gy), sampleNdx, uvec4(value));\n"
3288 "}\n";
3289 programCollection.glslSources.add("comp") << glu::ComputeSource(source.str().c_str());
3290 }
3291
3292 uint32_t getInResourceUsageFlags(void) const
3293 {
3294 return 0;
3295 }
3296
3297 uint32_t getOutResourceUsageFlags(void) const
3298 {
3299 return VK_IMAGE_USAGE_STORAGE_BIT;
3300 }
3301
3302 VkQueueFlags getQueueFlags(const OperationContext &) const
3303 {
3304 return VK_QUEUE_COMPUTE_BIT;
3305 }
3306
3307 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
3308 {
3309 return de::MovePtr<Operation>(new MSImageImplementation(context, resource));
3310 }
3311
3312 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
3313 {
3314 DE_ASSERT(0);
3315 return de::MovePtr<Operation>();
3316 }
3317
3318 private:
3319 const ResourceDescription m_resourceDesc;
3320 };
3321
3322 } // namespace ShaderAccess
3323
3324 namespace CopyBufferToImage
3325 {
3326
3327 class WriteImplementation : public Operation
3328 {
3329 public:
3330 WriteImplementation(OperationContext &context, Resource &resource)
3331 : m_context(context)
3332 , m_resource(resource)
3333 , m_bufferSize(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
3334 {
3335 DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_IMAGE);
3336
3337 const DeviceInterface &vk = m_context.getDeviceInterface();
3338 const VkDevice device = m_context.getDevice();
3339 Allocator &allocator = m_context.getAllocator();
3340
3341 m_hostBuffer = de::MovePtr<Buffer>(
3342 new Buffer(vk, device, allocator, makeBufferCreateInfo(m_bufferSize, VK_BUFFER_USAGE_TRANSFER_SRC_BIT),
3343 MemoryRequirement::HostVisible));
3344
3345 const Allocation &alloc = m_hostBuffer->getAllocation();
3346 fillPattern(alloc.getHostPtr(), m_bufferSize);
3347 flushAlloc(vk, device, alloc);
3348 }
3349
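// Transition the image to TRANSFER_DST_OPTIMAL and copy the pre-filled staging buffer into it;
// getOutSyncInfo() advertises the matching transfer-write hazard to the consumer.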
3350 void recordCommands(const VkCommandBuffer cmdBuffer)
3351 {
3352 const DeviceInterface &vk = m_context.getDeviceInterface();
3353 const VkBufferImageCopy copyRegion =
3354 makeBufferImageCopy(m_resource.getImage().extent, m_resource.getImage().subresourceLayers);
3355 SynchronizationWrapperPtr synchronizationWrapper =
3356 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
3357
3358 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3359 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3360 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3361 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3362 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3363 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3364 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3365 m_resource.getImage().handle, // VkImage image
3366 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
3367 );
3368 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
3369 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3370
3371 vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, m_resource.getImage().handle,
3372 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
3373 }
3374
3375 SyncInfo getInSyncInfo(void) const
3376 {
3377 return emptySyncInfo;
3378 }
3379
3380 SyncInfo getOutSyncInfo(void) const
3381 {
3382 const SyncInfo syncInfo = {
3383 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3384 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3385 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
3386 };
3387 return syncInfo;
3388 }
3389
3390 Data getData(void) const
3391 {
3392 return getHostBufferData(m_context, *m_hostBuffer, m_bufferSize);
3393 }
3394
3395 void setData(const Data &data)
3396 {
3397 setHostBufferData(m_context, *m_hostBuffer, data);
3398 }
3399
3400 private:
3401 OperationContext &m_context;
3402 Resource &m_resource;
3403 de::MovePtr<Buffer> m_hostBuffer;
3404 const VkDeviceSize m_bufferSize;
3405 };
3406
3407 class ReadImplementation : public Operation
3408 {
3409 public:
3410 ReadImplementation(OperationContext &context, Resource &resource)
3411 : m_context(context)
3412 , m_resource(resource)
3413 , m_subresourceRange(makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u))
3414 , m_subresourceLayers(makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u))
3415 {
3416 DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_BUFFER);
3417
3418 const DeviceInterface &vk = m_context.getDeviceInterface();
3419 const VkDevice device = m_context.getDevice();
3420 Allocator &allocator = m_context.getAllocator();
3421 const VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
3422 const uint32_t pixelSize = tcu::getPixelSize(mapVkFormat(format));
3423
3424 DE_ASSERT((m_resource.getBuffer().size % pixelSize) == 0);
3425 m_imageExtent = get2DImageExtentWithSize(m_resource.getBuffer().size,
3426 pixelSize); // there may be some unused space at the end
3427
3428 // Copy destination image.
3429 m_image = de::MovePtr<Image>(
3430 new Image(vk, device, allocator,
3431 makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_imageExtent, format,
3432 (VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
3433 VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
3434 MemoryRequirement::Any));
3435
3436 // Image data will be copied here, so it can be read on the host.
3437 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
3438 vk, device, allocator, makeBufferCreateInfo(m_resource.getBuffer().size, VK_BUFFER_USAGE_TRANSFER_DST_BIT),
3439 MemoryRequirement::HostVisible));
3440 }
3441
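// Read-back happens in two steps: the buffer resource is copied into the intermediate image,
// the image is transitioned to TRANSFER_SRC_OPTIMAL and copied into the host-visible buffer,
// and a final barrier makes the result visible to host reads.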
3442 void recordCommands(const VkCommandBuffer cmdBuffer)
3443 {
3444 const DeviceInterface &vk = m_context.getDeviceInterface();
3445 const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_imageExtent, m_subresourceLayers);
3446 SynchronizationWrapperPtr synchronizationWrapper =
3447 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
3448
3449 // Resource -> Image
3450 {
3451 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3452 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3453 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3454 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3455 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3456 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3457 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3458 **m_image, // VkImage image
3459 m_subresourceRange // VkImageSubresourceRange subresourceRange
3460 );
3461 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
3462 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3463
3464 vk.cmdCopyBufferToImage(cmdBuffer, m_resource.getBuffer().handle, **m_image,
3465 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
3466 }
3467 // Image -> Host buffer
3468 {
3469 const VkImageMemoryBarrier2KHR imageLayoutBarrier2 = makeImageMemoryBarrier2(
3470 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3471 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
3472 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3473 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3474 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
3475 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
3476 **m_image, // VkImage image
3477 m_subresourceRange // VkImageSubresourceRange subresourceRange
3478 );
3479 VkDependencyInfoKHR layoutDependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageLayoutBarrier2);
3480 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &layoutDependencyInfo);
3481
3482 vk.cmdCopyImageToBuffer(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_hostBuffer, 1u,
3483 &copyRegion);
3484
3485 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
3486 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3487 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
3488 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3489 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3490 **m_hostBuffer, // VkBuffer buffer
3491 0u, // VkDeviceSize offset
3492 m_resource.getBuffer().size // VkDeviceSize size
3493 );
3494 VkDependencyInfoKHR bufferDependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
3495 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &bufferDependencyInfo);
3496 }
3497 }
3498
3499 SyncInfo getInSyncInfo(void) const
3500 {
3501 const SyncInfo syncInfo = {
3502 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3503 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
3504 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
3505 };
3506 return syncInfo;
3507 }
3508
3509 SyncInfo getOutSyncInfo(void) const
3510 {
3511 return emptySyncInfo;
3512 }
3513
3514 Data getData(void) const
3515 {
3516 return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
3517 }
3518
3519 void setData(const Data &data)
3520 {
3521 setHostBufferData(m_context, *m_hostBuffer, data);
3522 }
3523
3524 private:
3525 OperationContext &m_context;
3526 Resource &m_resource;
3527 const VkImageSubresourceRange m_subresourceRange;
3528 const VkImageSubresourceLayers m_subresourceLayers;
3529 de::MovePtr<Buffer> m_hostBuffer;
3530 de::MovePtr<Image> m_image;
3531 VkExtent3D m_imageExtent;
3532 };
3533
3534 class Support : public OperationSupport
3535 {
3536 public:
3537 Support(const ResourceDescription &resourceDesc, const AccessMode mode)
3538 : m_mode(mode)
3539 , m_resourceType(resourceDesc.type)
3540 , m_requiredQueueFlags(resourceDesc.type == RESOURCE_TYPE_IMAGE &&
3541 isDepthStencilFormat(resourceDesc.imageFormat) ?
3542 VK_QUEUE_GRAPHICS_BIT :
3543 VK_QUEUE_TRANSFER_BIT)
3544 {
3545 // From spec:
3546 // Because depth or stencil aspect buffer to image copies may require format conversions on some implementations,
3547 // they are not supported on queues that do not support graphics.
3548
3549 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
3550 DE_ASSERT(m_mode == ACCESS_MODE_READ || resourceDesc.type != RESOURCE_TYPE_BUFFER);
3551 DE_ASSERT(m_mode == ACCESS_MODE_WRITE || resourceDesc.type != RESOURCE_TYPE_IMAGE);
3552 }
3553
3554 uint32_t getInResourceUsageFlags(void) const
3555 {
3556 if (m_resourceType == RESOURCE_TYPE_IMAGE)
3557 return m_mode == ACCESS_MODE_READ ? VK_IMAGE_USAGE_TRANSFER_SRC_BIT : 0;
3558 else
3559 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_SRC_BIT : 0;
3560 }
3561
3562 uint32_t getOutResourceUsageFlags(void) const
3563 {
3564 if (m_resourceType == RESOURCE_TYPE_IMAGE)
3565 return m_mode == ACCESS_MODE_WRITE ? VK_IMAGE_USAGE_TRANSFER_DST_BIT : 0;
3566 else
3567 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_TRANSFER_DST_BIT : 0;
3568 }
3569
3570 VkQueueFlags getQueueFlags(const OperationContext &context) const
3571 {
3572 DE_UNREF(context);
3573 return m_requiredQueueFlags;
3574 }
3575
3576 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
3577 {
3578 if (m_mode == ACCESS_MODE_READ)
3579 return de::MovePtr<Operation>(new ReadImplementation(context, resource));
3580 else
3581 return de::MovePtr<Operation>(new WriteImplementation(context, resource));
3582 }
3583
3584 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
3585 {
3586 DE_ASSERT(0);
3587 return de::MovePtr<Operation>();
3588 }
3589
3590 private:
3591 const AccessMode m_mode;
3592 const enum ResourceType m_resourceType;
3593 const VkQueueFlags m_requiredQueueFlags;
3594 };
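// Illustrative sketch (not part of the framework) of how a Support/Operation pair like the
// one above is typically driven; ctx, resource, and cmdBuffer are placeholders provided by
// the synchronization test framework:
//
//     CopyBufferToImage::Support support (resourceDesc, ACCESS_MODE_WRITE);
//     de::MovePtr<Operation> writeOp = support.build(ctx, resource);
//     writeOp->recordCommands(cmdBuffer);                   // records the staging upload + copy
//     const SyncInfo written = writeOp->getOutSyncInfo();   // stage/access/layout the reader waits on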
3595
3596 class CopyImplementation : public Operation
3597 {
3598 public:
3599 CopyImplementation(OperationContext &context, Resource &inResource, Resource &outResource)
3600 : m_context(context)
3601 , m_inResource(inResource)
3602 , m_outResource(outResource)
3603 {
3604 DE_ASSERT(m_inResource.getType() == RESOURCE_TYPE_BUFFER);
3605 DE_ASSERT(m_outResource.getType() == RESOURCE_TYPE_IMAGE);
3606 }
3607
3608 void recordCommands(const VkCommandBuffer cmdBuffer)
3609 {
3610 const DeviceInterface &vk = m_context.getDeviceInterface();
3611 const VkBufferImageCopy copyRegion =
3612 makeBufferImageCopy(m_outResource.getImage().extent, m_outResource.getImage().subresourceLayers);
3613 SynchronizationWrapperPtr synchronizationWrapper =
3614 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
3615
3616 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
3617 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3618 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3619 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3620 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3621 m_inResource.getBuffer().handle, // VkBuffer buffer
3622 0u, // VkDeviceSize offset
3623 m_inResource.getBuffer().size // VkDeviceSize size
3624 );
3625 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3626 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3627 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3628 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3629 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3630 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3631 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3632 m_outResource.getImage().handle, // VkImage image
3633 m_outResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
3634 );
3635 VkDependencyInfoKHR dependencyInfo =
3636 makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2, &imageMemoryBarrier2);
3637 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3638
3639 vk.cmdCopyBufferToImage(cmdBuffer, m_inResource.getBuffer().handle, m_outResource.getImage().handle,
3640 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, ©Region);
3641 }
3642
3643 SyncInfo getInSyncInfo(void) const
3644 {
3645 const SyncInfo syncInfo = {
3646 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3647 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
3648 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout imageLayout;
3649 };
3650 return syncInfo;
3651 }
3652
3653 SyncInfo getOutSyncInfo(void) const
3654 {
3655 const SyncInfo syncInfo = {
3656 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3657 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3658 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
3659 };
3660 return syncInfo;
3661 }
3662
3663 Data getData(void) const
3664 {
3665 Data data = {0, DE_NULL};
3666 return data;
3667 }
3668
3669 void setData(const Data &)
3670 {
3671 DE_ASSERT(0);
3672 }
3673
3674 private:
3675 OperationContext &m_context;
3676 Resource &m_inResource;
3677 Resource &m_outResource;
3678 };
3679
3680 class CopySupport : public OperationSupport
3681 {
3682 public:
3683 CopySupport(const ResourceDescription &resourceDesc)
3684 : m_resourceType(resourceDesc.type)
3685 , m_requiredQueueFlags(resourceDesc.type == RESOURCE_TYPE_IMAGE &&
3686 isDepthStencilFormat(resourceDesc.imageFormat) ?
3687 VK_QUEUE_GRAPHICS_BIT :
3688 VK_QUEUE_TRANSFER_BIT)
3689 {
3690 }
3691
3692 uint32_t getInResourceUsageFlags(void) const
3693 {
3694 if (m_resourceType == RESOURCE_TYPE_IMAGE)
3695 return VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
3696 else
3697 return VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3698 }
3699
3700 uint32_t getOutResourceUsageFlags(void) const
3701 {
3702 if (m_resourceType == RESOURCE_TYPE_IMAGE)
3703 return VK_IMAGE_USAGE_TRANSFER_DST_BIT;
3704 else
3705 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3706 }
3707
3708 VkQueueFlags getQueueFlags(const OperationContext &context) const
3709 {
3710 DE_UNREF(context);
3711 return m_requiredQueueFlags;
3712 }
3713
3714 de::MovePtr<Operation> build(OperationContext &, Resource &) const
3715 {
3716 DE_ASSERT(0);
3717 return de::MovePtr<Operation>();
3718 }
3719
3720 de::MovePtr<Operation> build(OperationContext &context, Resource &inResource, Resource &outResource) const
3721 {
3722 return de::MovePtr<Operation>(new CopyImplementation(context, inResource, outResource));
3723 }
3724
3725 private:
3726 const enum ResourceType m_resourceType;
3727 const VkQueueFlags m_requiredQueueFlags;
3728 };
3729
3730 } // namespace CopyBufferToImage
3731
3732 namespace CopyImageToBuffer
3733 {
3734
3735 class WriteImplementation : public Operation
3736 {
3737 public:
3738 WriteImplementation(OperationContext &context, Resource &resource)
3739 : m_context(context)
3740 , m_resource(resource)
3741 , m_subresourceRange(makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u))
3742 , m_subresourceLayers(makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u))
3743 {
3744 DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_BUFFER);
3745
3746 const DeviceInterface &vk = m_context.getDeviceInterface();
3747 const VkDevice device = m_context.getDevice();
3748 Allocator &allocator = m_context.getAllocator();
3749 const VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
3750 const uint32_t pixelSize = tcu::getPixelSize(mapVkFormat(format));
3751
3752 DE_ASSERT((m_resource.getBuffer().size % pixelSize) == 0);
3753 m_imageExtent = get2DImageExtentWithSize(m_resource.getBuffer().size, pixelSize);
3754
3755 // Source data staging buffer
3756 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
3757 vk, device, allocator, makeBufferCreateInfo(m_resource.getBuffer().size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT),
3758 MemoryRequirement::HostVisible));
3759
3760 const Allocation &alloc = m_hostBuffer->getAllocation();
3761 fillPattern(alloc.getHostPtr(), m_resource.getBuffer().size);
3762 flushAlloc(vk, device, alloc);
3763
3764 // Source data image
3765 m_image = de::MovePtr<Image>(
3766 new Image(vk, device, allocator,
3767 makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_imageExtent, format,
3768 (VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
3769 VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
3770 MemoryRequirement::Any));
3771 }
3772
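// The pattern travels host buffer -> intermediate image -> buffer resource, so the final
// buffer content matches what getData() returns for verification.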
3773 void recordCommands(const VkCommandBuffer cmdBuffer)
3774 {
3775 const DeviceInterface &vk = m_context.getDeviceInterface();
3776 const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_imageExtent, m_subresourceLayers);
3777 SynchronizationWrapperPtr synchronizationWrapper =
3778 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
3779
3780 // Host buffer -> Image
3781 {
3782 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3783 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3784 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3785 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3786 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3787 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3788 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3789 **m_image, // VkImage image
3790 m_subresourceRange // VkImageSubresourceRange subresourceRange
3791 );
3792 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
3793 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3794
3795 vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u,
3796 &copyRegion);
3797 }
3798 // Image -> Resource
3799 {
3800 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3801 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3802 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
3803 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3804 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3805 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
3806 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
3807 **m_image, // VkImage image
3808 m_subresourceRange // VkImageSubresourceRange subresourceRange
3809 );
3810 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
3811 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3812
3813 vk.cmdCopyImageToBuffer(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3814 m_resource.getBuffer().handle, 1u, &copyRegion);
3815 }
3816 }
3817
3818 SyncInfo getInSyncInfo(void) const
3819 {
3820 return emptySyncInfo;
3821 }
3822
3823 SyncInfo getOutSyncInfo(void) const
3824 {
3825 const SyncInfo syncInfo = {
3826 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3827 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3828 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
3829 };
3830 return syncInfo;
3831 }
3832
3833 Data getData(void) const
3834 {
3835 return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
3836 }
3837
3838 void setData(const Data &data)
3839 {
3840 setHostBufferData(m_context, *m_hostBuffer, data);
3841 }
3842
3843 private:
3844 OperationContext &m_context;
3845 Resource &m_resource;
3846 const VkImageSubresourceRange m_subresourceRange;
3847 const VkImageSubresourceLayers m_subresourceLayers;
3848 de::MovePtr<Buffer> m_hostBuffer;
3849 de::MovePtr<Image> m_image;
3850 VkExtent3D m_imageExtent;
3851 };
3852
3853 class ReadImplementation : public Operation
3854 {
3855 public:
3856 ReadImplementation(OperationContext &context, Resource &resource)
3857 : m_context(context)
3858 , m_resource(resource)
3859 , m_bufferSize(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
3860 {
3861 DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_IMAGE);
3862
3863 const DeviceInterface &vk = m_context.getDeviceInterface();
3864 const VkDevice device = m_context.getDevice();
3865 Allocator &allocator = m_context.getAllocator();
3866
3867 m_hostBuffer = de::MovePtr<Buffer>(
3868 new Buffer(vk, device, allocator, makeBufferCreateInfo(m_bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT),
3869 MemoryRequirement::HostVisible));
3870
3871 const Allocation &alloc = m_hostBuffer->getAllocation();
3872 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_bufferSize));
3873 flushAlloc(vk, device, alloc);
3874 }
3875
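// The image resource is expected to already be in TRANSFER_SRC_OPTIMAL (see getInSyncInfo());
// it is copied straight into the host buffer and a transfer-to-host barrier makes the data
// readable on the CPU.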
3876 void recordCommands(const VkCommandBuffer cmdBuffer)
3877 {
3878 const DeviceInterface &vk = m_context.getDeviceInterface();
3879 const VkBufferImageCopy copyRegion =
3880 makeBufferImageCopy(m_resource.getImage().extent, m_resource.getImage().subresourceLayers);
3881 SynchronizationWrapperPtr synchronizationWrapper =
3882 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
3883
3884 vk.cmdCopyImageToBuffer(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3885 **m_hostBuffer, 1u, &copyRegion);
3886
3887 // Insert a barrier so data written by the transfer is available to the host
3888 {
3889 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
3890 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3891 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
3892 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3893 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3894 **m_hostBuffer, // VkBuffer buffer
3895 0u, // VkDeviceSize offset
3896 VK_WHOLE_SIZE // VkDeviceSize size
3897 );
3898 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
3899 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3900 }
3901 }
3902
3903 SyncInfo getInSyncInfo(void) const
3904 {
3905 const SyncInfo syncInfo = {
3906 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3907 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
3908 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout imageLayout;
3909 };
3910 return syncInfo;
3911 }
3912
3913 SyncInfo getOutSyncInfo(void) const
3914 {
3915 return emptySyncInfo;
3916 }
3917
3918 Data getData(void) const
3919 {
3920 return getHostBufferData(m_context, *m_hostBuffer, m_bufferSize);
3921 }
3922
3923 void setData(const Data &)
3924 {
3925 DE_ASSERT(0);
3926 }
3927
3928 private:
3929 OperationContext &m_context;
3930 Resource &m_resource;
3931 de::MovePtr<Buffer> m_hostBuffer;
3932 const VkDeviceSize m_bufferSize;
3933 };
3934
3935 class CopyImplementation : public Operation
3936 {
3937 public:
3938 CopyImplementation(OperationContext &context, Resource &inResource, Resource &outResource)
3939 : m_context(context)
3940 , m_inResource(inResource)
3941 , m_outResource(outResource)
3942 , m_subresourceRange(makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u))
3943 , m_subresourceLayers(makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u))
3944 {
3945 DE_ASSERT(m_inResource.getType() == RESOURCE_TYPE_IMAGE);
3946 DE_ASSERT(m_outResource.getType() == RESOURCE_TYPE_BUFFER);
3947 }
3948
3949 void recordCommands(const VkCommandBuffer cmdBuffer)
3950 {
3951 const DeviceInterface &vk = m_context.getDeviceInterface();
3952 const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_inResource.getImage().extent, m_subresourceLayers);
3953 SynchronizationWrapperPtr synchronizationWrapper =
3954 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
3955
3956 {
3957 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3958 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3959 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3960 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3961 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3962 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3963 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
3964 m_inResource.getImage().handle, // VkImage image
3965 m_inResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
3966 );
3967 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
3968 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3969 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3970 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3971 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3972 m_outResource.getBuffer().handle, // VkBuffer buffer
3973 0u, // VkDeviceSize offset
3974 m_outResource.getBuffer().size // VkDeviceSize size
3975 );
3976 VkDependencyInfoKHR dependencyInfo =
3977 makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2, &imageMemoryBarrier2);
3978 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3979 }
3980
3981 vk.cmdCopyImageToBuffer(cmdBuffer, m_inResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3982 m_outResource.getBuffer().handle, 1u, &copyRegion);
3983 }
3984
3985 SyncInfo getInSyncInfo(void) const
3986 {
3987 const SyncInfo syncInfo = {
3988 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3989 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
3990 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
3991 };
3992 return syncInfo;
3993 }
3994
3995 SyncInfo getOutSyncInfo(void) const
3996 {
3997 const SyncInfo syncInfo = {
3998 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3999 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
4000 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
4001 };
4002 return syncInfo;
4003 }
4004
4005 Data getData(void) const
4006 {
4007 Data data = {0, DE_NULL};
4008 return data;
4009 }
4010
4011 void setData(const Data &)
4012 {
4013 DE_ASSERT(0);
4014 }
4015
4016 private:
4017 OperationContext &m_context;
4018 Resource &m_inResource;
4019 Resource &m_outResource;
4020 const VkImageSubresourceRange m_subresourceRange;
4021 const VkImageSubresourceLayers m_subresourceLayers;
4022 };
4023
4024 class Support : public OperationSupport
4025 {
4026 public:
4027 Support(const ResourceDescription &resourceDesc, const AccessMode mode)
4028 : m_mode(mode)
4029 , m_requiredQueueFlags(resourceDesc.type == RESOURCE_TYPE_IMAGE &&
4030 isDepthStencilFormat(resourceDesc.imageFormat) ?
4031 VK_QUEUE_GRAPHICS_BIT :
4032 VK_QUEUE_TRANSFER_BIT)
4033 {
4034 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
4035 DE_ASSERT(m_mode == ACCESS_MODE_READ || resourceDesc.type != RESOURCE_TYPE_IMAGE);
4036 DE_ASSERT(m_mode == ACCESS_MODE_WRITE || resourceDesc.type != RESOURCE_TYPE_BUFFER);
4037 }
4038
4039 uint32_t getInResourceUsageFlags(void) const
4040 {
4041 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_SRC_BIT : 0;
4042 }
4043
4044 uint32_t getOutResourceUsageFlags(void) const
4045 {
4046 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_TRANSFER_DST_BIT : 0;
4047 }
4048
4049 VkQueueFlags getQueueFlags(const OperationContext &context) const
4050 {
4051 DE_UNREF(context);
4052 return m_requiredQueueFlags;
4053 }
4054
4055 de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
4056 {
4057 if (m_mode == ACCESS_MODE_READ)
4058 return de::MovePtr<Operation>(new ReadImplementation(context, resource));
4059 else
4060 return de::MovePtr<Operation>(new WriteImplementation(context, resource));
4061 }
4062
4063 de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
4064 {
4065 DE_ASSERT(0);
4066 return de::MovePtr<Operation>();
4067 }
4068
4069 private:
4070 const AccessMode m_mode;
4071 const VkQueueFlags m_requiredQueueFlags;
4072 };
4073
4074 } // namespace CopyImageToBuffer
4075
4076 namespace ClearImage
4077 {
4078
4079 enum ClearMode
4080 {
4081 CLEAR_MODE_COLOR,
4082 CLEAR_MODE_DEPTH_STENCIL,
4083 };
4084
4085 class Implementation : public Operation
4086 {
4087 public:
4088 Implementation(OperationContext &context, Resource &resource, const ClearMode mode)
4089 : m_context(context)
4090 , m_resource(resource)
4091 , m_clearValue(makeClearValue(m_resource.getImage().format))
4092 , m_mode(mode)
4093 {
4094 const VkDeviceSize size = getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent);
4095 const VkExtent3D &extent = m_resource.getImage().extent;
4096 const VkFormat format = m_resource.getImage().format;
4097 const tcu::TextureFormat texFormat = mapVkFormat(format);
4098
4099 m_data.resize(static_cast<std::size_t>(size));
4100 tcu::PixelBufferAccess imagePixels(texFormat, extent.width, extent.height, extent.depth, &m_data[0]);
4101 clearPixelBuffer(imagePixels, m_clearValue);
4102 }
4103
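// Transition the image for the clear (synchronization2 reports the dedicated CLEAR stage,
// legacy synchronization uses TRANSFER) and record either a color or a depth/stencil clear.
// m_data, filled in the constructor, is the host-side reference for the expected result.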
4104 void recordCommands(const VkCommandBuffer cmdBuffer)
4105 {
4106 const DeviceInterface &vk = m_context.getDeviceInterface();
4107 SynchronizationWrapperPtr synchronizationWrapper =
4108 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
4109
4110 VkPipelineStageFlags2KHR dstStageMask = VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR;
4111 if (m_context.getSynchronizationType() == SynchronizationType::SYNCHRONIZATION2)
4112 dstStageMask = VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR;
4113
4114 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
4115 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
4116 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
4117 dstStageMask, // VkPipelineStageFlags2KHR dstStageMask
4118 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
4119 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
4120 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
4121 m_resource.getImage().handle, // VkImage image
4122 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
4123 );
4124 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
4125 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
4126
4127 if (m_mode == CLEAR_MODE_COLOR)
4128 vk.cmdClearColorImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
4129 &m_clearValue.color, 1u, &m_resource.getImage().subresourceRange);
4130 else
4131 vk.cmdClearDepthStencilImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
4132 &m_clearValue.depthStencil, 1u, &m_resource.getImage().subresourceRange);
4133 }
4134
4135 SyncInfo getInSyncInfo(void) const
4136 {
4137 return emptySyncInfo;
4138 }
4139
4140 SyncInfo getOutSyncInfo(void) const
4141 {
4142 VkPipelineStageFlags2KHR stageMask = VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR;
4143 if (m_context.getSynchronizationType() == SynchronizationType::SYNCHRONIZATION2)
4144 stageMask = VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR;
4145
4146 return {
4147 stageMask, // VkPipelineStageFlags stageMask;
4148 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
4149 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
4150 };
4151 }
4152
4153 Data getData(void) const
4154 {
4155 const Data data = {
4156 m_data.size(), // std::size_t size;
4157 &m_data[0], // const uint8_t* data;
4158 };
4159 return data;
4160 }
4161
4162 void setData(const Data &)
4163 {
4164 DE_ASSERT(0);
4165 }
4166
4167 private:
4168 OperationContext &m_context;
4169 Resource &m_resource;
4170 std::vector<uint8_t> m_data;
4171 const VkClearValue m_clearValue;
4172 const ClearMode m_mode;
4173 };
4174
4175 class Support : public OperationSupport
4176 {
4177 public:
4178 Support(const ResourceDescription &resourceDesc, const ClearMode mode) : m_resourceDesc(resourceDesc), m_mode(mode)
4179 {
4180 DE_ASSERT(m_mode == CLEAR_MODE_COLOR || m_mode == CLEAR_MODE_DEPTH_STENCIL);
4181 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
4182 DE_ASSERT(m_resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT || (m_mode != CLEAR_MODE_COLOR));
4183 DE_ASSERT((m_resourceDesc.imageAspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) ||
4184 (m_mode != CLEAR_MODE_DEPTH_STENCIL));
4185 }
4186
4187 uint32_t getInResourceUsageFlags(void) const
4188 {
4189 return 0;
4190 }
4191
4192 uint32_t getOutResourceUsageFlags(void) const
4193 {
4194 return VK_IMAGE_USAGE_TRANSFER_DST_BIT;
4195 }
4196
4197 VkQueueFlags getQueueFlags(const OperationContext &context) const
4198 {
4199 DE_UNREF(context);
4200 if (m_mode == CLEAR_MODE_COLOR)
4201 return VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
4202 else
4203 return VK_QUEUE_GRAPHICS_BIT;
4204 }
4205
4206     de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
4207 {
4208 return de::MovePtr<Operation>(new Implementation(context, resource, m_mode));
4209 }
4210
4211     de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
4212 {
4213 DE_ASSERT(0);
4214 return de::MovePtr<Operation>();
4215 }
4216
4217 private:
4218 const ResourceDescription m_resourceDesc;
4219 const ClearMode m_mode;
4220 };
4221
4222 } // namespace ClearImage
4223
4224 namespace Draw
4225 {
4226
4227 enum DrawCall
4228 {
4229 DRAW_CALL_DRAW,
4230 DRAW_CALL_DRAW_INDEXED,
4231 DRAW_CALL_DRAW_INDIRECT,
4232 DRAW_CALL_DRAW_INDEXED_INDIRECT,
4233 };
4234
4235 //! A write operation that is a result of drawing to an image.
4236 //! \todo Add support for depth/stencil too?
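//! For the indirect variants the draw parameters are read from a small host-visible indirect buffer that is
//! filled in the constructor; the direct variants issue vkCmdDraw/vkCmdDrawIndexed over the same vertex grid.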
4237 class Implementation : public Operation
4238 {
4239 public:
4240     Implementation(OperationContext &context, Resource &resource, const DrawCall drawCall)
4241 : m_context(context)
4242 , m_resource(resource)
4243 , m_drawCall(drawCall)
4244 , m_vertices(context)
4245 {
4246 const DeviceInterface &vk = context.getDeviceInterface();
4247 const VkDevice device = context.getDevice();
4248 Allocator &allocator = context.getAllocator();
4249
4250 // Indirect buffer
4251
4252 if (m_drawCall == DRAW_CALL_DRAW_INDIRECT)
4253 {
4254 m_indirectBuffer = de::MovePtr<Buffer>(
4255 new Buffer(vk, device, allocator,
4256 makeBufferCreateInfo(sizeof(VkDrawIndirectCommand), VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT),
4257 MemoryRequirement::HostVisible));
4258
4259 const Allocation &alloc = m_indirectBuffer->getAllocation();
4260 VkDrawIndirectCommand *const pIndirectCommand = static_cast<VkDrawIndirectCommand *>(alloc.getHostPtr());
4261
4262 pIndirectCommand->vertexCount = m_vertices.getNumVertices();
4263 pIndirectCommand->instanceCount = 1u;
4264 pIndirectCommand->firstVertex = 0u;
4265 pIndirectCommand->firstInstance = 0u;
4266
4267 flushAlloc(vk, device, alloc);
4268 }
4269 else if (m_drawCall == DRAW_CALL_DRAW_INDEXED_INDIRECT)
4270 {
4271 m_indirectBuffer = de::MovePtr<Buffer>(new Buffer(
4272 vk, device, allocator,
4273 makeBufferCreateInfo(sizeof(VkDrawIndexedIndirectCommand), VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT),
4274 MemoryRequirement::HostVisible));
4275
4276 const Allocation &alloc = m_indirectBuffer->getAllocation();
4277 VkDrawIndexedIndirectCommand *const pIndirectCommand =
4278 static_cast<VkDrawIndexedIndirectCommand *>(alloc.getHostPtr());
4279
4280 pIndirectCommand->indexCount = m_vertices.getNumIndices();
4281 pIndirectCommand->instanceCount = 1u;
4282 pIndirectCommand->firstIndex = 0u;
4283 pIndirectCommand->vertexOffset = 0u;
4284 pIndirectCommand->firstInstance = 0u;
4285
4286 flushAlloc(vk, device, alloc);
4287 }
4288
4289 // Resource image is the color attachment
4290
4291 m_colorFormat = m_resource.getImage().format;
4292 m_colorSubresourceRange = m_resource.getImage().subresourceRange;
4293 m_colorImage = m_resource.getImage().handle;
4294 m_attachmentExtent = m_resource.getImage().extent;
4295
4296 // Pipeline
4297
4298 m_colorAttachmentView =
4299 makeImageView(vk, device, m_colorImage, VK_IMAGE_VIEW_TYPE_2D, m_colorFormat, m_colorSubresourceRange);
4300 m_renderPass = makeRenderPass(vk, device, m_colorFormat);
4301 m_framebuffer = makeFramebuffer(vk, device, *m_renderPass, *m_colorAttachmentView, m_attachmentExtent.width,
4302 m_attachmentExtent.height);
4303 m_pipelineLayout = makePipelineLayout(vk, device);
4304
4305 GraphicsPipelineBuilder pipelineBuilder;
4306 pipelineBuilder.setRenderSize(tcu::IVec2(m_attachmentExtent.width, m_attachmentExtent.height))
4307 .setVertexInputSingleAttribute(m_vertices.getVertexFormat(), m_vertices.getVertexStride())
4308 .setShader(vk, device, VK_SHADER_STAGE_VERTEX_BIT, context.getBinaryCollection().get("draw_vert"), DE_NULL)
4309 .setShader(vk, device, VK_SHADER_STAGE_FRAGMENT_BIT, context.getBinaryCollection().get("draw_frag"),
4310 DE_NULL);
4311
4312 m_pipeline = pipelineBuilder.build(vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData(),
4313 context.getResourceInterface());
4314
4315 // Set expected draw values
4316
4317 m_expectedData.resize(
4318 static_cast<size_t>(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent)));
4319 tcu::PixelBufferAccess imagePixels(mapVkFormat(m_colorFormat), m_attachmentExtent.width,
4320 m_attachmentExtent.height, m_attachmentExtent.depth, &m_expectedData[0]);
4321 clearPixelBuffer(imagePixels, makeClearValue(m_colorFormat));
4322 }
4323
4324     void recordCommands(const VkCommandBuffer cmdBuffer)
4325 {
4326 const DeviceInterface &vk = m_context.getDeviceInterface();
4327 SynchronizationWrapperPtr synchronizationWrapper =
4328 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
4329
4330 // Change color attachment image layout
4331 {
4332 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
4333 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
4334 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
4335 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
4336 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
4337 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
4338 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout newLayout
4339 m_colorImage, // VkImage image
4340 m_colorSubresourceRange // VkImageSubresourceRange subresourceRange
4341 );
4342 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
4343 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
4344 }
4345
4346 {
4347 const VkRect2D renderArea = makeRect2D(m_attachmentExtent);
4348 const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
4349
4350 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
4351 }
4352
4353 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
4354 {
4355 const VkDeviceSize vertexBufferOffset = 0ull;
4356 const VkBuffer vertexBuffer = m_vertices.getVertexBuffer();
4357 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &vertexBufferOffset);
4358 }
4359
4360 if (m_drawCall == DRAW_CALL_DRAW_INDEXED || m_drawCall == DRAW_CALL_DRAW_INDEXED_INDIRECT)
4361 vk.cmdBindIndexBuffer(cmdBuffer, m_vertices.getIndexBuffer(), 0u, m_vertices.getIndexType());
4362
4363 switch (m_drawCall)
4364 {
4365 case DRAW_CALL_DRAW:
4366 vk.cmdDraw(cmdBuffer, m_vertices.getNumVertices(), 1u, 0u, 0u);
4367 break;
4368
4369 case DRAW_CALL_DRAW_INDEXED:
4370 vk.cmdDrawIndexed(cmdBuffer, m_vertices.getNumIndices(), 1u, 0u, 0, 0u);
4371 break;
4372
4373 case DRAW_CALL_DRAW_INDIRECT:
4374 vk.cmdDrawIndirect(cmdBuffer, **m_indirectBuffer, 0u, 1u, 0u);
4375 break;
4376
4377 case DRAW_CALL_DRAW_INDEXED_INDIRECT:
4378 vk.cmdDrawIndexedIndirect(cmdBuffer, **m_indirectBuffer, 0u, 1u, 0u);
4379 break;
4380 }
4381
4382 endRenderPass(vk, cmdBuffer);
4383 }
4384
4385     SyncInfo getInSyncInfo(void) const
4386 {
4387 return emptySyncInfo;
4388 }
4389
4390     SyncInfo getOutSyncInfo(void) const
4391 {
4392 const SyncInfo syncInfo = {
4393 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, // VkPipelineStageFlags stageMask;
4394 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags accessMask;
4395 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout imageLayout;
4396 };
4397 return syncInfo;
4398 }
4399
4400     Data getData(void) const
4401 {
4402 const Data data = {
4403 m_expectedData.size(), // std::size_t size;
4404 &m_expectedData[0], // const uint8_t* data;
4405 };
4406 return data;
4407 }
4408
4409     void setData(const Data &data)
4410 {
4411 DE_ASSERT(m_expectedData.size() == data.size);
4412 deMemcpy(&m_expectedData[0], data.data, data.size);
4413 }
4414
4415 private:
4416 OperationContext &m_context;
4417 Resource &m_resource;
4418 const DrawCall m_drawCall;
4419 const VertexGrid m_vertices;
4420 std::vector<uint8_t> m_expectedData;
4421 de::MovePtr<Buffer> m_indirectBuffer;
4422 VkFormat m_colorFormat;
4423 VkImage m_colorImage;
4424 Move<VkImageView> m_colorAttachmentView;
4425 VkImageSubresourceRange m_colorSubresourceRange;
4426 VkExtent3D m_attachmentExtent;
4427 Move<VkRenderPass> m_renderPass;
4428 Move<VkFramebuffer> m_framebuffer;
4429 Move<VkPipelineLayout> m_pipelineLayout;
4430 Move<VkPipeline> m_pipeline;
4431 };
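
// Illustrative sketch only (never compiled, guarded by #if 0): how an operation built from a Support object
// would typically be driven by a test body. The command buffer helpers come from vkCmdUtil.hpp; "support",
// "resource", "cmdPool" and "queue" are hypothetical objects assumed to be created by the caller.
#if 0
void runOperationOnce(OperationContext &context, OperationSupport &support, Resource &resource,
                      const VkCommandPool cmdPool, const VkQueue queue)
{
    const DeviceInterface &vk     = context.getDeviceInterface();
    const VkDevice device         = context.getDevice();
    const Unique<VkCommandBuffer> cmdBuffer(
        allocateCommandBuffer(vk, device, cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
    const de::UniquePtr<Operation> op(support.build(context, resource));

    beginCommandBuffer(vk, *cmdBuffer);
    op->recordCommands(*cmdBuffer); // records the layout transition, render pass and draw call
    endCommandBuffer(vk, *cmdBuffer);
    submitCommandsAndWait(vk, device, queue, *cmdBuffer);

    const Data expected = op->getData(); // reference data to compare against the resource contents
    DE_UNREF(expected);
}
#endif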
4432
4433 template <typename T, std::size_t N>
4434 std::string toString(const T (&values)[N])
4435 {
4436 std::ostringstream str;
4437 for (std::size_t i = 0; i < N; ++i)
4438 str << (i != 0 ? ", " : "") << values[i];
4439 return str.str();
4440 }
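// For example, toString applied to a float[4] holding {0.0f, 0.5f, 0.0f, 1.0f} yields "0, 0.5, 0, 1"; the
// result is pasted verbatim into the generated fragment shader below as the constant output color.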
4441
4442 class Support : public OperationSupport
4443 {
4444 public:
4445     Support(const ResourceDescription &resourceDesc, const DrawCall drawCall)
4446 : m_resourceDesc(resourceDesc)
4447 , m_drawCall(drawCall)
4448 {
4449 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE && m_resourceDesc.imageType == VK_IMAGE_TYPE_2D);
4450 DE_ASSERT(!isDepthStencilFormat(m_resourceDesc.imageFormat));
4451 }
4452
4453     void initPrograms(SourceCollections &programCollection) const
4454 {
4455 // Vertex
4456 {
4457 std::ostringstream src;
4458 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
4459 << "\n"
4460 << "layout(location = 0) in vec4 v_in_position;\n"
4461 << "\n"
4462 << "out " << s_perVertexBlock << ";\n"
4463 << "\n"
4464 << "void main (void)\n"
4465 << "{\n"
4466 << " gl_Position = v_in_position;\n"
4467 << "}\n";
4468
4469 programCollection.glslSources.add("draw_vert") << glu::VertexSource(src.str());
4470 }
4471
4472 // Fragment
4473 {
4474 const VkClearValue clearValue = makeClearValue(m_resourceDesc.imageFormat);
4475 const bool isIntegerFormat =
4476 isIntFormat(m_resourceDesc.imageFormat) || isUintFormat(m_resourceDesc.imageFormat);
4477 const std::string colorType = (isIntegerFormat ? "uvec4" : "vec4");
4478
4479 std::ostringstream src;
4480 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
4481 << "\n"
4482 << "layout(location = 0) out " << colorType << " o_color;\n"
4483 << "\n"
4484 << "void main (void)\n"
4485 << "{\n"
4486 << " o_color = " << colorType << "("
4487 << (isIntegerFormat ? toString(clearValue.color.uint32) : toString(clearValue.color.float32)) << ");\n"
4488 << "}\n";
4489
4490 programCollection.glslSources.add("draw_frag") << glu::FragmentSource(src.str());
4491 }
4492 }
4493
4494     uint32_t getInResourceUsageFlags(void) const
4495 {
4496 return 0;
4497 }
4498
4499     uint32_t getOutResourceUsageFlags(void) const
4500 {
4501 return VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
4502 }
4503
4504     VkQueueFlags getQueueFlags(const OperationContext &context) const
4505 {
4506 DE_UNREF(context);
4507 return VK_QUEUE_GRAPHICS_BIT;
4508 }
4509
4510     de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
4511 {
4512 return de::MovePtr<Operation>(new Implementation(context, resource, m_drawCall));
4513 }
4514
4515     de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
4516 {
4517 DE_ASSERT(0);
4518 return de::MovePtr<Operation>();
4519 }
4520
4521 private:
4522 const ResourceDescription m_resourceDesc;
4523 const DrawCall m_drawCall;
4524 };
4525
4526 } // namespace Draw
4527
4528 namespace ClearAttachments
4529 {
4530
4531 class Implementation : public Operation
4532 {
4533 public:
4534     Implementation(OperationContext &context, Resource &resource)
4535 : m_context(context)
4536 , m_resource(resource)
4537 , m_clearValue(makeClearValue(m_resource.getImage().format))
4538 {
4539 const DeviceInterface &vk = context.getDeviceInterface();
4540 const VkDevice device = context.getDevice();
4541
4542 const VkDeviceSize size = getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent);
4543 const VkExtent3D &extent = m_resource.getImage().extent;
4544 const VkFormat format = m_resource.getImage().format;
4545 const tcu::TextureFormat texFormat = mapVkFormat(format);
4546 const SyncInfo syncInfo = getOutSyncInfo();
4547
4548 m_data.resize(static_cast<std::size_t>(size));
4549 tcu::PixelBufferAccess imagePixels(texFormat, extent.width, extent.height, extent.depth, &m_data[0]);
4550 clearPixelBuffer(imagePixels, m_clearValue);
4551
4552 m_attachmentView =
4553 makeImageView(vk, device, m_resource.getImage().handle, getImageViewType(m_resource.getImage().imageType),
4554 m_resource.getImage().format, m_resource.getImage().subresourceRange);
4555
4556 switch (m_resource.getImage().subresourceRange.aspectMask)
4557 {
4558 case VK_IMAGE_ASPECT_COLOR_BIT:
4559 m_renderPass = makeRenderPass(vk, device, m_resource.getImage().format, VK_FORMAT_UNDEFINED,
4560 VK_ATTACHMENT_LOAD_OP_DONT_CARE, syncInfo.imageLayout);
4561 break;
4562 case VK_IMAGE_ASPECT_STENCIL_BIT:
4563 case VK_IMAGE_ASPECT_DEPTH_BIT:
4564 m_renderPass = makeRenderPass(vk, device, VK_FORMAT_UNDEFINED, m_resource.getImage().format,
4565 VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
4566 syncInfo.imageLayout);
4567 break;
4568 default:
4569 DE_ASSERT(0);
4570 break;
4571 }
4572
4573 m_frameBuffer = makeFramebuffer(vk, device, *m_renderPass, *m_attachmentView,
4574 m_resource.getImage().extent.width, m_resource.getImage().extent.height);
4575 }
4576
4577     void recordCommands(const VkCommandBuffer cmdBuffer)
4578 {
4579 const DeviceInterface &vk = m_context.getDeviceInterface();
4580 if ((m_resource.getImage().subresourceRange.aspectMask &
4581 (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) != 0)
4582 {
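            // Transition the depth/stencil image from UNDEFINED to the attachment layout expected when the
            // render pass begins.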
4583 const VkImageMemoryBarrier imageBarrier = {
4584 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // sType
4585 DE_NULL, // pNext
4586 0u, // srcAccessMask
4587 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, // dstAccessMask
4588 VK_IMAGE_LAYOUT_UNDEFINED, // oldLayout
4589 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, // newLayout
4590 VK_QUEUE_FAMILY_IGNORED, // srcQueueFamilyIndex
4591 VK_QUEUE_FAMILY_IGNORED, // dstQueueFamilyIndex
4592 m_resource.getImage().handle, // image
4593 m_resource.getImage().subresourceRange // subresourceRange
4594 };
4595 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT,
4596 VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
4597 VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
4598 0u, 0u, DE_NULL, 0u, DE_NULL, 1u, &imageBarrier);
4599 }
4600 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_frameBuffer,
4601 makeRect2D(0, 0, m_resource.getImage().extent.width, m_resource.getImage().extent.height),
4602 m_clearValue);
4603
4604 const VkClearAttachment clearAttachment = {
4605 m_resource.getImage().subresourceRange.aspectMask, // VkImageAspectFlags aspectMask;
4606 0, // uint32_t colorAttachment;
4607 m_clearValue // VkClearValue clearValue;
4608 };
4609
4610 const VkRect2D rect2D = makeRect2D(m_resource.getImage().extent);
4611
4612 const VkClearRect clearRect = {
4613 rect2D, // VkRect2D rect;
4614 0u, // uint32_t baseArrayLayer;
4615 m_resource.getImage().subresourceLayers.layerCount // uint32_t layerCount;
4616 };
4617
4618 vk.cmdClearAttachments(cmdBuffer, 1, &clearAttachment, 1, &clearRect);
4619
4620 endRenderPass(vk, cmdBuffer);
4621 }
4622
4623     SyncInfo getInSyncInfo(void) const
4624 {
4625 return emptySyncInfo;
4626 }
4627
4628     SyncInfo getOutSyncInfo(void) const
4629 {
4630 SyncInfo syncInfo;
4631 syncInfo.stageMask = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR;
4632
4633 switch (m_resource.getImage().subresourceRange.aspectMask)
4634 {
4635 case VK_IMAGE_ASPECT_COLOR_BIT:
4636 syncInfo.accessMask = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR;
4637 syncInfo.imageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
4638 break;
4639 case VK_IMAGE_ASPECT_STENCIL_BIT:
4640 case VK_IMAGE_ASPECT_DEPTH_BIT:
4641 syncInfo.accessMask = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR;
4642 syncInfo.imageLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
4643 break;
4644 default:
4645 DE_ASSERT(0);
4646 break;
4647 }
4648
4649 return syncInfo;
4650 }
4651
4652     Data getData(void) const
4653 {
4654 const Data data = {
4655 m_data.size(), // std::size_t size;
4656 &m_data[0], // const uint8_t* data;
4657 };
4658 return data;
4659 }
4660
4661     void setData(const Data &)
4662 {
4663 DE_ASSERT(0);
4664 }
4665
4666 private:
4667 OperationContext &m_context;
4668 Resource &m_resource;
4669 std::vector<uint8_t> m_data;
4670 const VkClearValue m_clearValue;
4671 Move<VkImageView> m_attachmentView;
4672 Move<VkRenderPass> m_renderPass;
4673 Move<VkFramebuffer> m_frameBuffer;
4674 };
4675
4676 class Support : public OperationSupport
4677 {
4678 public:
4679     Support(const ResourceDescription &resourceDesc) : m_resourceDesc(resourceDesc)
4680 {
4681 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
4682 }
4683
4684     uint32_t getInResourceUsageFlags(void) const
4685 {
4686 return 0;
4687 }
4688
4689     uint32_t getOutResourceUsageFlags(void) const
4690 {
4691 switch (m_resourceDesc.imageAspect)
4692 {
4693 case VK_IMAGE_ASPECT_COLOR_BIT:
4694 return VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
4695 case VK_IMAGE_ASPECT_STENCIL_BIT:
4696 case VK_IMAGE_ASPECT_DEPTH_BIT:
4697 return VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
4698 default:
4699 DE_ASSERT(0);
4700 }
4701 return 0u;
4702 }
4703
4704     VkQueueFlags getQueueFlags(const OperationContext &context) const
4705 {
4706 DE_UNREF(context);
4707 return VK_QUEUE_GRAPHICS_BIT;
4708 }
4709
4710     de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
4711 {
4712 return de::MovePtr<Operation>(new Implementation(context, resource));
4713 }
4714
4715     de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
4716 {
4717 DE_ASSERT(0);
4718 return de::MovePtr<Operation>();
4719 }
4720
4721 private:
4722 const ResourceDescription m_resourceDesc;
4723 };
4724
4725 } // namespace ClearAttachments
4726
4727 namespace IndirectBuffer
4728 {
4729
4730 class GraphicsPipeline : public Pipeline
4731 {
4732 public:
4733     GraphicsPipeline(OperationContext &context, const ResourceType resourceType, const VkBuffer indirectBuffer,
4734 const std::string &shaderPrefix, const VkDescriptorSetLayout descriptorSetLayout)
4735 : m_resourceType(resourceType)
4736 , m_indirectBuffer(indirectBuffer)
4737 , m_vertices(context)
4738 {
4739 const DeviceInterface &vk = context.getDeviceInterface();
4740 const VkDevice device = context.getDevice();
4741 Allocator &allocator = context.getAllocator();
4742
4743 // Color attachment
4744
4745 m_colorFormat = VK_FORMAT_R8G8B8A8_UNORM;
4746 m_colorImageSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
4747 m_colorImageExtent = makeExtent3D(16u, 16u, 1u);
4748 m_colorAttachmentImage = de::MovePtr<Image>(new Image(
4749 vk, device, allocator,
4750 makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_colorImageExtent, m_colorFormat,
4751 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
4752 MemoryRequirement::Any));
4753
4754 // Pipeline
4755
4756 m_colorAttachmentView = makeImageView(vk, device, **m_colorAttachmentImage, VK_IMAGE_VIEW_TYPE_2D,
4757 m_colorFormat, m_colorImageSubresourceRange);
4758 m_renderPass = makeRenderPass(vk, device, m_colorFormat);
4759 m_framebuffer = makeFramebuffer(vk, device, *m_renderPass, *m_colorAttachmentView, m_colorImageExtent.width,
4760 m_colorImageExtent.height);
4761 m_pipelineLayout = makePipelineLayout(vk, device, descriptorSetLayout);
4762
4763 GraphicsPipelineBuilder pipelineBuilder;
4764 pipelineBuilder.setRenderSize(tcu::IVec2(m_colorImageExtent.width, m_colorImageExtent.height))
4765 .setVertexInputSingleAttribute(m_vertices.getVertexFormat(), m_vertices.getVertexStride())
4766 .setShader(vk, device, VK_SHADER_STAGE_VERTEX_BIT, context.getBinaryCollection().get(shaderPrefix + "vert"),
4767 DE_NULL)
4768 .setShader(vk, device, VK_SHADER_STAGE_FRAGMENT_BIT,
4769 context.getBinaryCollection().get(shaderPrefix + "frag"), DE_NULL);
4770
4771 m_pipeline = pipelineBuilder.build(vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData(),
4772 context.getResourceInterface());
4773 }
4774
4775     void recordCommands(OperationContext &context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
4776 {
4777 const DeviceInterface &vk = context.getDeviceInterface();
4778 SynchronizationWrapperPtr synchronizationWrapper =
4779 getSynchronizationWrapper(context.getSynchronizationType(), vk, false);
4780
4781 // Change color attachment image layout
4782 {
4783 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
4784 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
4785 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
4786 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
4787 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
4788 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
4789 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout newLayout
4790 **m_colorAttachmentImage, // VkImage image
4791 m_colorImageSubresourceRange // VkImageSubresourceRange subresourceRange
4792 );
4793 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
4794 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
4795 }
4796
4797 {
4798 const VkRect2D renderArea = makeRect2D(m_colorImageExtent);
4799 const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
4800
4801 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
4802 }
4803
4804 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
4805 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1u, &descriptorSet,
4806 0u, DE_NULL);
4807 {
4808 const VkDeviceSize vertexBufferOffset = 0ull;
4809 const VkBuffer vertexBuffer = m_vertices.getVertexBuffer();
4810 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &vertexBufferOffset);
4811 }
4812
4813 switch (m_resourceType)
4814 {
4815 case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW:
4816 vk.cmdDrawIndirect(cmdBuffer, m_indirectBuffer, 0u, 1u, 0u);
4817 break;
4818
4819 case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW_INDEXED:
4820 vk.cmdBindIndexBuffer(cmdBuffer, m_vertices.getIndexBuffer(), 0u, m_vertices.getIndexType());
4821 vk.cmdDrawIndexedIndirect(cmdBuffer, m_indirectBuffer, 0u, 1u, 0u);
4822 break;
4823
4824 default:
4825 DE_ASSERT(0);
4826 break;
4827 }
4828 endRenderPass(vk, cmdBuffer);
4829 }
4830
4831 private:
4832 const ResourceType m_resourceType;
4833 const VkBuffer m_indirectBuffer;
4834 const VertexGrid m_vertices;
4835 VkFormat m_colorFormat;
4836 de::MovePtr<Image> m_colorAttachmentImage;
4837 Move<VkImageView> m_colorAttachmentView;
4838 VkExtent3D m_colorImageExtent;
4839 VkImageSubresourceRange m_colorImageSubresourceRange;
4840 Move<VkRenderPass> m_renderPass;
4841 Move<VkFramebuffer> m_framebuffer;
4842 Move<VkPipelineLayout> m_pipelineLayout;
4843 Move<VkPipeline> m_pipeline;
4844 };
4845
4846 class ComputePipeline : public Pipeline
4847 {
4848 public:
4849     ComputePipeline(OperationContext &context, const VkBuffer indirectBuffer, const std::string &shaderPrefix,
4850 const VkDescriptorSetLayout descriptorSetLayout)
4851 : m_indirectBuffer(indirectBuffer)
4852 {
4853 const DeviceInterface &vk = context.getDeviceInterface();
4854 const VkDevice device = context.getDevice();
4855
4856 const Unique<VkShaderModule> shaderModule(createShaderModule(
4857 vk, device, context.getBinaryCollection().get(shaderPrefix + "comp"), (VkShaderModuleCreateFlags)0));
4858
4859 m_pipelineLayout = makePipelineLayout(vk, device, descriptorSetLayout);
4860 m_pipeline = makeComputePipeline(vk, device, *m_pipelineLayout, *shaderModule, DE_NULL,
4861 context.getPipelineCacheData(), context.getResourceInterface());
4862 }
4863
4864     void recordCommands(OperationContext &context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
4865 {
4866 const DeviceInterface &vk = context.getDeviceInterface();
4867
4868 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipeline);
4869 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0u, 1u, &descriptorSet,
4870 0u, DE_NULL);
4871 vk.cmdDispatchIndirect(cmdBuffer, m_indirectBuffer, 0u);
4872 }
4873
4874 private:
4875 const VkBuffer m_indirectBuffer;
4876 Move<VkPipelineLayout> m_pipelineLayout;
4877 Move<VkPipeline> m_pipeline;
4878 };
4879
4880 //! Read indirect buffer by executing an indirect draw or dispatch command.
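//! Each shader invocation performs an atomicAdd on a host-visible SSBO counter, so the number of invocations
//! launched by the indirect parameters ends up in the counter and is compared against the value reported by
//! WriteImplementation as its expected data.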
4881 class ReadImplementation : public Operation
4882 {
4883 public:
4884     ReadImplementation(OperationContext &context, Resource &resource)
4885 : m_context(context)
4886 , m_resource(resource)
4887 , m_stage(resource.getType() == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH ? VK_SHADER_STAGE_COMPUTE_BIT :
4888 VK_SHADER_STAGE_VERTEX_BIT)
4889 , m_pipelineStage(pipelineStageFlagsFromShaderStageFlagBits(m_stage))
4890 , m_hostBufferSizeBytes(sizeof(uint32_t))
4891 {
4892 requireFeaturesForSSBOAccess(m_context, m_stage);
4893
4894 const DeviceInterface &vk = m_context.getDeviceInterface();
4895 const VkDevice device = m_context.getDevice();
4896 Allocator &allocator = m_context.getAllocator();
4897
4898 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
4899 vk, device, allocator, makeBufferCreateInfo(m_hostBufferSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT),
4900 MemoryRequirement::HostVisible));
4901
4902 // Init host buffer data
4903 {
4904 const Allocation &alloc = m_hostBuffer->getAllocation();
4905 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_hostBufferSizeBytes));
4906 flushAlloc(vk, device, alloc);
4907 }
4908
4909 // Prepare descriptors
4910 {
4911 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
4912 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, m_stage)
4913 .build(vk, device);
4914
4915 m_descriptorPool = DescriptorPoolBuilder()
4916 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
4917 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
4918
4919 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
4920
4921 const VkDescriptorBufferInfo hostBufferInfo =
4922 makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_hostBufferSizeBytes);
4923
4924 DescriptorSetUpdateBuilder()
4925 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
4926 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &hostBufferInfo)
4927 .update(vk, device);
4928 }
4929
4930 // Create pipeline
4931 m_pipeline = (m_resource.getType() == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH ?
4932 de::MovePtr<Pipeline>(new ComputePipeline(context, m_resource.getBuffer().handle, "read_ib_",
4933 *m_descriptorSetLayout)) :
4934 de::MovePtr<Pipeline>(new GraphicsPipeline(context, m_resource.getType(),
4935 m_resource.getBuffer().handle, "read_ib_",
4936 *m_descriptorSetLayout)));
4937 }
4938
4939     void recordCommands(const VkCommandBuffer cmdBuffer)
4940 {
4941 const DeviceInterface &vk = m_context.getDeviceInterface();
4942 SynchronizationWrapperPtr synchronizationWrapper =
4943 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
4944
4945 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
4946
4947 // Insert a barrier so data written by the shader is available to the host
4948 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
4949 m_pipelineStage, // VkPipelineStageFlags2KHR srcStageMask
4950 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
4951 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
4952 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
4953 **m_hostBuffer, // VkBuffer buffer
4954 0u, // VkDeviceSize offset
4955 m_hostBufferSizeBytes // VkDeviceSize size
4956 );
4957 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
4958 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
4959 }
4960
4961     SyncInfo getInSyncInfo(void) const
4962 {
4963 const SyncInfo syncInfo = {
4964 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR, // VkPipelineStageFlags stageMask;
4965 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR, // VkAccessFlags accessMask;
4966 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
4967 };
4968 return syncInfo;
4969 }
4970
4971     SyncInfo getOutSyncInfo(void) const
4972 {
4973 return emptySyncInfo;
4974 }
4975
4976     Data getData(void) const
4977 {
4978 return getHostBufferData(m_context, *m_hostBuffer, m_hostBufferSizeBytes);
4979 }
4980
4981     void setData(const Data &)
4982 {
4983 DE_ASSERT(0);
4984 }
4985
4986     vk::VkShaderStageFlagBits getShaderStage(void)
4987 {
4988 return m_stage;
4989 }
4990
4991 private:
4992 OperationContext &m_context;
4993 Resource &m_resource;
4994 const VkShaderStageFlagBits m_stage;
4995 const VkPipelineStageFlags m_pipelineStage;
4996 const VkDeviceSize m_hostBufferSizeBytes;
4997 de::MovePtr<Buffer> m_hostBuffer;
4998 Move<VkDescriptorPool> m_descriptorPool;
4999 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
5000 Move<VkDescriptorSet> m_descriptorSet;
5001 de::MovePtr<Pipeline> m_pipeline;
5002 };
5003
5004 //! Prepare indirect buffer for a draw/dispatch call.
5005 class WriteImplementation : public Operation
5006 {
5007 public:
5008     WriteImplementation(OperationContext &context, Resource &resource) : m_context(context), m_resource(resource)
5009 {
5010 switch (m_resource.getType())
5011 {
5012 case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW:
5013 {
5014 m_drawIndirect.vertexCount = 6u;
5015 m_drawIndirect.instanceCount = 1u;
5016 m_drawIndirect.firstVertex = 0u;
5017 m_drawIndirect.firstInstance = 0u;
5018
5019 m_indirectData = reinterpret_cast<uint32_t *>(&m_drawIndirect);
5020 m_expectedValue = 6u;
5021 }
5022 break;
5023
5024 case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW_INDEXED:
5025 {
5026 m_drawIndexedIndirect.indexCount = 6u;
5027 m_drawIndexedIndirect.instanceCount = 1u;
5028 m_drawIndexedIndirect.firstIndex = 0u;
5029 m_drawIndexedIndirect.vertexOffset = 0u;
5030 m_drawIndexedIndirect.firstInstance = 0u;
5031
5032 m_indirectData = reinterpret_cast<uint32_t *>(&m_drawIndexedIndirect);
5033 m_expectedValue = 6u;
5034 }
5035 break;
5036
5037 case RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH:
5038 {
5039 m_dispatchIndirect.x = 7u;
5040 m_dispatchIndirect.y = 2u;
5041 m_dispatchIndirect.z = 1u;
5042
5043 m_indirectData = reinterpret_cast<uint32_t *>(&m_dispatchIndirect);
5044 m_expectedValue = 14u;
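            // 7 x 2 x 1 work groups with local_size_x = 1 in the shader gives 14 invocations in total.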
5045 }
5046 break;
5047
5048 default:
5049 DE_ASSERT(0);
5050 break;
5051 }
5052 }
5053
5054     void recordCommands(const VkCommandBuffer cmdBuffer)
5055 {
5056 const DeviceInterface &vk = m_context.getDeviceInterface();
5057
5058 vk.cmdUpdateBuffer(cmdBuffer, m_resource.getBuffer().handle, m_resource.getBuffer().offset,
5059 m_resource.getBuffer().size, m_indirectData);
5060 }
5061
5062     SyncInfo getInSyncInfo(void) const
5063 {
5064 return emptySyncInfo;
5065 }
5066
5067     SyncInfo getOutSyncInfo(void) const
5068 {
5069 const SyncInfo syncInfo = {
5070 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
5071 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
5072 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
5073 };
5074 return syncInfo;
5075 }
5076
5077     Data getData(void) const
5078 {
5079 const Data data = {
5080 sizeof(uint32_t), // std::size_t size;
5081 reinterpret_cast<const uint8_t *>(&m_expectedValue), // const uint8_t* data;
5082 };
5083 return data;
5084 }
5085
5086     void setData(const Data &)
5087 {
5088 DE_ASSERT(0);
5089 }
5090
5091 private:
5092 OperationContext &m_context;
5093 Resource &m_resource;
5094 VkDrawIndirectCommand m_drawIndirect;
5095 VkDrawIndexedIndirectCommand m_drawIndexedIndirect;
5096 VkDispatchIndirectCommand m_dispatchIndirect;
5097 uint32_t *m_indirectData;
5098     uint32_t m_expectedValue; //!< Side-effect value expected to be computed by a read (draw/dispatch) command.
5099 };
5100
5101 class ReadSupport : public OperationSupport
5102 {
5103 public:
5104     ReadSupport(const ResourceDescription &resourceDesc) : m_resourceDesc(resourceDesc)
5105 {
5106 DE_ASSERT(isIndirectBuffer(m_resourceDesc.type));
5107 }
5108
5109     void initPrograms(SourceCollections &programCollection) const
5110 {
5111 std::ostringstream decl;
5112 decl << "layout(set = 0, binding = 0, std140) coherent buffer Data {\n"
5113 << " uint value;\n"
5114 << "} sb_out;\n";
5115
5116 std::ostringstream main;
5117 main << " atomicAdd(sb_out.value, 1u);\n";
5118
5119 // Vertex
5120 {
5121 std::ostringstream src;
5122 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
5123 << "\n"
5124 << "layout(location = 0) in vec4 v_in_position;\n"
5125 << "\n"
5126 << "out " << s_perVertexBlock << ";\n"
5127 << "\n"
5128 << decl.str() << "\n"
5129 << "void main (void)\n"
5130 << "{\n"
5131 << " gl_Position = v_in_position;\n"
5132 << main.str() << "}\n";
5133
5134 programCollection.glslSources.add("read_ib_vert") << glu::VertexSource(src.str());
5135 }
5136
5137 // Fragment
5138 {
5139 std::ostringstream src;
5140 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
5141 << "\n"
5142 << "layout(location = 0) out vec4 o_color;\n"
5143 << "\n"
5144 << "void main (void)\n"
5145 << "{\n"
5146 << " o_color = vec4(1.0);\n"
5147 << "}\n";
5148
5149 programCollection.glslSources.add("read_ib_frag") << glu::FragmentSource(src.str());
5150 }
5151
5152 // Compute
5153 {
5154 std::ostringstream src;
5155 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
5156 << "\n"
5157 << "layout(local_size_x = 1) in;\n"
5158 << "\n"
5159 << decl.str() << "\n"
5160 << "void main (void)\n"
5161 << "{\n"
5162 << main.str() << "}\n";
5163
5164 programCollection.glslSources.add("read_ib_comp") << glu::ComputeSource(src.str());
5165 }
5166 }
5167
5168     uint32_t getInResourceUsageFlags(void) const
5169 {
5170 return VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
5171 }
5172
5173     uint32_t getOutResourceUsageFlags(void) const
5174 {
5175 return 0;
5176 }
5177
5178     VkQueueFlags getQueueFlags(const OperationContext &context) const
5179 {
5180 DE_UNREF(context);
5181 return (m_resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH ? VK_QUEUE_COMPUTE_BIT :
5182 VK_QUEUE_GRAPHICS_BIT);
5183 }
5184
5185     de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
5186 {
5187 return de::MovePtr<Operation>(new ReadImplementation(context, resource));
5188 }
5189
5190     de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
5191 {
5192 DE_ASSERT(0);
5193 return de::MovePtr<Operation>();
5194 }
5195
5196 private:
5197 const ResourceDescription m_resourceDesc;
5198 };
5199
5200 class WriteSupport : public OperationSupport
5201 {
5202 public:
5203     WriteSupport(const ResourceDescription &resourceDesc)
5204 {
5205 DE_ASSERT(isIndirectBuffer(resourceDesc.type));
5206 DE_UNREF(resourceDesc);
5207 }
5208
5209     uint32_t getInResourceUsageFlags(void) const
5210 {
5211 return 0;
5212 }
5213
5214     uint32_t getOutResourceUsageFlags(void) const
5215 {
5216 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
5217 }
5218
5219     VkQueueFlags getQueueFlags(const OperationContext &context) const
5220 {
5221 DE_UNREF(context);
5222 return VK_QUEUE_TRANSFER_BIT;
5223 }
5224
5225     de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
5226 {
5227 return de::MovePtr<Operation>(new WriteImplementation(context, resource));
5228 }
5229
5230     de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
5231 {
5232 DE_ASSERT(0);
5233 return de::MovePtr<Operation>();
5234 }
5235 };
5236
5237 } // namespace IndirectBuffer
5238
5239 namespace VertexInput
5240 {
5241
5242 enum DrawMode
5243 {
5244 DRAW_MODE_VERTEX = 0,
5245 DRAW_MODE_INDEXED,
5246 };
5247
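//! Read the resource through the vertex-input stage. In DRAW_MODE_VERTEX the resource itself is bound as the
//! vertex buffer and each uvec4 attribute is stored to an SSBO slot indexed by gl_VertexIndex; in
//! DRAW_MODE_INDEXED the resource is bound as the index buffer while a separately filled buffer supplies the
//! attributes, so the index values decide which attributes are fetched and into which slots they are written.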
5248 class Implementation : public Operation
5249 {
5250 public:
5251     Implementation(OperationContext &context, Resource &resource, DrawMode drawMode)
5252 : m_context(context)
5253 , m_resource(resource)
5254 , m_drawMode(drawMode)
5255 {
5256 requireFeaturesForSSBOAccess(m_context, VK_SHADER_STAGE_VERTEX_BIT);
5257
5258 const DeviceInterface &vk = context.getDeviceInterface();
5259 const VkDevice device = context.getDevice();
5260 Allocator &allocator = context.getAllocator();
5261 VkFormat attributeFormat = VK_FORMAT_R32G32B32A32_UINT;
5262 const VkDeviceSize dataSizeBytes = m_resource.getBuffer().size;
5263
5264         // Allocate the SSBO that will store the data used for verification
5265 {
5266 m_outputBuffer = de::MovePtr<Buffer>(new Buffer(
5267 vk, device, allocator, makeBufferCreateInfo(dataSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT),
5268 MemoryRequirement::HostVisible));
5269
5270 const Allocation &alloc = m_outputBuffer->getAllocation();
5271 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(dataSizeBytes));
5272 flushAlloc(vk, device, alloc);
5273 }
5274
5275         // Allocate a buffer to provide the vertex attributes when the resource is used as the index buffer
5276 if (m_drawMode == DRAW_MODE_INDEXED)
5277 {
5278 attributeFormat = VK_FORMAT_R32_UINT;
5279
5280 m_inputBuffer = de::MovePtr<Buffer>(new Buffer(
5281 vk, device, allocator, makeBufferCreateInfo(dataSizeBytes, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT),
5282 MemoryRequirement::HostVisible));
5283
5284 const Allocation &alloc = m_inputBuffer->getAllocation();
5285 fillPattern(alloc.getHostPtr(), dataSizeBytes, true);
5286 flushAlloc(vk, device, alloc);
5287 }
5288
5289 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
5290 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_VERTEX_BIT)
5291 .build(vk, device);
5292
5293 m_descriptorPool = DescriptorPoolBuilder()
5294 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
5295 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
5296
5297 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
5298
5299 const VkDescriptorBufferInfo outputBufferDescriptorInfo =
5300 makeDescriptorBufferInfo(m_outputBuffer->get(), 0ull, dataSizeBytes);
5301 DescriptorSetUpdateBuilder()
5302 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
5303 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &outputBufferDescriptorInfo)
5304 .update(vk, device);
5305
5306 // Color attachment
5307 m_colorFormat = VK_FORMAT_R8G8B8A8_UNORM;
5308 m_colorImageSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
5309 m_colorImageExtent = makeExtent3D(16u, 16u, 1u);
5310 m_colorAttachmentImage = de::MovePtr<Image>(new Image(
5311 vk, device, allocator,
5312 makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_colorImageExtent, m_colorFormat,
5313 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL),
5314 MemoryRequirement::Any));
5315
5316 // Pipeline
5317 m_colorAttachmentView = makeImageView(vk, device, **m_colorAttachmentImage, VK_IMAGE_VIEW_TYPE_2D,
5318 m_colorFormat, m_colorImageSubresourceRange);
5319 m_renderPass = makeRenderPass(vk, device, m_colorFormat);
5320 m_framebuffer = makeFramebuffer(vk, device, *m_renderPass, *m_colorAttachmentView, m_colorImageExtent.width,
5321 m_colorImageExtent.height);
5322 m_pipelineLayout = makePipelineLayout(vk, device, *m_descriptorSetLayout);
5323
5324 m_pipeline =
5325 GraphicsPipelineBuilder()
5326 .setPrimitiveTopology(VK_PRIMITIVE_TOPOLOGY_POINT_LIST)
5327 .setRenderSize(
5328 tcu::IVec2(static_cast<int>(m_colorImageExtent.width), static_cast<int>(m_colorImageExtent.height)))
5329 .setVertexInputSingleAttribute(attributeFormat, tcu::getPixelSize(mapVkFormat(attributeFormat)))
5330 .setShader(vk, device, VK_SHADER_STAGE_VERTEX_BIT, context.getBinaryCollection().get("input_vert"),
5331 DE_NULL)
5332 .setShader(vk, device, VK_SHADER_STAGE_FRAGMENT_BIT, context.getBinaryCollection().get("input_frag"),
5333 DE_NULL)
5334 .build(vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData(),
5335 context.getResourceInterface());
5336 }
5337
5338     void recordCommands(const VkCommandBuffer cmdBuffer)
5339 {
5340 const DeviceInterface &vk = m_context.getDeviceInterface();
5341 const VkDeviceSize dataSizeBytes = m_resource.getBuffer().size;
5342 SynchronizationWrapperPtr synchronizationWrapper =
5343 getSynchronizationWrapper(m_context.getSynchronizationType(), vk, false);
5344
5345 // Change color attachment image layout
5346 {
5347 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
5348 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
5349 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
5350 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
5351 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
5352 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
5353 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout newLayout
5354 **m_colorAttachmentImage, // VkImage image
5355 m_colorImageSubresourceRange // VkImageSubresourceRange subresourceRange
5356 );
5357 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
5358 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
5359 }
5360
5361 {
5362 const VkRect2D renderArea = makeRect2D(m_colorImageExtent);
5363 const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
5364
5365 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
5366 }
5367
5368 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
5369 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1u,
5370 &m_descriptorSet.get(), 0u, DE_NULL);
5371
5372 const VkDeviceSize vertexBufferOffset = 0ull;
5373 if (m_drawMode == DRAW_MODE_VERTEX)
5374 {
5375 const uint32_t count = static_cast<uint32_t>(dataSizeBytes / sizeof(tcu::UVec4));
5376 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &m_resource.getBuffer().handle, &vertexBufferOffset);
5377 vk.cmdDraw(cmdBuffer, count, 1u, 0u, 0u);
5378 }
5379 else // (m_drawMode == DRAW_MODE_INDEXED)
5380 {
5381 const uint32_t count = static_cast<uint32_t>(dataSizeBytes / sizeof(uint32_t));
5382 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &**m_inputBuffer, &vertexBufferOffset);
5383 vk.cmdBindIndexBuffer(cmdBuffer, m_resource.getBuffer().handle, 0u, VK_INDEX_TYPE_UINT32);
5384 vk.cmdDrawIndexed(cmdBuffer, count, 1, 0, 0, 0);
5385 }
5386
5387 endRenderPass(vk, cmdBuffer);
5388
5389 // Insert a barrier so data written by the shader is available to the host
5390 {
5391 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
5392 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
5393 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
5394 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
5395 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
5396 **m_outputBuffer, // VkBuffer buffer
5397 0u, // VkDeviceSize offset
5398 m_resource.getBuffer().size // VkDeviceSize size
5399 );
5400 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
5401 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
5402 }
5403 }
5404
5405     SyncInfo getInSyncInfo(void) const
5406 {
5407 const bool usingIndexedDraw = (m_drawMode == DRAW_MODE_INDEXED);
5408 VkPipelineStageFlags2KHR stageMask = VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR;
5409 VkAccessFlags2KHR accessMask =
5410 usingIndexedDraw ? VK_ACCESS_2_INDEX_READ_BIT_KHR : VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR;
5411
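        // Synchronization2 splits the legacy VERTEX_INPUT stage into dedicated INDEX_INPUT and
        // VERTEX_ATTRIBUTE_INPUT stages, so report the more precise stage when it is available.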
5412 if (m_context.getSynchronizationType() == SynchronizationType::SYNCHRONIZATION2)
5413 {
5414 stageMask = usingIndexedDraw ? VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR :
5415 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR;
5416 }
5417
5418 const SyncInfo syncInfo = {
5419 stageMask, // VkPipelineStageFlags stageMask;
5420 accessMask, // VkAccessFlags accessMask;
5421 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
5422 };
5423 return syncInfo;
5424 }
5425
5426     SyncInfo getOutSyncInfo(void) const
5427 {
5428 return emptySyncInfo;
5429 }
5430
5431     Data getData(void) const
5432 {
5433 return getHostBufferData(m_context, *m_outputBuffer, m_resource.getBuffer().size);
5434 }
5435
5436     void setData(const Data &data)
5437 {
5438 setHostBufferData(m_context, *m_outputBuffer, data);
5439 }
5440
5441 private:
5442 OperationContext &m_context;
5443 Resource &m_resource;
5444 DrawMode m_drawMode;
5445 de::MovePtr<Buffer> m_inputBuffer;
5446 de::MovePtr<Buffer> m_outputBuffer;
5447 Move<VkRenderPass> m_renderPass;
5448 Move<VkFramebuffer> m_framebuffer;
5449 Move<VkPipelineLayout> m_pipelineLayout;
5450 Move<VkPipeline> m_pipeline;
5451 VkFormat m_colorFormat;
5452 de::MovePtr<Image> m_colorAttachmentImage;
5453 Move<VkImageView> m_colorAttachmentView;
5454 VkExtent3D m_colorImageExtent;
5455 VkImageSubresourceRange m_colorImageSubresourceRange;
5456 Move<VkDescriptorPool> m_descriptorPool;
5457 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
5458 Move<VkDescriptorSet> m_descriptorSet;
5459 };
5460
5461 class Support : public OperationSupport
5462 {
5463 public:
5464     Support(const ResourceDescription &resourceDesc, DrawMode drawMode)
5465 : m_resourceDesc(resourceDesc)
5466 , m_drawMode(drawMode)
5467 {
5468 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER || m_resourceDesc.type == RESOURCE_TYPE_INDEX_BUFFER);
5469 }
5470
5471     void initPrograms(SourceCollections &programCollection) const
5472 {
5473 // Vertex
5474 {
5475 std::ostringstream src;
5476 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n";
5477 if (m_drawMode == DRAW_MODE_VERTEX)
5478 {
5479 src << "layout(location = 0) in uvec4 v_in_data;\n"
5480 << "layout(set = 0, binding = 0, std140) writeonly buffer Output {\n"
5481 << " uvec4 data[" << m_resourceDesc.size.x() / sizeof(tcu::UVec4) << "];\n"
5482 << "} b_out;\n"
5483 << "\n"
5484 << "void main (void)\n"
5485 << "{\n"
5486 << " b_out.data[gl_VertexIndex] = v_in_data;\n"
5487 << " gl_PointSize = 1.0f;\n"
5488 << "}\n";
5489 }
5490 else // DRAW_MODE_INDEXED
5491 {
5492 src << "layout(location = 0) in uint v_in_data;\n"
5493 << "layout(set = 0, binding = 0, std430) writeonly buffer Output {\n"
5494 << " uint data[" << m_resourceDesc.size.x() / sizeof(uint32_t) << "];\n"
5495 << "} b_out;\n"
5496 << "\n"
5497 << "void main (void)\n"
5498 << "{\n"
5499 << " b_out.data[gl_VertexIndex] = v_in_data;\n"
5500 << " gl_PointSize = 1.0f;\n"
5501 << "}\n";
5502 }
5503 programCollection.glslSources.add("input_vert") << glu::VertexSource(src.str());
5504 }
5505
5506 // Fragment
5507 {
5508 std::ostringstream src;
5509 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
5510 << "\n"
5511 << "layout(location = 0) out vec4 o_color;\n"
5512 << "\n"
5513 << "void main (void)\n"
5514 << "{\n"
5515 << " o_color = vec4(1.0);\n"
5516 << "}\n";
5517 programCollection.glslSources.add("input_frag") << glu::FragmentSource(src.str());
5518 }
5519 }
5520
5521     uint32_t getInResourceUsageFlags(void) const
5522 {
5523 return (m_drawMode == DRAW_MODE_VERTEX) ? VK_BUFFER_USAGE_VERTEX_BUFFER_BIT : VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
5524 }
5525
5526     uint32_t getOutResourceUsageFlags(void) const
5527 {
5528 return VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5529 }
5530
5531     VkQueueFlags getQueueFlags(const OperationContext &) const
5532 {
5533 return VK_QUEUE_GRAPHICS_BIT;
5534 }
5535
5536     de::MovePtr<Operation> build(OperationContext &context, Resource &resource) const
5537 {
5538 return de::MovePtr<Operation>(new Implementation(context, resource, m_drawMode));
5539 }
5540
5541     de::MovePtr<Operation> build(OperationContext &, Resource &, Resource &) const
5542 {
5543 DE_ASSERT(0);
5544 return de::MovePtr<Operation>();
5545 }
5546
5547 private:
5548 const ResourceDescription m_resourceDesc;
5549 const DrawMode m_drawMode;
5550 };
5551
5552 } // namespace VertexInput
5553
5554 } // namespace
5555
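// The OperationContext constructors below differ only in how much of the environment they override: the first
// takes everything from the test Context, the second substitutes a custom device interface, device and
// allocator, and the third additionally lets the caller supply the instance interface, physical device and
// program collection.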
5556 OperationContext::OperationContext(Context &context, SynchronizationType syncType, PipelineCacheData &pipelineCacheData)
5557 : m_context(context)
5558 , m_syncType(syncType)
5559 , m_vki(context.getInstanceInterface())
5560 , m_vk(context.getDeviceInterface())
5561 , m_physicalDevice(context.getPhysicalDevice())
5562 , m_device(context.getDevice())
5563 , m_allocator(context.getDefaultAllocator())
5564 , m_progCollection(context.getBinaryCollection())
5565 , m_pipelineCacheData(pipelineCacheData)
5566 {
5567 }
5568
5569 OperationContext::OperationContext(Context &context, SynchronizationType syncType, const DeviceInterface &vk,
5570 const VkDevice device, vk::Allocator &allocator,
5571 PipelineCacheData &pipelineCacheData)
5572 : m_context(context)
5573 , m_syncType(syncType)
5574 , m_vki(context.getInstanceInterface())
5575 , m_vk(vk)
5576 , m_physicalDevice(context.getPhysicalDevice())
5577 , m_device(device)
5578 , m_allocator(allocator)
5579 , m_progCollection(context.getBinaryCollection())
5580 , m_pipelineCacheData(pipelineCacheData)
5581 {
5582 }
5583
5584 OperationContext::OperationContext(Context &context, SynchronizationType syncType, const vk::InstanceInterface &vki,
5585 const vk::DeviceInterface &vkd, vk::VkPhysicalDevice physicalDevice,
5586 vk::VkDevice device, vk::Allocator &allocator,
5587 vk::BinaryCollection &programCollection, PipelineCacheData &pipelineCacheData)
5588 : m_context(context)
5589 , m_syncType(syncType)
5590 , m_vki(vki)
5591 , m_vk(vkd)
5592 , m_physicalDevice(physicalDevice)
5593 , m_device(device)
5594 , m_allocator(allocator)
5595 , m_progCollection(programCollection)
5596 , m_pipelineCacheData(pipelineCacheData)
5597 {
5598 }
5599
5600 Resource::Resource(OperationContext &context, const ResourceDescription &desc, const uint32_t usage,
5601 const vk::VkSharingMode sharingMode, const std::vector<uint32_t> &queueFamilyIndex)
5602 : m_type(desc.type)
5603 {
5604 const DeviceInterface &vk = context.getDeviceInterface();
5605 const InstanceInterface &vki = context.getInstanceInterface();
5606 const VkDevice device = context.getDevice();
5607 const VkPhysicalDevice physDevice = context.getPhysicalDevice();
5608 Allocator &allocator = context.getAllocator();
5609
5610 if (m_type == RESOURCE_TYPE_BUFFER || m_type == RESOURCE_TYPE_INDEX_BUFFER || isIndirectBuffer(m_type))
5611 {
5612 m_bufferData =
5613 de::MovePtr<BufferResource>(new BufferResource(DE_NULL, 0u, static_cast<VkDeviceSize>(desc.size.x())));
5614 VkBufferCreateInfo bufferCreateInfo = makeBufferCreateInfo(m_bufferData->size, usage);
5615 bufferCreateInfo.sharingMode = sharingMode;
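        // For VK_SHARING_MODE_CONCURRENT the create info must list every queue family that will access the
        // resource; for exclusive sharing the list can be left empty.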
5616 if (queueFamilyIndex.size() > 0)
5617 {
5618 bufferCreateInfo.queueFamilyIndexCount = static_cast<uint32_t>(queueFamilyIndex.size());
5619 bufferCreateInfo.pQueueFamilyIndices = &queueFamilyIndex[0];
5620 }
5621 m_buffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator, bufferCreateInfo, MemoryRequirement::Any));
5622 m_bufferData->handle = **m_buffer;
5623 }
5624 else if (m_type == RESOURCE_TYPE_IMAGE)
5625 {
5626 m_imageData = de::MovePtr<ImageResource>(new ImageResource(
5627 DE_NULL, makeExtent3D(desc.size.x(), std::max(1, desc.size.y()), std::max(1, desc.size.z())),
5628 desc.imageType, desc.imageFormat, makeImageSubresourceRange(desc.imageAspect, 0u, 1u, 0u, 1u),
5629 makeImageSubresourceLayers(desc.imageAspect, 0u, 0u, 1u), vk::VK_IMAGE_TILING_OPTIMAL));
5630 VkImageCreateInfo imageInfo =
5631 makeImageCreateInfo(m_imageData->imageType, m_imageData->extent, m_imageData->format, usage,
5632 desc.imageSamples, m_imageData->tiling);
5633 imageInfo.sharingMode = sharingMode;
5634 if (queueFamilyIndex.size() > 0)
5635 {
5636 imageInfo.queueFamilyIndexCount = static_cast<uint32_t>(queueFamilyIndex.size());
5637 imageInfo.pQueueFamilyIndices = &queueFamilyIndex[0];
5638 }
5639
5640 VkImageFormatProperties imageFormatProperties;
5641 const VkResult formatResult = vki.getPhysicalDeviceImageFormatProperties(
5642 physDevice, imageInfo.format, imageInfo.imageType, imageInfo.tiling, imageInfo.usage, imageInfo.flags,
5643 &imageFormatProperties);
5644
5645 if (formatResult != VK_SUCCESS)
5646 TCU_THROW(NotSupportedError, "Image format is not supported");
5647
5648 if ((imageFormatProperties.sampleCounts & desc.imageSamples) != desc.imageSamples)
5649 TCU_THROW(NotSupportedError, "Requested sample count is not supported");
5650
5651 m_image = de::MovePtr<Image>(new Image(vk, device, allocator, imageInfo, MemoryRequirement::Any));
5652 m_imageData->handle = **m_image;
5653 }
5654 else
5655 DE_ASSERT(0);
5656 }
5657
5658 Resource::Resource(ResourceType type, vk::Move<vk::VkBuffer> buffer, de::MovePtr<vk::Allocation> allocation,
5659 vk::VkDeviceSize offset, vk::VkDeviceSize size)
5660 : m_type(type)
5661 , m_buffer(new Buffer(buffer, allocation))
5662 , m_bufferData(de::MovePtr<BufferResource>(new BufferResource(m_buffer->get(), offset, size)))
5663 {
5664 DE_ASSERT(type != RESOURCE_TYPE_IMAGE);
5665 }
5666
5667 Resource::Resource(vk::Move<vk::VkImage> image, de::MovePtr<vk::Allocation> allocation, const vk::VkExtent3D &extent,
5668 vk::VkImageType imageType, vk::VkFormat format, vk::VkImageSubresourceRange subresourceRange,
5669 vk::VkImageSubresourceLayers subresourceLayers, vk::VkImageTiling tiling)
5670 : m_type(RESOURCE_TYPE_IMAGE)
5671 , m_image(new Image(image, allocation))
5672 , m_imageData(de::MovePtr<ImageResource>(
5673 new ImageResource(m_image->get(), extent, imageType, format, subresourceRange, subresourceLayers, tiling)))
5674 {
5675 }
5676
5677 vk::VkDeviceMemory Resource::getMemory(void) const
5678 {
5679 if (m_type == RESOURCE_TYPE_IMAGE)
5680 return m_image->getAllocation().getMemory();
5681 else
5682 return m_buffer->getAllocation().getMemory();
5683 }
5684
5685 //! \note This function exists for performance reasons. We create a lot of tests, so checking requirements here,
5686 //! before constructing an OperationSupport object, is faster.
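//!
//! A minimal usage sketch (illustrative only; the operation list and the helper below are assumptions,
//! not names defined in this file): a test generator would typically filter combinations with the cheap
//! isResourceSupported() check first and only then pay the cost of makeOperationSupport(), e.g.
//!
//!     for (const OperationName opName : candidateOps)                                  // hypothetical list of operations
//!         if (isResourceSupported(opName, resourceDesc))                               // cheap requirement check first
//!             addTestsFor(makeOperationSupport(opName, resourceDesc, false));          // hypothetical test-building helper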
5687 bool isResourceSupported(const OperationName opName, const ResourceDescription &resourceDesc)
5688 {
5689 switch (opName)
5690 {
5691 case OPERATION_NAME_WRITE_FILL_BUFFER:
5692 case OPERATION_NAME_WRITE_COPY_BUFFER:
5693 case OPERATION_NAME_WRITE_COPY_IMAGE_TO_BUFFER:
5694 case OPERATION_NAME_WRITE_SSBO_VERTEX:
5695 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_CONTROL:
5696 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_EVALUATION:
5697 case OPERATION_NAME_WRITE_SSBO_GEOMETRY:
5698 case OPERATION_NAME_WRITE_SSBO_FRAGMENT:
5699 case OPERATION_NAME_WRITE_SSBO_COMPUTE:
5700 case OPERATION_NAME_WRITE_SSBO_COMPUTE_INDIRECT:
5701 case OPERATION_NAME_READ_COPY_BUFFER:
5702 case OPERATION_NAME_READ_COPY_BUFFER_TO_IMAGE:
5703 case OPERATION_NAME_READ_SSBO_VERTEX:
5704 case OPERATION_NAME_READ_SSBO_TESSELLATION_CONTROL:
5705 case OPERATION_NAME_READ_SSBO_TESSELLATION_EVALUATION:
5706 case OPERATION_NAME_READ_SSBO_GEOMETRY:
5707 case OPERATION_NAME_READ_SSBO_FRAGMENT:
5708 case OPERATION_NAME_READ_SSBO_COMPUTE:
5709 case OPERATION_NAME_READ_SSBO_COMPUTE_INDIRECT:
5710 case OPERATION_NAME_READ_VERTEX_INPUT:
5711 return resourceDesc.type == RESOURCE_TYPE_BUFFER;
5712
5713 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW:
5714 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW:
5715 return resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DRAW;
5716
5717 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW_INDEXED:
5718 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW_INDEXED:
5719 return resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DRAW_INDEXED;
5720
5721 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DISPATCH:
5722 case OPERATION_NAME_READ_INDIRECT_BUFFER_DISPATCH:
5723 return resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH;
5724
5725 case OPERATION_NAME_WRITE_UPDATE_INDEX_BUFFER:
5726 case OPERATION_NAME_READ_INDEX_INPUT:
5727 return resourceDesc.type == RESOURCE_TYPE_INDEX_BUFFER;
5728
5729 case OPERATION_NAME_WRITE_UPDATE_BUFFER:
5730 return resourceDesc.type == RESOURCE_TYPE_BUFFER && resourceDesc.size.x() <= MAX_UPDATE_BUFFER_SIZE;
5731
5732 case OPERATION_NAME_WRITE_COPY_IMAGE:
5733 case OPERATION_NAME_WRITE_COPY_BUFFER_TO_IMAGE:
5734 case OPERATION_NAME_READ_COPY_IMAGE:
5735 case OPERATION_NAME_READ_COPY_IMAGE_TO_BUFFER:
5736 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5737
5738 case OPERATION_NAME_WRITE_CLEAR_ATTACHMENTS:
5739 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageType != VK_IMAGE_TYPE_3D &&
5740 resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5741
5742 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_MULTISAMPLE:
5743 case OPERATION_NAME_READ_RESOLVE_IMAGE:
5744 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT &&
5745 resourceDesc.imageSamples != VK_SAMPLE_COUNT_1_BIT;
5746
5747 case OPERATION_NAME_WRITE_BLIT_IMAGE:
5748 case OPERATION_NAME_READ_BLIT_IMAGE:
5749 case OPERATION_NAME_WRITE_IMAGE_VERTEX:
5750 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_CONTROL:
5751 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_EVALUATION:
5752 case OPERATION_NAME_WRITE_IMAGE_GEOMETRY:
5753 case OPERATION_NAME_WRITE_IMAGE_FRAGMENT:
5754 case OPERATION_NAME_WRITE_IMAGE_COMPUTE:
5755 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_INDIRECT:
5756 case OPERATION_NAME_READ_IMAGE_VERTEX:
5757 case OPERATION_NAME_READ_IMAGE_TESSELLATION_CONTROL:
5758 case OPERATION_NAME_READ_IMAGE_TESSELLATION_EVALUATION:
5759 case OPERATION_NAME_READ_IMAGE_GEOMETRY:
5760 case OPERATION_NAME_READ_IMAGE_FRAGMENT:
5761 case OPERATION_NAME_READ_IMAGE_COMPUTE:
5762 case OPERATION_NAME_READ_IMAGE_COMPUTE_INDIRECT:
5763 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT &&
5764 resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5765
5766 case OPERATION_NAME_READ_UBO_VERTEX:
5767 case OPERATION_NAME_READ_UBO_TESSELLATION_CONTROL:
5768 case OPERATION_NAME_READ_UBO_TESSELLATION_EVALUATION:
5769 case OPERATION_NAME_READ_UBO_GEOMETRY:
5770 case OPERATION_NAME_READ_UBO_FRAGMENT:
5771 case OPERATION_NAME_READ_UBO_COMPUTE:
5772 case OPERATION_NAME_READ_UBO_COMPUTE_INDIRECT:
5773 case OPERATION_NAME_READ_UBO_TEXEL_VERTEX:
5774 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_CONTROL:
5775 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_EVALUATION:
5776 case OPERATION_NAME_READ_UBO_TEXEL_GEOMETRY:
5777 case OPERATION_NAME_READ_UBO_TEXEL_FRAGMENT:
5778 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE:
5779 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE_INDIRECT:
5780 return resourceDesc.type == RESOURCE_TYPE_BUFFER && resourceDesc.size.x() <= MAX_UBO_RANGE;
5781
5782 case OPERATION_NAME_WRITE_CLEAR_COLOR_IMAGE:
5783 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT &&
5784 resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5785
5786 case OPERATION_NAME_WRITE_CLEAR_DEPTH_STENCIL_IMAGE:
5787 return resourceDesc.type == RESOURCE_TYPE_IMAGE &&
5788 (resourceDesc.imageAspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) &&
5789 resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5790
5791 case OPERATION_NAME_WRITE_DRAW:
5792 case OPERATION_NAME_WRITE_DRAW_INDEXED:
5793 case OPERATION_NAME_WRITE_DRAW_INDIRECT:
5794 case OPERATION_NAME_WRITE_DRAW_INDEXED_INDIRECT:
5795 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageType == VK_IMAGE_TYPE_2D &&
5796 (resourceDesc.imageAspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) == 0 &&
5797 resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5798
5799 case OPERATION_NAME_COPY_BUFFER:
5800 case OPERATION_NAME_COPY_SSBO_VERTEX:
5801 case OPERATION_NAME_COPY_SSBO_TESSELLATION_CONTROL:
5802 case OPERATION_NAME_COPY_SSBO_TESSELLATION_EVALUATION:
5803 case OPERATION_NAME_COPY_SSBO_GEOMETRY:
5804 case OPERATION_NAME_COPY_SSBO_FRAGMENT:
5805 case OPERATION_NAME_COPY_SSBO_COMPUTE:
5806 case OPERATION_NAME_COPY_SSBO_COMPUTE_INDIRECT:
5807 return resourceDesc.type == RESOURCE_TYPE_BUFFER;
5808
5809 case OPERATION_NAME_COPY_IMAGE:
5810 case OPERATION_NAME_BLIT_IMAGE:
5811 case OPERATION_NAME_COPY_IMAGE_VERTEX:
5812 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_CONTROL:
5813 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_EVALUATION:
5814 case OPERATION_NAME_COPY_IMAGE_GEOMETRY:
5815 case OPERATION_NAME_COPY_IMAGE_FRAGMENT:
5816 case OPERATION_NAME_COPY_IMAGE_COMPUTE:
5817 case OPERATION_NAME_COPY_IMAGE_COMPUTE_INDIRECT:
5818 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT &&
5819 resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5820
5821 default:
5822 DE_ASSERT(0);
5823 return false;
5824 }
5825 }
5826
5827 std::string getOperationName(const OperationName opName)
5828 {
5829 switch (opName)
5830 {
5831 case OPERATION_NAME_WRITE_FILL_BUFFER:
5832 return "write_fill_buffer";
5833 case OPERATION_NAME_WRITE_UPDATE_BUFFER:
5834 return "write_update_buffer";
5835 case OPERATION_NAME_WRITE_COPY_BUFFER:
5836 return "write_copy_buffer";
5837 case OPERATION_NAME_WRITE_COPY_BUFFER_TO_IMAGE:
5838 return "write_copy_buffer_to_image";
5839 case OPERATION_NAME_WRITE_COPY_IMAGE_TO_BUFFER:
5840 return "write_copy_image_to_buffer";
5841 case OPERATION_NAME_WRITE_COPY_IMAGE:
5842 return "write_copy_image";
5843 case OPERATION_NAME_WRITE_BLIT_IMAGE:
5844 return "write_blit_image";
5845 case OPERATION_NAME_WRITE_SSBO_VERTEX:
5846 return "write_ssbo_vertex";
5847 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_CONTROL:
5848 return "write_ssbo_tess_control";
5849 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_EVALUATION:
5850 return "write_ssbo_tess_eval";
5851 case OPERATION_NAME_WRITE_SSBO_GEOMETRY:
5852 return "write_ssbo_geometry";
5853 case OPERATION_NAME_WRITE_SSBO_FRAGMENT:
5854 return "write_ssbo_fragment";
5855 case OPERATION_NAME_WRITE_SSBO_COMPUTE:
5856 return "write_ssbo_compute";
5857 case OPERATION_NAME_WRITE_SSBO_COMPUTE_INDIRECT:
5858 return "write_ssbo_compute_indirect";
5859 case OPERATION_NAME_WRITE_IMAGE_VERTEX:
5860 return "write_image_vertex";
5861 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_CONTROL:
5862 return "write_image_tess_control";
5863 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_EVALUATION:
5864 return "write_image_tess_eval";
5865 case OPERATION_NAME_WRITE_IMAGE_GEOMETRY:
5866 return "write_image_geometry";
5867 case OPERATION_NAME_WRITE_IMAGE_FRAGMENT:
5868 return "write_image_fragment";
5869 case OPERATION_NAME_WRITE_IMAGE_COMPUTE:
5870 return "write_image_compute";
5871 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_MULTISAMPLE:
5872 return "write_image_compute_multisample";
5873 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_INDIRECT:
5874 return "write_image_compute_indirect";
5875 case OPERATION_NAME_WRITE_CLEAR_COLOR_IMAGE:
5876 return "write_clear_color_image";
5877 case OPERATION_NAME_WRITE_CLEAR_DEPTH_STENCIL_IMAGE:
5878 return "write_clear_depth_stencil_image";
5879 case OPERATION_NAME_WRITE_DRAW:
5880 return "write_draw";
5881 case OPERATION_NAME_WRITE_DRAW_INDEXED:
5882 return "write_draw_indexed";
5883 case OPERATION_NAME_WRITE_DRAW_INDIRECT:
5884 return "write_draw_indirect";
5885 case OPERATION_NAME_WRITE_DRAW_INDEXED_INDIRECT:
5886 return "write_draw_indexed_indirect";
5887 case OPERATION_NAME_WRITE_CLEAR_ATTACHMENTS:
5888 return "write_clear_attachments";
5889 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW:
5890 return "write_indirect_buffer_draw";
5891 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW_INDEXED:
5892 return "write_indirect_buffer_draw_indexed";
5893 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DISPATCH:
5894 return "write_indirect_buffer_dispatch";
5895 case OPERATION_NAME_WRITE_UPDATE_INDEX_BUFFER:
5896 return "write_update_index_buffer";
5897
5898 case OPERATION_NAME_READ_COPY_BUFFER:
5899 return "read_copy_buffer";
5900 case OPERATION_NAME_READ_COPY_BUFFER_TO_IMAGE:
5901 return "read_copy_buffer_to_image";
5902 case OPERATION_NAME_READ_COPY_IMAGE_TO_BUFFER:
5903 return "read_copy_image_to_buffer";
5904 case OPERATION_NAME_READ_COPY_IMAGE:
5905 return "read_copy_image";
5906 case OPERATION_NAME_READ_BLIT_IMAGE:
5907 return "read_blit_image";
5908 case OPERATION_NAME_READ_RESOLVE_IMAGE:
5909 return "read_resolve_image";
5910 case OPERATION_NAME_READ_UBO_VERTEX:
5911 return "read_ubo_vertex";
5912 case OPERATION_NAME_READ_UBO_TESSELLATION_CONTROL:
5913 return "read_ubo_tess_control";
5914 case OPERATION_NAME_READ_UBO_TESSELLATION_EVALUATION:
5915 return "read_ubo_tess_eval";
5916 case OPERATION_NAME_READ_UBO_GEOMETRY:
5917 return "read_ubo_geometry";
5918 case OPERATION_NAME_READ_UBO_FRAGMENT:
5919 return "read_ubo_fragment";
5920 case OPERATION_NAME_READ_UBO_COMPUTE:
5921 return "read_ubo_compute";
5922 case OPERATION_NAME_READ_UBO_COMPUTE_INDIRECT:
5923 return "read_ubo_compute_indirect";
5924 case OPERATION_NAME_READ_UBO_TEXEL_VERTEX:
5925 return "read_ubo_texel_vertex";
5926 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_CONTROL:
5927 return "read_ubo_texel_tess_control";
5928 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_EVALUATION:
5929 return "read_ubo_texel_tess_eval";
5930 case OPERATION_NAME_READ_UBO_TEXEL_GEOMETRY:
5931 return "read_ubo_texel_geometry";
5932 case OPERATION_NAME_READ_UBO_TEXEL_FRAGMENT:
5933 return "read_ubo_texel_fragment";
5934 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE:
5935 return "read_ubo_texel_compute";
5936 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE_INDIRECT:
5937 return "read_ubo_texel_compute_indirect";
5938 case OPERATION_NAME_READ_SSBO_VERTEX:
5939 return "read_ssbo_vertex";
5940 case OPERATION_NAME_READ_SSBO_TESSELLATION_CONTROL:
5941 return "read_ssbo_tess_control";
5942 case OPERATION_NAME_READ_SSBO_TESSELLATION_EVALUATION:
5943 return "read_ssbo_tess_eval";
5944 case OPERATION_NAME_READ_SSBO_GEOMETRY:
5945 return "read_ssbo_geometry";
5946 case OPERATION_NAME_READ_SSBO_FRAGMENT:
5947 return "read_ssbo_fragment";
5948 case OPERATION_NAME_READ_SSBO_COMPUTE:
5949 return "read_ssbo_compute";
5950 case OPERATION_NAME_READ_SSBO_COMPUTE_INDIRECT:
5951 return "read_ssbo_compute_indirect";
5952 case OPERATION_NAME_READ_IMAGE_VERTEX:
5953 return "read_image_vertex";
5954 case OPERATION_NAME_READ_IMAGE_TESSELLATION_CONTROL:
5955 return "read_image_tess_control";
5956 case OPERATION_NAME_READ_IMAGE_TESSELLATION_EVALUATION:
5957 return "read_image_tess_eval";
5958 case OPERATION_NAME_READ_IMAGE_GEOMETRY:
5959 return "read_image_geometry";
5960 case OPERATION_NAME_READ_IMAGE_FRAGMENT:
5961 return "read_image_fragment";
5962 case OPERATION_NAME_READ_IMAGE_COMPUTE:
5963 return "read_image_compute";
5964 case OPERATION_NAME_READ_IMAGE_COMPUTE_INDIRECT:
5965 return "read_image_compute_indirect";
5966 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW:
5967 return "read_indirect_buffer_draw";
5968 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW_INDEXED:
5969 return "read_indirect_buffer_draw_indexed";
5970 case OPERATION_NAME_READ_INDIRECT_BUFFER_DISPATCH:
5971 return "read_indirect_buffer_dispatch";
5972 case OPERATION_NAME_READ_VERTEX_INPUT:
5973 return "read_vertex_input";
5974 case OPERATION_NAME_READ_INDEX_INPUT:
5975 return "read_index_input";
5976
5977 case OPERATION_NAME_COPY_BUFFER:
5978 return "copy_buffer";
5979 case OPERATION_NAME_COPY_IMAGE:
5980 return "copy_image";
5981 case OPERATION_NAME_BLIT_IMAGE:
5982 return "blit_image";
5983 case OPERATION_NAME_COPY_SSBO_VERTEX:
5984 return "copy_buffer_vertex";
5985 case OPERATION_NAME_COPY_SSBO_TESSELLATION_CONTROL:
5986 return "copy_ssbo_tess_control";
5987 case OPERATION_NAME_COPY_SSBO_TESSELLATION_EVALUATION:
5988 return "copy_ssbo_tess_eval";
5989 case OPERATION_NAME_COPY_SSBO_GEOMETRY:
5990 return "copy_ssbo_geometry";
5991 case OPERATION_NAME_COPY_SSBO_FRAGMENT:
5992 return "copy_ssbo_fragment";
5993 case OPERATION_NAME_COPY_SSBO_COMPUTE:
5994 return "copy_ssbo_compute";
5995 case OPERATION_NAME_COPY_SSBO_COMPUTE_INDIRECT:
5996 return "copy_ssbo_compute_indirect";
5997 case OPERATION_NAME_COPY_IMAGE_VERTEX:
5998 return "copy_image_vertex";
5999 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_CONTROL:
6000 return "copy_image_tess_control";
6001 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_EVALUATION:
6002 return "copy_image_tess_eval";
6003 case OPERATION_NAME_COPY_IMAGE_GEOMETRY:
6004 return "copy_image_geometry";
6005 case OPERATION_NAME_COPY_IMAGE_FRAGMENT:
6006 return "copy_image_fragment";
6007 case OPERATION_NAME_COPY_IMAGE_COMPUTE:
6008 return "copy_image_compute";
6009 case OPERATION_NAME_COPY_IMAGE_COMPUTE_INDIRECT:
6010 return "copy_image_compute_indirect";
6011 default:
6012 DE_ASSERT(0);
6013 return "";
6014 }
6015 }
6016
6017 bool isSpecializedAccessFlagSupported(const OperationName opName)
6018 {
6019 switch (opName)
6020 {
6021 case OPERATION_NAME_WRITE_SSBO_VERTEX:
6022 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_CONTROL:
6023 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_EVALUATION:
6024 case OPERATION_NAME_WRITE_SSBO_GEOMETRY:
6025 case OPERATION_NAME_WRITE_SSBO_FRAGMENT:
6026 case OPERATION_NAME_WRITE_SSBO_COMPUTE:
6027 case OPERATION_NAME_WRITE_SSBO_COMPUTE_INDIRECT:
6028 case OPERATION_NAME_WRITE_IMAGE_VERTEX:
6029 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_CONTROL:
6030 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_EVALUATION:
6031 case OPERATION_NAME_WRITE_IMAGE_GEOMETRY:
6032 case OPERATION_NAME_WRITE_IMAGE_FRAGMENT:
6033 case OPERATION_NAME_WRITE_IMAGE_COMPUTE:
6034 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_INDIRECT:
6035 case OPERATION_NAME_READ_UBO_VERTEX:
6036 case OPERATION_NAME_READ_UBO_TESSELLATION_CONTROL:
6037 case OPERATION_NAME_READ_UBO_TESSELLATION_EVALUATION:
6038 case OPERATION_NAME_READ_UBO_GEOMETRY:
6039 case OPERATION_NAME_READ_UBO_FRAGMENT:
6040 case OPERATION_NAME_READ_UBO_COMPUTE:
6041 case OPERATION_NAME_READ_UBO_COMPUTE_INDIRECT:
6042 case OPERATION_NAME_READ_UBO_TEXEL_VERTEX:
6043 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_CONTROL:
6044 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_EVALUATION:
6045 case OPERATION_NAME_READ_UBO_TEXEL_GEOMETRY:
6046 case OPERATION_NAME_READ_UBO_TEXEL_FRAGMENT:
6047 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE:
6048 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE_INDIRECT:
6049 case OPERATION_NAME_READ_SSBO_VERTEX:
6050 case OPERATION_NAME_READ_SSBO_TESSELLATION_CONTROL:
6051 case OPERATION_NAME_READ_SSBO_TESSELLATION_EVALUATION:
6052 case OPERATION_NAME_READ_SSBO_GEOMETRY:
6053 case OPERATION_NAME_READ_SSBO_FRAGMENT:
6054 case OPERATION_NAME_READ_SSBO_COMPUTE:
6055 case OPERATION_NAME_READ_SSBO_COMPUTE_INDIRECT:
6056 case OPERATION_NAME_READ_IMAGE_VERTEX:
6057 case OPERATION_NAME_READ_IMAGE_TESSELLATION_CONTROL:
6058 case OPERATION_NAME_READ_IMAGE_TESSELLATION_EVALUATION:
6059 case OPERATION_NAME_READ_IMAGE_GEOMETRY:
6060 case OPERATION_NAME_READ_IMAGE_FRAGMENT:
6061 case OPERATION_NAME_READ_IMAGE_COMPUTE:
6062 case OPERATION_NAME_READ_IMAGE_COMPUTE_INDIRECT:
6063 case OPERATION_NAME_COPY_SSBO_VERTEX:
6064 case OPERATION_NAME_COPY_SSBO_TESSELLATION_CONTROL:
6065 case OPERATION_NAME_COPY_SSBO_GEOMETRY:
6066 case OPERATION_NAME_COPY_SSBO_FRAGMENT:
6067 case OPERATION_NAME_COPY_SSBO_COMPUTE:
6068 case OPERATION_NAME_COPY_SSBO_COMPUTE_INDIRECT:
6069 case OPERATION_NAME_COPY_IMAGE_VERTEX:
6070 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_CONTROL:
6071 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_EVALUATION:
6072 case OPERATION_NAME_COPY_IMAGE_GEOMETRY:
6073 case OPERATION_NAME_COPY_IMAGE_FRAGMENT:
6074 case OPERATION_NAME_COPY_IMAGE_COMPUTE:
6075 case OPERATION_NAME_COPY_IMAGE_COMPUTE_INDIRECT:
6076 return true;
6077 default:
6078 return false;
6079 }
6080 }
6081 de::MovePtr<OperationSupport> makeOperationSupport(const OperationName opName, const ResourceDescription &resourceDesc,
6082 const bool specializedAccess)
6083 {
6084 switch (opName)
6085 {
6086 case OPERATION_NAME_WRITE_FILL_BUFFER:
6087 return de::MovePtr<OperationSupport>(
6088 new FillUpdateBuffer ::Support(resourceDesc, FillUpdateBuffer::BUFFER_OP_FILL));
6089 case OPERATION_NAME_WRITE_UPDATE_BUFFER:
6090 return de::MovePtr<OperationSupport>(
6091 new FillUpdateBuffer ::Support(resourceDesc, FillUpdateBuffer::BUFFER_OP_UPDATE));
6092 case OPERATION_NAME_WRITE_COPY_BUFFER:
6093 return de::MovePtr<OperationSupport>(new CopyBuffer ::Support(resourceDesc, ACCESS_MODE_WRITE));
6094 case OPERATION_NAME_WRITE_COPY_BUFFER_TO_IMAGE:
6095 return de::MovePtr<OperationSupport>(new CopyBufferToImage ::Support(resourceDesc, ACCESS_MODE_WRITE));
6096 case OPERATION_NAME_WRITE_COPY_IMAGE_TO_BUFFER:
6097 return de::MovePtr<OperationSupport>(new CopyImageToBuffer ::Support(resourceDesc, ACCESS_MODE_WRITE));
6098 case OPERATION_NAME_WRITE_COPY_IMAGE:
6099 return de::MovePtr<OperationSupport>(
6100 new CopyBlitResolveImage ::Support(resourceDesc, CopyBlitResolveImage::TYPE_COPY, ACCESS_MODE_WRITE));
6101 case OPERATION_NAME_WRITE_BLIT_IMAGE:
6102 return de::MovePtr<OperationSupport>(
6103 new CopyBlitResolveImage ::Support(resourceDesc, CopyBlitResolveImage::TYPE_BLIT, ACCESS_MODE_WRITE));
6104 case OPERATION_NAME_WRITE_SSBO_VERTEX:
6105 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6106 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
6107 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_CONTROL:
6108 return de::MovePtr<OperationSupport>(
6109 new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess,
6110 VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
6111 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_EVALUATION:
6112 return de::MovePtr<OperationSupport>(
6113 new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess,
6114 VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
6115 case OPERATION_NAME_WRITE_SSBO_GEOMETRY:
6116 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6117 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
6118 case OPERATION_NAME_WRITE_SSBO_FRAGMENT:
6119 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6120 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
6121 case OPERATION_NAME_WRITE_SSBO_COMPUTE:
6122 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6123 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
6124 case OPERATION_NAME_WRITE_SSBO_COMPUTE_INDIRECT:
6125 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6126 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT,
6127 ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
6128 case OPERATION_NAME_WRITE_IMAGE_VERTEX:
6129 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6130 resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
6131 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_CONTROL:
6132 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6133 resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
6134 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_EVALUATION:
6135 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6136 resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
6137 case OPERATION_NAME_WRITE_IMAGE_GEOMETRY:
6138 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6139 resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
6140 case OPERATION_NAME_WRITE_IMAGE_FRAGMENT:
6141 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6142 resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
6143 case OPERATION_NAME_WRITE_IMAGE_COMPUTE:
6144 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6145 resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
6146 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_INDIRECT:
6147 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6148 resourceDesc, ACCESS_MODE_WRITE, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT,
6149 ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
6150 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_MULTISAMPLE:
6151 return de::MovePtr<OperationSupport>(new ShaderAccess ::MSImageSupport(resourceDesc));
6152 case OPERATION_NAME_WRITE_CLEAR_COLOR_IMAGE:
6153 return de::MovePtr<OperationSupport>(new ClearImage ::Support(resourceDesc, ClearImage::CLEAR_MODE_COLOR));
6154 case OPERATION_NAME_WRITE_CLEAR_DEPTH_STENCIL_IMAGE:
6155 return de::MovePtr<OperationSupport>(
6156 new ClearImage ::Support(resourceDesc, ClearImage::CLEAR_MODE_DEPTH_STENCIL));
6157 case OPERATION_NAME_WRITE_DRAW:
6158 return de::MovePtr<OperationSupport>(new Draw ::Support(resourceDesc, Draw::DRAW_CALL_DRAW));
6159 case OPERATION_NAME_WRITE_DRAW_INDEXED:
6160 return de::MovePtr<OperationSupport>(new Draw ::Support(resourceDesc, Draw::DRAW_CALL_DRAW_INDEXED));
6161 case OPERATION_NAME_WRITE_DRAW_INDIRECT:
6162 return de::MovePtr<OperationSupport>(new Draw ::Support(resourceDesc, Draw::DRAW_CALL_DRAW_INDIRECT));
6163 case OPERATION_NAME_WRITE_DRAW_INDEXED_INDIRECT:
6164 return de::MovePtr<OperationSupport>(new Draw ::Support(resourceDesc, Draw::DRAW_CALL_DRAW_INDEXED_INDIRECT));
6165 case OPERATION_NAME_WRITE_CLEAR_ATTACHMENTS:
6166 return de::MovePtr<OperationSupport>(new ClearAttachments ::Support(resourceDesc));
6167 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW:
6168 return de::MovePtr<OperationSupport>(new IndirectBuffer ::WriteSupport(resourceDesc));
6169 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW_INDEXED:
6170 return de::MovePtr<OperationSupport>(new IndirectBuffer ::WriteSupport(resourceDesc));
6171 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DISPATCH:
6172 return de::MovePtr<OperationSupport>(new IndirectBuffer ::WriteSupport(resourceDesc));
6173 case OPERATION_NAME_WRITE_UPDATE_INDEX_BUFFER:
6174 return de::MovePtr<OperationSupport>(
6175 new FillUpdateBuffer ::Support(resourceDesc, FillUpdateBuffer::BUFFER_OP_UPDATE_WITH_INDEX_PATTERN));
6176
6177 case OPERATION_NAME_READ_COPY_BUFFER:
6178 return de::MovePtr<OperationSupport>(new CopyBuffer ::Support(resourceDesc, ACCESS_MODE_READ));
6179 case OPERATION_NAME_READ_COPY_BUFFER_TO_IMAGE:
6180 return de::MovePtr<OperationSupport>(new CopyBufferToImage ::Support(resourceDesc, ACCESS_MODE_READ));
6181 case OPERATION_NAME_READ_COPY_IMAGE_TO_BUFFER:
6182 return de::MovePtr<OperationSupport>(new CopyImageToBuffer ::Support(resourceDesc, ACCESS_MODE_READ));
6183 case OPERATION_NAME_READ_COPY_IMAGE:
6184 return de::MovePtr<OperationSupport>(
6185 new CopyBlitResolveImage::Support(resourceDesc, CopyBlitResolveImage::TYPE_COPY, ACCESS_MODE_READ));
6186 case OPERATION_NAME_READ_BLIT_IMAGE:
6187 return de::MovePtr<OperationSupport>(
6188 new CopyBlitResolveImage::Support(resourceDesc, CopyBlitResolveImage::TYPE_BLIT, ACCESS_MODE_READ));
6189 case OPERATION_NAME_READ_RESOLVE_IMAGE:
6190 return de::MovePtr<OperationSupport>(
6191 new CopyBlitResolveImage::Support(resourceDesc, CopyBlitResolveImage::TYPE_RESOLVE, ACCESS_MODE_READ));
6192 case OPERATION_NAME_READ_UBO_VERTEX:
6193 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6194 resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
6195 case OPERATION_NAME_READ_UBO_TESSELLATION_CONTROL:
6196 return de::MovePtr<OperationSupport>(
6197 new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess,
6198 VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
6199 case OPERATION_NAME_READ_UBO_TESSELLATION_EVALUATION:
6200 return de::MovePtr<OperationSupport>(
6201 new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess,
6202 VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
6203 case OPERATION_NAME_READ_UBO_GEOMETRY:
6204 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6205 resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
6206 case OPERATION_NAME_READ_UBO_FRAGMENT:
6207 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6208 resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
6209 case OPERATION_NAME_READ_UBO_COMPUTE:
6210 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6211 resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
6212 case OPERATION_NAME_READ_UBO_COMPUTE_INDIRECT:
6213 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6214 resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT,
6215 ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
6216 case OPERATION_NAME_READ_UBO_TEXEL_VERTEX:
6217 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6218 resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
6219 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_CONTROL:
6220 return de::MovePtr<OperationSupport>(
6221 new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL, ACCESS_MODE_READ,
6222 specializedAccess, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
6223 case OPERATION_NAME_READ_UBO_TEXEL_TESSELLATION_EVALUATION:
6224 return de::MovePtr<OperationSupport>(
6225 new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL, ACCESS_MODE_READ,
6226 specializedAccess, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
6227 case OPERATION_NAME_READ_UBO_TEXEL_GEOMETRY:
6228 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL,
6229 ACCESS_MODE_READ, specializedAccess,
6230 VK_SHADER_STAGE_GEOMETRY_BIT));
6231 case OPERATION_NAME_READ_UBO_TEXEL_FRAGMENT:
6232 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL,
6233 ACCESS_MODE_READ, specializedAccess,
6234 VK_SHADER_STAGE_FRAGMENT_BIT));
6235 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE:
6236 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6237 resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
6238 case OPERATION_NAME_READ_UBO_TEXEL_COMPUTE_INDIRECT:
6239 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6240 resourceDesc, BUFFER_TYPE_UNIFORM_TEXEL, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT,
6241 ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
6242 case OPERATION_NAME_READ_SSBO_VERTEX:
6243 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6244 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
6245 case OPERATION_NAME_READ_SSBO_TESSELLATION_CONTROL:
6246 return de::MovePtr<OperationSupport>(
6247 new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess,
6248 VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
6249 case OPERATION_NAME_READ_SSBO_TESSELLATION_EVALUATION:
6250 return de::MovePtr<OperationSupport>(
6251 new ShaderAccess ::BufferSupport(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess,
6252 VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
6253 case OPERATION_NAME_READ_SSBO_GEOMETRY:
6254 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6255 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
6256 case OPERATION_NAME_READ_SSBO_FRAGMENT:
6257 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6258 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
6259 case OPERATION_NAME_READ_SSBO_COMPUTE:
6260 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6261 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
6262 case OPERATION_NAME_READ_SSBO_COMPUTE_INDIRECT:
6263 return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport(
6264 resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT,
6265 ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
6266 case OPERATION_NAME_READ_IMAGE_VERTEX:
6267 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6268 resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
6269 case OPERATION_NAME_READ_IMAGE_TESSELLATION_CONTROL:
6270 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6271 resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
6272 case OPERATION_NAME_READ_IMAGE_TESSELLATION_EVALUATION:
6273 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6274 resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
6275 case OPERATION_NAME_READ_IMAGE_GEOMETRY:
6276 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6277 resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
6278 case OPERATION_NAME_READ_IMAGE_FRAGMENT:
6279 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6280 resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
6281 case OPERATION_NAME_READ_IMAGE_COMPUTE:
6282 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6283 resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
6284 case OPERATION_NAME_READ_IMAGE_COMPUTE_INDIRECT:
6285 return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport(
6286 resourceDesc, ACCESS_MODE_READ, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT,
6287 ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
6288 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW:
6289 return de::MovePtr<OperationSupport>(new IndirectBuffer ::ReadSupport(resourceDesc));
6290 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW_INDEXED:
6291 return de::MovePtr<OperationSupport>(new IndirectBuffer ::ReadSupport(resourceDesc));
6292 case OPERATION_NAME_READ_INDIRECT_BUFFER_DISPATCH:
6293 return de::MovePtr<OperationSupport>(new IndirectBuffer ::ReadSupport(resourceDesc));
6294 case OPERATION_NAME_READ_VERTEX_INPUT:
6295 return de::MovePtr<OperationSupport>(new VertexInput ::Support(resourceDesc, VertexInput::DRAW_MODE_VERTEX));
6296 case OPERATION_NAME_READ_INDEX_INPUT:
6297 return de::MovePtr<OperationSupport>(new VertexInput ::Support(resourceDesc, VertexInput::DRAW_MODE_INDEXED));
6298
6299 case OPERATION_NAME_COPY_BUFFER:
6300 return de::MovePtr<OperationSupport>(new CopyBuffer ::CopySupport(resourceDesc));
6301 case OPERATION_NAME_COPY_IMAGE:
6302 return de::MovePtr<OperationSupport>(
6303 new CopyBlitResolveImage::CopySupport(resourceDesc, CopyBlitResolveImage::TYPE_COPY));
6304 case OPERATION_NAME_BLIT_IMAGE:
6305 return de::MovePtr<OperationSupport>(
6306 new CopyBlitResolveImage::CopySupport(resourceDesc, CopyBlitResolveImage::TYPE_BLIT));
6307 case OPERATION_NAME_COPY_SSBO_VERTEX:
6308 return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport(
6309 resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_VERTEX_BIT));
6310 case OPERATION_NAME_COPY_SSBO_TESSELLATION_CONTROL:
6311 return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport(
6312 resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
6313 case OPERATION_NAME_COPY_SSBO_TESSELLATION_EVALUATION:
6314 return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport(
6315 resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
6316 case OPERATION_NAME_COPY_SSBO_GEOMETRY:
6317 return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport(
6318 resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_GEOMETRY_BIT));
6319 case OPERATION_NAME_COPY_SSBO_FRAGMENT:
6320 return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport(
6321 resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_FRAGMENT_BIT));
6322 case OPERATION_NAME_COPY_SSBO_COMPUTE:
6323 return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport(
6324 resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT));
6325 case OPERATION_NAME_COPY_SSBO_COMPUTE_INDIRECT:
6326 return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport(
6327 resourceDesc, BUFFER_TYPE_STORAGE, specializedAccess, VK_SHADER_STAGE_COMPUTE_BIT,
6328 ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
6329 case OPERATION_NAME_COPY_IMAGE_VERTEX:
6330 return de::MovePtr<OperationSupport>(
6331 new ShaderAccess ::CopyImageSupport(resourceDesc, VK_SHADER_STAGE_VERTEX_BIT, specializedAccess));
6332 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_CONTROL:
6333 return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyImageSupport(
6334 resourceDesc, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, specializedAccess));
6335 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_EVALUATION:
6336 return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyImageSupport(
6337 resourceDesc, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, specializedAccess));
6338 case OPERATION_NAME_COPY_IMAGE_GEOMETRY:
6339 return de::MovePtr<OperationSupport>(
6340 new ShaderAccess ::CopyImageSupport(resourceDesc, VK_SHADER_STAGE_GEOMETRY_BIT, specializedAccess));
6341 case OPERATION_NAME_COPY_IMAGE_FRAGMENT:
6342 return de::MovePtr<OperationSupport>(
6343 new ShaderAccess ::CopyImageSupport(resourceDesc, VK_SHADER_STAGE_FRAGMENT_BIT, specializedAccess));
6344 case OPERATION_NAME_COPY_IMAGE_COMPUTE:
6345 return de::MovePtr<OperationSupport>(
6346 new ShaderAccess ::CopyImageSupport(resourceDesc, VK_SHADER_STAGE_COMPUTE_BIT, specializedAccess));
6347 case OPERATION_NAME_COPY_IMAGE_COMPUTE_INDIRECT:
6348 return de::MovePtr<OperationSupport>(
6349 new ShaderAccess ::CopyImageSupport(resourceDesc, VK_SHADER_STAGE_COMPUTE_BIT, specializedAccess,
6350 ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
6351
6352 default:
6353 DE_ASSERT(0);
6354 return de::MovePtr<OperationSupport>();
6355 }
6356 }
6357
6358 bool isStageSupported(const vk::VkShaderStageFlagBits stage, const vk::VkQueueFlags queueFlags)
6359 {
6360 switch (stage)
6361 {
6362 case vk::VK_SHADER_STAGE_VERTEX_BIT:
6363 case vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
6364 case vk::VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
6365 case vk::VK_SHADER_STAGE_GEOMETRY_BIT:
6366 case vk::VK_SHADER_STAGE_FRAGMENT_BIT:
6367 if ((queueFlags & (vk::VK_QUEUE_GRAPHICS_BIT)) == 0)
6368 return false;
6369 break;
6370 case vk::VK_SHADER_STAGE_COMPUTE_BIT:
6371 if ((queueFlags & (vk::VK_QUEUE_COMPUTE_BIT)) == 0)
6372 return false;
6373 break;
6374 default:
6375 break;
6376 }
6377 return true;
6378 }
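//! Illustrative example (for documentation only; the queue-flag value below is an assumption): isStageSupported()
//! lets callers reject operations whose shader stage cannot execute on the chosen queue. A compute-only queue
//! (VK_QUEUE_COMPUTE_BIT without VK_QUEUE_GRAPHICS_BIT) supports VK_SHADER_STAGE_COMPUTE_BIT but none of the
//! graphics stages:
//!
//!     const vk::VkQueueFlags computeOnly = vk::VK_QUEUE_COMPUTE_BIT;                     // assumed queue capabilities
//!     DE_ASSERT(isStageSupported(vk::VK_SHADER_STAGE_COMPUTE_BIT, computeOnly));         // compute stage allowed
//!     DE_ASSERT(!isStageSupported(vk::VK_SHADER_STAGE_FRAGMENT_BIT, computeOnly));       // graphics stage rejected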
6379
6380 } // namespace synchronization
6381 } // namespace vkt
6382