1 /*------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
4 *
5 * Copyright (c) 2015 The Khronos Group Inc.
6 * Copyright (c) 2015 ARM Ltd.
7 * Copyright (c) 2023 LunarG, Inc.
8 * Copyright (c) 2023 Nintendo
9 *
10 * Licensed under the Apache License, Version 2.0 (the "License");
11 * you may not use this file except in compliance with the License.
12 * You may obtain a copy of the License at
13 *
14 * http://www.apache.org/licenses/LICENSE-2.0
15 *
16 * Unless required by applicable law or agreed to in writing, software
17 * distributed under the License is distributed on an "AS IS" BASIS,
18 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
19 * See the License for the specific language governing permissions and
20 * limitations under the License.
21 *
22 *//*!
23 * \file
24 * \brief Timestamp Tests
25 *//*--------------------------------------------------------------------*/
26
27 #include "vktPipelineTimestampTests.hpp"
28 #include "vktPipelineClearUtil.hpp"
29 #include "vktPipelineImageUtil.hpp"
30 #include "vktPipelineVertexUtil.hpp"
31 #include "vktPipelineReferenceRenderer.hpp"
32 #include "vktCustomInstancesDevices.hpp"
33 #include "vktTestCaseUtil.hpp"
34 #include "vkSafetyCriticalUtil.hpp"
35 #include "vkImageUtil.hpp"
36 #include "vkMemUtil.hpp"
37 #include "vkPrograms.hpp"
38 #include "vkBuilderUtil.hpp"
39 #include "vkQueryUtil.hpp"
40 #include "vkRef.hpp"
41 #include "vkRefUtil.hpp"
42 #include "vkTypeUtil.hpp"
43 #include "vkCmdUtil.hpp"
44 #include "vkObjUtil.hpp"
45 #include "vkDeviceUtil.hpp"
46 #include "tcuImageCompare.hpp"
47 #include "tcuCommandLine.hpp"
48 #include "deUniquePtr.hpp"
49 #include "deStringUtil.hpp"
50 #include "deMemory.h"
51
52 #include <sstream>
53 #include <vector>
54 #include <set>
55 #include <cctype>
56 #include <locale>
57 #include <limits>
58 #include <thread>
59 #include <chrono>
60 #include <time.h>
61 #include <algorithm>
62
63 #ifdef CTS_USES_VULKANSC
64 // VulkanSC supports VK_EXT_calibrated_timestamps but not VK_KHR_calibrated_timestamps
65 #define VkCalibratedTimestampInfoKHR VkCalibratedTimestampInfoEXT
66 #define VkTimeDomainKHR VkTimeDomainEXT
67 #define VK_TIME_DOMAIN_DEVICE_KHR VK_TIME_DOMAIN_DEVICE_EXT
68 #define VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT
69 #define VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT
70 #define VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT
71 #endif // CTS_USES_VULKANSC
72
73 #if (DE_OS == DE_OS_WIN32)
74 #define VC_EXTRALEAN
75 #define WIN32_LEAN_AND_MEAN
76 #define NOMINMAX
77 #include <windows.h>
78 #endif
79
80 namespace vkt
81 {
82 namespace pipeline
83 {
84
85 using namespace vk;
86
87 namespace
88 {
89 typedef std::vector<VkPipelineStageFlagBits> StageFlagVector;
90
91 // helper functions
92 #define GEN_DESC_STRING(name, postfix) \
93 do \
94 { \
95 for (std::string::size_type ndx = 0; ndx < strlen(#name); ++ndx) \
96 if (isDescription && #name[ndx] == '_') \
97 desc << " "; \
98 else \
99 desc << std::tolower(#name[ndx], loc); \
100 if (isDescription) \
101 desc << " " << #postfix; \
102 else \
103 desc << "_" << #postfix; \
104 } while (false)
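// For example, GEN_DESC_STRING(VERTEX_SHADER, stage) appends "vertex shader stage" when isDescription
// is true and "vertex_shader_stage" otherwise (underscores are only replaced for descriptions).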
105
106 std::string getPipelineStageFlagStr(const VkPipelineStageFlagBits stage, bool isDescription)
107 {
108 std::ostringstream desc;
109 std::locale loc;
110 switch (stage)
111 {
112 #define STAGE_CASE(p) \
113 case VK_PIPELINE_STAGE_##p##_BIT: \
114 { \
115 GEN_DESC_STRING(p, stage); \
116 break; \
117 }
118 STAGE_CASE(TOP_OF_PIPE)
119 STAGE_CASE(DRAW_INDIRECT)
120 STAGE_CASE(VERTEX_INPUT)
121 STAGE_CASE(VERTEX_SHADER)
122 STAGE_CASE(TESSELLATION_CONTROL_SHADER)
123 STAGE_CASE(TESSELLATION_EVALUATION_SHADER)
124 STAGE_CASE(GEOMETRY_SHADER)
125 STAGE_CASE(FRAGMENT_SHADER)
126 STAGE_CASE(EARLY_FRAGMENT_TESTS)
127 STAGE_CASE(LATE_FRAGMENT_TESTS)
128 STAGE_CASE(COLOR_ATTACHMENT_OUTPUT)
129 STAGE_CASE(COMPUTE_SHADER)
130 STAGE_CASE(TRANSFER)
131 STAGE_CASE(HOST)
132 STAGE_CASE(ALL_GRAPHICS)
133 STAGE_CASE(ALL_COMMANDS)
134 #undef STAGE_CASE
135 default:
136 desc << "unknown stage!";
137 DE_FATAL("Unknown Stage!");
138 break;
139 }
140
141 return desc.str();
142 }
143
144 enum TransferMethod
145 {
146 TRANSFER_METHOD_COPY_BUFFER = 0,
147 TRANSFER_METHOD_COPY_IMAGE,
148 TRANSFER_METHOD_BLIT_IMAGE,
149 TRANSFER_METHOD_COPY_BUFFER_TO_IMAGE,
150 TRANSFER_METHOD_COPY_IMAGE_TO_BUFFER,
151 TRANSFER_METHOD_UPDATE_BUFFER,
152 TRANSFER_METHOD_FILL_BUFFER,
153 TRANSFER_METHOD_CLEAR_COLOR_IMAGE,
154 TRANSFER_METHOD_CLEAR_DEPTH_STENCIL_IMAGE,
155 TRANSFER_METHOD_RESOLVE_IMAGE,
156 TRANSFER_METHOD_COPY_QUERY_POOL_RESULTS,
157 TRANSFER_METHOD_COPY_QUERY_POOL_RESULTS_STRIDE_ZERO,
158 TRANSFER_METHOD_LAST
159 };
160
161 std::string getTransferMethodStr(const TransferMethod method, bool isDescription)
162 {
163 std::ostringstream desc;
164 std::locale loc;
165
166 switch (method)
167 {
168 #define METHOD_CASE(p) \
169 case TRANSFER_METHOD_##p: \
170 { \
171 GEN_DESC_STRING(p, method); \
172 break; \
173 }
174 METHOD_CASE(COPY_BUFFER)
175 METHOD_CASE(COPY_IMAGE)
176 METHOD_CASE(BLIT_IMAGE)
177 METHOD_CASE(COPY_BUFFER_TO_IMAGE)
178 METHOD_CASE(COPY_IMAGE_TO_BUFFER)
179 METHOD_CASE(UPDATE_BUFFER)
180 METHOD_CASE(FILL_BUFFER)
181 METHOD_CASE(CLEAR_COLOR_IMAGE)
182 METHOD_CASE(CLEAR_DEPTH_STENCIL_IMAGE)
183 METHOD_CASE(RESOLVE_IMAGE)
184 METHOD_CASE(COPY_QUERY_POOL_RESULTS)
185 METHOD_CASE(COPY_QUERY_POOL_RESULTS_STRIDE_ZERO)
186 #undef METHOD_CASE
187 default:
188 desc << "unknown method!";
189 DE_FATAL("Unknown method!");
190 break;
191 }
192
193 return desc.str();
194 }
195
196 constexpr uint32_t MIN_TIMESTAMP_VALID_BITS = 36;
197 constexpr uint32_t MAX_TIMESTAMP_VALID_BITS = 64;
198
199 // Checks that the number of valid bits for the given queue meets the spec requirements.
200 void checkValidBits(uint32_t validBits, uint32_t queueFamilyIndex)
201 {
202 if (validBits < MIN_TIMESTAMP_VALID_BITS || validBits > MAX_TIMESTAMP_VALID_BITS)
203 {
204 std::ostringstream msg;
205 msg << "Invalid value for timestampValidBits (" << validBits << ") in queue index " << queueFamilyIndex;
206 TCU_FAIL(msg.str());
207 }
208 }
209
210 // Returns the timestamp mask given the number of valid timestamp bits.
211 uint64_t timestampMaskFromValidBits(uint32_t validBits)
212 {
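// E.g. validBits == 36 yields 0x0000000FFFFFFFFF; the 64-bit case is special-cased because
// shifting a 64-bit value by 64 would be undefined behaviour.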
213 return ((validBits == MAX_TIMESTAMP_VALID_BITS) ? std::numeric_limits<uint64_t>::max() : ((1ULL << validBits) - 1));
214 }
215
216 // Checks support for timestamps and returns the timestamp mask.
217 uint64_t checkTimestampsSupported(const InstanceInterface &vki, const VkPhysicalDevice physDevice,
218 const uint32_t queueFamilyIndex)
219 {
220 const std::vector<VkQueueFamilyProperties> queueProperties =
221 vk::getPhysicalDeviceQueueFamilyProperties(vki, physDevice);
222 DE_ASSERT(queueFamilyIndex < queueProperties.size());
223 const uint32_t &validBits = queueProperties[queueFamilyIndex].timestampValidBits;
224
225 if (validBits == 0)
226 throw tcu::NotSupportedError("Queue does not support timestamps");
227
228 checkValidBits(validBits, queueFamilyIndex);
229 return timestampMaskFromValidBits(validBits);
230 }
231
232 void checkTimestampBits(uint64_t timestamp, uint64_t mask)
233 {
234 // The spec says:
235 // timestampValidBits is the unsigned integer count of meaningful bits in
236 // the timestamps written via vkCmdWriteTimestamp. The valid range for the
237 // count is 36..64 bits, or a value of 0, indicating no support for
238 // timestamps. Bits outside the valid range are guaranteed to be zeros.
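// Example: a queue reporting 36 valid bits has the mask 0x0000000FFFFFFFFF, so any timestamp with
// bit 36 or higher set would fail this check.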
239 if (timestamp > mask)
240 {
241 std::ostringstream msg;
242 msg << std::hex << "Invalid device timestamp value 0x" << timestamp << " according to device timestamp mask 0x"
243 << mask;
244 TCU_FAIL(msg.str());
245 }
246 }
247
248 // helper classes
249 class TimestampTestParam
250 {
251 public:
252 TimestampTestParam(const PipelineConstructionType pipelineConstructionType, const VkPipelineStageFlagBits *stages,
253 const uint32_t stageCount, const bool inRenderPass, const bool hostQueryReset,
254 const bool transferOnlyQueue, const VkQueryResultFlags queryResultFlags);
255 virtual ~TimestampTestParam(void);
256 virtual const std::string generateTestName(void) const;
257 PipelineConstructionType getPipelineConstructionType(void) const
258 {
259 return m_pipelineConstructionType;
260 }
261 StageFlagVector getStageVector(void) const
262 {
263 return m_stageVec;
264 }
265 bool getInRenderPass(void) const
266 {
267 return m_inRenderPass;
268 }
269 bool getHostQueryReset(void) const
270 {
271 return m_hostQueryReset;
272 }
273 bool getTransferOnlyQueue(void) const
274 {
275 return m_transferOnlyQueue;
276 }
277 VkQueryResultFlags getQueryResultFlags(void) const
278 {
279 return m_queryResultFlags;
280 }
281 void toggleInRenderPass(void)
282 {
283 m_inRenderPass = !m_inRenderPass;
284 }
285 void toggleHostQueryReset(void)
286 {
287 m_hostQueryReset = !m_hostQueryReset;
288 }
289
290 void setQueryResultFlags(VkQueryResultFlags flags)
291 {
292 m_queryResultFlags = flags;
293 }
294
295 protected:
296 const PipelineConstructionType m_pipelineConstructionType;
297 StageFlagVector m_stageVec;
298 bool m_inRenderPass;
299 bool m_hostQueryReset;
300 bool m_transferOnlyQueue;
301 VkQueryResultFlags m_queryResultFlags;
302 };
303
304 TimestampTestParam::TimestampTestParam(const PipelineConstructionType pipelineConstructionType,
305 const VkPipelineStageFlagBits *stages, const uint32_t stageCount,
306 const bool inRenderPass, const bool hostQueryReset, const bool transferOnlyQueue,
307 const VkQueryResultFlags queryResultFlags)
308 : m_pipelineConstructionType(pipelineConstructionType)
309 , m_inRenderPass(inRenderPass)
310 , m_hostQueryReset(hostQueryReset)
311 , m_transferOnlyQueue(transferOnlyQueue)
312 , m_queryResultFlags(queryResultFlags)
313 {
314 for (uint32_t ndx = 0; ndx < stageCount; ndx++)
315 {
316 m_stageVec.push_back(stages[ndx]);
317 }
318 }
319
320 TimestampTestParam::~TimestampTestParam(void)
321 {
322 }
323
324 const std::string TimestampTestParam::generateTestName(void) const
325 {
326 std::string result("");
327
328 for (StageFlagVector::const_iterator it = m_stageVec.begin(); it != m_stageVec.end(); it++)
329 {
330 if (*it != VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT)
331 {
332 result += getPipelineStageFlagStr(*it, false) + '_';
333 }
334 }
335 if (m_inRenderPass)
336 result += "in_render_pass";
337 else
338 result += "out_of_render_pass";
339
340 if (m_hostQueryReset)
341 result += "_host_query_reset";
342
343 if (m_transferOnlyQueue)
344 result += "_transfer_queue";
345
346 if (m_queryResultFlags & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT)
347 result += "_with_availability_bit";
348
349 return result;
350 }
351
352 class TransferTimestampTestParam : public TimestampTestParam
353 {
354 public:
355 TransferTimestampTestParam(const PipelineConstructionType pipelineConstructionType,
356 const VkPipelineStageFlagBits *stages, const uint32_t stageCount,
357 const bool inRenderPass, const bool hostQueryReset, const bool transferOnlyQueue,
358 const uint32_t methodNdx, const VkQueryResultFlags flags);
359 ~TransferTimestampTestParam(void)
360 {
361 }
362 const std::string generateTestName(void) const;
363 const std::string generateTestDescription(void) const;
364 TransferMethod getMethod(void) const
365 {
366 return m_method;
367 }
368
369 protected:
370 TransferMethod m_method;
371 };
372
373 TransferTimestampTestParam::TransferTimestampTestParam(const PipelineConstructionType pipelineConstructionType,
374 const VkPipelineStageFlagBits *stages, const uint32_t stageCount,
375 const bool inRenderPass, const bool hostQueryReset,
376 const bool transferOnlyQueue, const uint32_t methodNdx,
377 const VkQueryResultFlags flags)
378 : TimestampTestParam(pipelineConstructionType, stages, stageCount, inRenderPass, hostQueryReset, transferOnlyQueue,
379 flags)
380 {
381 DE_ASSERT(methodNdx < (uint32_t)TRANSFER_METHOD_LAST);
382
383 m_method = (TransferMethod)methodNdx;
384 }
385
386 const std::string TransferTimestampTestParam::generateTestName(void) const
387 {
388 std::string result("");
389
390 for (StageFlagVector::const_iterator it = m_stageVec.begin(); it != m_stageVec.end(); it++)
391 {
392 if (*it != VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT)
393 {
394 result += getPipelineStageFlagStr(*it, false) + '_';
395 }
396 }
397
398 result += "with_" + getTransferMethodStr(m_method, false);
399
400 if (m_hostQueryReset)
401 result += "_host_query_reset";
402
403 if (m_transferOnlyQueue)
404 result += "_transfer_queue";
405
406 if (m_queryResultFlags & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT)
407 result += "_with_availability_bit";
408
409 return result;
410 }
411
412 class TwoCmdBuffersTestParam : public TimestampTestParam
413 {
414 public:
415 TwoCmdBuffersTestParam(const PipelineConstructionType pipelineConstructionType,
416 const VkPipelineStageFlagBits *stages, const uint32_t stageCount, const bool inRenderPass,
417 const bool hostQueryReset, const bool transferOnlyQueue,
418 const VkCommandBufferLevel cmdBufferLevel, const VkQueryResultFlags queryPoolResultFlags);
419 ~TwoCmdBuffersTestParam(void)
420 {
421 }
422 VkCommandBufferLevel getCmdBufferLevel(void) const
423 {
424 return m_cmdBufferLevel;
425 }
426
427 protected:
428 VkCommandBufferLevel m_cmdBufferLevel;
429 };
430
431 TwoCmdBuffersTestParam::TwoCmdBuffersTestParam(const PipelineConstructionType pipelineConstructionType,
432 const VkPipelineStageFlagBits *stages, const uint32_t stageCount,
433 const bool inRenderPass, const bool hostQueryReset,
434 const bool transferOnlyQueue, const VkCommandBufferLevel cmdBufferLevel,
435 const VkQueryResultFlags queryPoolResultFlags)
436 : TimestampTestParam(pipelineConstructionType, stages, stageCount, inRenderPass, hostQueryReset, transferOnlyQueue,
437 queryPoolResultFlags)
438 , m_cmdBufferLevel(cmdBufferLevel)
439 {
440 }
441
442 template <class Test>
443 vkt::TestCase *newTestCase(tcu::TestContext &testContext, TimestampTestParam *testParam)
444 {
445 return new Test(testContext, testParam->generateTestName().c_str(), testParam);
446 }
447
448 // Test Classes
449 class TimestampTest : public vkt::TestCase
450 {
451 public:
452 enum
453 {
454 ENTRY_COUNT = 8
455 };
456
457 TimestampTest(tcu::TestContext &testContext, const std::string &name, const TimestampTestParam *param)
458 : vkt::TestCase(testContext, name)
459 , m_pipelineConstructionType(param->getPipelineConstructionType())
460 , m_stages(param->getStageVector())
461 , m_inRenderPass(param->getInRenderPass())
462 , m_hostQueryReset(param->getHostQueryReset())
463 , m_transferOnlyQueue(param->getTransferOnlyQueue())
464 , m_queryResultFlags(param->getQueryResultFlags())
465 {
466 }
467 virtual ~TimestampTest(void)
468 {
469 }
470 virtual void initPrograms(SourceCollections &programCollection) const;
471 virtual TestInstance *createInstance(Context &context) const;
472 virtual void checkSupport(Context &context) const;
473
474 protected:
475 const PipelineConstructionType m_pipelineConstructionType;
476 const StageFlagVector m_stages;
477 const bool m_inRenderPass;
478 const bool m_hostQueryReset;
479 const bool m_transferOnlyQueue;
480 const VkQueryResultFlags m_queryResultFlags;
481 };
482
483 class TimestampTestInstance : public vkt::TestInstance
484 {
485 public:
486 TimestampTestInstance(Context &context, const StageFlagVector &stages, const bool inRenderPass,
487 const bool hostQueryReset, const bool transferOnlyQueue,
488 const VkQueryResultFlags queryResultFlags);
489
490 virtual ~TimestampTestInstance(void);
491 virtual tcu::TestStatus iterate(void);
492
493 protected:
494 virtual tcu::TestStatus verifyTimestamp(void);
495 virtual void buildPipeline(void);
496 virtual void configCommandBuffer(void);
497
498 Move<VkBuffer> createBufferAndBindMemory(VkDeviceSize size, VkBufferUsageFlags usage,
499 de::MovePtr<Allocation> *pAlloc);
500
501 Move<VkImage> createImage2DAndBindMemory(VkFormat format, uint32_t width, uint32_t height, VkImageUsageFlags usage,
502 VkSampleCountFlagBits sampleCount, de::MovePtr<Allocation> *pAlloc);
503
504 // Creates a custom device whose queue only supports transfer operations
505 void createCustomDeviceWithTransferOnlyQueue(void);
506
507 protected:
508 Move<VkDevice> m_customDevice;
509 de::MovePtr<Allocator> m_customAllocator;
510
511 VkDevice m_device;
512 #ifdef CTS_USES_VULKANSC
513 const CustomInstance m_customInstance;
514 #endif // CTS_USES_VULKANSC
515 Allocator *m_allocator;
516 uint32_t m_queueFamilyIndex;
517
518 const StageFlagVector m_stages;
519 bool m_inRenderPass;
520 bool m_hostQueryReset;
521 bool m_transferOnlyQueue;
522 VkQueryResultFlags m_queryResultFlags;
523
524 Move<VkCommandPool> m_cmdPool;
525 Move<VkCommandBuffer> m_cmdBuffer;
526 Move<VkQueryPool> m_queryPool;
527 uint64_t *m_timestampValues;
528 uint64_t *m_timestampValuesHostQueryReset;
529 uint64_t m_timestampMask;
530 };
531
532 void TimestampTest::initPrograms(SourceCollections &programCollection) const
533 {
534 vkt::TestCase::initPrograms(programCollection);
535 }
536
537 void TimestampTest::checkSupport(Context &context) const
538 {
539 const InstanceInterface &vki = context.getInstanceInterface();
540 const VkPhysicalDevice physicalDevice = context.getPhysicalDevice();
541 uint32_t queueFamilyIndex = context.getUniversalQueueFamilyIndex();
542
543 if (m_transferOnlyQueue)
544 queueFamilyIndex = findQueueFamilyIndexWithCaps(vki, physicalDevice, VK_QUEUE_TRANSFER_BIT,
545 VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT);
546
547 // Check support for timestamp queries
548 const std::vector<VkQueueFamilyProperties> queueProperties =
549 vk::getPhysicalDeviceQueueFamilyProperties(context.getInstanceInterface(), context.getPhysicalDevice());
550 DE_ASSERT(queueFamilyIndex < (uint32_t)queueProperties.size());
551 if (!queueProperties[queueFamilyIndex].timestampValidBits)
552 throw tcu::NotSupportedError("Queue does not support timestamps");
553
554 if (m_hostQueryReset)
555 {
556 // Check VK_EXT_host_query_reset is supported
557 context.requireDeviceFunctionality("VK_EXT_host_query_reset");
558
559 if (context.getHostQueryResetFeatures().hostQueryReset == VK_FALSE)
560 throw tcu::NotSupportedError("Implementation doesn't support resetting queries from the host");
561 }
562 checkPipelineConstructionRequirements(context.getInstanceInterface(), context.getPhysicalDevice(),
563 m_pipelineConstructionType);
564 }
565
566 TestInstance *TimestampTest::createInstance(Context &context) const
567 {
568 return new TimestampTestInstance(context, m_stages, m_inRenderPass, m_hostQueryReset, false, m_queryResultFlags);
569 }
570
571 TimestampTestInstance::TimestampTestInstance(Context &context, const StageFlagVector &stages, const bool inRenderPass,
572 const bool hostQueryReset, const bool transferOnlyQueue,
573 const VkQueryResultFlags queryResultFlags)
574 : TestInstance(context)
575 #ifdef CTS_USES_VULKANSC
576 , m_customInstance(createCustomInstanceFromContext(context))
577 #endif // CTS_USES_VULKANSC
578 , m_stages(stages)
579 , m_inRenderPass(inRenderPass)
580 , m_hostQueryReset(hostQueryReset)
581 , m_transferOnlyQueue(transferOnlyQueue)
582 , m_queryResultFlags(queryResultFlags)
583 {
584 const DeviceInterface &vk = context.getDeviceInterface();
585
586 m_device = context.getDevice();
587 m_allocator = &context.getDefaultAllocator();
588 m_queueFamilyIndex = context.getUniversalQueueFamilyIndex();
589
590 // When needed, create a custom device and overwrite m_device, m_allocator and m_queueFamilyIndex
591 if (m_transferOnlyQueue)
592 createCustomDeviceWithTransferOnlyQueue();
593
594 m_timestampMask =
595 checkTimestampsSupported(context.getInstanceInterface(), context.getPhysicalDevice(), m_queueFamilyIndex);
596
597 // Create Query Pool
598 {
599 const VkQueryPoolCreateInfo queryPoolParams = {
600 VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, // VkStructureType sType;
601 DE_NULL, // const void* pNext;
602 0u, // VkQueryPoolCreateFlags flags;
603 VK_QUERY_TYPE_TIMESTAMP, // VkQueryType queryType;
604 TimestampTest::ENTRY_COUNT, // uint32_t queryCount;
605 0u, // VkQueryPipelineStatisticFlags pipelineStatistics;
606 };
607
608 m_queryPool = createQueryPool(vk, m_device, &queryPoolParams);
609 }
610
611 // Create command pool
612 m_cmdPool = createCommandPool(vk, m_device, VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, m_queueFamilyIndex);
613
614 // Create command buffer
615 m_cmdBuffer = allocateCommandBuffer(vk, m_device, *m_cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY);
616
617 // alloc timestamp values
618 m_timestampValues =
619 new uint64_t[m_stages.size() * ((m_queryResultFlags & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT) ? 2u : 1u)];
620
621 if (m_hostQueryReset)
622 m_timestampValuesHostQueryReset = new uint64_t[m_stages.size() * 2];
623 else
624 m_timestampValuesHostQueryReset = DE_NULL;
625 }
626
627 TimestampTestInstance::~TimestampTestInstance(void)
628 {
629 delete[] m_timestampValues;
630 m_timestampValues = NULL;
631
632 delete[] m_timestampValuesHostQueryReset;
633 m_timestampValuesHostQueryReset = NULL;
634 }
635
636 void TimestampTestInstance::buildPipeline(void)
637 {
638 }
639
640 void TimestampTestInstance::configCommandBuffer(void)
641 {
642 const DeviceInterface &vk = m_context.getDeviceInterface();
643
644 beginCommandBuffer(vk, *m_cmdBuffer, 0u);
645
646 if (!m_hostQueryReset)
647 vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, TimestampTest::ENTRY_COUNT);
648
649 uint32_t timestampEntry = 0;
650 for (const auto &stage : m_stages)
651 {
652 vk.cmdWriteTimestamp(*m_cmdBuffer, stage, *m_queryPool, timestampEntry++);
653 }
654
655 endCommandBuffer(vk, *m_cmdBuffer);
656 }
657
658 tcu::TestStatus TimestampTestInstance::iterate(void)
659 {
660 const DeviceInterface &vk = m_context.getDeviceInterface();
661 const VkQueue queue = getDeviceQueue(vk, m_device, m_queueFamilyIndex, 0);
662 const bool availabilityBit = m_queryResultFlags & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT;
663 const uint32_t stageSize = (uint32_t)m_stages.size();
664 const uint32_t queryDataSize = uint32_t(sizeof(uint64_t) * (availabilityBit ? 2u : 1u));
665
666 buildPipeline();
667 configCommandBuffer();
668 if (m_hostQueryReset)
669 {
670 vk.resetQueryPool(m_device, *m_queryPool, 0u, TimestampTest::ENTRY_COUNT);
671 }
672 submitCommandsAndWait(vk, m_device, queue, m_cmdBuffer.get());
673
674 // Get timestamp value from query pool
675 VK_CHECK(vk.getQueryPoolResults(m_device, *m_queryPool, 0u, stageSize, queryDataSize * stageSize,
676 (void *)m_timestampValues, queryDataSize, m_queryResultFlags));
677
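// With VK_QUERY_RESULT_WITH_AVAILABILITY_BIT each query returns a (timestamp, availability) pair,
// so the results are laid out as [ts0, avail0, ts1, avail1, ...]; the extra increment below steps
// over the availability words while masking.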
678 for (uint32_t ndx = 0; ndx < stageSize; ndx++)
679 {
680 m_timestampValues[ndx] &= m_timestampMask;
681 if (availabilityBit)
682 ndx++;
683 }
684
685 if (m_hostQueryReset)
686 {
687 // Initialize timestampValuesHostQueryReset values
688 deMemset(m_timestampValuesHostQueryReset, 0, sizeof(uint64_t) * stageSize * 2);
689
690 for (uint32_t ndx = 0; ndx < stageSize; ndx++)
691 {
692 const uint32_t ndxTimestampValue = ndx * (availabilityBit ? 2u : 1u);
693 m_timestampValuesHostQueryReset[2 * ndx] = m_timestampValues[ndxTimestampValue];
694 }
695
696 // Host resets the query pool
697 vk.resetQueryPool(m_device, *m_queryPool, 0u, stageSize);
698 // Get timestamp value from query pool
699 vk::VkResult res =
700 vk.getQueryPoolResults(m_device, *m_queryPool, 0u, stageSize, sizeof(uint64_t) * stageSize * 2,
701 (void *)m_timestampValuesHostQueryReset, sizeof(uint64_t) * 2,
702 VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WITH_AVAILABILITY_BIT);
703
704 /* From Vulkan spec:
705 *
706 * If VK_QUERY_RESULT_WAIT_BIT and VK_QUERY_RESULT_PARTIAL_BIT are both not set then no result values are written to pData
707 * for queries that are in the unavailable state at the time of the call, and vkGetQueryPoolResults returns VK_NOT_READY.
708 * However, availability state is still written to pData for those queries if VK_QUERY_RESULT_WITH_AVAILABILITY_BIT is set.
709 */
710 if (res != vk::VK_NOT_READY)
711 return tcu::TestStatus::fail("QueryPoolResults incorrect reset");
712
713 for (uint32_t ndx = 0; ndx < stageSize; ndx++)
714 {
715 const uint32_t ndxTimestampValue = ndx * (availabilityBit ? 2u : 1u);
716 if ((m_timestampValuesHostQueryReset[2 * ndx] & m_timestampMask) != m_timestampValues[ndxTimestampValue])
717 return tcu::TestStatus::fail("QueryPoolResults returned value was modified");
718 if (m_timestampValuesHostQueryReset[2 * ndx + 1] != 0u)
719 return tcu::TestStatus::fail("QueryPoolResults availability status is not zero");
720 }
721 }
722
723 return verifyTimestamp();
724 }
725
726 tcu::TestStatus TimestampTestInstance::verifyTimestamp(void)
727 {
728 bool availabilityBit = m_queryResultFlags & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT;
729 uint32_t increment = availabilityBit ? 2u : 1u;
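// Timestamps were written in submission order, so every later stage's value must be greater than or
// equal to each earlier one; with the availability bit the data is interleaved as
// [ts, avail, ts, avail, ...], hence the stride of two.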
730 for (uint32_t first = 0u; first < m_stages.size(); first += increment)
731 {
732 for (uint32_t second = 0u; second < first; second += increment)
733 {
734 if (availabilityBit && (m_timestampValues[first + 1u] == 0u || m_timestampValues[second + 1u] == 0u))
735 {
736 return tcu::TestStatus::fail("Timestamp query not available");
737 }
738
739 if (m_timestampValues[first] < m_timestampValues[second])
740 {
741 return tcu::TestStatus::fail("Latter stage timestamp is smaller than the former stage timestamp.");
742 }
743 }
744 }
745
746 return tcu::TestStatus::pass("Timestamp increases steadily.");
747 }
748
749 Move<VkBuffer> TimestampTestInstance::createBufferAndBindMemory(VkDeviceSize size, VkBufferUsageFlags usage,
750 de::MovePtr<Allocation> *pAlloc)
751 {
752 const DeviceInterface &vk = m_context.getDeviceInterface();
753 const VkBufferCreateInfo vertexBufferParams = {
754 VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, // VkStructureType sType;
755 DE_NULL, // const void* pNext;
756 0u, // VkBufferCreateFlags flags;
757 size, // VkDeviceSize size;
758 usage, // VkBufferUsageFlags usage;
759 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
760 1u, // uint32_t queueFamilyIndexCount;
761 &m_queueFamilyIndex // const uint32_t* pQueueFamilyIndices;
762 };
763
764 Move<VkBuffer> vertexBuffer = createBuffer(vk, m_device, &vertexBufferParams);
765 de::MovePtr<Allocation> vertexBufferAlloc =
766 m_allocator->allocate(getBufferMemoryRequirements(vk, m_device, *vertexBuffer), MemoryRequirement::HostVisible);
767
768 VK_CHECK(
769 vk.bindBufferMemory(m_device, *vertexBuffer, vertexBufferAlloc->getMemory(), vertexBufferAlloc->getOffset()));
770
771 DE_ASSERT(pAlloc);
772 *pAlloc = vertexBufferAlloc;
773
774 return vertexBuffer;
775 }
776
777 Move<VkImage> TimestampTestInstance::createImage2DAndBindMemory(VkFormat format, uint32_t width, uint32_t height,
778 VkImageUsageFlags usage,
779 VkSampleCountFlagBits sampleCount,
780 de::details::MovePtr<Allocation> *pAlloc)
781 {
782 const DeviceInterface &vk = m_context.getDeviceInterface();
783
784 // Optimal tiling feature check
785 VkFormatProperties formatProperty;
786
787 m_context.getInstanceInterface().getPhysicalDeviceFormatProperties(m_context.getPhysicalDevice(), format,
788 &formatProperty);
789
790 if ((usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) &&
791 !(formatProperty.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
792 {
793 // Remove color attachment usage if the optimal tiling feature does not support it
794 usage &= ~VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
795 }
796 if ((usage & VK_IMAGE_USAGE_STORAGE_BIT) &&
797 !(formatProperty.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT))
798 {
799 // Remove storage usage if the optimal tiling feature does not support it
800 usage &= ~VK_IMAGE_USAGE_STORAGE_BIT;
801 }
802
803 const VkImageCreateInfo colorImageParams = {
804 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
805 DE_NULL, // const void* pNext;
806 0u, // VkImageCreateFlags flags;
807 VK_IMAGE_TYPE_2D, // VkImageType imageType;
808 format, // VkFormat format;
809 {width, height, 1u}, // VkExtent3D extent;
810 1u, // uint32_t mipLevels;
811 1u, // uint32_t arrayLayers;
812 sampleCount, // VkSampleCountFlagBits samples;
813 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
814 usage, // VkImageUsageFlags usage;
815 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
816 1u, // uint32_t queueFamilyIndexCount;
817 &m_queueFamilyIndex, // const uint32_t* pQueueFamilyIndices;
818 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
819 };
820
821 Move<VkImage> image = createImage(vk, m_device, &colorImageParams);
822
823 // Allocate and bind image memory
824 de::MovePtr<Allocation> colorImageAlloc =
825 m_allocator->allocate(getImageMemoryRequirements(vk, m_device, *image), MemoryRequirement::Any);
826 VK_CHECK(vk.bindImageMemory(m_device, *image, colorImageAlloc->getMemory(), colorImageAlloc->getOffset()));
827
828 DE_ASSERT(pAlloc);
829 *pAlloc = colorImageAlloc;
830
831 return image;
832 }
833
834 void TimestampTestInstance::createCustomDeviceWithTransferOnlyQueue(void)
835 {
836 #ifdef CTS_USES_VULKANSC
837 vk::VkInstance instance = m_customInstance;
838 const vk::InstanceInterface &vki = m_customInstance.getDriver();
839 const VkPhysicalDevice physicalDevice =
840 chooseDevice(vki, m_customInstance, m_context.getTestContext().getCommandLine());
841 #else
842 vk::VkInstance instance = m_context.getInstance();
843 const vk::InstanceInterface &vki = m_context.getInstanceInterface();
844 const VkPhysicalDevice physicalDevice = m_context.getPhysicalDevice();
845 #endif // CTS_USES_VULKANSC
846
847 const DeviceInterface &vk = m_context.getDeviceInterface();
848
849 m_queueFamilyIndex = findQueueFamilyIndexWithCaps(vki, physicalDevice, VK_QUEUE_TRANSFER_BIT,
850 VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT);
851
852 const std::vector<VkQueueFamilyProperties> queueFamilies =
853 getPhysicalDeviceQueueFamilyProperties(vki, physicalDevice);
854
855 // This must be found, findQueueFamilyIndexWithCaps would have
856 // thrown a NotSupported exception if the requested queue type did
857 // not exist. Similarly, this was written with the assumption the
858 // "alternative" queue would be different to the universal queue.
859 DE_ASSERT(m_queueFamilyIndex < queueFamilies.size() &&
860 m_queueFamilyIndex != m_context.getUniversalQueueFamilyIndex());
861 const float queuePriority = 1.0f;
862 const VkDeviceQueueCreateInfo deviceQueueCreateInfos{
863 VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, // VkStructureType sType;
864 nullptr, // const void* pNext;
865 (VkDeviceQueueCreateFlags)0u, // VkDeviceQueueCreateFlags flags;
866 m_queueFamilyIndex, // uint32_t queueFamilyIndex;
867 1u, // uint32_t queueCount;
868 &queuePriority, // const float* pQueuePriorities;
869 };
870
871 // Replicate default device extension list.
872 const auto extensionNames = m_context.getDeviceCreationExtensions();
873 auto queryResetFeatures = m_context.getHostQueryResetFeatures();
874 auto deviceFeatures2 = m_context.getDeviceFeatures2();
875
876 const void *pNext = &deviceFeatures2;
877 if (m_context.getUsedApiVersion() < VK_API_VERSION_1_2)
878 {
879 queryResetFeatures.pNext = &deviceFeatures2;
880 pNext = &queryResetFeatures;
881 }
882
883 #ifdef CTS_USES_VULKANSC
884 VkDeviceObjectReservationCreateInfo memReservationInfo =
885 m_context.getTestContext().getCommandLine().isSubProcess() ? m_context.getResourceInterface()->getStatMax() :
886 resetDeviceObjectReservationCreateInfo();
887 memReservationInfo.pNext = pNext;
888 pNext = &memReservationInfo;
889
890 VkPipelineCacheCreateInfo pcCI;
891 std::vector<VkPipelinePoolSize> poolSizes;
892 if (m_context.getTestContext().getCommandLine().isSubProcess())
893 {
894 if (m_context.getResourceInterface()->getCacheDataSize() > 0)
895 {
896 pcCI = {
897 VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO, // VkStructureType sType;
898 DE_NULL, // const void* pNext;
899 VK_PIPELINE_CACHE_CREATE_READ_ONLY_BIT |
900 VK_PIPELINE_CACHE_CREATE_USE_APPLICATION_STORAGE_BIT, // VkPipelineCacheCreateFlags flags;
901 m_context.getResourceInterface()->getCacheDataSize(), // uintptr_t initialDataSize;
902 m_context.getResourceInterface()->getCacheData() // const void* pInitialData;
903 };
904 memReservationInfo.pipelineCacheCreateInfoCount = 1;
905 memReservationInfo.pPipelineCacheCreateInfos = &pcCI;
906 }
907 poolSizes = m_context.getResourceInterface()->getPipelinePoolSizes();
908 if (!poolSizes.empty())
909 {
910 memReservationInfo.pipelinePoolSizeCount = uint32_t(poolSizes.size());
911 memReservationInfo.pPipelinePoolSizes = poolSizes.data();
912 }
913 }
914 #endif // CTS_USES_VULKANSC
915
916 const VkDeviceCreateInfo deviceCreateInfo{
917 VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, // VkStructureType sType;
918 pNext, // const void* pNext;
919 (VkDeviceCreateFlags)0u, // VkDeviceCreateFlags flags;
920 1u, // uint32_t queueCreateInfoCount;
921 &deviceQueueCreateInfos, // const VkDeviceQueueCreateInfo* pQueueCreateInfos;
922 0u, // uint32_t enabledLayerCount;
923 DE_NULL, // const char* const* ppEnabledLayerNames;
924 static_cast<uint32_t>(extensionNames.size()), // uint32_t enabledExtensionCount;
925 extensionNames.data(), // const char* const* ppEnabledExtensionNames;
926 DE_NULL, // const VkPhysicalDeviceFeatures* pEnabledFeatures;
927 };
928
929 m_customDevice =
930 vkt::createCustomDevice(m_context.getTestContext().getCommandLine().isValidationEnabled(),
931 m_context.getPlatformInterface(), instance, vki, physicalDevice, &deviceCreateInfo);
932 m_customAllocator = de::MovePtr<Allocator>(
933 new SimpleAllocator(vk, *m_customDevice, getPhysicalDeviceMemoryProperties(vki, physicalDevice)));
934
935 m_device = *m_customDevice;
936 m_allocator = &(*m_customAllocator);
937 }
938
939 template <class T>
940 class CalibratedTimestampTest : public vkt::TestCase
941 {
942 public:
943 CalibratedTimestampTest(tcu::TestContext &testContext, const std::string &name) : vkt::TestCase{testContext, name}
944 {
945 }
946
947 virtual ~CalibratedTimestampTest(void) override
948 {
949 }
950 virtual void initPrograms(SourceCollections &programCollection) const override;
951 virtual void checkSupport(Context &context) const override;
952 virtual vkt::TestInstance *createInstance(Context &context) const override;
953 };
954
955 class CalibratedTimestampTestInstance : public vkt::TestInstance
956 {
957 public:
958 CalibratedTimestampTestInstance(Context &context);
959 virtual ~CalibratedTimestampTestInstance(void) override
960 {
961 }
962 virtual tcu::TestStatus iterate(void) override;
963 virtual tcu::TestStatus runTest(void) = 0;
964
965 protected:
966 struct CalibratedTimestamp
967 {
968 CalibratedTimestamp(uint64_t timestamp_, uint64_t deviation_) : timestamp{timestamp_}, deviation(deviation_)
969 {
970 }
971 CalibratedTimestamp() : timestamp{}, deviation{}
972 {
973 }
974 uint64_t timestamp;
975 uint64_t deviation;
976 };
977
978 std::vector<VkTimeDomainKHR> getDomainSubset(const std::vector<VkTimeDomainKHR> &available,
979 const std::vector<VkTimeDomainKHR> &interesting) const;
980 std::string domainName(VkTimeDomainKHR domain) const;
981 uint64_t getHostNativeTimestamp(VkTimeDomainKHR hostDomain) const;
982 uint64_t getHostNanoseconds(uint64_t hostTimestamp) const;
983 uint64_t getDeviceNanoseconds(uint64_t devTicksDelta) const;
984 std::vector<CalibratedTimestamp> getCalibratedTimestamps(const std::vector<VkTimeDomainKHR> &domains);
985 CalibratedTimestamp getCalibratedTimestamp(VkTimeDomainKHR domain);
986 void appendQualityMessage(const std::string &message);
987
988 void verifyDevTimestampMask(uint64_t value) const;
989 uint64_t absDiffWithOverflow(uint64_t a, uint64_t b, uint64_t mask = std::numeric_limits<uint64_t>::max()) const;
990 uint64_t positiveDiffWithOverflow(uint64_t before, uint64_t after,
991 uint64_t mask = std::numeric_limits<uint64_t>::max()) const;
992 bool outOfRange(uint64_t begin, uint64_t middle, uint64_t end) const;
993
994 static constexpr uint64_t kBatchTimeLimitNanos = 1000000000u; // 1 sec.
995 static constexpr uint64_t kDeviationErrorLimitNanos = 100000000u; // 100 ms.
996 static constexpr uint64_t kDeviationWarningLimitNanos = 50000000u; // 50 ms.
997 static constexpr uint64_t kDefaultToleranceNanos = 100000000u; // 100 ms.
998
999 #if (DE_OS == DE_OS_WIN32)
1000 // Preprocessor used to avoid warning about unused variable.
1001 static constexpr uint64_t kNanosecondsPerSecond = 1000000000u;
1002 #endif
1003 static constexpr uint64_t kNanosecondsPerMillisecond = 1000000u;
1004
1005 std::string m_qualityMessage;
1006 float m_timestampPeriod;
1007 std::vector<VkTimeDomainKHR> m_devDomains;
1008 std::vector<VkTimeDomainKHR> m_hostDomains;
1009 #if (DE_OS == DE_OS_WIN32)
1010 uint64_t m_frequency;
1011 #endif
1012
1013 Move<VkCommandPool> m_cmdPool;
1014 Move<VkCommandBuffer> m_cmdBuffer;
1015 Move<VkQueryPool> m_queryPool;
1016 uint64_t m_devTimestampMask;
1017 };
1018
1019 class CalibratedTimestampDevDomainTestInstance : public CalibratedTimestampTestInstance
1020 {
1021 public:
1022 CalibratedTimestampDevDomainTestInstance(Context &context) : CalibratedTimestampTestInstance{context}
1023 {
1024 }
1025
1026 virtual ~CalibratedTimestampDevDomainTestInstance(void)
1027 {
1028 }
1029 virtual tcu::TestStatus runTest(void) override;
1030 };
1031
1032 class CalibratedTimestampHostDomainTestInstance : public CalibratedTimestampTestInstance
1033 {
1034 public:
1035 CalibratedTimestampHostDomainTestInstance(Context &context) : CalibratedTimestampTestInstance{context}
1036 {
1037 }
1038
1039 virtual ~CalibratedTimestampHostDomainTestInstance(void)
1040 {
1041 }
1042 virtual tcu::TestStatus runTest(void) override;
1043 };
1044
1045 class CalibratedTimestampCalibrationTestInstance : public CalibratedTimestampTestInstance
1046 {
1047 public:
1048 CalibratedTimestampCalibrationTestInstance(Context &context) : CalibratedTimestampTestInstance{context}
1049 {
1050 }
1051
1052 virtual ~CalibratedTimestampCalibrationTestInstance(void)
1053 {
1054 }
1055 virtual tcu::TestStatus runTest(void) override;
1056 };
1057
1058 template <class T>
1059 void CalibratedTimestampTest<T>::initPrograms(SourceCollections &programCollection) const
1060 {
1061 vkt::TestCase::initPrograms(programCollection);
1062 }
1063
1064 template <class T>
1065 vkt::TestInstance *CalibratedTimestampTest<T>::createInstance(Context &context) const
1066 {
1067 return new T{context};
1068 }
1069
1070 template <class T>
1071 void CalibratedTimestampTest<T>::checkSupport(Context &context) const
1072 {
1073 #ifdef CTS_USES_VULKANSC
1074 context.requireDeviceFunctionality("VK_EXT_calibrated_timestamps");
1075 #else
1076 if (!context.isDeviceFunctionalitySupported("VK_KHR_calibrated_timestamps") &&
1077 !context.isDeviceFunctionalitySupported("VK_EXT_calibrated_timestamps"))
1078 TCU_THROW(NotSupportedError, "VK_KHR_calibrated_timestamps and VK_EXT_calibrated_timestamps are not supported");
1079 #endif
1080 }
1081
1082 CalibratedTimestampTestInstance::CalibratedTimestampTestInstance(Context &context) : TestInstance{context}
1083 {
1084 #if (DE_OS == DE_OS_WIN32)
1085 LARGE_INTEGER freq;
1086 if (!QueryPerformanceFrequency(&freq))
1087 {
1088 throw tcu::ResourceError("Unable to get clock frequency with QueryPerformanceFrequency");
1089 }
1090 if (freq.QuadPart <= 0)
1091 {
1092 throw tcu::ResourceError("QueryPerformanceFrequency did not return a positive number");
1093 }
1094 m_frequency = static_cast<uint64_t>(freq.QuadPart);
1095 #endif
1096
1097 const InstanceInterface &vki = context.getInstanceInterface();
1098 const VkPhysicalDevice physDevice = context.getPhysicalDevice();
1099 const uint32_t queueFamilyIndex = context.getUniversalQueueFamilyIndex();
1100
1101 // Get timestamp mask.
1102 m_devTimestampMask = checkTimestampsSupported(vki, physDevice, queueFamilyIndex);
1103
1104 // Get calibrateable time domains.
1105 m_timestampPeriod = getPhysicalDeviceProperties(vki, physDevice).limits.timestampPeriod;
1106
1107 uint32_t domainCount;
1108 VK_CHECK(vki.getPhysicalDeviceCalibrateableTimeDomainsKHR(physDevice, &domainCount, DE_NULL));
1109 if (domainCount == 0)
1110 {
1111 throw tcu::NotSupportedError("No calibrateable time domains found");
1112 }
1113
1114 std::vector<VkTimeDomainKHR> domains;
1115 domains.resize(domainCount);
1116 VK_CHECK(vki.getPhysicalDeviceCalibrateableTimeDomainsKHR(physDevice, &domainCount, domains.data()));
1117
1118 // Find the dev domain.
1119 std::vector<VkTimeDomainKHR> preferredDevDomains;
1120 preferredDevDomains.push_back(VK_TIME_DOMAIN_DEVICE_KHR);
1121 m_devDomains = getDomainSubset(domains, preferredDevDomains);
1122
1123 // Find the host domain.
1124 std::vector<VkTimeDomainKHR> preferredHostDomains;
1125 #if (DE_OS == DE_OS_WIN32)
1126 preferredHostDomains.push_back(VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR);
1127 #else
1128 preferredHostDomains.push_back(VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR);
1129 preferredHostDomains.push_back(VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR);
1130 #endif
1131 m_hostDomains = getDomainSubset(domains, preferredHostDomains);
1132
1133 // Initialize command buffers and queries.
1134 const DeviceInterface &vk = context.getDeviceInterface();
1135 const VkDevice vkDevice = context.getDevice();
1136
1137 const VkQueryPoolCreateInfo queryPoolParams = {
1138 VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, // VkStructureType sType;
1139 DE_NULL, // const void* pNext;
1140 0u, // VkQueryPoolCreateFlags flags;
1141 VK_QUERY_TYPE_TIMESTAMP, // VkQueryType queryType;
1142 1u, // uint32_t queryCount;
1143 0u, // VkQueryPipelineStatisticFlags pipelineStatistics;
1144 };
1145
1146 m_queryPool = createQueryPool(vk, vkDevice, &queryPoolParams);
1147 m_cmdPool = createCommandPool(vk, vkDevice, VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, queueFamilyIndex);
1148 m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, *m_cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY);
1149
1150 beginCommandBuffer(vk, *m_cmdBuffer, 0u);
1151 vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, 1u);
1152 vk.cmdWriteTimestamp(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, *m_queryPool, 0u);
1153 endCommandBuffer(vk, *m_cmdBuffer);
1154 }
1155
1156 std::vector<VkTimeDomainKHR> CalibratedTimestampTestInstance::getDomainSubset(
1157 const std::vector<VkTimeDomainKHR> &available, const std::vector<VkTimeDomainKHR> &interesting) const
1158 {
1159 const std::set<VkTimeDomainKHR> availableSet(begin(available), end(available));
1160 const std::set<VkTimeDomainKHR> interestingSet(begin(interesting), end(interesting));
1161
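// std::set_intersection requires sorted input ranges; building std::set copies first both
// de-duplicates and sorts the domain lists.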
1162 std::vector<VkTimeDomainKHR> subset;
1163 std::set_intersection(begin(availableSet), end(availableSet), begin(interestingSet), end(interestingSet),
1164 std::back_inserter(subset));
1165 return subset;
1166 }
1167
1168 std::string CalibratedTimestampTestInstance::domainName(VkTimeDomainKHR domain) const
1169 {
1170 switch (domain)
1171 {
1172 case VK_TIME_DOMAIN_DEVICE_KHR:
1173 return "Device Domain";
1174 case VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR:
1175 return "Monotonic Clock";
1176 case VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR:
1177 return "Raw Monotonic Clock";
1178 case VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR:
1179 return "Query Performance Counter";
1180 default:
1181 DE_ASSERT(false);
1182 return "Unknown Time Domain";
1183 }
1184 }
1185
1186 uint64_t CalibratedTimestampTestInstance::getHostNativeTimestamp(VkTimeDomainKHR hostDomain) const
1187 {
1188 #if (DE_OS == DE_OS_WIN32)
1189 DE_ASSERT(hostDomain == VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR);
1190 LARGE_INTEGER result;
1191 if (!QueryPerformanceCounter(&result))
1192 {
1193 throw tcu::ResourceError("Unable to obtain host native timestamp for Win32");
1194 }
1195 if (result.QuadPart < 0)
1196 {
1197 throw tcu::ResourceError("Host-native timestamp for Win32 less than zero");
1198 }
1199 return static_cast<uint64_t>(result.QuadPart);
1200 #else
1201 DE_ASSERT(hostDomain == VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR || hostDomain == VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR);
1202
1203 #if defined(CLOCK_MONOTONIC_RAW)
1204 clockid_t id = ((hostDomain == VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR) ? CLOCK_MONOTONIC : CLOCK_MONOTONIC_RAW);
1205 #else
1206 clockid_t id = CLOCK_MONOTONIC;
1207 #endif
1208 struct timespec ts;
1209 if (clock_gettime(id, &ts) != 0)
1210 {
1211 throw tcu::ResourceError("Unable to obtain host native timestamp for POSIX");
1212 }
1213 return (static_cast<uint64_t>(ts.tv_sec) * 1000000000ULL + ts.tv_nsec);
1214 #endif
1215 }
1216
1217 uint64_t CalibratedTimestampTestInstance::getHostNanoseconds(uint64_t hostTimestamp) const
1218 {
1219 #if (DE_OS == DE_OS_WIN32)
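// QueryPerformanceCounter ticks are converted using the cached frequency; the whole seconds are split
// off first so the intermediate multiplication cannot overflow. As a purely illustrative example, with
// a hypothetical frequency of 10000000 (10 MHz) a raw count of 25000000 maps to 2500000000 ns.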
1220 uint64_t secs = hostTimestamp / m_frequency;
1221 uint64_t nanos = ((hostTimestamp % m_frequency) * kNanosecondsPerSecond) / m_frequency;
1222
1223 return ((secs * kNanosecondsPerSecond) + nanos);
1224 #else
1225 return hostTimestamp;
1226 #endif
1227 }
1228
1229 // This method is used when devTicksDelta is (supposedly) a small number of ticks between two events. We check that
1230 // devTicksDelta is reasonably small for the calculation below to succeed without losing precision.
1231 uint64_t CalibratedTimestampTestInstance::getDeviceNanoseconds(uint64_t devTicksDelta) const
1232 {
1233 if (devTicksDelta > static_cast<uint64_t>(std::numeric_limits<uint32_t>::max()))
1234 {
1235 std::ostringstream msg;
1236 msg << "Number of device ticks too big for conversion to nanoseconds: " << devTicksDelta;
1237 throw tcu::InternalError(msg.str());
1238 }
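// Limiting the delta to 32 bits keeps the tick count exactly representable in the double conversion
// below. E.g. with timestampPeriod == 1.0f every device tick corresponds to one nanosecond.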
1239 return static_cast<uint64_t>(static_cast<double>(devTicksDelta) * m_timestampPeriod);
1240 }
1241
1242 tcu::TestStatus CalibratedTimestampTestInstance::iterate(void)
1243 {
1244 // Notes:
1245 // 1) Clocks may overflow.
1246 // 2) Because m_timestampPeriod is a floating point value, there may be less than one nano per tick.
1247
1248 const tcu::TestStatus result = runTest();
1249 if (result.getCode() != QP_TEST_RESULT_PASS)
1250 return result;
1251
1252 if (!m_qualityMessage.empty())
1253 {
1254 const std::string msg = "Warnings found: " + m_qualityMessage;
1255 return tcu::TestStatus(QP_TEST_RESULT_QUALITY_WARNING, msg);
1256 }
1257 return tcu::TestStatus::pass("Pass");
1258 }
1259
1260 // Verify all invalid timestamp bits are zero.
1261 void CalibratedTimestampTestInstance::verifyDevTimestampMask(uint64_t value) const
1262 {
1263 checkTimestampBits(value, m_devTimestampMask);
1264 }
1265
1266 // Absolute difference between two timestamps A and B taking overflow into account. Pick the smallest difference between the two
1267 // possibilities. We don't know beforehand if B > A or vice versa. Take the valid bit mask into account.
1268 uint64_t CalibratedTimestampTestInstance::absDiffWithOverflow(uint64_t a, uint64_t b, uint64_t mask) const
1269 {
1270 // <---------+ range +-------->
1271 //
1272 // +--------------------------+
1273 // | uint64_t |
1274 // +------^-----------^-------+
1275 // + +
1276 // a b
1277 // +----------->
1278 // ccccccccccccc
1279 // ------> +-------
1280 // ddddddd dddddddd
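// Worked example: with mask 0xFF, a = 0x10 and b = 0xF0 give c = 0xE0 and d = 0x20; the smaller
// wrap-around distance 0x20 is returned.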
1281
1282 DE_ASSERT(a <= mask);
1283 DE_ASSERT(b <= mask);
1284
1285 const uint64_t c = ((a >= b) ? (a - b) : (b - a));
1286
1287 if (c == 0u)
1288 return c;
1289
1290 const uint64_t d = (mask - c) + 1;
1291
1292 return ((c < d) ? c : d);
1293 }
1294
1295 // Positive difference between both marks, advancing from before to after, taking overflow and the valid bit mask into account.
1296 uint64_t CalibratedTimestampTestInstance::positiveDiffWithOverflow(uint64_t before, uint64_t after, uint64_t mask) const
1297 {
1298 DE_ASSERT(before <= mask);
1299 DE_ASSERT(after <= mask);
1300
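// Example: with mask 0xFF, before = 0xF0 and after = 0x10 yields (0xFF - 0xE0) + 1 = 0x20, i.e. the
// counter advanced 0x20 ticks across one wrap-around.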
1301 return ((before <= after) ? (after - before) : ((mask - (before - after)) + 1));
1302 }
1303
1304 // Return true if middle is not between begin and end, taking overflow into account.
1305 bool CalibratedTimestampTestInstance::outOfRange(uint64_t begin, uint64_t middle, uint64_t end) const
1306 {
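// Example with a wrapped range: begin = 0xF0, end = 0x10: middle = 0x80 is out of range (true),
// while middle = 0xFC or 0x04 lies inside the wrapped interval (false).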
1307 return (((begin <= end) && (middle < begin || middle > end)) ||
1308 ((begin > end) && (middle > end && middle < begin)));
1309 }
1310
1311 std::vector<CalibratedTimestampTestInstance::CalibratedTimestamp> CalibratedTimestampTestInstance::
1312 getCalibratedTimestamps(const std::vector<VkTimeDomainKHR> &domains)
1313 {
1314 std::vector<VkCalibratedTimestampInfoKHR> infos;
1315
1316 for (auto domain : domains)
1317 {
1318 VkCalibratedTimestampInfoKHR info;
1319 info.sType = getStructureType<VkCalibratedTimestampInfoKHR>();
1320 info.pNext = DE_NULL;
1321 info.timeDomain = domain;
1322 infos.push_back(info);
1323 }
1324
1325 std::vector<uint64_t> timestamps(domains.size());
1326 uint64_t deviation;
1327
1328 const DeviceInterface &vk = m_context.getDeviceInterface();
1329 const VkDevice vkDevice = m_context.getDevice();
1330
1331 VK_CHECK(vk.getCalibratedTimestampsKHR(vkDevice, static_cast<uint32_t>(domains.size()), infos.data(),
1332 timestamps.data(), &deviation));
1333
1334 if (deviation > kDeviationErrorLimitNanos)
1335 {
1336 throw tcu::InternalError("Calibrated maximum deviation too big");
1337 }
1338 else if (deviation > kDeviationWarningLimitNanos)
1339 {
1340 appendQualityMessage("Calibrated maximum deviation beyond desirable limits");
1341 }
1342 else if (deviation == 0 && domains.size() > 1)
1343 {
1344 appendQualityMessage("Calibrated maximum deviation reported as zero");
1345 }
1346
1347 // Pack results.
1348 std::vector<CalibratedTimestamp> results;
1349
1350 for (size_t i = 0; i < domains.size(); ++i)
1351 {
1352 if (domains[i] == VK_TIME_DOMAIN_DEVICE_KHR)
1353 verifyDevTimestampMask(timestamps[i]);
1354 results.emplace_back(timestamps[i], deviation);
1355 }
1356
1357 return results;
1358 }
1359
1360 CalibratedTimestampTestInstance::CalibratedTimestamp CalibratedTimestampTestInstance::getCalibratedTimestamp(
1361 VkTimeDomainKHR domain)
1362 {
1363 // Single domain, single result.
1364 return getCalibratedTimestamps(std::vector<VkTimeDomainKHR>(1, domain))[0];
1365 }
1366
1367 void CalibratedTimestampTestInstance::appendQualityMessage(const std::string &message)
1368 {
1369 if (!m_qualityMessage.empty())
1370 m_qualityMessage += "; ";
1371
1372 m_qualityMessage += message;
1373 }
1374
1375 // Test device domain makes sense and is consistent with vkCmdWriteTimestamp().
1376 tcu::TestStatus CalibratedTimestampDevDomainTestInstance::runTest(void)
1377 {
1378 if (m_devDomains.empty())
1379 throw tcu::NotSupportedError("No suitable device time domains found");
1380
1381 const DeviceInterface &vk = m_context.getDeviceInterface();
1382 const VkDevice vkDevice = m_context.getDevice();
1383 const VkQueue queue = m_context.getUniversalQueue();
1384
1385 for (const auto devDomain : m_devDomains)
1386 {
1387 const CalibratedTimestamp before = getCalibratedTimestamp(devDomain);
1388 submitCommandsAndWait(vk, vkDevice, queue, m_cmdBuffer.get());
1389 const CalibratedTimestamp after = getCalibratedTimestamp(devDomain);
1390 const uint64_t diffNanos =
1391 getDeviceNanoseconds(positiveDiffWithOverflow(before.timestamp, after.timestamp, m_devTimestampMask));
1392 uint64_t written;
1393 VK_CHECK(vk.getQueryPoolResults(vkDevice, *m_queryPool, 0u, 1u, sizeof(written), &written, sizeof(written),
1394 (VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT)));
1395 verifyDevTimestampMask(written);
1396
1397 if (diffNanos > kBatchTimeLimitNanos)
1398 {
1399 return tcu::TestStatus::fail(domainName(devDomain) + ": Batch of work took too long to execute");
1400 }
1401
1402 if (outOfRange(before.timestamp, written, after.timestamp))
1403 {
1404 return tcu::TestStatus::fail(domainName(devDomain) +
1405 ": vkCmdWriteTimestamp() inconsistent with vkGetCalibratedTimestampsKHR()");
1406 }
1407 }
1408
1409 return tcu::TestStatus::pass("Pass");
1410 }
1411
1412 // Test host domain makes sense and is consistent with native host values.
1413 tcu::TestStatus CalibratedTimestampHostDomainTestInstance::runTest(void)
1414 {
1415 if (m_hostDomains.empty())
1416 throw tcu::NotSupportedError("No suitable host time domains found");
1417
1418 for (const auto hostDomain : m_hostDomains)
1419 {
1420 const uint64_t before = getHostNativeTimestamp(hostDomain);
1421 const CalibratedTimestamp vkTS = getCalibratedTimestamp(hostDomain);
1422 const uint64_t after = getHostNativeTimestamp(hostDomain);
1423 const uint64_t diffNanos = getHostNanoseconds(positiveDiffWithOverflow(before, after));
1424
1425 if (diffNanos > kBatchTimeLimitNanos)
1426 {
1427 return tcu::TestStatus::fail(domainName(hostDomain) + ": Querying host domain took too long to execute");
1428 }
1429
1430 if (outOfRange(before, vkTS.timestamp, after))
1431 {
1432 return tcu::TestStatus::fail(domainName(hostDomain) +
1433 ": vkGetCalibratedTimestampsKHR() inconsistent with native host API");
1434 }
1435 }
1436
1437 return tcu::TestStatus::pass("Pass");
1438 }
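// getHostNativeTimestamp() (defined earlier in this file) is expected to read the
// platform clock that matches the requested domain, e.g. clock_gettime() with
// CLOCK_MONOTONIC or CLOCK_MONOTONIC_RAW on POSIX systems and QueryPerformanceCounter()
// on Windows, so the calibrated value can be bracketed by two native readings.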
1439
1440 // Verify predictable timestamps and calibration possible.
1441 tcu::TestStatus CalibratedTimestampCalibrationTestInstance::runTest(void)
1442 {
1443 if (m_devDomains.empty())
1444 throw tcu::NotSupportedError("No suitable device time domains found");
1445 if (m_hostDomains.empty())
1446 throw tcu::NotSupportedError("No suitable host time domains found");
1447
1448 // Sleep time.
1449 constexpr uint32_t kSleepMilliseconds = 200;
1450 constexpr uint32_t kSleepNanoseconds = kSleepMilliseconds * kNanosecondsPerMillisecond;
1451
1452 for (const auto devDomain : m_devDomains)
1453 for (const auto hostDomain : m_hostDomains)
1454 {
1455 std::vector<VkTimeDomainKHR> domains;
1456 domains.push_back(devDomain); // Device results at index 0.
1457 domains.push_back(hostDomain); // Host results at index 1.
1458
1459 // Measure time.
1460 const std::vector<CalibratedTimestamp> before = getCalibratedTimestamps(domains);
1461 std::this_thread::sleep_for(std::chrono::nanoseconds(kSleepNanoseconds));
1462 const std::vector<CalibratedTimestamp> after = getCalibratedTimestamps(domains);
1463
1464 // Check device timestamp is as expected.
1465 const uint64_t devBeforeTicks = before[0].timestamp;
1466 const uint64_t devAfterTicks = after[0].timestamp;
1467 const uint64_t devExpectedTicks =
1468 ((devBeforeTicks + static_cast<uint64_t>(static_cast<double>(kSleepNanoseconds) / m_timestampPeriod)) &
1469 m_devTimestampMask);
1470 const uint64_t devDiffNanos =
1471 getDeviceNanoseconds(absDiffWithOverflow(devAfterTicks, devExpectedTicks, m_devTimestampMask));
1472 const uint64_t maxDevDiffNanos =
1473 std::max({kDefaultToleranceNanos, before[0].deviation + after[0].deviation});
1474
1475 if (devDiffNanos > maxDevDiffNanos)
1476 {
1477 std::ostringstream msg;
1478 msg << "[" << domainName(devDomain) << "] Device expected timestamp differs " << devDiffNanos
1479 << " nanoseconds (expect value <= " << maxDevDiffNanos << ")";
1480 return tcu::TestStatus::fail(msg.str());
1481 }
1482
1483 // Check host timestamp is as expected.
1484 const uint64_t hostBefore = getHostNanoseconds(before[1].timestamp);
1485 const uint64_t hostAfter = getHostNanoseconds(after[1].timestamp);
1486 const uint64_t hostExpected = hostBefore + kSleepNanoseconds;
1487 const uint64_t hostDiff = absDiffWithOverflow(hostAfter, hostExpected);
1488 const uint64_t maxHostDiff = std::max({kDefaultToleranceNanos, before[1].deviation + after[1].deviation});
1489
1490 if (hostDiff > maxHostDiff)
1491 {
1492 std::ostringstream msg;
1493 msg << "[" << domainName(hostDomain) << "] Host expected timestamp differs " << hostDiff
1494 << " nanoseconds (expected value <= " << maxHostDiff << ")";
1495 return tcu::TestStatus::fail(msg.str());
1496 }
1497 }
1498
1499 return tcu::TestStatus::pass("Pass");
1500 }
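// Worked example for the device-side expectation above, assuming a hypothetical
// timestampPeriod of 1.0 ns/tick: sleeping for kSleepMilliseconds = 200 ms should
// advance the device clock by roughly 200000000 ticks, so
//   devExpectedTicks ~= (devBeforeTicks + 200000000) & m_devTimestampMask
// and the measured devAfterTicks may deviate from that prediction by at most
// kDefaultToleranceNanos plus the deviations reported for the two calibrated reads.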
1501
1502 class BasicGraphicsTest : public TimestampTest
1503 {
1504 public:
1505 BasicGraphicsTest(tcu::TestContext &testContext, const std::string &name, const TimestampTestParam *param)
1506 : TimestampTest(testContext, name, param)
1507 {
1508 }
1509 virtual ~BasicGraphicsTest(void)
1510 {
1511 }
1512 virtual void initPrograms(SourceCollections &programCollection) const;
1513 virtual TestInstance *createInstance(Context &context) const;
1514 };
1515
1516 class BasicGraphicsTestInstance : public TimestampTestInstance
1517 {
1518 public:
1519 enum
1520 {
1521 VK_MAX_SHADER_STAGES = 6,
1522 };
1523 BasicGraphicsTestInstance(Context &context, const PipelineConstructionType pipelineConstructionType,
1524 const StageFlagVector stages, const bool inRenderPass, const bool hostQueryReset,
1525 const VkQueryResultFlags queryResultFlags);
1526
1527 virtual ~BasicGraphicsTestInstance(void);
1528
1529 protected:
1530 virtual void buildPipeline(void);
1531 virtual void configCommandBuffer(void);
1532 virtual void buildVertexBuffer(void);
1533 virtual void buildRenderPass(VkFormat colorFormat, VkFormat depthFormat);
1534
1535 virtual void buildFrameBuffer(tcu::UVec2 renderSize, VkFormat colorFormat, VkFormat depthFormat);
1536
1537 protected:
1538 const PipelineConstructionType m_pipelineConstructionType;
1539 const tcu::UVec2 m_renderSize;
1540 const VkFormat m_colorFormat;
1541 const VkFormat m_depthFormat;
1542
1543 Move<VkImage> m_colorImage;
1544 de::MovePtr<Allocation> m_colorImageAlloc;
1545 Move<VkImage> m_depthImage;
1546 de::MovePtr<Allocation> m_depthImageAlloc;
1547 Move<VkImageView> m_colorAttachmentView;
1548 Move<VkImageView> m_depthAttachmentView;
1549 RenderPassWrapper m_renderPass;
1550 Move<VkFramebuffer> m_framebuffer;
1551 VkImageMemoryBarrier m_imageLayoutBarriers[2];
1552
1553 de::MovePtr<Allocation> m_vertexBufferAlloc;
1554 Move<VkBuffer> m_vertexBuffer;
1555 std::vector<Vertex4RGBA> m_vertices;
1556
1557 PipelineLayoutWrapper m_pipelineLayout;
1558 GraphicsPipelineWrapper m_graphicsPipeline;
1559 };
1560
1561 void BasicGraphicsTest::initPrograms(SourceCollections &programCollection) const
1562 {
1563 programCollection.glslSources.add("color_vert")
1564 << glu::VertexSource("#version 310 es\n"
1565 "layout(location = 0) in vec4 position;\n"
1566 "layout(location = 1) in vec4 color;\n"
1567 "layout(location = 0) out highp vec4 vtxColor;\n"
1568 "void main (void)\n"
1569 "{\n"
1570 " gl_Position = position;\n"
1571 " vtxColor = color;\n"
1572 "}\n");
1573
1574 programCollection.glslSources.add("color_frag")
1575 << glu::FragmentSource("#version 310 es\n"
1576 "layout(location = 0) in highp vec4 vtxColor;\n"
1577 "layout(location = 0) out highp vec4 fragColor;\n"
1578 "void main (void)\n"
1579 "{\n"
1580 " fragColor = vtxColor;\n"
1581 "}\n");
1582 }
1583
1584 TestInstance *BasicGraphicsTest::createInstance(Context &context) const
1585 {
1586 return new BasicGraphicsTestInstance(context, m_pipelineConstructionType, m_stages, m_inRenderPass,
1587 m_hostQueryReset, m_queryResultFlags);
1588 }
1589
1590 void BasicGraphicsTestInstance::buildVertexBuffer(void)
1591 {
1592 const DeviceInterface &vk = m_context.getDeviceInterface();
1593
1594 // Create vertex buffer
1595 {
1596 m_vertexBuffer = createBufferAndBindMemory(1024u, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, &m_vertexBufferAlloc);
1597 m_vertices = createOverlappingQuads();
1598
1599 // Load vertices into vertex buffer
1600 deMemcpy(m_vertexBufferAlloc->getHostPtr(), m_vertices.data(), m_vertices.size() * sizeof(Vertex4RGBA));
1601 flushAlloc(vk, m_device, *m_vertexBufferAlloc);
1602 }
1603 }
1604
1605 void BasicGraphicsTestInstance::buildRenderPass(VkFormat colorFormat, VkFormat depthFormat)
1606 {
1607 const DeviceInterface &vk = m_context.getDeviceInterface();
1608
1609 // Create render pass
1610 m_renderPass = RenderPassWrapper(m_pipelineConstructionType, vk, m_device, colorFormat, depthFormat);
1611 }
1612
1613 void BasicGraphicsTestInstance::buildFrameBuffer(tcu::UVec2 renderSize, VkFormat colorFormat, VkFormat depthFormat)
1614 {
1615 const DeviceInterface &vk = m_context.getDeviceInterface();
1616 const VkComponentMapping ComponentMappingRGBA = {VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G,
1617 VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A};
1618
1619 // Create color image
1620 {
1621 m_colorImage = createImage2DAndBindMemory(colorFormat, renderSize.x(), renderSize.y(),
1622 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
1623 VK_SAMPLE_COUNT_1_BIT, &m_colorImageAlloc);
1624 }
1625
1626 // Create depth image
1627 {
1628 m_depthImage = createImage2DAndBindMemory(depthFormat, renderSize.x(), renderSize.y(),
1629 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_SAMPLE_COUNT_1_BIT,
1630 &m_depthImageAlloc);
1631 }
1632
1633 // Set up image layout transition barriers
1634 {
1635 const VkImageMemoryBarrier colorImageBarrier = {
1636 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // VkStructureType sType;
1637 DE_NULL, // const void* pNext;
1638 0u, // VkAccessFlags srcAccessMask;
1639 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, // VkAccessFlags dstAccessMask;
1640 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout;
1641 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout newLayout;
1642 VK_QUEUE_FAMILY_IGNORED, // uint32_t srcQueueFamilyIndex;
1643 VK_QUEUE_FAMILY_IGNORED, // uint32_t dstQueueFamilyIndex;
1644 *m_colorImage, // VkImage image;
1645 {VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u}, // VkImageSubresourceRange subresourceRange;
1646 };
1647 const VkImageMemoryBarrier depthImageBarrier = {
1648 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // VkStructureType sType;
1649 DE_NULL, // const void* pNext;
1650 0u, // VkAccessFlags srcAccessMask;
1651 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, // VkAccessFlags dstAccessMask;
1652 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout;
1653 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, // VkImageLayout newLayout;
1654 VK_QUEUE_FAMILY_IGNORED, // uint32_t srcQueueFamilyIndex;
1655 VK_QUEUE_FAMILY_IGNORED, // uint32_t dstQueueFamilyIndex;
1656 *m_depthImage, // VkImage image;
1657 {VK_IMAGE_ASPECT_DEPTH_BIT, 0u, 1u, 0u, 1u}, // VkImageSubresourceRange subresourceRange;
1658 };
1659
1660 m_imageLayoutBarriers[0] = colorImageBarrier;
1661 m_imageLayoutBarriers[1] = depthImageBarrier;
1662 }
1663
1664 // Create color attachment view
1665 {
1666 const VkImageViewCreateInfo colorAttachmentViewParams = {
1667 VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, // VkStructureType sType;
1668 DE_NULL, // const void* pNext;
1669 0u, // VkImageViewCreateFlags flags;
1670 *m_colorImage, // VkImage image;
1671 VK_IMAGE_VIEW_TYPE_2D, // VkImageViewType viewType;
1672 colorFormat, // VkFormat format;
1673 ComponentMappingRGBA, // VkComponentMapping components;
1674 {VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u}, // VkImageSubresourceRange subresourceRange;
1675 };
1676
1677 m_colorAttachmentView = createImageView(vk, m_device, &colorAttachmentViewParams);
1678 }
1679
1680 // Create depth attachment view
1681 {
1682 const VkImageViewCreateInfo depthAttachmentViewParams = {
1683 VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, // VkStructureType sType;
1684 DE_NULL, // const void* pNext;
1685 0u, // VkImageViewCreateFlags flags;
1686 *m_depthImage, // VkImage image;
1687 VK_IMAGE_VIEW_TYPE_2D, // VkImageViewType viewType;
1688 depthFormat, // VkFormat format;
1689 ComponentMappingRGBA, // VkComponentMapping components;
1690 {VK_IMAGE_ASPECT_DEPTH_BIT, 0u, 1u, 0u, 1u}, // VkImageSubresourceRange subresourceRange;
1691 };
1692
1693 m_depthAttachmentView = createImageView(vk, m_device, &depthAttachmentViewParams);
1694 }
1695
1696 // Create framebuffer
1697 {
1698 const std::vector<VkImage> images = {
1699 *m_colorImage,
1700 *m_depthImage,
1701 };
1702 const VkImageView attachmentBindInfos[2] = {
1703 *m_colorAttachmentView,
1704 *m_depthAttachmentView,
1705 };
1706
1707 const VkFramebufferCreateInfo framebufferParams = {
1708 VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, // VkStructureType sType;
1709 DE_NULL, // const void* pNext;
1710 0u, // VkFramebufferCreateFlags flags;
1711 *m_renderPass, // VkRenderPass renderPass;
1712 2u, // uint32_t attachmentCount;
1713 attachmentBindInfos, // const VkImageView* pAttachments;
1714 (uint32_t)renderSize.x(), // uint32_t width;
1715 (uint32_t)renderSize.y(), // uint32_t height;
1716 1u, // uint32_t layers;
1717 };
1718
1719 m_renderPass.createFramebuffer(vk, m_device, &framebufferParams, images);
1720 }
1721 }
1722
1723 BasicGraphicsTestInstance::BasicGraphicsTestInstance(Context &context,
1724 const PipelineConstructionType pipelineConstructionType,
1725 const StageFlagVector stages, const bool inRenderPass,
1726 const bool hostQueryReset,
1727 const VkQueryResultFlags queryResultFlags)
1728 : TimestampTestInstance(context, stages, inRenderPass, hostQueryReset, false, queryResultFlags)
1729 , m_pipelineConstructionType(pipelineConstructionType)
1730 , m_renderSize(32, 32)
1731 , m_colorFormat(VK_FORMAT_R8G8B8A8_UNORM)
1732 , m_depthFormat(VK_FORMAT_D16_UNORM)
1733 , m_graphicsPipeline(context.getInstanceInterface(), context.getDeviceInterface(), context.getPhysicalDevice(),
1734 context.getDevice(), context.getDeviceExtensions(), pipelineConstructionType)
1735 {
1736 buildVertexBuffer();
1737
1738 buildRenderPass(m_colorFormat, m_depthFormat);
1739
1740 buildFrameBuffer(m_renderSize, m_colorFormat, m_depthFormat);
1741
1742 // Create pipeline layout
1743 const DeviceInterface &vk = m_context.getDeviceInterface();
1744 const VkPipelineLayoutCreateInfo pipelineLayoutParams = initVulkanStructure();
1745 m_pipelineLayout = PipelineLayoutWrapper(pipelineConstructionType, vk, m_device, &pipelineLayoutParams);
1746 }
1747
1748 BasicGraphicsTestInstance::~BasicGraphicsTestInstance(void)
1749 {
1750 }
1751
1752 static const VkVertexInputBindingDescription defaultVertexInputBindingDescription{
1753 0u, // uint32_t binding;
1754 sizeof(Vertex4RGBA), // uint32_t stride;
1755 VK_VERTEX_INPUT_RATE_VERTEX, // VkVertexInputRate inputRate;
1756 };
1757
1758 static const VkVertexInputAttributeDescription defaultVertexInputAttributeDescriptions[2]{
1759 {
1760 0u, // uint32_t location;
1761 0u, // uint32_t binding;
1762 VK_FORMAT_R32G32B32A32_SFLOAT, // VkFormat format;
1763 0u // uint32_t offset;
1764 },
1765 {
1766 1u, // uint32_t location;
1767 0u, // uint32_t binding;
1768 VK_FORMAT_R32G32B32A32_SFLOAT, // VkFormat format;
1769 offsetof(Vertex4RGBA, color), // uint32_t offset;
1770 }};
1771
1772 static const VkPipelineVertexInputStateCreateInfo defaultVertexInputStateParams{
1773 VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO, // VkStructureType sType;
1774 DE_NULL, // const void* pNext;
1775 0u, // VkPipelineVertexInputStateCreateFlags flags;
1776 1u, // uint32_t vertexBindingDescriptionCount;
1777 &defaultVertexInputBindingDescription, // const VkVertexInputBindingDescription* pVertexBindingDescriptions;
1778 2u, // uint32_t vertexAttributeDescriptionCount;
1779 defaultVertexInputAttributeDescriptions, // const VkVertexInputAttributeDescription* pVertexAttributeDescriptions;
1780 };
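// The fixed vertex layout above assumes the Vertex4RGBA helper type from
// vktPipelineVertexUtil.hpp: two tightly packed vec4s per vertex (position at offset 0,
// color at offsetof(Vertex4RGBA, color)), both fetched as VK_FORMAT_R32G32B32A32_SFLOAT
// from binding 0 at per-vertex rate.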
1781
1782 static const VkPipelineDepthStencilStateCreateInfo defaultDepthStencilStateParams{
1783 VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO, // VkStructureType sType;
1784 DE_NULL, // const void* pNext;
1785 0u, // VkPipelineDepthStencilStateCreateFlags flags;
1786 VK_TRUE, // VkBool32 depthTestEnable;
1787 VK_TRUE, // VkBool32 depthWriteEnable;
1788 VK_COMPARE_OP_LESS_OR_EQUAL, // VkCompareOp depthCompareOp;
1789 VK_FALSE, // VkBool32 depthBoundsTestEnable;
1790 VK_FALSE, // VkBool32 stencilTestEnable;
1791 {
1792 // VkStencilOpState front;
1793 VK_STENCIL_OP_KEEP, // VkStencilOp failOp
1794 VK_STENCIL_OP_KEEP, // VkStencilOp passOp
1795 VK_STENCIL_OP_KEEP, // VkStencilOp depthFailOp
1796 VK_COMPARE_OP_NEVER, // VkCompareOp compareOp
1797 0u, // uint32_t compareMask
1798 0u, // uint32_t writeMask
1799 0u // uint32_t reference
1800 },
1801 {
1802 // VkStencilOpState back;
1803 VK_STENCIL_OP_KEEP, // VkStencilOp failOp
1804 VK_STENCIL_OP_KEEP, // VkStencilOp passOp
1805 VK_STENCIL_OP_KEEP, // VkStencilOp depthFailOp
1806 VK_COMPARE_OP_NEVER, // VkCompareOp compareOp
1807 0u, // uint32_t compareMask
1808 0u, // uint32_t writeMask
1809 0u // uint32_t reference
1810 },
1811 0.0f, // float minDepthBounds;
1812 1.0f, // float maxDepthBounds;
1813 };
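// Stencil testing is disabled above (stencilTestEnable == VK_FALSE), so the two
// VkStencilOpState entries are effectively placeholders; only the depth test
// (LESS_OR_EQUAL with depth writes enabled) affects rendering of the overlapping quads.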
1814
1815 void BasicGraphicsTestInstance::buildPipeline(void)
1816 {
1817 const DeviceInterface &vk = m_context.getDeviceInterface();
1818
1819 auto vertexShaderModule = ShaderWrapper(vk, m_device, m_context.getBinaryCollection().get("color_vert"), 0);
1820 auto fragmentShaderModule = ShaderWrapper(vk, m_device, m_context.getBinaryCollection().get("color_frag"), 0);
1821
1822 const std::vector<VkViewport> viewports{makeViewport(m_renderSize)};
1823 const std::vector<VkRect2D> scissors{makeRect2D(m_renderSize)};
1824
1825 // Create pipeline
1826 m_graphicsPipeline.setDefaultRasterizationState()
1827 .setDefaultColorBlendState()
1828 .setDefaultMultisampleState()
1829 .setupVertexInputState(&defaultVertexInputStateParams)
1830 .setupPreRasterizationShaderState(viewports, scissors, m_pipelineLayout, *m_renderPass, 0u, vertexShaderModule)
1831 .setupFragmentShaderState(m_pipelineLayout, *m_renderPass, 0u, fragmentShaderModule,
1832 &defaultDepthStencilStateParams)
1833 .setupFragmentOutputState(*m_renderPass)
1834 .setMonolithicPipelineLayout(m_pipelineLayout)
1835 .buildPipeline();
1836 }
1837
1838 void BasicGraphicsTestInstance::configCommandBuffer(void)
1839 {
1840 const DeviceInterface &vk = m_context.getDeviceInterface();
1841
1842 const VkClearValue attachmentClearValues[2] = {
1843 defaultClearValue(m_colorFormat),
1844 defaultClearValue(m_depthFormat),
1845 };
1846
1847 beginCommandBuffer(vk, *m_cmdBuffer, 0u);
1848
1849 vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
1850 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
1851 (VkDependencyFlags)0, 0u, DE_NULL, 0u, DE_NULL, DE_LENGTH_OF_ARRAY(m_imageLayoutBarriers),
1852 m_imageLayoutBarriers);
1853
1854 if (!m_hostQueryReset)
1855 vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, TimestampTest::ENTRY_COUNT);
1856
1857 m_renderPass.begin(vk, *m_cmdBuffer, makeRect2D(0, 0, m_renderSize.x(), m_renderSize.y()), 2u,
1858 attachmentClearValues);
1859
1860 m_graphicsPipeline.bind(*m_cmdBuffer);
1861 VkDeviceSize offsets = 0u;
1862 vk.cmdBindVertexBuffers(*m_cmdBuffer, 0u, 1u, &m_vertexBuffer.get(), &offsets);
1863 vk.cmdDraw(*m_cmdBuffer, (uint32_t)m_vertices.size(), 1u, 0u, 0u);
1864
1865 if (m_inRenderPass)
1866 {
1867 uint32_t timestampEntry = 0u;
1868
1869 for (StageFlagVector::const_iterator it = m_stages.begin(); it != m_stages.end(); it++)
1870 {
1871 vk.cmdWriteTimestamp(*m_cmdBuffer, *it, *m_queryPool, timestampEntry++);
1872 }
1873 }
1874
1875 m_renderPass.end(vk, *m_cmdBuffer);
1876
1877 if (!m_inRenderPass)
1878 {
1879 uint32_t timestampEntry = 0u;
1880
1881 for (StageFlagVector::const_iterator it = m_stages.begin(); it != m_stages.end(); it++)
1882 {
1883 vk.cmdWriteTimestamp(*m_cmdBuffer, *it, *m_queryPool, timestampEntry++);
1884 }
1885 }
1886
1887 endCommandBuffer(vk, *m_cmdBuffer);
1888 }
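// Note: each vkCmdWriteTimestamp() above latches a timestamp once all previously
// submitted commands have completed the named pipeline stage, and every query slot
// used must have been reset beforehand, either by the cmdResetQueryPool() recorded
// above or, in the host-query-reset variants, by vkResetQueryPool() from the host
// before submission.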
1889
1890 class AdvGraphicsTest : public BasicGraphicsTest
1891 {
1892 public:
1893 AdvGraphicsTest(tcu::TestContext &testContext, const std::string &name, const TimestampTestParam *param)
1894 : BasicGraphicsTest(testContext, name, param)
1895 {
1896 }
1897
1898 virtual ~AdvGraphicsTest(void)
1899 {
1900 }
1901 virtual void initPrograms(SourceCollections &programCollection) const;
1902 virtual TestInstance *createInstance(Context &context) const;
1903 };
1904
1905 class AdvGraphicsTestInstance : public BasicGraphicsTestInstance
1906 {
1907 public:
1908 AdvGraphicsTestInstance(Context &context, const PipelineConstructionType pipelineConstructionType,
1909 const StageFlagVector stages, const bool inRenderPass, const bool hostQueryReset,
1910 const VkQueryResultFlags queryResultFlags);
1911
1912 virtual ~AdvGraphicsTestInstance(void);
1913 virtual void buildPipeline(void);
1914 virtual void configCommandBuffer(void);
1915
1916 protected:
1917 virtual void featureSupportCheck(void);
1918
1919 protected:
1920 VkPhysicalDeviceFeatures m_features;
1921 uint32_t m_draw_count;
1922 de::MovePtr<Allocation> m_indirectBufferAlloc;
1923 Move<VkBuffer> m_indirectBuffer;
1924 };
1925
1926 void AdvGraphicsTest::initPrograms(SourceCollections &programCollection) const
1927 {
1928 BasicGraphicsTest::initPrograms(programCollection);
1929
1930 programCollection.glslSources.add("unused_geo")
1931 << glu::GeometrySource("#version 310 es\n"
1932 "#extension GL_EXT_geometry_shader : enable\n"
1933 "layout(triangles) in;\n"
1934 "layout(triangle_strip, max_vertices = 3) out;\n"
1935 "layout(location = 0) in highp vec4 in_vtxColor[];\n"
1936 "layout(location = 0) out highp vec4 vtxColor;\n"
1937 "void main (void)\n"
1938 "{\n"
1939 " for(int ndx=0; ndx<3; ndx++)\n"
1940 " {\n"
1941 " gl_Position = gl_in[ndx].gl_Position;\n"
1942 " vtxColor = in_vtxColor[ndx];\n"
1943 " EmitVertex();\n"
1944 " }\n"
1945 " EndPrimitive();\n"
1946 "}\n");
1947
1948 programCollection.glslSources.add("basic_tcs") << glu::TessellationControlSource(
1949 "#version 310 es\n"
1950 "#extension GL_EXT_tessellation_shader : enable\n"
1951 "layout(vertices = 3) out;\n"
1952 "layout(location = 0) in highp vec4 color[];\n"
1953 "layout(location = 0) out highp vec4 vtxColor[];\n"
1954 "void main()\n"
1955 "{\n"
1956 " gl_TessLevelOuter[0] = 4.0;\n"
1957 " gl_TessLevelOuter[1] = 4.0;\n"
1958 " gl_TessLevelOuter[2] = 4.0;\n"
1959 " gl_TessLevelInner[0] = 4.0;\n"
1960 " gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n"
1961 " vtxColor[gl_InvocationID] = color[gl_InvocationID];\n"
1962 "}\n");
1963
1964 programCollection.glslSources.add("basic_tes")
1965 << glu::TessellationEvaluationSource("#version 310 es\n"
1966 "#extension GL_EXT_tessellation_shader : enable\n"
1967 "layout(triangles, fractional_even_spacing, ccw) in;\n"
1968 "layout(location = 0) in highp vec4 colors[];\n"
1969 "layout(location = 0) out highp vec4 vtxColor;\n"
1970 "void main() \n"
1971 "{\n"
1972 " float u = gl_TessCoord.x;\n"
1973 " float v = gl_TessCoord.y;\n"
1974 " float w = gl_TessCoord.z;\n"
1975 " vec4 pos = vec4(0);\n"
1976 " vec4 color = vec4(0);\n"
1977 " pos.xyz += u * gl_in[0].gl_Position.xyz;\n"
1978 " color.xyz += u * colors[0].xyz;\n"
1979 " pos.xyz += v * gl_in[1].gl_Position.xyz;\n"
1980 " color.xyz += v * colors[1].xyz;\n"
1981 " pos.xyz += w * gl_in[2].gl_Position.xyz;\n"
1982 " color.xyz += w * colors[2].xyz;\n"
1983 " pos.w = 1.0;\n"
1984 " color.w = 1.0;\n"
1985 " gl_Position = pos;\n"
1986 " vtxColor = color;\n"
1987 "}\n");
1988 }
1989
1990 TestInstance *AdvGraphicsTest::createInstance(Context &context) const
1991 {
1992 return new AdvGraphicsTestInstance(context, m_pipelineConstructionType, m_stages, m_inRenderPass, m_hostQueryReset,
1993 m_queryResultFlags);
1994 }
1995
1996 void AdvGraphicsTestInstance::featureSupportCheck(void)
1997 {
1998 for (StageFlagVector::const_iterator it = m_stages.begin(); it != m_stages.end(); it++)
1999 {
2000 switch (*it)
2001 {
2002 case VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT:
2003 if (m_features.geometryShader == VK_FALSE)
2004 {
2005 TCU_THROW(NotSupportedError, "Geometry Shader Not Supported");
2006 }
2007 break;
2008 case VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT:
2009 case VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT:
2010 if (m_features.tessellationShader == VK_FALSE)
2011 {
2012 TCU_THROW(NotSupportedError, "Tessellation Not Supported");
2013 }
2014 break;
2015 case VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT:
2016 default:
2017 break;
2018 }
2019 }
2020 }
2021
2022 AdvGraphicsTestInstance::AdvGraphicsTestInstance(Context &context,
2023 const PipelineConstructionType pipelineConstructionType,
2024 const StageFlagVector stages, const bool inRenderPass,
2025 const bool hostQueryReset, const VkQueryResultFlags queryResultFlags)
2026 : BasicGraphicsTestInstance(context, pipelineConstructionType, stages, inRenderPass, hostQueryReset,
2027 queryResultFlags)
2028 , m_features(context.getDeviceFeatures())
2029 {
2030
2031 const DeviceInterface &vk = m_context.getDeviceInterface();
2032
2033 // If necessary feature is not supported, throw error and fail current test
2034 featureSupportCheck();
2035
2036 // Prepare the indirect draw buffer
2037 if (m_features.multiDrawIndirect == VK_TRUE)
2038 {
2039 m_draw_count = 2;
2040 }
2041 else
2042 {
2043 m_draw_count = 1;
2044 }
2045
2046 m_indirectBuffer = createBufferAndBindMemory(32u, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, &m_indirectBufferAlloc);
2047
2048 const VkDrawIndirectCommand indirectCmds[] = {
2049 {
2050 12u, // uint32_t vertexCount;
2051 1u, // uint32_t instanceCount;
2052 0u, // uint32_t firstVertex;
2053 0u, // uint32_t firstInstance;
2054 },
2055 {
2056 12u, // uint32_t vertexCount;
2057 1u, // uint32_t instanceCount;
2058 11u, // uint32_t firstVertex;
2059 0u, // uint32_t firstInstance;
2060 },
2061 };
2062
2063 // Load data into indirect draw buffer
2064 deMemcpy(m_indirectBufferAlloc->getHostPtr(), indirectCmds, m_draw_count * sizeof(VkDrawIndirectCommand));
2065 flushAlloc(vk, m_device, *m_indirectBufferAlloc);
2066 }
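// When multiDrawIndirect is not supported, vkCmdDrawIndirect() may only be used with
// drawCount <= 1, which is why m_draw_count is clamped to 1 above; in that case only
// the first VkDrawIndirectCommand is copied into the indirect buffer and drawn.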
2067
2068 AdvGraphicsTestInstance::~AdvGraphicsTestInstance(void)
2069 {
2070 }
2071
2072 void AdvGraphicsTestInstance::buildPipeline(void)
2073 {
2074 const DeviceInterface &vk = m_context.getDeviceInterface();
2075
2076 const std::vector<VkViewport> viewports{makeViewport(m_renderSize)};
2077 const std::vector<VkRect2D> scissors{makeRect2D(m_renderSize)};
2078
2079 ShaderWrapper vertShaderModule = ShaderWrapper(vk, m_device, m_context.getBinaryCollection().get("color_vert"), 0);
2080 ShaderWrapper fragShaderModule = ShaderWrapper(vk, m_device, m_context.getBinaryCollection().get("color_frag"), 0);
2081 ShaderWrapper tescShaderModule;
2082 ShaderWrapper teseShaderModule;
2083 ShaderWrapper geomShaderModule;
2084
2085 if (m_features.tessellationShader)
2086 {
2087 tescShaderModule = ShaderWrapper(vk, m_device, m_context.getBinaryCollection().get("basic_tcs"), 0);
2088 teseShaderModule = ShaderWrapper(vk, m_device, m_context.getBinaryCollection().get("basic_tes"), 0);
2089 }
2090
2091 if (m_features.geometryShader)
2092 geomShaderModule = ShaderWrapper(vk, m_device, m_context.getBinaryCollection().get("unused_geo"), 0);
2093
2094 // Create pipeline
2095 m_graphicsPipeline
2096 .setDefaultTopology(m_features.tessellationShader ? VK_PRIMITIVE_TOPOLOGY_PATCH_LIST :
2097 VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST)
2098 .setDefaultRasterizationState()
2099 .setDefaultColorBlendState()
2100 .setDefaultMultisampleState()
2101 .setupVertexInputState(&defaultVertexInputStateParams)
2102 .setupPreRasterizationShaderState(viewports, scissors, m_pipelineLayout, *m_renderPass, 0u, vertShaderModule,
2103 DE_NULL, tescShaderModule, teseShaderModule, geomShaderModule)
2104 .setupFragmentShaderState(m_pipelineLayout, *m_renderPass, 0u, fragShaderModule,
2105 &defaultDepthStencilStateParams)
2106 .setupFragmentOutputState(*m_renderPass)
2107 .setMonolithicPipelineLayout(m_pipelineLayout)
2108 .buildPipeline();
2109 }
2110
2111 void AdvGraphicsTestInstance::configCommandBuffer(void)
2112 {
2113 const DeviceInterface &vk = m_context.getDeviceInterface();
2114
2115 const VkClearValue attachmentClearValues[2] = {
2116 defaultClearValue(m_colorFormat),
2117 defaultClearValue(m_depthFormat),
2118 };
2119
2120 beginCommandBuffer(vk, *m_cmdBuffer, 0u);
2121
2122 vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
2123 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
2124 (VkDependencyFlags)0, 0u, DE_NULL, 0u, DE_NULL, DE_LENGTH_OF_ARRAY(m_imageLayoutBarriers),
2125 m_imageLayoutBarriers);
2126
2127 if (!m_hostQueryReset)
2128 vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, TimestampTest::ENTRY_COUNT);
2129
2130 m_renderPass.begin(vk, *m_cmdBuffer, makeRect2D(0, 0, m_renderSize.x(), m_renderSize.y()), 2u,
2131 attachmentClearValues);
2132
2133 m_graphicsPipeline.bind(*m_cmdBuffer);
2134
2135 VkDeviceSize offsets = 0u;
2136 vk.cmdBindVertexBuffers(*m_cmdBuffer, 0u, 1u, &m_vertexBuffer.get(), &offsets);
2137
2138 vk.cmdDrawIndirect(*m_cmdBuffer, *m_indirectBuffer, 0u, m_draw_count, sizeof(VkDrawIndirectCommand));
2139
2140 if (m_inRenderPass)
2141 {
2142 uint32_t timestampEntry = 0u;
2143 for (StageFlagVector::const_iterator it = m_stages.begin(); it != m_stages.end(); it++)
2144 {
2145 vk.cmdWriteTimestamp(*m_cmdBuffer, *it, *m_queryPool, timestampEntry++);
2146 }
2147 }
2148
2149 m_renderPass.end(vk, *m_cmdBuffer);
2150
2151 if (!m_inRenderPass)
2152 {
2153 uint32_t timestampEntry = 0u;
2154 for (StageFlagVector::const_iterator it = m_stages.begin(); it != m_stages.end(); it++)
2155 {
2156 vk.cmdWriteTimestamp(*m_cmdBuffer, *it, *m_queryPool, timestampEntry++);
2157 }
2158 }
2159
2160 endCommandBuffer(vk, *m_cmdBuffer);
2161 }
2162
2163 class BasicComputeTest : public TimestampTest
2164 {
2165 public:
2166 BasicComputeTest(tcu::TestContext &testContext, const std::string &name, const TimestampTestParam *param)
2167 : TimestampTest(testContext, name, param)
2168 {
2169 }
2170
2171 virtual ~BasicComputeTest(void)
2172 {
2173 }
2174 virtual void initPrograms(SourceCollections &programCollection) const;
2175 virtual TestInstance *createInstance(Context &context) const;
2176 };
2177
2178 class BasicComputeTestInstance : public TimestampTestInstance
2179 {
2180 public:
2181 BasicComputeTestInstance(Context &context, const StageFlagVector stages, const bool inRenderPass,
2182 const bool hostQueryReset, VkQueryResultFlags queryResultFlags);
2183
2184 virtual ~BasicComputeTestInstance(void);
2185 virtual void configCommandBuffer(void);
2186
2187 protected:
2188 de::MovePtr<Allocation> m_inputBufAlloc;
2189 Move<VkBuffer> m_inputBuf;
2190 de::MovePtr<Allocation> m_outputBufAlloc;
2191 Move<VkBuffer> m_outputBuf;
2192
2193 Move<VkDescriptorPool> m_descriptorPool;
2194 Move<VkDescriptorSet> m_descriptorSet;
2195 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
2196
2197 Move<VkPipelineLayout> m_pipelineLayout;
2198 Move<VkShaderModule> m_computeShaderModule;
2199 Move<VkPipeline> m_computePipelines;
2200 };
2201
2202 void BasicComputeTest::initPrograms(SourceCollections &programCollection) const
2203 {
2204 TimestampTest::initPrograms(programCollection);
2205
2206 programCollection.glslSources.add("basic_compute") << glu::ComputeSource(
2207 "#version 310 es\n"
2208 "layout(local_size_x = 128) in;\n"
2209 "layout(std430) buffer;\n"
2210 "layout(binding = 0) readonly buffer Input0\n"
2211 "{\n"
2212 " vec4 elements[];\n"
2213 "} input_data0;\n"
2214 "layout(binding = 1) writeonly buffer Output\n"
2215 "{\n"
2216 " vec4 elements[];\n"
2217 "} output_data;\n"
2218 "void main()\n"
2219 "{\n"
2220 " uint ident = gl_GlobalInvocationID.x;\n"
2221 " output_data.elements[ident] = input_data0.elements[ident] * input_data0.elements[ident];\n"
2222 "}");
2223 }
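// Dispatch sizing note: the shader above declares local_size_x = 128 and
// configCommandBuffer() below dispatches 128 workgroups, giving 128 * 128 = 16384
// invocations, one per vec4 element of the 16384-element input and output buffers
// created in the instance constructor.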
2224
2225 TestInstance *BasicComputeTest::createInstance(Context &context) const
2226 {
2227 return new BasicComputeTestInstance(context, m_stages, m_inRenderPass, m_hostQueryReset, m_queryResultFlags);
2228 }
2229
2230 BasicComputeTestInstance::BasicComputeTestInstance(Context &context, const StageFlagVector stages,
2231 const bool inRenderPass, const bool hostQueryReset,
2232 VkQueryResultFlags queryResultFlags)
2233 : TimestampTestInstance(context, stages, inRenderPass, hostQueryReset, false, queryResultFlags)
2234 {
2235 const DeviceInterface &vk = context.getDeviceInterface();
2236 const VkDevice vkDevice = context.getDevice();
2237
2238 // Create buffer object, allocate storage, and generate input data
2239 const VkDeviceSize size = sizeof(tcu::Vec4) * 128u * 128u;
2240
2241 m_inputBuf = createBufferAndBindMemory(size, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, &m_inputBufAlloc);
2242
2243 // Load vertices into buffer
2244 tcu::Vec4 *pVec = reinterpret_cast<tcu::Vec4 *>(m_inputBufAlloc->getHostPtr());
2245
2246 for (uint32_t ndx = 0u; ndx < (128u * 128u); ndx++)
2247 {
2248 for (uint32_t component = 0u; component < 4u; component++)
2249 {
2250 pVec[ndx][component] = (float)(ndx * (component + 1u));
2251 }
2252 }
2253
2254 flushAlloc(vk, vkDevice, *m_inputBufAlloc);
2255
2256 m_outputBuf = createBufferAndBindMemory(size, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, &m_outputBufAlloc);
2257
2258 std::vector<VkDescriptorBufferInfo> descriptorInfos;
2259
2260 descriptorInfos.push_back(makeDescriptorBufferInfo(*m_inputBuf, 0u, size));
2261 descriptorInfos.push_back(makeDescriptorBufferInfo(*m_outputBuf, 0u, size));
2262
2263 // Create descriptor set layout
2264 DescriptorSetLayoutBuilder descLayoutBuilder;
2265
2266 for (uint32_t bindingNdx = 0u; bindingNdx < 2u; bindingNdx++)
2267 {
2268 descLayoutBuilder.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT);
2269 }
2270
2271 m_descriptorSetLayout = descLayoutBuilder.build(vk, vkDevice);
2272
2273 // Create descriptor pool
2274 m_descriptorPool = DescriptorPoolBuilder()
2275 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 2)
2276 .build(vk, vkDevice, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
2277
2278 // Create descriptor set
2279 const VkDescriptorSetAllocateInfo descriptorSetAllocInfo = {
2280 VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, // VkStructureType sType;
2281 DE_NULL, // const void* pNext;
2282 *m_descriptorPool, // VkDescriptorPool descriptorPool;
2283 1u, // uint32_t setLayoutCount;
2284 &m_descriptorSetLayout.get(), // const VkDescriptorSetLayout* pSetLayouts;
2285 };
2286 m_descriptorSet = allocateDescriptorSet(vk, vkDevice, &descriptorSetAllocInfo);
2287
2288 DescriptorSetUpdateBuilder builder;
2289 for (uint32_t descriptorNdx = 0u; descriptorNdx < 2u; descriptorNdx++)
2290 {
2291 builder.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(descriptorNdx),
2292 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &descriptorInfos[descriptorNdx]);
2293 }
2294 builder.update(vk, vkDevice);
2295
2296 // Create compute pipeline layout
2297 const VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo = {
2298 VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, // VkStructureType sType;
2299 DE_NULL, // const void* pNext;
2300 0u, // VkPipelineLayoutCreateFlags flags;
2301 1u, // uint32_t setLayoutCount;
2302 &m_descriptorSetLayout.get(), // const VkDescriptorSetLayout* pSetLayouts;
2303 0u, // uint32_t pushConstantRangeCount;
2304 DE_NULL, // const VkPushConstantRange* pPushConstantRanges;
2305 };
2306
2307 m_pipelineLayout = createPipelineLayout(vk, vkDevice, &pipelineLayoutCreateInfo);
2308
2309 // Create compute shader
2310 VkShaderModuleCreateInfo shaderModuleCreateInfo = {
2311 VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, // VkStructureType sType;
2312 DE_NULL, // const void* pNext;
2313 0u, // VkShaderModuleCreateFlags flags;
2314 m_context.getBinaryCollection().get("basic_compute").getSize(), // uintptr_t codeSize;
2315 (uint32_t *)m_context.getBinaryCollection()
2316 .get("basic_compute")
2317 .getBinary(), // const uint32_t* pCode;
2318
2319 };
2320
2321 m_computeShaderModule = createShaderModule(vk, vkDevice, &shaderModuleCreateInfo);
2322
2323 // Create compute pipeline
2324 const VkPipelineShaderStageCreateInfo stageCreateInfo = {
2325 VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, // VkStructureType sType;
2326 DE_NULL, // const void* pNext;
2327 0u, // VkPipelineShaderStageCreateFlags flags;
2328 VK_SHADER_STAGE_COMPUTE_BIT, // VkShaderStageFlagBits stage;
2329 *m_computeShaderModule, // VkShaderModule module;
2330 "main", // const char* pName;
2331 DE_NULL, // const VkSpecializationInfo* pSpecializationInfo;
2332 };
2333
2334 const VkComputePipelineCreateInfo pipelineCreateInfo = {
2335 VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, // VkStructureType sType;
2336 DE_NULL, // const void* pNext;
2337 0u, // VkPipelineCreateFlags flags;
2338 stageCreateInfo, // VkPipelineShaderStageCreateInfo stage;
2339 *m_pipelineLayout, // VkPipelineLayout layout;
2340 (VkPipeline)0, // VkPipeline basePipelineHandle;
2341 0u, // int32_t basePipelineIndex;
2342 };
2343
2344 m_computePipelines = createComputePipeline(vk, vkDevice, (VkPipelineCache)0u, &pipelineCreateInfo);
2345 }
2346
2347 BasicComputeTestInstance::~BasicComputeTestInstance(void)
2348 {
2349 }
2350
2351 void BasicComputeTestInstance::configCommandBuffer(void)
2352 {
2353 const DeviceInterface &vk = m_context.getDeviceInterface();
2354
2355 beginCommandBuffer(vk, *m_cmdBuffer, 0u);
2356
2357 if (!m_hostQueryReset)
2358 vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, TimestampTest::ENTRY_COUNT);
2359
2360 vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_computePipelines);
2361 vk.cmdBindDescriptorSets(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0u, 1u,
2362 &m_descriptorSet.get(), 0u, DE_NULL);
2363 vk.cmdDispatch(*m_cmdBuffer, 128u, 1u, 1u);
2364
2365 uint32_t timestampEntry = 0u;
2366 for (StageFlagVector::const_iterator it = m_stages.begin(); it != m_stages.end(); it++)
2367 {
2368 vk.cmdWriteTimestamp(*m_cmdBuffer, *it, *m_queryPool, timestampEntry++);
2369 }
2370
2371 endCommandBuffer(vk, *m_cmdBuffer);
2372 }
2373
2374 class TransferTest : public TimestampTest
2375 {
2376 public:
2377 TransferTest(tcu::TestContext &testContext, const std::string &name, const TimestampTestParam *param);
2378
2379 virtual ~TransferTest(void)
2380 {
2381 }
2382 virtual void initPrograms(SourceCollections &programCollection) const;
2383 virtual TestInstance *createInstance(Context &context) const;
2384
2385 protected:
2386 TransferMethod m_method;
2387 };
2388
2389 class TransferTestInstance : public TimestampTestInstance
2390 {
2391 public:
2392 TransferTestInstance(Context &context, const StageFlagVector stages, const bool inRenderPass,
2393 const bool hostQueryReset, const bool transferOnlyQueue, const TransferMethod method,
2394 const VkQueryResultFlags queryResultFlags);
2395
2396 virtual ~TransferTestInstance(void);
2397 virtual void configCommandBuffer(void);
2398 virtual void initialImageTransition(VkCommandBuffer cmdBuffer, VkImage image, VkImageSubresourceRange subRange,
2399 VkImageLayout layout);
2400
2401 protected:
2402 TransferMethod m_method;
2403
2404 VkDeviceSize m_bufSize;
2405 Move<VkBuffer> m_srcBuffer;
2406 Move<VkBuffer> m_dstBuffer;
2407 de::MovePtr<Allocation> m_srcBufferAlloc;
2408 de::MovePtr<Allocation> m_dstBufferAlloc;
2409
2410 VkFormat m_imageFormat;
2411 int32_t m_imageWidth;
2412 int32_t m_imageHeight;
2413 VkDeviceSize m_imageSize;
2414 Move<VkImage> m_srcImage;
2415 Move<VkImage> m_dstImage;
2416 Move<VkImage> m_depthImage;
2417 Move<VkImage> m_msImage;
2418 de::MovePtr<Allocation> m_srcImageAlloc;
2419 de::MovePtr<Allocation> m_dstImageAlloc;
2420 de::MovePtr<Allocation> m_depthImageAlloc;
2421 de::MovePtr<Allocation> m_msImageAlloc;
2422 };
2423
2424 TransferTest::TransferTest(tcu::TestContext &testContext, const std::string &name, const TimestampTestParam *param)
2425 : TimestampTest(testContext, name, param)
2426 {
2427 const TransferTimestampTestParam *transferParam = dynamic_cast<const TransferTimestampTestParam *>(param);
2428 m_method = transferParam->getMethod();
2429 }
2430
2431 void TransferTest::initPrograms(SourceCollections &programCollection) const
2432 {
2433 TimestampTest::initPrograms(programCollection);
2434 }
2435
2436 TestInstance *TransferTest::createInstance(Context &context) const
2437 {
2438 return new TransferTestInstance(context, m_stages, m_inRenderPass, m_hostQueryReset, m_transferOnlyQueue, m_method,
2439 m_queryResultFlags);
2440 }
2441
2442 TransferTestInstance::TransferTestInstance(Context &context, const StageFlagVector stages, const bool inRenderPass,
2443 const bool hostQueryReset, const bool transferOnlyQueue,
2444 const TransferMethod method, const VkQueryResultFlags queryResultFlags)
2445 : TimestampTestInstance(context, stages, inRenderPass, hostQueryReset, transferOnlyQueue, queryResultFlags)
2446 , m_method(method)
2447 , m_bufSize((queryResultFlags & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT) ? 512u : 256u)
2448 , m_imageFormat(VK_FORMAT_R8G8B8A8_UNORM)
2449 , m_imageWidth(4u)
2450 , m_imageHeight(4u)
2451 , m_imageSize(256u)
2452 {
2453 const DeviceInterface &vk = context.getDeviceInterface();
2454
2455 // Create src buffer
2456 m_srcBuffer = createBufferAndBindMemory(
2457 m_bufSize, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT, &m_srcBufferAlloc);
2458
2459 // Init the source buffer memory
2460 char *pBuf = reinterpret_cast<char *>(m_srcBufferAlloc->getHostPtr());
2461 deMemset(pBuf, 0xFF, sizeof(char) * (size_t)m_bufSize);
2462 flushAlloc(vk, m_device, *m_srcBufferAlloc);
2463
2464 // Create dst buffer
2465 m_dstBuffer = createBufferAndBindMemory(
2466 m_bufSize, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, &m_dstBufferAlloc);
2467
2468 // Create src/dst/depth image
2469 m_srcImage = createImage2DAndBindMemory(m_imageFormat, m_imageWidth, m_imageHeight,
2470 VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
2471 VK_SAMPLE_COUNT_1_BIT, &m_srcImageAlloc);
2472 m_dstImage = createImage2DAndBindMemory(m_imageFormat, m_imageWidth, m_imageHeight,
2473 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
2474 VK_SAMPLE_COUNT_1_BIT, &m_dstImageAlloc);
2475 m_depthImage =
2476 createImage2DAndBindMemory(VK_FORMAT_D16_UNORM, m_imageWidth, m_imageHeight, VK_IMAGE_USAGE_TRANSFER_DST_BIT,
2477 VK_SAMPLE_COUNT_1_BIT, &m_depthImageAlloc);
2478 m_msImage = createImage2DAndBindMemory(m_imageFormat, m_imageWidth, m_imageHeight,
2479 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
2480 VK_IMAGE_USAGE_TRANSFER_DST_BIT,
2481 VK_SAMPLE_COUNT_4_BIT, &m_msImageAlloc);
2482 }
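// Buffer size note: when VK_QUERY_RESULT_WITH_AVAILABILITY_BIT is requested, each
// query result copied with vkCmdCopyQueryPoolResults() occupies two 64-bit words
// (value plus availability) instead of one, which is presumably why m_bufSize is
// doubled from 256 to 512 bytes above.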
2483
2484 TransferTestInstance::~TransferTestInstance(void)
2485 {
2486 }
2487
2488 void TransferTestInstance::configCommandBuffer(void)
2489 {
2490 const DeviceInterface &vk = m_context.getDeviceInterface();
2491
2492 beginCommandBuffer(vk, *m_cmdBuffer, 0u);
2493
2494 // Initialize buffer/image
2495 vk.cmdFillBuffer(*m_cmdBuffer, *m_dstBuffer, 0u, m_bufSize, 0x0);
2496
2497 const VkClearColorValue srcClearValue = {{1.0f, 1.0f, 1.0f, 1.0f}};
2498 const VkClearColorValue dstClearValue = {{0.0f, 0.0f, 0.0f, 0.0f}};
2499 const struct VkImageSubresourceRange subRangeColor = {
2500 VK_IMAGE_ASPECT_COLOR_BIT, // VkImageAspectFlags aspectMask;
2501 0u, // uint32_t baseMipLevel;
2502 1u, // uint32_t levelCount;
2503 0u, // uint32_t baseArrayLayer;
2504 1u, // uint32_t layerCount;
2505 };
2506 const struct VkImageSubresourceRange subRangeDepth = {
2507 VK_IMAGE_ASPECT_DEPTH_BIT, // VkImageAspectFlags aspectMask;
2508 0u, // uint32_t baseMipLevel;
2509 1u, // uint32_t levelCount;
2510 0u, // uint32_t baseArrayLayer;
2511 1u, // uint32_t layerCount;
2512 };
2513
2514 initialImageTransition(*m_cmdBuffer, *m_srcImage, subRangeColor, VK_IMAGE_LAYOUT_GENERAL);
2515 initialImageTransition(*m_cmdBuffer, *m_dstImage, subRangeColor, VK_IMAGE_LAYOUT_GENERAL);
2516
2517 if (!m_transferOnlyQueue)
2518 {
2519 vk.cmdClearColorImage(*m_cmdBuffer, *m_srcImage, VK_IMAGE_LAYOUT_GENERAL, &srcClearValue, 1u, &subRangeColor);
2520 vk.cmdClearColorImage(*m_cmdBuffer, *m_dstImage, VK_IMAGE_LAYOUT_GENERAL, &dstClearValue, 1u, &subRangeColor);
2521 }
2522
2523 // synchronize the Clear commands before starting any copy
2524 const vk::VkMemoryBarrier barrier = {
2525 vk::VK_STRUCTURE_TYPE_MEMORY_BARRIER, // VkStructureType sType;
2526 DE_NULL, // const void* pNext;
2527 vk::VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags srcAccessMask;
2528 vk::VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags dstAccessMask;
2529 };
2530 vk.cmdPipelineBarrier(*m_cmdBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, 0u, 1u,
2531 &barrier, 0u, DE_NULL, 0u, DE_NULL);
2532
2533 if (!m_hostQueryReset)
2534 vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, TimestampTest::ENTRY_COUNT);
2535
2536 // Copy Operations
2537 const VkImageSubresourceLayers imgSubResCopy = {
2538 VK_IMAGE_ASPECT_COLOR_BIT, // VkImageAspectFlags aspectMask;
2539 0u, // uint32_t mipLevel;
2540 0u, // uint32_t baseArrayLayer;
2541 1u, // uint32_t layerCount;
2542 };
2543
2544 const VkOffset3D nullOffset = {0u, 0u, 0u};
2545 const VkExtent3D imageExtent = {(uint32_t)m_imageWidth, (uint32_t)m_imageHeight, 1u};
2546 const VkOffset3D imageOffset = {(int)m_imageWidth, (int)m_imageHeight, 1};
2547
2548 switch (m_method)
2549 {
2550 case TRANSFER_METHOD_COPY_BUFFER:
2551 {
2552 const VkBufferCopy copyBufRegion = {
2553 0u, // VkDeviceSize srcOffset;
2554 0u, // VkDeviceSize dstOffset;
2555 m_bufSize, // VkDeviceSize size;
2556 };
2557
2558 vk.cmdCopyBuffer(*m_cmdBuffer, *m_srcBuffer, *m_dstBuffer, 1u, &copyBufRegion);
2559 break;
2560 }
2561 case TRANSFER_METHOD_COPY_IMAGE:
2562 {
2563 const VkImageCopy copyImageRegion = {
2564 imgSubResCopy, // VkImageSubresourceLayers srcSubresource;
2565 nullOffset, // VkOffset3D srcOffset;
2566 imgSubResCopy, // VkImageSubresourceLayers dstSubresource;
2567 nullOffset, // VkOffset3D dstOffset;
2568 imageExtent, // VkExtent3D extent;
2569
2570 };
2571
2572 vk.cmdCopyImage(*m_cmdBuffer, *m_srcImage, VK_IMAGE_LAYOUT_GENERAL, *m_dstImage, VK_IMAGE_LAYOUT_GENERAL, 1u,
2573 &copyImageRegion);
2574 break;
2575 }
2576 case TRANSFER_METHOD_COPY_BUFFER_TO_IMAGE:
2577 {
2578 const VkBufferImageCopy bufImageCopy = {
2579 0u, // VkDeviceSize bufferOffset;
2580 (uint32_t)m_imageWidth, // uint32_t bufferRowLength;
2581 (uint32_t)m_imageHeight, // uint32_t bufferImageHeight;
2582 imgSubResCopy, // VkImageSubresourceLayers imageSubresource;
2583 nullOffset, // VkOffset3D imageOffset;
2584 imageExtent, // VkExtent3D imageExtent;
2585 };
2586
2587 vk.cmdCopyBufferToImage(*m_cmdBuffer, *m_srcBuffer, *m_dstImage, VK_IMAGE_LAYOUT_GENERAL, 1u, &bufImageCopy);
2588 break;
2589 }
2590 case TRANSFER_METHOD_COPY_IMAGE_TO_BUFFER:
2591 {
2592 const VkBufferImageCopy imgBufferCopy = {
2593 0u, // VkDeviceSize bufferOffset;
2594 (uint32_t)m_imageWidth, // uint32_t bufferRowLength;
2595 (uint32_t)m_imageHeight, // uint32_t bufferImageHeight;
2596 imgSubResCopy, // VkImageSubresourceLayers imageSubresource;
2597 nullOffset, // VkOffset3D imageOffset;
2598 imageExtent, // VkExtent3D imageExtent;
2599 };
2600
2601 vk.cmdCopyImageToBuffer(*m_cmdBuffer, *m_srcImage, VK_IMAGE_LAYOUT_GENERAL, *m_dstBuffer, 1u, &imgBufferCopy);
2602 break;
2603 }
2604 case TRANSFER_METHOD_BLIT_IMAGE:
2605 {
2606 const VkImageBlit imageBlt = {imgSubResCopy, // VkImageSubresourceLayers srcSubresource;
2607 {
2608 nullOffset,
2609 imageOffset,
2610 },
2611 imgSubResCopy, // VkImageSubresourceLayers dstSubresource;
2612 {
2613 nullOffset,
2614 imageOffset,
2615 }};
2616
2617 vk.cmdBlitImage(*m_cmdBuffer, *m_srcImage, VK_IMAGE_LAYOUT_GENERAL, *m_dstImage, VK_IMAGE_LAYOUT_GENERAL, 1u,
2618 &imageBlt, VK_FILTER_NEAREST);
2619 break;
2620 }
2621 case TRANSFER_METHOD_CLEAR_COLOR_IMAGE:
2622 {
2623 vk.cmdClearColorImage(*m_cmdBuffer, *m_dstImage, VK_IMAGE_LAYOUT_GENERAL, &srcClearValue, 1u, &subRangeColor);
2624 break;
2625 }
2626 case TRANSFER_METHOD_CLEAR_DEPTH_STENCIL_IMAGE:
2627 {
2628 initialImageTransition(*m_cmdBuffer, *m_depthImage, subRangeDepth, VK_IMAGE_LAYOUT_GENERAL);
2629
2630 const VkClearDepthStencilValue clearDSValue = {
2631 1.0f, // float depth;
2632 0u, // uint32_t stencil;
2633 };
2634
2635 vk.cmdClearDepthStencilImage(*m_cmdBuffer, *m_depthImage, VK_IMAGE_LAYOUT_GENERAL, &clearDSValue, 1u,
2636 &subRangeDepth);
2637 break;
2638 }
2639 case TRANSFER_METHOD_FILL_BUFFER:
2640 {
2641 vk.cmdFillBuffer(*m_cmdBuffer, *m_dstBuffer, 0u, m_bufSize, 0x0);
2642 break;
2643 }
2644 case TRANSFER_METHOD_UPDATE_BUFFER:
2645 {
2646 const uint32_t data[] = {0xdeadbeef, 0xabcdef00, 0x12345678};
2647
2648 vk.cmdUpdateBuffer(*m_cmdBuffer, *m_dstBuffer, 0x10, sizeof(data), data);
2649 break;
2650 }
2651 case TRANSFER_METHOD_COPY_QUERY_POOL_RESULTS:
2652 case TRANSFER_METHOD_COPY_QUERY_POOL_RESULTS_STRIDE_ZERO:
2653 {
2654 vk.cmdWriteTimestamp(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, *m_queryPool, 0u);
2655 VkDeviceSize copyStride = m_method == TRANSFER_METHOD_COPY_QUERY_POOL_RESULTS_STRIDE_ZERO ? 0u : 8u;
2656 vk.cmdCopyQueryPoolResults(*m_cmdBuffer, *m_queryPool, 0u, 1u, *m_dstBuffer, 0u, copyStride,
2657 VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT);
2658
2659 const vk::VkBufferMemoryBarrier bufferBarrier = {
2660 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, // VkStructureType sType;
2661 DE_NULL, // const void* pNext;
2662 vk::VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags srcAccessMask;
2663 vk::VK_ACCESS_HOST_READ_BIT, // VkAccessFlags dstAccessMask;
2664 VK_QUEUE_FAMILY_IGNORED, // uint32_t srcQueueFamilyIndex;
2665 VK_QUEUE_FAMILY_IGNORED, // uint32_t dstQueueFamilyIndex;
2666 *m_dstBuffer, // VkBuffer buffer;
2667 0ull, // VkDeviceSize offset;
2668 VK_WHOLE_SIZE // VkDeviceSize size;
2669 };
2670
2671 vk.cmdPipelineBarrier(*m_cmdBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, 0u, 0u,
2672 DE_NULL, 1u, &bufferBarrier, 0u, DE_NULL);
2673
2674 vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, 1u);
2675 break;
2676 }
2677 case TRANSFER_METHOD_RESOLVE_IMAGE:
2678 {
2679 const VkImageResolve imageResolve = {
2680 imgSubResCopy, // VkImageSubresourceLayers srcSubresource;
2681 nullOffset, // VkOffset3D srcOffset;
2682 imgSubResCopy, // VkImageSubresourceLayers dstSubresource;
2683 nullOffset, // VkOffset3D dstOffset;
2684 imageExtent, // VkExtent3D extent;
2685 };
2686
2687 initialImageTransition(*m_cmdBuffer, *m_msImage, subRangeColor, VK_IMAGE_LAYOUT_GENERAL);
2688 vk.cmdClearColorImage(*m_cmdBuffer, *m_msImage, VK_IMAGE_LAYOUT_GENERAL, &srcClearValue, 1u, &subRangeColor);
2689 vk.cmdPipelineBarrier(*m_cmdBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, 0u,
2690 1u, &barrier, 0u, DE_NULL, 0u, DE_NULL);
2691 vk.cmdResolveImage(*m_cmdBuffer, *m_msImage, VK_IMAGE_LAYOUT_GENERAL, *m_dstImage, VK_IMAGE_LAYOUT_GENERAL, 1u,
2692 &imageResolve);
2693 break;
2694 }
2695 default:
2696 DE_FATAL("Unknown Transfer Method!");
2697 break;
2698 }
2699
2700 uint32_t timestampEntry = 0u;
2701
2702 for (StageFlagVector::const_iterator it = m_stages.begin(); it != m_stages.end(); it++)
2703 {
2704 vk.cmdWriteTimestamp(*m_cmdBuffer, *it, *m_queryPool, timestampEntry++);
2705 }
2706
2707 endCommandBuffer(vk, *m_cmdBuffer);
2708 }
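// Note on the COPY_QUERY_POOL_RESULTS variants above: only a single query is copied
// (firstQuery = 0, queryCount = 1), so the stride value is effectively unused; the
// _STRIDE_ZERO variant simply checks that a zero stride is accepted in that case.
// VK_QUERY_RESULT_WAIT_BIT makes the copy wait for the timestamp to become available
// before writing it to m_dstBuffer.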
2709
2710 void TransferTestInstance::initialImageTransition(VkCommandBuffer cmdBuffer, VkImage image,
2711 VkImageSubresourceRange subRange, VkImageLayout layout)
2712 {
2713 const DeviceInterface &vk = m_context.getDeviceInterface();
2714
2715 const VkImageMemoryBarrier imageMemBarrier = {
2716 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // VkStructureType sType;
2717 DE_NULL, // const void* pNext;
2718 0u, // VkAccessFlags srcAccessMask;
2719 VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags dstAccessMask;
2720 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout;
2721 layout, // VkImageLayout newLayout;
2722 VK_QUEUE_FAMILY_IGNORED, // uint32_t srcQueueFamilyIndex;
2723 VK_QUEUE_FAMILY_IGNORED, // uint32_t dstQueueFamilyIndex;
2724 image, // VkImage image;
2725 subRange // VkImageSubresourceRange subresourceRange;
2726 };
2727
2728 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, DE_NULL, 0,
2729 DE_NULL, 1, &imageMemBarrier);
2730 }
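// The transition above deliberately uses VK_IMAGE_LAYOUT_UNDEFINED as oldLayout, which
// allows the implementation to discard any previous contents; these tests only measure
// timestamps and never verify the transferred pixel data, so no defined contents need
// to be preserved.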
2731
2732 class FillBufferBeforeCopyTest : public vkt::TestCase
2733 {
2734 public:
2735 FillBufferBeforeCopyTest(tcu::TestContext &testContext, const std::string &name) : vkt::TestCase(testContext, name)
2736 {
2737 }
2738 virtual ~FillBufferBeforeCopyTest(void)
2739 {
2740 }
2741 virtual void initPrograms(SourceCollections &programCollection) const;
2742 virtual TestInstance *createInstance(Context &context) const;
2743 };
2744
2745 class FillBufferBeforeCopyTestInstance : public vkt::TestInstance
2746 {
2747 public:
2748 FillBufferBeforeCopyTestInstance(Context &context);
2749 virtual ~FillBufferBeforeCopyTestInstance(void)
2750 {
2751 }
2752 virtual tcu::TestStatus iterate(void);
2753
2754 protected:
2755 struct TimestampWithAvailability
2756 {
2757 uint64_t timestamp;
2758 uint64_t availability;
2759 };
2760
2761 Move<VkCommandPool> m_cmdPool;
2762 Move<VkCommandBuffer> m_cmdBuffer;
2763 Move<VkQueryPool> m_queryPool;
2764
2765 Move<VkBuffer> m_resultBuffer;
2766 de::MovePtr<Allocation> m_resultBufferMemory;
2767 };
2768
2769 void FillBufferBeforeCopyTest::initPrograms(SourceCollections &programCollection) const
2770 {
2771 vkt::TestCase::initPrograms(programCollection);
2772 }
2773
2774 TestInstance *FillBufferBeforeCopyTest::createInstance(Context &context) const
2775 {
2776 return new FillBufferBeforeCopyTestInstance(context);
2777 }
2778
2779 FillBufferBeforeCopyTestInstance::FillBufferBeforeCopyTestInstance(Context &context) : vkt::TestInstance(context)
2780 {
2781 const DeviceInterface &vk = context.getDeviceInterface();
2782 const VkDevice vkDevice = context.getDevice();
2783 const uint32_t queueFamilyIndex = context.getUniversalQueueFamilyIndex();
2784 Allocator &allocator = m_context.getDefaultAllocator();
2785
2786 // Check support for timestamp queries
2787 checkTimestampsSupported(context.getInstanceInterface(), context.getPhysicalDevice(), queueFamilyIndex);
2788
2789 const VkQueryPoolCreateInfo queryPoolParams = {
2790 VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, // VkStructureType sType;
2791 DE_NULL, // const void* pNext;
2792 0u, // VkQueryPoolCreateFlags flags;
2793 VK_QUERY_TYPE_TIMESTAMP, // VkQueryType queryType;
2794 1u, // uint32_t queryCount;
        0u, // VkQueryPipelineStatisticFlags pipelineStatistics;
    };

    m_queryPool = createQueryPool(vk, vkDevice, &queryPoolParams);
    m_cmdPool = createCommandPool(vk, vkDevice, VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, queueFamilyIndex);
    m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, *m_cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY);

    // Create results buffer.
    const VkBufferCreateInfo bufferCreateInfo = {
        VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, // VkStructureType sType;
        DE_NULL, // const void* pNext;
        0u, // VkBufferCreateFlags flags;
        sizeof(TimestampWithAvailability), // VkDeviceSize size;
        VK_BUFFER_USAGE_TRANSFER_DST_BIT, // VkBufferUsageFlags usage;
        VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
        1u, // uint32_t queueFamilyIndexCount;
        &queueFamilyIndex // const uint32_t* pQueueFamilyIndices;
    };

    m_resultBuffer = createBuffer(vk, vkDevice, &bufferCreateInfo);
    m_resultBufferMemory =
        allocator.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_resultBuffer), MemoryRequirement::HostVisible);
    VK_CHECK(vk.bindBufferMemory(vkDevice, *m_resultBuffer, m_resultBufferMemory->getMemory(),
                                 m_resultBufferMemory->getOffset()));

    const vk::VkBufferMemoryBarrier fillBufferBarrier = {
        vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, // VkStructureType sType;
        DE_NULL, // const void* pNext;
        vk::VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags srcAccessMask;
        vk::VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags dstAccessMask;
        VK_QUEUE_FAMILY_IGNORED, // uint32_t srcQueueFamilyIndex;
        VK_QUEUE_FAMILY_IGNORED, // uint32_t dstQueueFamilyIndex;
        *m_resultBuffer, // VkBuffer buffer;
        0ull, // VkDeviceSize offset;
        VK_WHOLE_SIZE // VkDeviceSize size;
    };

    const vk::VkBufferMemoryBarrier bufferBarrier = {
        vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, // VkStructureType sType;
        DE_NULL, // const void* pNext;
        vk::VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags srcAccessMask;
        vk::VK_ACCESS_HOST_READ_BIT, // VkAccessFlags dstAccessMask;
        VK_QUEUE_FAMILY_IGNORED, // uint32_t srcQueueFamilyIndex;
        VK_QUEUE_FAMILY_IGNORED, // uint32_t dstQueueFamilyIndex;
        *m_resultBuffer, // VkBuffer buffer;
        0ull, // VkDeviceSize offset;
        VK_WHOLE_SIZE // VkDeviceSize size;
    };

    // Prepare command buffer.
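    // The recorded workload zero-fills the result buffer, waits for the fill with a
    // transfer-to-transfer barrier, writes the timestamp and copies the query result (with its
    // availability word) over the zeroed data, and finally makes the copy visible to the host.
    // iterate() then requires a non-zero timestamp whenever the availability word is non-zero.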
    beginCommandBuffer(vk, *m_cmdBuffer, 0u);
    vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, 1u);
    vk.cmdFillBuffer(*m_cmdBuffer, *m_resultBuffer, 0u, bufferCreateInfo.size, 0u);
    vk.cmdPipelineBarrier(*m_cmdBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, 0u, 0u,
                          DE_NULL, 1u, &fillBufferBarrier, 0u, DE_NULL);
    vk.cmdWriteTimestamp(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, *m_queryPool, 0u);
    vk.cmdCopyQueryPoolResults(*m_cmdBuffer, *m_queryPool, 0u, 1u, *m_resultBuffer, 0u,
                               sizeof(TimestampWithAvailability),
                               (VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WITH_AVAILABILITY_BIT));
    vk.cmdPipelineBarrier(*m_cmdBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, 0u, 0u,
                          DE_NULL, 1u, &bufferBarrier, 0u, DE_NULL);
    endCommandBuffer(vk, *m_cmdBuffer);
}

tcu::TestStatus FillBufferBeforeCopyTestInstance::iterate(void)
{
    const DeviceInterface &vk = m_context.getDeviceInterface();
    const VkDevice vkDevice = m_context.getDevice();
    const VkQueue queue = m_context.getUniversalQueue();
    TimestampWithAvailability ta;

    submitCommandsAndWait(vk, vkDevice, queue, m_cmdBuffer.get());
    invalidateAlloc(vk, vkDevice, *m_resultBufferMemory);
    deMemcpy(&ta, m_resultBufferMemory->getHostPtr(), sizeof(ta));
    if (ta.availability)
    {
        if (ta.timestamp == 0)
        {
            return tcu::TestStatus::fail("Timestamp not written");
        }
    }
    return tcu::TestStatus::pass("Pass");
}

class ResetTimestampQueryBeforeCopyTest : public vkt::TestCase
{
public:
    ResetTimestampQueryBeforeCopyTest(tcu::TestContext &testContext, const std::string &name)
        : vkt::TestCase(testContext, name)
    {
    }
    virtual ~ResetTimestampQueryBeforeCopyTest(void)
    {
    }
    virtual void initPrograms(SourceCollections &programCollection) const;
    virtual TestInstance *createInstance(Context &context) const;
};

class ResetTimestampQueryBeforeCopyTestInstance : public vkt::TestInstance
{
public:
    ResetTimestampQueryBeforeCopyTestInstance(Context &context);
    virtual ~ResetTimestampQueryBeforeCopyTestInstance(void)
    {
    }
    virtual tcu::TestStatus iterate(void);

protected:
    struct TimestampWithAvailability
    {
        uint64_t timestamp;
        uint64_t availability;
    };

    Move<VkCommandPool> m_cmdPool;
    Move<VkCommandBuffer> m_cmdBuffer;
    Move<VkQueryPool> m_queryPool;

    Move<VkBuffer> m_resultBuffer;
    de::MovePtr<Allocation> m_resultBufferMemory;
};

void ResetTimestampQueryBeforeCopyTest::initPrograms(SourceCollections &programCollection) const
{
    vkt::TestCase::initPrograms(programCollection);
}

TestInstance *ResetTimestampQueryBeforeCopyTest::createInstance(Context &context) const
{
    return new ResetTimestampQueryBeforeCopyTestInstance(context);
}

ResetTimestampQueryBeforeCopyTestInstance::ResetTimestampQueryBeforeCopyTestInstance(Context &context)
    : vkt::TestInstance(context)
{
    const DeviceInterface &vk = context.getDeviceInterface();
    const VkDevice vkDevice = context.getDevice();
    const uint32_t queueFamilyIndex = context.getUniversalQueueFamilyIndex();
    Allocator &allocator = m_context.getDefaultAllocator();

    // Check support for timestamp queries
    checkTimestampsSupported(context.getInstanceInterface(), context.getPhysicalDevice(), queueFamilyIndex);

    const VkQueryPoolCreateInfo queryPoolParams = {
        VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, // VkStructureType sType;
        DE_NULL, // const void* pNext;
        0u, // VkQueryPoolCreateFlags flags;
        VK_QUERY_TYPE_TIMESTAMP, // VkQueryType queryType;
        1u, // uint32_t queryCount;
        0u, // VkQueryPipelineStatisticFlags pipelineStatistics;
    };

    m_queryPool = createQueryPool(vk, vkDevice, &queryPoolParams);
    m_cmdPool = createCommandPool(vk, vkDevice, VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, queueFamilyIndex);
    m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, *m_cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY);

    // Create results buffer.
    const VkBufferCreateInfo bufferCreateInfo = {
        VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, // VkStructureType sType;
        DE_NULL, // const void* pNext;
        0u, // VkBufferCreateFlags flags;
        sizeof(TimestampWithAvailability), // VkDeviceSize size;
        VK_BUFFER_USAGE_TRANSFER_DST_BIT, // VkBufferUsageFlags usage;
        VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
        1u, // uint32_t queueFamilyIndexCount;
        &queueFamilyIndex // const uint32_t* pQueueFamilyIndices;
    };

    m_resultBuffer = createBuffer(vk, vkDevice, &bufferCreateInfo);
    m_resultBufferMemory =
        allocator.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_resultBuffer), MemoryRequirement::HostVisible);
    VK_CHECK(vk.bindBufferMemory(vkDevice, *m_resultBuffer, m_resultBufferMemory->getMemory(),
                                 m_resultBufferMemory->getOffset()));

    const vk::VkBufferMemoryBarrier bufferBarrier = {
        vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, // VkStructureType sType;
        DE_NULL, // const void* pNext;
        vk::VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags srcAccessMask;
        vk::VK_ACCESS_HOST_READ_BIT, // VkAccessFlags dstAccessMask;
        VK_QUEUE_FAMILY_IGNORED, // uint32_t srcQueueFamilyIndex;
        VK_QUEUE_FAMILY_IGNORED, // uint32_t dstQueueFamilyIndex;
        *m_resultBuffer, // VkBuffer buffer;
        0ull, // VkDeviceSize offset;
        VK_WHOLE_SIZE // VkDeviceSize size;
    };

    // Prepare command buffer.
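    // The timestamp query is written and then immediately reset, so the availability word copied
    // below must read back as zero. VK_QUERY_RESULT_WAIT_BIT is deliberately not used here: after
    // the reset the query never becomes available again, so waiting on it would not complete.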
    beginCommandBuffer(vk, *m_cmdBuffer, 0u);
    vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, 1u);
    vk.cmdWriteTimestamp(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, *m_queryPool, 0u);
    vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, 1u);
    vk.cmdCopyQueryPoolResults(*m_cmdBuffer, *m_queryPool, 0u, 1u, *m_resultBuffer, 0u,
                               sizeof(TimestampWithAvailability),
                               (VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WITH_AVAILABILITY_BIT));
    vk.cmdPipelineBarrier(*m_cmdBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, 0u, 0u,
                          DE_NULL, 1u, &bufferBarrier, 0u, DE_NULL);
    endCommandBuffer(vk, *m_cmdBuffer);
}

tcu::TestStatus ResetTimestampQueryBeforeCopyTestInstance::iterate(void)
{
    const DeviceInterface &vk = m_context.getDeviceInterface();
    const VkDevice vkDevice = m_context.getDevice();
    const VkQueue queue = m_context.getUniversalQueue();
    TimestampWithAvailability ta;

    submitCommandsAndWait(vk, vkDevice, queue, m_cmdBuffer.get());
    invalidateAlloc(vk, vkDevice, *m_resultBufferMemory);
    deMemcpy(&ta, m_resultBufferMemory->getHostPtr(), sizeof(ta));
    return ((ta.availability != 0) ? tcu::TestStatus::fail("Availability bit nonzero after resetting query") :
                                     tcu::TestStatus::pass("Pass"));
}

class TwoCmdBuffersTest : public TimestampTest
{
public:
    TwoCmdBuffersTest(tcu::TestContext &testContext, const std::string &name, const TwoCmdBuffersTestParam *param)
        : TimestampTest(testContext, name, param)
        , m_cmdBufferLevel(param->getCmdBufferLevel())
    {
    }
    virtual ~TwoCmdBuffersTest(void)
    {
    }
    virtual TestInstance *createInstance(Context &context) const;
    virtual void checkSupport(Context &context) const;

protected:
    VkCommandBufferLevel m_cmdBufferLevel;
};

class TwoCmdBuffersTestInstance : public TimestampTestInstance
{
public:
    TwoCmdBuffersTestInstance(Context &context, const StageFlagVector stages, const bool inRenderPass,
                              const bool hostQueryReset, const bool transferOnlyQueue,
                              VkCommandBufferLevel cmdBufferLevel, VkQueryResultFlags queryResultFlags);
    virtual ~TwoCmdBuffersTestInstance(void);
    virtual tcu::TestStatus iterate(void);

protected:
    virtual void configCommandBuffer(void);

protected:
    Move<VkCommandBuffer> m_secondCmdBuffer;
    Move<VkBuffer> m_dstBuffer;
    de::MovePtr<Allocation> m_dstBufferAlloc;
    VkCommandBufferLevel m_cmdBufferLevel;
};

TestInstance *TwoCmdBuffersTest::createInstance(Context &context) const
{
    return new TwoCmdBuffersTestInstance(context, m_stages, m_inRenderPass, m_hostQueryReset, m_transferOnlyQueue,
                                         m_cmdBufferLevel, m_queryResultFlags);
}

void TwoCmdBuffersTest::checkSupport(Context &context) const
{
    TimestampTest::checkSupport(context);
#ifdef CTS_USES_VULKANSC
    if (m_cmdBufferLevel == VK_COMMAND_BUFFER_LEVEL_SECONDARY &&
        context.getDeviceVulkanSC10Properties().secondaryCommandBufferNullOrImagelessFramebuffer == VK_FALSE)
        TCU_THROW(NotSupportedError, "secondaryCommandBufferNullFramebuffer is not supported");
#endif
}

TwoCmdBuffersTestInstance::TwoCmdBuffersTestInstance(Context &context, const StageFlagVector stages,
                                                     const bool inRenderPass, const bool hostQueryReset,
                                                     const bool transferOnlyQueue, VkCommandBufferLevel cmdBufferLevel,
                                                     VkQueryResultFlags queryResultFlags)
    : TimestampTestInstance(context, stages, inRenderPass, hostQueryReset, transferOnlyQueue, queryResultFlags)
    , m_cmdBufferLevel(cmdBufferLevel)
{
    const DeviceInterface &vk = context.getDeviceInterface();

    m_secondCmdBuffer = allocateCommandBuffer(vk, m_device, *m_cmdPool, cmdBufferLevel);
    m_dstBuffer = createBufferAndBindMemory(1024, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT,
                                            &m_dstBufferAlloc);
}

TwoCmdBuffersTestInstance::~TwoCmdBuffersTestInstance(void)
{
}

void TwoCmdBuffersTestInstance::configCommandBuffer(void)
{
    const DeviceInterface &vk = m_context.getDeviceInterface();

    const VkCommandBufferBeginInfo cmdBufferBeginInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, // VkStructureType sType;
        DE_NULL, // const void* pNext;
        0u, // VkCommandBufferUsageFlags flags;
        (const VkCommandBufferInheritanceInfo *)DE_NULL // const VkCommandBufferInheritanceInfo* pInheritanceInfo;
    };

    const vk::VkBufferMemoryBarrier bufferBarrier = {
        vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, // VkStructureType sType;
        DE_NULL, // const void* pNext;
        vk::VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags srcAccessMask;
        vk::VK_ACCESS_HOST_READ_BIT, // VkAccessFlags dstAccessMask;
        VK_QUEUE_FAMILY_IGNORED, // uint32_t srcQueueFamilyIndex;
        VK_QUEUE_FAMILY_IGNORED, // uint32_t dstQueueFamilyIndex;
        *m_dstBuffer, // VkBuffer buffer;
        0ull, // VkDeviceSize offset;
        VK_WHOLE_SIZE // VkDeviceSize size;
    };

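    // Two recording strategies are exercised. With primary-level command buffers the timestamp is
    // written in the first primary buffer and copied to m_dstBuffer from a second primary buffer.
    // With secondary-level command buffers the timestamp is written in a secondary buffer that the
    // primary buffer executes; the copy stays in the primary buffer unless a transfer-only queue is
    // used, in which case vkCmdCopyQueryPoolResults is skipped there.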
    if (m_cmdBufferLevel == VK_COMMAND_BUFFER_LEVEL_PRIMARY)
    {
        VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
        if (!m_hostQueryReset)
            vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, TimestampTest::ENTRY_COUNT);
        vk.cmdWriteTimestamp(*m_cmdBuffer, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, *m_queryPool, 0);
        VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
        VK_CHECK(vk.beginCommandBuffer(*m_secondCmdBuffer, &cmdBufferBeginInfo));
        vk.cmdCopyQueryPoolResults(*m_secondCmdBuffer, *m_queryPool, 0u, 1u, *m_dstBuffer, 0u, 0u, m_queryResultFlags);
        vk.cmdPipelineBarrier(*m_secondCmdBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT,
                              0u, 0u, DE_NULL, 1u, &bufferBarrier, 0u, DE_NULL);
        VK_CHECK(vk.endCommandBuffer(*m_secondCmdBuffer));
    }
    else
    {
        const VkCommandBufferInheritanceInfo inheritanceInfo = {
            VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, // VkStructureType sType;
            DE_NULL, // const void* pNext;
            DE_NULL, // VkRenderPass renderPass;
            0u, // uint32_t subpass;
            DE_NULL, // VkFramebuffer framebuffer;
            VK_FALSE, // VkBool32 occlusionQueryEnable;
            0u, // VkQueryControlFlags queryFlags;
            0u // VkQueryPipelineStatisticFlags pipelineStatistics;
        };

        const VkCommandBufferBeginInfo cmdBufferBeginInfoSecondary = {
            VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, // VkStructureType sType;
            DE_NULL, // const void* pNext;
            0u, // VkCommandBufferUsageFlags flags;
            &inheritanceInfo // const VkCommandBufferInheritanceInfo* pInheritanceInfo;
        };

        VK_CHECK(vk.beginCommandBuffer(*m_secondCmdBuffer, &cmdBufferBeginInfoSecondary));
        if (!m_hostQueryReset)
            vk.cmdResetQueryPool(*m_secondCmdBuffer, *m_queryPool, 0u, TimestampTest::ENTRY_COUNT);
        vk::VkPipelineStageFlagBits pipelineStage =
            m_transferOnlyQueue ? VK_PIPELINE_STAGE_TRANSFER_BIT : VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT;
        vk.cmdWriteTimestamp(*m_secondCmdBuffer, pipelineStage, *m_queryPool, 0);
        VK_CHECK(vk.endCommandBuffer(*m_secondCmdBuffer));
        VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
        vk.cmdExecuteCommands(m_cmdBuffer.get(), 1u, &m_secondCmdBuffer.get());
        if (!m_transferOnlyQueue)
            vk.cmdCopyQueryPoolResults(*m_cmdBuffer, *m_queryPool, 0u, 1u, *m_dstBuffer, 0u, 0u, m_queryResultFlags);
        vk.cmdPipelineBarrier(*m_cmdBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, 0u, 0u,
                              DE_NULL, 1u, &bufferBarrier, 0u, DE_NULL);
        VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
    }
}

tcu::TestStatus TwoCmdBuffersTestInstance::iterate(void)
{
    const DeviceInterface &vk = m_context.getDeviceInterface();
    const VkQueue queue = getDeviceQueue(vk, m_device, m_queueFamilyIndex, 0);

    configCommandBuffer();

    const VkCommandBuffer cmdBuffers[] = {m_cmdBuffer.get(), m_secondCmdBuffer.get()};

    const VkSubmitInfo submitInfo = {
        VK_STRUCTURE_TYPE_SUBMIT_INFO, // VkStructureType sType;
        DE_NULL, // const void* pNext;
        0u, // uint32_t waitSemaphoreCount;
        DE_NULL, // const VkSemaphore* pWaitSemaphores;
        (const VkPipelineStageFlags *)DE_NULL, // const VkPipelineStageFlags* pWaitDstStageMask;
        m_cmdBufferLevel == VK_COMMAND_BUFFER_LEVEL_PRIMARY ? 2u : 1u, // uint32_t commandBufferCount;
        cmdBuffers, // const VkCommandBuffer* pCommandBuffers;
        0u, // uint32_t signalSemaphoreCount;
        DE_NULL, // const VkSemaphore* pSignalSemaphores;
    };

    if (m_hostQueryReset)
    {
        // With host query reset enabled the command buffers skip vkCmdResetQueryPool, so the pool
        // must be reset from the host before submitting.
        vk.resetQueryPool(m_device, *m_queryPool, 0u, TimestampTest::ENTRY_COUNT);
    }

    VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, DE_NULL));
    VK_CHECK(vk.queueWaitIdle(queue));

    // The test passes as long as submission and execution complete without a crash.
    return tcu::TestStatus::pass("Pass");
}

class ConsistentQueryResultsTest : public vkt::TestCase
{
public:
    ConsistentQueryResultsTest(tcu::TestContext &testContext, const std::string &name)
        : vkt::TestCase(testContext, name)
    {
    }
    virtual ~ConsistentQueryResultsTest(void)
    {
    }
    virtual void initPrograms(SourceCollections &programCollection) const;
    virtual TestInstance *createInstance(Context &context) const;
};

class ConsistentQueryResultsTestInstance : public vkt::TestInstance
{
public:
    ConsistentQueryResultsTestInstance(Context &context);
    virtual ~ConsistentQueryResultsTestInstance(void)
    {
    }
    virtual tcu::TestStatus iterate(void);

protected:
    Move<VkCommandPool> m_cmdPool;
    Move<VkCommandBuffer> m_cmdBuffer;
    Move<VkQueryPool> m_queryPool;

    uint64_t m_timestampMask;
    Move<VkBuffer> m_resultBuffer32Bits;
    Move<VkBuffer> m_resultBuffer64Bits;
    de::MovePtr<Allocation> m_resultBufferMemory32Bits;
    de::MovePtr<Allocation> m_resultBufferMemory64Bits;
};

void ConsistentQueryResultsTest::initPrograms(SourceCollections &programCollection) const
{
    vkt::TestCase::initPrograms(programCollection);
}

TestInstance *ConsistentQueryResultsTest::createInstance(Context &context) const
{
    return new ConsistentQueryResultsTestInstance(context);
}

ConsistentQueryResultsTestInstance::ConsistentQueryResultsTestInstance(Context &context) : vkt::TestInstance(context)
{
    const DeviceInterface &vk = context.getDeviceInterface();
    const VkDevice vkDevice = context.getDevice();
    const uint32_t queueFamilyIndex = context.getUniversalQueueFamilyIndex();
    Allocator &allocator = m_context.getDefaultAllocator();

    // Check support for timestamp queries
    m_timestampMask =
        checkTimestampsSupported(context.getInstanceInterface(), context.getPhysicalDevice(), queueFamilyIndex);

    const VkQueryPoolCreateInfo queryPoolParams = {
        VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, // VkStructureType sType;
        DE_NULL, // const void* pNext;
        0u, // VkQueryPoolCreateFlags flags;
        VK_QUERY_TYPE_TIMESTAMP, // VkQueryType queryType;
        1u, // uint32_t queryCount;
        0u, // VkQueryPipelineStatisticFlags pipelineStatistics;
    };

    m_queryPool = createQueryPool(vk, vkDevice, &queryPoolParams);
    m_cmdPool = createCommandPool(vk, vkDevice, VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, queueFamilyIndex);
    m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, *m_cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY);

    // Create results buffer.
    VkBufferCreateInfo bufferCreateInfo = {
        VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, // VkStructureType sType;
        DE_NULL, // const void* pNext;
        0u, // VkBufferCreateFlags flags;
        0u, // VkDeviceSize size;
        VK_BUFFER_USAGE_TRANSFER_DST_BIT, // VkBufferUsageFlags usage;
        VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
        1u, // uint32_t queueFamilyIndexCount;
        &queueFamilyIndex // const uint32_t* pQueueFamilyIndices;
    };

    // 32 bits.
    bufferCreateInfo.size = sizeof(uint32_t);
    m_resultBuffer32Bits = createBuffer(vk, vkDevice, &bufferCreateInfo);
    m_resultBufferMemory32Bits = allocator.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_resultBuffer32Bits),
                                                    MemoryRequirement::HostVisible);
    VK_CHECK(vk.bindBufferMemory(vkDevice, *m_resultBuffer32Bits, m_resultBufferMemory32Bits->getMemory(),
                                 m_resultBufferMemory32Bits->getOffset()));

    // 64 bits.
    bufferCreateInfo.size = sizeof(uint64_t);
    m_resultBuffer64Bits = createBuffer(vk, vkDevice, &bufferCreateInfo);
    m_resultBufferMemory64Bits = allocator.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_resultBuffer64Bits),
                                                    MemoryRequirement::HostVisible);
    VK_CHECK(vk.bindBufferMemory(vkDevice, *m_resultBuffer64Bits, m_resultBufferMemory64Bits->getMemory(),
                                 m_resultBufferMemory64Bits->getOffset()));

    vk::VkBufferMemoryBarrier bufferBarrier = {
        vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, // VkStructureType sType;
        DE_NULL, // const void* pNext;
        vk::VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags srcAccessMask;
        vk::VK_ACCESS_HOST_READ_BIT, // VkAccessFlags dstAccessMask;
        VK_QUEUE_FAMILY_IGNORED, // uint32_t srcQueueFamilyIndex;
        VK_QUEUE_FAMILY_IGNORED, // uint32_t dstQueueFamilyIndex;
        DE_NULL, // VkBuffer buffer;
        0ull, // VkDeviceSize offset;
        VK_WHOLE_SIZE // VkDeviceSize size;
    };

    // Prepare command buffer.
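    // The same timestamp query is copied twice from the command buffer, once as a 32-bit value and
    // once as a 64-bit value; iterate() reads both buffers and additionally fetches both widths
    // with vkGetQueryPoolResults() to cross-check them.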
    beginCommandBuffer(vk, *m_cmdBuffer, 0u);
    vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, 1u);
    vk.cmdWriteTimestamp(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, *m_queryPool, 0u);

    // 32 bits.
    bufferBarrier.buffer = *m_resultBuffer32Bits;
    vk.cmdCopyQueryPoolResults(*m_cmdBuffer, *m_queryPool, 0u, 1u, *m_resultBuffer32Bits, 0u, sizeof(uint32_t),
                               VK_QUERY_RESULT_WAIT_BIT);
    vk.cmdPipelineBarrier(*m_cmdBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, 0u, 0u,
                          DE_NULL, 1u, &bufferBarrier, 0u, DE_NULL);

    // 64 bits.
    bufferBarrier.buffer = *m_resultBuffer64Bits;
    vk.cmdCopyQueryPoolResults(*m_cmdBuffer, *m_queryPool, 0u, 1u, *m_resultBuffer64Bits, 0u, sizeof(uint64_t),
                               (VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT));
    vk.cmdPipelineBarrier(*m_cmdBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, 0u, 0u,
                          DE_NULL, 1u, &bufferBarrier, 0u, DE_NULL);

    endCommandBuffer(vk, *m_cmdBuffer);
}

tcu::TestStatus ConsistentQueryResultsTestInstance::iterate(void)
{
    const DeviceInterface &vk = m_context.getDeviceInterface();
    const VkDevice vkDevice = m_context.getDevice();
    const VkQueue queue = m_context.getUniversalQueue();

    uint32_t tsBuffer32Bits;
    uint64_t tsBuffer64Bits;
    uint32_t tsGet32Bits;
    uint64_t tsGet64Bits;

    constexpr uint32_t maxDeUint32Value = std::numeric_limits<uint32_t>::max();

    submitCommandsAndWait(vk, vkDevice, queue, m_cmdBuffer.get());

    // Get results from buffers.
    invalidateAlloc(vk, vkDevice, *m_resultBufferMemory32Bits);
    invalidateAlloc(vk, vkDevice, *m_resultBufferMemory64Bits);
    deMemcpy(&tsBuffer32Bits, m_resultBufferMemory32Bits->getHostPtr(), sizeof(tsBuffer32Bits));
    deMemcpy(&tsBuffer64Bits, m_resultBufferMemory64Bits->getHostPtr(), sizeof(tsBuffer64Bits));

    // Get results with vkGetQueryPoolResults().
    VK_CHECK(vk.getQueryPoolResults(vkDevice, *m_queryPool, 0u, 1u, sizeof(tsGet32Bits), &tsGet32Bits,
                                    sizeof(tsGet32Bits), VK_QUERY_RESULT_WAIT_BIT));
    VK_CHECK(vk.getQueryPoolResults(vkDevice, *m_queryPool, 0u, 1u, sizeof(tsGet64Bits), &tsGet64Bits,
                                    sizeof(tsGet64Bits), (VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT)));

    // Check timestamp mask for both 64-bit results.
    checkTimestampBits(tsBuffer64Bits, m_timestampMask);
    checkTimestampBits(tsGet64Bits, m_timestampMask);

    // Check results are consistent.
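    // The copied values must match their vkGetQueryPoolResults() counterparts. For the 32-bit form
    // the Vulkan specification allows an overflowing result to either wrap or saturate, so the
    // 32-bit value is accepted if it equals the low 32 bits of the 64-bit value or, when the
    // 64-bit value does not fit, if it is saturated to 0xFFFFFFFF.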
    if (tsBuffer32Bits == tsGet32Bits && tsBuffer64Bits == tsGet64Bits &&
        (((tsGet64Bits & maxDeUint32Value) == tsGet32Bits) ||
         ((tsGet64Bits > maxDeUint32Value) && (maxDeUint32Value == tsGet32Bits))))
    {
        return tcu::TestStatus::pass("Pass");
    }

    std::ostringstream msg;
    msg << std::hex << "Results are inconsistent:"
        << " B32=0x" << tsBuffer32Bits << " B64=0x" << tsBuffer64Bits << " G32=0x" << tsGet32Bits << " G64=0x"
        << tsGet64Bits;
    return tcu::TestStatus::fail(msg.str());
}

} // namespace

tcu::TestCaseGroup *createTimestampTests(tcu::TestContext &testCtx, PipelineConstructionType pipelineConstructionType)
{
    de::MovePtr<tcu::TestCaseGroup> timestampTests(new tcu::TestCaseGroup(testCtx, "timestamp"));
    const VkQueryResultFlags queryResultFlagsTimestampTest[] = {
        VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT,
        VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT | VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
    };
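    // Each graphics, compute and transfer case below is instantiated once per entry of this flags
    // array, i.e. with and without VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, and most cases are
    // additionally duplicated with host query reset enabled.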

    // Basic Graphics Tests
    {
        de::MovePtr<tcu::TestCaseGroup> basicGraphicsTests(new tcu::TestCaseGroup(testCtx, "basic_graphics_tests"));

        const VkPipelineStageFlagBits basicGraphicsStages0[][2] = {
            {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT},
            {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT},
            {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT},
            {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT},
            {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT},
            {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT},
            {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT},
            {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT},
        };
        for (uint32_t stageNdx = 0u; stageNdx < DE_LENGTH_OF_ARRAY(basicGraphicsStages0); stageNdx++)
        {
            for (uint32_t flagsIdx = 0u; flagsIdx < DE_LENGTH_OF_ARRAY(queryResultFlagsTimestampTest); flagsIdx++)
            {
                TimestampTestParam param(pipelineConstructionType, basicGraphicsStages0[stageNdx], 2u, true, false,
                                         false, queryResultFlagsTimestampTest[flagsIdx]);
                basicGraphicsTests->addChild(newTestCase<BasicGraphicsTest>(testCtx, &param));
                param.toggleInRenderPass();
                basicGraphicsTests->addChild(newTestCase<BasicGraphicsTest>(testCtx, &param));
                // Host Query reset tests
                param.toggleHostQueryReset();
                basicGraphicsTests->addChild(newTestCase<BasicGraphicsTest>(testCtx, &param));
                param.toggleInRenderPass();
                basicGraphicsTests->addChild(newTestCase<BasicGraphicsTest>(testCtx, &param));
            }
        }

        const VkPipelineStageFlagBits basicGraphicsStages1[][3] = {
            {VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
             VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT},
            {VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
             VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT},
        };
        for (uint32_t stageNdx = 0u; stageNdx < DE_LENGTH_OF_ARRAY(basicGraphicsStages1); stageNdx++)
        {
            for (uint32_t flagsIdx = 0u; flagsIdx < DE_LENGTH_OF_ARRAY(queryResultFlagsTimestampTest); flagsIdx++)
            {
                TimestampTestParam param(pipelineConstructionType, basicGraphicsStages1[stageNdx], 3u, true, false,
                                         false, queryResultFlagsTimestampTest[flagsIdx]);
                basicGraphicsTests->addChild(newTestCase<BasicGraphicsTest>(testCtx, &param));
                param.toggleInRenderPass();
                basicGraphicsTests->addChild(newTestCase<BasicGraphicsTest>(testCtx, &param));
                // Host Query reset tests
                param.toggleHostQueryReset();
                basicGraphicsTests->addChild(newTestCase<BasicGraphicsTest>(testCtx, &param));
                param.toggleInRenderPass();
                basicGraphicsTests->addChild(newTestCase<BasicGraphicsTest>(testCtx, &param));
            }
        }

        timestampTests->addChild(basicGraphicsTests.release());
    }

    // Advanced Graphics Tests
    {
        // Record timestamp in different pipeline stages of advanced graphics tests
        de::MovePtr<tcu::TestCaseGroup> advGraphicsTests(new tcu::TestCaseGroup(testCtx, "advanced_graphics_tests"));

        const VkPipelineStageFlagBits advGraphicsStages[][2] = {
            {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT},
            {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT},
            {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT},
            {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT},
        };
        for (uint32_t stageNdx = 0u; stageNdx < DE_LENGTH_OF_ARRAY(advGraphicsStages); stageNdx++)
        {
            for (uint32_t flagsIdx = 0u; flagsIdx < DE_LENGTH_OF_ARRAY(queryResultFlagsTimestampTest); flagsIdx++)
            {
                TimestampTestParam param(pipelineConstructionType, advGraphicsStages[stageNdx], 2u, true, false, false,
                                         queryResultFlagsTimestampTest[flagsIdx]);
                advGraphicsTests->addChild(newTestCase<AdvGraphicsTest>(testCtx, &param));
                param.toggleInRenderPass();
                advGraphicsTests->addChild(newTestCase<AdvGraphicsTest>(testCtx, &param));
                // Host Query reset tests
                param.toggleHostQueryReset();
                advGraphicsTests->addChild(newTestCase<AdvGraphicsTest>(testCtx, &param));
                param.toggleInRenderPass();
                advGraphicsTests->addChild(newTestCase<AdvGraphicsTest>(testCtx, &param));
            }
        }

        timestampTests->addChild(advGraphicsTests.release());
    }

    // Basic Compute Tests - don't repeat those tests for graphics pipeline library
    if (pipelineConstructionType == PIPELINE_CONSTRUCTION_TYPE_MONOLITHIC)
    {
        // Record timestamp for compute stages
        de::MovePtr<tcu::TestCaseGroup> basicComputeTests(new tcu::TestCaseGroup(testCtx, "basic_compute_tests"));

        const VkPipelineStageFlagBits basicComputeStages[][2] = {
            {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT},
            {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT},
        };
        for (uint32_t stageNdx = 0u; stageNdx < DE_LENGTH_OF_ARRAY(basicComputeStages); stageNdx++)
        {
            for (uint32_t flagsIdx = 0u; flagsIdx < DE_LENGTH_OF_ARRAY(queryResultFlagsTimestampTest); flagsIdx++)
            {
                TimestampTestParam param(pipelineConstructionType, basicComputeStages[stageNdx], 2u, false, false,
                                         false, queryResultFlagsTimestampTest[flagsIdx]);
                basicComputeTests->addChild(newTestCase<BasicComputeTest>(testCtx, &param));
                // Host Query reset test
                param.toggleHostQueryReset();
                basicComputeTests->addChild(newTestCase<BasicComputeTest>(testCtx, &param));
            }
        }

        timestampTests->addChild(basicComputeTests.release());
    }

    // Transfer Tests - don't repeat those tests for graphics pipeline library
    if (pipelineConstructionType == PIPELINE_CONSTRUCTION_TYPE_MONOLITHIC)
    {
        de::MovePtr<tcu::TestCaseGroup> transferTests(new tcu::TestCaseGroup(testCtx, "transfer_tests"));

        const VkPipelineStageFlagBits transferStages[][2] = {
            {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT},
            {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_HOST_BIT},
        };

        for (uint32_t transferOnlyQueue = 0u; transferOnlyQueue < 2; transferOnlyQueue++)
        {
            for (uint32_t stageNdx = 0u; stageNdx < DE_LENGTH_OF_ARRAY(transferStages); stageNdx++)
            {
                for (uint32_t method = 0u; method < TRANSFER_METHOD_LAST; method++)
                {
                    if (transferOnlyQueue)
                    {
                        // skip tests that use commands not supported on transfer only queue
                        if (method == TRANSFER_METHOD_BLIT_IMAGE || method == TRANSFER_METHOD_CLEAR_COLOR_IMAGE ||
                            method == TRANSFER_METHOD_CLEAR_DEPTH_STENCIL_IMAGE ||
                            method == TRANSFER_METHOD_RESOLVE_IMAGE ||
                            method == TRANSFER_METHOD_COPY_QUERY_POOL_RESULTS ||
                            method == TRANSFER_METHOD_COPY_QUERY_POOL_RESULTS_STRIDE_ZERO)
                            continue;
                    }

                    for (uint32_t flagsIdx = 0u; flagsIdx < DE_LENGTH_OF_ARRAY(queryResultFlagsTimestampTest);
                         flagsIdx++)
                    {
                        TransferTimestampTestParam param(pipelineConstructionType, transferStages[stageNdx], 2u, false,
                                                         false, transferOnlyQueue, method,
                                                         queryResultFlagsTimestampTest[flagsIdx]);

                        // execute tests that use cmdResetQueryPool only on default device
                        if (!transferOnlyQueue)
                            transferTests->addChild(newTestCase<TransferTest>(testCtx, &param));

                        // Host Query reset test
                        param.toggleHostQueryReset();
                        transferTests->addChild(newTestCase<TransferTest>(testCtx, &param));
                    }
                }
            }
        }

        timestampTests->addChild(transferTests.release());
    }

    // Calibrated Timestamp Tests - don't repeat those tests for graphics pipeline library
    if (pipelineConstructionType == PIPELINE_CONSTRUCTION_TYPE_MONOLITHIC)
    {
        de::MovePtr<tcu::TestCaseGroup> calibratedTimestampTests(new tcu::TestCaseGroup(testCtx, "calibrated"));

        calibratedTimestampTests->addChild(
            new CalibratedTimestampTest<CalibratedTimestampDevDomainTestInstance>(testCtx, "dev_domain_test"));
        calibratedTimestampTests->addChild(
            new CalibratedTimestampTest<CalibratedTimestampHostDomainTestInstance>(testCtx, "host_domain_test"));
        // Test calibration using device and host domains
        calibratedTimestampTests->addChild(
            new CalibratedTimestampTest<CalibratedTimestampCalibrationTestInstance>(testCtx, "calibration_test"));

        timestampTests->addChild(calibratedTimestampTests.release());
    }

    // Misc Tests - don't repeat those tests for graphics pipeline library
    if (pipelineConstructionType == PIPELINE_CONSTRUCTION_TYPE_MONOLITHIC)
    {
        const VkQueryResultFlags queryResultFlagsMiscTests[] = {
            VK_QUERY_RESULT_WAIT_BIT,
            VK_QUERY_RESULT_WAIT_BIT | VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
        };

        const std::string queryResultsFlagsMiscTestsStr[] = {"", "_with_availability_bit"};

        de::MovePtr<tcu::TestCaseGroup> miscTests(new tcu::TestCaseGroup(testCtx, "misc_tests"));

        for (uint32_t flagsIdx = 0u; flagsIdx < DE_LENGTH_OF_ARRAY(queryResultFlagsMiscTests); flagsIdx++)
        {
            const VkPipelineStageFlagBits miscStages[] = {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT};
            TimestampTestParam param(pipelineConstructionType, miscStages, 1u, false, false, false,
                                     queryResultFlagsTimestampTest[flagsIdx]);
            // Only write the timestamp command in the command buffer
            miscTests->addChild(
                new TimestampTest(testCtx, "timestamp_only" + queryResultsFlagsMiscTestsStr[flagsIdx], &param));

            TwoCmdBuffersTestParam twoCmdBuffersParamPrimary(pipelineConstructionType, miscStages, 1u, false, false,
                                                             false, VK_COMMAND_BUFFER_LEVEL_PRIMARY,
                                                             queryResultFlagsMiscTests[flagsIdx]);
            // Issue query in a command buffer and copy it on another primary command buffer
            miscTests->addChild(
                new TwoCmdBuffersTest(testCtx, "two_cmd_buffers_primary" + queryResultsFlagsMiscTestsStr[flagsIdx],
                                      &twoCmdBuffersParamPrimary));

            TwoCmdBuffersTestParam twoCmdBuffersParamSecondary(pipelineConstructionType, miscStages, 1u, false, false,
                                                               false, VK_COMMAND_BUFFER_LEVEL_SECONDARY,
                                                               queryResultFlagsMiscTests[flagsIdx]);
            // Issue query in a secondary command buffer and copy it on a primary command buffer
            miscTests->addChild(
                new TwoCmdBuffersTest(testCtx, "two_cmd_buffers_secondary" + queryResultsFlagsMiscTestsStr[flagsIdx],
                                      &twoCmdBuffersParamSecondary));
            // Misc: Host Query Reset tests
            param.toggleHostQueryReset();
            // Only write the timestamp command in the command buffer
            miscTests->addChild(new TimestampTest(
                testCtx, "timestamp_only_host_query_reset" + queryResultsFlagsMiscTestsStr[flagsIdx], &param));
            TwoCmdBuffersTestParam twoCmdBuffersParamPrimaryHostQueryReset(
                pipelineConstructionType, miscStages, 1u, false, true, false, VK_COMMAND_BUFFER_LEVEL_PRIMARY,
                queryResultFlagsMiscTests[flagsIdx]);
            // Issue query in a command buffer and copy it on another primary command buffer
            miscTests->addChild(new TwoCmdBuffersTest(
                testCtx, "two_cmd_buffers_primary_host_query_reset" + queryResultsFlagsMiscTestsStr[flagsIdx],
                &twoCmdBuffersParamPrimaryHostQueryReset));

            TwoCmdBuffersTestParam twoCmdBuffersParamSecondaryHostQueryReset(
                pipelineConstructionType, miscStages, 1u, false, true, false, VK_COMMAND_BUFFER_LEVEL_SECONDARY,
                queryResultFlagsMiscTests[flagsIdx]);
            // Issue query in a secondary command buffer and copy it on a primary command buffer
            miscTests->addChild(new TwoCmdBuffersTest(
                testCtx, "two_cmd_buffers_secondary_host_query_reset" + queryResultsFlagsMiscTestsStr[flagsIdx],
                &twoCmdBuffersParamSecondaryHostQueryReset));
            TwoCmdBuffersTestParam twoCmdBuffersParamSecondaryTransferQueue(
                pipelineConstructionType, miscStages, 1u, false, true, true, VK_COMMAND_BUFFER_LEVEL_SECONDARY,
                queryResultFlagsMiscTests[flagsIdx]);
            // Issue query in a secondary command buffer executed from a primary command buffer on a transfer-only queue
            miscTests->addChild(new TwoCmdBuffersTest(
                testCtx, "two_cmd_buffers_secondary_transfer_queue" + queryResultsFlagsMiscTestsStr[flagsIdx],
                &twoCmdBuffersParamSecondaryTransferQueue));
        }
        // Reset timestamp query before copying results.
        miscTests->addChild(new ResetTimestampQueryBeforeCopyTest(testCtx, "reset_query_before_copy"));

        // Fill buffer with 0s before copying results.
        miscTests->addChild(new FillBufferBeforeCopyTest(testCtx, "fill_buffer_before_copy"));

        // Check consistency between 32 and 64 bits.
        miscTests->addChild(new ConsistentQueryResultsTest(testCtx, "consistent_results"));

        timestampTests->addChild(miscTests.release());
    }

    return timestampTests.release();
}

} // namespace pipeline

} // namespace vkt