1 // Copyright (C) 2023 The Android Open Source Project
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
15 #include <log/log.h>
16
17 #include <atomic>
18 #include <thread>
19
20 #include "GfxstreamEnd2EndTestUtils.h"
21 #include "GfxstreamEnd2EndTests.h"
22 #include "gfxstream/Expected.h"
23 #include "shaders/blit_sampler2d_frag.h"
24 #include "shaders/fullscreen_triangle_with_uv_vert.h"
25
26 namespace gfxstream {
27 namespace tests {
28 namespace {
29
30 using namespace std::chrono_literals;
31 using testing::Eq;
32 using testing::Ge;
33 using testing::IsEmpty;
34 using testing::IsNull;
35 using testing::IsTrue;
36 using testing::Ne;
37 using testing::Not;
38 using testing::NotNull;
39
40 template <typename DurationType>
constexpr uint64_t AsVkTimeout(DurationType duration) {
42 return static_cast<uint64_t>(std::chrono::duration_cast<std::chrono::nanoseconds>(duration).count());
43 }
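
// Example (illustrative): AsVkTimeout() converts a std::chrono duration into the
// uint64_t nanosecond timeout expected by Vulkan wait functions, e.g.
//
//   auto waitResult = device->waitForFences(*fence, VK_TRUE, AsVkTimeout(3s));
//
// which is how the tests below bound their fence waits.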
44
45 class GfxstreamEnd2EndVkTest : public GfxstreamEnd2EndTest {
46 protected:
// Gfxstream uses a vkQueueSubmit() on the host to signal the VkFence and VkSemaphore
// passed to vkAcquireImageANDROID() calls. The guest is not aware of this and may
// call vkDestroyFence() and vkDestroySemaphore() (because, from the guest's point of
// view, the VkImage, VkFence, and VkSemaphore may never have been used) while the
// host's command buffer is still executing. Gfxstream needs to perform the necessary
// tracking so that the VkFence and VkSemaphore are not deleted while they are still
// in use on the host.
void DoAcquireImageAndroidWithSync(bool withFence, bool withSemaphore) {
55 auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
56 GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
57
58 const uint32_t width = 32;
59 const uint32_t height = 32;
60 auto ahb = GFXSTREAM_ASSERT(ScopedAHardwareBuffer::Allocate(
61 *mGralloc, width, height, GFXSTREAM_AHB_FORMAT_R8G8B8A8_UNORM));
62
63 const VkNativeBufferANDROID imageNativeBufferInfo = {
64 .sType = VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID,
65 .handle = mGralloc->getNativeHandle(ahb),
66 };
67
68 auto vkAcquireImageANDROID =
69 PFN_vkAcquireImageANDROID(device->getProcAddr("vkAcquireImageANDROID"));
70 ASSERT_THAT(vkAcquireImageANDROID, NotNull());
71
72 const vkhpp::ImageCreateInfo imageCreateInfo = {
73 .pNext = &imageNativeBufferInfo,
74 .imageType = vkhpp::ImageType::e2D,
75 .extent.width = width,
76 .extent.height = height,
77 .extent.depth = 1,
78 .mipLevels = 1,
79 .arrayLayers = 1,
80 .format = vkhpp::Format::eR8G8B8A8Unorm,
81 .tiling = vkhpp::ImageTiling::eOptimal,
82 .initialLayout = vkhpp::ImageLayout::eUndefined,
83 .usage = vkhpp::ImageUsageFlagBits::eSampled | vkhpp::ImageUsageFlagBits::eTransferDst |
84 vkhpp::ImageUsageFlagBits::eTransferSrc,
85 .sharingMode = vkhpp::SharingMode::eExclusive,
86 .samples = vkhpp::SampleCountFlagBits::e1,
87 };
88 auto image = device->createImageUnique(imageCreateInfo).value;
89
90 vkhpp::MemoryRequirements imageMemoryRequirements{};
91 device->getImageMemoryRequirements(*image, &imageMemoryRequirements);
92
93 const uint32_t imageMemoryIndex = utils::getMemoryType(
94 physicalDevice, imageMemoryRequirements, vkhpp::MemoryPropertyFlagBits::eDeviceLocal);
95 ASSERT_THAT(imageMemoryIndex, Not(Eq(-1)));
96
97 const vkhpp::MemoryAllocateInfo imageMemoryAllocateInfo = {
98 .allocationSize = imageMemoryRequirements.size,
99 .memoryTypeIndex = imageMemoryIndex,
100 };
101
102 auto imageMemory = device->allocateMemoryUnique(imageMemoryAllocateInfo).value;
103 ASSERT_THAT(imageMemory, IsValidHandle());
104 ASSERT_THAT(device->bindImageMemory(*image, *imageMemory, 0), IsVkSuccess());
105
106 vkhpp::UniqueFence fence;
107 if (withFence) {
108 fence = device->createFenceUnique(vkhpp::FenceCreateInfo()).value;
109 }
110
111 vkhpp::UniqueSemaphore semaphore;
112 if (withSemaphore) {
113 semaphore = device->createSemaphoreUnique(vkhpp::SemaphoreCreateInfo()).value;
114 }
115
116 auto result = vkAcquireImageANDROID(*device, *image, -1, *semaphore, *fence);
117 ASSERT_THAT(result, Eq(VK_SUCCESS));
118
119 if (withFence) {
120 fence.reset();
121 }
122 if (withSemaphore) {
123 semaphore.reset();
124 }
125 }
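
// Illustrative usage (a sketch; the exact TEST_P names here are assumptions, not
// part of this file):
//
//   TEST_P(GfxstreamEnd2EndVkTest, AcquireImageAndroidWithFence) {
//       DoAcquireImageAndroidWithSync(/*withFence=*/true, /*withSemaphore=*/false);
//   }
//
// Destroying the fence/semaphore immediately after vkAcquireImageANDROID() returns
// exercises the host-side lifetime tracking described above.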
126
Result<Ok> DoCommandsImmediate(
128 TypicalVkTestEnvironment& vk,
129 const std::function<Result<Ok>(vkhpp::UniqueCommandBuffer&)>& func,
130 const std::vector<vkhpp::UniqueSemaphore>& semaphores_wait = {},
131 const std::vector<vkhpp::UniqueSemaphore>& semaphores_signal = {}) {
132 const vkhpp::CommandPoolCreateInfo commandPoolCreateInfo = {
133 .queueFamilyIndex = vk.queueFamilyIndex,
134 };
135 auto commandPool =
136 GFXSTREAM_EXPECT_VKHPP_RV(vk.device->createCommandPoolUnique(commandPoolCreateInfo));
137
138 const vkhpp::CommandBufferAllocateInfo commandBufferAllocateInfo = {
139 .commandPool = *commandPool,
140 .level = vkhpp::CommandBufferLevel::ePrimary,
141 .commandBufferCount = 1,
142 };
143 auto commandBuffers = GFXSTREAM_EXPECT_VKHPP_RV(
144 vk.device->allocateCommandBuffersUnique(commandBufferAllocateInfo));
145 auto commandBuffer = std::move(commandBuffers[0]);
146
147 const vkhpp::CommandBufferBeginInfo commandBufferBeginInfo = {
148 .flags = vkhpp::CommandBufferUsageFlagBits::eOneTimeSubmit,
149 };
150 commandBuffer->begin(commandBufferBeginInfo);
151 GFXSTREAM_EXPECT(func(commandBuffer));
152 commandBuffer->end();
153
154 std::vector<vkhpp::CommandBuffer> commandBufferHandles;
155 commandBufferHandles.push_back(*commandBuffer);
156
157 std::vector<vkhpp::Semaphore> semaphoreHandlesWait;
158 semaphoreHandlesWait.reserve(semaphores_wait.size());
159 for (const auto& s : semaphores_wait) {
160 semaphoreHandlesWait.emplace_back(*s);
161 }
162
163 std::vector<vkhpp::Semaphore> semaphoreHandlesSignal;
164 semaphoreHandlesSignal.reserve(semaphores_signal.size());
165 for (const auto& s : semaphores_signal) {
166 semaphoreHandlesSignal.emplace_back(*s);
167 }
168
169 vkhpp::SubmitInfo submitInfo = {
170 .commandBufferCount = static_cast<uint32_t>(commandBufferHandles.size()),
171 .pCommandBuffers = commandBufferHandles.data(),
172 };
173 if (!semaphoreHandlesWait.empty()) {
174 submitInfo.waitSemaphoreCount = static_cast<uint32_t>(semaphoreHandlesWait.size());
175 submitInfo.pWaitSemaphores = semaphoreHandlesWait.data();
176 }
177 if (!semaphoreHandlesSignal.empty()) {
178 submitInfo.signalSemaphoreCount = static_cast<uint32_t>(semaphoreHandlesSignal.size());
179 submitInfo.pSignalSemaphores = semaphoreHandlesSignal.data();
180 }
181 vk.queue.submit(submitInfo);
182 vk.queue.waitIdle();
183 return Ok{};
184 }
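
// Illustrative usage (sketch): DoCommandsImmediate() records a one-time-submit
// command buffer, submits it, and waits for the queue to go idle, so callers only
// supply the recording lambda. For example, clearing a buffer created with
// eTransferDst usage (the buffer name is a placeholder):
//
//   GFXSTREAM_EXPECT(DoCommandsImmediate(vk, [&](vkhpp::UniqueCommandBuffer& cmd) {
//       cmd->fillBuffer(*someBuffer, /*dstOffset=*/0, /*size=*/VK_WHOLE_SIZE, /*data=*/0);
//       return Ok{};
//   }));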
185
186 struct BufferWithMemory {
187 vkhpp::UniqueBuffer buffer;
188 vkhpp::UniqueDeviceMemory bufferMemory;
189 };
Result<BufferWithMemory> CreateBuffer(TypicalVkTestEnvironment& vk,
191 vkhpp::DeviceSize bufferSize,
192 vkhpp::BufferUsageFlags bufferUsages,
193 vkhpp::MemoryPropertyFlags bufferMemoryProperties,
194 const uint8_t* data = nullptr,
195 vkhpp::DeviceSize dataSize = 0) {
196 const vkhpp::BufferCreateInfo bufferCreateInfo = {
197 .size = static_cast<VkDeviceSize>(bufferSize),
198 .usage = bufferUsages,
199 .sharingMode = vkhpp::SharingMode::eExclusive,
200 };
201 auto buffer = GFXSTREAM_EXPECT_VKHPP_RV(vk.device->createBufferUnique(bufferCreateInfo));
202
203 vkhpp::MemoryRequirements bufferMemoryRequirements{};
204 vk.device->getBufferMemoryRequirements(*buffer, &bufferMemoryRequirements);
205
206 const auto bufferMemoryTypeIndex = utils::getMemoryType(
207 vk.physicalDevice, bufferMemoryRequirements, bufferMemoryProperties);
208
209 const vkhpp::MemoryAllocateInfo bufferMemoryAllocateInfo = {
210 .allocationSize = bufferMemoryRequirements.size,
211 .memoryTypeIndex = bufferMemoryTypeIndex,
212 };
213 auto bufferMemory =
214 GFXSTREAM_EXPECT_VKHPP_RV(vk.device->allocateMemoryUnique(bufferMemoryAllocateInfo));
215
216 GFXSTREAM_EXPECT_VKHPP_RESULT(vk.device->bindBufferMemory(*buffer, *bufferMemory, 0));
217
218 if (data != nullptr) {
219 if (!(bufferUsages & vkhpp::BufferUsageFlagBits::eTransferDst)) {
220 return gfxstream::unexpected(
221 "Must request transfer dst usage when creating buffer with data");
222 }
223 if (!(bufferMemoryProperties & vkhpp::MemoryPropertyFlagBits::eHostVisible)) {
224 return gfxstream::unexpected(
225 "Must request host visible mem property when creating buffer with data");
226 }
227
228 void* mapped =
229 GFXSTREAM_EXPECT_VKHPP_RV(vk.device->mapMemory(*bufferMemory, 0, bufferSize));
230
231 std::memcpy(mapped, data, dataSize);
232
if (!(bufferMemoryProperties & vkhpp::MemoryPropertyFlagBits::eHostCoherent)) {
234 vk.device->flushMappedMemoryRanges(vkhpp::MappedMemoryRange{
235 .memory = *bufferMemory,
236 .offset = 0,
237 .size = VK_WHOLE_SIZE,
238 });
239 }
240
241 vk.device->unmapMemory(*bufferMemory);
242 }
243
244 return BufferWithMemory{
245 .buffer = std::move(buffer),
246 .bufferMemory = std::move(bufferMemory),
247 };
248 }
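
// Illustrative usage (sketch): a host-visible, host-coherent staging buffer
// pre-populated with data. `srcBytes` is a placeholder for any CPU-side payload.
//
//   const std::vector<uint8_t> srcBytes(1024, 0xAB);
//   auto staging = GFXSTREAM_EXPECT(CreateBuffer(
//       vk, srcBytes.size(),
//       vkhpp::BufferUsageFlagBits::eTransferDst | vkhpp::BufferUsageFlagBits::eTransferSrc,
//       vkhpp::MemoryPropertyFlagBits::eHostVisible | vkhpp::MemoryPropertyFlagBits::eHostCoherent,
//       srcBytes.data(), srcBytes.size()));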
249
250 struct ImageWithMemory {
251 std::optional<vkhpp::UniqueSamplerYcbcrConversion> imageSamplerConversion;
252 vkhpp::UniqueSampler imageSampler;
253 vkhpp::UniqueDeviceMemory imageMemory;
254 vkhpp::UniqueImage image;
255 vkhpp::UniqueImageView imageView;
256 };
Result<ImageWithMemory> CreateImageWithAhb(TypicalVkTestEnvironment& vk,
258 const ScopedAHardwareBuffer& ahb,
259 const vkhpp::ImageUsageFlags usages,
260 const vkhpp::ImageLayout layout) {
261 const auto ahbHandle = mGralloc->getNativeHandle(ahb);
262 if (ahbHandle == nullptr) {
263 return gfxstream::unexpected("Failed to query native handle.");
264 }
265 const auto ahbFormat = mGralloc->getFormat(ahb);
266 const bool ahbIsYuv = ahbFormat == GFXSTREAM_AHB_FORMAT_YV12 ||
267 ahbFormat == GFXSTREAM_AHB_FORMAT_Y8Cb8Cr8_420;
268
269 auto vkGetAndroidHardwareBufferPropertiesANDROID =
270 reinterpret_cast<PFN_vkGetAndroidHardwareBufferPropertiesANDROID>(
271 vk.device->getProcAddr("vkGetAndroidHardwareBufferPropertiesANDROID"));
272 if (vkGetAndroidHardwareBufferPropertiesANDROID == nullptr) {
273 return gfxstream::unexpected(
274 "Failed to query vkGetAndroidHardwareBufferPropertiesANDROID().");
275 }
276 VkAndroidHardwareBufferFormatPropertiesANDROID ahbFormatProperties = {
277 .sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID,
278 .pNext = nullptr,
279 };
280 VkAndroidHardwareBufferPropertiesANDROID ahbProperties = {
281 .sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID,
282 .pNext = &ahbFormatProperties,
283 };
284 if (vkGetAndroidHardwareBufferPropertiesANDROID(*vk.device, ahb, &ahbProperties) !=
285 VK_SUCCESS) {
286 return gfxstream::unexpected("Failed to query ahb properties.");
287 }
288
289 const VkExternalFormatANDROID externalFormat = {
290 .sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,
291 .externalFormat = ahbFormatProperties.externalFormat,
292 };
293
294 std::optional<vkhpp::UniqueSamplerYcbcrConversion> imageSamplerConversion;
295 std::optional<vkhpp::SamplerYcbcrConversionInfo> samplerConversionInfo;
296 if (ahbIsYuv) {
297 const vkhpp::SamplerYcbcrConversionCreateInfo conversionCreateInfo = {
298 .pNext = &externalFormat,
299 .format = static_cast<vkhpp::Format>(ahbFormatProperties.format),
300 .ycbcrModel = static_cast<vkhpp::SamplerYcbcrModelConversion>(
301 ahbFormatProperties.suggestedYcbcrModel),
302 .ycbcrRange =
303 static_cast<vkhpp::SamplerYcbcrRange>(ahbFormatProperties.suggestedYcbcrRange),
304 .components =
305 {
306 .r = static_cast<vkhpp::ComponentSwizzle>(
307 ahbFormatProperties.samplerYcbcrConversionComponents.r),
308 .g = static_cast<vkhpp::ComponentSwizzle>(
309 ahbFormatProperties.samplerYcbcrConversionComponents.g),
310 .b = static_cast<vkhpp::ComponentSwizzle>(
311 ahbFormatProperties.samplerYcbcrConversionComponents.b),
312 .a = static_cast<vkhpp::ComponentSwizzle>(
313 ahbFormatProperties.samplerYcbcrConversionComponents.a),
314 },
315 .xChromaOffset =
316 static_cast<vkhpp::ChromaLocation>(ahbFormatProperties.suggestedXChromaOffset),
317 .yChromaOffset =
318 static_cast<vkhpp::ChromaLocation>(ahbFormatProperties.suggestedYChromaOffset),
319 .chromaFilter = vkhpp::Filter::eNearest,
320 .forceExplicitReconstruction = VK_FALSE,
321 };
322 imageSamplerConversion = GFXSTREAM_EXPECT_VKHPP_RV(
323 vk.device->createSamplerYcbcrConversionUnique(conversionCreateInfo));
324
325 samplerConversionInfo = vkhpp::SamplerYcbcrConversionInfo{
326 .conversion = **imageSamplerConversion,
327 };
328 }
329 const vkhpp::SamplerCreateInfo samplerCreateInfo = {
.pNext = ahbIsYuv ? &*samplerConversionInfo : nullptr,
331 .magFilter = vkhpp::Filter::eNearest,
332 .minFilter = vkhpp::Filter::eNearest,
333 .mipmapMode = vkhpp::SamplerMipmapMode::eNearest,
334 .addressModeU = vkhpp::SamplerAddressMode::eClampToEdge,
335 .addressModeV = vkhpp::SamplerAddressMode::eClampToEdge,
336 .addressModeW = vkhpp::SamplerAddressMode::eClampToEdge,
337 .mipLodBias = 0.0f,
338 .anisotropyEnable = VK_FALSE,
339 .maxAnisotropy = 1.0f,
340 .compareEnable = VK_FALSE,
341 .compareOp = vkhpp::CompareOp::eLessOrEqual,
342 .minLod = 0.0f,
343 .maxLod = 0.0f,
344 .borderColor = vkhpp::BorderColor::eIntTransparentBlack,
345 .unnormalizedCoordinates = VK_FALSE,
346 };
347 auto imageSampler =
348 GFXSTREAM_EXPECT_VKHPP_RV(vk.device->createSamplerUnique(samplerCreateInfo));
349
350 const VkExternalMemoryImageCreateInfo externalMemoryImageCreateInfo = {
351 .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
352 .pNext = &externalFormat,
353 .handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,
354 };
355 const vkhpp::ImageCreateInfo imageCreateInfo = {
356 .pNext = &externalMemoryImageCreateInfo,
357 .imageType = vkhpp::ImageType::e2D,
358 .format = static_cast<vkhpp::Format>(ahbFormatProperties.format),
359 .extent =
360 {
361 .width = mGralloc->getWidth(ahb),
362 .height = mGralloc->getHeight(ahb),
363 .depth = 1,
364 },
365 .mipLevels = 1,
366 .arrayLayers = 1,
367 .samples = vkhpp::SampleCountFlagBits::e1,
368 .tiling = vkhpp::ImageTiling::eOptimal,
369 .usage = usages,
370 .sharingMode = vkhpp::SharingMode::eExclusive,
371 .initialLayout = vkhpp::ImageLayout::eUndefined,
372 };
373 auto image = GFXSTREAM_EXPECT_VKHPP_RV(vk.device->createImageUnique(imageCreateInfo));
374
375 const vkhpp::MemoryRequirements imageMemoryRequirements = {
376 .size = ahbProperties.allocationSize,
377 .alignment = 0,
378 .memoryTypeBits = ahbProperties.memoryTypeBits,
379 };
380 const uint32_t imageMemoryIndex =
381 utils::getMemoryType(vk.physicalDevice, imageMemoryRequirements,
382 vkhpp::MemoryPropertyFlagBits::eDeviceLocal);
383
384 const vkhpp::ImportAndroidHardwareBufferInfoANDROID importAhbInfo = {
385 .buffer = ahb,
386 };
387 const vkhpp::MemoryDedicatedAllocateInfo importMemoryDedicatedInfo = {
388 .pNext = &importAhbInfo,
389 .image = *image,
390 };
391 const vkhpp::MemoryAllocateInfo imageMemoryAllocateInfo = {
392 .pNext = &importMemoryDedicatedInfo,
393 .allocationSize = imageMemoryRequirements.size,
394 .memoryTypeIndex = imageMemoryIndex,
395 };
396 auto imageMemory =
397 GFXSTREAM_EXPECT_VKHPP_RV(vk.device->allocateMemoryUnique(imageMemoryAllocateInfo));
GFXSTREAM_EXPECT_VKHPP_RESULT(vk.device->bindImageMemory(*image, *imageMemory, 0));
399
400 const vkhpp::ImageViewCreateInfo imageViewCreateInfo = {
.pNext = ahbIsYuv ? &*samplerConversionInfo : nullptr,
402 .image = *image,
403 .viewType = vkhpp::ImageViewType::e2D,
404 .format = static_cast<vkhpp::Format>(ahbFormatProperties.format),
405 .components =
406 {
407 .r = vkhpp::ComponentSwizzle::eIdentity,
408 .g = vkhpp::ComponentSwizzle::eIdentity,
409 .b = vkhpp::ComponentSwizzle::eIdentity,
410 .a = vkhpp::ComponentSwizzle::eIdentity,
411 },
412 .subresourceRange =
413 {
414 .aspectMask = vkhpp::ImageAspectFlagBits::eColor,
415 .baseMipLevel = 0,
416 .levelCount = 1,
417 .baseArrayLayer = 0,
418 .layerCount = 1,
419 },
420 };
421 auto imageView =
422 GFXSTREAM_EXPECT_VKHPP_RV(vk.device->createImageViewUnique(imageViewCreateInfo));
423
424 GFXSTREAM_EXPECT(DoCommandsImmediate(vk, [&](vkhpp::UniqueCommandBuffer& cmd) {
425 const std::vector<vkhpp::ImageMemoryBarrier> imageMemoryBarriers = {
426 vkhpp::ImageMemoryBarrier{
427 .srcAccessMask = {},
428 .dstAccessMask = vkhpp::AccessFlagBits::eTransferWrite,
429 .oldLayout = vkhpp::ImageLayout::eUndefined,
430 .newLayout = layout,
431 .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
432 .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
433 .image = *image,
434 .subresourceRange =
435 {
436 .aspectMask = vkhpp::ImageAspectFlagBits::eColor,
437 .baseMipLevel = 0,
438 .levelCount = 1,
439 .baseArrayLayer = 0,
440 .layerCount = 1,
441 },
442
443 },
444 };
445 cmd->pipelineBarrier(
446 /*srcStageMask=*/vkhpp::PipelineStageFlagBits::eAllCommands,
447 /*dstStageMask=*/vkhpp::PipelineStageFlagBits::eAllCommands,
448 /*dependencyFlags=*/{},
449 /*memoryBarriers=*/{},
450 /*bufferMemoryBarriers=*/{},
451 /*imageMemoryBarriers=*/imageMemoryBarriers);
452 return Ok{};
453 }));
454
455 return ImageWithMemory{
456 .imageSamplerConversion = std::move(imageSamplerConversion),
457 .imageSampler = std::move(imageSampler),
458 .imageMemory = std::move(imageMemory),
459 .image = std::move(image),
460 .imageView = std::move(imageView),
461 };
462 }
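
// Illustrative usage (sketch), mirroring DoFillAndRenderFromAhb() below: import a
// gralloc-allocated AHB as a sampled image in shader-read-only layout.
//
//   auto ahbImage = GFXSTREAM_EXPECT(CreateImageWithAhb(
//       vk, ahb, vkhpp::ImageUsageFlagBits::eSampled,
//       vkhpp::ImageLayout::eShaderReadOnlyOptimal));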
463
Result<ImageWithMemory> CreateImage(TypicalVkTestEnvironment& vk, uint32_t width,
465 uint32_t height, vkhpp::Format format,
466 vkhpp::ImageUsageFlags usages,
467 vkhpp::MemoryPropertyFlags memoryProperties,
468 vkhpp::ImageLayout returnedLayout) {
469 const vkhpp::ImageCreateInfo imageCreateInfo = {
470 .imageType = vkhpp::ImageType::e2D,
471 .format = format,
472 .extent =
473 {
474 .width = width,
475 .height = height,
476 .depth = 1,
477 },
478 .mipLevels = 1,
479 .arrayLayers = 1,
480 .samples = vkhpp::SampleCountFlagBits::e1,
481 .tiling = vkhpp::ImageTiling::eOptimal,
482 .usage = usages,
483 .sharingMode = vkhpp::SharingMode::eExclusive,
484 .initialLayout = vkhpp::ImageLayout::eUndefined,
485 };
486 auto image = GFXSTREAM_EXPECT_VKHPP_RV(vk.device->createImageUnique(imageCreateInfo));
487
488 const auto memoryRequirements = vk.device->getImageMemoryRequirements(*image);
489 const uint32_t memoryIndex =
490 utils::getMemoryType(vk.physicalDevice, memoryRequirements, memoryProperties);
491
492 const vkhpp::MemoryAllocateInfo imageMemoryAllocateInfo = {
493 .allocationSize = memoryRequirements.size,
494 .memoryTypeIndex = memoryIndex,
495 };
496 auto imageMemory =
497 GFXSTREAM_EXPECT_VKHPP_RV(vk.device->allocateMemoryUnique(imageMemoryAllocateInfo));
498
GFXSTREAM_EXPECT_VKHPP_RESULT(vk.device->bindImageMemory(*image, *imageMemory, 0));
500
501 const vkhpp::ImageViewCreateInfo imageViewCreateInfo = {
502 .image = *image,
503 .viewType = vkhpp::ImageViewType::e2D,
504 .format = format,
505 .components =
506 {
507 .r = vkhpp::ComponentSwizzle::eIdentity,
508 .g = vkhpp::ComponentSwizzle::eIdentity,
509 .b = vkhpp::ComponentSwizzle::eIdentity,
510 .a = vkhpp::ComponentSwizzle::eIdentity,
511 },
512 .subresourceRange =
513 {
514 .aspectMask = vkhpp::ImageAspectFlagBits::eColor,
515 .baseMipLevel = 0,
516 .levelCount = 1,
517 .baseArrayLayer = 0,
518 .layerCount = 1,
519 },
520 };
521 auto imageView =
522 GFXSTREAM_EXPECT_VKHPP_RV(vk.device->createImageViewUnique(imageViewCreateInfo));
523
524 GFXSTREAM_EXPECT(DoCommandsImmediate(vk, [&](vkhpp::UniqueCommandBuffer& cmd) {
525 const std::vector<vkhpp::ImageMemoryBarrier> imageMemoryBarriers = {
526 vkhpp::ImageMemoryBarrier{
527 .srcAccessMask = {},
528 .dstAccessMask = vkhpp::AccessFlagBits::eTransferWrite,
529 .oldLayout = vkhpp::ImageLayout::eUndefined,
530 .newLayout = returnedLayout,
531 .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
532 .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
533 .image = *image,
534 .subresourceRange =
535 {
536 .aspectMask = vkhpp::ImageAspectFlagBits::eColor,
537 .baseMipLevel = 0,
538 .levelCount = 1,
539 .baseArrayLayer = 0,
540 .layerCount = 1,
541 },
542 },
543 };
544 cmd->pipelineBarrier(
545 /*srcStageMask=*/vkhpp::PipelineStageFlagBits::eAllCommands,
546 /*dstStageMask=*/vkhpp::PipelineStageFlagBits::eAllCommands,
547 /*dependencyFlags=*/{},
548 /*memoryBarriers=*/{},
549 /*bufferMemoryBarriers=*/{},
550 /*imageMemoryBarriers=*/imageMemoryBarriers);
551
552 return Ok{};
553 }));
554
555 return ImageWithMemory{
556 .image = std::move(image),
557 .imageMemory = std::move(imageMemory),
558 .imageView = std::move(imageView),
559 };
560 }
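
// Illustrative usage (sketch): a device-local color render target, as created by
// CreateFramebuffer() below for its color attachment.
//
//   auto colorTarget = GFXSTREAM_EXPECT(CreateImage(
//       vk, width, height, vkhpp::Format::eR8G8B8A8Unorm,
//       vkhpp::ImageUsageFlagBits::eColorAttachment | vkhpp::ImageUsageFlagBits::eTransferSrc,
//       vkhpp::MemoryPropertyFlagBits::eDeviceLocal,
//       vkhpp::ImageLayout::eColorAttachmentOptimal));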
561
562 struct FramebufferWithAttachments {
563 std::optional<ImageWithMemory> colorAttachment;
564 std::optional<ImageWithMemory> depthAttachment;
565 vkhpp::UniqueRenderPass renderpass;
566 vkhpp::UniqueFramebuffer framebuffer;
567 };
Result<FramebufferWithAttachments> CreateFramebuffer(
569 TypicalVkTestEnvironment& vk, uint32_t width, uint32_t height,
570 vkhpp::Format colorAttachmentFormat = vkhpp::Format::eUndefined,
571 vkhpp::Format depthAttachmentFormat = vkhpp::Format::eUndefined) {
572 std::optional<ImageWithMemory> colorAttachment;
573 if (colorAttachmentFormat != vkhpp::Format::eUndefined) {
574 colorAttachment =
575 GFXSTREAM_EXPECT(CreateImage(vk, width, height, colorAttachmentFormat,
576 vkhpp::ImageUsageFlagBits::eColorAttachment |
577 vkhpp::ImageUsageFlagBits::eTransferSrc,
578 vkhpp::MemoryPropertyFlagBits::eDeviceLocal,
579 vkhpp::ImageLayout::eColorAttachmentOptimal));
580 }
581
582 std::optional<ImageWithMemory> depthAttachment;
583 if (depthAttachmentFormat != vkhpp::Format::eUndefined) {
584 depthAttachment =
585 GFXSTREAM_EXPECT(CreateImage(vk, width, height, depthAttachmentFormat,
586 vkhpp::ImageUsageFlagBits::eDepthStencilAttachment |
587 vkhpp::ImageUsageFlagBits::eTransferSrc,
588 vkhpp::MemoryPropertyFlagBits::eDeviceLocal,
589 vkhpp::ImageLayout::eDepthStencilAttachmentOptimal));
590 }
591
592 std::vector<vkhpp::AttachmentDescription> attachments;
593
594 std::optional<vkhpp::AttachmentReference> colorAttachmentReference;
595 if (colorAttachmentFormat != vkhpp::Format::eUndefined) {
596 attachments.push_back(vkhpp::AttachmentDescription{
597 .format = colorAttachmentFormat,
598 .samples = vkhpp::SampleCountFlagBits::e1,
599 .loadOp = vkhpp::AttachmentLoadOp::eClear,
600 .storeOp = vkhpp::AttachmentStoreOp::eStore,
601 .stencilLoadOp = vkhpp::AttachmentLoadOp::eClear,
602 .stencilStoreOp = vkhpp::AttachmentStoreOp::eStore,
603 .initialLayout = vkhpp::ImageLayout::eColorAttachmentOptimal,
604 .finalLayout = vkhpp::ImageLayout::eColorAttachmentOptimal,
605 });
606
607 colorAttachmentReference = vkhpp::AttachmentReference{
608 .attachment = static_cast<uint32_t>(attachments.size() - 1),
609 .layout = vkhpp::ImageLayout::eColorAttachmentOptimal,
610 };
611 }
612
613 std::optional<vkhpp::AttachmentReference> depthAttachmentReference;
614 if (depthAttachmentFormat != vkhpp::Format::eUndefined) {
615 attachments.push_back(vkhpp::AttachmentDescription{
616 .format = depthAttachmentFormat,
617 .samples = vkhpp::SampleCountFlagBits::e1,
618 .loadOp = vkhpp::AttachmentLoadOp::eClear,
619 .storeOp = vkhpp::AttachmentStoreOp::eStore,
620 .stencilLoadOp = vkhpp::AttachmentLoadOp::eClear,
621 .stencilStoreOp = vkhpp::AttachmentStoreOp::eStore,
.initialLayout = vkhpp::ImageLayout::eDepthStencilAttachmentOptimal,
.finalLayout = vkhpp::ImageLayout::eDepthStencilAttachmentOptimal,
624 });
625
626 depthAttachmentReference = vkhpp::AttachmentReference{
627 .attachment = static_cast<uint32_t>(attachments.size() - 1),
628 .layout = vkhpp::ImageLayout::eDepthStencilAttachmentOptimal,
629 };
630 }
631
632 vkhpp::SubpassDependency dependency = {
633 .srcSubpass = 0,
634 .dstSubpass = 0,
635 .srcStageMask = {},
636 .dstStageMask = vkhpp::PipelineStageFlagBits::eFragmentShader,
637 .srcAccessMask = {},
638 .dstAccessMask = vkhpp::AccessFlagBits::eInputAttachmentRead,
639 .dependencyFlags = vkhpp::DependencyFlagBits::eByRegion,
640 };
641 if (colorAttachmentFormat != vkhpp::Format::eUndefined) {
642 dependency.srcStageMask |= vkhpp::PipelineStageFlagBits::eColorAttachmentOutput;
643 dependency.dstStageMask |= vkhpp::PipelineStageFlagBits::eColorAttachmentOutput;
644 dependency.srcAccessMask |= vkhpp::AccessFlagBits::eColorAttachmentWrite;
645 }
646 if (depthAttachmentFormat != vkhpp::Format::eUndefined) {
dependency.srcStageMask |= vkhpp::PipelineStageFlagBits::eLateFragmentTests;
dependency.dstStageMask |= vkhpp::PipelineStageFlagBits::eEarlyFragmentTests;
dependency.srcAccessMask |= vkhpp::AccessFlagBits::eDepthStencilAttachmentWrite;
650 }
651
652 vkhpp::SubpassDescription subpass = {
653 .pipelineBindPoint = vkhpp::PipelineBindPoint::eGraphics,
654 .inputAttachmentCount = 0,
655 .pInputAttachments = nullptr,
656 .colorAttachmentCount = 0,
657 .pColorAttachments = nullptr,
658 .pResolveAttachments = nullptr,
659 .pDepthStencilAttachment = nullptr,
660 .pPreserveAttachments = nullptr,
661 };
662 if (colorAttachmentFormat != vkhpp::Format::eUndefined) {
663 subpass.colorAttachmentCount = 1;
664 subpass.pColorAttachments = &*colorAttachmentReference;
665 }
666 if (depthAttachmentFormat != vkhpp::Format::eUndefined) {
667 subpass.pDepthStencilAttachment = &*depthAttachmentReference;
668 }
669
670 const vkhpp::RenderPassCreateInfo renderpassCreateInfo = {
671 .attachmentCount = static_cast<uint32_t>(attachments.size()),
672 .pAttachments = attachments.data(),
673 .subpassCount = 1,
674 .pSubpasses = &subpass,
675 .dependencyCount = 1,
676 .pDependencies = &dependency,
677 };
678 auto renderpass =
679 GFXSTREAM_EXPECT_VKHPP_RV(vk.device->createRenderPassUnique(renderpassCreateInfo));
680
681 std::vector<vkhpp::ImageView> framebufferAttachments;
682 if (colorAttachment) {
683 framebufferAttachments.push_back(*colorAttachment->imageView);
684 }
685 if (depthAttachment) {
686 framebufferAttachments.push_back(*depthAttachment->imageView);
687 }
688 const vkhpp::FramebufferCreateInfo framebufferCreateInfo = {
689 .renderPass = *renderpass,
690 .attachmentCount = static_cast<uint32_t>(framebufferAttachments.size()),
691 .pAttachments = framebufferAttachments.data(),
692 .width = width,
693 .height = height,
694 .layers = 1,
695 };
696 auto framebuffer =
697 GFXSTREAM_EXPECT_VKHPP_RV(vk.device->createFramebufferUnique(framebufferCreateInfo));
698
699 return FramebufferWithAttachments{
700 .colorAttachment = std::move(colorAttachment),
701 .depthAttachment = std::move(depthAttachment),
702 .renderpass = std::move(renderpass),
703 .framebuffer = std::move(framebuffer),
704 };
705 }
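
// Illustrative usage (sketch): a color-only framebuffer, as used by
// DoFillAndRenderFromAhb() below.
//
//   auto framebuffer = GFXSTREAM_ASSERT(CreateFramebuffer(
//       vk, width, height, /*colorAttachmentFormat=*/vkhpp::Format::eR8G8B8A8Unorm));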
706
707 struct DescriptorContents {
708 uint32_t binding = 0;
709 struct Image {
710 vkhpp::ImageView imageView;
711 vkhpp::ImageLayout imageLayout;
712 vkhpp::Sampler imageSampler;
713 };
714 std::optional<Image> image;
715 };
716 struct DescriptorSetBundle {
717 vkhpp::UniqueDescriptorPool pool;
718 vkhpp::UniqueDescriptorSetLayout layout;
719 vkhpp::UniqueDescriptorSet ds;
720 };
Result<DescriptorSetBundle> CreateDescriptorSet(
722 TypicalVkTestEnvironment& vk,
723 const std::vector<vkhpp::DescriptorSetLayoutBinding>& bindings,
724 const std::vector<DescriptorContents> contents) {
725 std::unordered_map<vkhpp::DescriptorType, uint32_t> descriptorTypeToSizes;
726 for (const auto& binding : bindings) {
727 descriptorTypeToSizes[binding.descriptorType] += binding.descriptorCount;
728 }
729 std::vector<vkhpp::DescriptorPoolSize> descriptorPoolSizes;
730 for (const auto& [descriptorType, descriptorCount] : descriptorTypeToSizes) {
731 descriptorPoolSizes.push_back(vkhpp::DescriptorPoolSize{
732 .type = descriptorType,
733 .descriptorCount = descriptorCount,
734 });
735 }
736 const vkhpp::DescriptorPoolCreateInfo descriptorPoolCreateInfo = {
737 .flags = vkhpp::DescriptorPoolCreateFlagBits::eFreeDescriptorSet,
738 .maxSets = 1,
739 .poolSizeCount = static_cast<uint32_t>(descriptorPoolSizes.size()),
740 .pPoolSizes = descriptorPoolSizes.data(),
741 };
742 auto descriptorSetPool = GFXSTREAM_EXPECT_VKHPP_RV(
743 vk.device->createDescriptorPoolUnique(descriptorPoolCreateInfo));
744
745 const vkhpp::DescriptorSetLayoutCreateInfo descriptorSetLayoutCreateInfo = {
746 .bindingCount = static_cast<uint32_t>(bindings.size()),
747 .pBindings = bindings.data(),
748 };
749 auto descriptorSetLayout = GFXSTREAM_EXPECT_VKHPP_RV(
750 vk.device->createDescriptorSetLayoutUnique(descriptorSetLayoutCreateInfo));
751
752 const vkhpp::DescriptorSetLayout descriptorSetLayoutHandle = *descriptorSetLayout;
753 const vkhpp::DescriptorSetAllocateInfo descriptorSetAllocateInfo = {
754 .descriptorPool = *descriptorSetPool,
755 .descriptorSetCount = 1,
756 .pSetLayouts = &descriptorSetLayoutHandle,
757 };
758 auto descriptorSets = GFXSTREAM_EXPECT_VKHPP_RV(
759 vk.device->allocateDescriptorSetsUnique(descriptorSetAllocateInfo));
760 auto descriptorSet(std::move(descriptorSets[0]));
761
762 std::vector<std::unique_ptr<vkhpp::DescriptorImageInfo>> descriptorImageInfos;
763 std::vector<vkhpp::WriteDescriptorSet> descriptorSetWrites;
764 for (const auto& content : contents) {
765 if (content.image) {
766 descriptorImageInfos.emplace_back(new vkhpp::DescriptorImageInfo{
767 .sampler = content.image->imageSampler,
768 .imageView = content.image->imageView,
769 .imageLayout = content.image->imageLayout,
770 });
771 descriptorSetWrites.emplace_back(vkhpp::WriteDescriptorSet{
772 .dstSet = *descriptorSet,
773 .dstBinding = content.binding,
774 .dstArrayElement = 0,
775 .descriptorCount = 1,
776 .descriptorType = vkhpp::DescriptorType::eCombinedImageSampler,
777 .pImageInfo = descriptorImageInfos.back().get(),
778 });
779 } else {
return gfxstream::unexpected("Unhandled descriptor type");
782 }
783 }
784 vk.device->updateDescriptorSets(descriptorSetWrites, {});
785
786 return DescriptorSetBundle{
787 .pool = std::move(descriptorSetPool),
788 .layout = std::move(descriptorSetLayout),
789 .ds = std::move(descriptorSet),
790 };
791 }
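
// Illustrative usage (sketch): a single combined-image-sampler descriptor. The
// `sampledImage` variable is a placeholder for any ImageWithMemory created above.
//
//   auto descriptorSet = GFXSTREAM_EXPECT(CreateDescriptorSet(
//       vk,
//       /*bindings=*/{{
//           .binding = 0,
//           .descriptorType = vkhpp::DescriptorType::eCombinedImageSampler,
//           .descriptorCount = 1,
//           .stageFlags = vkhpp::ShaderStageFlagBits::eFragment,
//       }},
//       /*contents=*/{{
//           .binding = 0,
//           .image = {{
//               .imageView = *sampledImage.imageView,
//               .imageLayout = vkhpp::ImageLayout::eShaderReadOnlyOptimal,
//               .imageSampler = *sampledImage.imageSampler,
//           }},
//       }}));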
792
793 struct PipelineParams {
794 std::vector<uint32_t> vert;
795 std::vector<uint32_t> frag;
796 std::vector<DescriptorSetBundle*> descriptorSets;
797 const FramebufferWithAttachments* framebuffer = nullptr;
798 };
799 struct PipelineBundle {
800 vkhpp::UniqueShaderModule vert;
801 vkhpp::UniqueShaderModule frag;
802 vkhpp::UniquePipelineLayout pipelineLayout;
803 vkhpp::UniquePipeline pipeline;
804 };
Result<PipelineBundle> CreatePipeline(TypicalVkTestEnvironment& vk,
806 const PipelineParams& params) {
807 const vkhpp::ShaderModuleCreateInfo vertShaderCreateInfo = {
808 .codeSize = params.vert.size() * sizeof(uint32_t),
809 .pCode = params.vert.data(),
810 };
811 auto vertShaderModule =
812 GFXSTREAM_EXPECT_VKHPP_RV(vk.device->createShaderModuleUnique(vertShaderCreateInfo));
813
814 const vkhpp::ShaderModuleCreateInfo fragShaderCreateInfo = {
815 .codeSize = params.frag.size() * sizeof(uint32_t),
816 .pCode = params.frag.data(),
817 };
818 auto fragShaderModule =
819 GFXSTREAM_EXPECT_VKHPP_RV(vk.device->createShaderModuleUnique(fragShaderCreateInfo));
820
821 const std::vector<vkhpp::PipelineShaderStageCreateInfo> pipelineStages = {
822 vkhpp::PipelineShaderStageCreateInfo{
823 .stage = vkhpp::ShaderStageFlagBits::eVertex,
824 .module = *vertShaderModule,
825 .pName = "main",
826 },
827 vkhpp::PipelineShaderStageCreateInfo{
828 .stage = vkhpp::ShaderStageFlagBits::eFragment,
829 .module = *fragShaderModule,
830 .pName = "main",
831 },
832 };
833
834 std::vector<vkhpp::DescriptorSetLayout> descriptorSetLayoutHandles;
835 for (const auto* descriptorSet : params.descriptorSets) {
836 descriptorSetLayoutHandles.push_back(*descriptorSet->layout);
837 }
838 const vkhpp::PipelineLayoutCreateInfo pipelineLayoutCreateInfo = {
839 .setLayoutCount = static_cast<uint32_t>(descriptorSetLayoutHandles.size()),
840 .pSetLayouts = descriptorSetLayoutHandles.data(),
841 };
842 auto pipelineLayout = GFXSTREAM_EXPECT_VKHPP_RV(
843 vk.device->createPipelineLayoutUnique(pipelineLayoutCreateInfo));
844
845 const vkhpp::PipelineVertexInputStateCreateInfo pipelineVertexInputStateCreateInfo = {};
846 const vkhpp::PipelineInputAssemblyStateCreateInfo pipelineInputAssemblyStateCreateInfo = {
847 .topology = vkhpp::PrimitiveTopology::eTriangleList,
848 };
849 const vkhpp::PipelineViewportStateCreateInfo pipelineViewportStateCreateInfo = {
850 .viewportCount = 1,
851 .pViewports = nullptr,
852 .scissorCount = 1,
853 .pScissors = nullptr,
854 };
855 const vkhpp::PipelineRasterizationStateCreateInfo pipelineRasterStateCreateInfo = {
856 .depthClampEnable = VK_FALSE,
857 .rasterizerDiscardEnable = VK_FALSE,
858 .polygonMode = vkhpp::PolygonMode::eFill,
859 .cullMode = {},
860 .frontFace = vkhpp::FrontFace::eCounterClockwise,
861 .depthBiasEnable = VK_FALSE,
862 .depthBiasConstantFactor = 0.0f,
863 .depthBiasClamp = 0.0f,
864 .depthBiasSlopeFactor = 0.0f,
865 .lineWidth = 1.0f,
866 };
867 const vkhpp::SampleMask pipelineSampleMask = 65535;
868 const vkhpp::PipelineMultisampleStateCreateInfo pipelineMultisampleStateCreateInfo = {
869 .rasterizationSamples = vkhpp::SampleCountFlagBits::e1,
870 .sampleShadingEnable = VK_FALSE,
871 .minSampleShading = 1.0f,
872 .pSampleMask = &pipelineSampleMask,
873 .alphaToCoverageEnable = VK_FALSE,
874 .alphaToOneEnable = VK_FALSE,
875 };
876 const vkhpp::PipelineDepthStencilStateCreateInfo pipelineDepthStencilStateCreateInfo = {
877 .depthTestEnable = VK_FALSE,
878 .depthWriteEnable = VK_FALSE,
879 .depthCompareOp = vkhpp::CompareOp::eLess,
880 .depthBoundsTestEnable = VK_FALSE,
881 .stencilTestEnable = VK_FALSE,
882 .front =
883 {
884 .failOp = vkhpp::StencilOp::eKeep,
885 .passOp = vkhpp::StencilOp::eKeep,
886 .depthFailOp = vkhpp::StencilOp::eKeep,
887 .compareOp = vkhpp::CompareOp::eAlways,
888 .compareMask = 0,
889 .writeMask = 0,
890 .reference = 0,
891 },
892 .back =
893 {
894 .failOp = vkhpp::StencilOp::eKeep,
895 .passOp = vkhpp::StencilOp::eKeep,
896 .depthFailOp = vkhpp::StencilOp::eKeep,
897 .compareOp = vkhpp::CompareOp::eAlways,
898 .compareMask = 0,
899 .writeMask = 0,
900 .reference = 0,
901 },
902 .minDepthBounds = 0.0f,
903 .maxDepthBounds = 0.0f,
904 };
905 const std::vector<vkhpp::PipelineColorBlendAttachmentState> pipelineColorBlendAttachments =
906 {
907 vkhpp::PipelineColorBlendAttachmentState{
908 .blendEnable = VK_FALSE,
909 .srcColorBlendFactor = vkhpp::BlendFactor::eOne,
910 .dstColorBlendFactor = vkhpp::BlendFactor::eOneMinusSrcAlpha,
911 .colorBlendOp = vkhpp::BlendOp::eAdd,
912 .srcAlphaBlendFactor = vkhpp::BlendFactor::eOne,
913 .dstAlphaBlendFactor = vkhpp::BlendFactor::eOneMinusSrcAlpha,
914 .alphaBlendOp = vkhpp::BlendOp::eAdd,
915 .colorWriteMask =
916 vkhpp::ColorComponentFlagBits::eR | vkhpp::ColorComponentFlagBits::eG |
917 vkhpp::ColorComponentFlagBits::eB | vkhpp::ColorComponentFlagBits::eA,
918 },
919 };
920 const vkhpp::PipelineColorBlendStateCreateInfo pipelineColorBlendStateCreateInfo = {
921 .logicOpEnable = VK_FALSE,
922 .logicOp = vkhpp::LogicOp::eCopy,
923 .attachmentCount = static_cast<uint32_t>(pipelineColorBlendAttachments.size()),
924 .pAttachments = pipelineColorBlendAttachments.data(),
925 .blendConstants = {{
926 0.0f,
927 0.0f,
928 0.0f,
929 0.0f,
930 }},
931 };
932 const std::vector<vkhpp::DynamicState> pipelineDynamicStates = {
933 vkhpp::DynamicState::eViewport,
934 vkhpp::DynamicState::eScissor,
935 };
936 const vkhpp::PipelineDynamicStateCreateInfo pipelineDynamicStateCreateInfo = {
937 .dynamicStateCount = static_cast<uint32_t>(pipelineDynamicStates.size()),
938 .pDynamicStates = pipelineDynamicStates.data(),
939 };
940 const vkhpp::GraphicsPipelineCreateInfo pipelineCreateInfo = {
941 .stageCount = static_cast<uint32_t>(pipelineStages.size()),
942 .pStages = pipelineStages.data(),
943 .pVertexInputState = &pipelineVertexInputStateCreateInfo,
944 .pInputAssemblyState = &pipelineInputAssemblyStateCreateInfo,
945 .pTessellationState = nullptr,
946 .pViewportState = &pipelineViewportStateCreateInfo,
947 .pRasterizationState = &pipelineRasterStateCreateInfo,
948 .pMultisampleState = &pipelineMultisampleStateCreateInfo,
949 .pDepthStencilState = &pipelineDepthStencilStateCreateInfo,
950 .pColorBlendState = &pipelineColorBlendStateCreateInfo,
951 .pDynamicState = &pipelineDynamicStateCreateInfo,
952 .layout = *pipelineLayout,
953 .renderPass = *params.framebuffer->renderpass,
954 .subpass = 0,
955 .basePipelineHandle = VK_NULL_HANDLE,
956 .basePipelineIndex = 0,
957 };
958 auto pipeline = GFXSTREAM_EXPECT_VKHPP_RV(
959 vk.device->createGraphicsPipelineUnique({}, pipelineCreateInfo));
960
961 return PipelineBundle{
962 .vert = std::move(vertShaderModule),
963 .frag = std::move(fragShaderModule),
964 .pipelineLayout = std::move(pipelineLayout),
965 .pipeline = std::move(pipeline),
966 };
967 }
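
// Illustrative usage (sketch), matching DoFillAndRenderFromAhb() below: a
// fullscreen-triangle blit pipeline sampling through descriptor set 0.
//
//   auto pipeline = GFXSTREAM_ASSERT(CreatePipeline(vk, {
//       .vert = kFullscreenTriangleWithUVVert,
//       .frag = kBlitSampler2dFrag,
//       .descriptorSets = {&descriptorSet0},
//       .framebuffer = &framebuffer,
//   }));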
968
Result<Image> DownloadImage(TypicalVkTestEnvironment& vk, uint32_t width, uint32_t height,
970 const vkhpp::UniqueImage& image, vkhpp::ImageLayout currentLayout,
971 vkhpp::ImageLayout returnedLayout) {
972 static constexpr const VkDeviceSize kStagingBufferSize = 32 * 1024 * 1024;
973 auto stagingBuffer = GFXSTREAM_EXPECT(CreateBuffer(
974 vk, kStagingBufferSize,
975 vkhpp::BufferUsageFlagBits::eTransferDst | vkhpp::BufferUsageFlagBits::eTransferSrc,
976 vkhpp::MemoryPropertyFlagBits::eHostVisible |
977 vkhpp::MemoryPropertyFlagBits::eHostCoherent));
978
979 GFXSTREAM_EXPECT(DoCommandsImmediate(vk, [&](vkhpp::UniqueCommandBuffer& cmd) {
980 if (currentLayout != vkhpp::ImageLayout::eTransferSrcOptimal) {
981 const std::vector<vkhpp::ImageMemoryBarrier> imageMemoryBarriers = {
982 vkhpp::ImageMemoryBarrier{
983 .srcAccessMask = vkhpp::AccessFlagBits::eMemoryRead |
984 vkhpp::AccessFlagBits::eMemoryWrite,
985 .dstAccessMask = vkhpp::AccessFlagBits::eTransferRead,
986 .oldLayout = currentLayout,
987 .newLayout = vkhpp::ImageLayout::eTransferSrcOptimal,
988 .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
989 .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
990 .image = *image,
991 .subresourceRange =
992 {
993 .aspectMask = vkhpp::ImageAspectFlagBits::eColor,
994 .baseMipLevel = 0,
995 .levelCount = 1,
996 .baseArrayLayer = 0,
997 .layerCount = 1,
998 },
999 },
1000 };
1001 cmd->pipelineBarrier(
1002 /*srcStageMask=*/vkhpp::PipelineStageFlagBits::eAllCommands,
1003 /*dstStageMask=*/vkhpp::PipelineStageFlagBits::eAllCommands,
1004 /*dependencyFlags=*/{},
1005 /*memoryBarriers=*/{},
1006 /*bufferMemoryBarriers=*/{},
1007 /*imageMemoryBarriers=*/imageMemoryBarriers);
1008 }
1009
1010 const std::vector<vkhpp::BufferImageCopy> regions = {
1011 vkhpp::BufferImageCopy{
1012 .bufferOffset = 0,
1013 .bufferRowLength = 0,
1014 .bufferImageHeight = 0,
1015 .imageSubresource =
1016 {
1017 .aspectMask = vkhpp::ImageAspectFlagBits::eColor,
1018 .mipLevel = 0,
1019 .baseArrayLayer = 0,
1020 .layerCount = 1,
1021 },
1022 .imageOffset =
1023 {
1024 .x = 0,
1025 .y = 0,
1026 .z = 0,
1027 },
1028 .imageExtent =
1029 {
1030 .width = width,
1031 .height = height,
1032 .depth = 1,
1033 },
1034 },
1035 };
1036 cmd->copyImageToBuffer(*image, vkhpp::ImageLayout::eTransferSrcOptimal,
1037 *stagingBuffer.buffer, regions);
1038
1039 if (returnedLayout != vkhpp::ImageLayout::eTransferSrcOptimal) {
1040 const std::vector<vkhpp::ImageMemoryBarrier> imageMemoryBarriers = {
1041 vkhpp::ImageMemoryBarrier{
1042 .srcAccessMask = vkhpp::AccessFlagBits::eTransferRead,
1043 .dstAccessMask = vkhpp::AccessFlagBits::eMemoryRead |
1044 vkhpp::AccessFlagBits::eMemoryWrite,
1045 .oldLayout = vkhpp::ImageLayout::eTransferSrcOptimal,
1046 .newLayout = returnedLayout,
1047 .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
1048 .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
1049 .image = *image,
1050 .subresourceRange =
1051 {
1052 .aspectMask = vkhpp::ImageAspectFlagBits::eColor,
1053 .baseMipLevel = 0,
1054 .levelCount = 1,
1055 .baseArrayLayer = 0,
1056 .layerCount = 1,
1057 },
1058 },
1059 };
1060 cmd->pipelineBarrier(
1061 /*srcStageMask=*/vkhpp::PipelineStageFlagBits::eAllCommands,
1062 /*dstStageMask=*/vkhpp::PipelineStageFlagBits::eAllCommands,
1063 /*dependencyFlags=*/{},
1064 /*memoryBarriers=*/{},
1065 /*bufferMemoryBarriers=*/{},
1066 /*imageMemoryBarriers=*/imageMemoryBarriers);
1067 }
1068 return Ok{};
1069 }));
1070
1071 std::vector<uint32_t> outPixels;
1072 outPixels.resize(width * height);
1073
1074 auto* mapped = GFXSTREAM_EXPECT_VKHPP_RV(
1075 vk.device->mapMemory(*stagingBuffer.bufferMemory, 0, VK_WHOLE_SIZE));
1076 std::memcpy(outPixels.data(), mapped, sizeof(uint32_t) * outPixels.size());
1077 vk.device->unmapMemory(*stagingBuffer.bufferMemory);
1078
1079 return Image{
1080 .width = width,
1081 .height = height,
1082 .pixels = outPixels,
1083 };
1084 }
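
// Illustrative usage (sketch), as in DoFillAndRenderFromAhb() below: read back the
// framebuffer's color attachment and compare it against a golden image.
//
//   const auto actualImage = GFXSTREAM_ASSERT(DownloadImage(
//       vk, width, height, framebuffer.colorAttachment->image,
//       /*currentLayout=*/vkhpp::ImageLayout::eColorAttachmentOptimal,
//       /*returnedLayout=*/vkhpp::ImageLayout::eColorAttachmentOptimal));
//   EXPECT_THAT(AreImagesSimilar(ImageFromColor(width, height, goldenPixel), actualImage),
//               IsTrue());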
1085
void DoFillAndRenderFromAhb(uint32_t ahbFormat) {
1087 const uint32_t width = 1920;
1088 const uint32_t height = 1080;
1089 const auto goldenPixel = PixelR8G8B8A8(0, 255, 255, 255);
1090 const auto badPixel = PixelR8G8B8A8(0, 0, 0, 255);
1091
1092 // Bind to a placeholder ahb before rebinding to the real one.
1093 // This is to test the behavior of descriptors and make sure
1094 // it removes the references to the old one when overwritten.
1095 auto deletedAhb =
1096 GFXSTREAM_ASSERT(ScopedAHardwareBuffer::Allocate(*mGralloc, width, height, ahbFormat));
1097
1098 GFXSTREAM_ASSERT(FillAhb(deletedAhb, badPixel));
1099
1100 auto ahb =
1101 GFXSTREAM_ASSERT(ScopedAHardwareBuffer::Allocate(*mGralloc, width, height, ahbFormat));
1102
1103 GFXSTREAM_ASSERT(FillAhb(ahb, goldenPixel));
1104
1105 const vkhpp::PhysicalDeviceVulkan11Features deviceFeatures = {
1106 .samplerYcbcrConversion = VK_TRUE,
1107 };
1108 auto vk = GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment({
1109 .deviceExtensions = {{VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME}},
1110 .deviceCreateInfoPNext = &deviceFeatures,
1111 }));
1112
1113 auto deletedAhbImage =
1114 GFXSTREAM_ASSERT(CreateImageWithAhb(vk, deletedAhb, vkhpp::ImageUsageFlagBits::eSampled,
1115 vkhpp::ImageLayout::eShaderReadOnlyOptimal));
1116
1117 auto ahbImage =
1118 GFXSTREAM_ASSERT(CreateImageWithAhb(vk, ahb, vkhpp::ImageUsageFlagBits::eSampled,
1119 vkhpp::ImageLayout::eShaderReadOnlyOptimal));
1120
1121 auto framebuffer = GFXSTREAM_ASSERT(CreateFramebuffer(
1122 vk, width, height, /*colorAttachmentFormat=*/vkhpp::Format::eR8G8B8A8Unorm));
1123
1124 const vkhpp::Sampler ahbSamplerHandle = *ahbImage.imageSampler;
1125 auto descriptorSet0 = GFXSTREAM_ASSERT(
1126 CreateDescriptorSet(vk,
1127 /*bindings=*/
1128 {{
1129 .binding = 0,
1130 .descriptorType = vkhpp::DescriptorType::eCombinedImageSampler,
1131 .descriptorCount = 1,
1132 .stageFlags = vkhpp::ShaderStageFlagBits::eFragment,
1133 .pImmutableSamplers = &ahbSamplerHandle,
1134 }},
1135 /*writes=*/
1136 {{
1137 .binding = 0,
1138 .image = {{
1139 .imageView = *deletedAhbImage.imageView,
1140 .imageLayout = vkhpp::ImageLayout::eShaderReadOnlyOptimal,
1141 .imageSampler = *deletedAhbImage.imageSampler,
1142 }},
1143 }}));
1144
1145 auto pipeline =
1146 GFXSTREAM_ASSERT(CreatePipeline(vk, {
1147 .vert = kFullscreenTriangleWithUVVert,
1148 .frag = kBlitSampler2dFrag,
1149 .descriptorSets = {&descriptorSet0},
1150 .framebuffer = &framebuffer,
1151 }));
1152
1153 std::vector<vkhpp::WriteDescriptorSet> descriptorSetWrites;
1154 vkhpp::DescriptorImageInfo descriptorImageInfo = {
1155 .imageView = *ahbImage.imageView,
1156 .imageLayout = vkhpp::ImageLayout::eShaderReadOnlyOptimal,
1157 .sampler = *ahbImage.imageSampler,
1158 };
1159 descriptorSetWrites.emplace_back(vkhpp::WriteDescriptorSet{
1160 .dstSet = *descriptorSet0.ds,
1161 .dstBinding = 0,
1162 .dstArrayElement = 0,
1163 .descriptorCount = 1,
1164 .descriptorType = vkhpp::DescriptorType::eCombinedImageSampler,
1165 .pImageInfo = &descriptorImageInfo,
1166 });
1167 vk.device->updateDescriptorSets(descriptorSetWrites, {});
1168 deletedAhbImage = {};
1169 deletedAhb = {};
1170
1171 GFXSTREAM_ASSERT(DoCommandsImmediate(vk, [&](vkhpp::UniqueCommandBuffer& cmd) {
1172 const std::vector<vkhpp::ClearValue> renderPassBeginClearValues = {
1173 vkhpp::ClearValue{
1174 .color =
1175 {
1176 .float32 = {{
1177 1.0f,
1178 0.0f,
1179 0.0f,
1180 1.0f,
1181 }},
1182 },
1183 },
1184 };
1185 const vkhpp::RenderPassBeginInfo renderPassBeginInfo = {
1186 .renderPass = *framebuffer.renderpass,
1187 .framebuffer = *framebuffer.framebuffer,
1188 .renderArea =
1189 {
1190 .offset =
1191 {
1192 .x = 0,
1193 .y = 0,
1194 },
1195 .extent =
1196 {
1197 .width = width,
1198 .height = height,
1199 },
1200 },
1201 .clearValueCount = static_cast<uint32_t>(renderPassBeginClearValues.size()),
1202 .pClearValues = renderPassBeginClearValues.data(),
1203 };
1204 cmd->beginRenderPass(renderPassBeginInfo, vkhpp::SubpassContents::eInline);
1205 cmd->bindPipeline(vkhpp::PipelineBindPoint::eGraphics, *pipeline.pipeline);
1206 cmd->bindDescriptorSets(vkhpp::PipelineBindPoint::eGraphics, *pipeline.pipelineLayout,
1207 /*firstSet=*/0, {*descriptorSet0.ds},
1208 /*dynamicOffsets=*/{});
1209 const vkhpp::Viewport viewport = {
1210 .x = 0.0f,
1211 .y = 0.0f,
1212 .width = static_cast<float>(width),
1213 .height = static_cast<float>(height),
1214 .minDepth = 0.0f,
1215 .maxDepth = 1.0f,
1216 };
1217 cmd->setViewport(0, {viewport});
1218 const vkhpp::Rect2D scissor = {
1219 .offset =
1220 {
1221 .x = 0,
1222 .y = 0,
1223 },
1224 .extent =
1225 {
1226 .width = width,
1227 .height = height,
1228 },
1229 };
1230 cmd->setScissor(0, {scissor});
1231 cmd->draw(3, 1, 0, 0);
1232 cmd->endRenderPass();
1233 return Ok{};
1234 }));
1235
1236 const auto actualImage = GFXSTREAM_ASSERT(
1237 DownloadImage(vk, width, height, framebuffer.colorAttachment->image,
1238 /*currentLayout=*/vkhpp::ImageLayout::eColorAttachmentOptimal,
1239 /*returnedLayout=*/vkhpp::ImageLayout::eColorAttachmentOptimal));
1240
1241 const auto expectedImage = ImageFromColor(width, height, goldenPixel);
1242 EXPECT_THAT(AreImagesSimilar(expectedImage, actualImage), IsTrue());
1243 }
1244 };
1245
TEST_P(GfxstreamEnd2EndVkTest, Basic) {
1247 auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
1248 GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
1249 }
1250
TEST_P(GfxstreamEnd2EndVkTest, ImportAHB) {
1252 auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
1253 GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
1254
1255 const uint32_t width = 32;
1256 const uint32_t height = 32;
1257 auto ahb = GFXSTREAM_ASSERT(ScopedAHardwareBuffer::Allocate(
1258 *mGralloc, width, height, GFXSTREAM_AHB_FORMAT_R8G8B8A8_UNORM));
1259
1260 const VkNativeBufferANDROID imageNativeBufferInfo = {
1261 .sType = VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID,
1262 .handle = mGralloc->getNativeHandle(ahb),
1263 };
1264
1265 auto vkQueueSignalReleaseImageANDROID = PFN_vkQueueSignalReleaseImageANDROID(
1266 device->getProcAddr("vkQueueSignalReleaseImageANDROID"));
1267 ASSERT_THAT(vkQueueSignalReleaseImageANDROID, NotNull());
1268
1269 const vkhpp::ImageCreateInfo imageCreateInfo = {
1270 .pNext = &imageNativeBufferInfo,
1271 .imageType = vkhpp::ImageType::e2D,
1272 .extent.width = width,
1273 .extent.height = height,
1274 .extent.depth = 1,
1275 .mipLevels = 1,
1276 .arrayLayers = 1,
1277 .format = vkhpp::Format::eR8G8B8A8Unorm,
1278 .tiling = vkhpp::ImageTiling::eOptimal,
1279 .initialLayout = vkhpp::ImageLayout::eUndefined,
1280 .usage = vkhpp::ImageUsageFlagBits::eSampled |
1281 vkhpp::ImageUsageFlagBits::eTransferDst |
1282 vkhpp::ImageUsageFlagBits::eTransferSrc,
1283 .sharingMode = vkhpp::SharingMode::eExclusive,
1284 .samples = vkhpp::SampleCountFlagBits::e1,
1285 };
1286 auto image = device->createImageUnique(imageCreateInfo).value;
1287
1288 vkhpp::MemoryRequirements imageMemoryRequirements{};
1289 device->getImageMemoryRequirements(*image, &imageMemoryRequirements);
1290
1291 const uint32_t imageMemoryIndex = utils::getMemoryType(
1292 physicalDevice, imageMemoryRequirements, vkhpp::MemoryPropertyFlagBits::eDeviceLocal);
1293 ASSERT_THAT(imageMemoryIndex, Not(Eq(-1)));
1294
1295 const vkhpp::MemoryAllocateInfo imageMemoryAllocateInfo = {
1296 .allocationSize = imageMemoryRequirements.size,
1297 .memoryTypeIndex = imageMemoryIndex,
1298 };
1299
1300 auto imageMemory = device->allocateMemoryUnique(imageMemoryAllocateInfo).value;
1301 ASSERT_THAT(imageMemory, IsValidHandle());
1302 ASSERT_THAT(device->bindImageMemory(*image, *imageMemory, 0), IsVkSuccess());
1303
1304 const vkhpp::BufferCreateInfo bufferCreateInfo = {
1305 .size = static_cast<VkDeviceSize>(12 * 1024 * 1024),
1306 .usage = vkhpp::BufferUsageFlagBits::eTransferDst |
1307 vkhpp::BufferUsageFlagBits::eTransferSrc,
1308 .sharingMode = vkhpp::SharingMode::eExclusive,
1309 };
1310 auto stagingBuffer = device->createBufferUnique(bufferCreateInfo).value;
1311 ASSERT_THAT(stagingBuffer, IsValidHandle());
1312
1313 vkhpp::MemoryRequirements stagingBufferMemoryRequirements{};
1314 device->getBufferMemoryRequirements(*stagingBuffer, &stagingBufferMemoryRequirements);
1315
1316 const auto stagingBufferMemoryType = utils::getMemoryType(
1317 physicalDevice, stagingBufferMemoryRequirements,
1318 vkhpp::MemoryPropertyFlagBits::eHostVisible | vkhpp::MemoryPropertyFlagBits::eHostCoherent);
1319
1320 const vkhpp::MemoryAllocateInfo stagingBufferMemoryAllocateInfo = {
1321 .allocationSize = stagingBufferMemoryRequirements.size,
1322 .memoryTypeIndex = stagingBufferMemoryType,
1323 };
1324 auto stagingBufferMemory = device->allocateMemoryUnique(stagingBufferMemoryAllocateInfo).value;
1325 ASSERT_THAT(stagingBufferMemory, IsValidHandle());
1326 ASSERT_THAT(device->bindBufferMemory(*stagingBuffer, *stagingBufferMemory, 0), IsVkSuccess());
1327
1328 const vkhpp::CommandPoolCreateInfo commandPoolCreateInfo = {
1329 .queueFamilyIndex = queueFamilyIndex,
1330 };
1331
1332 auto commandPool = device->createCommandPoolUnique(commandPoolCreateInfo).value;
ASSERT_THAT(commandPool, IsValidHandle());
1334
1335 const vkhpp::CommandBufferAllocateInfo commandBufferAllocateInfo = {
1336 .level = vkhpp::CommandBufferLevel::ePrimary,
1337 .commandPool = *commandPool,
1338 .commandBufferCount = 1,
1339 };
1340 auto commandBuffers = device->allocateCommandBuffersUnique(commandBufferAllocateInfo).value;
1341 ASSERT_THAT(commandBuffers, Not(IsEmpty()));
1342 auto commandBuffer = std::move(commandBuffers[0]);
1343 ASSERT_THAT(commandBuffer, IsValidHandle());
1344
1345 const vkhpp::CommandBufferBeginInfo commandBufferBeginInfo = {
1346 .flags = vkhpp::CommandBufferUsageFlagBits::eOneTimeSubmit,
1347 };
1348 commandBuffer->begin(commandBufferBeginInfo);
1349 commandBuffer->end();
1350
1351 std::vector<vkhpp::CommandBuffer> commandBufferHandles;
1352 commandBufferHandles.push_back(*commandBuffer);
1353
1354 auto transferFence = device->createFenceUnique(vkhpp::FenceCreateInfo()).value;
ASSERT_THAT(transferFence, IsValidHandle());
1356
1357 const vkhpp::SubmitInfo submitInfo = {
1358 .commandBufferCount = static_cast<uint32_t>(commandBufferHandles.size()),
1359 .pCommandBuffers = commandBufferHandles.data(),
1360 };
1361 queue.submit(submitInfo, *transferFence);
1362
1363 auto waitResult = device->waitForFences(*transferFence, VK_TRUE, AsVkTimeout(3s));
1364 ASSERT_THAT(waitResult, IsVkSuccess());
1365
1366 int fence;
1367
1368 auto result = vkQueueSignalReleaseImageANDROID(queue, 0, nullptr, *image, &fence);
1369 ASSERT_THAT(result, Eq(VK_SUCCESS));
1370 ASSERT_THAT(fence, Not(Eq(-1)));
1371
1372 ASSERT_THAT(mSync->wait(fence, 3000), Eq(0));
1373 }
1374
TEST_P(GfxstreamEnd2EndVkTest, DeferredImportAHB) {
1376 auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
1377 GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
1378
1379 const uint32_t width = 32;
1380 const uint32_t height = 32;
1381 auto ahb = GFXSTREAM_ASSERT(ScopedAHardwareBuffer::Allocate(
1382 *mGralloc, width, height, GFXSTREAM_AHB_FORMAT_R8G8B8A8_UNORM));
1383
1384 auto vkQueueSignalReleaseImageANDROID = PFN_vkQueueSignalReleaseImageANDROID(
1385 device->getProcAddr("vkQueueSignalReleaseImageANDROID"));
1386 ASSERT_THAT(vkQueueSignalReleaseImageANDROID, NotNull());
1387
1388 const vkhpp::ImageCreateInfo imageCreateInfo = {
1389 .pNext = nullptr,
1390 .imageType = vkhpp::ImageType::e2D,
1391 .extent.width = width,
1392 .extent.height = height,
1393 .extent.depth = 1,
1394 .mipLevels = 1,
1395 .arrayLayers = 1,
1396 .format = vkhpp::Format::eR8G8B8A8Unorm,
1397 .tiling = vkhpp::ImageTiling::eOptimal,
1398 .initialLayout = vkhpp::ImageLayout::eUndefined,
1399 .usage = vkhpp::ImageUsageFlagBits::eSampled |
1400 vkhpp::ImageUsageFlagBits::eTransferDst |
1401 vkhpp::ImageUsageFlagBits::eTransferSrc,
1402 .sharingMode = vkhpp::SharingMode::eExclusive,
1403 .samples = vkhpp::SampleCountFlagBits::e1,
1404 };
1405 auto image = device->createImageUnique(imageCreateInfo).value;
1406
1407 // NOTE: Binding the VkImage to the AHB happens after the VkImage is created.
1408 const VkNativeBufferANDROID imageNativeBufferInfo = {
1409 .sType = VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID,
1410 .handle = mGralloc->getNativeHandle(ahb),
1411 };
1412
1413 const vkhpp::BindImageMemoryInfo imageBindMemoryInfo = {
1414 .pNext = &imageNativeBufferInfo,
1415 .image = *image,
1416 .memory = VK_NULL_HANDLE,
1417 .memoryOffset = 0,
1418 };
1419 ASSERT_THAT(device->bindImageMemory2({imageBindMemoryInfo}), IsVkSuccess());
1420
1421 std::vector<vkhpp::Semaphore> semaphores;
1422 int fence;
1423
1424 auto result = vkQueueSignalReleaseImageANDROID(queue, 0, nullptr, *image, &fence);
1425 ASSERT_THAT(result, Eq(VK_SUCCESS));
1426 ASSERT_THAT(fence, Not(Eq(-1)));
1427
1428 ASSERT_THAT(mSync->wait(fence, 3000), Eq(0));
1429 }
1430
TEST_P(GfxstreamEnd2EndVkTest, BlobAHBIsNotMapable) {
1432 if (GetParam().with_gl) {
1433 GTEST_SKIP()
1434 << "Skipping test, data buffers are currently only supported in Vulkan only mode.";
1435 }
1436 if (GetParam().with_features.count("VulkanUseDedicatedAhbMemoryType") == 0) {
1437 GTEST_SKIP()
1438 << "Skipping test, AHB test only makes sense with VulkanUseDedicatedAhbMemoryType.";
1439 }
1440
1441 auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
1442 GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
1443
1444 const uint32_t width = 32;
1445 const uint32_t height = 1;
1446 auto ahb = GFXSTREAM_ASSERT(
1447 ScopedAHardwareBuffer::Allocate(*mGralloc, width, height, GFXSTREAM_AHB_FORMAT_BLOB));
1448
1449 const vkhpp::ExternalMemoryBufferCreateInfo externalMemoryBufferCreateInfo = {
1450 .handleTypes = vkhpp::ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID,
1451 };
1452 const vkhpp::BufferCreateInfo bufferCreateInfo = {
1453 .pNext = &externalMemoryBufferCreateInfo,
1454 .size = width,
1455 .usage = vkhpp::BufferUsageFlagBits::eTransferDst |
1456 vkhpp::BufferUsageFlagBits::eTransferSrc |
1457 vkhpp::BufferUsageFlagBits::eVertexBuffer,
1458 .sharingMode = vkhpp::SharingMode::eExclusive,
1459 };
1460 auto buffer = device->createBufferUnique(bufferCreateInfo).value;
1461 ASSERT_THAT(buffer, IsValidHandle());
1462
1463 auto vkGetAndroidHardwareBufferPropertiesANDROID =
1464 reinterpret_cast<PFN_vkGetAndroidHardwareBufferPropertiesANDROID>(
1465 device->getProcAddr("vkGetAndroidHardwareBufferPropertiesANDROID"));
1466 ASSERT_THAT(vkGetAndroidHardwareBufferPropertiesANDROID, NotNull());
1467
1468 VkAndroidHardwareBufferPropertiesANDROID bufferProperties = {
1469 .sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID,
1470 .pNext = nullptr,
1471 };
1472 ASSERT_THAT(vkGetAndroidHardwareBufferPropertiesANDROID(*device, ahb, &bufferProperties),
1473 Eq(VK_SUCCESS));
1474
1475 const vkhpp::MemoryRequirements bufferMemoryRequirements{
1476 .size = bufferProperties.allocationSize,
1477 .alignment = 0,
1478 .memoryTypeBits = bufferProperties.memoryTypeBits,
1479 };
1480
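// With VulkanUseDedicatedAhbMemoryType, the memory types usable for this BLOB AHB are
// expected to be device-local only, so none of them should advertise HOST_VISIBLE.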
1481 const auto memoryProperties = physicalDevice.getMemoryProperties();
1482 for (uint32_t i = 0; i < memoryProperties.memoryTypeCount; i++) {
1483 if (!(bufferMemoryRequirements.memoryTypeBits & (1 << i))) {
1484 continue;
1485 }
1486
1487 const auto memoryPropertyFlags = memoryProperties.memoryTypes[i].propertyFlags;
1488 EXPECT_THAT(memoryPropertyFlags & vkhpp::MemoryPropertyFlagBits::eHostVisible,
1489 Ne(vkhpp::MemoryPropertyFlagBits::eHostVisible));
1490 }
1491
1492 const auto bufferMemoryType = utils::getMemoryType(physicalDevice, bufferMemoryRequirements,
1493 vkhpp::MemoryPropertyFlagBits::eDeviceLocal);
1494 ASSERT_THAT(bufferMemoryType, Ne(-1));
1495
1496 const vkhpp::ImportAndroidHardwareBufferInfoANDROID importHardwareBufferInfo = {
1497 .buffer = ahb,
1498 };
1499 const vkhpp::MemoryAllocateInfo bufferMemoryAllocateInfo = {
1500 .pNext = &importHardwareBufferInfo,
1501 .allocationSize = bufferMemoryRequirements.size,
1502 .memoryTypeIndex = bufferMemoryType,
1503 };
1504 auto bufferMemory = device->allocateMemoryUnique(bufferMemoryAllocateInfo).value;
1505 ASSERT_THAT(bufferMemory, IsValidHandle());
1506
1507 ASSERT_THAT(device->bindBufferMemory(*buffer, *bufferMemory, 0), IsVkSuccess());
1508 }
1509
1510 TEST_P(GfxstreamEnd2EndVkTest, HostMemory) {
1511 static constexpr const vkhpp::DeviceSize kSize = 16 * 1024;
1512
1513 auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
1514 GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
1515
1516 uint32_t hostMemoryTypeIndex = -1;
1517 const auto memoryProperties = physicalDevice.getMemoryProperties();
1518 for (uint32_t i = 0; i < memoryProperties.memoryTypeCount; i++) {
1519 const vkhpp::MemoryType& memoryType = memoryProperties.memoryTypes[i];
1520 if (memoryType.propertyFlags & vkhpp::MemoryPropertyFlagBits::eHostVisible) {
1521 hostMemoryTypeIndex = i;
1522 }
1523 }
1524 if (hostMemoryTypeIndex == -1) {
1525 GTEST_SKIP() << "Skipping test due to no host visible memory type.";
1526 return;
1527 }
1528
1529 const vkhpp::MemoryAllocateInfo memoryAllocateInfo = {
1530 .allocationSize = kSize,
1531 .memoryTypeIndex = hostMemoryTypeIndex,
1532 };
1533 auto memory = device->allocateMemoryUnique(memoryAllocateInfo).value;
1534 ASSERT_THAT(memory, IsValidHandle());
1535
1536 void* mapped = nullptr;
1537
1538 auto mapResult = device->mapMemory(*memory, 0, VK_WHOLE_SIZE, vkhpp::MemoryMapFlags{}, &mapped);
1539 ASSERT_THAT(mapResult, IsVkSuccess());
1540 ASSERT_THAT(mapped, NotNull());
1541
1542 auto* bytes = reinterpret_cast<uint8_t*>(mapped);
1543 std::memset(bytes, 0xFF, kSize);
1544
1545 const vkhpp::MappedMemoryRange range = {
1546 .memory = *memory,
1547 .offset = 0,
1548 .size = kSize,
1549 };
1550 device->flushMappedMemoryRanges({range});
1551 device->invalidateMappedMemoryRanges({range});
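// Flushing and invalidating are required for correctness only if the selected memory type
// is not HOST_COHERENT; for coherent types they are effectively no-ops. Either way, the
// 0xFF pattern written above is expected to survive both calls.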
1552
1553 for (uint32_t i = 0; i < kSize; ++i) {
1554 EXPECT_THAT(bytes[i], Eq(0xFF));
1555 }
1556 }
1557
1558 TEST_P(GfxstreamEnd2EndVkTest, GetPhysicalDeviceProperties2) {
1559 auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
1560 GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
1561
1562 auto props1 = physicalDevice.getProperties();
1563 auto props2 = physicalDevice.getProperties2();
1564
1565 EXPECT_THAT(props1.vendorID, Eq(props2.properties.vendorID));
1566 EXPECT_THAT(props1.deviceID, Eq(props2.properties.deviceID));
1567 }
1568
1569 TEST_P(GfxstreamEnd2EndVkTest, GetPhysicalDeviceFeatures2KHR) {
1570 auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
1571 GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
1572
1573 auto features1 = physicalDevice.getFeatures();
1574 auto features2 = physicalDevice.getFeatures2();
1575 EXPECT_THAT(features1.robustBufferAccess, Eq(features2.features.robustBufferAccess));
1576 }
1577
1578 TEST_P(GfxstreamEnd2EndVkTest, GetPhysicalDeviceImageFormatProperties2KHR) {
1579 auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
1580 GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
1581
1582 const vkhpp::PhysicalDeviceImageFormatInfo2 imageFormatInfo = {
1583 .format = vkhpp::Format::eR8G8B8A8Unorm,
1584 .type = vkhpp::ImageType::e2D,
1585 .tiling = vkhpp::ImageTiling::eOptimal,
1586 .usage = vkhpp::ImageUsageFlagBits::eSampled,
1587 };
1588 const auto properties =
1589 GFXSTREAM_ASSERT_VKHPP_RV(physicalDevice.getImageFormatProperties2(imageFormatInfo));
1590 EXPECT_THAT(properties.imageFormatProperties.maxExtent.width, Ge(1));
1591 EXPECT_THAT(properties.imageFormatProperties.maxExtent.height, Ge(1));
1592 EXPECT_THAT(properties.imageFormatProperties.maxExtent.depth, Ge(1));
1593 }
1594
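// Converts a vector of vkhpp unique handles into the corresponding raw handles, e.g. for
// passing to APIs such as vkFreeDescriptorSets that take arrays of non-owning handles.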
1595 template <typename VkhppUniqueHandleType,
1596 typename VkhppHandleType = typename VkhppUniqueHandleType::element_type>
1597 std::vector<VkhppHandleType> AsHandles(const std::vector<VkhppUniqueHandleType>& elements) {
1598 std::vector<VkhppHandleType> ret;
1599 ret.reserve(elements.size());
1600 for (const auto& e : elements) {
1601 ret.push_back(*e);
1602 }
1603 return ret;
1604 }
1605
1606 struct DescriptorBundle {
1607 vkhpp::UniqueDescriptorPool descriptorPool;
1608 vkhpp::UniqueDescriptorSetLayout descriptorSetLayout;
1609 std::vector<vkhpp::UniqueDescriptorSet> descriptorSets;
1610 };
1611
1612 Result<Ok> ReallocateDescriptorBundleSets(vkhpp::Device device, uint32_t count,
1613 DescriptorBundle* bundle) {
1614 if (!bundle->descriptorSetLayout) {
1615 return gfxstream::unexpected("Invalid descriptor set layout");
1616 }
1617
1618 const std::vector<vkhpp::DescriptorSetLayout> descriptorSetLayouts(count, *bundle->descriptorSetLayout);
1619 const vkhpp::DescriptorSetAllocateInfo descriptorSetAllocateInfo = {
1620 .descriptorPool = *bundle->descriptorPool,
1621 .descriptorSetCount = count,
1622 .pSetLayouts = descriptorSetLayouts.data(),
1623 };
1624 auto descriptorSets =
1625 GFXSTREAM_EXPECT_VKHPP_RV(device.allocateDescriptorSetsUnique(descriptorSetAllocateInfo));
1626 bundle->descriptorSets = std::move(descriptorSets);
1627 return Ok{};
1628 }
1629
1630 Result<DescriptorBundle> AllocateDescriptorBundle(vkhpp::Device device, uint32_t count) {
1631 const vkhpp::DescriptorPoolSize descriptorPoolSize = {
1632 .type = vkhpp::DescriptorType::eUniformBuffer,
1633 .descriptorCount = 1 * count,
1634 };
1635 const vkhpp::DescriptorPoolCreateInfo descriptorPoolCreateInfo = {
1636 .flags = vkhpp::DescriptorPoolCreateFlagBits::eFreeDescriptorSet,
1637 .maxSets = count,
1638 .poolSizeCount = 1,
1639 .pPoolSizes = &descriptorPoolSize,
1640 };
1641 auto descriptorPool =
1642 GFXSTREAM_EXPECT_VKHPP_RV(device.createDescriptorPoolUnique(descriptorPoolCreateInfo));
1643
1644 const vkhpp::DescriptorSetLayoutBinding descriptorSetBinding = {
1645 .binding = 0,
1646 .descriptorType = vkhpp::DescriptorType::eUniformBuffer,
1647 .descriptorCount = 1,
1648 .stageFlags = vkhpp::ShaderStageFlagBits::eVertex,
1649 };
1650 const vkhpp::DescriptorSetLayoutCreateInfo descriptorSetLayoutInfo = {
1651 .bindingCount = 1,
1652 .pBindings = &descriptorSetBinding,
1653 };
1654 auto descriptorSetLayout =
1655 GFXSTREAM_EXPECT_VKHPP_RV(device.createDescriptorSetLayoutUnique(descriptorSetLayoutInfo));
1656
1657 DescriptorBundle bundle = {
1658 .descriptorPool = std::move(descriptorPool),
1659 .descriptorSetLayout = std::move(descriptorSetLayout),
1660 };
1661 GFXSTREAM_EXPECT(ReallocateDescriptorBundleSets(device, count, &bundle));
1662 return std::move(bundle);
1663 }
1664
1665 // Tests creating a bunch of descriptor sets and freeing them in several ways:
1666 // 1. Via vkFreeDescriptorSets directly
1667 // 2. Via vkResetDescriptorPool
1668 // 3. Via vkDestroyDescriptorPool
1669 // 4. Via vkResetDescriptorPool followed by double frees in vkFreeDescriptorSets
1670 // 5. Via vkResetDescriptorPool, creating more sets, and then freeing them via
1671 //    vkFreeDescriptorSets
1672 // (because the vkFree* APIs are expected to never fail)
1673 // https://github.com/KhronosGroup/Vulkan-Docs/issues/1070
1674 TEST_P(GfxstreamEnd2EndVkTest, DescriptorSetAllocFree) {
1675 constexpr const uint32_t kNumSets = 4;
1676
1677 auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
1678 GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
1679
1680 auto bundle = GFXSTREAM_ASSERT(AllocateDescriptorBundle(*device, kNumSets));
1681
1682 auto descriptorSetHandles = AsHandles(bundle.descriptorSets);
1683 EXPECT_THAT(device->freeDescriptorSets(*bundle.descriptorPool, kNumSets, descriptorSetHandles.data()), IsVkSuccess());
1684
1685 // The double free should also work
1686 EXPECT_THAT(device->freeDescriptorSets(*bundle.descriptorPool, kNumSets, descriptorSetHandles.data()), IsVkSuccess());
1687
1688 // Alloc/free again should also work
1689 GFXSTREAM_ASSERT(ReallocateDescriptorBundleSets(*device, kNumSets, &bundle));
1690
1691 descriptorSetHandles = AsHandles(bundle.descriptorSets);
1692 EXPECT_THAT(device->freeDescriptorSets(*bundle.descriptorPool, kNumSets, descriptorSetHandles.data()), IsVkSuccess());
1693 }
1694
1695 TEST_P(GfxstreamEnd2EndVkTest, DescriptorSetAllocFreeReset) {
1696 constexpr const uint32_t kNumSets = 4;
1697
1698 auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
1699 GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
1700
1701 auto bundle = GFXSTREAM_ASSERT(AllocateDescriptorBundle(*device, kNumSets));
1702
1703 device->resetDescriptorPool(*bundle.descriptorPool);
1704
1705 // The double free should also work
1706 auto descriptorSetHandles = AsHandles(bundle.descriptorSets);
1707 EXPECT_THAT(device->freeDescriptorSets(*bundle.descriptorPool, kNumSets, descriptorSetHandles.data()), IsVkSuccess());
1708
1709 // Alloc/reset/free again should also work
1710 GFXSTREAM_ASSERT(ReallocateDescriptorBundleSets(*device, kNumSets, &bundle));
1711
1712 device->resetDescriptorPool(*bundle.descriptorPool);
1713
1714 descriptorSetHandles = AsHandles(bundle.descriptorSets);
1715 EXPECT_THAT(device->freeDescriptorSets(*bundle.descriptorPool, kNumSets, descriptorSetHandles.data()), IsVkSuccess());
1716 }
1717
1718 TEST_P(GfxstreamEnd2EndVkTest, DISABLED_DescriptorSetAllocFreeDestroy) {
1719 constexpr const uint32_t kNumSets = 4;
1720
1721 auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
1722 GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
1723
1724 auto bundle = GFXSTREAM_ASSERT(AllocateDescriptorBundle(*device, kNumSets));
1725
1726 device->destroyDescriptorPool(*bundle.descriptorPool);
1727
1728 // The double free should also work
1729 auto descriptorSetHandles = AsHandles(bundle.descriptorSets);
1730 EXPECT_THAT(device->freeDescriptorSets(*bundle.descriptorPool, kNumSets, descriptorSetHandles.data()), IsVkSuccess());
1731 }
1732
1733 TEST_P(GfxstreamEnd2EndVkTest, MultiThreadedShutdown) {
1734 constexpr const int kNumIterations = 20;
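// Each iteration creates a fresh Vulkan test environment plus several guest threads and
// then tears everything down, so the multi-threaded shutdown ordering described in the
// comment inside the thread body below is exercised repeatedly.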
1735 for (int i = 0; i < kNumIterations; i++) {
1736 auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
1737 GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
1738
1739 const vkhpp::BufferCreateInfo bufferCreateInfo = {
1740 .size = 1024,
1741 .usage = vkhpp::BufferUsageFlagBits::eTransferSrc,
1742 };
1743
1744 // TODO: switch to std::barrier with arrive_and_wait().
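// A minimal sketch of that TODO, assuming C++20 <barrier> is available in this toolchain:
//
//   std::barrier threadsReadyBarrier(kNumThreads);
//   // ...inside each thread, instead of the atomic-counter spin below:
//   threadsReadyBarrier.arrive_and_wait();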
1745 std::atomic_int threadsReady{0};
1746 std::vector<std::thread> threads;
1747
1748 constexpr const int kNumThreads = 5;
1749 for (int t = 0; t < kNumThreads; t++) {
1750 threads.emplace_back([&, this]() {
1751 // Perform some work to ensure host RenderThread started.
1752 auto buffer1 = device->createBufferUnique(bufferCreateInfo).value;
1753
1754 ++threadsReady;
1755 while (threadsReady.load() != kNumThreads) {
1756 }
1757
1758 // Sleep a little, which is hopefully enough time for the corresponding
1759 // host ASG RenderThreads to go to sleep waiting for a WAKEUP via a
1760 // GFXSTREAM_CONTEXT_PING.
1761 std::this_thread::sleep_for(std::chrono::milliseconds(100));
1762
1763 auto buffer2 = device->createBufferUnique(bufferCreateInfo).value;
1764
1765 // 2 vkDestroyBuffer() calls happen here with the destruction of `buffer1`
1766 // and `buffer2`. vkDestroy*() calls are async (return `void`) and the
1767 // guest thread continues execution without waiting for the command to
1768 // complete on the host.
1769 //
1770 // The guest ASG and corresponding virtio gpu resource will also be
1771 // destructed here as a part of the thread_local HostConnection being
1772 // destructed.
1773 //
1774 // Note: Vulkan commands are given a sequence number in order to ensure that
1775 // commands from multi-threaded guest Vulkan apps are executed in order on the
1776 // host. Gfxstream's host Vulkan decoders will spin loop waiting for their turn to
1777 // process their next command.
1778 //
1779 // With all of the above, a deadlock would previously occur with the following
1780 // sequence:
1781 //
1782 // T1: Host-RenderThread-1: <sleeping waiting for wakeup>
1783 //
1784 // T2: Host-RenderThread-2: <sleeping waiting for wakeup>
1785 //
1786 // T3: Guest-Thread-1: vkDestroyBuffer() called,
1787 // VkEncoder grabs sequence-number-10,
1788 // writes sequence-number-10 into ASG-1 via resource-1
1789 //
1790 // T4: Guest-Thread-2: vkDestroyBuffer() called,
1791 // VkEncoder grabs sequence-number-11,
1792 // writes into ASG-2 via resource-2
1793 //
1794 // T5: Guest-Thread-2: ASG-2 sends a VIRTIO_GPU_CMD_SUBMIT_3D with
1795 // GFXSTREAM_CONTEXT_PING on ASG-resource-2
1796 //
1797 // T6: Guest-Thread-2: guest thread finishes,
1798 // ASG-2 destructor destroys the virtio-gpu resource used,
1799 // destruction sends VIRTIO_GPU_CMD_RESOURCE_UNREF on
1800 // resource-2
1801 //
1802 // T7: Guest-Thread-1: ASG-1 sends VIRTIO_GPU_CMD_SUBMIT_3D with
1803 // GFXSTREAM_CONTEXT_PING on ASG-resource-1
1804 //
1805 // T8: Host-Virtio-Gpu-Thread: performs VIRTIO_GPU_CMD_SUBMIT_3D from T5,
1806 // pings ASG-2 which wakes up Host-RenderThread-2
1807 //
1808 // T9: Host-RenderThread-2: woken from T8,
1809 // reads sequence-number-11 from ASG-2,
1810 // spin looping waiting for sequence-number-10 to execute
1811 //
1812 // T10: Host-Virtio-Gpu-Thread: performs VIRTIO_GPU_CMD_RESOURCE_UNREF for
1813 // resource-2 from T6,
1814 // resource-2 is used by ASG-2 / Host-RenderThread-2
1815 // waits for Host-RenderThread-2 to finish
1816 //
1817 // DEADLOCKED HERE:
1818 //
1819 // * Host-Virtio-GpuThread is waiting for Host-RenderThread-2 to finish before
1820 // it can finish destroying resource-2
1821 //
1822 // * Host-RenderThread-2 is waiting for Host-RenderThread-1 to execute
1823 // sequence-number-10
1824 //
1825 // * Host-RenderThread-1 is asleep waiting for a GFXSTREAM_CONTEXT_PING
1826 // from Host-Virtio-GpuThread
1827 });
1828 }
1829
1830 for (auto& thread : threads) {
1831 thread.join();
1832 }
1833 }
1834 }
1835
1836 TEST_P(GfxstreamEnd2EndVkTest, DeviceCreateWithDeviceGroup) {
1837 auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
1838 GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
1839
1840 const vkhpp::DeviceGroupDeviceCreateInfo deviceGroupDeviceCreateInfo = {
1841 .physicalDeviceCount = 1,
1842 .pPhysicalDevices = &physicalDevice,
1843 };
1844
1845 const float queuePriority = 1.0f;
1846 const vkhpp::DeviceQueueCreateInfo deviceQueueCreateInfo = {
1847 .queueFamilyIndex = 0,
1848 .queueCount = 1,
1849 .pQueuePriorities = &queuePriority,
1850 };
1851 const vkhpp::DeviceCreateInfo deviceCreateInfo = {
1852 .pNext = &deviceGroupDeviceCreateInfo,
1853 .pQueueCreateInfos = &deviceQueueCreateInfo,
1854 .queueCreateInfoCount = 1,
1855 };
1856 auto device2 = GFXSTREAM_ASSERT_VKHPP_RV(physicalDevice.createDeviceUnique(deviceCreateInfo));
1857 ASSERT_THAT(device2, IsValidHandle());
1858 }
1859
1860 TEST_P(GfxstreamEnd2EndVkTest, AcquireImageAndroidWithFence) {
1861 DoAcquireImageAndroidWithSync(/*withFence=*/true, /*withSemaphore=*/false);
1862 }
1863
1864 TEST_P(GfxstreamEnd2EndVkTest, AcquireImageAndroidWithSemaphore) {
1865 DoAcquireImageAndroidWithSync(/*withFence=*/false, /*withSemaphore=*/true);
1866 }
1867
1868 TEST_P(GfxstreamEnd2EndVkTest, AcquireImageAndroidWithFenceAndSemaphore) {
1869 DoAcquireImageAndroidWithSync(/*withFence=*/true, /*withSemaphore=*/true);
1870 }
1871
1872 VKAPI_ATTR void VKAPI_CALL MemoryReportCallback(const VkDeviceMemoryReportCallbackDataEXT*, void*) {
1873 // Unused
1874 }
1875
1876 TEST_P(GfxstreamEnd2EndVkTest, DeviceMemoryReport) {
1877 int userdata = 1;
1878 vkhpp::DeviceDeviceMemoryReportCreateInfoEXT deviceDeviceMemoryReportInfo = {
1879 .pfnUserCallback = &MemoryReportCallback,
1880 .pUserData = &userdata,
1881 };
1882
1883 auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
1884 GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment({
1885 .deviceExtensions = {{
1886 VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME,
1887 }},
1888 .deviceCreateInfoPNext = &deviceDeviceMemoryReportInfo,
1889 }));
1890
1891 const vkhpp::MemoryAllocateInfo memoryAllocateInfo = {
1892 .allocationSize = 1024,
1893 .memoryTypeIndex = 0,
1894 };
1895 auto memory = device->allocateMemoryUnique(memoryAllocateInfo).value;
1896 ASSERT_THAT(memory, IsValidHandle());
1897 }
1898
1899 TEST_P(GfxstreamEnd2EndVkTest, DescriptorUpdateTemplateWithWrapping) {
1900 auto vk = GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
1901 auto& [instance, physicalDevice, device, queue, queueFamilyIndex] = vk;
1902
1903 const VkDeviceSize kBufferSize = 1024;
1904 auto buffer = GFXSTREAM_ASSERT(CreateBuffer(
1905 vk, kBufferSize,
1906 vkhpp::BufferUsageFlagBits::eTransferDst |
1907 vkhpp::BufferUsageFlagBits::eTransferSrc |
1908 vkhpp::BufferUsageFlagBits::eUniformBuffer,
1909 vkhpp::MemoryPropertyFlagBits::eHostVisible |
1910 vkhpp::MemoryPropertyFlagBits::eHostCoherent));
1911
1912 const std::vector<VkDescriptorBufferInfo> descriptorInfo = {
1913 VkDescriptorBufferInfo{
1914 .buffer = *buffer.buffer,
1915 .offset = 0,
1916 .range = kBufferSize,
1917 },
1918 VkDescriptorBufferInfo{
1919 .buffer = *buffer.buffer,
1920 .offset = 0,
1921 .range = kBufferSize,
1922 },
1923 VkDescriptorBufferInfo{
1924 .buffer = *buffer.buffer,
1925 .offset = 0,
1926 .range = kBufferSize,
1927 },
1928 VkDescriptorBufferInfo{
1929 .buffer = *buffer.buffer,
1930 .offset = 0,
1931 .range = kBufferSize,
1932 },
1933 };
1934
1935 const std::vector<vkhpp::DescriptorPoolSize> descriptorPoolSizes = {
1936 {
1937 .type = vkhpp::DescriptorType::eUniformBuffer,
1938 .descriptorCount = 4,
1939 },
1940 };
1941 const vkhpp::DescriptorPoolCreateInfo descriptorPoolCreateInfo = {
1942 .flags = vkhpp::DescriptorPoolCreateFlagBits::eFreeDescriptorSet,
1943 .maxSets = 1,
1944 .poolSizeCount = static_cast<uint32_t>(descriptorPoolSizes.size()),
1945 .pPoolSizes = descriptorPoolSizes.data(),
1946 };
1947 auto descriptorPool =
1948 GFXSTREAM_ASSERT_VKHPP_RV(device->createDescriptorPoolUnique(descriptorPoolCreateInfo));
1949
1950 const std::vector<vkhpp::DescriptorSetLayoutBinding> descriptorSetBindings = {
1951 {
1952 .binding = 0,
1953 .descriptorType = vkhpp::DescriptorType::eUniformBuffer,
1954 .descriptorCount = 1,
1955 .stageFlags = vkhpp::ShaderStageFlagBits::eVertex,
1956 },
1957 {
1958 .binding = 1,
1959 .descriptorType = vkhpp::DescriptorType::eUniformBuffer,
1960 .descriptorCount = 1,
1961 .stageFlags = vkhpp::ShaderStageFlagBits::eVertex,
1962 },
1963 {
1964 .binding = 2,
1965 .descriptorType = vkhpp::DescriptorType::eUniformBuffer,
1966 .descriptorCount = 1,
1967 .stageFlags = vkhpp::ShaderStageFlagBits::eVertex,
1968 },
1969 {
1970 .binding = 3,
1971 .descriptorType = vkhpp::DescriptorType::eUniformBuffer,
1972 .descriptorCount = 1,
1973 .stageFlags = vkhpp::ShaderStageFlagBits::eVertex,
1974 },
1975 };
1976 const vkhpp::DescriptorSetLayoutCreateInfo descriptorSetLayoutInfo = {
1977 .bindingCount = static_cast<uint32_t>(descriptorSetBindings.size()),
1978 .pBindings = descriptorSetBindings.data(),
1979 };
1980 auto descriptorSetLayout =
1981 GFXSTREAM_ASSERT_VKHPP_RV(device->createDescriptorSetLayoutUnique(descriptorSetLayoutInfo));
1982
1983 const std::vector<vkhpp::DescriptorSetLayout> descriptorSetLayouts = {*descriptorSetLayout};
1984 const vkhpp::DescriptorSetAllocateInfo descriptorSetAllocateInfo = {
1985 .descriptorPool = *descriptorPool,
1986 .descriptorSetCount = static_cast<uint32_t>(descriptorSetLayouts.size()),
1987 .pSetLayouts = descriptorSetLayouts.data(),
1988 };
1989 auto descriptorSets =
1990 GFXSTREAM_ASSERT_VKHPP_RV(device->allocateDescriptorSetsUnique(descriptorSetAllocateInfo));
1991 auto descriptorSet = std::move(descriptorSets[0]);
1992
1993 const vkhpp::PipelineLayoutCreateInfo pipelineLayoutCreateInfo = {
1994 .setLayoutCount = static_cast<uint32_t>(descriptorSetLayouts.size()),
1995 .pSetLayouts = descriptorSetLayouts.data(),
1996 };
1997 auto pipelineLayout =
1998 GFXSTREAM_ASSERT_VKHPP_RV(device->createPipelineLayoutUnique(pipelineLayoutCreateInfo));
1999
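// A single template entry at binding 0 with descriptorCount = 4: per the Vulkan spec,
// consecutive descriptor updates that run past the end of a binding spill over into the
// numerically next bindings, so this one entry should update bindings 0 through 3. That
// spill-over is the "wrapping" behavior this test exercises.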
2000 const std::vector<vkhpp::DescriptorUpdateTemplateEntry> descriptorUpdateEntries = {
2001 {
2002 .dstBinding = 0,
2003 .dstArrayElement = 0,
2004 .descriptorCount = 4,
2005 .descriptorType = vkhpp::DescriptorType::eUniformBuffer,
2006 .offset = 0,
2007 .stride = sizeof(VkDescriptorBufferInfo),
2008 },
2009 };
2010 const vkhpp::DescriptorUpdateTemplateCreateInfo descriptorUpdateTemplateCreateInfo = {
2011 .descriptorUpdateEntryCount = static_cast<uint32_t>(descriptorUpdateEntries.size()),
2012 .pDescriptorUpdateEntries = descriptorUpdateEntries.data(),
2013 .descriptorSetLayout = *descriptorSetLayout,
2014 .pipelineBindPoint = vkhpp::PipelineBindPoint::eGraphics,
2015 .pipelineLayout = *pipelineLayout,
2016 .set = 0,
2017 };
2018 auto descriptorUpdateTemplate = GFXSTREAM_ASSERT_VKHPP_RV(
2019 device->createDescriptorUpdateTemplateUnique(descriptorUpdateTemplateCreateInfo));
2020
2021 device->updateDescriptorSetWithTemplate(*descriptorSet, *descriptorUpdateTemplate,
2022 (const void*)descriptorInfo.data());
2023
2024 // Gfxstream optimizes descriptor set updates by batching updates until there is an
2025 // actual use in a command buffer. Try to force that flush by binding the descriptor
2026 // set here:
2027 GFXSTREAM_ASSERT(DoCommandsImmediate(vk,
2028 [&](vkhpp::UniqueCommandBuffer& cmd) {
2029 cmd->bindDescriptorSets(vkhpp::PipelineBindPoint::eGraphics, *pipelineLayout,
2030 /*firstSet=*/0, {*descriptorSet},
2031 /*dynamicOffsets=*/{});
2032 return Ok{};
2033 }));
2034 }
2035
2036 TEST_P(GfxstreamEnd2EndVkTest, MultiThreadedVkMapMemory) {
2037 auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
2038 GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
2039
2040 static constexpr const vkhpp::DeviceSize kSize = 1024;
2041 const vkhpp::BufferCreateInfo bufferCreateInfo = {
2042 .size = kSize,
2043 .usage = vkhpp::BufferUsageFlagBits::eTransferSrc,
2044 };
2045 auto buffer = device->createBufferUnique(bufferCreateInfo).value;
2046
2047 vkhpp::MemoryRequirements bufferMemoryRequirements{};
2048 device->getBufferMemoryRequirements(*buffer, &bufferMemoryRequirements);
2049
2050 const uint32_t bufferMemoryIndex = utils::getMemoryType(
2051 physicalDevice, bufferMemoryRequirements,
2052 vkhpp::MemoryPropertyFlagBits::eHostVisible | vkhpp::MemoryPropertyFlagBits::eHostCoherent);
2053 if (bufferMemoryIndex == -1) {
2054 GTEST_SKIP() << "Skipping test due to no memory type with HOST_VISIBLE | HOST_COHERENT.";
2055 }
2056
2057 std::vector<std::thread> threads;
2058 std::atomic_int threadsReady{0};
2059
2060 constexpr const int kNumThreads = 2;
2061 for (int t = 0; t < kNumThreads; t++) {
2062 threads.emplace_back([&, this]() {
2063 // Perform some work to ensure host RenderThread started.
2064 auto buffer2 = device->createBufferUnique(bufferCreateInfo).value;
2065 ASSERT_THAT(buffer2, IsValidHandle());
2066
2067 ++threadsReady;
2068 while (threadsReady.load() != kNumThreads) {
2069 }
2070
2071 constexpr const int kNumIterations = 100;
2072 for (int i = 0; i < kNumIterations; i++) {
2073 auto buffer3 = device->createBufferUnique(bufferCreateInfo).value;
2074 ASSERT_THAT(buffer3, IsValidHandle());
2075
2076 const vkhpp::MemoryAllocateInfo buffer3MemoryAllocateInfo = {
2077 .allocationSize = bufferMemoryRequirements.size,
2078 .memoryTypeIndex = bufferMemoryIndex,
2079 };
2080 auto buffer3Memory = device->allocateMemoryUnique(buffer3MemoryAllocateInfo).value;
2081 ASSERT_THAT(buffer3Memory, IsValidHandle());
2082
2083 ASSERT_THAT(device->bindBufferMemory(*buffer3, *buffer3Memory, 0), IsVkSuccess());
2084
2085 void* mapped = nullptr;
2086 ASSERT_THAT(device->mapMemory(*buffer3Memory, 0, VK_WHOLE_SIZE,
2087 vkhpp::MemoryMapFlags{}, &mapped),
2088 IsVkSuccess());
2089 ASSERT_THAT(mapped, NotNull());
2090
2091 device->unmapMemory(*buffer3Memory);
2092 }
2093 });
2094 }
2095
2096 for (auto& thread : threads) {
2097 thread.join();
2098 }
2099 }
2100
2101 TEST_P(GfxstreamEnd2EndVkTest, MultiThreadedResetCommandBuffer) {
2102 auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
2103 GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
2104
2105 static constexpr const vkhpp::DeviceSize kSize = 1024;
2106 const vkhpp::BufferCreateInfo bufferCreateInfo = {
2107 .size = kSize,
2108 .usage = vkhpp::BufferUsageFlagBits::eTransferSrc,
2109 };
2110
2111 static std::mutex queue_mutex;
2112 std::vector<std::thread> threads;
2113 std::atomic_int threadsReady{0};
2114
2115 constexpr const int kNumThreads = 10;
2116 for (int t = 0; t < kNumThreads; t++) {
2117 threads.emplace_back([&, this]() {
2118 // Perform some work to ensure host RenderThread started.
2119 auto buffer2 = device->createBufferUnique(bufferCreateInfo).value;
2120 ASSERT_THAT(buffer2, IsValidHandle());
2121
2122 ++threadsReady;
2123 while (threadsReady.load() != kNumThreads) {
2124 }
2125
2126 const vkhpp::CommandPoolCreateInfo commandPoolCreateInfo = {
// eResetCommandBuffer is required for the per-iteration commandBuffer->reset() calls below.
.flags = vkhpp::CommandPoolCreateFlagBits::eResetCommandBuffer,
2127 .queueFamilyIndex = queueFamilyIndex,
2128 };
2129 auto commandPool = device->createCommandPoolUnique(commandPoolCreateInfo).value;
2130
2131 const vkhpp::CommandBufferAllocateInfo commandBufferAllocateInfo = {
2132 .level = vkhpp::CommandBufferLevel::ePrimary,
2133 .commandPool = *commandPool,
2134 .commandBufferCount = 1,
2135 };
2136 auto commandBuffers = device->allocateCommandBuffersUnique(commandBufferAllocateInfo).value;
2137 ASSERT_THAT(commandBuffers, Not(IsEmpty()));
2138 auto commandBuffer = std::move(commandBuffers[0]);
2139 ASSERT_THAT(commandBuffer, IsValidHandle());
2140
2141 auto transferFence = device->createFenceUnique(vkhpp::FenceCreateInfo()).value;
2142 ASSERT_THAT(transferFence, IsValidHandle());
2143
2144 constexpr const int kNumIterations = 1000;
2145 for (int i = 0; i < kNumIterations; i++) {
2146 commandBuffer->reset();
2147 const vkhpp::CommandBufferBeginInfo commandBufferBeginInfo = {
2148 .flags = vkhpp::CommandBufferUsageFlagBits::eOneTimeSubmit,
2149 };
2150 commandBuffer->begin(commandBufferBeginInfo);
2151
2152 commandBuffer->end();
2153
2154 std::vector<vkhpp::CommandBuffer> commandBufferHandles;
2155 commandBufferHandles.push_back(*commandBuffer);
2156
2157 const vkhpp::SubmitInfo submitInfo = {
2158 .commandBufferCount = static_cast<uint32_t>(commandBufferHandles.size()),
2159 .pCommandBuffers = commandBufferHandles.data(),
2160 };
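// VkQueue access must be externally synchronized; every worker thread submits to the
// same `queue`, so the submit is guarded by the shared queue_mutex.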
2161 {
2162 std::lock_guard<std::mutex> qm(queue_mutex);
2163 queue.submit(submitInfo, *transferFence);
2164 }
2165 auto waitResult = device->waitForFences(*transferFence, VK_TRUE, AsVkTimeout(3s));
2166 ASSERT_THAT(waitResult, IsVkSuccess());
// Unsignal the fence so that it can be reused by the next iteration's submit.
device->resetFences({*transferFence});
2167 }
2168 });
2169 }
2170
2171 for (auto& thread : threads) {
2172 thread.join();
2173 }
2174 }
2175
2176 TEST_P(GfxstreamEnd2EndVkTest, ImportAndBlitFromR8G8B8A8Ahb) {
2177 DoFillAndRenderFromAhb(GFXSTREAM_AHB_FORMAT_R8G8B8A8_UNORM);
2178 }
2179
2180 TEST_P(GfxstreamEnd2EndVkTest, ImportAndBlitFromYCbCr888420Ahb) {
2181 DoFillAndRenderFromAhb(GFXSTREAM_AHB_FORMAT_Y8Cb8Cr8_420);
2182 }
2183
2184 TEST_P(GfxstreamEnd2EndVkTest, ImportAndBlitFromYv12Ahb) {
2185 DoFillAndRenderFromAhb(GFXSTREAM_AHB_FORMAT_YV12);
2186 }
2187
2188 std::vector<TestParams> GenerateTestCases() {
2189 std::vector<TestParams> cases = {TestParams{
2190 .with_gl = false,
2191 .with_vk = true,
2192 .with_transport = GfxstreamTransport::kVirtioGpuAsg,
2193 },
2194 TestParams{
2195 .with_gl = true,
2196 .with_vk = true,
2197 .with_transport = GfxstreamTransport::kVirtioGpuAsg,
2198 },
2199 TestParams{
2200 .with_gl = false,
2201 .with_vk = true,
2202 .with_transport = GfxstreamTransport::kVirtioGpuPipe,
2203 },
2204 TestParams{
2205 .with_gl = true,
2206 .with_vk = true,
2207 .with_transport = GfxstreamTransport::kVirtioGpuPipe,
2208 }};
2209 cases = WithAndWithoutFeatures(cases, {"VulkanSnapshots"});
2210 cases = WithAndWithoutFeatures(cases, {"VulkanUseDedicatedAhbMemoryType"});
2211 return cases;
2212 }
2213
2214 TEST_P(GfxstreamEnd2EndVkTest, UseDoubleQueuesAndSynchronizeCorrectly) {
2215 auto vk = GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
2216 auto& [instance, physicalDevice, device, queue_dont_use, graphicsQueueFamilyIndex] = vk;
2217
2218 const uint32_t graphicsQueueCount =
2219 physicalDevice.getQueueFamilyProperties()[graphicsQueueFamilyIndex].queueCount;
2220 if (graphicsQueueCount < 2) {
2221 GTEST_SKIP() << "Device does not support at least 2 queues with VK_QUEUE_GRAPHICS_BIT. Skipping test...";
2222 return;
2223 }
2224
2225 // Recreate logical device to get 2 queues
2226 const float queuePriority = 1.0f;
2227 const vkhpp::DeviceQueueCreateInfo deviceQueueCreateInfo = {
2228 .queueFamilyIndex = graphicsQueueFamilyIndex,
2229 .queueCount = 2,
2230 .pQueuePriorities = &queuePriority,
2231 };
2232 std::vector<const char*> deviceExtensions = {
2233 VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME,
2234 VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME,
2235 };
2236 const vkhpp::DeviceCreateInfo deviceCreateInfo = {
2237 .pNext = nullptr,
2238 .pQueueCreateInfos = &deviceQueueCreateInfo,
2239 .queueCreateInfoCount = 1,
2240 .enabledLayerCount = 0,
2241 .ppEnabledLayerNames = nullptr,
2242 .enabledExtensionCount = static_cast<uint32_t>(deviceExtensions.size()),
2243 .ppEnabledExtensionNames = deviceExtensions.data(),
2244 };
2245
2246 device = GFXSTREAM_ASSERT_VKHPP_RV(physicalDevice.createDeviceUnique(deviceCreateInfo));
2247 auto queue1 = device->getQueue(graphicsQueueFamilyIndex, 0);
2248 auto queue2 = device->getQueue(graphicsQueueFamilyIndex, 1);
2249
2250 const VkDeviceSize kBufferSize = 1024;
2251 auto sourceBufferWithMemoryResult = CreateBuffer(
2252 vk, kBufferSize, vkhpp::BufferUsageFlagBits::eTransferSrc,
2253 vkhpp::MemoryPropertyFlagBits::eHostVisible | vkhpp::MemoryPropertyFlagBits::eHostCoherent);
2254 if (!sourceBufferWithMemoryResult.ok()) {
2255 GTEST_SKIP()
2256 << "Couldn't create hostVisible, hostCoherent, TransferSrc buffer. Skipping test...";
2257 return;
2258 }
2259 auto sourceBufferWithMemory = std::move(sourceBufferWithMemoryResult.value());
2260 auto destBufferWithMemoryResult = CreateBuffer(
2261 vk, kBufferSize, vkhpp::BufferUsageFlagBits::eTransferDst,
2262 vkhpp::MemoryPropertyFlagBits::eHostVisible | vkhpp::MemoryPropertyFlagBits::eHostCoherent);
2263 if (!destBufferWithMemoryResult.ok()) {
2264 GTEST_SKIP()
2265 << "Couldn't create hostVisible, hostCoherent, TransferDst buffer. Skipping test...";
2266 return;
2267 }
2268 auto destBufferWithMemory = std::move(destBufferWithMemoryResult.value());
2269 const vkhpp::CommandPoolCreateInfo commandPoolCreateInfo = {
2270 .queueFamilyIndex = graphicsQueueFamilyIndex,
2271 };
2272
2273 auto commandPool =
2274 GFXSTREAM_ASSERT_VKHPP_RV(device->createCommandPoolUnique(commandPoolCreateInfo));
2275
2276 const vkhpp::CommandBufferAllocateInfo commandBufferAllocateInfo = {
2277 .commandPool = *commandPool,
2278 .level = vkhpp::CommandBufferLevel::ePrimary,
2279 .commandBufferCount = 2,
2280 };
2281 auto commandBuffers =
2282 GFXSTREAM_ASSERT_VKHPP_RV(device->allocateCommandBuffersUnique(commandBufferAllocateInfo));
2283 auto commandBufferWriteInto = std::move(commandBuffers[0]);
2284 auto commandBufferCopy = std::move(commandBuffers[1]);
2285
2286 vkhpp::UniqueSemaphore semaphore =
2287 device->createSemaphoreUnique(vkhpp::SemaphoreCreateInfo()).value;
2288
2289 vkhpp::SubmitInfo submitInfoWrite = {.commandBufferCount = 1,
2290 .pCommandBuffers = &*commandBufferWriteInto,
2291 .signalSemaphoreCount = 1,
2292 .pSignalSemaphores = &*semaphore};
2293
2294 const vkhpp::PipelineStageFlags waitStage = vkhpp::PipelineStageFlagBits::eTransfer;
2295 vkhpp::SubmitInfo submitInfoCopy = {.commandBufferCount = 1,
2296 .pCommandBuffers = &*commandBufferCopy,
2297 .waitSemaphoreCount = 1,
2298 .pWaitSemaphores = &*semaphore,
2299 .pWaitDstStageMask = &waitStage};
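// queue1 signals `semaphore` after the buffer update, and queue2's copy waits on it at the
// transfer stage, so the copy cannot read the source buffer until the write has completed
// even though the two submissions go to different queues.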
2300 const vkhpp::CommandBufferBeginInfo commandBufferBeginInfo = {
2301 .flags = vkhpp::CommandBufferUsageFlagBits::eOneTimeSubmit,
2302 };
2303 commandBufferWriteInto->begin(commandBufferBeginInfo);
2304 std::vector<uint8_t> dataToWrite = utils::getRandomNByteData(100);
2305 commandBufferWriteInto->updateBuffer(sourceBufferWithMemory.buffer.get(), 0, dataToWrite.size(),
2306 dataToWrite.data());
2307 auto bufferBarrier = vkhpp::BufferMemoryBarrier()
2308 .setSrcAccessMask(vkhpp::AccessFlagBits::eTransferWrite)
2309 .setDstAccessMask(vkhpp::AccessFlagBits::eTransferRead)
2310 .setSrcQueueFamilyIndex(VK_QUEUE_FAMILY_IGNORED)
2311 .setDstQueueFamilyIndex(VK_QUEUE_FAMILY_IGNORED)
2312 .setBuffer(sourceBufferWithMemory.buffer.get())
2313 .setOffset(0)
2314 .setSize(kBufferSize);
2315 commandBufferWriteInto->pipelineBarrier(vkhpp::PipelineStageFlagBits::eTransfer,
2316 vkhpp::PipelineStageFlagBits::eTransfer,
2317 vkhpp::DependencyFlags(), {}, bufferBarrier, {});
2318 commandBufferWriteInto->end();
2319
2320 commandBufferCopy->begin(commandBufferBeginInfo);
2321 vkhpp::BufferCopy copyRegion{};
2322 copyRegion.srcOffset = 0;
2323 copyRegion.dstOffset = 0;
2324 copyRegion.size = kBufferSize;
2325 vkhpp::ArrayProxy<const vkhpp::BufferCopy> copyRegions = copyRegion;
2326 commandBufferCopy->copyBuffer(sourceBufferWithMemory.buffer.get(),
2327 destBufferWithMemory.buffer.get(), copyRegions);
2328 commandBufferCopy->end();
2329 queue1.submit(submitInfoWrite, nullptr);
2330 queue2.submit(submitInfoCopy, nullptr);
2331 queue2.waitIdle();
2332 void* mappedMemory = GFXSTREAM_ASSERT_VKHPP_RV(
2333 device->mapMemory(*destBufferWithMemory.bufferMemory, 0, kBufferSize));
2334 uint8_t* readData = static_cast<uint8_t*>(mappedMemory);
// Compare while the memory is still mapped; the pointer is no longer valid after unmapping.
2335 ASSERT_TRUE(std::memcmp(readData, dataToWrite.data(), dataToWrite.size()) == 0);
2336 device->unmapMemory(*destBufferWithMemory.bufferMemory);
2337 }
2338
2339 TEST_P(GfxstreamEnd2EndVkTest, GetFenceStatusOnExternalFence) {
2340 auto vk = GFXSTREAM_ASSERT(SetUpTypicalVkTestEnvironment());
2341 auto& [instance, physicalDevice, device, queue, queueFamilyIndex] = vk;
2342
2343 const uint32_t width = 32;
2344 const uint32_t height = 32;
2345 auto ahb = GFXSTREAM_ASSERT(ScopedAHardwareBuffer::Allocate(
2346 *mGralloc, width, height, GFXSTREAM_AHB_FORMAT_R8G8B8A8_UNORM));
2347
2348 const VkNativeBufferANDROID imageNativeBufferInfo = {
2349 .sType = VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID,
2350 .handle = mGralloc->getNativeHandle(ahb),
2351 };
2352 const vkhpp::ImageCreateInfo imageCreateInfo = {
2353 .pNext = &imageNativeBufferInfo,
2354 .imageType = vkhpp::ImageType::e2D,
2355 .extent.width = width,
2356 .extent.height = height,
2357 .extent.depth = 1,
2358 .mipLevels = 1,
2359 .arrayLayers = 1,
2360 .format = vkhpp::Format::eR8G8B8A8Unorm,
2361 .tiling = vkhpp::ImageTiling::eOptimal,
2362 .initialLayout = vkhpp::ImageLayout::eUndefined,
2363 .usage = vkhpp::ImageUsageFlagBits::eSampled | vkhpp::ImageUsageFlagBits::eTransferDst |
2364 vkhpp::ImageUsageFlagBits::eTransferSrc,
2365 .sharingMode = vkhpp::SharingMode::eExclusive,
2366 .samples = vkhpp::SampleCountFlagBits::e1,
2367 };
2368 auto image = device->createImageUnique(imageCreateInfo).value;
2369
2370 vkhpp::MemoryRequirements imageMemoryRequirements{};
2371 device->getImageMemoryRequirements(*image, &imageMemoryRequirements);
2372
2373 const uint32_t imageMemoryIndex = utils::getMemoryType(
2374 physicalDevice, imageMemoryRequirements, vkhpp::MemoryPropertyFlagBits::eDeviceLocal);
2375 ASSERT_THAT(imageMemoryIndex, Not(Eq(-1)));
2376
2377 const vkhpp::MemoryAllocateInfo imageMemoryAllocateInfo = {
2378 .allocationSize = imageMemoryRequirements.size,
2379 .memoryTypeIndex = imageMemoryIndex,
2380 };
2381
2382 auto imageMemory = device->allocateMemoryUnique(imageMemoryAllocateInfo).value;
2383 ASSERT_THAT(imageMemory, IsValidHandle());
2384 ASSERT_THAT(device->bindImageMemory(*image, *imageMemory, 0), IsVkSuccess());
2385
2386 auto vkQueueSignalReleaseImageANDROID = PFN_vkQueueSignalReleaseImageANDROID(
2387 device->getProcAddr("vkQueueSignalReleaseImageANDROID"));
2388 ASSERT_THAT(vkQueueSignalReleaseImageANDROID, NotNull());
2389
2390 int qsriSyncFd = -1;
2391 auto qsriResult = vkQueueSignalReleaseImageANDROID(queue, 0, nullptr, *image, &qsriSyncFd);
2392 ASSERT_THAT(qsriResult, Eq(VK_SUCCESS));
2393 ASSERT_THAT(qsriSyncFd, Not(Eq(-1)));
2394
2395 // Initially unsignaled.
2396 vkhpp::UniqueFence fence = device->createFenceUnique(vkhpp::FenceCreateInfo()).value;
2397
2398 const vkhpp::ImportFenceFdInfoKHR importFenceInfo = {
2399 .fence = *fence,
2400 .flags = vkhpp::FenceImportFlagBits::eTemporary,
2401 .handleType = vkhpp::ExternalFenceHandleTypeFlagBits::eSyncFd,
2402 .fd = qsriSyncFd,
2403 };
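// Importing with eTemporary adopts the sync fd as the fence's temporary payload; on a
// successful import, ownership of the fd transfers to the implementation and the fd must
// not be used again by the application.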
2404 auto importResult = device->importFenceFdKHR(&importFenceInfo);
2405 ASSERT_THAT(importResult, IsVkSuccess());
2406
2407 const auto kMaxTimeout = std::chrono::seconds(10);
2408
2409 auto begin = std::chrono::steady_clock::now();
2410 while (true) {
2411 vkhpp::Result fenceStatus = device->getFenceStatus(*fence);
2412 if (fenceStatus == vkhpp::Result::eSuccess) {
2413 break;
2414 }
2415
2416 auto now = std::chrono::steady_clock::now();
2417 if ((now - begin) > kMaxTimeout) {
2418 ASSERT_THAT(fenceStatus, Eq(vkhpp::Result::eSuccess));
2419 }
2420 }
2421 }
2422
2423 INSTANTIATE_TEST_CASE_P(GfxstreamEnd2EndTests, GfxstreamEnd2EndVkTest,
2424 ::testing::ValuesIn(GenerateTestCases()), &GetTestName);
2425
2426 } // namespace
2427 } // namespace tests
2428 } // namespace gfxstream
2429