1 //
2 // Copyright 2016 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // vk_utils:
7 // Helper functions for the Vulkan Renderer.
8 //
9
10 #include "libANGLE/renderer/vulkan/vk_utils.h"
11
12 #include "libANGLE/Context.h"
13 #include "libANGLE/Display.h"
14 #include "libANGLE/renderer/vulkan/BufferVk.h"
15 #include "libANGLE/renderer/vulkan/ContextVk.h"
16 #include "libANGLE/renderer/vulkan/DisplayVk.h"
17 #include "libANGLE/renderer/vulkan/android/vk_android_utils.h"
18 #include "libANGLE/renderer/vulkan/vk_mem_alloc_wrapper.h"
19 #include "libANGLE/renderer/vulkan/vk_ref_counted_event.h"
20 #include "libANGLE/renderer/vulkan/vk_renderer.h"
21 #include "libANGLE/renderer/vulkan/vk_resource.h"
22
23 namespace angle
24 {
ToEGL(Result result,EGLint errorCode)25 egl::Error ToEGL(Result result, EGLint errorCode)
26 {
27 if (result != angle::Result::Continue)
28 {
29 egl::Error error = std::move(*egl::Display::GetCurrentThreadErrorScratchSpace());
30 error.setCode(errorCode);
31 return error;
32 }
33 else
34 {
35 return egl::NoError();
36 }
37 }
38 } // namespace angle
39
40 namespace rx
41 {
42 namespace
43 {
// Arbitrary byte value used to fill allocations when non-zero-memory sanitization is
// enabled (see StagingBuffer::init).  memset with 0x3F yields the 32-bit pattern
// 0x3F3F3F3F, which is about 0.75 when interpreted as a float.
constexpr int kNonZeroInitValue = 0x3F;
47
GetStagingBufferUsageFlags(vk::StagingUsage usage)48 VkImageUsageFlags GetStagingBufferUsageFlags(vk::StagingUsage usage)
49 {
50 switch (usage)
51 {
52 case vk::StagingUsage::Read:
53 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
54 case vk::StagingUsage::Write:
55 return VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
56 case vk::StagingUsage::Both:
57 return (VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
58 default:
59 UNREACHABLE();
60 return 0;
61 }
62 }
63
FindCompatibleMemory(const VkPhysicalDeviceMemoryProperties & memoryProperties,const VkMemoryRequirements & memoryRequirements,VkMemoryPropertyFlags requestedMemoryPropertyFlags,VkMemoryPropertyFlags * memoryPropertyFlagsOut,uint32_t * typeIndexOut)64 bool FindCompatibleMemory(const VkPhysicalDeviceMemoryProperties &memoryProperties,
65 const VkMemoryRequirements &memoryRequirements,
66 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
67 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
68 uint32_t *typeIndexOut)
69 {
70 for (size_t memoryIndex : angle::BitSet32<32>(memoryRequirements.memoryTypeBits))
71 {
72 ASSERT(memoryIndex < memoryProperties.memoryTypeCount);
73
74 if ((memoryProperties.memoryTypes[memoryIndex].propertyFlags &
75 requestedMemoryPropertyFlags) == requestedMemoryPropertyFlags)
76 {
77 *memoryPropertyFlagsOut = memoryProperties.memoryTypes[memoryIndex].propertyFlags;
78 *typeIndexOut = static_cast<uint32_t>(memoryIndex);
79 return true;
80 }
81 }
82
83 return false;
84 }
85
// Finds a compatible memory type index (with the fallbacks implemented in
// MemoryProperties::findCompatibleMemoryIndex) and allocates memoryRequirements.size bytes
// from it.  |extraAllocationInfo|, when non-null, is chained as the pNext of the
// VkMemoryAllocateInfo and also flags the allocation as external memory for the
// type-index search.  Returns the raw VkResult; on success the allocation has been
// reported to the renderer's memory allocation tracker.
VkResult FindAndAllocateCompatibleMemory(vk::Context *context,
                                         vk::MemoryAllocationType memoryAllocationType,
                                         const vk::MemoryProperties &memoryProperties,
                                         VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                         VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                         const VkMemoryRequirements &memoryRequirements,
                                         const void *extraAllocationInfo,
                                         uint32_t *memoryTypeIndexOut,
                                         vk::DeviceMemory *deviceMemoryOut)
{
    VkDevice device = context->getDevice();

    // A non-null extraAllocationInfo marks this as an external-memory allocation, which
    // enables an extra memory-type fallback in findCompatibleMemoryIndex.
    VK_RESULT_TRY(memoryProperties.findCompatibleMemoryIndex(
        context, memoryRequirements, requestedMemoryPropertyFlags, (extraAllocationInfo != nullptr),
        memoryPropertyFlagsOut, memoryTypeIndexOut));

    VkMemoryAllocateInfo allocInfo = {};
    allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.pNext = extraAllocationInfo;
    allocInfo.memoryTypeIndex = *memoryTypeIndexOut;
    allocInfo.allocationSize = memoryRequirements.size;

    // Add the new allocation for tracking (recorded as "pending" before the driver call).
    vk::Renderer *renderer = context->getRenderer();
    renderer->getMemoryAllocationTracker()->setPendingMemoryAlloc(
        memoryAllocationType, allocInfo.allocationSize, *memoryTypeIndexOut);

    VkResult result = deviceMemoryOut->allocate(device, allocInfo);

    if (result == VK_SUCCESS)
    {
        // Promote the pending record to a live tracked allocation.
        renderer->onMemoryAlloc(memoryAllocationType, allocInfo.allocationSize, *memoryTypeIndexOut,
                                deviceMemoryOut->getHandle());
    }
    return result;
}
122
// Allocates device memory compatible with |bufferOrImage| and binds it.  Specialized below
// for vk::Image (which supports plane binding via |extraBindInfo|) and vk::Buffer (which
// does not).
template <typename T>
VkResult AllocateAndBindBufferOrImageMemory(vk::Context *context,
                                            vk::MemoryAllocationType memoryAllocationType,
                                            VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                            VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                            const VkMemoryRequirements &memoryRequirements,
                                            const void *extraAllocationInfo,
                                            const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
                                            T *bufferOrImage,
                                            uint32_t *memoryTypeIndexOut,
                                            vk::DeviceMemory *deviceMemoryOut);
134
// Image specialization: allocates memory and binds it to |image|.  When |extraBindInfo| is
// provided (multi-plane images), binds through vkBindImageMemory2 with the plane info
// chained; otherwise uses the plain bind path.
template <>
VkResult AllocateAndBindBufferOrImageMemory(vk::Context *context,
                                            vk::MemoryAllocationType memoryAllocationType,
                                            VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                            VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                            const VkMemoryRequirements &memoryRequirements,
                                            const void *extraAllocationInfo,
                                            const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
                                            vk::Image *image,
                                            uint32_t *memoryTypeIndexOut,
                                            vk::DeviceMemory *deviceMemoryOut)
{
    const vk::MemoryProperties &memoryProperties = context->getRenderer()->getMemoryProperties();

    VK_RESULT_TRY(FindAndAllocateCompatibleMemory(
        context, memoryAllocationType, memoryProperties, requestedMemoryPropertyFlags,
        memoryPropertyFlagsOut, memoryRequirements, extraAllocationInfo, memoryTypeIndexOut,
        deviceMemoryOut));

    if (extraBindInfo)
    {
        // Per-plane bind: chain the plane info through VkBindImageMemoryInfoKHR.
        VkBindImageMemoryInfoKHR bindInfo = {};
        bindInfo.sType                    = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
        bindInfo.pNext                    = extraBindInfo;
        bindInfo.image                    = image->getHandle();
        bindInfo.memory                   = deviceMemoryOut->getHandle();
        bindInfo.memoryOffset             = 0;

        VK_RESULT_TRY(image->bindMemory2(context->getDevice(), bindInfo));
    }
    else
    {
        VK_RESULT_TRY(image->bindMemory(context->getDevice(), *deviceMemoryOut));
    }

    return VK_SUCCESS;
}
172
// Buffer specialization: allocates memory and binds it to |buffer| at offset 0.
// Plane bind info is an image-only concept, so it must be null here.
template <>
VkResult AllocateAndBindBufferOrImageMemory(vk::Context *context,
                                            vk::MemoryAllocationType memoryAllocationType,
                                            VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                            VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                            const VkMemoryRequirements &memoryRequirements,
                                            const void *extraAllocationInfo,
                                            const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
                                            vk::Buffer *buffer,
                                            uint32_t *memoryTypeIndexOut,
                                            vk::DeviceMemory *deviceMemoryOut)
{
    ASSERT(extraBindInfo == nullptr);

    const vk::MemoryProperties &memoryProperties = context->getRenderer()->getMemoryProperties();

    VK_RESULT_TRY(FindAndAllocateCompatibleMemory(
        context, memoryAllocationType, memoryProperties, requestedMemoryPropertyFlags,
        memoryPropertyFlagsOut, memoryRequirements, extraAllocationInfo, memoryTypeIndexOut,
        deviceMemoryOut));

    VK_RESULT_TRY(buffer->bindMemory(context->getDevice(), *deviceMemoryOut, 0));
    return VK_SUCCESS;
}
197
// Queries the driver for |bufferOrImage|'s memory requirements, then allocates and binds
// matching memory.  Outputs the allocated size via |sizeOut| on success.
template <typename T>
VkResult AllocateBufferOrImageMemory(vk::Context *context,
                                     vk::MemoryAllocationType memoryAllocationType,
                                     VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                     VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                     const void *extraAllocationInfo,
                                     T *bufferOrImage,
                                     uint32_t *memoryTypeIndexOut,
                                     vk::DeviceMemory *deviceMemoryOut,
                                     VkDeviceSize *sizeOut)
{
    // Call driver to determine memory requirements.
    VkMemoryRequirements memoryRequirements;
    bufferOrImage->getMemoryRequirements(context->getDevice(), &memoryRequirements);

    // No plane bind info on this path (nullptr for extraBindInfo).
    VK_RESULT_TRY(AllocateAndBindBufferOrImageMemory(
        context, memoryAllocationType, requestedMemoryPropertyFlags, memoryPropertyFlagsOut,
        memoryRequirements, extraAllocationInfo, nullptr, bufferOrImage, memoryTypeIndexOut,
        deviceMemoryOut));

    *sizeOut = memoryRequirements.size;

    return VK_SUCCESS;
}
222
// Unified layer that includes the full validation layer stack.
constexpr char kVkKhronosValidationLayerName[] = "VK_LAYER_KHRONOS_validation";
// Older LunarG meta-layer, used as a fallback when the Khronos layer is absent.
constexpr char kVkStandardValidationLayerName[] = "VK_LAYER_LUNARG_standard_validation";
// Individual legacy layers making up the old validation stack; last-resort fallback when
// neither meta-layer above is available.
const char *kVkValidationLayerNames[] = {
    "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation",
    "VK_LAYER_LUNARG_object_tracker", "VK_LAYER_LUNARG_core_validation",
    "VK_LAYER_GOOGLE_unique_objects"};
230
231 } // anonymous namespace
232
// Returns a human-readable description for a VkResult, for error reporting and logging.
// Unrecognized codes map to a generic "Unknown vulkan error code" string.
const char *VulkanResultString(VkResult result)
{
    switch (result)
    {
        case VK_SUCCESS:
            return "Command successfully completed";
        case VK_NOT_READY:
            return "A fence or query has not yet completed";
        case VK_TIMEOUT:
            return "A wait operation has not completed in the specified time";
        case VK_EVENT_SET:
            return "An event is signaled";
        case VK_EVENT_RESET:
            return "An event is unsignaled";
        case VK_INCOMPLETE:
            return "A return array was too small for the result";
        case VK_SUBOPTIMAL_KHR:
            return "A swapchain no longer matches the surface properties exactly, but can still be "
                   "used to present to the surface successfully";
        case VK_ERROR_OUT_OF_HOST_MEMORY:
            return "A host memory allocation has failed";
        case VK_ERROR_OUT_OF_DEVICE_MEMORY:
            return "A device memory allocation has failed";
        case VK_ERROR_INITIALIZATION_FAILED:
            return "Initialization of an object could not be completed for implementation-specific "
                   "reasons";
        case VK_ERROR_DEVICE_LOST:
            return "The logical or physical device has been lost";
        case VK_ERROR_MEMORY_MAP_FAILED:
            return "Mapping of a memory object has failed";
        case VK_ERROR_LAYER_NOT_PRESENT:
            return "A requested layer is not present or could not be loaded";
        case VK_ERROR_EXTENSION_NOT_PRESENT:
            return "A requested extension is not supported";
        case VK_ERROR_FEATURE_NOT_PRESENT:
            return "A requested feature is not supported";
        case VK_ERROR_INCOMPATIBLE_DRIVER:
            return "The requested version of Vulkan is not supported by the driver or is otherwise "
                   "incompatible for implementation-specific reasons";
        case VK_ERROR_TOO_MANY_OBJECTS:
            return "Too many objects of the type have already been created";
        case VK_ERROR_FORMAT_NOT_SUPPORTED:
            return "A requested format is not supported on this device";
        case VK_ERROR_SURFACE_LOST_KHR:
            return "A surface is no longer available";
        case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
            return "The requested window is already connected to a VkSurfaceKHR, or to some other "
                   "non-Vulkan API";
        case VK_ERROR_OUT_OF_DATE_KHR:
            return "A surface has changed in such a way that it is no longer compatible with the "
                   "swapchain";
        case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:
            return "The display used by a swapchain does not use the same presentable image "
                   "layout, or is incompatible in a way that prevents sharing an image";
        case VK_ERROR_VALIDATION_FAILED_EXT:
            return "The validation layers detected invalid API usage";
        case VK_ERROR_INVALID_SHADER_NV:
            return "Invalid Vulkan shader was generated";
        case VK_ERROR_OUT_OF_POOL_MEMORY:
            return "A pool memory allocation has failed";
        case VK_ERROR_FRAGMENTED_POOL:
            return "A pool allocation has failed due to fragmentation of the pool's memory";
        case VK_ERROR_INVALID_EXTERNAL_HANDLE:
            return "An external handle is not a valid handle of the specified type";
        default:
            return "Unknown vulkan error code";
    }
}
301
GetAvailableValidationLayers(const std::vector<VkLayerProperties> & layerProps,bool mustHaveLayers,VulkanLayerVector * enabledLayerNames)302 bool GetAvailableValidationLayers(const std::vector<VkLayerProperties> &layerProps,
303 bool mustHaveLayers,
304 VulkanLayerVector *enabledLayerNames)
305 {
306
307 ASSERT(enabledLayerNames);
308 for (const auto &layerProp : layerProps)
309 {
310 std::string layerPropLayerName = std::string(layerProp.layerName);
311
312 // Favor unified Khronos layer, but fallback to standard validation
313 if (layerPropLayerName == kVkKhronosValidationLayerName)
314 {
315 enabledLayerNames->push_back(kVkKhronosValidationLayerName);
316 continue;
317 }
318 else if (layerPropLayerName == kVkStandardValidationLayerName)
319 {
320 enabledLayerNames->push_back(kVkStandardValidationLayerName);
321 continue;
322 }
323
324 for (const char *validationLayerName : kVkValidationLayerNames)
325 {
326 if (layerPropLayerName == validationLayerName)
327 {
328 enabledLayerNames->push_back(validationLayerName);
329 break;
330 }
331 }
332 }
333
334 if (enabledLayerNames->size() == 0)
335 {
336 // Generate an error if the layers were explicitly requested, warning otherwise.
337 if (mustHaveLayers)
338 {
339 ERR() << "Vulkan validation layers are missing.";
340 }
341 else
342 {
343 WARN() << "Vulkan validation layers are missing.";
344 }
345
346 return false;
347 }
348
349 return true;
350 }
351
352 namespace vk
353 {
// Environment variables understood by the Vulkan loader for locating layers and ICDs.
const char *gLoaderLayersPathEnv   = "VK_LAYER_PATH";
const char *gLoaderICDFilenamesEnv = "VK_ICD_FILENAMES";
356
GetDepthStencilAspectFlags(const angle::Format & format)357 VkImageAspectFlags GetDepthStencilAspectFlags(const angle::Format &format)
358 {
359 return (format.depthBits > 0 ? VK_IMAGE_ASPECT_DEPTH_BIT : 0) |
360 (format.stencilBits > 0 ? VK_IMAGE_ASPECT_STENCIL_BIT : 0);
361 }
362
GetFormatAspectFlags(const angle::Format & format)363 VkImageAspectFlags GetFormatAspectFlags(const angle::Format &format)
364 {
365 VkImageAspectFlags dsAspect = GetDepthStencilAspectFlags(format);
366 // If the image is not depth stencil, assume color aspect. Note that detecting color formats
367 // is less trivial than depth/stencil, e.g. as block formats don't indicate any bits for RGBA
368 // channels.
369 return dsAspect != 0 ? dsAspect : VK_IMAGE_ASPECT_COLOR_BIT;
370 }
371
// Context implementation.
// Thin wrapper holding the Renderer plus per-context state; accessors forward to the
// Renderer.
Context::Context(Renderer *renderer)
    : mRenderer(renderer), mShareGroupRefCountedEventsGarbageRecycler(nullptr), mPerfCounters{}
{}

Context::~Context() {}

// Forwards to the Renderer's VkDevice.
VkDevice Context::getDevice() const
{
    return mRenderer->getDevice();
}

// Forwards to the Renderer's feature set.
const angle::FeaturesVk &Context::getFeatures() const
{
    return mRenderer->getFeatures();
}
388
// MemoryProperties implementation.
// Caches the physical device's VkPhysicalDeviceMemoryProperties.
MemoryProperties::MemoryProperties() : mMemoryProperties{} {}

// Queries and caches the memory properties; must be called exactly once before use
// (asserts the cache is empty on entry and non-empty on exit).
void MemoryProperties::init(VkPhysicalDevice physicalDevice)
{
    ASSERT(mMemoryProperties.memoryTypeCount == 0);
    vkGetPhysicalDeviceMemoryProperties(physicalDevice, &mMemoryProperties);
    ASSERT(mMemoryProperties.memoryTypeCount > 0);
}

// Resets the cache to the empty (uninitialized) state.
void MemoryProperties::destroy()
{
    mMemoryProperties = {};
}
403
hasLazilyAllocatedMemory() const404 bool MemoryProperties::hasLazilyAllocatedMemory() const
405 {
406 for (uint32_t typeIndex = 0; typeIndex < mMemoryProperties.memoryTypeCount; ++typeIndex)
407 {
408 const VkMemoryType &memoryType = mMemoryProperties.memoryTypes[typeIndex];
409 if ((memoryType.propertyFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
410 {
411 return true;
412 }
413 }
414 return false;
415 }
416
// Chooses a memory type index for an allocation, trying progressively weaker criteria:
//   1. A type matching all of |requestedMemoryPropertyFlags|.
//   2. If host visibility was requested: the HOST_VISIBLE|HOST_COHERENT combination, which
//      the Vulkan spec guarantees at least one memory type supports.
//   3. For external-memory imports (|isExternalMemory|): any type the driver reports as
//      compatible (no required property flags).
// Returns VK_ERROR_INCOMPATIBLE_DRIVER when no type is found.  |context| is currently
// unused here; it is part of the signature for error reporting (see TODO below).
VkResult MemoryProperties::findCompatibleMemoryIndex(
    Context *context,
    const VkMemoryRequirements &memoryRequirements,
    VkMemoryPropertyFlags requestedMemoryPropertyFlags,
    bool isExternalMemory,
    VkMemoryPropertyFlags *memoryPropertyFlagsOut,
    uint32_t *typeIndexOut) const
{
    // BitSet32<32> in FindCompatibleMemory can only represent 32 types.
    ASSERT(mMemoryProperties.memoryTypeCount > 0 && mMemoryProperties.memoryTypeCount <= 32);

    // Find a compatible memory pool index. If the index doesn't change, we could cache it.
    // Not finding a valid memory pool means an out-of-spec driver, or internal error.
    // TODO(jmadill): Determine if it is possible to cache indexes.
    // TODO(jmadill): More efficient memory allocation.
    if (FindCompatibleMemory(mMemoryProperties, memoryRequirements, requestedMemoryPropertyFlags,
                             memoryPropertyFlagsOut, typeIndexOut))
    {
        return VK_SUCCESS;
    }

    // We did not find a compatible memory type. If the caller wanted a host visible memory, just
    // return the memory index with fallback, guaranteed, memory flags.
    if (requestedMemoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
    {
        // The Vulkan spec says the following -
        // There must be at least one memory type with both the
        // VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT and VK_MEMORY_PROPERTY_HOST_COHERENT_BIT
        // bits set in its propertyFlags
        constexpr VkMemoryPropertyFlags fallbackMemoryPropertyFlags =
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

        if (FindCompatibleMemory(mMemoryProperties, memoryRequirements, fallbackMemoryPropertyFlags,
                                 memoryPropertyFlagsOut, typeIndexOut))
        {
            return VK_SUCCESS;
        }
    }

    // We did not find a compatible memory type. When importing external memory, there may be
    // additional restrictions on memoryType. Find the first available memory type that Vulkan
    // driver decides being compatible with external memory import.
    if (isExternalMemory)
    {
        if (FindCompatibleMemory(mMemoryProperties, memoryRequirements, 0, memoryPropertyFlagsOut,
                                 typeIndexOut))
        {
            return VK_SUCCESS;
        }
    }

    // TODO(jmadill): Add error message to error.
    return VK_ERROR_INCOMPATIBLE_DRIVER;
}
470
// StagingBuffer implementation.
// A host-visible buffer used for CPU<->GPU transfer copies (see init below).
StagingBuffer::StagingBuffer() : mSize(0) {}

// Immediately destroys the buffer and frees its allocation.  Use release()/collectGarbage()
// instead when the GPU may still be using the buffer.
void StagingBuffer::destroy(Renderer *renderer)
{
    VkDevice device = renderer->getDevice();
    mBuffer.destroy(device);
    mAllocation.destroy(renderer->getAllocator());
    mSize = 0;
}
481
// Creates a host-visible, host-coherent buffer of |size| bytes with transfer usage derived
// from |usage| (src, dst, or both).  When the allocateNonZeroMemory feature is on, the
// memory is filled with kNonZeroInitValue so tests cannot rely on zero-initialization.
angle::Result StagingBuffer::init(Context *context, VkDeviceSize size, StagingUsage usage)
{
    VkBufferCreateInfo createInfo    = {};
    createInfo.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    createInfo.flags                 = 0;
    createInfo.size                  = size;
    createInfo.usage                 = GetStagingBufferUsageFlags(usage);
    createInfo.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
    createInfo.queueFamilyIndexCount = 0;
    createInfo.pQueueFamilyIndices   = nullptr;

    // Host-visible and coherent memory is required so the CPU can read/write without
    // explicit flush/invalidate.
    VkMemoryPropertyFlags preferredFlags = 0;
    VkMemoryPropertyFlags requiredFlags =
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

    Renderer *renderer         = context->getRenderer();
    const Allocator &allocator = renderer->getAllocator();

    uint32_t memoryTypeIndex = 0;
    ANGLE_VK_TRY(context,
                 allocator.createBuffer(createInfo, requiredFlags, preferredFlags,
                                        renderer->getFeatures().persistentlyMappedBuffers.enabled,
                                        &memoryTypeIndex, &mBuffer, &mAllocation));
    mSize = static_cast<size_t>(size);

    // Wipe memory to an invalid value when the 'allocateNonZeroMemory' feature is enabled. The
    // invalid values ensures our testing doesn't assume zero-initialized memory.
    if (renderer->getFeatures().allocateNonZeroMemory.enabled)
    {
        ANGLE_TRY(InitMappableAllocation(context, allocator, &mAllocation, size, kNonZeroInitValue,
                                         requiredFlags));
    }

    return angle::Result::Continue;
}
517
// Hands the buffer and its allocation to the context's deferred-destruction (garbage) list,
// to be destroyed once the GPU is done with them.
void StagingBuffer::release(ContextVk *contextVk)
{
    contextVk->addGarbage(&mBuffer);
    contextVk->addGarbage(&mAllocation);
}

// Like release(), but enqueues the garbage on the renderer, tied to |queueSerial| so it is
// only destroyed after that serial completes.
void StagingBuffer::collectGarbage(Renderer *renderer, const QueueSerial &queueSerial)
{
    GarbageObjects garbageObjects;
    garbageObjects.emplace_back(GetGarbage(&mBuffer));
    garbageObjects.emplace_back(GetGarbage(&mAllocation));

    ResourceUse use(queueSerial);
    renderer->collectGarbage(use, std::move(garbageObjects));
}
533
// Maps |allocation|, fills its first |size| bytes with |value|, flushes if the memory is
// not host-coherent, and unmaps.  Used to wipe memory to a sentinel value for testing.
angle::Result InitMappableAllocation(Context *context,
                                     const Allocator &allocator,
                                     Allocation *allocation,
                                     VkDeviceSize size,
                                     int value,
                                     VkMemoryPropertyFlags memoryPropertyFlags)
{
    uint8_t *mapPointer;
    ANGLE_VK_TRY(context, allocation->map(allocator, &mapPointer));
    memset(mapPointer, value, static_cast<size_t>(size));

    // Non-coherent memory requires an explicit flush to make CPU writes visible.
    // NOTE(review): the flush result is not checked here, unlike map() above — confirm
    // whether flush can fail and should be wrapped in ANGLE_VK_TRY.
    if ((memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
    {
        allocation->flush(allocator, 0, size);
    }

    allocation->unmap(allocator);

    return angle::Result::Continue;
}
554
// Public wrapper: allocates and binds memory for |buffer|, querying its requirements from
// the driver.  Forwards to the AllocateBufferOrImageMemory template.
VkResult AllocateBufferMemory(Context *context,
                              vk::MemoryAllocationType memoryAllocationType,
                              VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                              VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                              const void *extraAllocationInfo,
                              Buffer *buffer,
                              uint32_t *memoryTypeIndexOut,
                              DeviceMemory *deviceMemoryOut,
                              VkDeviceSize *sizeOut)
{
    return AllocateBufferOrImageMemory(context, memoryAllocationType, requestedMemoryPropertyFlags,
                                       memoryPropertyFlagsOut, extraAllocationInfo, buffer,
                                       memoryTypeIndexOut, deviceMemoryOut, sizeOut);
}

// Public wrapper: allocates and binds memory for |image|, querying its requirements from
// the driver.  Forwards to the AllocateBufferOrImageMemory template.
VkResult AllocateImageMemory(Context *context,
                             vk::MemoryAllocationType memoryAllocationType,
                             VkMemoryPropertyFlags memoryPropertyFlags,
                             VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                             const void *extraAllocationInfo,
                             Image *image,
                             uint32_t *memoryTypeIndexOut,
                             DeviceMemory *deviceMemoryOut,
                             VkDeviceSize *sizeOut)
{
    return AllocateBufferOrImageMemory(context, memoryAllocationType, memoryPropertyFlags,
                                       memoryPropertyFlagsOut, extraAllocationInfo, image,
                                       memoryTypeIndexOut, deviceMemoryOut, sizeOut);
}
584
// Allocates and binds image memory using caller-provided |memoryRequirements| (e.g. from
// vkGetImageMemoryRequirements2 for external/multi-plane images).  The resulting property
// flags are discarded.
VkResult AllocateImageMemoryWithRequirements(Context *context,
                                             vk::MemoryAllocationType memoryAllocationType,
                                             VkMemoryPropertyFlags memoryPropertyFlags,
                                             const VkMemoryRequirements &memoryRequirements,
                                             const void *extraAllocationInfo,
                                             const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
                                             Image *image,
                                             uint32_t *memoryTypeIndexOut,
                                             DeviceMemory *deviceMemoryOut)
{
    // The caller doesn't need the resulting flags; use a local sink.
    VkMemoryPropertyFlags memoryPropertyFlagsOut = 0;
    return AllocateAndBindBufferOrImageMemory(context, memoryAllocationType, memoryPropertyFlags,
                                              &memoryPropertyFlagsOut, memoryRequirements,
                                              extraAllocationInfo, extraBindInfo, image,
                                              memoryTypeIndexOut, deviceMemoryOut);
}

// Allocates and binds buffer memory using caller-provided |memoryRequirements|, also
// returning the selected memory type's full property flags.
VkResult AllocateBufferMemoryWithRequirements(Context *context,
                                              MemoryAllocationType memoryAllocationType,
                                              VkMemoryPropertyFlags memoryPropertyFlags,
                                              const VkMemoryRequirements &memoryRequirements,
                                              const void *extraAllocationInfo,
                                              Buffer *buffer,
                                              VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                              uint32_t *memoryTypeIndexOut,
                                              DeviceMemory *deviceMemoryOut)
{
    // Buffers have no plane bind info (nullptr for extraBindInfo).
    return AllocateAndBindBufferOrImageMemory(context, memoryAllocationType, memoryPropertyFlags,
                                              memoryPropertyFlagsOut, memoryRequirements,
                                              extraAllocationInfo, nullptr, buffer,
                                              memoryTypeIndexOut, deviceMemoryOut);
}
617
// Creates a VkShaderModule from SPIR-V code and stores it in |shaderModulePtr|, which must
// be empty on entry.  |shaderCodeSize| is in bytes, per VkShaderModuleCreateInfo::codeSize.
angle::Result InitShaderModule(Context *context,
                               ShaderModulePtr *shaderModulePtr,
                               const uint32_t *shaderCode,
                               size_t shaderCodeSize)
{
    ASSERT(!(*shaderModulePtr));
    VkShaderModuleCreateInfo createInfo = {};
    createInfo.sType                    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    createInfo.flags                    = 0;
    createInfo.codeSize                 = shaderCodeSize;
    createInfo.pCode                    = shaderCode;

    // Construct into a local first so |shaderModulePtr| stays empty on failure.
    ShaderModulePtr newShaderModule = ShaderModulePtr::MakeShared(context->getDevice());
    ANGLE_VK_TRY(context, newShaderModule->init(context->getDevice(), createInfo));

    *shaderModulePtr = std::move(newShaderModule);

    return angle::Result::Continue;
}
637
Get2DTextureType(uint32_t layerCount,GLint samples)638 gl::TextureType Get2DTextureType(uint32_t layerCount, GLint samples)
639 {
640 if (layerCount > 1)
641 {
642 if (samples > 1)
643 {
644 return gl::TextureType::_2DMultisampleArray;
645 }
646 else
647 {
648 return gl::TextureType::_2DArray;
649 }
650 }
651 else
652 {
653 if (samples > 1)
654 {
655 return gl::TextureType::_2DMultisample;
656 }
657 else
658 {
659 return gl::TextureType::_2D;
660 }
661 }
662 }
663
// A type-erased (handle type + raw handle) record of a Vulkan object awaiting deferred
// destruction.  Move-only in practice; moves use the swap idiom below.
GarbageObject::GarbageObject() : mHandleType(HandleType::Invalid), mHandle(VK_NULL_HANDLE) {}

GarbageObject::GarbageObject(HandleType handleType, GarbageHandle handle)
    : mHandleType(handleType), mHandle(handle)
{}

GarbageObject::GarbageObject(GarbageObject &&other) : GarbageObject()
{
    *this = std::move(other);
}

// Swap-based move-assign: the moved-from object receives this object's old (initially
// Invalid) state, so it remains safe to destroy.
GarbageObject &GarbageObject::operator=(GarbageObject &&rhs)
{
    std::swap(mHandle, rhs.mHandle);
    std::swap(mHandleType, rhs.mHandleType);
    return *this;
}
681
// GarbageObject implementation
// Using c-style casts here to avoid conditional compile for MSVC 32-bit
// which fails to compile with reinterpret_cast, requiring static_cast.
//
// Destroys the wrapped handle with the vkDestroy*/vkFree* call matching its HandleType,
// then notifies the renderer of the deallocation.  Command buffers are pool-allocated and
// must never reach this path.
void GarbageObject::destroy(Renderer *renderer)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "GarbageObject::destroy");
    VkDevice device = renderer->getDevice();
    switch (mHandleType)
    {
        case HandleType::Semaphore:
            vkDestroySemaphore(device, (VkSemaphore)mHandle, nullptr);
            break;
        case HandleType::CommandBuffer:
            // Command buffers are pool allocated.
            UNREACHABLE();
            break;
        case HandleType::Event:
            vkDestroyEvent(device, (VkEvent)mHandle, nullptr);
            break;
        case HandleType::Fence:
            vkDestroyFence(device, (VkFence)mHandle, nullptr);
            break;
        case HandleType::DeviceMemory:
            vkFreeMemory(device, (VkDeviceMemory)mHandle, nullptr);
            break;
        case HandleType::Buffer:
            vkDestroyBuffer(device, (VkBuffer)mHandle, nullptr);
            break;
        case HandleType::BufferView:
            vkDestroyBufferView(device, (VkBufferView)mHandle, nullptr);
            break;
        case HandleType::Image:
            vkDestroyImage(device, (VkImage)mHandle, nullptr);
            break;
        case HandleType::ImageView:
            vkDestroyImageView(device, (VkImageView)mHandle, nullptr);
            break;
        case HandleType::ShaderModule:
            vkDestroyShaderModule(device, (VkShaderModule)mHandle, nullptr);
            break;
        case HandleType::PipelineLayout:
            vkDestroyPipelineLayout(device, (VkPipelineLayout)mHandle, nullptr);
            break;
        case HandleType::RenderPass:
            vkDestroyRenderPass(device, (VkRenderPass)mHandle, nullptr);
            break;
        case HandleType::Pipeline:
            vkDestroyPipeline(device, (VkPipeline)mHandle, nullptr);
            break;
        case HandleType::DescriptorSetLayout:
            vkDestroyDescriptorSetLayout(device, (VkDescriptorSetLayout)mHandle, nullptr);
            break;
        case HandleType::Sampler:
            vkDestroySampler(device, (VkSampler)mHandle, nullptr);
            break;
        case HandleType::DescriptorPool:
            vkDestroyDescriptorPool(device, (VkDescriptorPool)mHandle, nullptr);
            break;
        case HandleType::Framebuffer:
            vkDestroyFramebuffer(device, (VkFramebuffer)mHandle, nullptr);
            break;
        case HandleType::CommandPool:
            vkDestroyCommandPool(device, (VkCommandPool)mHandle, nullptr);
            break;
        case HandleType::QueryPool:
            vkDestroyQueryPool(device, (VkQueryPool)mHandle, nullptr);
            break;
        case HandleType::Allocation:
            // VMA-managed allocation; freed through the allocator, not the device.
            vma::FreeMemory(renderer->getAllocator().getHandle(), (VmaAllocation)mHandle);
            break;
        default:
            UNREACHABLE();
            break;
    }

    renderer->onDeallocateHandle(mHandleType, 1);
}
759
// Fills |label| as a VkDebugUtilsLabelEXT whose text is |marker| and whose color is keyed
// by the GL debug |source|.  |source| must be one of the six GL_DEBUG_SOURCE_* values.
// |marker| is not copied; it must outlive the label's use.
void MakeDebugUtilsLabel(GLenum source, const char *marker, VkDebugUtilsLabelEXT *label)
{
    // One color per GL_DEBUG_SOURCE_* value, indexed in enum order from GL_DEBUG_SOURCE_API.
    static constexpr angle::ColorF kLabelColors[6] = {
        angle::ColorF(1.0f, 0.5f, 0.5f, 1.0f),  // DEBUG_SOURCE_API
        angle::ColorF(0.5f, 1.0f, 0.5f, 1.0f),  // DEBUG_SOURCE_WINDOW_SYSTEM
        angle::ColorF(0.5f, 0.5f, 1.0f, 1.0f),  // DEBUG_SOURCE_SHADER_COMPILER
        angle::ColorF(0.7f, 0.7f, 0.7f, 1.0f),  // DEBUG_SOURCE_THIRD_PARTY
        angle::ColorF(0.5f, 0.8f, 0.9f, 1.0f),  // DEBUG_SOURCE_APPLICATION
        angle::ColorF(0.9f, 0.8f, 0.5f, 1.0f),  // DEBUG_SOURCE_OTHER
    };

    int colorIndex = source - GL_DEBUG_SOURCE_API;
    ASSERT(colorIndex >= 0 && static_cast<size_t>(colorIndex) < ArraySize(kLabelColors));

    label->sType      = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
    label->pNext      = nullptr;
    label->pLabelName = marker;
    kLabelColors[colorIndex].writeData(label->color);
}
779
SetDebugUtilsObjectName(ContextVk * contextVk,VkObjectType objectType,uint64_t handle,const std::string & label)780 angle::Result SetDebugUtilsObjectName(ContextVk *contextVk,
781 VkObjectType objectType,
782 uint64_t handle,
783 const std::string &label)
784 {
785 Renderer *renderer = contextVk->getRenderer();
786
787 VkDebugUtilsObjectNameInfoEXT objectNameInfo = {};
788 objectNameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
789 objectNameInfo.objectType = objectType;
790 objectNameInfo.objectHandle = handle;
791 objectNameInfo.pObjectName = label.c_str();
792
793 if (vkSetDebugUtilsObjectNameEXT)
794 {
795 ANGLE_VK_TRY(contextVk,
796 vkSetDebugUtilsObjectNameEXT(renderer->getDevice(), &objectNameInfo));
797 }
798 return angle::Result::Continue;
799 }
800
// ClearValuesArray implementation.
// Starts out with all clear values zero-initialized and no index marked as having a pending
// clear.
ClearValuesArray::ClearValuesArray() : mValues{}, mEnabled{} {}

ClearValuesArray::~ClearValuesArray() = default;

ClearValuesArray::ClearValuesArray(const ClearValuesArray &other) = default;

ClearValuesArray &ClearValuesArray::operator=(const ClearValuesArray &rhs) = default;
809
store(uint32_t index,VkImageAspectFlags aspectFlags,const VkClearValue & clearValue)810 void ClearValuesArray::store(uint32_t index,
811 VkImageAspectFlags aspectFlags,
812 const VkClearValue &clearValue)
813 {
814 ASSERT(aspectFlags != 0);
815
816 // We do this double if to handle the packed depth-stencil case.
817 if ((aspectFlags & VK_IMAGE_ASPECT_STENCIL_BIT) != 0)
818 {
819 // Ensure for packed DS we're writing to the depth index.
820 ASSERT(index == kUnpackedDepthIndex ||
821 (index == kUnpackedStencilIndex && aspectFlags == VK_IMAGE_ASPECT_STENCIL_BIT));
822
823 storeNoDepthStencil(kUnpackedStencilIndex, clearValue);
824 }
825
826 if (aspectFlags != VK_IMAGE_ASPECT_STENCIL_BIT)
827 {
828 storeNoDepthStencil(index, clearValue);
829 }
830 }
831
// Records |clearValue| at |index| and marks that index as having a pending clear.
void ClearValuesArray::storeNoDepthStencil(uint32_t index, const VkClearValue &clearValue)
{
    mValues[index] = clearValue;
    mEnabled.set(index);
}
837
// Returns the set of color attachments with a pending clear, masking off the depth/stencil
// tracking bits.
gl::DrawBufferMask ClearValuesArray::getColorMask() const
{
    return gl::DrawBufferMask(mEnabled.bits() & kUnpackedColorBuffersMask);
}
842
// ResourceSerialFactory implementation.
// The counter starts at 1 and issueSerial() pre-increments, so a zero serial is never handed
// out.
ResourceSerialFactory::ResourceSerialFactory() : mCurrentUniqueSerial(1) {}

ResourceSerialFactory::~ResourceSerialFactory() {}
847
issueSerial()848 uint32_t ResourceSerialFactory::issueSerial()
849 {
850 uint32_t newSerial = ++mCurrentUniqueSerial;
851 // make sure serial does not wrap
852 ASSERT(newSerial > 0);
853 return newSerial;
854 }
855
// Stamps out one generate<Type>Serial() method per serial type declared by
// ANGLE_VK_SERIAL_OP; all of them draw from the single shared counter in issueSerial().
#define ANGLE_DEFINE_GEN_VK_SERIAL(Type)                         \
    Type##Serial ResourceSerialFactory::generate##Type##Serial() \
    {                                                            \
        return Type##Serial(issueSerial());                      \
    }

ANGLE_VK_SERIAL_OP(ANGLE_DEFINE_GEN_VK_SERIAL)
863
864 void ClampViewport(VkViewport *viewport)
865 {
866 // 0-sized viewports are invalid in Vulkan.
867 ASSERT(viewport);
868 if (viewport->width == 0.0f)
869 {
870 viewport->width = 1.0f;
871 }
872 if (viewport->height == 0.0f)
873 {
874 viewport->height = 1.0f;
875 }
876 }
877
ApplyPipelineCreationFeedback(Context * context,const VkPipelineCreationFeedback & feedback)878 void ApplyPipelineCreationFeedback(Context *context, const VkPipelineCreationFeedback &feedback)
879 {
880 const bool cacheHit =
881 (feedback.flags & VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT) != 0;
882
883 angle::VulkanPerfCounters &perfCounters = context->getPerfCounters();
884
885 if (cacheHit)
886 {
887 ++perfCounters.pipelineCreationCacheHits;
888 perfCounters.pipelineCreationTotalCacheHitsDurationNs += feedback.duration;
889 }
890 else
891 {
892 ++perfCounters.pipelineCreationCacheMisses;
893 perfCounters.pipelineCreationTotalCacheMissesDurationNs += feedback.duration;
894 }
895 }
896
// Hash so MemoryAllocInfoMapKey can be used as an unordered-map key; delegates to
// angle::ComputeGenericHash over the key's contents.
size_t MemoryAllocInfoMapKey::hash() const
{
    return angle::ComputeGenericHash(*this);
}
901 } // namespace vk
902
#if !defined(ANGLE_SHARED_LIBVULKAN)
// When not using a shared Vulkan library, extension entry points are not resolved at link time.
// The function pointers below start out null and are populated at runtime by the Init* helpers
// further down (via vkGetInstanceProcAddr / vkGetDeviceProcAddr) once the corresponding
// extension is enabled.

// VK_EXT_debug_utils
PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT   = nullptr;
PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = nullptr;
PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT       = nullptr;
PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT           = nullptr;
PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT     = nullptr;
PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT       = nullptr;

// VK_KHR_get_physical_device_properties2
PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR             = nullptr;
PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR                 = nullptr;
PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = nullptr;

// VK_KHR_external_semaphore_fd
PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = nullptr;

// VK_EXT_host_query_reset
PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = nullptr;

// VK_EXT_transform_feedback
PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = nullptr;
PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT             = nullptr;
PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT                 = nullptr;
PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT                       = nullptr;
PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT                           = nullptr;
PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT               = nullptr;

// VK_KHR_get_memory_requirements2
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = nullptr;
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR   = nullptr;

// VK_KHR_bind_memory2
PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = nullptr;
PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR   = nullptr;

// VK_KHR_external_fence_capabilities
PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR =
    nullptr;

// VK_KHR_external_fence_fd
PFN_vkGetFenceFdKHR vkGetFenceFdKHR       = nullptr;
PFN_vkImportFenceFdKHR vkImportFenceFdKHR = nullptr;

// VK_KHR_external_semaphore_capabilities
PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR
    vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = nullptr;

// VK_KHR_sampler_ycbcr_conversion
PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR   = nullptr;
PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = nullptr;

// VK_KHR_create_renderpass2
PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = nullptr;

#    if defined(ANGLE_PLATFORM_FUCHSIA)
// VK_FUCHSIA_imagepipe_surface
PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = nullptr;
#    endif

#    if defined(ANGLE_PLATFORM_ANDROID)
PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID =
    nullptr;
PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = nullptr;
#    endif

#    if defined(ANGLE_PLATFORM_GGP)
PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = nullptr;
#    endif

// Resolve an instance-level entry point; asserts that the loader actually provides it.
#    define GET_INSTANCE_FUNC(vkName)                                                          \
        do                                                                                     \
        {                                                                                      \
            vkName = reinterpret_cast<PFN_##vkName>(vkGetInstanceProcAddr(instance, #vkName)); \
            ASSERT(vkName);                                                                    \
        } while (0)

// Resolve a device-level entry point; asserts that the driver actually provides it.
#    define GET_DEVICE_FUNC(vkName)                                                        \
        do                                                                                 \
        {                                                                                  \
            vkName = reinterpret_cast<PFN_##vkName>(vkGetDeviceProcAddr(device, #vkName)); \
            ASSERT(vkName);                                                                \
        } while (0)

// VK_KHR_shared_presentable_image
PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = nullptr;

// VK_EXT_extended_dynamic_state
PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT             = nullptr;
PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT                           = nullptr;
PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = nullptr;
PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT               = nullptr;
PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT             = nullptr;
PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT           = nullptr;
PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT                         = nullptr;
PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT         = nullptr;
PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT           = nullptr;
PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT                         = nullptr;
PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT         = nullptr;
PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT         = nullptr;

// VK_EXT_extended_dynamic_state2
PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT                 = nullptr;
PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT                                 = nullptr;
PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT           = nullptr;
PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT   = nullptr;
PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = nullptr;

// VK_EXT_vertex_input_dynamic_state
PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = nullptr;

// VK_KHR_dynamic_rendering
PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = nullptr;
PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR     = nullptr;

// VK_KHR_dynamic_rendering_local_read
PFN_vkCmdSetRenderingAttachmentLocationsKHR vkCmdSetRenderingAttachmentLocationsKHR     = nullptr;
PFN_vkCmdSetRenderingInputAttachmentIndicesKHR vkCmdSetRenderingInputAttachmentIndicesKHR = nullptr;

// VK_KHR_fragment_shading_rate
PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = nullptr;
PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR                         = nullptr;

// VK_GOOGLE_display_timing
PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = nullptr;

// VK_EXT_host_image_copy
PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT                   = nullptr;
PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT                 = nullptr;
PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT                 = nullptr;
PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = nullptr;
PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT         = nullptr;

// VK_KHR_Synchronization2
PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = nullptr;
PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR   = nullptr;
1039
// Loaders for the extension entry points declared above.  Each helper is called once the
// corresponding extension has been enabled on the instance/device; GET_INSTANCE_FUNC /
// GET_DEVICE_FUNC assert that resolution succeeded.
void InitDebugUtilsEXTFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkCreateDebugUtilsMessengerEXT);
    GET_INSTANCE_FUNC(vkDestroyDebugUtilsMessengerEXT);
    GET_INSTANCE_FUNC(vkCmdBeginDebugUtilsLabelEXT);
    GET_INSTANCE_FUNC(vkCmdEndDebugUtilsLabelEXT);
    GET_INSTANCE_FUNC(vkCmdInsertDebugUtilsLabelEXT);
    GET_INSTANCE_FUNC(vkSetDebugUtilsObjectNameEXT);
}

void InitTransformFeedbackEXTFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdBindTransformFeedbackBuffersEXT);
    GET_DEVICE_FUNC(vkCmdBeginTransformFeedbackEXT);
    GET_DEVICE_FUNC(vkCmdEndTransformFeedbackEXT);
    GET_DEVICE_FUNC(vkCmdBeginQueryIndexedEXT);
    GET_DEVICE_FUNC(vkCmdEndQueryIndexedEXT);
    GET_DEVICE_FUNC(vkCmdDrawIndirectByteCountEXT);
}

// VK_KHR_create_renderpass2
void InitRenderPass2KHRFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCreateRenderPass2KHR);
}

#    if defined(ANGLE_PLATFORM_FUCHSIA)
void InitImagePipeSurfaceFUCHSIAFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkCreateImagePipeSurfaceFUCHSIA);
}
#    endif

#    if defined(ANGLE_PLATFORM_ANDROID)
void InitExternalMemoryHardwareBufferANDROIDFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkGetAndroidHardwareBufferPropertiesANDROID);
    GET_DEVICE_FUNC(vkGetMemoryAndroidHardwareBufferANDROID);
}
#    endif

#    if defined(ANGLE_PLATFORM_GGP)
void InitGGPStreamDescriptorSurfaceFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkCreateStreamDescriptorSurfaceGGP);
}
#    endif  // defined(ANGLE_PLATFORM_GGP)

void InitExternalSemaphoreFdFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkImportSemaphoreFdKHR);
}

void InitHostQueryResetFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkResetQueryPoolEXT);
}

// VK_KHR_external_fence_fd
void InitExternalFenceFdFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkGetFenceFdKHR);
    GET_DEVICE_FUNC(vkImportFenceFdKHR);
}

// VK_KHR_shared_presentable_image
void InitGetSwapchainStatusKHRFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkGetSwapchainStatusKHR);
}

// VK_EXT_extended_dynamic_state
void InitExtendedDynamicStateEXTFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdBindVertexBuffers2EXT);
    GET_DEVICE_FUNC(vkCmdSetCullModeEXT);
    GET_DEVICE_FUNC(vkCmdSetDepthBoundsTestEnableEXT);
    GET_DEVICE_FUNC(vkCmdSetDepthCompareOpEXT);
    GET_DEVICE_FUNC(vkCmdSetDepthTestEnableEXT);
    GET_DEVICE_FUNC(vkCmdSetDepthWriteEnableEXT);
    GET_DEVICE_FUNC(vkCmdSetFrontFaceEXT);
    GET_DEVICE_FUNC(vkCmdSetPrimitiveTopologyEXT);
    GET_DEVICE_FUNC(vkCmdSetScissorWithCountEXT);
    GET_DEVICE_FUNC(vkCmdSetStencilOpEXT);
    GET_DEVICE_FUNC(vkCmdSetStencilTestEnableEXT);
    GET_DEVICE_FUNC(vkCmdSetViewportWithCountEXT);
}

// VK_EXT_extended_dynamic_state2
void InitExtendedDynamicState2EXTFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdSetDepthBiasEnableEXT);
    GET_DEVICE_FUNC(vkCmdSetLogicOpEXT);
    GET_DEVICE_FUNC(vkCmdSetPatchControlPointsEXT);
    GET_DEVICE_FUNC(vkCmdSetPrimitiveRestartEnableEXT);
    GET_DEVICE_FUNC(vkCmdSetRasterizerDiscardEnableEXT);
}

// VK_EXT_vertex_input_dynamic_state
void InitVertexInputDynamicStateEXTFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdSetVertexInputEXT);
}

// VK_KHR_dynamic_rendering
void InitDynamicRenderingFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdBeginRenderingKHR);
    GET_DEVICE_FUNC(vkCmdEndRenderingKHR);
}

// VK_KHR_dynamic_rendering_local_read
void InitDynamicRenderingLocalReadFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdSetRenderingAttachmentLocationsKHR);
    GET_DEVICE_FUNC(vkCmdSetRenderingInputAttachmentIndicesKHR);
}

// VK_KHR_fragment_shading_rate
void InitFragmentShadingRateKHRInstanceFunction(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetPhysicalDeviceFragmentShadingRatesKHR);
}

void InitFragmentShadingRateKHRDeviceFunction(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdSetFragmentShadingRateKHR);
}

// VK_GOOGLE_display_timing
void InitGetPastPresentationTimingGoogleFunction(VkDevice device)
{
    GET_DEVICE_FUNC(vkGetPastPresentationTimingGOOGLE);
}

// VK_EXT_host_image_copy
void InitHostImageCopyFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCopyImageToImageEXT);
    GET_DEVICE_FUNC(vkCopyImageToMemoryEXT);
    GET_DEVICE_FUNC(vkCopyMemoryToImageEXT);
    GET_DEVICE_FUNC(vkGetImageSubresourceLayout2EXT);
    GET_DEVICE_FUNC(vkTransitionImageLayoutEXT);
}

void InitSynchronization2Functions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdPipelineBarrier2KHR);
    GET_DEVICE_FUNC(vkCmdWriteTimestamp2KHR);
}

#    undef GET_INSTANCE_FUNC
#    undef GET_DEVICE_FUNC

#endif  // !defined(ANGLE_SHARED_LIBVULKAN)
1195
// When an extension was promoted to core Vulkan, the KHR-suffixed pointer can simply alias the
// core entry point instead of being resolved separately.
#define ASSIGN_FROM_CORE(vkName, EXT)              \
    do                                             \
    {                                              \
        /* The core entry point must be present */ \
        ASSERT(vkName != nullptr);                 \
        vkName##EXT = vkName;                      \
    } while (0)

void InitGetPhysicalDeviceProperties2KHRFunctionsFromCore()
{
    ASSIGN_FROM_CORE(vkGetPhysicalDeviceProperties2, KHR);
    ASSIGN_FROM_CORE(vkGetPhysicalDeviceFeatures2, KHR);
    ASSIGN_FROM_CORE(vkGetPhysicalDeviceMemoryProperties2, KHR);
}

void InitExternalFenceCapabilitiesFunctionsFromCore()
{
    ASSIGN_FROM_CORE(vkGetPhysicalDeviceExternalFenceProperties, KHR);
}

void InitExternalSemaphoreCapabilitiesFunctionsFromCore()
{
    ASSIGN_FROM_CORE(vkGetPhysicalDeviceExternalSemaphoreProperties, KHR);
}

void InitSamplerYcbcrKHRFunctionsFromCore()
{
    ASSIGN_FROM_CORE(vkCreateSamplerYcbcrConversion, KHR);
    ASSIGN_FROM_CORE(vkDestroySamplerYcbcrConversion, KHR);
}

void InitGetMemoryRequirements2KHRFunctionsFromCore()
{
    ASSIGN_FROM_CORE(vkGetBufferMemoryRequirements2, KHR);
    ASSIGN_FROM_CORE(vkGetImageMemoryRequirements2, KHR);
}

void InitBindMemory2KHRFunctionsFromCore()
{
    ASSIGN_FROM_CORE(vkBindBufferMemory2, KHR);
    ASSIGN_FROM_CORE(vkBindImageMemory2, KHR);
}

#undef ASSIGN_FROM_CORE
1240
CalculateGenerateMipmapFilter(ContextVk * contextVk,angle::FormatID formatID)1241 GLenum CalculateGenerateMipmapFilter(ContextVk *contextVk, angle::FormatID formatID)
1242 {
1243 const bool formatSupportsLinearFiltering = contextVk->getRenderer()->hasImageFormatFeatureBits(
1244 formatID, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT);
1245 const bool hintFastest = contextVk->getState().getGenerateMipmapHint() == GL_FASTEST;
1246
1247 return formatSupportsLinearFiltering && !hintFastest ? GL_LINEAR : GL_NEAREST;
1248 }
1249
1250 namespace gl_vk
1251 {
1252
GetFilter(const GLenum filter)1253 VkFilter GetFilter(const GLenum filter)
1254 {
1255 switch (filter)
1256 {
1257 case GL_LINEAR_MIPMAP_LINEAR:
1258 case GL_LINEAR_MIPMAP_NEAREST:
1259 case GL_LINEAR:
1260 return VK_FILTER_LINEAR;
1261 case GL_NEAREST_MIPMAP_LINEAR:
1262 case GL_NEAREST_MIPMAP_NEAREST:
1263 case GL_NEAREST:
1264 return VK_FILTER_NEAREST;
1265 default:
1266 UNIMPLEMENTED();
1267 return VK_FILTER_MAX_ENUM;
1268 }
1269 }
1270
GetSamplerMipmapMode(const GLenum filter)1271 VkSamplerMipmapMode GetSamplerMipmapMode(const GLenum filter)
1272 {
1273 switch (filter)
1274 {
1275 case GL_LINEAR_MIPMAP_LINEAR:
1276 case GL_NEAREST_MIPMAP_LINEAR:
1277 return VK_SAMPLER_MIPMAP_MODE_LINEAR;
1278 case GL_LINEAR:
1279 case GL_NEAREST:
1280 case GL_NEAREST_MIPMAP_NEAREST:
1281 case GL_LINEAR_MIPMAP_NEAREST:
1282 return VK_SAMPLER_MIPMAP_MODE_NEAREST;
1283 default:
1284 UNIMPLEMENTED();
1285 return VK_SAMPLER_MIPMAP_MODE_MAX_ENUM;
1286 }
1287 }
1288
GetSamplerAddressMode(const GLenum wrap)1289 VkSamplerAddressMode GetSamplerAddressMode(const GLenum wrap)
1290 {
1291 switch (wrap)
1292 {
1293 case GL_REPEAT:
1294 return VK_SAMPLER_ADDRESS_MODE_REPEAT;
1295 case GL_MIRRORED_REPEAT:
1296 return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
1297 case GL_CLAMP_TO_BORDER:
1298 return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
1299 case GL_CLAMP_TO_EDGE:
1300 return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
1301 case GL_MIRROR_CLAMP_TO_EDGE_EXT:
1302 return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
1303 default:
1304 UNIMPLEMENTED();
1305 return VK_SAMPLER_ADDRESS_MODE_MAX_ENUM;
1306 }
1307 }
1308
GetRect(const gl::Rectangle & source)1309 VkRect2D GetRect(const gl::Rectangle &source)
1310 {
1311 return {{source.x, source.y},
1312 {static_cast<uint32_t>(source.width), static_cast<uint32_t>(source.height)}};
1313 }
1314
// Maps a GL primitive mode to the corresponding Vulkan topology.
VkPrimitiveTopology GetPrimitiveTopology(gl::PrimitiveMode mode)
{
    switch (mode)
    {
        case gl::PrimitiveMode::Triangles:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
        case gl::PrimitiveMode::Points:
            return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
        case gl::PrimitiveMode::Lines:
            return VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
        case gl::PrimitiveMode::LineStrip:
            return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
        case gl::PrimitiveMode::TriangleFan:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN;
        case gl::PrimitiveMode::TriangleStrip:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
        case gl::PrimitiveMode::LineLoop:
            // Vulkan has no line-loop topology; mapped to a line strip.  Presumably the closing
            // segment is handled by line-loop emulation elsewhere -- confirm against the caller.
            return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
        case gl::PrimitiveMode::LinesAdjacency:
            return VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY;
        case gl::PrimitiveMode::LineStripAdjacency:
            return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY;
        case gl::PrimitiveMode::TrianglesAdjacency:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY;
        case gl::PrimitiveMode::TriangleStripAdjacency:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY;
        case gl::PrimitiveMode::Patches:
            return VK_PRIMITIVE_TOPOLOGY_PATCH_LIST;
        default:
            UNREACHABLE();
            return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
    }
}
1348
GetPolygonMode(const gl::PolygonMode polygonMode)1349 VkPolygonMode GetPolygonMode(const gl::PolygonMode polygonMode)
1350 {
1351 switch (polygonMode)
1352 {
1353 case gl::PolygonMode::Point:
1354 return VK_POLYGON_MODE_POINT;
1355 case gl::PolygonMode::Line:
1356 return VK_POLYGON_MODE_LINE;
1357 case gl::PolygonMode::Fill:
1358 return VK_POLYGON_MODE_FILL;
1359 default:
1360 UNREACHABLE();
1361 return VK_POLYGON_MODE_FILL;
1362 }
1363 }
1364
GetCullMode(const gl::RasterizerState & rasterState)1365 VkCullModeFlagBits GetCullMode(const gl::RasterizerState &rasterState)
1366 {
1367 if (!rasterState.cullFace)
1368 {
1369 return VK_CULL_MODE_NONE;
1370 }
1371
1372 switch (rasterState.cullMode)
1373 {
1374 case gl::CullFaceMode::Front:
1375 return VK_CULL_MODE_FRONT_BIT;
1376 case gl::CullFaceMode::Back:
1377 return VK_CULL_MODE_BACK_BIT;
1378 case gl::CullFaceMode::FrontAndBack:
1379 return VK_CULL_MODE_FRONT_AND_BACK;
1380 default:
1381 UNREACHABLE();
1382 return VK_CULL_MODE_NONE;
1383 }
1384 }
1385
GetFrontFace(GLenum frontFace,bool invertCullFace)1386 VkFrontFace GetFrontFace(GLenum frontFace, bool invertCullFace)
1387 {
1388 // Invert CW and CCW to have the same behavior as OpenGL.
1389 switch (frontFace)
1390 {
1391 case GL_CW:
1392 return invertCullFace ? VK_FRONT_FACE_CLOCKWISE : VK_FRONT_FACE_COUNTER_CLOCKWISE;
1393 case GL_CCW:
1394 return invertCullFace ? VK_FRONT_FACE_COUNTER_CLOCKWISE : VK_FRONT_FACE_CLOCKWISE;
1395 default:
1396 UNREACHABLE();
1397 return VK_FRONT_FACE_CLOCKWISE;
1398 }
1399 }
1400
// Maps a GL sample count to the corresponding VkSampleCountFlagBits.  |limitSampleCountTo2|
// optionally caps the count (see comment below).
VkSampleCountFlagBits GetSamples(GLint sampleCount, bool limitSampleCountTo2)
{
    if (limitSampleCountTo2)
    {
        // Limiting samples to 2 allows multisampling to work while reducing
        // how much graphics memory is required.  This makes ANGLE nonconformant
        // (GLES 3.0+ requires 4 samples minimum) but gives low memory systems a
        // better chance of running applications.
        sampleCount = std::min(sampleCount, 2);
    }

    switch (sampleCount)
    {
        case 0:
            // Zero samples is not a valid request; treat as single-sampled in release builds.
            UNREACHABLE();
            return VK_SAMPLE_COUNT_1_BIT;
        case 1:
            return VK_SAMPLE_COUNT_1_BIT;
        case 2:
            return VK_SAMPLE_COUNT_2_BIT;
        case 4:
            return VK_SAMPLE_COUNT_4_BIT;
        case 8:
            return VK_SAMPLE_COUNT_8_BIT;
        case 16:
            return VK_SAMPLE_COUNT_16_BIT;
        case 32:
            return VK_SAMPLE_COUNT_32_BIT;
        default:
            UNREACHABLE();
            return VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM;
    }
}
1434
GetSwizzle(const GLenum swizzle)1435 VkComponentSwizzle GetSwizzle(const GLenum swizzle)
1436 {
1437 switch (swizzle)
1438 {
1439 case GL_ALPHA:
1440 return VK_COMPONENT_SWIZZLE_A;
1441 case GL_RED:
1442 return VK_COMPONENT_SWIZZLE_R;
1443 case GL_GREEN:
1444 return VK_COMPONENT_SWIZZLE_G;
1445 case GL_BLUE:
1446 return VK_COMPONENT_SWIZZLE_B;
1447 case GL_ZERO:
1448 return VK_COMPONENT_SWIZZLE_ZERO;
1449 case GL_ONE:
1450 return VK_COMPONENT_SWIZZLE_ONE;
1451 default:
1452 UNREACHABLE();
1453 return VK_COMPONENT_SWIZZLE_IDENTITY;
1454 }
1455 }
1456
// Maps a GL comparison function (depth/stencil/sampler compare) to a VkCompareOp.
VkCompareOp GetCompareOp(const GLenum compareFunc)
{
    switch (compareFunc)
    {
        case GL_NEVER:
            return VK_COMPARE_OP_NEVER;
        case GL_LESS:
            return VK_COMPARE_OP_LESS;
        case GL_EQUAL:
            return VK_COMPARE_OP_EQUAL;
        case GL_LEQUAL:
            return VK_COMPARE_OP_LESS_OR_EQUAL;
        case GL_GREATER:
            return VK_COMPARE_OP_GREATER;
        case GL_NOTEQUAL:
            return VK_COMPARE_OP_NOT_EQUAL;
        case GL_GEQUAL:
            return VK_COMPARE_OP_GREATER_OR_EQUAL;
        case GL_ALWAYS:
            return VK_COMPARE_OP_ALWAYS;
        default:
            UNREACHABLE();
            return VK_COMPARE_OP_ALWAYS;
    }
}
1482
// Maps a GL stencil operation to the equivalent VkStencilOp.
VkStencilOp GetStencilOp(GLenum compareOp)
{
    switch (compareOp)
    {
        case GL_KEEP:
            return VK_STENCIL_OP_KEEP;
        case GL_ZERO:
            return VK_STENCIL_OP_ZERO;
        case GL_REPLACE:
            return VK_STENCIL_OP_REPLACE;
        case GL_INCR:
            return VK_STENCIL_OP_INCREMENT_AND_CLAMP;
        case GL_DECR:
            return VK_STENCIL_OP_DECREMENT_AND_CLAMP;
        case GL_INCR_WRAP:
            return VK_STENCIL_OP_INCREMENT_AND_WRAP;
        case GL_DECR_WRAP:
            return VK_STENCIL_OP_DECREMENT_AND_WRAP;
        case GL_INVERT:
            return VK_STENCIL_OP_INVERT;
        default:
            UNREACHABLE();
            return VK_STENCIL_OP_KEEP;
    }
}
1508
// Maps a GL logic op to the equivalent VkLogicOp arithmetically.
VkLogicOp GetLogicOp(const GLenum logicOp)
{
    // GL's logic op values are 0x1500 + op, where op is the same value as Vulkan's VkLogicOp.
    return static_cast<VkLogicOp>(logicOp - GL_CLEAR);
}
1514
// Component-wise conversion of a gl::Offset to a VkOffset3D.
void GetOffset(const gl::Offset &glOffset, VkOffset3D *vkOffset)
{
    vkOffset->x = glOffset.x;
    vkOffset->y = glOffset.y;
    vkOffset->z = glOffset.z;
}
1521
GetExtent(const gl::Extents & glExtent,VkExtent3D * vkExtent)1522 void GetExtent(const gl::Extents &glExtent, VkExtent3D *vkExtent)
1523 {
1524 vkExtent->width = glExtent.width;
1525 vkExtent->height = glExtent.height;
1526 vkExtent->depth = glExtent.depth;
1527 }
1528
// Maps a GL texture type to the VkImageType of its backing image.  Cube and array variants are
// still 2D images in Vulkan (faces/layers are expressed through array layers).
VkImageType GetImageType(gl::TextureType textureType)
{
    switch (textureType)
    {
        case gl::TextureType::_2D:
        case gl::TextureType::_2DArray:
        case gl::TextureType::_2DMultisample:
        case gl::TextureType::_2DMultisampleArray:
        case gl::TextureType::CubeMap:
        case gl::TextureType::CubeMapArray:
        case gl::TextureType::External:
            return VK_IMAGE_TYPE_2D;
        case gl::TextureType::_3D:
            return VK_IMAGE_TYPE_3D;
        default:
            // We will need to implement all the texture types for ES3+.
            UNIMPLEMENTED();
            return VK_IMAGE_TYPE_MAX_ENUM;
    }
}
1549
// Maps a GL texture type to the VkImageViewType used to sample it.
VkImageViewType GetImageViewType(gl::TextureType textureType)
{
    switch (textureType)
    {
        case gl::TextureType::_2D:
        case gl::TextureType::_2DMultisample:
        case gl::TextureType::External:
            return VK_IMAGE_VIEW_TYPE_2D;
        case gl::TextureType::_2DArray:
        case gl::TextureType::_2DMultisampleArray:
            return VK_IMAGE_VIEW_TYPE_2D_ARRAY;
        case gl::TextureType::_3D:
            return VK_IMAGE_VIEW_TYPE_3D;
        case gl::TextureType::CubeMap:
            return VK_IMAGE_VIEW_TYPE_CUBE;
        case gl::TextureType::CubeMapArray:
            return VK_IMAGE_VIEW_TYPE_CUBE_ARRAY;
        default:
            // We will need to implement all the texture types for ES3+.
            UNIMPLEMENTED();
            return VK_IMAGE_VIEW_TYPE_MAX_ENUM;
    }
}
1573
GetColorComponentFlags(bool red,bool green,bool blue,bool alpha)1574 VkColorComponentFlags GetColorComponentFlags(bool red, bool green, bool blue, bool alpha)
1575 {
1576 return (red ? VK_COLOR_COMPONENT_R_BIT : 0) | (green ? VK_COLOR_COMPONENT_G_BIT : 0) |
1577 (blue ? VK_COLOR_COMPONENT_B_BIT : 0) | (alpha ? VK_COLOR_COMPONENT_A_BIT : 0);
1578 }
1579
// ORs together the Vulkan stage bit (via kShaderStageMap) for every active shader stage.
VkShaderStageFlags GetShaderStageFlags(gl::ShaderBitSet activeShaders)
{
    VkShaderStageFlags flags = 0;
    for (const gl::ShaderType shaderType : activeShaders)
    {
        flags |= kShaderStageMap[shaderType];
    }
    return flags;
}
1589
// Converts a GL viewport into a VkViewport, compensating for the differing y-axis direction
// between GL and Vulkan.  |invertViewport| indicates a default (window) framebuffer;
// |clipSpaceOriginUpperLeft| reflects the app-selected clip-space origin.  A negative height
// (flipped viewport, VK_KHR_maintenance1 behavior) is produced where needed; see diagrams.
void GetViewport(const gl::Rectangle &viewport,
                 float nearPlane,
                 float farPlane,
                 bool invertViewport,
                 bool clipSpaceOriginUpperLeft,
                 GLint renderAreaHeight,
                 VkViewport *viewportOut)
{
    viewportOut->x        = static_cast<float>(viewport.x);
    viewportOut->y        = static_cast<float>(viewport.y);
    viewportOut->width    = static_cast<float>(viewport.width);
    viewportOut->height   = static_cast<float>(viewport.height);
    viewportOut->minDepth = gl::clamp01(nearPlane);
    viewportOut->maxDepth = gl::clamp01(farPlane);

    // Say an application intends to draw a primitive (shown as 'o' below), it can choose to use
    // different clip space origin. When clip space origin (shown as 'C' below) is switched from
    // lower-left to upper-left, primitives will be rendered with its y-coordinate flipped.

    // Rendered content will differ based on whether it is a default framebuffer or a user defined
    // framebuffer. We modify the viewport's 'y' and 'h' accordingly.

    // clip space origin is lower-left
    // Expected draw in GLES        default framebuffer    user defined framebuffer
    // (0,H)                                (0,0)                  (0,0)
    // +                                    +-----------+ (W,0)    +-----------+ (W,0)
    // |                                    |                      |  C----+
    // |                                    |                      |  |    | (h)
    // |  +----+                            |  +----+              |  | O  |
    // |  | O  |                            |  | O  | (-h)         |  +----+
    // |  |    |                            |  |    |              |
    // |  C----+                            |  C----+              |
    // +-----------+ (W,0)                  +                      +
    // (0,0)                                (0,H)                  (0,H)
    //                                      y' = H - h             y' = y

    // clip space origin is upper-left
    // Expected draw in GLES        default framebuffer     user defined framebuffer
    // (0,H)                                (0,0)                  (0,0)
    // +                                    +-----------+ (W,0)    +-----------+ (W,0)
    // |                                    |                      |  +----+
    // |                                    |                      |  | O  | (-h)
    // |  C----+                            |  C----+              |  |    |
    // |  |    |                            |  |    | (h)          |  C----+
    // |  | O  |                            |  | O  |              |
    // |  +----+                            |  +----+              |
    // +-----------+ (W,0)                  +                      +
    // (0,0)                                (0,H)                  (0,H)
    //                                      y' = H - (y + h)       y' = y + H

    if (clipSpaceOriginUpperLeft)
    {
        if (invertViewport)
        {
            viewportOut->y = static_cast<float>(renderAreaHeight - (viewport.height + viewport.y));
        }
        else
        {
            viewportOut->y      = static_cast<float>(viewport.height + viewport.y);
            viewportOut->height = -viewportOut->height;
        }
    }
    else
    {
        if (invertViewport)
        {
            viewportOut->y      = static_cast<float>(renderAreaHeight - viewport.y);
            viewportOut->height = -viewportOut->height;
        }
    }
}
1661
GetExtentsAndLayerCount(gl::TextureType textureType,const gl::Extents & extents,VkExtent3D * extentsOut,uint32_t * layerCountOut)1662 void GetExtentsAndLayerCount(gl::TextureType textureType,
1663 const gl::Extents &extents,
1664 VkExtent3D *extentsOut,
1665 uint32_t *layerCountOut)
1666 {
1667 extentsOut->width = extents.width;
1668 extentsOut->height = extents.height;
1669
1670 switch (textureType)
1671 {
1672 case gl::TextureType::CubeMap:
1673 extentsOut->depth = 1;
1674 *layerCountOut = gl::kCubeFaceCount;
1675 break;
1676
1677 case gl::TextureType::_2DArray:
1678 case gl::TextureType::_2DMultisampleArray:
1679 case gl::TextureType::CubeMapArray:
1680 extentsOut->depth = 1;
1681 *layerCountOut = extents.depth;
1682 break;
1683
1684 default:
1685 extentsOut->depth = extents.depth;
1686 *layerCountOut = 1;
1687 break;
1688 }
1689 }
1690
GetLevelIndex(gl::LevelIndex levelGL,gl::LevelIndex baseLevel)1691 vk::LevelIndex GetLevelIndex(gl::LevelIndex levelGL, gl::LevelIndex baseLevel)
1692 {
1693 ASSERT(baseLevel <= levelGL);
1694 return vk::LevelIndex(levelGL.get() - baseLevel.get());
1695 }
1696
GetTilingMode(gl::TilingMode tilingMode)1697 VkImageTiling GetTilingMode(gl::TilingMode tilingMode)
1698 {
1699 switch (tilingMode)
1700 {
1701 case gl::TilingMode::Optimal:
1702 return VK_IMAGE_TILING_OPTIMAL;
1703 case gl::TilingMode::Linear:
1704 return VK_IMAGE_TILING_LINEAR;
1705 default:
1706 UNREACHABLE();
1707 return VK_IMAGE_TILING_OPTIMAL;
1708 }
1709 }
1710
1711 } // namespace gl_vk
1712
1713 namespace vk_gl
1714 {
AddSampleCounts(VkSampleCountFlags sampleCounts,gl::SupportedSampleSet * setOut)1715 void AddSampleCounts(VkSampleCountFlags sampleCounts, gl::SupportedSampleSet *setOut)
1716 {
1717 // The possible bits are VK_SAMPLE_COUNT_n_BIT = n, with n = 1 << b. At the time of this
1718 // writing, b is in [0, 6], however, we test all 32 bits in case the enum is extended.
1719 for (size_t bit : angle::BitSet32<32>(sampleCounts & kSupportedSampleCounts))
1720 {
1721 setOut->insert(static_cast<GLuint>(1 << bit));
1722 }
1723 }
1724
GetMaxSampleCount(VkSampleCountFlags sampleCounts)1725 GLuint GetMaxSampleCount(VkSampleCountFlags sampleCounts)
1726 {
1727 GLuint maxCount = 0;
1728 for (size_t bit : angle::BitSet32<32>(sampleCounts & kSupportedSampleCounts))
1729 {
1730 maxCount = static_cast<GLuint>(1 << bit);
1731 }
1732 return maxCount;
1733 }
1734
GetSampleCount(VkSampleCountFlags supportedCounts,GLuint requestedCount)1735 GLuint GetSampleCount(VkSampleCountFlags supportedCounts, GLuint requestedCount)
1736 {
1737 for (size_t bit : angle::BitSet32<32>(supportedCounts & kSupportedSampleCounts))
1738 {
1739 GLuint sampleCount = static_cast<GLuint>(1 << bit);
1740 if (sampleCount >= requestedCount)
1741 {
1742 return sampleCount;
1743 }
1744 }
1745
1746 UNREACHABLE();
1747 return 0;
1748 }
1749
GetLevelIndex(vk::LevelIndex levelVk,gl::LevelIndex baseLevel)1750 gl::LevelIndex GetLevelIndex(vk::LevelIndex levelVk, gl::LevelIndex baseLevel)
1751 {
1752 return gl::LevelIndex(levelVk.get() + baseLevel.get());
1753 }
1754
ConvertVkFixedRateToGLFixedRate(const VkImageCompressionFixedRateFlagsEXT vkCompressionRate)1755 GLenum ConvertVkFixedRateToGLFixedRate(const VkImageCompressionFixedRateFlagsEXT vkCompressionRate)
1756 {
1757 switch (vkCompressionRate)
1758 {
1759 case VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT:
1760 return GL_SURFACE_COMPRESSION_FIXED_RATE_NONE_EXT;
1761 case VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT:
1762 return GL_SURFACE_COMPRESSION_FIXED_RATE_1BPC_EXT;
1763 case VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT:
1764 return GL_SURFACE_COMPRESSION_FIXED_RATE_2BPC_EXT;
1765 case VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT:
1766 return GL_SURFACE_COMPRESSION_FIXED_RATE_3BPC_EXT;
1767 case VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT:
1768 return GL_SURFACE_COMPRESSION_FIXED_RATE_4BPC_EXT;
1769 case VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT:
1770 return GL_SURFACE_COMPRESSION_FIXED_RATE_5BPC_EXT;
1771 case VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT:
1772 return GL_SURFACE_COMPRESSION_FIXED_RATE_6BPC_EXT;
1773 case VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT:
1774 return GL_SURFACE_COMPRESSION_FIXED_RATE_7BPC_EXT;
1775 case VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT:
1776 return GL_SURFACE_COMPRESSION_FIXED_RATE_8BPC_EXT;
1777 case VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT:
1778 return GL_SURFACE_COMPRESSION_FIXED_RATE_9BPC_EXT;
1779 case VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT:
1780 return GL_SURFACE_COMPRESSION_FIXED_RATE_10BPC_EXT;
1781 case VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT:
1782 return GL_SURFACE_COMPRESSION_FIXED_RATE_11BPC_EXT;
1783 case VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT:
1784 return GL_SURFACE_COMPRESSION_FIXED_RATE_12BPC_EXT;
1785 default:
1786 UNREACHABLE();
1787 return GL_SURFACE_COMPRESSION_FIXED_RATE_NONE_EXT;
1788 }
1789 }
1790
convertCompressionFlagsToGLFixedRates(VkImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags,GLint bufSize,GLint * rates)1791 GLint convertCompressionFlagsToGLFixedRates(
1792 VkImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags,
1793 GLint bufSize,
1794 GLint *rates)
1795 {
1796 if (imageCompressionFixedRateFlags == VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT)
1797 {
1798 if (nullptr != rates)
1799 {
1800 rates[0] = GL_SURFACE_COMPRESSION_FIXED_RATE_NONE_EXT;
1801 }
1802 return 0;
1803 }
1804 VkImageCompressionFixedRateFlagsEXT tmpFlags = imageCompressionFixedRateFlags;
1805 uint8_t bitCount = 0;
1806 std::vector<GLint> GLRates;
1807
1808 while (tmpFlags > 0)
1809 {
1810 if ((tmpFlags & 1) == true)
1811 {
1812 GLRates.push_back(ConvertVkFixedRateToGLFixedRate(1 << bitCount));
1813 }
1814 bitCount += 1;
1815 tmpFlags >>= 1;
1816 }
1817
1818 GLint size = static_cast<GLint>(GLRates.size());
1819 // rates could be nullprt, as user only want get the size(count) of rates
1820 if (nullptr != rates && size <= bufSize)
1821 {
1822 std::copy(GLRates.begin(), GLRates.end(), rates);
1823 }
1824 return size;
1825 }
1826 } // namespace vk_gl
1827 } // namespace rx
1828