//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// vk_renderer.h:
//    Defines the class interface for Renderer.
//
9
10 #ifndef LIBANGLE_RENDERER_VULKAN_RENDERERVK_H_
11 #define LIBANGLE_RENDERER_VULKAN_RENDERERVK_H_
12
13 #include <condition_variable>
14 #include <deque>
15 #include <memory>
16 #include <mutex>
17 #include <queue>
18 #include <thread>
19
20 #include "common/PackedEnums.h"
21 #include "common/SimpleMutex.h"
22 #include "common/WorkerThread.h"
23 #include "common/angleutils.h"
24 #include "common/vulkan/vk_headers.h"
25 #include "common/vulkan/vulkan_icd.h"
26 #include "libANGLE/Caps.h"
27 #include "libANGLE/renderer/vulkan/CommandProcessor.h"
28 #include "libANGLE/renderer/vulkan/DebugAnnotatorVk.h"
29 #include "libANGLE/renderer/vulkan/MemoryTracking.h"
30 #include "libANGLE/renderer/vulkan/QueryVk.h"
31 #include "libANGLE/renderer/vulkan/UtilsVk.h"
32 #include "libANGLE/renderer/vulkan/vk_format_utils.h"
33 #include "libANGLE/renderer/vulkan/vk_helpers.h"
34 #include "libANGLE/renderer/vulkan/vk_internal_shaders_autogen.h"
35 #include "libANGLE/renderer/vulkan/vk_mem_alloc_wrapper.h"
36 #include "libANGLE/renderer/vulkan/vk_resource.h"
37
38 namespace angle
39 {
40 class Library;
41 struct FrontendFeatures;
42 } // namespace angle
43
44 namespace rx
45 {
46 class FramebufferVk;
47
48 namespace vk
49 {
50 class Format;
51
52 static constexpr size_t kMaxExtensionNames = 400;
53 using ExtensionNameList = angle::FixedVector<const char *, kMaxExtensionNames>;
54
// Describes one known-benign syncval (synchronization validation) report so
// that it can be filtered out of the validation output.  A report is matched
// by its message id plus substrings of its contents.
struct SkippedSyncvalMessage
{
    const char *messageId;
    const char *messageContents1;
    // Optional extra substring to match; defaults to empty (no extra match).
    const char *messageContents2 = "";
    // NOTE(review): presumably marks messages caused by emulated (non-coherent)
    // color framebuffer fetch — confirm against the filtering code.
    bool isDueToNonConformantCoherentColorFramebufferFetch = false;
};
63
64 class ImageMemorySuballocator : angle::NonCopyable
65 {
66 public:
67 ImageMemorySuballocator();
68 ~ImageMemorySuballocator();
69
70 void destroy(vk::Renderer *renderer);
71
72 // Allocates memory for the image and binds it.
73 VkResult allocateAndBindMemory(Context *context,
74 Image *image,
75 const VkImageCreateInfo *imageCreateInfo,
76 VkMemoryPropertyFlags requiredFlags,
77 VkMemoryPropertyFlags preferredFlags,
78 const VkMemoryRequirements *memoryRequirements,
79 const bool allocateDedicatedMemory,
80 MemoryAllocationType memoryAllocationType,
81 Allocation *allocationOut,
82 VkMemoryPropertyFlags *memoryFlagsOut,
83 uint32_t *memoryTypeIndexOut,
84 VkDeviceSize *sizeOut);
85
86 // Maps the memory to initialize with non-zero value.
87 VkResult mapMemoryAndInitWithNonZeroValue(vk::Renderer *renderer,
88 Allocation *allocation,
89 VkDeviceSize size,
90 int value,
91 VkMemoryPropertyFlags flags);
92
93 // Determines if dedicated memory is required for the allocation.
94 bool needsDedicatedMemory(VkDeviceSize size) const;
95 };
96
97 // Supports one semaphore from current surface, and one semaphore passed to
98 // glSignalSemaphoreEXT.
99 using SignalSemaphoreVector = angle::FixedVector<VkSemaphore, 2>;
100
101 // Recursive function to process variable arguments for garbage collection
CollectGarbage(std::vector<vk::GarbageObject> * garbageOut)102 inline void CollectGarbage(std::vector<vk::GarbageObject> *garbageOut) {}
103 template <typename ArgT, typename... ArgsT>
CollectGarbage(std::vector<vk::GarbageObject> * garbageOut,ArgT object,ArgsT...objectsIn)104 void CollectGarbage(std::vector<vk::GarbageObject> *garbageOut, ArgT object, ArgsT... objectsIn)
105 {
106 if (object->valid())
107 {
108 garbageOut->emplace_back(vk::GarbageObject::Get(object));
109 }
110 CollectGarbage(garbageOut, objectsIn...);
111 }
112
113 // Recursive function to process variable arguments for garbage destroy
DestroyGarbage(vk::Renderer * renderer)114 inline void DestroyGarbage(vk::Renderer *renderer) {}
115
116 class OneOffCommandPool : angle::NonCopyable
117 {
118 public:
119 OneOffCommandPool();
120 void init(vk::ProtectionType protectionType);
121 angle::Result getCommandBuffer(vk::Context *context,
122 vk::PrimaryCommandBuffer *commandBufferOut);
123 void releaseCommandBuffer(const QueueSerial &submitQueueSerial,
124 vk::PrimaryCommandBuffer &&primary);
125 void destroy(VkDevice device);
126
127 private:
128 vk::ProtectionType mProtectionType;
129 angle::SimpleMutex mMutex;
130 vk::CommandPool mCommandPool;
131 struct PendingOneOffCommands
132 {
133 vk::ResourceUse use;
134 vk::PrimaryCommandBuffer commandBuffer;
135 };
136 std::deque<PendingOneOffCommands> mPendingCommands;
137 };
138
// Tri-state request for enabling the Vulkan debug/validation layers.
enum class UseDebugLayers
{
    Yes,
    YesIfAvailable,
    No,
};
145
// Whether the Vulkan swapchain path is in use.
enum class UseVulkanSwapchain
{
    Yes,
    No,
};
151
152 class Renderer : angle::NonCopyable
153 {
154 public:
155 Renderer();
156 ~Renderer();
157
158 angle::Result initialize(vk::Context *context,
159 vk::GlobalOps *globalOps,
160 angle::vk::ICD desiredICD,
161 uint32_t preferredVendorId,
162 uint32_t preferredDeviceId,
163 UseDebugLayers useDebugLayers,
164 const char *wsiExtension,
165 const char *wsiLayer,
166 angle::NativeWindowSystem nativeWindowSystem,
167 const angle::FeatureOverrides &featureOverrides);
168
169 // Reload volk vk* function ptrs if needed for an already initialized Renderer
170 void reloadVolkIfNeeded() const;
171 void onDestroy(vk::Context *context);
172
173 void notifyDeviceLost();
174 bool isDeviceLost() const;
175 bool hasSharedGarbage();
176
177 std::string getVendorString() const;
178 std::string getRendererDescription() const;
179 std::string getVersionString(bool includeFullVersion) const;
180
181 gl::Version getMaxSupportedESVersion() const;
182 gl::Version getMaxConformantESVersion() const;
183
184 uint32_t getDeviceVersion();
getInstance()185 VkInstance getInstance() const { return mInstance; }
getPhysicalDevice()186 VkPhysicalDevice getPhysicalDevice() const { return mPhysicalDevice; }
getPhysicalDeviceProperties()187 const VkPhysicalDeviceProperties &getPhysicalDeviceProperties() const
188 {
189 return mPhysicalDeviceProperties;
190 }
getPhysicalDeviceDrmProperties()191 const VkPhysicalDeviceDrmPropertiesEXT &getPhysicalDeviceDrmProperties() const
192 {
193 return mDrmProperties;
194 }
195 const VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &
getPhysicalDevicePrimitivesGeneratedQueryFeatures()196 getPhysicalDevicePrimitivesGeneratedQueryFeatures() const
197 {
198 return mPrimitivesGeneratedQueryFeatures;
199 }
getPhysicalDeviceHostImageCopyProperties()200 const VkPhysicalDeviceHostImageCopyPropertiesEXT &getPhysicalDeviceHostImageCopyProperties()
201 const
202 {
203 return mHostImageCopyProperties;
204 }
getPhysicalDeviceFeatures()205 const VkPhysicalDeviceFeatures &getPhysicalDeviceFeatures() const
206 {
207 return mPhysicalDeviceFeatures;
208 }
getEnabledFeatures()209 const VkPhysicalDeviceFeatures2KHR &getEnabledFeatures() const { return mEnabledFeatures; }
getDevice()210 VkDevice getDevice() const { return mDevice; }
211
getAllocator()212 const vk::Allocator &getAllocator() const { return mAllocator; }
getImageMemorySuballocator()213 vk::ImageMemorySuballocator &getImageMemorySuballocator() { return mImageMemorySuballocator; }
214
215 angle::Result checkQueueForSurfacePresent(vk::Context *context,
216 VkSurfaceKHR surface,
217 bool *supportedOut);
218
219 const gl::Caps &getNativeCaps() const;
220 const gl::TextureCapsMap &getNativeTextureCaps() const;
221 const gl::Extensions &getNativeExtensions() const;
222 const gl::Limitations &getNativeLimitations() const;
223 const ShPixelLocalStorageOptions &getNativePixelLocalStorageOptions() const;
224 void initializeFrontendFeatures(angle::FrontendFeatures *features) const;
225
getQueueFamilyIndex()226 uint32_t getQueueFamilyIndex() const { return mCurrentQueueFamilyIndex; }
getQueueFamilyProperties()227 const VkQueueFamilyProperties &getQueueFamilyProperties() const
228 {
229 return mQueueFamilyProperties[mCurrentQueueFamilyIndex];
230 }
getDeviceQueueIndex(egl::ContextPriority priority)231 const DeviceQueueIndex getDeviceQueueIndex(egl::ContextPriority priority) const
232 {
233 return mCommandQueue.getDeviceQueueIndex(priority);
234 }
getDefaultDeviceQueueIndex()235 const DeviceQueueIndex getDefaultDeviceQueueIndex() const
236 {
237 // By default it will always use medium priority
238 return mCommandQueue.getDeviceQueueIndex(egl::ContextPriority::Medium);
239 }
240
getMemoryProperties()241 const vk::MemoryProperties &getMemoryProperties() const { return mMemoryProperties; }
242
getFormat(GLenum internalFormat)243 const vk::Format &getFormat(GLenum internalFormat) const
244 {
245 return mFormatTable[internalFormat];
246 }
247
getFormat(angle::FormatID formatID)248 const vk::Format &getFormat(angle::FormatID formatID) const { return mFormatTable[formatID]; }
249
250 // Get the pipeline cache data after retrieving the size, but only if the size is increased
251 // since last query. This function should be called with the |mPipelineCacheMutex| lock already
252 // held.
253 angle::Result getLockedPipelineCacheDataIfNew(vk::Context *context,
254 size_t *pipelineCacheSizeOut,
255 size_t lastSyncSize,
256 std::vector<uint8_t> *pipelineCacheDataOut);
257 angle::Result syncPipelineCacheVk(vk::Context *context,
258 vk::GlobalOps *globalOps,
259 const gl::Context *contextGL);
260
getFeatures()261 const angle::FeaturesVk &getFeatures() const { return mFeatures; }
getMaxVertexAttribDivisor()262 uint32_t getMaxVertexAttribDivisor() const { return mMaxVertexAttribDivisor; }
getMaxVertexAttribStride()263 VkDeviceSize getMaxVertexAttribStride() const { return mMaxVertexAttribStride; }
getMaxColorInputAttachmentCount()264 uint32_t getMaxColorInputAttachmentCount() const { return mMaxColorInputAttachmentCount; }
265
getDefaultUniformBufferSize()266 uint32_t getDefaultUniformBufferSize() const { return mDefaultUniformBufferSize; }
267
getEnabledICD()268 angle::vk::ICD getEnabledICD() const { return mEnabledICD; }
isMockICDEnabled()269 bool isMockICDEnabled() const { return mEnabledICD == angle::vk::ICD::Mock; }
270
271 // Query the format properties for select bits (linearTilingFeatures, optimalTilingFeatures
272 // and bufferFeatures). Looks through mandatory features first, and falls back to querying
273 // the device (first time only).
274 bool hasLinearImageFormatFeatureBits(angle::FormatID format,
275 const VkFormatFeatureFlags featureBits) const;
276 VkFormatFeatureFlags getLinearImageFormatFeatureBits(
277 angle::FormatID format,
278 const VkFormatFeatureFlags featureBits) const;
279 VkFormatFeatureFlags getImageFormatFeatureBits(angle::FormatID format,
280 const VkFormatFeatureFlags featureBits) const;
281 bool hasImageFormatFeatureBits(angle::FormatID format,
282 const VkFormatFeatureFlags featureBits) const;
283 bool hasBufferFormatFeatureBits(angle::FormatID format,
284 const VkFormatFeatureFlags featureBits) const;
285
isAsyncCommandQueueEnabled()286 bool isAsyncCommandQueueEnabled() const { return mFeatures.asyncCommandQueue.enabled; }
isAsyncCommandBufferResetAndGarbageCleanupEnabled()287 bool isAsyncCommandBufferResetAndGarbageCleanupEnabled() const
288 {
289 return mFeatures.asyncCommandBufferResetAndGarbageCleanup.enabled;
290 }
291
getDriverPriority(egl::ContextPriority priority)292 ANGLE_INLINE egl::ContextPriority getDriverPriority(egl::ContextPriority priority)
293 {
294 return mCommandQueue.getDriverPriority(priority);
295 }
296
getQueue(egl::ContextPriority priority)297 VkQueue getQueue(egl::ContextPriority priority) { return mCommandQueue.getQueue(priority); }
298
299 // This command buffer should be submitted immediately via queueSubmitOneOff.
getCommandBufferOneOff(vk::Context * context,vk::ProtectionType protectionType,vk::PrimaryCommandBuffer * commandBufferOut)300 angle::Result getCommandBufferOneOff(vk::Context *context,
301 vk::ProtectionType protectionType,
302 vk::PrimaryCommandBuffer *commandBufferOut)
303 {
304 return mOneOffCommandPoolMap[protectionType].getCommandBuffer(context, commandBufferOut);
305 }
306
307 // Fire off a single command buffer immediately with default priority.
308 // Command buffer must be allocated with getCommandBufferOneOff and is reclaimed.
309 angle::Result queueSubmitOneOff(vk::Context *context,
310 vk::PrimaryCommandBuffer &&primary,
311 vk::ProtectionType protectionType,
312 egl::ContextPriority priority,
313 VkSemaphore waitSemaphore,
314 VkPipelineStageFlags waitSemaphoreStageMasks,
315 vk::SubmitPolicy submitPolicy,
316 QueueSerial *queueSerialOut);
317
318 angle::Result queueSubmitWaitSemaphore(vk::Context *context,
319 egl::ContextPriority priority,
320 const vk::Semaphore &waitSemaphore,
321 VkPipelineStageFlags waitSemaphoreStageMasks,
322 QueueSerial submitQueueSerial);
323
324 template <typename... ArgsT>
collectGarbage(const vk::ResourceUse & use,ArgsT...garbageIn)325 void collectGarbage(const vk::ResourceUse &use, ArgsT... garbageIn)
326 {
327 if (hasResourceUseFinished(use))
328 {
329 DestroyGarbage(this, garbageIn...);
330 }
331 else
332 {
333 std::vector<vk::GarbageObject> sharedGarbage;
334 CollectGarbage(&sharedGarbage, garbageIn...);
335 if (!sharedGarbage.empty())
336 {
337 collectGarbage(use, std::move(sharedGarbage));
338 }
339 }
340 }
341
collectGarbage(const vk::ResourceUse & use,vk::GarbageObjects && sharedGarbage)342 void collectGarbage(const vk::ResourceUse &use, vk::GarbageObjects &&sharedGarbage)
343 {
344 ASSERT(!sharedGarbage.empty());
345 vk::SharedGarbage garbage(use, std::move(sharedGarbage));
346 mSharedGarbageList.add(this, std::move(garbage));
347 }
348
collectSuballocationGarbage(const vk::ResourceUse & use,vk::BufferSuballocation && suballocation,vk::Buffer && buffer)349 void collectSuballocationGarbage(const vk::ResourceUse &use,
350 vk::BufferSuballocation &&suballocation,
351 vk::Buffer &&buffer)
352 {
353 vk::BufferSuballocationGarbage garbage(use, std::move(suballocation), std::move(buffer));
354 mSuballocationGarbageList.add(this, std::move(garbage));
355 }
356
357 size_t getNextPipelineCacheBlobCacheSlotIndex(size_t *previousSlotIndexOut);
358 size_t updatePipelineCacheChunkCount(size_t chunkCount);
359 angle::Result getPipelineCache(vk::Context *context, vk::PipelineCacheAccess *pipelineCacheOut);
360 angle::Result mergeIntoPipelineCache(vk::Context *context,
361 const vk::PipelineCache &pipelineCache);
362
363 void onNewValidationMessage(const std::string &message);
364 std::string getAndClearLastValidationMessage(uint32_t *countSinceLastClear);
365
getSkippedValidationMessages()366 const std::vector<const char *> &getSkippedValidationMessages() const
367 {
368 return mSkippedValidationMessages;
369 }
getSkippedSyncvalMessages()370 const std::vector<vk::SkippedSyncvalMessage> &getSkippedSyncvalMessages() const
371 {
372 return mSkippedSyncvalMessages;
373 }
374
isCoherentColorFramebufferFetchEmulated()375 bool isCoherentColorFramebufferFetchEmulated() const
376 {
377 return mFeatures.supportsShaderFramebufferFetch.enabled &&
378 !mIsColorFramebufferFetchCoherent;
379 }
380
onColorFramebufferFetchUse()381 void onColorFramebufferFetchUse() { mIsColorFramebufferFetchUsed = true; }
isColorFramebufferFetchUsed()382 bool isColorFramebufferFetchUsed() const { return mIsColorFramebufferFetchUsed; }
383
384 uint64_t getMaxFenceWaitTimeNs() const;
385
isCommandQueueBusy()386 ANGLE_INLINE bool isCommandQueueBusy()
387 {
388 if (isAsyncCommandQueueEnabled())
389 {
390 return mCommandProcessor.isBusy(this);
391 }
392 else
393 {
394 return mCommandQueue.isBusy(this);
395 }
396 }
397
waitForResourceUseToBeSubmittedToDevice(vk::Context * context,const vk::ResourceUse & use)398 angle::Result waitForResourceUseToBeSubmittedToDevice(vk::Context *context,
399 const vk::ResourceUse &use)
400 {
401 // This is only needed for async submission code path. For immediate submission, it is a nop
402 // since everything is submitted immediately.
403 if (isAsyncCommandQueueEnabled())
404 {
405 ASSERT(mCommandProcessor.hasResourceUseEnqueued(use));
406 return mCommandProcessor.waitForResourceUseToBeSubmitted(context, use);
407 }
408 // This ResourceUse must have been submitted.
409 ASSERT(mCommandQueue.hasResourceUseSubmitted(use));
410 return angle::Result::Continue;
411 }
412
waitForQueueSerialToBeSubmittedToDevice(vk::Context * context,const QueueSerial & queueSerial)413 angle::Result waitForQueueSerialToBeSubmittedToDevice(vk::Context *context,
414 const QueueSerial &queueSerial)
415 {
416 // This is only needed for async submission code path. For immediate submission, it is a nop
417 // since everything is submitted immediately.
418 if (isAsyncCommandQueueEnabled())
419 {
420 ASSERT(mCommandProcessor.hasQueueSerialEnqueued(queueSerial));
421 return mCommandProcessor.waitForQueueSerialToBeSubmitted(context, queueSerial);
422 }
423 // This queueSerial must have been submitted.
424 ASSERT(mCommandQueue.hasQueueSerialSubmitted(queueSerial));
425 return angle::Result::Continue;
426 }
427
getCommandQueuePerfCounters()428 angle::VulkanPerfCounters getCommandQueuePerfCounters()
429 {
430 return mCommandQueue.getPerfCounters();
431 }
resetCommandQueuePerFrameCounters()432 void resetCommandQueuePerFrameCounters() { mCommandQueue.resetPerFramePerfCounters(); }
433
getGlobalOps()434 vk::GlobalOps *getGlobalOps() const { return mGlobalOps; }
435
enableDebugUtils()436 bool enableDebugUtils() const { return mEnableDebugUtils; }
angleDebuggerMode()437 bool angleDebuggerMode() const { return mAngleDebuggerMode; }
438
getSamplerCache()439 SamplerCache &getSamplerCache() { return mSamplerCache; }
getYuvConversionCache()440 SamplerYcbcrConversionCache &getYuvConversionCache() { return mYuvConversionCache; }
441
442 void onAllocateHandle(vk::HandleType handleType);
443 void onDeallocateHandle(vk::HandleType handleType, uint32_t count);
444
getEnableValidationLayers()445 bool getEnableValidationLayers() const { return mEnableValidationLayers; }
446
getResourceSerialFactory()447 vk::ResourceSerialFactory &getResourceSerialFactory() { return mResourceSerialFactory; }
448
449 void setGlobalDebugAnnotator(bool *installedAnnotatorOut);
450
451 void outputVmaStatString();
452
453 bool haveSameFormatFeatureBits(angle::FormatID formatID1, angle::FormatID formatID2) const;
454
455 void cleanupGarbage(bool *anyGarbageCleanedOut);
456 void cleanupPendingSubmissionGarbage();
457
458 angle::Result submitCommands(vk::Context *context,
459 vk::ProtectionType protectionType,
460 egl::ContextPriority contextPriority,
461 const vk::Semaphore *signalSemaphore,
462 const vk::SharedExternalFence *externalFence,
463 const QueueSerial &submitQueueSerial);
464
465 angle::Result submitPriorityDependency(vk::Context *context,
466 vk::ProtectionTypes protectionTypes,
467 egl::ContextPriority srcContextPriority,
468 egl::ContextPriority dstContextPriority,
469 SerialIndex index);
470
471 void handleDeviceLost();
472 angle::Result finishResourceUse(vk::Context *context, const vk::ResourceUse &use);
473 angle::Result finishQueueSerial(vk::Context *context, const QueueSerial &queueSerial);
474 angle::Result waitForResourceUseToFinishWithUserTimeout(vk::Context *context,
475 const vk::ResourceUse &use,
476 uint64_t timeout,
477 VkResult *result);
478 angle::Result checkCompletedCommands(vk::Context *context);
479
480 angle::Result checkCompletedCommandsAndCleanup(vk::Context *context);
481 angle::Result releaseFinishedCommands(vk::Context *context);
482
483 angle::Result flushWaitSemaphores(vk::ProtectionType protectionType,
484 egl::ContextPriority priority,
485 std::vector<VkSemaphore> &&waitSemaphores,
486 std::vector<VkPipelineStageFlags> &&waitSemaphoreStageMasks);
487 angle::Result flushRenderPassCommands(vk::Context *context,
488 vk::ProtectionType protectionType,
489 egl::ContextPriority priority,
490 const vk::RenderPass &renderPass,
491 VkFramebuffer framebufferOverride,
492 vk::RenderPassCommandBufferHelper **renderPassCommands);
493 angle::Result flushOutsideRPCommands(
494 vk::Context *context,
495 vk::ProtectionType protectionType,
496 egl::ContextPriority priority,
497 vk::OutsideRenderPassCommandBufferHelper **outsideRPCommands);
498
499 void queuePresent(vk::Context *context,
500 egl::ContextPriority priority,
501 const VkPresentInfoKHR &presentInfo,
502 vk::SwapchainStatus *swapchainStatus);
503
504 // Only useful if async submission is enabled
505 angle::Result waitForPresentToBeSubmitted(vk::SwapchainStatus *swapchainStatus);
506
507 angle::Result getOutsideRenderPassCommandBufferHelper(
508 vk::Context *context,
509 vk::SecondaryCommandPool *commandPool,
510 vk::SecondaryCommandMemoryAllocator *commandsAllocator,
511 vk::OutsideRenderPassCommandBufferHelper **commandBufferHelperOut);
512 angle::Result getRenderPassCommandBufferHelper(
513 vk::Context *context,
514 vk::SecondaryCommandPool *commandPool,
515 vk::SecondaryCommandMemoryAllocator *commandsAllocator,
516 vk::RenderPassCommandBufferHelper **commandBufferHelperOut);
517
518 void recycleOutsideRenderPassCommandBufferHelper(
519 vk::OutsideRenderPassCommandBufferHelper **commandBuffer);
520 void recycleRenderPassCommandBufferHelper(vk::RenderPassCommandBufferHelper **commandBuffer);
521
522 // Process GPU memory reports
processMemoryReportCallback(const VkDeviceMemoryReportCallbackDataEXT & callbackData)523 void processMemoryReportCallback(const VkDeviceMemoryReportCallbackDataEXT &callbackData)
524 {
525 bool logCallback = getFeatures().logMemoryReportCallbacks.enabled;
526 mMemoryReport.processCallback(callbackData, logCallback);
527 }
528
529 // Accumulate cache stats for a specific cache
accumulateCacheStats(VulkanCacheType cache,const CacheStats & stats)530 void accumulateCacheStats(VulkanCacheType cache, const CacheStats &stats)
531 {
532 std::unique_lock<angle::SimpleMutex> localLock(mCacheStatsMutex);
533 mVulkanCacheStats[cache].accumulate(stats);
534 }
535 // Log cache stats for all caches
536 void logCacheStats() const;
537
getSupportedBufferWritePipelineStageMask()538 VkPipelineStageFlags getSupportedBufferWritePipelineStageMask() const
539 {
540 return mSupportedBufferWritePipelineStageMask;
541 }
542
getPipelineStageMask(EventStage eventStage)543 VkPipelineStageFlags getPipelineStageMask(EventStage eventStage) const
544 {
545 return mEventStageAndPipelineStageFlagsMap[eventStage];
546 }
getEventPipelineStageMask(const RefCountedEvent & refCountedEvent)547 VkPipelineStageFlags getEventPipelineStageMask(const RefCountedEvent &refCountedEvent) const
548 {
549 return mEventStageAndPipelineStageFlagsMap[refCountedEvent.getEventStage()];
550 }
getImageMemoryBarrierData(ImageLayout layout)551 const ImageMemoryBarrierData &getImageMemoryBarrierData(ImageLayout layout) const
552 {
553 return mImageLayoutAndMemoryBarrierDataMap[layout];
554 }
555
getSupportedVulkanShaderStageMask()556 VkShaderStageFlags getSupportedVulkanShaderStageMask() const
557 {
558 return mSupportedVulkanShaderStageMask;
559 }
560
561 angle::Result getFormatDescriptorCountForVkFormat(vk::Context *context,
562 VkFormat format,
563 uint32_t *descriptorCountOut);
564
565 angle::Result getFormatDescriptorCountForExternalFormat(vk::Context *context,
566 uint64_t format,
567 uint32_t *descriptorCountOut);
568
getMaxCopyBytesUsingCPUWhenPreservingBufferData()569 VkDeviceSize getMaxCopyBytesUsingCPUWhenPreservingBufferData() const
570 {
571 return mMaxCopyBytesUsingCPUWhenPreservingBufferData;
572 }
573
getEnabledInstanceExtensions()574 const vk::ExtensionNameList &getEnabledInstanceExtensions() const
575 {
576 return mEnabledInstanceExtensions;
577 }
578
getEnabledDeviceExtensions()579 const vk::ExtensionNameList &getEnabledDeviceExtensions() const
580 {
581 return mEnabledDeviceExtensions;
582 }
583
584 VkDeviceSize getPreferedBufferBlockSize(uint32_t memoryTypeIndex) const;
585
getDefaultBufferAlignment()586 size_t getDefaultBufferAlignment() const { return mDefaultBufferAlignment; }
587
getStagingBufferMemoryTypeIndex(vk::MemoryCoherency coherency)588 uint32_t getStagingBufferMemoryTypeIndex(vk::MemoryCoherency coherency) const
589 {
590 return mStagingBufferMemoryTypeIndex[coherency];
591 }
getStagingBufferAlignment()592 size_t getStagingBufferAlignment() const { return mStagingBufferAlignment; }
593
getVertexConversionBufferMemoryTypeIndex(MemoryHostVisibility hostVisibility)594 uint32_t getVertexConversionBufferMemoryTypeIndex(MemoryHostVisibility hostVisibility) const
595 {
596 return hostVisibility == MemoryHostVisibility::Visible
597 ? mHostVisibleVertexConversionBufferMemoryTypeIndex
598 : mDeviceLocalVertexConversionBufferMemoryTypeIndex;
599 }
getVertexConversionBufferAlignment()600 size_t getVertexConversionBufferAlignment() const { return mVertexConversionBufferAlignment; }
601
getDeviceLocalMemoryTypeIndex()602 uint32_t getDeviceLocalMemoryTypeIndex() const
603 {
604 return mDeviceLocalVertexConversionBufferMemoryTypeIndex;
605 }
606
isShadingRateSupported(gl::ShadingRate shadingRate)607 bool isShadingRateSupported(gl::ShadingRate shadingRate) const
608 {
609 return mSupportedFragmentShadingRates.test(shadingRate);
610 }
611
getMaxFragmentShadingRateAttachmentTexelSize()612 VkExtent2D getMaxFragmentShadingRateAttachmentTexelSize() const
613 {
614 ASSERT(mFeatures.supportsFoveatedRendering.enabled);
615 return mFragmentShadingRateProperties.maxFragmentShadingRateAttachmentTexelSize;
616 }
617
addBufferBlockToOrphanList(vk::BufferBlock * block)618 void addBufferBlockToOrphanList(vk::BufferBlock *block) { mOrphanedBufferBlockList.add(block); }
619
getSuballocationDestroyedSize()620 VkDeviceSize getSuballocationDestroyedSize() const
621 {
622 return mSuballocationGarbageList.getDestroyedGarbageSize();
623 }
onBufferPoolPrune()624 void onBufferPoolPrune() { mSuballocationGarbageList.resetDestroyedGarbageSize(); }
getSuballocationGarbageSize()625 VkDeviceSize getSuballocationGarbageSize() const
626 {
627 return mSuballocationGarbageList.getSubmittedGarbageSize();
628 }
getPendingSuballocationGarbageSize()629 VkDeviceSize getPendingSuballocationGarbageSize()
630 {
631 return mSuballocationGarbageList.getUnsubmittedGarbageSize();
632 }
633
getPendingSubmissionGarbageSize()634 VkDeviceSize getPendingSubmissionGarbageSize() const
635 {
636 return mSharedGarbageList.getUnsubmittedGarbageSize();
637 }
638
getPreferredFilterForYUV(VkFilter defaultFilter)639 ANGLE_INLINE VkFilter getPreferredFilterForYUV(VkFilter defaultFilter)
640 {
641 return getFeatures().preferLinearFilterForYUV.enabled ? VK_FILTER_LINEAR : defaultFilter;
642 }
643
644 angle::Result allocateScopedQueueSerialIndex(vk::ScopedQueueSerialIndex *indexOut);
645 angle::Result allocateQueueSerialIndex(SerialIndex *serialIndexOut);
getLargestQueueSerialIndexEverAllocated()646 size_t getLargestQueueSerialIndexEverAllocated() const
647 {
648 return mQueueSerialIndexAllocator.getLargestIndexEverAllocated();
649 }
650 void releaseQueueSerialIndex(SerialIndex index);
651 Serial generateQueueSerial(SerialIndex index);
652 void reserveQueueSerials(SerialIndex index,
653 size_t count,
654 RangedSerialFactory *rangedSerialFactory);
655
656 // Return true if all serials in ResourceUse have been submitted.
657 bool hasResourceUseSubmitted(const vk::ResourceUse &use) const;
658 bool hasQueueSerialSubmitted(const QueueSerial &queueSerial) const;
659 Serial getLastSubmittedSerial(SerialIndex index) const;
660 // Return true if all serials in ResourceUse have been finished.
661 bool hasResourceUseFinished(const vk::ResourceUse &use) const;
662 bool hasQueueSerialFinished(const QueueSerial &queueSerial) const;
663
664 // Memory statistics can be updated on allocation and deallocation.
665 template <typename HandleT>
onMemoryAlloc(vk::MemoryAllocationType allocType,VkDeviceSize size,uint32_t memoryTypeIndex,HandleT handle)666 void onMemoryAlloc(vk::MemoryAllocationType allocType,
667 VkDeviceSize size,
668 uint32_t memoryTypeIndex,
669 HandleT handle)
670 {
671 mMemoryAllocationTracker.onMemoryAllocImpl(allocType, size, memoryTypeIndex,
672 reinterpret_cast<void *>(handle));
673 }
674
675 template <typename HandleT>
onMemoryDealloc(vk::MemoryAllocationType allocType,VkDeviceSize size,uint32_t memoryTypeIndex,HandleT handle)676 void onMemoryDealloc(vk::MemoryAllocationType allocType,
677 VkDeviceSize size,
678 uint32_t memoryTypeIndex,
679 HandleT handle)
680 {
681 mMemoryAllocationTracker.onMemoryDeallocImpl(allocType, size, memoryTypeIndex,
682 reinterpret_cast<void *>(handle));
683 }
684
getMemoryAllocationTracker()685 MemoryAllocationTracker *getMemoryAllocationTracker() { return &mMemoryAllocationTracker; }
686
getPendingGarbageSizeLimit()687 VkDeviceSize getPendingGarbageSizeLimit() const { return mPendingGarbageSizeLimit; }
688
689 void requestAsyncCommandsAndGarbageCleanup(vk::Context *context);
690
691 // Cleanup garbage and finish command batches from the queue if necessary in the event of an OOM
692 // error.
693 angle::Result cleanupSomeGarbage(Context *context, bool *anyGarbageCleanedOut);
694
695 // Static function to get Vulkan object type name.
696 static const char *GetVulkanObjectTypeName(VkObjectType type);
697
nullColorAttachmentWithExternalFormatResolve()698 bool nullColorAttachmentWithExternalFormatResolve() const
699 {
700 #if defined(ANGLE_PLATFORM_ANDROID)
701 ASSERT(mFeatures.supportsExternalFormatResolve.enabled);
702 return mExternalFormatResolveProperties.nullColorAttachmentWithExternalFormatResolve;
703 #else
704 return false;
705 #endif
706 }
707
getExternalFormatTable()708 vk::ExternalFormatTable *getExternalFormatTable() { return &mExternalFormatTable; }
709
getPipelineCacheGraphStream()710 std::ostringstream &getPipelineCacheGraphStream() { return mPipelineCacheGraph; }
isPipelineCacheGraphDumpEnabled()711 bool isPipelineCacheGraphDumpEnabled() const { return mDumpPipelineCacheGraph; }
getPipelineCacheGraphDumpPath()712 const char *getPipelineCacheGraphDumpPath() const
713 {
714 return mPipelineCacheGraphDumpPath.c_str();
715 }
716
getRefCountedEventRecycler()717 vk::RefCountedEventRecycler *getRefCountedEventRecycler() { return &mRefCountedEventRecycler; }
718
getCommandProcessorThreadId()719 std::thread::id getCommandProcessorThreadId() const { return mCommandProcessor.getThreadId(); }
720
getEmptyDescriptorLayout()721 const vk::DescriptorSetLayoutPtr &getEmptyDescriptorLayout() const
722 {
723 ASSERT(mPlaceHolderDescriptorSetLayout);
724 ASSERT(mPlaceHolderDescriptorSetLayout->valid());
725 return mPlaceHolderDescriptorSetLayout;
726 }
727
728 private:
729 angle::Result setupDevice(vk::Context *context,
730 const angle::FeatureOverrides &featureOverrides,
731 const char *wsiLayer,
732 UseVulkanSwapchain useVulkanSwapchain,
733 angle::NativeWindowSystem nativeWindowSystem);
734 angle::Result createDeviceAndQueue(vk::Context *context, uint32_t queueFamilyIndex);
735 void ensureCapsInitialized() const;
736 void initializeValidationMessageSuppressions();
737
738 void queryDeviceExtensionFeatures(const vk::ExtensionNameList &deviceExtensionNames);
739 void appendDeviceExtensionFeaturesNotPromoted(const vk::ExtensionNameList &deviceExtensionNames,
740 VkPhysicalDeviceFeatures2KHR *deviceFeatures,
741 VkPhysicalDeviceProperties2 *deviceProperties);
742 void appendDeviceExtensionFeaturesPromotedTo11(
743 const vk::ExtensionNameList &deviceExtensionNames,
744 VkPhysicalDeviceFeatures2KHR *deviceFeatures,
745 VkPhysicalDeviceProperties2 *deviceProperties);
746 void appendDeviceExtensionFeaturesPromotedTo12(
747 const vk::ExtensionNameList &deviceExtensionNames,
748 VkPhysicalDeviceFeatures2KHR *deviceFeatures,
749 VkPhysicalDeviceProperties2 *deviceProperties);
750 void appendDeviceExtensionFeaturesPromotedTo13(
751 const vk::ExtensionNameList &deviceExtensionNames,
752 VkPhysicalDeviceFeatures2KHR *deviceFeatures,
753 VkPhysicalDeviceProperties2 *deviceProperties);
754
755 angle::Result enableInstanceExtensions(vk::Context *context,
756 const VulkanLayerVector &enabledInstanceLayerNames,
757 const char *wsiExtension,
758 UseVulkanSwapchain useVulkanSwapchain,
759 bool canLoadDebugUtils);
760 angle::Result enableDeviceExtensions(vk::Context *context,
761 const angle::FeatureOverrides &featureOverrides,
762 UseVulkanSwapchain useVulkanSwapchain,
763 angle::NativeWindowSystem nativeWindowSystem);
764
765 void enableDeviceExtensionsNotPromoted(const vk::ExtensionNameList &deviceExtensionNames);
766 void enableDeviceExtensionsPromotedTo11(const vk::ExtensionNameList &deviceExtensionNames);
767 void enableDeviceExtensionsPromotedTo12(const vk::ExtensionNameList &deviceExtensionNames);
768 void enableDeviceExtensionsPromotedTo13(const vk::ExtensionNameList &deviceExtensionNames);
769
770 void initDeviceExtensionEntryPoints();
771 // Initialize extension entry points from core ones if needed
772 void initializeInstanceExtensionEntryPointsFromCore() const;
773 void initializeDeviceExtensionEntryPointsFromCore() const;
774
775 void initFeatures(const vk::ExtensionNameList &extensions,
776 const angle::FeatureOverrides &featureOverrides,
777 UseVulkanSwapchain useVulkanSwapchain,
778 angle::NativeWindowSystem nativeWindowSystem);
779 void appBasedFeatureOverrides(const vk::ExtensionNameList &extensions);
780 angle::Result initPipelineCache(vk::Context *context,
781 vk::PipelineCache *pipelineCache,
782 bool *success);
783 angle::Result ensurePipelineCacheInitialized(vk::Context *context);
784
785 template <VkFormatFeatureFlags VkFormatProperties::*features>
786 VkFormatFeatureFlags getFormatFeatureBits(angle::FormatID formatID,
787 const VkFormatFeatureFlags featureBits) const;
788
789 template <VkFormatFeatureFlags VkFormatProperties::*features>
790 bool hasFormatFeatureBits(angle::FormatID formatID,
791 const VkFormatFeatureFlags featureBits) const;
792
793 // Initialize VMA allocator and buffer suballocator related data.
794 angle::Result initializeMemoryAllocator(vk::Context *context);
795
796 // Query and cache supported fragment shading rates
797 void queryAndCacheFragmentShadingRates();
798 // Determine support for shading rate based rendering
799 bool canSupportFragmentShadingRate() const;
800 // Determine support for foveated rendering
801 bool canSupportFoveatedRendering() const;
    // Prefer host visible device local memory over plain device local, based on device type
    // and heap size.
803 bool canPreferDeviceLocalMemoryHostVisible(VkPhysicalDeviceType deviceType);
804
805 // Find the threshold for pending suballocation and image garbage sizes before the context
806 // should be flushed.
807 void calculatePendingGarbageSizeLimit();
808
809 template <typename CommandBufferHelperT, typename RecyclerT>
810 angle::Result getCommandBufferImpl(vk::Context *context,
811 vk::SecondaryCommandPool *commandPool,
812 vk::SecondaryCommandMemoryAllocator *commandsAllocator,
813 RecyclerT *recycler,
814 CommandBufferHelperT **commandBufferHelperOut);
815
816 vk::GlobalOps *mGlobalOps;
817
818 void *mLibVulkanLibrary;
819
820 mutable bool mCapsInitialized;
821 mutable gl::Caps mNativeCaps;
822 mutable gl::TextureCapsMap mNativeTextureCaps;
823 mutable gl::Extensions mNativeExtensions;
824 mutable gl::Limitations mNativeLimitations;
825 mutable ShPixelLocalStorageOptions mNativePLSOptions;
826 mutable angle::FeaturesVk mFeatures;
827
828 // The instance and device versions. The instance version is the one from the Vulkan loader,
829 // while the device version comes from VkPhysicalDeviceProperties::apiVersion. With instance
830 // version 1.0, only device version 1.0 can be used. If instance version is at least 1.1, any
831 // device version (even higher than that) can be used. Some extensions have been promoted to
832 // Vulkan 1.1 or higher, but the version check must be done against the instance or device
833 // version, depending on whether it's an instance or device extension.
834 //
835 // Note that mDeviceVersion is technically redundant with mPhysicalDeviceProperties.apiVersion,
836 // but ANGLE may use a smaller version with problematic ICDs.
837 uint32_t mInstanceVersion;
838 uint32_t mDeviceVersion;
839
840 VkInstance mInstance;
841 bool mEnableValidationLayers;
842 // True if ANGLE is enabling the VK_EXT_debug_utils extension.
843 bool mEnableDebugUtils;
844 // True if ANGLE should call the vkCmd*DebugUtilsLabelEXT functions in order to communicate
845 // to debuggers (e.g. AGI) the OpenGL ES commands that the application uses. This is
846 // independent of mEnableDebugUtils, as an external graphics debugger can enable the
847 // VK_EXT_debug_utils extension and cause this to be set true.
848 bool mAngleDebuggerMode;
849 angle::vk::ICD mEnabledICD;
850 VkDebugUtilsMessengerEXT mDebugUtilsMessenger;
851 VkPhysicalDevice mPhysicalDevice;
852
853 VkPhysicalDeviceProperties mPhysicalDeviceProperties;
854 VkPhysicalDeviceVulkan11Properties mPhysicalDevice11Properties;
855
856 VkPhysicalDeviceFeatures mPhysicalDeviceFeatures;
857 VkPhysicalDeviceVulkan11Features mPhysicalDevice11Features;
858
859 VkPhysicalDeviceLineRasterizationFeaturesEXT mLineRasterizationFeatures;
860 VkPhysicalDeviceProvokingVertexFeaturesEXT mProvokingVertexFeatures;
861 VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT mVertexAttributeDivisorFeatures;
862 VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT mVertexAttributeDivisorProperties;
863 VkPhysicalDeviceTransformFeedbackFeaturesEXT mTransformFeedbackFeatures;
864 VkPhysicalDeviceIndexTypeUint8FeaturesEXT mIndexTypeUint8Features;
865 VkPhysicalDeviceSubgroupProperties mSubgroupProperties;
866 VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR mSubgroupExtendedTypesFeatures;
867 VkPhysicalDeviceDeviceMemoryReportFeaturesEXT mMemoryReportFeatures;
868 VkDeviceDeviceMemoryReportCreateInfoEXT mMemoryReportCallback;
869 VkPhysicalDeviceShaderFloat16Int8FeaturesKHR mShaderFloat16Int8Features;
870 VkPhysicalDeviceDepthStencilResolvePropertiesKHR mDepthStencilResolveProperties;
871 VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT
872 mMultisampledRenderToSingleSampledFeatures;
873 VkPhysicalDeviceImage2DViewOf3DFeaturesEXT mImage2dViewOf3dFeatures;
874 VkPhysicalDeviceMultiviewFeatures mMultiviewFeatures;
875 VkPhysicalDeviceFeatures2KHR mEnabledFeatures;
876 VkPhysicalDeviceMultiviewProperties mMultiviewProperties;
877 VkPhysicalDeviceDriverPropertiesKHR mDriverProperties;
878 VkPhysicalDeviceCustomBorderColorFeaturesEXT mCustomBorderColorFeatures;
879 VkPhysicalDeviceProtectedMemoryFeatures mProtectedMemoryFeatures;
880 VkPhysicalDeviceHostQueryResetFeaturesEXT mHostQueryResetFeatures;
881 VkPhysicalDeviceDepthClampZeroOneFeaturesEXT mDepthClampZeroOneFeatures;
882 VkPhysicalDeviceDepthClipControlFeaturesEXT mDepthClipControlFeatures;
883 VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT mBlendOperationAdvancedFeatures;
884 VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT mPrimitivesGeneratedQueryFeatures;
885 VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT mPrimitiveTopologyListRestartFeatures;
886 VkPhysicalDeviceSamplerYcbcrConversionFeatures mSamplerYcbcrConversionFeatures;
887 VkPhysicalDeviceExtendedDynamicStateFeaturesEXT mExtendedDynamicStateFeatures;
888 VkPhysicalDeviceExtendedDynamicState2FeaturesEXT mExtendedDynamicState2Features;
889 VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT mGraphicsPipelineLibraryFeatures;
890 VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT mGraphicsPipelineLibraryProperties;
891 VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT mVertexInputDynamicStateFeatures;
892 VkPhysicalDeviceDynamicRenderingFeaturesKHR mDynamicRenderingFeatures;
893 VkPhysicalDeviceDynamicRenderingLocalReadFeaturesKHR mDynamicRenderingLocalReadFeatures;
894 VkPhysicalDeviceFragmentShadingRateFeaturesKHR mFragmentShadingRateFeatures;
895 VkPhysicalDeviceFragmentShadingRatePropertiesKHR mFragmentShadingRateProperties;
896 VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT mFragmentShaderInterlockFeatures;
897 VkPhysicalDeviceImagelessFramebufferFeaturesKHR mImagelessFramebufferFeatures;
898 VkPhysicalDevicePipelineRobustnessFeaturesEXT mPipelineRobustnessFeatures;
899 VkPhysicalDevicePipelineProtectedAccessFeaturesEXT mPipelineProtectedAccessFeatures;
900 VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT
901 mRasterizationOrderAttachmentAccessFeatures;
902 VkPhysicalDeviceShaderAtomicFloatFeaturesEXT mShaderAtomicFloatFeatures;
903 VkPhysicalDeviceMaintenance5FeaturesKHR mMaintenance5Features;
904 VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT mSwapchainMaintenance1Features;
905 VkPhysicalDeviceLegacyDitheringFeaturesEXT mDitheringFeatures;
906 VkPhysicalDeviceDrmPropertiesEXT mDrmProperties;
907 VkPhysicalDeviceTimelineSemaphoreFeaturesKHR mTimelineSemaphoreFeatures;
908 VkPhysicalDeviceHostImageCopyFeaturesEXT mHostImageCopyFeatures;
909 VkPhysicalDeviceHostImageCopyPropertiesEXT mHostImageCopyProperties;
910 VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT mTextureCompressionASTCHDRFeatures;
911 std::vector<VkImageLayout> mHostImageCopySrcLayoutsStorage;
912 std::vector<VkImageLayout> mHostImageCopyDstLayoutsStorage;
913 VkPhysicalDeviceImageCompressionControlFeaturesEXT mImageCompressionControlFeatures;
914 #if defined(ANGLE_PLATFORM_ANDROID)
915 VkPhysicalDeviceExternalFormatResolveFeaturesANDROID mExternalFormatResolveFeatures;
916 VkPhysicalDeviceExternalFormatResolvePropertiesANDROID mExternalFormatResolveProperties;
917 #endif
918 VkPhysicalDevice8BitStorageFeatures m8BitStorageFeatures;
919 VkPhysicalDevice16BitStorageFeatures m16BitStorageFeatures;
920 VkPhysicalDeviceSynchronization2Features mSynchronization2Features;
921 VkPhysicalDeviceVariablePointersFeatures mVariablePointersFeatures;
922 VkPhysicalDeviceFloatControlsProperties mFloatControlProperties;
923
924 uint32_t mLegacyDitheringVersion = 0;
925
926 angle::PackedEnumBitSet<gl::ShadingRate, uint8_t> mSupportedFragmentShadingRates;
927 angle::PackedEnumMap<gl::ShadingRate, VkSampleCountFlags>
928 mSupportedFragmentShadingRateSampleCounts;
929 std::vector<VkQueueFamilyProperties> mQueueFamilyProperties;
930 uint32_t mCurrentQueueFamilyIndex;
931 uint32_t mMaxVertexAttribDivisor;
932 VkDeviceSize mMaxVertexAttribStride;
933 mutable uint32_t mMaxColorInputAttachmentCount;
934 uint32_t mDefaultUniformBufferSize;
935 VkDevice mDevice;
936 VkDeviceSize mMaxCopyBytesUsingCPUWhenPreservingBufferData;
937
938 bool mDeviceLost;
939
940 vk::SharedGarbageList<vk::SharedGarbage> mSharedGarbageList;
    // Suballocations have their own dedicated garbage list for performance optimization since they
942 // tend to be the most common garbage objects.
943 vk::SharedGarbageList<vk::BufferSuballocationGarbage> mSuballocationGarbageList;
944 // Holds orphaned BufferBlocks when ShareGroup gets destroyed
945 vk::BufferBlockGarbageList mOrphanedBufferBlockList;
946 // Holds RefCountedEvent that are free and ready to reuse
947 vk::RefCountedEventRecycler mRefCountedEventRecycler;
948
949 VkDeviceSize mPendingGarbageSizeLimit;
950
951 vk::FormatTable mFormatTable;
952 // A cache of VkFormatProperties as queried from the device over time.
953 mutable angle::FormatMap<VkFormatProperties> mFormatProperties;
954
955 vk::Allocator mAllocator;
956
957 // Used to allocate memory for images using VMA, utilizing suballocation.
958 vk::ImageMemorySuballocator mImageMemorySuballocator;
959
960 vk::MemoryProperties mMemoryProperties;
961 VkDeviceSize mPreferredLargeHeapBlockSize;
962
963 // The default alignment for BufferVk object
964 size_t mDefaultBufferAlignment;
965 // The memory type index for staging buffer that is host visible.
966 angle::PackedEnumMap<vk::MemoryCoherency, uint32_t> mStagingBufferMemoryTypeIndex;
967 size_t mStagingBufferAlignment;
968 // For vertex conversion buffers
969 uint32_t mHostVisibleVertexConversionBufferMemoryTypeIndex;
970 uint32_t mDeviceLocalVertexConversionBufferMemoryTypeIndex;
971 size_t mVertexConversionBufferAlignment;
972
973 // The mutex protects -
974 // 1. initialization of the cache
    // 2. Vulkan driver guarantees synchronization for read and write operations but the spec
976 // requires external synchronization when mPipelineCache is the dstCache of
977 // vkMergePipelineCaches. Lock the mutex if mergeProgramPipelineCachesToGlobalCache is
978 // enabled
979 angle::SimpleMutex mPipelineCacheMutex;
980 vk::PipelineCache mPipelineCache;
981 size_t mCurrentPipelineCacheBlobCacheSlotIndex;
982 size_t mPipelineCacheChunkCount;
983 uint32_t mPipelineCacheVkUpdateTimeout;
984 size_t mPipelineCacheSizeAtLastSync;
985 std::atomic<bool> mPipelineCacheInitialized;
986
987 // Latest validation data for debug overlay.
988 std::string mLastValidationMessage;
989 uint32_t mValidationMessageCount;
990
991 // Skipped validation messages. The exact contents of the list depends on the availability
992 // of certain extensions.
993 std::vector<const char *> mSkippedValidationMessages;
994 // Syncval skipped messages. The exact contents of the list depends on the availability of
995 // certain extensions.
996 std::vector<vk::SkippedSyncvalMessage> mSkippedSyncvalMessages;
997
998 // Whether framebuffer fetch is internally coherent. If framebuffer fetch is not coherent,
999 // technically ANGLE could simply not expose EXT_shader_framebuffer_fetch and instead only
1000 // expose EXT_shader_framebuffer_fetch_non_coherent. In practice, too many Android apps assume
1001 // EXT_shader_framebuffer_fetch is available and break without it. Others use string matching
1002 // to detect when EXT_shader_framebuffer_fetch is available, and accidentally match
1003 // EXT_shader_framebuffer_fetch_non_coherent and believe coherent framebuffer fetch is
1004 // available.
1005 //
1006 // For these reasons, ANGLE always exposes EXT_shader_framebuffer_fetch. To ensure coherence
1007 // between draw calls, it automatically inserts barriers between draw calls when the program
1008 // uses framebuffer fetch. ANGLE does not attempt to guarantee coherence for self-overlapping
1009 // geometry, which makes this emulation incorrect per spec, but practically harmless.
1010 //
1011 // This emulation can also be used to implement coherent advanced blend similarly if needed.
1012 bool mIsColorFramebufferFetchCoherent;
1013 // Whether framebuffer fetch has been used, for the purposes of more accurate syncval error
1014 // filtering.
1015 bool mIsColorFramebufferFetchUsed;
1016
1017 // How close to VkPhysicalDeviceLimits::maxMemoryAllocationCount we allow ourselves to get
1018 static constexpr double kPercentMaxMemoryAllocationCount = 0.3;
1019 // How many objects to garbage collect before issuing a flush()
1020 uint32_t mGarbageCollectionFlushThreshold;
1021
1022 // Only used for "one off" command buffers.
1023 angle::PackedEnumMap<vk::ProtectionType, OneOffCommandPool> mOneOffCommandPoolMap;
1024
1025 // Synchronous Command Queue
1026 vk::CommandQueue mCommandQueue;
1027
1028 // Async Command Queue
1029 vk::CommandProcessor mCommandProcessor;
1030
1031 // Command buffer pool management.
1032 vk::CommandBufferRecycler<vk::OutsideRenderPassCommandBufferHelper>
1033 mOutsideRenderPassCommandBufferRecycler;
1034 vk::CommandBufferRecycler<vk::RenderPassCommandBufferHelper> mRenderPassCommandBufferRecycler;
1035
1036 SamplerCache mSamplerCache;
1037 SamplerYcbcrConversionCache mYuvConversionCache;
1038 angle::HashMap<VkFormat, uint32_t> mVkFormatDescriptorCountMap;
1039 vk::ActiveHandleCounter mActiveHandleCounts;
1040 angle::SimpleMutex mActiveHandleCountsMutex;
1041
1042 // Tracks resource serials.
1043 vk::ResourceSerialFactory mResourceSerialFactory;
1044
1045 // QueueSerial generator
1046 vk::QueueSerialIndexAllocator mQueueSerialIndexAllocator;
1047 std::array<AtomicSerialFactory, kMaxQueueSerialIndexCount> mQueueSerialFactory;
1048
1049 // Application executable information
1050 VkApplicationInfo mApplicationInfo;
1051 // Process GPU memory reports
1052 vk::MemoryReport mMemoryReport;
1053 // Helpers for adding trace annotations
1054 DebugAnnotatorVk mAnnotator;
1055
1056 // Stats about all Vulkan object caches
1057 VulkanCacheStats mVulkanCacheStats;
1058 mutable angle::SimpleMutex mCacheStatsMutex;
1059
1060 // A mask to filter out Vulkan pipeline stages that are not supported, applied in situations
1061 // where multiple stages are prespecified (for example with image layout transitions):
1062 //
1063 // - Excludes GEOMETRY if geometry shaders are not supported.
1064 // - Excludes TESSELLATION_CONTROL and TESSELLATION_EVALUATION if tessellation shaders are
1065 // not
1066 // supported.
1067 //
1068 // Note that this mask can have bits set that don't correspond to valid stages, so it's
1069 // strictly only useful for masking out unsupported stages in an otherwise valid set of
1070 // stages.
1071 VkPipelineStageFlags mSupportedBufferWritePipelineStageMask;
1072 VkShaderStageFlags mSupportedVulkanShaderStageMask;
1073 // The 1:1 mapping between EventStage and VkPipelineStageFlags
1074 angle::PackedEnumMap<EventStage, VkPipelineStageFlags> mEventStageAndPipelineStageFlagsMap;
1075 angle::PackedEnumMap<ImageLayout, ImageMemoryBarrierData> mImageLayoutAndMemoryBarrierDataMap;
1076
1077 // Use thread pool to compress cache data.
1078 std::shared_ptr<angle::WaitableEvent> mCompressEvent;
1079
1080 VulkanLayerVector mEnabledDeviceLayerNames;
1081 vk::ExtensionNameList mEnabledInstanceExtensions;
1082 vk::ExtensionNameList mEnabledDeviceExtensions;
1083
1084 // Memory tracker for allocations and deallocations.
1085 MemoryAllocationTracker mMemoryAllocationTracker;
1086
1087 vk::ExternalFormatTable mExternalFormatTable;
1088
1089 // A graph built from pipeline descs and their transitions. This is not thread-safe, but it's
1090 // only a debug feature that's disabled by default.
1091 std::ostringstream mPipelineCacheGraph;
1092 bool mDumpPipelineCacheGraph;
1093 std::string mPipelineCacheGraphDumpPath;
1094
1095 // A placeholder descriptor set layout handle for layouts with no bindings.
1096 vk::DescriptorSetLayoutPtr mPlaceHolderDescriptorSetLayout;
1097 };
1098
generateQueueSerial(SerialIndex index)1099 ANGLE_INLINE Serial Renderer::generateQueueSerial(SerialIndex index)
1100 {
1101 return mQueueSerialFactory[index].generate();
1102 }
1103
reserveQueueSerials(SerialIndex index,size_t count,RangedSerialFactory * rangedSerialFactory)1104 ANGLE_INLINE void Renderer::reserveQueueSerials(SerialIndex index,
1105 size_t count,
1106 RangedSerialFactory *rangedSerialFactory)
1107 {
1108 mQueueSerialFactory[index].reserve(rangedSerialFactory, count);
1109 }
1110
hasResourceUseSubmitted(const vk::ResourceUse & use)1111 ANGLE_INLINE bool Renderer::hasResourceUseSubmitted(const vk::ResourceUse &use) const
1112 {
1113 if (isAsyncCommandQueueEnabled())
1114 {
1115 return mCommandProcessor.hasResourceUseEnqueued(use);
1116 }
1117 else
1118 {
1119 return mCommandQueue.hasResourceUseSubmitted(use);
1120 }
1121 }
1122
hasQueueSerialSubmitted(const QueueSerial & queueSerial)1123 ANGLE_INLINE bool Renderer::hasQueueSerialSubmitted(const QueueSerial &queueSerial) const
1124 {
1125 if (isAsyncCommandQueueEnabled())
1126 {
1127 return mCommandProcessor.hasQueueSerialEnqueued(queueSerial);
1128 }
1129 else
1130 {
1131 return mCommandQueue.hasQueueSerialSubmitted(queueSerial);
1132 }
1133 }
1134
getLastSubmittedSerial(SerialIndex index)1135 ANGLE_INLINE Serial Renderer::getLastSubmittedSerial(SerialIndex index) const
1136 {
1137 if (isAsyncCommandQueueEnabled())
1138 {
1139 return mCommandProcessor.getLastEnqueuedSerial(index);
1140 }
1141 else
1142 {
1143 return mCommandQueue.getLastSubmittedSerial(index);
1144 }
1145 }
1146
// Returns whether the work associated with |use| has finished. Note that unlike
// hasResourceUseSubmitted, this always queries mCommandQueue, regardless of whether the async
// command queue is enabled.
ANGLE_INLINE bool Renderer::hasResourceUseFinished(const vk::ResourceUse &use) const
{
    return mCommandQueue.hasResourceUseFinished(use);
}
1151
// Returns whether the work associated with |queueSerial| has finished. As with
// hasResourceUseFinished, this always queries mCommandQueue even when the async command queue
// is enabled.
ANGLE_INLINE bool Renderer::hasQueueSerialFinished(const QueueSerial &queueSerial) const
{
    return mCommandQueue.hasQueueSerialFinished(queueSerial);
}
1156
waitForPresentToBeSubmitted(vk::SwapchainStatus * swapchainStatus)1157 ANGLE_INLINE angle::Result Renderer::waitForPresentToBeSubmitted(
1158 vk::SwapchainStatus *swapchainStatus)
1159 {
1160 if (isAsyncCommandQueueEnabled())
1161 {
1162 return mCommandProcessor.waitForPresentToBeSubmitted(swapchainStatus);
1163 }
1164 ASSERT(!swapchainStatus->isPending);
1165 return angle::Result::Continue;
1166 }
1167
// Asks the async command processor to process its pending commands and clean up garbage.
// NOTE(review): |context| is currently unused in this definition.
ANGLE_INLINE void Renderer::requestAsyncCommandsAndGarbageCleanup(vk::Context *context)
{
    mCommandProcessor.requestCommandsAndGarbageCleanup();
}
1172
// Thin wrapper over vk::CommandQueue::checkCompletedCommands; always goes through
// mCommandQueue, independent of whether the async command queue is enabled.
ANGLE_INLINE angle::Result Renderer::checkCompletedCommands(vk::Context *context)
{
    return mCommandQueue.checkCompletedCommands(context);
}
1177
// Checks for completed commands and cleans them up. Note the delegate has a slightly
// different name: vk::CommandQueue::checkAndCleanupCompletedCommands.
ANGLE_INLINE angle::Result Renderer::checkCompletedCommandsAndCleanup(vk::Context *context)
{
    return mCommandQueue.checkAndCleanupCompletedCommands(context);
}
1182
// Thin wrapper over vk::CommandQueue::releaseFinishedCommands; always goes through
// mCommandQueue, independent of whether the async command queue is enabled.
ANGLE_INLINE angle::Result Renderer::releaseFinishedCommands(vk::Context *context)
{
    return mCommandQueue.releaseFinishedCommands(context);
}
1187
// Destroys a variadic list of garbage objects. Each |object| is destroyed against the
// renderer's VkDevice if it is still valid (already-destroyed handles are skipped), then the
// remaining arguments are processed recursively. Per-argument overload resolution routes
// vk::Allocation* arguments to the dedicated overload in this file, which destroys through
// the VMA allocator instead. The recursion presumably terminates at a base overload taking
// only |renderer| declared elsewhere in this file -- not visible in this chunk; confirm.
template <typename ArgT, typename... ArgsT>
void DestroyGarbage(Renderer *renderer, ArgT object, ArgsT... objectsIn)
{
    if (object->valid())
    {
        object->destroy(renderer->getDevice());
    }
    DestroyGarbage(renderer, objectsIn...);
}
1197
// Overload of DestroyGarbage for vk::Allocation: VMA allocations are freed through the
// renderer's allocator rather than destroyed with the VkDevice. The remaining arguments
// continue through the same recursive dispatch.
template <typename... ArgsT>
void DestroyGarbage(Renderer *renderer, vk::Allocation *object, ArgsT... objectsIn)
{
    if (object->valid())
    {
        object->destroy(renderer->getAllocator());
    }
    DestroyGarbage(renderer, objectsIn...);
}
1207 } // namespace vk
1208 } // namespace rx
1209
1210 #endif // LIBANGLE_RENDERER_VULKAN_RENDERERVK_H_
1211