/*
 * Copyright 2022 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef skgpu_VulkanUtilsPriv_DEFINED
#define skgpu_VulkanUtilsPriv_DEFINED

#include "include/core/SkColor.h"
#include "include/core/SkRefCnt.h"
#include "include/core/SkTextureCompressionType.h"
#include "include/gpu/vk/VulkanTypes.h"
#include "include/private/base/SkAssert.h"
#include "include/private/gpu/vk/SkiaVulkan.h"
#include "src/gpu/SkSLToBackend.h"
#include "src/sksl/codegen/SkSLSPIRVCodeGenerator.h"

#ifdef SK_BUILD_FOR_ANDROID
#include <android/hardware_buffer.h>
#endif

#include <cstddef>
#include <cstdint>
#include <string>

namespace SkSL {

enum class ProgramKind : int8_t;
struct ProgramInterface;
struct ProgramSettings;
struct ShaderCaps;

}  // namespace SkSL

namespace skgpu {

class ShaderErrorHandler;
struct VulkanInterface;
struct VulkanBackendContext;
class VulkanExtensions;

inline bool SkSLToSPIRV(const SkSL::ShaderCaps* caps,
                        const std::string& sksl,
                        SkSL::ProgramKind programKind,
                        const SkSL::ProgramSettings& settings,
                        std::string* spirv,
                        SkSL::ProgramInterface* outInterface,
                        ShaderErrorHandler* errorHandler) {
    return SkSLToBackend(caps, &SkSL::ToSPIRV, /*backendLabel=*/nullptr,
                         sksl, programKind, settings, spirv, outInterface, errorHandler);
}

static constexpr uint32_t VkFormatChannels(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_R8G8B8A8_UNORM:           return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R8_UNORM:                 return kRed_SkColorChannelFlag;
        case VK_FORMAT_B8G8R8A8_UNORM:           return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R5G6B5_UNORM_PACK16:      return kRGB_SkColorChannelFlags;
        case VK_FORMAT_B5G6R5_UNORM_PACK16:      return kRGB_SkColorChannelFlags;
        case VK_FORMAT_R16G16B16A16_SFLOAT:      return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R16_SFLOAT:               return kRed_SkColorChannelFlag;
        case VK_FORMAT_R8G8B8_UNORM:             return kRGB_SkColorChannelFlags;
        case VK_FORMAT_R8G8_UNORM:               return kRG_SkColorChannelFlags;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_A2R10G10B10_UNORM_PACK32: return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:    return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:    return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R8G8B8A8_SRGB:            return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:  return kRGB_SkColorChannelFlags;
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:      return kRGB_SkColorChannelFlags;
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:     return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R16_UNORM:                return kRed_SkColorChannelFlag;
        case VK_FORMAT_R16G16_UNORM:             return kRG_SkColorChannelFlags;
        case VK_FORMAT_R16G16B16A16_UNORM:       return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R16G16_SFLOAT:            return kRG_SkColorChannelFlags;
        case VK_FORMAT_S8_UINT:                  return 0;
        case VK_FORMAT_D16_UNORM:                return 0;
        case VK_FORMAT_D32_SFLOAT:               return 0;
        case VK_FORMAT_D24_UNORM_S8_UINT:        return 0;
        case VK_FORMAT_D32_SFLOAT_S8_UINT:       return 0;
        default:                                 return 0;
    }
}

static constexpr size_t VkFormatBytesPerBlock(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_R8G8B8A8_UNORM:            return 4;
        case VK_FORMAT_R8_UNORM:                  return 1;
        case VK_FORMAT_B8G8R8A8_UNORM:            return 4;
        case VK_FORMAT_R5G6B5_UNORM_PACK16:       return 2;
        case VK_FORMAT_B5G6R5_UNORM_PACK16:       return 2;
        case VK_FORMAT_R16G16B16A16_SFLOAT:       return 8;
        case VK_FORMAT_R16_SFLOAT:                return 2;
        case VK_FORMAT_R8G8B8_UNORM:              return 3;
        case VK_FORMAT_R8G8_UNORM:                return 2;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:  return 4;
        case VK_FORMAT_A2R10G10B10_UNORM_PACK32:  return 4;
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:     return 2;
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:     return 2;
        case VK_FORMAT_R8G8B8A8_SRGB:             return 4;
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:   return 8;
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:       return 8;
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:      return 8;
        case VK_FORMAT_R16_UNORM:                 return 2;
        case VK_FORMAT_R16G16_UNORM:              return 4;
        case VK_FORMAT_R16G16B16A16_UNORM:        return 8;
        case VK_FORMAT_R16G16_SFLOAT:             return 4;
        // Currently we just overestimate this value for use in gpu size calculations, even
        // though the actual size is probably less. We should instead treat planar formats like
        // compressed textures, which go through their own special query for calculating size.
        case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM: return 3;
        case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:  return 3;
        case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: return 6;
        case VK_FORMAT_S8_UINT:                   return 1;
        case VK_FORMAT_D16_UNORM:                 return 2;
        case VK_FORMAT_D32_SFLOAT:                return 4;
        case VK_FORMAT_D24_UNORM_S8_UINT:         return 4;
        case VK_FORMAT_D32_SFLOAT_S8_UINT:        return 8;

        default:                                  return 0;
    }
}

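// Illustrative sketch, not part of Skia's API: a caller could combine VkFormatBytesPerBlock()
// with image dimensions for a rough size estimate of tightly-packed, non-compressed,
// single-plane formats (where one block is one pixel). The helper name below is hypothetical;
// compressed and planar formats need their own size queries, as noted above.
//
//     size_t estimateLinearImageBytes(VkFormat format, uint32_t width, uint32_t height) {
//         return VkFormatBytesPerBlock(format) * width * height;
//     }
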
static constexpr SkTextureCompressionType VkFormatToCompressionType(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK: return SkTextureCompressionType::kETC2_RGB8_UNORM;
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:     return SkTextureCompressionType::kBC1_RGB8_UNORM;
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:    return SkTextureCompressionType::kBC1_RGBA8_UNORM;
        default:                                return SkTextureCompressionType::kNone;
    }
}

static constexpr bool VkFormatIsStencil(VkFormat format) {
    switch (format) {
        case VK_FORMAT_S8_UINT:
        case VK_FORMAT_D24_UNORM_S8_UINT:
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return true;
        default:
            return false;
    }
}

static constexpr bool VkFormatIsDepth(VkFormat format) {
    switch (format) {
        case VK_FORMAT_D16_UNORM:
        case VK_FORMAT_D32_SFLOAT:
        case VK_FORMAT_D24_UNORM_S8_UINT:
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return true;
        default:
            return false;
    }
}

static constexpr int VkFormatStencilBits(VkFormat format) {
    switch (format) {
        case VK_FORMAT_S8_UINT:
            return 8;
        case VK_FORMAT_D24_UNORM_S8_UINT:
            return 8;
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return 8;
        default:
            return 0;
    }
}

static constexpr bool VkFormatNeedsYcbcrSampler(VkFormat format) {
    return format == VK_FORMAT_G8_B8R8_2PLANE_420_UNORM ||
           format == VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM ||
           format == VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16;
}

static constexpr bool SampleCountToVkSampleCount(uint32_t samples,
                                                 VkSampleCountFlagBits* vkSamples) {
    SkASSERT(samples >= 1);
    switch (samples) {
        case 1:
            *vkSamples = VK_SAMPLE_COUNT_1_BIT;
            return true;
        case 2:
            *vkSamples = VK_SAMPLE_COUNT_2_BIT;
            return true;
        case 4:
            *vkSamples = VK_SAMPLE_COUNT_4_BIT;
            return true;
        case 8:
            *vkSamples = VK_SAMPLE_COUNT_8_BIT;
            return true;
        case 16:
            *vkSamples = VK_SAMPLE_COUNT_16_BIT;
            return true;
        default:
            return false;
    }
}

/**
 * Returns true if the format is compressed.
 */
static constexpr bool VkFormatIsCompressed(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
            return true;
        default:
            return false;
    }
    SkUNREACHABLE;
}

/**
 * Returns a pointer to the requested extension feature struct, or nullptr if it is not present.
 */
template<typename T> T* GetExtensionFeatureStruct(const VkPhysicalDeviceFeatures2& features,
                                                  VkStructureType type) {
    // All Vulkan structs that could be part of the features chain will start with the
    // structure type followed by the pNext pointer. We cast to the CommonVulkanHeader
    // so we can get access to the pNext for the next struct.
    struct CommonVulkanHeader {
        VkStructureType sType;
        void*           pNext;
    };

    void* pNext = features.pNext;
    while (pNext) {
        CommonVulkanHeader* header = static_cast<CommonVulkanHeader*>(pNext);
        if (header->sType == type) {
            return static_cast<T*>(pNext);
        }
        pNext = header->pNext;
    }
    return nullptr;
}

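// Usage sketch (hypothetical caller code, not part of this header): locating the sampler
// Ycbcr conversion feature struct in a pNext chain that the caller has already populated
// and passed to vkGetPhysicalDeviceFeatures2. Variable names are illustrative only.
//
//     auto* ycbcrFeatures =
//             GetExtensionFeatureStruct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(
//                     features2,
//                     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES);
//     if (ycbcrFeatures && ycbcrFeatures->samplerYcbcrConversion) {
//         // Sampler Ycbcr conversion is supported by this device.
//     }
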
/**
 * Populates a VkSamplerYcbcrConversionCreateInfo from the given VulkanYcbcrConversionInfo.
 */
void SetupSamplerYcbcrConversionInfo(VkSamplerYcbcrConversionCreateInfo* outInfo,
                                     const VulkanYcbcrConversionInfo& conversionInfo);

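// Usage sketch (hypothetical, for illustration): the populated create info is typically handed
// to vkCreateSamplerYcbcrConversion through the caller's VulkanInterface dispatch.
//
//     VkSamplerYcbcrConversionCreateInfo createInfo;
//     SetupSamplerYcbcrConversionInfo(&createInfo, conversionInfo);
//     // ... create the VkSamplerYcbcrConversion and chain it into a
//     // VkSamplerYcbcrConversionInfo on the sampler/image view create info ...
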
static constexpr const char* VkFormatToStr(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_R8G8B8A8_UNORM:           return "R8G8B8A8_UNORM";
        case VK_FORMAT_R8_UNORM:                 return "R8_UNORM";
        case VK_FORMAT_B8G8R8A8_UNORM:           return "B8G8R8A8_UNORM";
        case VK_FORMAT_R5G6B5_UNORM_PACK16:      return "R5G6B5_UNORM_PACK16";
        case VK_FORMAT_B5G6R5_UNORM_PACK16:      return "B5G6R5_UNORM_PACK16";
        case VK_FORMAT_R16G16B16A16_SFLOAT:      return "R16G16B16A16_SFLOAT";
        case VK_FORMAT_R16_SFLOAT:               return "R16_SFLOAT";
        case VK_FORMAT_R8G8B8_UNORM:             return "R8G8B8_UNORM";
        case VK_FORMAT_R8G8_UNORM:               return "R8G8_UNORM";
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return "A2B10G10R10_UNORM_PACK32";
        case VK_FORMAT_A2R10G10B10_UNORM_PACK32: return "A2R10G10B10_UNORM_PACK32";
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:    return "B4G4R4A4_UNORM_PACK16";
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:    return "R4G4B4A4_UNORM_PACK16";
        case VK_FORMAT_R32G32B32A32_SFLOAT:      return "R32G32B32A32_SFLOAT";
        case VK_FORMAT_R8G8B8A8_SRGB:            return "R8G8B8A8_SRGB";
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:  return "ETC2_R8G8B8_UNORM_BLOCK";
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:      return "BC1_RGB_UNORM_BLOCK";
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:     return "BC1_RGBA_UNORM_BLOCK";
        case VK_FORMAT_R16_UNORM:                return "R16_UNORM";
        case VK_FORMAT_R16G16_UNORM:             return "R16G16_UNORM";
        case VK_FORMAT_R16G16B16A16_UNORM:       return "R16G16B16A16_UNORM";
        case VK_FORMAT_R16G16_SFLOAT:            return "R16G16_SFLOAT";
        case VK_FORMAT_S8_UINT:                  return "S8_UINT";
        case VK_FORMAT_D16_UNORM:                return "D16_UNORM";
        case VK_FORMAT_D32_SFLOAT:               return "D32_SFLOAT";
        case VK_FORMAT_D24_UNORM_S8_UINT:        return "D24_UNORM_S8_UINT";
        case VK_FORMAT_D32_SFLOAT_S8_UINT:       return "D32_SFLOAT_S8_UINT";

        default:                                 return "Unknown";
    }
}

#ifdef SK_BUILD_FOR_ANDROID
/**
 * Vulkan AHardwareBuffer utility functions shared between Graphite and Ganesh.
 */
void GetYcbcrConversionInfoFromFormatProps(
        VulkanYcbcrConversionInfo* outConversionInfo,
        const VkAndroidHardwareBufferFormatPropertiesANDROID& formatProps);

bool GetAHardwareBufferProperties(
        VkAndroidHardwareBufferFormatPropertiesANDROID* outHwbFormatProps,
        VkAndroidHardwareBufferPropertiesANDROID* outHwbProps,
        const skgpu::VulkanInterface*,
        const AHardwareBuffer*,
        VkDevice);

bool AllocateAndBindImageMemory(skgpu::VulkanAlloc* outVulkanAlloc,
                                VkImage,
                                const VkPhysicalDeviceMemoryProperties2&,
                                const VkAndroidHardwareBufferPropertiesANDROID&,
                                AHardwareBuffer*,
                                const skgpu::VulkanInterface*,
                                VkDevice);
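
// Hedged usage sketch (caller-side flow, illustrative names only): importing an AHardwareBuffer
// typically queries its properties, derives Ycbcr conversion info for external formats, creates
// the VkImage, then allocates and binds its memory.
//
//     VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
//     VkAndroidHardwareBufferPropertiesANDROID hwbProps;
//     if (!GetAHardwareBufferProperties(&hwbFormatProps, &hwbProps, interface, buffer, device)) {
//         // handle failure
//     }
//     VulkanYcbcrConversionInfo ycbcrInfo;
//     GetYcbcrConversionInfoFromFormatProps(&ycbcrInfo, hwbFormatProps);
//     // ... create the VkImage using hwbFormatProps ...
//     VulkanAlloc alloc;
//     if (!AllocateAndBindImageMemory(&alloc, image, physDevMemProps, hwbProps,
//                                     buffer, interface, device)) {
//         // handle failure
//     }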

#endif // SK_BUILD_FOR_ANDROID

/**
 * Calls faultProc with faultContext; passes debug info if VK_EXT_device_fault is supported/enabled.
 *
 * Note: must only be called *after* receiving VK_ERROR_DEVICE_LOST.
 */
void InvokeDeviceLostCallback(const skgpu::VulkanInterface* vulkanInterface,
                              VkDevice vkDevice,
                              skgpu::VulkanDeviceLostContext faultContext,
                              skgpu::VulkanDeviceLostProc faultProc,
                              bool supportsDeviceFaultInfoExtension);

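// Hedged usage sketch (caller-side, illustrative names): this is only invoked after a Vulkan
// call has already returned VK_ERROR_DEVICE_LOST, per the note above.
//
//     if (submitResult == VK_ERROR_DEVICE_LOST) {
//         InvokeDeviceLostCallback(interface, device, deviceLostContext, deviceLostProc,
//                                  supportsDeviceFaultExtension);
//     }
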
sk_sp<skgpu::VulkanInterface> MakeInterface(const skgpu::VulkanBackendContext&,
                                            const skgpu::VulkanExtensions* extOverride,
                                            uint32_t* physDevVersionOut,
                                            uint32_t* instanceVersionOut);

}  // namespace skgpu

#endif // skgpu_VulkanUtilsPriv_DEFINED