/*
 * Copyright 2023 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/VulkanUtilsPriv.h"

#include "include/gpu/vk/VulkanBackendContext.h"
#include "include/private/base/SkDebug.h"
#include "src/gpu/vk/VulkanInterface.h"

#include <algorithm>
#include <vector>

namespace skgpu {

/**
 * Define a macro that both ganesh and graphite can use to make simple calls into Vulkan so we can
 * share more code between them.
 */
#define SHARED_GR_VULKAN_CALL(IFACE, X) (IFACE)->fFunctions.f##X
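// For example, SHARED_GR_VULKAN_CALL(iface, FreeMemory(device, memory, nullptr)) expands to
// (iface)->fFunctions.fFreeMemory(device, memory, nullptr).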

/**
 * Populates the given VkSamplerYcbcrConversionCreateInfo based on the passed-in
 * VulkanYcbcrConversionInfo.
 */
void SetupSamplerYcbcrConversionInfo(VkSamplerYcbcrConversionCreateInfo* outInfo,
                                     const VulkanYcbcrConversionInfo& conversionInfo) {
#ifdef SK_DEBUG
    const VkFormatFeatureFlags& featureFlags = conversionInfo.fFormatFeatures;

    // Format feature flags are only representative of an external format's capabilities, so skip
    // these checks in the case of using a known format.
    if (conversionInfo.fFormat == VK_FORMAT_UNDEFINED) {
        if (conversionInfo.fXChromaOffset == VK_CHROMA_LOCATION_MIDPOINT ||
            conversionInfo.fYChromaOffset == VK_CHROMA_LOCATION_MIDPOINT) {
            SkASSERT(featureFlags & VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT);
        }
        if (conversionInfo.fXChromaOffset == VK_CHROMA_LOCATION_COSITED_EVEN ||
            conversionInfo.fYChromaOffset == VK_CHROMA_LOCATION_COSITED_EVEN) {
            SkASSERT(featureFlags & VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT);
        }
        if (conversionInfo.fChromaFilter == VK_FILTER_LINEAR) {
            SkASSERT(featureFlags &
                     VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT);
        }
        if (conversionInfo.fForceExplicitReconstruction) {
            SkASSERT(featureFlags &
                     VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT);
        }
    }
#endif

    VkFilter chromaFilter = conversionInfo.fChromaFilter;
    if (!(conversionInfo.fFormatFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT)) {
        if (!(conversionInfo.fFormatFeatures &
              VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT)) {
            // Because we don't have a separate reconstruction filter, the min, mag and
            // chroma filter must all match. However, we also don't support linear sampling so
            // the min/mag filter has to be nearest. Therefore, we force the chroma filter to
            // be nearest regardless of support for the feature
            // VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT.
            chromaFilter = VK_FILTER_NEAREST;
        }
    }

    outInfo->sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
    outInfo->pNext = nullptr;
    outInfo->format = conversionInfo.fFormat;
    outInfo->ycbcrModel = conversionInfo.fYcbcrModel;
    outInfo->ycbcrRange = conversionInfo.fYcbcrRange;
    outInfo->components = conversionInfo.fComponents;
    outInfo->xChromaOffset = conversionInfo.fXChromaOffset;
    outInfo->yChromaOffset = conversionInfo.fYChromaOffset;
    outInfo->chromaFilter = chromaFilter;
    outInfo->forceExplicitReconstruction = conversionInfo.fForceExplicitReconstruction;
}
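
// A minimal sketch of how a caller might use the helper above to create a
// VkSamplerYcbcrConversion (illustrative only; the actual ganesh/graphite call sites differ):
//
//     VkSamplerYcbcrConversionCreateInfo createInfo;
//     SetupSamplerYcbcrConversionInfo(&createInfo, ycbcrInfo);
//     VkSamplerYcbcrConversion conversion;
//     VkResult result = SHARED_GR_VULKAN_CALL(
//             iface, CreateSamplerYcbcrConversion(device, &createInfo, nullptr, &conversion));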

#ifdef SK_BUILD_FOR_ANDROID

/**
 * Shared Vulkan AHardwareBuffer utility functions between graphite and ganesh
 */
void GetYcbcrConversionInfoFromFormatProps(
        VulkanYcbcrConversionInfo* outConversionInfo,
        const VkAndroidHardwareBufferFormatPropertiesANDROID& formatProps) {
    outConversionInfo->fYcbcrModel = formatProps.suggestedYcbcrModel;
    outConversionInfo->fYcbcrRange = formatProps.suggestedYcbcrRange;
    outConversionInfo->fComponents = formatProps.samplerYcbcrConversionComponents;
    outConversionInfo->fXChromaOffset = formatProps.suggestedXChromaOffset;
    outConversionInfo->fYChromaOffset = formatProps.suggestedYChromaOffset;
    outConversionInfo->fForceExplicitReconstruction = VK_FALSE;
    outConversionInfo->fExternalFormat = formatProps.externalFormat;
    outConversionInfo->fFormatFeatures = formatProps.formatFeatures;
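    // Prefer linear chroma filtering when the external format supports it; otherwise fall back
    // to nearest.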
    if (VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT &
        formatProps.formatFeatures) {
        outConversionInfo->fChromaFilter = VK_FILTER_LINEAR;
    } else {
        outConversionInfo->fChromaFilter = VK_FILTER_NEAREST;
    }
}
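
/**
 * Queries the properties of the given AHardwareBuffer via
 * vkGetAndroidHardwareBufferPropertiesANDROID, filling in both the general and format-specific
 * property structs. Returns false and logs a message if the query fails.
 */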
bool GetAHardwareBufferProperties(
        VkAndroidHardwareBufferFormatPropertiesANDROID* outHwbFormatProps,
        VkAndroidHardwareBufferPropertiesANDROID* outHwbProps,
        const skgpu::VulkanInterface* interface,
        const AHardwareBuffer* hwBuffer,
        VkDevice device) {
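    // Chain the format-specific properties onto the general buffer-properties query via pNext.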
    outHwbFormatProps->sType =
            VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
    outHwbFormatProps->pNext = nullptr;

    outHwbProps->sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
    outHwbProps->pNext = outHwbFormatProps;

    VkResult result =
            SHARED_GR_VULKAN_CALL(interface,
                                  GetAndroidHardwareBufferProperties(device,
                                                                     hwBuffer,
                                                                     outHwbProps));
    if (result != VK_SUCCESS) {
        // The spec suggests VK_ERROR_OUT_OF_HOST_MEMORY and VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR
        // are the only failure codes, but some platforms may report others, such as
        // VK_ERROR_FORMAT_NOT_SUPPORTED (-11).
        SkDebugf("Failed to get AndroidHardwareBufferProperties (result:%d)", result);
#if __ANDROID_API__ >= 26
        AHardwareBuffer_Desc hwbDesc;
        AHardwareBuffer_describe(hwBuffer, &hwbDesc);
        SkDebugf("^ %" PRIu32 "x%" PRIu32 " AHB -- format:%" PRIu32 ", usage:%" PRIu64
                 ", layers:%" PRIu32,
                 hwbDesc.width,
                 hwbDesc.height,
                 hwbDesc.format,
                 hwbDesc.usage,
                 hwbDesc.layers);
#endif
        return false;
    }
    return true;
}
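
/**
 * Imports the memory backing the given AHardwareBuffer as a dedicated allocation, binds it to
 * image, and fills in outVulkanAlloc. Returns false if no compatible memory type is found or if
 * allocation/binding fails.
 */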
bool AllocateAndBindImageMemory(skgpu::VulkanAlloc* outVulkanAlloc,
                                VkImage image,
                                const VkPhysicalDeviceMemoryProperties2& phyDevMemProps,
                                const VkAndroidHardwareBufferPropertiesANDROID& hwbProps,
                                AHardwareBuffer* hardwareBuffer,
                                const skgpu::VulkanInterface* interface,
                                VkDevice device) {
    VkResult result;
    uint32_t typeIndex = 0;
    bool foundHeap = false;
    uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
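    // Prefer a device-local memory type that is compatible with importing the AHB.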
    for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
        if (hwbProps.memoryTypeBits & (1 << i)) {
            const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
            uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
                                      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
                typeIndex = i;
                foundHeap = true;
            }
        }
    }

    /**
     * Fall back to any available memory type for the AHB.
     *
     * For external memory import, compatible memory types are decided by the Vulkan driver since
     * the memory has been allocated externally. There are usually special requirements for
     * external memory. e.g. an AHB allocated with CPU read/write usage bits is often only
     * importable into a non-device-local heap on some AMD systems.
     */
    if (!foundHeap && hwbProps.memoryTypeBits) {
        typeIndex = ffs(hwbProps.memoryTypeBits) - 1;
        foundHeap = true;
    }
    if (!foundHeap) {
        return false;
    }
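
    // Import the AHB's memory as a dedicated allocation tied to the image.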
    VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
    hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
    hwbImportInfo.pNext = nullptr;
    hwbImportInfo.buffer = hardwareBuffer;

    VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
    dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
    dedicatedAllocInfo.pNext = &hwbImportInfo;
    dedicatedAllocInfo.image = image;
    dedicatedAllocInfo.buffer = VK_NULL_HANDLE;

    VkMemoryAllocateInfo allocInfo = {
        VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,      // sType
        &dedicatedAllocInfo,                         // pNext
        hwbProps.allocationSize,                     // allocationSize
        typeIndex,                                   // memoryTypeIndex
    };

    VkDeviceMemory memory;
    result = SHARED_GR_VULKAN_CALL(interface,
                                   AllocateMemory(device, &allocInfo, nullptr, &memory));
    if (result != VK_SUCCESS) {
        return false;
    }

    VkBindImageMemoryInfo bindImageInfo;
    bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindImageInfo.pNext = nullptr;
    bindImageInfo.image = image;
    bindImageInfo.memory = memory;
    bindImageInfo.memoryOffset = 0;

    result = SHARED_GR_VULKAN_CALL(interface, BindImageMemory2(device, 1, &bindImageInfo));
    if (result != VK_SUCCESS) {
        SHARED_GR_VULKAN_CALL(interface, FreeMemory(device, memory, nullptr));
        return false;
    }

    outVulkanAlloc->fMemory = memory;
    outVulkanAlloc->fOffset = 0;
    outVulkanAlloc->fSize = hwbProps.allocationSize;
    outVulkanAlloc->fFlags = 0;
    outVulkanAlloc->fBackendMemory = 0;
    return true;
}

#endif // SK_BUILD_FOR_ANDROID
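
// Gathers fault details via VK_EXT_device_fault (when supported) and forwards them to the
// client-provided device-lost callback.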
// Note: since this is called from Vulkan result-checking functions, any Vk calls this function
// makes must NOT be checked with those same functions to avoid infinite recursion.
void InvokeDeviceLostCallback(const skgpu::VulkanInterface* vulkanInterface,
                              VkDevice vkDevice,
                              skgpu::VulkanDeviceLostContext deviceLostContext,
                              skgpu::VulkanDeviceLostProc deviceLostProc,
                              bool supportsDeviceFaultInfoExtension) {
    if (!deviceLostProc) {
        return;
    }

    std::vector<VkDeviceFaultAddressInfoEXT> addressInfos = {};
    std::vector<VkDeviceFaultVendorInfoEXT> vendorInfos = {};
    std::vector<std::byte> vendorBinaryData = {};

    if (!supportsDeviceFaultInfoExtension) {
        deviceLostProc(deviceLostContext,
                       "No details: VK_EXT_device_fault not available/enabled.",
                       addressInfos,
                       vendorInfos,
                       vendorBinaryData);
        return;
    }

    // Query counts
    VkDeviceFaultCountsEXT faultCounts = {};
    faultCounts.sType = VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT;
    VkResult result = SHARED_GR_VULKAN_CALL(vulkanInterface,
                                            GetDeviceFaultInfo(vkDevice, &faultCounts, NULL));
    if (result != VK_SUCCESS) {
        deviceLostProc(
                deviceLostContext,
                "No details: VK_EXT_device_fault error counting failed: " + std::to_string(result),
                addressInfos,
                vendorInfos,
                vendorBinaryData);
        return;
    }

    // Prepare storage
    addressInfos.resize(faultCounts.addressInfoCount);
    vendorInfos.resize(faultCounts.vendorInfoCount);
    vendorBinaryData.resize(faultCounts.vendorBinarySize);

    // Query fault info
    VkDeviceFaultInfoEXT faultInfo = {};
    faultInfo.sType = VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT;
    faultInfo.pAddressInfos = addressInfos.data();
    faultInfo.pVendorInfos = vendorInfos.data();
    faultInfo.pVendorBinaryData =
            faultCounts.vendorBinarySize > 0 ? vendorBinaryData.data() : nullptr;
    result = SHARED_GR_VULKAN_CALL(vulkanInterface,
                                   GetDeviceFaultInfo(vkDevice, &faultCounts, &faultInfo));
    if (result != VK_SUCCESS) {
        deviceLostProc(
                deviceLostContext,
                "No details: VK_EXT_device_fault info dumping failed: " + std::to_string(result),
                addressInfos,
                vendorInfos,
                vendorBinaryData);
        return;
    }

    deviceLostProc(deviceLostContext,
                   std::string(faultInfo.description),
                   addressInfos,
                   vendorInfos,
                   vendorBinaryData);
}
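
/**
 * Builds a VulkanInterface from the client-provided VulkanBackendContext, clamping the instance
 * and physical-device versions to the client's max API version, and validating that all required
 * proc addresses were found. Returns nullptr on failure.
 */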
sk_sp<skgpu::VulkanInterface> MakeInterface(const skgpu::VulkanBackendContext& context,
                                            const skgpu::VulkanExtensions* extOverride,
                                            uint32_t* instanceVersionOut,
                                            uint32_t* physDevVersionOut) {
    if (!extOverride) {
        extOverride = context.fVkExtensions;
    }
    SkASSERT(extOverride);
    PFN_vkEnumerateInstanceVersion localEnumerateInstanceVersion =
            reinterpret_cast<PFN_vkEnumerateInstanceVersion>(
                    context.fGetProc("vkEnumerateInstanceVersion", VK_NULL_HANDLE, VK_NULL_HANDLE));
    uint32_t instanceVersion = 0;
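    // vkEnumerateInstanceVersion was added in Vulkan 1.1; if the loader does not expose it,
    // assume a 1.0 instance.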
    if (!localEnumerateInstanceVersion) {
        instanceVersion = VK_MAKE_VERSION(1, 0, 0);
    } else {
        VkResult err = localEnumerateInstanceVersion(&instanceVersion);
        if (err) {
            return nullptr;
        }
    }

    PFN_vkGetPhysicalDeviceProperties localGetPhysicalDeviceProperties =
            reinterpret_cast<PFN_vkGetPhysicalDeviceProperties>(context.fGetProc(
                    "vkGetPhysicalDeviceProperties", context.fInstance, VK_NULL_HANDLE));

    if (!localGetPhysicalDeviceProperties) {
        return nullptr;
    }
    VkPhysicalDeviceProperties physDeviceProperties;
    localGetPhysicalDeviceProperties(context.fPhysicalDevice, &physDeviceProperties);
    uint32_t physDevVersion = physDeviceProperties.apiVersion;
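
    // Clamp both the instance and physical-device versions to the client's requested max API
    // version, if one was provided.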
    uint32_t apiVersion = context.fMaxAPIVersion ? context.fMaxAPIVersion : instanceVersion;

    instanceVersion = std::min(instanceVersion, apiVersion);
    physDevVersion = std::min(physDevVersion, apiVersion);

    sk_sp<skgpu::VulkanInterface> interface(new skgpu::VulkanInterface(context.fGetProc,
                                                                       context.fInstance,
                                                                       context.fDevice,
                                                                       instanceVersion,
                                                                       physDevVersion,
                                                                       extOverride));
    if (!interface->validate(instanceVersion, physDevVersion, extOverride)) {
        return nullptr;
    }
    if (physDevVersionOut) {
        *physDevVersionOut = physDevVersion;
    }
    if (instanceVersionOut) {
        *instanceVersionOut = instanceVersion;
    }
    return interface;
}

} // namespace skgpu