1 /*
2 * Copyright 2022 Google LLC
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8 #include "src/gpu/graphite/vk/VulkanResourceProvider.h"
9
10 #include "include/core/SkSpan.h"
11 #include "include/gpu/MutableTextureState.h"
12 #include "include/gpu/graphite/BackendTexture.h"
13 #include "include/gpu/graphite/vk/VulkanGraphiteTypes.h"
14 #include "include/gpu/vk/VulkanMutableTextureState.h"
15 #include "src/gpu/graphite/Buffer.h"
16 #include "src/gpu/graphite/ComputePipeline.h"
17 #include "src/gpu/graphite/GraphicsPipeline.h"
18 #include "src/gpu/graphite/RenderPassDesc.h"
19 #include "src/gpu/graphite/Sampler.h"
20 #include "src/gpu/graphite/Texture.h"
21 #include "src/gpu/graphite/vk/VulkanBuffer.h"
22 #include "src/gpu/graphite/vk/VulkanCommandBuffer.h"
23 #include "src/gpu/graphite/vk/VulkanDescriptorPool.h"
24 #include "src/gpu/graphite/vk/VulkanDescriptorSet.h"
25 #include "src/gpu/graphite/vk/VulkanFramebuffer.h"
26 #include "src/gpu/graphite/vk/VulkanGraphicsPipeline.h"
27 #include "src/gpu/graphite/vk/VulkanGraphiteTypesPriv.h"
28 #include "src/gpu/graphite/vk/VulkanRenderPass.h"
29 #include "src/gpu/graphite/vk/VulkanSampler.h"
30 #include "src/gpu/graphite/vk/VulkanSharedContext.h"
31 #include "src/gpu/graphite/vk/VulkanTexture.h"
32 #include "src/gpu/graphite/vk/VulkanYcbcrConversion.h"
33 #include "src/gpu/vk/VulkanMemory.h"
34 #include "src/sksl/SkSLCompiler.h"
35
36 #ifdef SK_BUILD_FOR_ANDROID
37 #include "src/gpu/vk/VulkanUtilsPriv.h"
38 #include <android/hardware_buffer.h>
39 #endif
40
41 namespace skgpu::graphite {
42
// Upper bound on the number of entries kept in fUniformBufferDescSetCache (see the
// constructor), which caches descriptor sets keyed on uniform-buffer bind groups.
constexpr int kMaxNumberOfCachedBufferDescSets = 1024;
44
// The intrinsic constant uniform buffer is created by the caller and handed off here; it is
// re-referenced later through refIntrinsicConstantBuffer().
VulkanResourceProvider::VulkanResourceProvider(SharedContext* sharedContext,
                                               SingleOwner* singleOwner,
                                               uint32_t recorderID,
                                               size_t resourceBudget,
                                               sk_sp<Buffer> intrinsicConstantUniformBuffer)
        : ResourceProvider(sharedContext, singleOwner, recorderID, resourceBudget)
        , fIntrinsicUniformBuffer(std::move(intrinsicConstantUniformBuffer))
        // Bound the uniform-buffer descriptor set cache to a fixed number of entries.
        , fUniformBufferDescSetCache(kMaxNumberOfCachedBufferDescSets) {}
53
~VulkanResourceProvider()54 VulkanResourceProvider::~VulkanResourceProvider() {
55 if (fPipelineCache != VK_NULL_HANDLE) {
56 VULKAN_CALL(this->vulkanSharedContext()->interface(),
57 DestroyPipelineCache(this->vulkanSharedContext()->device(),
58 fPipelineCache,
59 nullptr));
60 }
61 if (fMSAALoadVertShaderModule != VK_NULL_HANDLE) {
62 VULKAN_CALL(this->vulkanSharedContext()->interface(),
63 DestroyShaderModule(this->vulkanSharedContext()->device(),
64 fMSAALoadVertShaderModule,
65 nullptr));
66 }
67 if (fMSAALoadFragShaderModule != VK_NULL_HANDLE) {
68 VULKAN_CALL(this->vulkanSharedContext()->interface(),
69 DestroyShaderModule(this->vulkanSharedContext()->device(),
70 fMSAALoadFragShaderModule,
71 nullptr));
72 }
73 if (fMSAALoadPipelineLayout != VK_NULL_HANDLE) {
74 VULKAN_CALL(this->vulkanSharedContext()->interface(),
75 DestroyPipelineLayout(this->vulkanSharedContext()->device(),
76 fMSAALoadPipelineLayout,
77 nullptr));
78 }
79 }
80
// Convenience downcast of the base-class context pointer; a VulkanResourceProvider is assumed
// to only ever be constructed with a VulkanSharedContext.
const VulkanSharedContext* VulkanResourceProvider::vulkanSharedContext() const {
    return static_cast<const VulkanSharedContext*>(fSharedContext);
}
84
onCreateWrappedTexture(const BackendTexture & texture)85 sk_sp<Texture> VulkanResourceProvider::onCreateWrappedTexture(const BackendTexture& texture) {
86 sk_sp<VulkanYcbcrConversion> ycbcrConversion;
87 if (TextureInfos::GetVulkanYcbcrConversionInfo(texture.info()).isValid()) {
88 ycbcrConversion = this->findOrCreateCompatibleYcbcrConversion(
89 TextureInfos::GetVulkanYcbcrConversionInfo(texture.info()));
90 if (!ycbcrConversion) {
91 return nullptr;
92 }
93 }
94
95 return VulkanTexture::MakeWrapped(this->vulkanSharedContext(),
96 texture.dimensions(),
97 texture.info(),
98 BackendTextures::GetMutableState(texture),
99 BackendTextures::GetVkImage(texture),
100 /*alloc=*/{} /*Skia does not own wrapped texture memory*/,
101 std::move(ycbcrConversion));
102 }
103
// Returns an additional ref to the intrinsic uniform buffer supplied at construction time.
sk_sp<Buffer> VulkanResourceProvider::refIntrinsicConstantBuffer() const {
    return fIntrinsicUniformBuffer;
}
107
// Thin wrapper: graphics pipeline creation is delegated entirely to
// VulkanGraphicsPipeline::Make.
sk_sp<GraphicsPipeline> VulkanResourceProvider::createGraphicsPipeline(
        const RuntimeEffectDictionary* runtimeDict,
        const GraphicsPipelineDesc& pipelineDesc,
        const RenderPassDesc& renderPassDesc,
        SkEnumBitMask<PipelineCreationFlags> pipelineCreationFlags) {
    return VulkanGraphicsPipeline::Make(this,
                                        runtimeDict,
                                        pipelineDesc,
                                        renderPassDesc,
                                        pipelineCreationFlags);
}
119
// Compute pipelines are not implemented in this backend; always returns nullptr.
sk_sp<ComputePipeline> VulkanResourceProvider::createComputePipeline(const ComputePipelineDesc&) {
    return nullptr;
}
123
createTexture(SkISize size,const TextureInfo & info,skgpu::Budgeted budgeted)124 sk_sp<Texture> VulkanResourceProvider::createTexture(SkISize size,
125 const TextureInfo& info,
126 skgpu::Budgeted budgeted) {
127 sk_sp<VulkanYcbcrConversion> ycbcrConversion;
128 if (TextureInfos::GetVulkanYcbcrConversionInfo(info).isValid()) {
129 ycbcrConversion = this->findOrCreateCompatibleYcbcrConversion(
130 TextureInfos::GetVulkanYcbcrConversionInfo(info));
131 if (!ycbcrConversion) {
132 return nullptr;
133 }
134 }
135
136 return VulkanTexture::Make(this->vulkanSharedContext(),
137 size,
138 info,
139 budgeted,
140 std::move(ycbcrConversion));
141 }
142
// Thin wrapper over VulkanBuffer::Make.
sk_sp<Buffer> VulkanResourceProvider::createBuffer(size_t size,
                                                   BufferType type,
                                                   AccessPattern accessPattern) {
    return VulkanBuffer::Make(this->vulkanSharedContext(), size, type, accessPattern);
}
148
// Creates a VulkanSampler. If the SamplerDesc encodes immutable-sampler (ycbcr) information in
// its high bits, the matching VulkanYcbcrConversion is looked up in the resource cache or
// created and cached before the sampler itself is built.
sk_sp<Sampler> VulkanResourceProvider::createSampler(const SamplerDesc& samplerDesc) {
    sk_sp<VulkanYcbcrConversion> ycbcrConversion = nullptr;

    // Non-zero conversion information means the sampler utilizes a ycbcr conversion.
    bool usesYcbcrConversion = (samplerDesc.desc() >> SamplerDesc::kImmutableSamplerInfoShift) != 0;
    if (usesYcbcrConversion) {
        // Check the resource cache first; only create a new conversion on a miss.
        GraphiteResourceKey ycbcrKey = VulkanYcbcrConversion::GetKeyFromSamplerDesc(samplerDesc);
        if (Resource* resource = fResourceCache->findAndRefResource(ycbcrKey,
                                                                    skgpu::Budgeted::kYes)) {
            ycbcrConversion =
                    sk_sp<VulkanYcbcrConversion>(static_cast<VulkanYcbcrConversion*>(resource));
        } else {
            // Rebuild the conversion parameters from the packed SamplerDesc bits: the bits above
            // kImmutableSamplerInfoShift carry the conversion info, and the 64-bit external
            // format is reassembled from externalFormatMSBs() (upper 32) and format() (lower 32).
            ycbcrConversion = VulkanYcbcrConversion::Make(
                    this->vulkanSharedContext(),
                    static_cast<uint32_t>(
                            samplerDesc.desc() >> SamplerDesc::kImmutableSamplerInfoShift),
                    (uint64_t)(samplerDesc.externalFormatMSBs()) << 32 | samplerDesc.format());
            SkASSERT(ycbcrConversion);

            // Cache the new conversion so future samplers with the same desc can share it.
            ycbcrConversion->setKey(ycbcrKey);
            fResourceCache->insertResource(ycbcrConversion.get());
        }
    }

    return VulkanSampler::Make(this->vulkanSharedContext(),
                               samplerDesc,
                               std::move(ycbcrConversion));
}
177
// Creates a client-owned BackendTexture backed by a freshly allocated VkImage. Returns an
// invalid BackendTexture on any failure.
BackendTexture VulkanResourceProvider::onCreateBackendTexture(SkISize dimensions,
                                                              const TextureInfo& info) {
    // Only Vulkan-backed TextureInfos can be converted; anything else fails the extraction.
    VulkanTextureInfo vkTexInfo;
    if (!TextureInfos::GetVulkanTextureInfo(info, &vkTexInfo)) {
        return {};
    }
    // Create the VkImage and its backing memory allocation.
    VulkanTexture::CreatedImageInfo createdTextureInfo;
    if (!VulkanTexture::MakeVkImage(this->vulkanSharedContext(), dimensions, info,
                                    &createdTextureInfo)) {
        return {};
    }
    // Pull the initial layout/queue family out of the mutable state created alongside the image.
    return BackendTextures::MakeVulkan(
            dimensions,
            vkTexInfo,
            skgpu::MutableTextureStates::GetVkImageLayout(createdTextureInfo.fMutableState.get()),
            skgpu::MutableTextureStates::GetVkQueueFamilyIndex(
                    createdTextureInfo.fMutableState.get()),
            createdTextureInfo.fImage,
            createdTextureInfo.fMemoryAlloc);
}
198
199 namespace {
// Builds a resource-cache key that uniquely describes the structure of a descriptor set
// (count, then one packed word per descriptor, plus full SamplerDesc data for any immutable
// samplers).
GraphiteResourceKey build_desc_set_key(const SkSpan<DescriptorData>& requestedDescriptors) {
    static const ResourceType kType = GraphiteResourceKey::GenerateResourceType();

    // The number of int32s needed for a key can depend on whether we use immutable samplers or
    // not. So, accumulate key data while passing through to check for that quantity and simply
    // copy into builder afterwards.
    skia_private::TArray<uint32_t> keyData (requestedDescriptors.size() + 1);

    keyData.push_back(requestedDescriptors.size());
    for (const DescriptorData& desc : requestedDescriptors) {
        // Pack type (high 8 bits), binding index, and count (low 16 bits) into one word.
        keyData.push_back(static_cast<uint8_t>(desc.fType) << 24 |
                          desc.fBindingIndex << 16 |
                          static_cast<uint16_t>(desc.fCount));
        // Immutable samplers contribute their full SamplerDesc so that otherwise-identical
        // sets with different samplers do not collide in the cache.
        if (desc.fImmutableSampler) {
            const VulkanSampler* sampler =
                    static_cast<const VulkanSampler*>(desc.fImmutableSampler);
            SkASSERT(sampler);
            keyData.push_back_n(sampler->samplerDesc().asSpan().size(),
                                sampler->samplerDesc().asSpan().data());
        }
    }

    GraphiteResourceKey key;
    GraphiteResourceKey::Builder builder(&key, kType, keyData.size(), Shareable::kNo);

    for (int i = 0; i < keyData.size(); i++) {
        builder[i] = keyData[i];
    }

    builder.finish();
    return key;
}
232
// Allocates one descriptor set out of the given pool, registers it with the resource cache
// under descSetKey, and returns it. Returns nullptr if the pool cannot provide a set.
sk_sp<VulkanDescriptorSet> add_new_desc_set_to_cache(const VulkanSharedContext* context,
                                                     const sk_sp<VulkanDescriptorPool>& pool,
                                                     const GraphiteResourceKey& descSetKey,
                                                     ResourceCache* resourceCache) {
    if (sk_sp<VulkanDescriptorSet> newSet = VulkanDescriptorSet::Make(context, pool)) {
        newSet->setKey(descSetKey);
        resourceCache->insertResource(newSet.get());
        return newSet;
    }
    return nullptr;
}
246 } // anonymous namespace
247
findOrCreateDescriptorSet(SkSpan<DescriptorData> requestedDescriptors)248 sk_sp<VulkanDescriptorSet> VulkanResourceProvider::findOrCreateDescriptorSet(
249 SkSpan<DescriptorData> requestedDescriptors) {
250 if (requestedDescriptors.empty()) {
251 return nullptr;
252 }
253 // Search for available descriptor sets by assembling a key based upon the set's structure.
254 GraphiteResourceKey key = build_desc_set_key(requestedDescriptors);
255 if (auto descSet = fResourceCache->findAndRefResource(key, skgpu::Budgeted::kYes)) {
256 // A non-null resource pointer indicates we have found an available descriptor set.
257 return sk_sp<VulkanDescriptorSet>(static_cast<VulkanDescriptorSet*>(descSet));
258 }
259
260
261 // If we did not find an existing avilable desc set, allocate sets with the appropriate layout
262 // and add them to the cache.
263 VkDescriptorSetLayout layout;
264 const VulkanSharedContext* context = this->vulkanSharedContext();
265 DescriptorDataToVkDescSetLayout(context, requestedDescriptors, &layout);
266 if (!layout) {
267 return nullptr;
268 }
269 auto pool = VulkanDescriptorPool::Make(context, requestedDescriptors, layout);
270 if (!pool) {
271 VULKAN_CALL(context->interface(), DestroyDescriptorSetLayout(context->device(),
272 layout,
273 nullptr));
274 return nullptr;
275 }
276
277 // Start with allocating one descriptor set. If one cannot be successfully created, then we can
278 // return early before attempting to allocate more. Storing a ptr to the first set also
279 // allows us to return that later without having to perform a find operation on the cache once
280 // all the sets are added.
281 auto firstDescSet =
282 add_new_desc_set_to_cache(context, pool, key, fResourceCache.get());
283 if (!firstDescSet) {
284 return nullptr;
285 }
286
287 // Continue to allocate & cache the maximum number of sets so they can be easily accessed as
288 // they're needed.
289 for (int i = 1; i < VulkanDescriptorPool::kMaxNumSets ; i++) {
290 auto descSet =
291 add_new_desc_set_to_cache(context, pool, key, fResourceCache.get());
292 if (!descSet) {
293 SKGPU_LOG_W("Descriptor set allocation %d of %d was unsuccessful; no more sets will be"
294 "allocated from this pool.", i, VulkanDescriptorPool::kMaxNumSets);
295 break;
296 }
297 }
298
299 return firstDescSet;
300 }
301
302 namespace {
303
make_ubo_bind_group_key(SkSpan<DescriptorData> requestedDescriptors,SkSpan<BindBufferInfo> bindUniformBufferInfo)304 VulkanResourceProvider::UniformBindGroupKey make_ubo_bind_group_key(
305 SkSpan<DescriptorData> requestedDescriptors,
306 SkSpan<BindBufferInfo> bindUniformBufferInfo) {
307 VulkanResourceProvider::UniformBindGroupKey uniqueKey;
308 {
309 // Each entry in the bind group needs 2 uint32_t in the key:
310 // - buffer's unique ID: 32 bits.
311 // - buffer's binding size: 32 bits.
312 // We need total of 4 entries in the uniform buffer bind group.
313 // Unused entries will be assigned zero values.
314 VulkanResourceProvider::UniformBindGroupKey::Builder builder(&uniqueKey);
315
316 for (uint32_t i = 0; i < VulkanGraphicsPipeline::kNumUniformBuffers; ++i) {
317 builder[2 * i] = 0;
318 builder[2 * i + 1] = 0;
319 }
320
321 for (uint32_t i = 0; i < requestedDescriptors.size(); ++i) {
322 int descriptorBindingIndex = requestedDescriptors[i].fBindingIndex;
323 SkASSERT(SkTo<unsigned long>(descriptorBindingIndex) < bindUniformBufferInfo.size());
324 SkASSERT(SkTo<unsigned long>(descriptorBindingIndex) <
325 VulkanGraphicsPipeline::kNumUniformBuffers);
326 const auto& bindInfo = bindUniformBufferInfo[descriptorBindingIndex];
327 const VulkanBuffer* boundBuffer = static_cast<const VulkanBuffer*>(bindInfo.fBuffer);
328 SkASSERT(boundBuffer);
329 builder[2 * descriptorBindingIndex] = boundBuffer->uniqueID().asUInt();
330 builder[2 * descriptorBindingIndex + 1] = bindInfo.fSize;
331 }
332
333 builder.finish();
334 }
335
336 return uniqueKey;
337 }
338
// Writes each bound uniform buffer into the given descriptor set via vkUpdateDescriptorSets.
// Descriptors whose BindBufferInfo has no buffer are skipped. Note the TODO below: updates are
// intentionally issued one at a time to work around a driver-observed offset issue.
void update_uniform_descriptor_set(SkSpan<DescriptorData> requestedDescriptors,
                                   SkSpan<BindBufferInfo> bindUniformBufferInfo,
                                   VkDescriptorSet descSet,
                                   const VulkanSharedContext* sharedContext) {
    for (size_t i = 0; i < requestedDescriptors.size(); i++) {
        int descriptorBindingIndex = requestedDescriptors[i].fBindingIndex;
        SkASSERT(SkTo<unsigned long>(descriptorBindingIndex) < bindUniformBufferInfo.size());
        const auto& bindInfo = bindUniformBufferInfo[descriptorBindingIndex];
        if (bindInfo.fBuffer) {
#if defined(SK_DEBUG)
            // NOTE(review): this is a function-local static, so it latches the caps of the first
            // sharedContext seen; with multiple contexts the assert may use a stale limit —
            // debug-only, but worth confirming.
            static uint64_t maxBufferRange =
                    sharedContext->caps()->storageBufferSupport()
                            ? sharedContext->vulkanCaps().maxStorageBufferRange()
                            : sharedContext->vulkanCaps().maxUniformBufferRange();
            SkASSERT(bindInfo.fSize <= maxBufferRange);
#endif
            VkDescriptorBufferInfo bufferInfo;
            memset(&bufferInfo, 0, sizeof(VkDescriptorBufferInfo));
            auto vulkanBuffer = static_cast<const VulkanBuffer*>(bindInfo.fBuffer);
            bufferInfo.buffer = vulkanBuffer->vkBuffer();
            bufferInfo.offset = 0; // We always use dynamic ubos so we set the base offset to 0
            bufferInfo.range = bindInfo.fSize;

            VkWriteDescriptorSet writeInfo;
            memset(&writeInfo, 0, sizeof(VkWriteDescriptorSet));
            writeInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            writeInfo.pNext = nullptr;
            writeInfo.dstSet = descSet;
            writeInfo.dstBinding = descriptorBindingIndex;
            writeInfo.dstArrayElement = 0;
            writeInfo.descriptorCount = requestedDescriptors[i].fCount;
            writeInfo.descriptorType = DsTypeEnumToVkDs(requestedDescriptors[i].fType);
            writeInfo.pImageInfo = nullptr;
            writeInfo.pBufferInfo = &bufferInfo;
            writeInfo.pTexelBufferView = nullptr;

            // TODO(b/293925059): Migrate to updating all the uniform descriptors with one driver
            // call. Calling UpdateDescriptorSets once to encapsulate updates to all uniform
            // descriptors would be ideal, but that led to issues with draws where all the UBOs
            // within that set would unexpectedly be assigned the same offset. Updating them one at
            // a time within this loop works in the meantime but is suboptimal.
            VULKAN_CALL(sharedContext->interface(),
                        UpdateDescriptorSets(sharedContext->device(),
                                             /*descriptorWriteCount=*/1,
                                             &writeInfo,
                                             /*descriptorCopyCount=*/0,
                                             /*pDescriptorCopies=*/nullptr));
        }
    }
}
389
390 } // anonymous namespace
391
findOrCreateUniformBuffersDescriptorSet(SkSpan<DescriptorData> requestedDescriptors,SkSpan<BindBufferInfo> bindUniformBufferInfo)392 sk_sp<VulkanDescriptorSet> VulkanResourceProvider::findOrCreateUniformBuffersDescriptorSet(
393 SkSpan<DescriptorData> requestedDescriptors,
394 SkSpan<BindBufferInfo> bindUniformBufferInfo) {
395 SkASSERT(requestedDescriptors.size() <= VulkanGraphicsPipeline::kNumUniformBuffers);
396
397 auto key = make_ubo_bind_group_key(requestedDescriptors, bindUniformBufferInfo);
398 auto* existingDescSet = fUniformBufferDescSetCache.find(key);
399 if (existingDescSet) {
400 return *existingDescSet;
401 }
402 sk_sp<VulkanDescriptorSet> newDS = this->findOrCreateDescriptorSet(requestedDescriptors);
403 if (!newDS) {
404 return nullptr;
405 }
406
407 update_uniform_descriptor_set(requestedDescriptors,
408 bindUniformBufferInfo,
409 *newDS->descriptorSet(),
410 this->vulkanSharedContext());
411 return *fUniformBufferDescSetCache.insert(key, newDS);
412 }
413
414
findOrCreateRenderPassWithKnownKey(const RenderPassDesc & renderPassDesc,bool compatibleOnly,const GraphiteResourceKey & rpKey)415 sk_sp<VulkanRenderPass> VulkanResourceProvider::findOrCreateRenderPassWithKnownKey(
416 const RenderPassDesc& renderPassDesc,
417 bool compatibleOnly,
418 const GraphiteResourceKey& rpKey) {
419 if (Resource* resource =
420 fResourceCache->findAndRefResource(rpKey, skgpu::Budgeted::kYes)) {
421 return sk_sp<VulkanRenderPass>(static_cast<VulkanRenderPass*>(resource));
422 }
423
424 sk_sp<VulkanRenderPass> renderPass =
425 VulkanRenderPass::MakeRenderPass(this->vulkanSharedContext(),
426 renderPassDesc,
427 compatibleOnly);
428 if (!renderPass) {
429 return nullptr;
430 }
431
432 renderPass->setKey(rpKey);
433 fResourceCache->insertResource(renderPass.get());
434
435 return renderPass;
436 }
437
// Convenience overload that derives the cache key from the RenderPassDesc and defers to the
// known-key variant.
sk_sp<VulkanRenderPass> VulkanResourceProvider::findOrCreateRenderPass(
        const RenderPassDesc& renderPassDesc, bool compatibleOnly) {
    GraphiteResourceKey rpKey = VulkanRenderPass::MakeRenderPassKey(renderPassDesc, compatibleOnly);

    return this->findOrCreateRenderPassWithKnownKey(renderPassDesc, compatibleOnly, rpKey);
}
444
pipelineCache()445 VkPipelineCache VulkanResourceProvider::pipelineCache() {
446 if (fPipelineCache == VK_NULL_HANDLE) {
447 VkPipelineCacheCreateInfo createInfo;
448 memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
449 createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
450 createInfo.pNext = nullptr;
451 createInfo.flags = 0;
452 createInfo.initialDataSize = 0;
453 createInfo.pInitialData = nullptr;
454 VkResult result;
455 VULKAN_CALL_RESULT(this->vulkanSharedContext(),
456 result,
457 CreatePipelineCache(this->vulkanSharedContext()->device(),
458 &createInfo,
459 nullptr,
460 &fPipelineCache));
461 if (VK_SUCCESS != result) {
462 fPipelineCache = VK_NULL_HANDLE;
463 }
464 }
465 return fPipelineCache;
466 }
467
createFramebuffer(const VulkanSharedContext * context,const skia_private::TArray<VkImageView> & attachmentViews,const VulkanRenderPass & renderPass,const int width,const int height)468 sk_sp<VulkanFramebuffer> VulkanResourceProvider::createFramebuffer(
469 const VulkanSharedContext* context,
470 const skia_private::TArray<VkImageView>& attachmentViews,
471 const VulkanRenderPass& renderPass,
472 const int width,
473 const int height) {
474 // TODO: Consider caching these in the future. If we pursue that, it may make more sense to
475 // use a compatible renderpass rather than a full one to make each frame buffer more versatile.
476 VkFramebufferCreateInfo framebufferInfo;
477 memset(&framebufferInfo, 0, sizeof(VkFramebufferCreateInfo));
478 framebufferInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
479 framebufferInfo.pNext = nullptr;
480 framebufferInfo.flags = 0;
481 framebufferInfo.renderPass = renderPass.renderPass();
482 framebufferInfo.attachmentCount = attachmentViews.size();
483 framebufferInfo.pAttachments = attachmentViews.begin();
484 framebufferInfo.width = width;
485 framebufferInfo.height = height;
486 framebufferInfo.layers = 1;
487 return VulkanFramebuffer::Make(context, framebufferInfo);
488 }
489
// Destroys the VkImage backing a client-owned BackendTexture and frees its memory.
void VulkanResourceProvider::onDeleteBackendTexture(const BackendTexture& texture) {
    SkASSERT(texture.isValid());
    SkASSERT(texture.backend() == BackendApi::kVulkan);

    // Destroy the image handle first, then release its memory below.
    VULKAN_CALL(this->vulkanSharedContext()->interface(),
                DestroyImage(this->vulkanSharedContext()->device(),
                             BackendTextures::GetVkImage(texture),
                             /*VkAllocationCallbacks=*/nullptr));

    VulkanAlloc alloc = BackendTextures::GetMemoryAlloc(texture);
    // Free the image memory used for the BackendTexture's VkImage.
    //
    // How we do this is dependent upon on how the image was allocated (via the memory allocator or
    // with a direct call to the Vulkan driver) . If the VulkanAlloc's fBackendMemory is != 0, then
    // that means the allocator was used. Otherwise, a direct driver call was used and we should
    // free the VkDeviceMemory (fMemory).
    if (alloc.fBackendMemory) {
        skgpu::VulkanMemory::FreeImageMemory(this->vulkanSharedContext()->memoryAllocator(), alloc);
    } else {
        SkASSERT(alloc.fMemory != VK_NULL_HANDLE);
        VULKAN_CALL(this->vulkanSharedContext()->interface(),
                    FreeMemory(this->vulkanSharedContext()->device(), alloc.fMemory, nullptr));
    }
}
514
// Returns a ycbcr conversion compatible with ycbcrInfo, sharing instances via the resource
// cache. Returns nullptr if the info is invalid or conversion creation fails.
sk_sp<VulkanYcbcrConversion> VulkanResourceProvider::findOrCreateCompatibleYcbcrConversion(
        const VulkanYcbcrConversionInfo& ycbcrInfo) const {
    if (!ycbcrInfo.isValid()) {
        return nullptr;
    }
    GraphiteResourceKey ycbcrConversionKey =
            VulkanYcbcrConversion::MakeYcbcrConversionKey(this->vulkanSharedContext(), ycbcrInfo);

    // Reuse a cached conversion whenever one with a matching key exists.
    if (Resource* resource = fResourceCache->findAndRefResource(ycbcrConversionKey,
                                                                skgpu::Budgeted::kYes)) {
        return sk_sp<VulkanYcbcrConversion>(static_cast<VulkanYcbcrConversion*>(resource));
    }

    auto ycbcrConversion = VulkanYcbcrConversion::Make(this->vulkanSharedContext(), ycbcrInfo);
    if (!ycbcrConversion) {
        return nullptr;
    }

    // Cache the new conversion so later textures/samplers can share it.
    ycbcrConversion->setKey(ycbcrConversionKey);
    fResourceCache->insertResource(ycbcrConversion.get());

    return ycbcrConversion;
}
538
findOrCreateLoadMSAAPipeline(const RenderPassDesc & renderPassDesc)539 sk_sp<VulkanGraphicsPipeline> VulkanResourceProvider::findOrCreateLoadMSAAPipeline(
540 const RenderPassDesc& renderPassDesc) {
541
542 if (!renderPassDesc.fColorResolveAttachment.fTextureInfo.isValid() ||
543 !renderPassDesc.fColorAttachment.fTextureInfo.isValid()) {
544 SKGPU_LOG_E("Loading MSAA from resolve texture requires valid color & resolve attachment");
545 return nullptr;
546 }
547
548 // Check to see if we already have a suitable pipeline that we can use.
549 GraphiteResourceKey renderPassKey =
550 VulkanRenderPass::MakeRenderPassKey(renderPassDesc, /*compatibleOnly=*/true);
551 for (int i = 0; i < fLoadMSAAPipelines.size(); i++) {
552 if (renderPassKey == fLoadMSAAPipelines.at(i).first) {
553 return fLoadMSAAPipelines.at(i).second;
554 }
555 }
556
557 // If any of the load MSAA pipeline creation structures are null then we need to initialize
558 // those before proceeding. If the creation of one of them fails, all are assigned to null, so
559 // we only need to check one of the structures.
560 if (fMSAALoadVertShaderModule == VK_NULL_HANDLE) {
561 SkASSERT(fMSAALoadFragShaderModule == VK_NULL_HANDLE &&
562 fMSAALoadPipelineLayout == VK_NULL_HANDLE);
563 if (!VulkanGraphicsPipeline::InitializeMSAALoadPipelineStructs(
564 this->vulkanSharedContext(),
565 &fMSAALoadVertShaderModule,
566 &fMSAALoadFragShaderModule,
567 &fMSAALoadShaderStageInfo[0],
568 &fMSAALoadPipelineLayout)) {
569 SKGPU_LOG_E("Failed to initialize MSAA load pipeline creation structure(s)");
570 return nullptr;
571 }
572 }
573
574 sk_sp<VulkanRenderPass> compatibleRenderPass =
575 this->findOrCreateRenderPassWithKnownKey(renderPassDesc,
576 /*compatibleOnly=*/true,
577 renderPassKey);
578 if (!compatibleRenderPass) {
579 SKGPU_LOG_E("Failed to make compatible render pass for loading MSAA");
580 }
581
582 sk_sp<VulkanGraphicsPipeline> pipeline = VulkanGraphicsPipeline::MakeLoadMSAAPipeline(
583 this->vulkanSharedContext(),
584 fMSAALoadVertShaderModule,
585 fMSAALoadFragShaderModule,
586 &fMSAALoadShaderStageInfo[0],
587 fMSAALoadPipelineLayout,
588 compatibleRenderPass,
589 this->pipelineCache(),
590 renderPassDesc.fColorAttachment.fTextureInfo);
591
592 if (!pipeline) {
593 SKGPU_LOG_E("Failed to create MSAA load pipeline");
594 return nullptr;
595 }
596
597 fLoadMSAAPipelines.push_back(std::make_pair(renderPassKey, pipeline));
598 return pipeline;
599 }
600
601 #ifdef SK_BUILD_FOR_ANDROID
602
onCreateBackendTexture(AHardwareBuffer * hardwareBuffer,bool isRenderable,bool isProtectedContent,SkISize dimensions,bool fromAndroidWindow) const603 BackendTexture VulkanResourceProvider::onCreateBackendTexture(AHardwareBuffer* hardwareBuffer,
604 bool isRenderable,
605 bool isProtectedContent,
606 SkISize dimensions,
607 bool fromAndroidWindow) const {
608
609 const VulkanSharedContext* vkContext = this->vulkanSharedContext();
610 VkDevice device = vkContext->device();
611 const VulkanCaps& vkCaps = vkContext->vulkanCaps();
612
613 VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
614 VkAndroidHardwareBufferPropertiesANDROID hwbProps;
615 if (!skgpu::GetAHardwareBufferProperties(
616 &hwbFormatProps, &hwbProps, vkContext->interface(), hardwareBuffer, device)) {
617 return {};
618 }
619
620 bool importAsExternalFormat = hwbFormatProps.format == VK_FORMAT_UNDEFINED;
621
622 // Start to assemble VulkanTextureInfo which is needed later on to create the VkImage but can
623 // sooner help us query VulkanCaps for certain format feature support.
624 // TODO: Allow client to pass in tiling mode. For external formats, this is required to be
625 // optimal. For AHB that have a known Vulkan format, we can query VulkanCaps to determine if
626 // optimal is a valid decision given the format features.
627 VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;
628 VkImageCreateFlags imgCreateflags = isProtectedContent ? VK_IMAGE_CREATE_PROTECTED_BIT : 0;
629 VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT;
630 // When importing as an external format the image usage can only be VK_IMAGE_USAGE_SAMPLED_BIT.
631 if (!importAsExternalFormat) {
632 usageFlags |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
633 if (isRenderable) {
634 // Renderable attachments can be used as input attachments if we are loading from MSAA.
635 usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
636 }
637 }
638 VulkanTextureInfo vkTexInfo { VK_SAMPLE_COUNT_1_BIT,
639 Mipmapped::kNo,
640 imgCreateflags,
641 hwbFormatProps.format,
642 tiling,
643 usageFlags,
644 VK_SHARING_MODE_EXCLUSIVE,
645 VK_IMAGE_ASPECT_COLOR_BIT,
646 VulkanYcbcrConversionInfo() };
647
648 if (isRenderable && (importAsExternalFormat || !vkCaps.isRenderable(vkTexInfo))) {
649 SKGPU_LOG_W("Renderable texture requested from an AHardwareBuffer which uses a VkFormat "
650 "that Skia cannot render to (VkFormat: %d).\n", hwbFormatProps.format);
651 return {};
652 }
653
654 if (!importAsExternalFormat && (!vkCaps.isTransferSrc(vkTexInfo) ||
655 !vkCaps.isTransferDst(vkTexInfo) ||
656 !vkCaps.isTexturable(vkTexInfo))) {
657 if (isRenderable) {
658 SKGPU_LOG_W("VkFormat %d is either unfamiliar to Skia or doesn't support the necessary"
659 " format features. Because a renerable texture was requested, we cannot "
660 "fall back to importing with an external format.\n", hwbFormatProps.format);
661 return {};
662 }
663 // If the VkFormat does not support the features we need, then import as an external format.
664 importAsExternalFormat = true;
665 // If we use VkExternalFormatANDROID with an externalFormat != 0, then format must =
666 // VK_FORMAT_UNDEFINED.
667 vkTexInfo.fFormat = VK_FORMAT_UNDEFINED;
668 vkTexInfo.fImageUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT;
669 }
670
671 VulkanYcbcrConversionInfo ycbcrInfo;
672 VkExternalFormatANDROID externalFormat;
673 externalFormat.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
674 externalFormat.pNext = nullptr;
675 externalFormat.externalFormat = 0; // If this is zero it is as if we aren't using this struct.
676 if (importAsExternalFormat) {
677 GetYcbcrConversionInfoFromFormatProps(&ycbcrInfo, hwbFormatProps);
678 if (!ycbcrInfo.isValid()) {
679 SKGPU_LOG_W("Failed to create valid YCbCr conversion information from hardware buffer"
680 "format properties.\n");
681 return {};
682 }
683 vkTexInfo.fYcbcrConversionInfo = ycbcrInfo;
684 externalFormat.externalFormat = hwbFormatProps.externalFormat;
685 }
686 const VkExternalMemoryImageCreateInfo externalMemoryImageInfo{
687 VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, // sType
688 &externalFormat, // pNext
689 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, // handleTypes
690 };
691
692 SkASSERT(!(vkTexInfo.fFlags & VK_IMAGE_CREATE_PROTECTED_BIT) ||
693 fSharedContext->isProtected() == Protected::kYes);
694
695 const VkImageCreateInfo imageCreateInfo = {
696 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // sType
697 &externalMemoryImageInfo, // pNext
698 vkTexInfo.fFlags, // VkImageCreateFlags
699 VK_IMAGE_TYPE_2D, // VkImageType
700 vkTexInfo.fFormat, // VkFormat
701 { (uint32_t)dimensions.fWidth, (uint32_t)dimensions.fHeight, 1 }, // VkExtent3D
702 1, // mipLevels
703 1, // arrayLayers
704 VK_SAMPLE_COUNT_1_BIT, // samples
705 vkTexInfo.fImageTiling, // VkImageTiling
706 vkTexInfo.fImageUsageFlags, // VkImageUsageFlags
707 vkTexInfo.fSharingMode, // VkSharingMode
708 0, // queueFamilyCount
709 nullptr, // pQueueFamilyIndices
710 VK_IMAGE_LAYOUT_UNDEFINED, // initialLayout
711 };
712
713 VkResult result;
714 VkImage image;
715 result = VULKAN_CALL(vkContext->interface(),
716 CreateImage(device, &imageCreateInfo, nullptr, &image));
717 if (result != VK_SUCCESS) {
718 return {};
719 }
720
721 const VkPhysicalDeviceMemoryProperties2& phyDevMemProps =
722 vkContext->vulkanCaps().physicalDeviceMemoryProperties2();
723 VulkanAlloc alloc;
724 if (!AllocateAndBindImageMemory(&alloc, image, phyDevMemProps, hwbProps, hardwareBuffer,
725 vkContext->interface(), device)) {
726 VULKAN_CALL(vkContext->interface(), DestroyImage(device, image, nullptr));
727 return {};
728 }
729
730 return BackendTextures::MakeVulkan(dimensions,
731 vkTexInfo,
732 VK_IMAGE_LAYOUT_UNDEFINED,
733 VK_QUEUE_FAMILY_FOREIGN_EXT,
734 image,
735 alloc);
736 }
737
738 #endif // SK_BUILD_FOR_ANDROID
739
740 } // namespace skgpu::graphite
741