/*
 * Copyright 2022 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/gpu/graphite/vk/VulkanGraphiteUtils.h"
#include "src/gpu/graphite/vk/VulkanGraphiteUtilsPriv.h"

#include "include/gpu/ShaderErrorHandler.h"
#include "include/gpu/graphite/Context.h"
#include "include/gpu/vk/VulkanBackendContext.h"
#include "src/core/SkTraceEvent.h"
#include "src/gpu/graphite/ContextPriv.h"
#include "src/gpu/graphite/vk/VulkanQueueManager.h"
#include "src/gpu/graphite/vk/VulkanSampler.h"
#include "src/gpu/graphite/vk/VulkanSharedContext.h"
#include "src/sksl/SkSLProgramSettings.h"

namespace skgpu::graphite::ContextFactory {

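// Builds a Graphite Context on top of the client's Vulkan objects (instance, device, queue,
// etc.) supplied via VulkanBackendContext. Returns nullptr if the shared context or the queue
// manager cannot be created.
//
// Typical client usage (a sketch; assumes `backendContext` has already been populated with
// valid Vulkan handles):
//
//     skgpu::graphite::ContextOptions options;
//     std::unique_ptr<skgpu::graphite::Context> context =
//             skgpu::graphite::ContextFactory::MakeVulkan(backendContext, options);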
std::unique_ptr<Context> MakeVulkan(const VulkanBackendContext& backendContext,
                                    const ContextOptions& options) {
    sk_sp<SharedContext> sharedContext = VulkanSharedContext::Make(backendContext, options);
    if (!sharedContext) {
        return nullptr;
    }

    std::unique_ptr<QueueManager> queueManager(new VulkanQueueManager(backendContext.fQueue,
                                                                      sharedContext.get()));
    if (!queueManager) {
        return nullptr;
    }

    return ContextCtorAccessor::MakeContext(std::move(sharedContext),
                                            std::move(queueManager),
                                            options);
}

} // namespace skgpu::graphite::ContextFactory

namespace skgpu::graphite {

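// Wraps an already-compiled SPIR-V binary (carried in a std::string, so codeSize is in bytes)
// in a VkShaderModule. Returns VK_NULL_HANDLE if module creation fails. The stage parameter is
// currently unused here.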
VkShaderModule createVulkanShaderModule(const VulkanSharedContext* context,
                                        const std::string& spirv,
                                        VkShaderStageFlagBits stage) {
    TRACE_EVENT0("skia.shaders", "InstallVkShaderModule");
    VkShaderModuleCreateInfo moduleCreateInfo;
    memset(&moduleCreateInfo, 0, sizeof(VkShaderModuleCreateInfo));
    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    moduleCreateInfo.pNext = nullptr;
    moduleCreateInfo.flags = 0;
    moduleCreateInfo.codeSize = spirv.size();
    moduleCreateInfo.pCode = (const uint32_t*)spirv.c_str();

    VkShaderModule shaderModule;
    VkResult result;
    VULKAN_CALL_RESULT(context,
                       result,
                       CreateShaderModule(context->device(),
                                          &moduleCreateInfo,
                                          /*const VkAllocationCallbacks*=*/nullptr,
                                          &shaderModule));
    if (result != VK_SUCCESS) {
        SKGPU_LOG_E("Failed to create VkShaderModule");
        return VK_NULL_HANDLE;
    }
    return shaderModule;
}

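// Translates a span of backend-agnostic DescriptorData into VkDescriptorSetLayoutBindings and
// creates a VkDescriptorSetLayout from them, written to *outLayout. Entries with a count of
// zero are skipped; on failure, *outLayout is set to VK_NULL_HANDLE.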
void DescriptorDataToVkDescSetLayout(const VulkanSharedContext* ctxt,
                                     const SkSpan<DescriptorData>& requestedDescriptors,
                                     VkDescriptorSetLayout* outLayout) {
    skia_private::STArray<kDescriptorTypeCount, VkDescriptorSetLayoutBinding> bindingLayouts;
    for (size_t i = 0; i < requestedDescriptors.size(); i++) {
        if (requestedDescriptors[i].fCount != 0) {
            const DescriptorData& currDescriptor = requestedDescriptors[i];
            VkDescriptorSetLayoutBinding& layoutBinding = bindingLayouts.push_back();
            memset(&layoutBinding, 0, sizeof(VkDescriptorSetLayoutBinding));
            layoutBinding.binding = currDescriptor.fBindingIndex;
            layoutBinding.descriptorType = DsTypeEnumToVkDs(currDescriptor.fType);
            layoutBinding.descriptorCount = currDescriptor.fCount;
            layoutBinding.stageFlags =
                    PipelineStageFlagsToVkShaderStageFlags(currDescriptor.fPipelineStageFlags);
            layoutBinding.pImmutableSamplers = currDescriptor.fImmutableSampler
                    ? (static_cast<const VulkanSampler*>(
                              currDescriptor.fImmutableSampler))->constVkSamplerPtr()
                    : nullptr;
        }
    }

    VkDescriptorSetLayoutCreateInfo layoutCreateInfo;
    memset(&layoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
    layoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
    layoutCreateInfo.pNext = nullptr;
    layoutCreateInfo.flags = 0;
    layoutCreateInfo.bindingCount = bindingLayouts.size();
    // Use data() rather than &front() so an empty binding list is still valid.
    layoutCreateInfo.pBindings = bindingLayouts.data();

    VkResult result;
    VULKAN_CALL_RESULT(
            ctxt,
            result,
            CreateDescriptorSetLayout(ctxt->device(), &layoutCreateInfo, nullptr, outLayout));
    if (result != VK_SUCCESS) {
        SKGPU_LOG_E("Failed to create VkDescriptorSetLayout");
        // Write through the out-param; assigning to the local pointer would be lost.
        *outLayout = VK_NULL_HANDLE;
    }
}

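// Maps Graphite's backend-agnostic DescriptorType onto VkDescriptorType. Note that the buffer
// cases use the *_DYNAMIC descriptor types, which take their offsets at bind time.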
VkDescriptorType DsTypeEnumToVkDs(DescriptorType type) {
    switch (type) {
        case DescriptorType::kUniformBuffer:
            return VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
        case DescriptorType::kTextureSampler:
            return VK_DESCRIPTOR_TYPE_SAMPLER;
        case DescriptorType::kTexture:
            return VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
        case DescriptorType::kCombinedTextureSampler:
            return VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
        case DescriptorType::kStorageBuffer:
            return VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
        case DescriptorType::kInputAttachment:
            return VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
    }
    SkUNREACHABLE;
}

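// Allowlist of the VkFormats this backend recognizes; all other formats are reported as
// unsupported.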
bool vkFormatIsSupported(VkFormat format) {
    switch (format) {
        case VK_FORMAT_R8G8B8A8_UNORM:
        case VK_FORMAT_B8G8R8A8_UNORM:
        case VK_FORMAT_R8G8B8A8_SRGB:
        case VK_FORMAT_R8G8B8_UNORM:
        case VK_FORMAT_R8G8_UNORM:
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
        case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
        case VK_FORMAT_R5G6B5_UNORM_PACK16:
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
        case VK_FORMAT_R8_UNORM:
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
        case VK_FORMAT_R16G16B16A16_SFLOAT:
        case VK_FORMAT_R16_SFLOAT:
        case VK_FORMAT_R16_UNORM:
        case VK_FORMAT_R16G16_UNORM:
        case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
        case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
        case VK_FORMAT_R16G16B16A16_UNORM:
        case VK_FORMAT_R16G16_SFLOAT:
        case VK_FORMAT_S8_UINT:
        case VK_FORMAT_D16_UNORM:
        case VK_FORMAT_D32_SFLOAT:
        case VK_FORMAT_D24_UNORM_S8_UINT:
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return true;
        default:
            return false;
    }
}

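// Converts Graphite's PipelineStageFlags bitmask into the equivalent VkShaderStageFlags,
// OR-ing in one Vulkan stage bit per set Graphite flag.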
VkShaderStageFlags PipelineStageFlagsToVkShaderStageFlags(
        SkEnumBitMask<PipelineStageFlags> stageFlags) {
    VkShaderStageFlags vkStageFlags = 0;
    if (stageFlags & PipelineStageFlags::kVertexShader) {
        vkStageFlags |= VK_SHADER_STAGE_VERTEX_BIT;
    }
    if (stageFlags & PipelineStageFlags::kFragmentShader) {
        vkStageFlags |= VK_SHADER_STAGE_FRAGMENT_BIT;
    }
    if (stageFlags & PipelineStageFlags::kCompute) {
        vkStageFlags |= VK_SHADER_STAGE_COMPUTE_BIT;
    }
    return vkStageFlags;
}

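// Packs a VulkanYcbcrConversionInfo into a fixed number of uint32s so it can be folded into a
// sampler description key (see SamplerDesc). The static_assert below guards that all of the
// non-format fields fit within a single 32-bit word.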
namespace ycbcrPackaging {
uint32_t nonFormatInfoAsUInt32(const VulkanYcbcrConversionInfo& conversionInfo) {
    static_assert(kComponentAShift + kComponentBits <= 32);

    SkASSERT(conversionInfo.fYcbcrModel                  < (1u << kYcbcrModelBits        ));
    SkASSERT(conversionInfo.fYcbcrRange                  < (1u << kYcbcrRangeBits        ));
    SkASSERT(conversionInfo.fXChromaOffset               < (1u << kXChromaOffsetBits     ));
    SkASSERT(conversionInfo.fYChromaOffset               < (1u << kYChromaOffsetBits     ));
    SkASSERT(conversionInfo.fChromaFilter                < (1u << kChromaFilterBits      ));
    SkASSERT(conversionInfo.fForceExplicitReconstruction < (1u << kForceExplicitReconBits));
    SkASSERT(conversionInfo.fComponents.r                < (1u << kComponentBits         ));
    SkASSERT(conversionInfo.fComponents.g                < (1u << kComponentBits         ));
    SkASSERT(conversionInfo.fComponents.b                < (1u << kComponentBits         ));
    SkASSERT(conversionInfo.fComponents.a                < (1u << kComponentBits         ));

    bool usesExternalFormat = conversionInfo.fFormat == VK_FORMAT_UNDEFINED;

    return (((uint32_t)(usesExternalFormat                         ) << kUsesExternalFormatShift) |
            ((uint32_t)(conversionInfo.fYcbcrModel                 ) << kYcbcrModelShift        ) |
            ((uint32_t)(conversionInfo.fYcbcrRange                 ) << kYcbcrRangeShift        ) |
            ((uint32_t)(conversionInfo.fXChromaOffset              ) << kXChromaOffsetShift     ) |
            ((uint32_t)(conversionInfo.fYChromaOffset              ) << kYChromaOffsetShift     ) |
            ((uint32_t)(conversionInfo.fChromaFilter               ) << kChromaFilterShift      ) |
            ((uint32_t)(conversionInfo.fForceExplicitReconstruction) << kForceExplicitReconShift) |
            ((uint32_t)(conversionInfo.fComponents.r               ) << kComponentRShift        ) |
            ((uint32_t)(conversionInfo.fComponents.g               ) << kComponentGShift        ) |
            ((uint32_t)(conversionInfo.fComponents.b               ) << kComponentBShift        ) |
            ((uint32_t)(conversionInfo.fComponents.a               ) << kComponentAShift        ));
}

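// Returns how many uint32s a sampler key needs to describe this conversion: zero when the
// conversion is invalid/unused, and otherwise a count that depends on whether the conversion
// uses a known VkFormat or an external format.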
int numInt32sNeeded(const VulkanYcbcrConversionInfo& conversionInfo) {
    if (!conversionInfo.isValid()) {
        return 0;
    }
    return conversionInfo.fFormat == VK_FORMAT_UNDEFINED ? SamplerDesc::kInt32sNeededExternalFormat
                                                         : SamplerDesc::kInt32sNeededKnownFormat;
}
} // namespace ycbcrPackaging

} // namespace skgpu::graphite