/*
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: MIT
 *
 * based in part on anv and radv which are:
 * Copyright © 2015 Intel Corporation
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 */

#include "vn_physical_device.h"

#include <stdio.h>

#include "git_sha1.h"
#include "util/mesa-sha1.h"
#include "venus-protocol/vn_protocol_driver_device.h"
#include "vk_android.h"

#include "vn_android.h"
#include "vn_instance.h"

#define IMAGE_FORMAT_CACHE_MAX_ENTRIES 100

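/**
 * Compute the index of extension `ext` within `tbl` (a
 * vk_device_extension_table) from the offset of its boolean member.
 */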
#define VN_EXTENSION_TABLE_INDEX(tbl, ext)                                   \
   ((const bool *)((const void *)(&(tbl)) +                                  \
                   offsetof(__typeof__(tbl), ext)) -                         \
    (tbl).extensions)

/** Add `elem` to the pNext chain of `head`. */
#define VN_ADD_PNEXT(head, s_type, elem)                                     \
   do {                                                                      \
      (elem).sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_##s_type;             \
      (elem).pNext = (head).pNext;                                           \
      (head).pNext = &(elem);                                                \
   } while (0)

/**
 * If the renderer supports the extension, add `elem` to the pNext chain of
 * `head`.
 */
#define VN_ADD_PNEXT_EXT(head, s_type, elem, ext_cond)                       \
   do {                                                                      \
      if (ext_cond)                                                          \
         VN_ADD_PNEXT((head), s_type, (elem));                               \
   } while (0)

/**
 * Set member in core feature/property struct to value. (This provides visual
 * parity with VN_SET_CORE_FIELD).
 */
#define VN_SET_CORE_VALUE(core_struct, member, val)                          \
   do {                                                                      \
      (core_struct)->member = (val);                                         \
   } while (0)

/** Copy member into core feature/property struct from extension struct. */
#define VN_SET_CORE_FIELD(core_struct, member, ext_struct)                   \
   VN_SET_CORE_VALUE((core_struct), member, (ext_struct).member)

/**
 * Copy array member into core feature/property struct from extension struct.
 */
#define VN_SET_CORE_ARRAY(core_struct, member, ext_struct)                   \
   do {                                                                      \
      memcpy((core_struct)->member, (ext_struct).member,                     \
             sizeof((core_struct)->member));                                 \
   } while (0)

/**
 * Copy vk struct members to common vk properties.
 */
#define VN_SET_VK_PROPS(vk_props, vk_struct)                                 \
   do {                                                                      \
      vk_set_physical_device_properties_struct(                              \
         (vk_props), (const VkBaseInStructure *)(vk_struct));                \
   } while (0)

/**
 * Copy vk struct members to common vk properties if extension is supported.
 */
#define VN_SET_VK_PROPS_EXT(vk_props, vk_struct, ext_cond)                   \
   do {                                                                      \
      if (ext_cond)                                                          \
         VN_SET_VK_PROPS(vk_props, vk_struct);                               \
   } while (0)

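/**
 * Query the renderer for supported features, chaining the per-version and
 * per-extension feature structs the renderer can fill, then mask out what
 * Venus cannot support.
 */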
static void
vn_physical_device_init_features(struct vn_physical_device *physical_dev)
{
   const uint32_t renderer_version = physical_dev->renderer_version;
   const struct vk_device_extension_table *exts =
      &physical_dev->renderer_extensions;
   struct vn_ring *ring = physical_dev->instance->ring.ring;
   VkPhysicalDeviceFeatures2 feats2 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
   };
   struct {
      VkPhysicalDeviceFeatures vulkan_1_0;
      VkPhysicalDeviceVulkan11Features vulkan_1_1;
      VkPhysicalDeviceVulkan12Features vulkan_1_2;
      VkPhysicalDeviceVulkan13Features vulkan_1_3;

      /* Vulkan 1.1 */
      VkPhysicalDevice16BitStorageFeatures _16bit_storage;
      VkPhysicalDeviceMultiviewFeatures multiview;
      VkPhysicalDeviceVariablePointersFeatures variable_pointers;
      VkPhysicalDeviceProtectedMemoryFeatures protected_memory;
      VkPhysicalDeviceSamplerYcbcrConversionFeatures sampler_ycbcr_conversion;
      VkPhysicalDeviceShaderDrawParametersFeatures shader_draw_parameters;

      /* Vulkan 1.2 */
      VkPhysicalDevice8BitStorageFeatures _8bit_storage;
      VkPhysicalDeviceShaderAtomicInt64Features shader_atomic_int64;
      VkPhysicalDeviceShaderFloat16Int8Features shader_float16_int8;
      VkPhysicalDeviceDescriptorIndexingFeatures descriptor_indexing;
      VkPhysicalDeviceScalarBlockLayoutFeatures scalar_block_layout;
      VkPhysicalDeviceImagelessFramebufferFeatures imageless_framebuffer;
      VkPhysicalDeviceUniformBufferStandardLayoutFeatures
         uniform_buffer_standard_layout;
      VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures
         shader_subgroup_extended_types;
      VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures
         separate_depth_stencil_layouts;
      VkPhysicalDeviceHostQueryResetFeatures host_query_reset;
      VkPhysicalDeviceTimelineSemaphoreFeatures timeline_semaphore;
      VkPhysicalDeviceBufferDeviceAddressFeatures buffer_device_address;
      VkPhysicalDeviceVulkanMemoryModelFeatures vulkan_memory_model;

      /* Vulkan 1.3 */
      VkPhysicalDeviceDynamicRenderingFeatures dynamic_rendering;
      VkPhysicalDeviceImageRobustnessFeatures image_robustness;
      VkPhysicalDeviceInlineUniformBlockFeatures inline_uniform_block;
      VkPhysicalDeviceMaintenance4Features maintenance4;
      VkPhysicalDevicePipelineCreationCacheControlFeatures
         pipeline_creation_cache_control;
      VkPhysicalDevicePrivateDataFeatures private_data;
      VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures
         shader_demote_to_helper_invocation;
      VkPhysicalDeviceShaderIntegerDotProductFeatures
         shader_integer_dot_product;
      VkPhysicalDeviceShaderTerminateInvocationFeatures
         shader_terminate_invocation;
      VkPhysicalDeviceSynchronization2Features synchronization2;
      VkPhysicalDeviceSubgroupSizeControlFeatures subgroup_size_control;
      VkPhysicalDeviceTextureCompressionASTCHDRFeatures
         texture_compression_astc_hdr;
      VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures
         zero_initialize_workgroup_memory;

      /* Vulkan 1.3: The extensions for the below structs were promoted, but
       * some struct members were omitted from
       * VkPhysicalDeviceVulkan13Features.
       */
      VkPhysicalDevice4444FormatsFeaturesEXT _4444_formats;
      VkPhysicalDeviceExtendedDynamicStateFeaturesEXT extended_dynamic_state;
      VkPhysicalDeviceExtendedDynamicState2FeaturesEXT
         extended_dynamic_state_2;
      VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT texel_buffer_alignment;
      VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT
         ycbcr_2plane_444_formats;

      /* KHR */
      VkPhysicalDeviceFragmentShadingRateFeaturesKHR fragment_shading_rate;
      VkPhysicalDeviceMaintenance5FeaturesKHR maintenance5;
      VkPhysicalDeviceShaderClockFeaturesKHR shader_clock;
      VkPhysicalDeviceShaderExpectAssumeFeaturesKHR expect_assume;

      /* EXT */
      VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT
         attachment_feedback_loop_layout;
      VkPhysicalDeviceBorderColorSwizzleFeaturesEXT border_color_swizzle;
      VkPhysicalDeviceColorWriteEnableFeaturesEXT color_write_enable;
      VkPhysicalDeviceConditionalRenderingFeaturesEXT conditional_rendering;
      VkPhysicalDeviceCustomBorderColorFeaturesEXT custom_border_color;
      VkPhysicalDeviceDepthClipControlFeaturesEXT depth_clip_control;
      VkPhysicalDeviceDepthClipEnableFeaturesEXT depth_clip_enable;
      VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT
         dynamic_rendering_unused_attachments;
      VkPhysicalDeviceExtendedDynamicState3FeaturesEXT
         extended_dynamic_state_3;
      VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT
         fragment_shader_interlock;
      VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT
         graphics_pipeline_library;
      VkPhysicalDeviceImage2DViewOf3DFeaturesEXT image_2d_view_of_3d;
      VkPhysicalDeviceImageViewMinLodFeaturesEXT image_view_min_lod;
      VkPhysicalDeviceIndexTypeUint8FeaturesEXT index_type_uint8;
      VkPhysicalDeviceLineRasterizationFeaturesEXT line_rasterization;
      VkPhysicalDeviceMultiDrawFeaturesEXT multi_draw;
      VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT mutable_descriptor_type;
      VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT non_seamless_cube_map;
      VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT
         primitive_topology_list_restart;
      VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT
         primitives_generated_query;
      VkPhysicalDeviceProvokingVertexFeaturesEXT provoking_vertex;
      VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT
         rasterization_order_attachment_access;
      VkPhysicalDeviceRobustness2FeaturesEXT robustness_2;
      VkPhysicalDeviceTransformFeedbackFeaturesEXT transform_feedback;
      VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT
         vertex_attribute_divisor;
      VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT
         vertex_input_dynamic_state;
   } local_feats;

   /* Clear the struct so that all unqueried features will be VK_FALSE. */
   memset(&local_feats, 0, sizeof(local_feats));

   assert(renderer_version >= VK_API_VERSION_1_1);

   /* clang-format off */

   if (renderer_version >= VK_API_VERSION_1_2) {
      VN_ADD_PNEXT(feats2, VULKAN_1_1_FEATURES, local_feats.vulkan_1_1);
      VN_ADD_PNEXT(feats2, VULKAN_1_2_FEATURES, local_feats.vulkan_1_2);
   } else {
      /* Vulkan 1.1 */
      VN_ADD_PNEXT(feats2, 16BIT_STORAGE_FEATURES, local_feats._16bit_storage);
      VN_ADD_PNEXT(feats2, MULTIVIEW_FEATURES, local_feats.multiview);
      VN_ADD_PNEXT(feats2, PROTECTED_MEMORY_FEATURES, local_feats.protected_memory);
      VN_ADD_PNEXT(feats2, SAMPLER_YCBCR_CONVERSION_FEATURES, local_feats.sampler_ycbcr_conversion);
      VN_ADD_PNEXT(feats2, SHADER_DRAW_PARAMETERS_FEATURES, local_feats.shader_draw_parameters);
      VN_ADD_PNEXT(feats2, VARIABLE_POINTERS_FEATURES, local_feats.variable_pointers);

      /* Vulkan 1.2 */
      VN_ADD_PNEXT_EXT(feats2, 8BIT_STORAGE_FEATURES, local_feats._8bit_storage, exts->KHR_8bit_storage);
      VN_ADD_PNEXT_EXT(feats2, BUFFER_DEVICE_ADDRESS_FEATURES, local_feats.buffer_device_address, exts->KHR_buffer_device_address);
      VN_ADD_PNEXT_EXT(feats2, DESCRIPTOR_INDEXING_FEATURES, local_feats.descriptor_indexing, exts->EXT_descriptor_indexing);
      VN_ADD_PNEXT_EXT(feats2, HOST_QUERY_RESET_FEATURES, local_feats.host_query_reset, exts->EXT_host_query_reset);
      VN_ADD_PNEXT_EXT(feats2, IMAGELESS_FRAMEBUFFER_FEATURES, local_feats.imageless_framebuffer, exts->KHR_imageless_framebuffer);
      VN_ADD_PNEXT_EXT(feats2, SCALAR_BLOCK_LAYOUT_FEATURES, local_feats.scalar_block_layout, exts->EXT_scalar_block_layout);
      VN_ADD_PNEXT_EXT(feats2, SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES, local_feats.separate_depth_stencil_layouts, exts->KHR_separate_depth_stencil_layouts);
      VN_ADD_PNEXT_EXT(feats2, SHADER_ATOMIC_INT64_FEATURES, local_feats.shader_atomic_int64, exts->KHR_shader_atomic_int64);
      VN_ADD_PNEXT_EXT(feats2, SHADER_FLOAT16_INT8_FEATURES, local_feats.shader_float16_int8, exts->KHR_shader_float16_int8);
      VN_ADD_PNEXT_EXT(feats2, SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, local_feats.shader_subgroup_extended_types, exts->KHR_shader_subgroup_extended_types);
      VN_ADD_PNEXT_EXT(feats2, TIMELINE_SEMAPHORE_FEATURES, local_feats.timeline_semaphore, exts->KHR_timeline_semaphore);
      VN_ADD_PNEXT_EXT(feats2, UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES, local_feats.uniform_buffer_standard_layout, exts->KHR_uniform_buffer_standard_layout);
      VN_ADD_PNEXT_EXT(feats2, VULKAN_MEMORY_MODEL_FEATURES, local_feats.vulkan_memory_model, exts->KHR_vulkan_memory_model);
   }

   if (renderer_version >= VK_API_VERSION_1_3) {
      VN_ADD_PNEXT(feats2, VULKAN_1_3_FEATURES, local_feats.vulkan_1_3);
   } else {
      VN_ADD_PNEXT_EXT(feats2, DYNAMIC_RENDERING_FEATURES, local_feats.dynamic_rendering, exts->KHR_dynamic_rendering);
      VN_ADD_PNEXT_EXT(feats2, IMAGE_ROBUSTNESS_FEATURES, local_feats.image_robustness, exts->EXT_image_robustness);
      VN_ADD_PNEXT_EXT(feats2, INLINE_UNIFORM_BLOCK_FEATURES, local_feats.inline_uniform_block, exts->EXT_inline_uniform_block);
      VN_ADD_PNEXT_EXT(feats2, MAINTENANCE_4_FEATURES, local_feats.maintenance4, exts->KHR_maintenance4);
      VN_ADD_PNEXT_EXT(feats2, PIPELINE_CREATION_CACHE_CONTROL_FEATURES, local_feats.pipeline_creation_cache_control, exts->EXT_pipeline_creation_cache_control);
      VN_ADD_PNEXT_EXT(feats2, PRIVATE_DATA_FEATURES, local_feats.private_data, exts->EXT_private_data);
      VN_ADD_PNEXT_EXT(feats2, SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES, local_feats.shader_demote_to_helper_invocation, exts->EXT_shader_demote_to_helper_invocation);
      VN_ADD_PNEXT_EXT(feats2, SHADER_INTEGER_DOT_PRODUCT_FEATURES, local_feats.shader_integer_dot_product, exts->KHR_shader_integer_dot_product);
      VN_ADD_PNEXT_EXT(feats2, SHADER_TERMINATE_INVOCATION_FEATURES, local_feats.shader_terminate_invocation, exts->KHR_shader_terminate_invocation);
      VN_ADD_PNEXT_EXT(feats2, SUBGROUP_SIZE_CONTROL_FEATURES, local_feats.subgroup_size_control, exts->EXT_subgroup_size_control);
      VN_ADD_PNEXT_EXT(feats2, SYNCHRONIZATION_2_FEATURES, local_feats.synchronization2, exts->KHR_synchronization2);
      VN_ADD_PNEXT_EXT(feats2, TEXTURE_COMPRESSION_ASTC_HDR_FEATURES, local_feats.texture_compression_astc_hdr, exts->EXT_texture_compression_astc_hdr);
      VN_ADD_PNEXT_EXT(feats2, ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES, local_feats.zero_initialize_workgroup_memory, exts->KHR_zero_initialize_workgroup_memory);
   }

   /* Vulkan 1.3: The extensions for the below structs were promoted, but some
    * struct members were omitted from VkPhysicalDeviceVulkan13Features.
    */
   VN_ADD_PNEXT_EXT(feats2, 4444_FORMATS_FEATURES_EXT, local_feats._4444_formats, exts->EXT_4444_formats);
   VN_ADD_PNEXT_EXT(feats2, EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT, local_feats.extended_dynamic_state_2, exts->EXT_extended_dynamic_state2);
   VN_ADD_PNEXT_EXT(feats2, EXTENDED_DYNAMIC_STATE_FEATURES_EXT, local_feats.extended_dynamic_state, exts->EXT_extended_dynamic_state);
   VN_ADD_PNEXT_EXT(feats2, TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT, local_feats.texel_buffer_alignment, exts->EXT_texel_buffer_alignment);
   VN_ADD_PNEXT_EXT(feats2, YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT, local_feats.ycbcr_2plane_444_formats, exts->EXT_ycbcr_2plane_444_formats);

   /* KHR */
   VN_ADD_PNEXT_EXT(feats2, FRAGMENT_SHADING_RATE_FEATURES_KHR, local_feats.fragment_shading_rate, exts->KHR_fragment_shading_rate);
   VN_ADD_PNEXT_EXT(feats2, SHADER_CLOCK_FEATURES_KHR, local_feats.shader_clock, exts->KHR_shader_clock);
   VN_ADD_PNEXT_EXT(feats2, SHADER_EXPECT_ASSUME_FEATURES_KHR, local_feats.expect_assume, exts->KHR_shader_expect_assume);
   VN_ADD_PNEXT_EXT(feats2, MAINTENANCE_5_FEATURES_KHR, local_feats.maintenance5, exts->KHR_maintenance5);

   /* EXT */
   VN_ADD_PNEXT_EXT(feats2, ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT, local_feats.attachment_feedback_loop_layout, exts->EXT_attachment_feedback_loop_layout);
   VN_ADD_PNEXT_EXT(feats2, BORDER_COLOR_SWIZZLE_FEATURES_EXT, local_feats.border_color_swizzle, exts->EXT_border_color_swizzle);
   VN_ADD_PNEXT_EXT(feats2, COLOR_WRITE_ENABLE_FEATURES_EXT, local_feats.color_write_enable, exts->EXT_color_write_enable);
   VN_ADD_PNEXT_EXT(feats2, CONDITIONAL_RENDERING_FEATURES_EXT, local_feats.conditional_rendering, exts->EXT_conditional_rendering);
   VN_ADD_PNEXT_EXT(feats2, CUSTOM_BORDER_COLOR_FEATURES_EXT, local_feats.custom_border_color, exts->EXT_custom_border_color);
   VN_ADD_PNEXT_EXT(feats2, DEPTH_CLIP_CONTROL_FEATURES_EXT, local_feats.depth_clip_control, exts->EXT_depth_clip_control);
   VN_ADD_PNEXT_EXT(feats2, DEPTH_CLIP_ENABLE_FEATURES_EXT, local_feats.depth_clip_enable, exts->EXT_depth_clip_enable);
   VN_ADD_PNEXT_EXT(feats2, DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT, local_feats.dynamic_rendering_unused_attachments, exts->EXT_dynamic_rendering_unused_attachments);
   VN_ADD_PNEXT_EXT(feats2, EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT, local_feats.extended_dynamic_state_3, exts->EXT_extended_dynamic_state3);
   VN_ADD_PNEXT_EXT(feats2, FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT, local_feats.fragment_shader_interlock, exts->EXT_fragment_shader_interlock);
   VN_ADD_PNEXT_EXT(feats2, GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT, local_feats.graphics_pipeline_library, exts->EXT_graphics_pipeline_library);
   VN_ADD_PNEXT_EXT(feats2, IMAGE_2D_VIEW_OF_3D_FEATURES_EXT, local_feats.image_2d_view_of_3d, exts->EXT_image_2d_view_of_3d);
   VN_ADD_PNEXT_EXT(feats2, IMAGE_VIEW_MIN_LOD_FEATURES_EXT, local_feats.image_view_min_lod, exts->EXT_image_view_min_lod);
   VN_ADD_PNEXT_EXT(feats2, INDEX_TYPE_UINT8_FEATURES_EXT, local_feats.index_type_uint8, exts->EXT_index_type_uint8);
   VN_ADD_PNEXT_EXT(feats2, LINE_RASTERIZATION_FEATURES_EXT, local_feats.line_rasterization, exts->EXT_line_rasterization);
   VN_ADD_PNEXT_EXT(feats2, MULTI_DRAW_FEATURES_EXT, local_feats.multi_draw, exts->EXT_multi_draw);
   VN_ADD_PNEXT_EXT(feats2, MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT, local_feats.mutable_descriptor_type, exts->EXT_mutable_descriptor_type || exts->VALVE_mutable_descriptor_type);
   VN_ADD_PNEXT_EXT(feats2, NON_SEAMLESS_CUBE_MAP_FEATURES_EXT, local_feats.non_seamless_cube_map, exts->EXT_non_seamless_cube_map);
   VN_ADD_PNEXT_EXT(feats2, PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT, local_feats.primitive_topology_list_restart, exts->EXT_primitive_topology_list_restart);
   VN_ADD_PNEXT_EXT(feats2, PRIMITIVES_GENERATED_QUERY_FEATURES_EXT, local_feats.primitives_generated_query, exts->EXT_primitives_generated_query);
   VN_ADD_PNEXT_EXT(feats2, PROVOKING_VERTEX_FEATURES_EXT, local_feats.provoking_vertex, exts->EXT_provoking_vertex);
   VN_ADD_PNEXT_EXT(feats2, RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT, local_feats.rasterization_order_attachment_access, exts->EXT_rasterization_order_attachment_access);
   VN_ADD_PNEXT_EXT(feats2, ROBUSTNESS_2_FEATURES_EXT, local_feats.robustness_2, exts->EXT_robustness2);
   VN_ADD_PNEXT_EXT(feats2, TRANSFORM_FEEDBACK_FEATURES_EXT, local_feats.transform_feedback, exts->EXT_transform_feedback);
   VN_ADD_PNEXT_EXT(feats2, VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, local_feats.vertex_attribute_divisor, exts->EXT_vertex_attribute_divisor);
   VN_ADD_PNEXT_EXT(feats2, VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT, local_feats.vertex_input_dynamic_state, exts->EXT_vertex_input_dynamic_state);

   /* clang-format on */

   vn_call_vkGetPhysicalDeviceFeatures2(
      ring, vn_physical_device_to_handle(physical_dev), &feats2);

   struct vk_features *feats = &physical_dev->base.base.supported_features;
   vk_set_physical_device_features(feats, &feats2);

   /* Enable features for extensions natively implemented in the Venus
    * driver. See vn_physical_device_get_native_extensions.
    */
   VN_SET_CORE_VALUE(feats, deviceMemoryReport, true);

   /* Disable unsupported ExtendedDynamicState3Features */
   if (exts->EXT_extended_dynamic_state3) {
      /* TODO: Add support for VK_EXT_sample_locations */
      VN_SET_CORE_VALUE(feats, extendedDynamicState3SampleLocationsEnable,
                        false);
      /* TODO: Add support for VK_EXT_blend_operation_advanced */
      VN_SET_CORE_VALUE(feats, extendedDynamicState3ColorBlendAdvanced,
                        false);
      /* VK_NV_* extensions required */
      VN_SET_CORE_VALUE(feats, extendedDynamicState3ViewportWScalingEnable,
                        false);
      VN_SET_CORE_VALUE(feats, extendedDynamicState3ViewportSwizzle, false);
      VN_SET_CORE_VALUE(feats, extendedDynamicState3CoverageToColorEnable,
                        false);
      VN_SET_CORE_VALUE(feats, extendedDynamicState3CoverageToColorLocation,
                        false);
      VN_SET_CORE_VALUE(feats, extendedDynamicState3CoverageModulationMode,
                        false);
      VN_SET_CORE_VALUE(
         feats, extendedDynamicState3CoverageModulationTableEnable, false);
      VN_SET_CORE_VALUE(feats, extendedDynamicState3CoverageModulationTable,
                        false);
      VN_SET_CORE_VALUE(feats, extendedDynamicState3CoverageReductionMode,
                        false);
      VN_SET_CORE_VALUE(
         feats, extendedDynamicState3RepresentativeFragmentTestEnable, false);
      VN_SET_CORE_VALUE(feats, extendedDynamicState3ShadingRateImageEnable,
                        false);
   }
}

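/**
 * Derive deterministic UUIDs: pipelineCacheUUID from the renderer-provided
 * one, deviceUUID from vendorID/deviceID, and driverUUID from the driver
 * name and info strings.
 */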
static void
vn_physical_device_init_uuids(struct vn_physical_device *physical_dev)
{
   struct vk_properties *props = &physical_dev->base.base.properties;
   struct mesa_sha1 sha1_ctx;
   uint8_t sha1[SHA1_DIGEST_LENGTH];

   static_assert(VK_UUID_SIZE <= SHA1_DIGEST_LENGTH, "");

   _mesa_sha1_init(&sha1_ctx);
   _mesa_sha1_update(&sha1_ctx, &props->pipelineCacheUUID,
                     sizeof(props->pipelineCacheUUID));
   _mesa_sha1_final(&sha1_ctx, sha1);

   memcpy(props->pipelineCacheUUID, sha1, VK_UUID_SIZE);

   _mesa_sha1_init(&sha1_ctx);
   _mesa_sha1_update(&sha1_ctx, &props->vendorID, sizeof(props->vendorID));
   _mesa_sha1_update(&sha1_ctx, &props->deviceID, sizeof(props->deviceID));
   _mesa_sha1_final(&sha1_ctx, sha1);

   memcpy(props->deviceUUID, sha1, VK_UUID_SIZE);

   _mesa_sha1_init(&sha1_ctx);
   _mesa_sha1_update(&sha1_ctx, props->driverName, strlen(props->driverName));
   _mesa_sha1_update(&sha1_ctx, props->driverInfo, strlen(props->driverInfo));
   _mesa_sha1_final(&sha1_ctx, sha1);

   memcpy(props->driverUUID, sha1, VK_UUID_SIZE);

   memset(props->deviceLUID, 0, VK_LUID_SIZE);
   props->deviceNodeMask = 0;
   props->deviceLUIDValid = false;
}

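/**
 * Override or clamp renderer-reported properties: the advertised api
 * version, driver version/name/info, device name, and conformance version.
 */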
static void
vn_physical_device_sanitize_properties(
   struct vn_physical_device *physical_dev)
{
   struct vn_instance *instance = physical_dev->instance;
   const struct vk_device_extension_table *exts =
      &physical_dev->renderer_extensions;
   struct vk_properties *props = &physical_dev->base.base.properties;

   const uint32_t version_override = vk_get_version_override();
   if (version_override) {
      props->apiVersion = version_override;
   } else {
      /* cap the advertised api version */
      uint32_t ver = MIN3(props->apiVersion, VN_MAX_API_VERSION,
                          instance->renderer->info.vk_xml_version);
      if (VK_VERSION_PATCH(ver) > VK_VERSION_PATCH(props->apiVersion)) {
         ver =
            ver - VK_VERSION_PATCH(ver) + VK_VERSION_PATCH(props->apiVersion);
      }

      /* Clamp to 1.2 if we disabled VK_KHR_synchronization2, since it is
       * required for 1.3.
       * See vn_physical_device_get_passthrough_extensions()
       */
      if (!physical_dev->base.base.supported_extensions.KHR_synchronization2)
         ver = MIN2(VK_API_VERSION_1_2, ver);

      props->apiVersion = ver;
   }

   /* ANGLE relies on the ARM proprietary driver version for workarounds */
   const char *engine_name = instance->base.base.app_info.engine_name;
   const bool forward_driver_version =
      props->driverID == VK_DRIVER_ID_ARM_PROPRIETARY && engine_name &&
      strcmp(engine_name, "ANGLE") == 0;
   if (!forward_driver_version)
      props->driverVersion = vk_get_driver_version();

   physical_dev->wa_min_fb_align = strstr(props->deviceName, "JSL") ? 128 : 1;

   char device_name[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
   int device_name_len = snprintf(device_name, sizeof(device_name),
                                  "Virtio-GPU Venus (%s)", props->deviceName);
   if (device_name_len >= VK_MAX_PHYSICAL_DEVICE_NAME_SIZE) {
      memcpy(device_name + VK_MAX_PHYSICAL_DEVICE_NAME_SIZE - 5, "...)", 4);
      device_name_len = VK_MAX_PHYSICAL_DEVICE_NAME_SIZE - 1;
   }
   memcpy(props->deviceName, device_name, device_name_len + 1);

   /* store the renderer VkDriverId for implementation-specific workarounds */
   physical_dev->renderer_driver_id = props->driverID;
   VN_SET_CORE_VALUE(props, driverID, VK_DRIVER_ID_MESA_VENUS);

   snprintf(props->driverName, sizeof(props->driverName), "venus");
   snprintf(props->driverInfo, sizeof(props->driverInfo),
            "Mesa " PACKAGE_VERSION MESA_GIT_SHA1);

   VN_SET_CORE_VALUE(props, conformanceVersion.major, 1);
   VN_SET_CORE_VALUE(props, conformanceVersion.minor, 3);
   VN_SET_CORE_VALUE(props, conformanceVersion.subminor, 0);
   VN_SET_CORE_VALUE(props, conformanceVersion.patch, 0);

   vn_physical_device_init_uuids(physical_dev);

   /* Disable unsupported VkPhysicalDeviceFragmentShadingRatePropertiesKHR */
   if (exts->KHR_fragment_shading_rate) {
      /* TODO: Add support for VK_EXT_sample_locations */
      VN_SET_CORE_VALUE(props, fragmentShadingRateWithCustomSampleLocations,
                        false);
   }
}

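/**
 * Query renderer properties via a pNext chain mirroring
 * vn_physical_device_init_features, then fold them into the common
 * vk_properties.
 */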
static void
vn_physical_device_init_properties(struct vn_physical_device *physical_dev)
{
   const uint32_t renderer_version = physical_dev->renderer_version;
   struct vn_instance *instance = physical_dev->instance;
   const struct vn_renderer_info *renderer_info = &instance->renderer->info;
   struct vk_properties *props = &physical_dev->base.base.properties;
   const struct vk_device_extension_table *exts =
      &physical_dev->renderer_extensions;
   VkPhysicalDeviceProperties2 props2 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
   };
   struct {
      /* Vulkan 1.1 */
      VkPhysicalDeviceVulkan11Properties vulkan_1_1;
      VkPhysicalDeviceIDProperties id;
      VkPhysicalDeviceSubgroupProperties subgroup;
      VkPhysicalDevicePointClippingProperties point_clipping;
      VkPhysicalDeviceMultiviewProperties multiview;
      VkPhysicalDeviceProtectedMemoryProperties protected_memory;
      VkPhysicalDeviceMaintenance3Properties maintenance_3;

      /* Vulkan 1.2 */
      VkPhysicalDeviceVulkan12Properties vulkan_1_2;
      VkPhysicalDeviceDriverProperties driver;
      VkPhysicalDeviceFloatControlsProperties float_controls;
      VkPhysicalDeviceDescriptorIndexingProperties descriptor_indexing;
      VkPhysicalDeviceDepthStencilResolveProperties depth_stencil_resolve;
      VkPhysicalDeviceSamplerFilterMinmaxProperties sampler_filter_minmax;
      VkPhysicalDeviceTimelineSemaphoreProperties timeline_semaphore;

      /* Vulkan 1.3 */
      VkPhysicalDeviceVulkan13Properties vulkan_1_3;
      VkPhysicalDeviceInlineUniformBlockProperties inline_uniform_block;
      VkPhysicalDeviceMaintenance4Properties maintenance4;
      VkPhysicalDeviceShaderIntegerDotProductProperties
         shader_integer_dot_product;
      VkPhysicalDeviceSubgroupSizeControlProperties subgroup_size_control;
      VkPhysicalDeviceTexelBufferAlignmentProperties texel_buffer_alignment;

      /* KHR */
      VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor;
      VkPhysicalDeviceFragmentShadingRatePropertiesKHR fragment_shading_rate;

      /* EXT */
      VkPhysicalDeviceConservativeRasterizationPropertiesEXT
         conservative_rasterization;
      VkPhysicalDeviceCustomBorderColorPropertiesEXT custom_border_color;
      VkPhysicalDeviceExtendedDynamicState3PropertiesEXT
         extended_dynamic_state_3;
      VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT
         graphics_pipeline_library;
      VkPhysicalDeviceLineRasterizationPropertiesEXT line_rasterization;
      VkPhysicalDeviceMultiDrawPropertiesEXT multi_draw;
      VkPhysicalDevicePCIBusInfoPropertiesEXT pci_bus_info;
      VkPhysicalDeviceProvokingVertexPropertiesEXT provoking_vertex;
      VkPhysicalDeviceRobustness2PropertiesEXT robustness_2;
      VkPhysicalDeviceTransformFeedbackPropertiesEXT transform_feedback;
      VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT
         vertex_attribute_divisor;
   } local_props;

   /* Clear the structs so all unqueried properties will be well-defined. */
   memset(props, 0, sizeof(*props));
   memset(&local_props, 0, sizeof(local_props));

   assert(renderer_version >= VK_API_VERSION_1_1);

   /* clang-format off */
   if (renderer_version >= VK_API_VERSION_1_2) {
      VN_ADD_PNEXT(props2, VULKAN_1_1_PROPERTIES, local_props.vulkan_1_1);
      VN_ADD_PNEXT(props2, VULKAN_1_2_PROPERTIES, local_props.vulkan_1_2);
   } else {
      /* Vulkan 1.1 */
      VN_ADD_PNEXT(props2, ID_PROPERTIES, local_props.id);
      VN_ADD_PNEXT(props2, MAINTENANCE_3_PROPERTIES, local_props.maintenance_3);
      VN_ADD_PNEXT(props2, MULTIVIEW_PROPERTIES, local_props.multiview);
      VN_ADD_PNEXT(props2, POINT_CLIPPING_PROPERTIES, local_props.point_clipping);
      VN_ADD_PNEXT(props2, PROTECTED_MEMORY_PROPERTIES, local_props.protected_memory);
      VN_ADD_PNEXT(props2, SUBGROUP_PROPERTIES, local_props.subgroup);

      /* Vulkan 1.2 */
      VN_ADD_PNEXT_EXT(props2, DEPTH_STENCIL_RESOLVE_PROPERTIES, local_props.depth_stencil_resolve, exts->KHR_depth_stencil_resolve);
      VN_ADD_PNEXT_EXT(props2, DESCRIPTOR_INDEXING_PROPERTIES, local_props.descriptor_indexing, exts->EXT_descriptor_indexing);
      VN_ADD_PNEXT_EXT(props2, DRIVER_PROPERTIES, local_props.driver, exts->KHR_driver_properties);
      VN_ADD_PNEXT_EXT(props2, FLOAT_CONTROLS_PROPERTIES, local_props.float_controls, exts->KHR_shader_float_controls);
      VN_ADD_PNEXT_EXT(props2, SAMPLER_FILTER_MINMAX_PROPERTIES, local_props.sampler_filter_minmax, exts->EXT_sampler_filter_minmax);
      VN_ADD_PNEXT_EXT(props2, TIMELINE_SEMAPHORE_PROPERTIES, local_props.timeline_semaphore, exts->KHR_timeline_semaphore);
   }

   if (renderer_version >= VK_API_VERSION_1_3) {
      VN_ADD_PNEXT(props2, VULKAN_1_3_PROPERTIES, local_props.vulkan_1_3);
   } else {
      VN_ADD_PNEXT_EXT(props2, INLINE_UNIFORM_BLOCK_PROPERTIES, local_props.inline_uniform_block, exts->EXT_inline_uniform_block);
      VN_ADD_PNEXT_EXT(props2, MAINTENANCE_4_PROPERTIES, local_props.maintenance4, exts->KHR_maintenance4);
      VN_ADD_PNEXT_EXT(props2, SHADER_INTEGER_DOT_PRODUCT_PROPERTIES, local_props.shader_integer_dot_product, exts->KHR_shader_integer_dot_product);
      VN_ADD_PNEXT_EXT(props2, SUBGROUP_SIZE_CONTROL_PROPERTIES, local_props.subgroup_size_control, exts->EXT_subgroup_size_control);
      VN_ADD_PNEXT_EXT(props2, TEXEL_BUFFER_ALIGNMENT_PROPERTIES, local_props.texel_buffer_alignment, exts->EXT_texel_buffer_alignment);
   }

   /* KHR */
   VN_ADD_PNEXT_EXT(props2, FRAGMENT_SHADING_RATE_PROPERTIES_KHR, local_props.fragment_shading_rate, exts->KHR_fragment_shading_rate);
   VN_ADD_PNEXT_EXT(props2, PUSH_DESCRIPTOR_PROPERTIES_KHR, local_props.push_descriptor, exts->KHR_push_descriptor);

   /* EXT */
   VN_ADD_PNEXT_EXT(props2, CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT, local_props.conservative_rasterization, exts->EXT_conservative_rasterization);
   VN_ADD_PNEXT_EXT(props2, CUSTOM_BORDER_COLOR_PROPERTIES_EXT, local_props.custom_border_color, exts->EXT_custom_border_color);
   VN_ADD_PNEXT_EXT(props2, EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT, local_props.extended_dynamic_state_3, exts->EXT_extended_dynamic_state3);
   VN_ADD_PNEXT_EXT(props2, GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT, local_props.graphics_pipeline_library, exts->EXT_graphics_pipeline_library);
   VN_ADD_PNEXT_EXT(props2, LINE_RASTERIZATION_PROPERTIES_EXT, local_props.line_rasterization, exts->EXT_line_rasterization);
   VN_ADD_PNEXT_EXT(props2, MULTI_DRAW_PROPERTIES_EXT, local_props.multi_draw, exts->EXT_multi_draw);
   VN_ADD_PNEXT_EXT(props2, PCI_BUS_INFO_PROPERTIES_EXT, local_props.pci_bus_info, exts->EXT_pci_bus_info);
   VN_ADD_PNEXT_EXT(props2, PROVOKING_VERTEX_PROPERTIES_EXT, local_props.provoking_vertex, exts->EXT_provoking_vertex);
   VN_ADD_PNEXT_EXT(props2, ROBUSTNESS_2_PROPERTIES_EXT, local_props.robustness_2, exts->EXT_robustness2);
   VN_ADD_PNEXT_EXT(props2, TRANSFORM_FEEDBACK_PROPERTIES_EXT, local_props.transform_feedback, exts->EXT_transform_feedback);
   VN_ADD_PNEXT_EXT(props2, VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, local_props.vertex_attribute_divisor, exts->EXT_vertex_attribute_divisor);

   /* clang-format on */

   vn_call_vkGetPhysicalDeviceProperties2(
      instance->ring.ring, vn_physical_device_to_handle(physical_dev),
      &props2);

   /* clang-format off */

   /* Vulkan 1.0 */
   VN_SET_VK_PROPS(props, &props2);

   /* Vulkan 1.1 and 1.2 */
   if (renderer_version >= VK_API_VERSION_1_2) {
      VN_SET_VK_PROPS(props, &local_props.vulkan_1_1);
      VN_SET_VK_PROPS(props, &local_props.vulkan_1_2);
   } else {
      /* Vulkan 1.1 */
      VN_SET_VK_PROPS(props, &local_props.id);
      VN_SET_VK_PROPS(props, &local_props.subgroup);
      VN_SET_VK_PROPS(props, &local_props.point_clipping);
      VN_SET_VK_PROPS(props, &local_props.multiview);
      VN_SET_VK_PROPS(props, &local_props.protected_memory);
      VN_SET_VK_PROPS(props, &local_props.maintenance_3);

      /* Vulkan 1.2 */
      VN_SET_VK_PROPS_EXT(props, &local_props.driver, exts->KHR_driver_properties);
      VN_SET_VK_PROPS_EXT(props, &local_props.float_controls, exts->KHR_shader_float_controls);
      VN_SET_VK_PROPS_EXT(props, &local_props.descriptor_indexing, exts->EXT_descriptor_indexing);
      VN_SET_VK_PROPS_EXT(props, &local_props.depth_stencil_resolve, exts->KHR_depth_stencil_resolve);
      VN_SET_VK_PROPS_EXT(props, &local_props.sampler_filter_minmax, exts->EXT_sampler_filter_minmax);
      VN_SET_VK_PROPS_EXT(props, &local_props.timeline_semaphore, exts->KHR_timeline_semaphore);
   }

   /* Vulkan 1.3 */
   if (renderer_version >= VK_API_VERSION_1_3) {
      VN_SET_VK_PROPS(props, &local_props.vulkan_1_3);
   } else {
      VN_SET_VK_PROPS_EXT(props, &local_props.subgroup_size_control, exts->EXT_subgroup_size_control);
      VN_SET_VK_PROPS_EXT(props, &local_props.inline_uniform_block, exts->EXT_inline_uniform_block);
      VN_SET_VK_PROPS_EXT(props, &local_props.shader_integer_dot_product, exts->KHR_shader_integer_dot_product);
      VN_SET_VK_PROPS_EXT(props, &local_props.texel_buffer_alignment, exts->EXT_texel_buffer_alignment);
      VN_SET_VK_PROPS_EXT(props, &local_props.maintenance4, exts->KHR_maintenance4);
   }

   /* KHR */
   VN_SET_VK_PROPS_EXT(props, &local_props.fragment_shading_rate, exts->KHR_fragment_shading_rate);
   VN_SET_VK_PROPS_EXT(props, &local_props.push_descriptor, exts->KHR_push_descriptor);

   /* EXT */
   VN_SET_VK_PROPS_EXT(props, &local_props.conservative_rasterization, exts->EXT_conservative_rasterization);
   VN_SET_VK_PROPS_EXT(props, &local_props.custom_border_color, exts->EXT_custom_border_color);
   VN_SET_VK_PROPS_EXT(props, &local_props.extended_dynamic_state_3, exts->EXT_extended_dynamic_state3);
   VN_SET_VK_PROPS_EXT(props, &local_props.graphics_pipeline_library, exts->EXT_graphics_pipeline_library);
   VN_SET_VK_PROPS_EXT(props, &local_props.line_rasterization, exts->EXT_line_rasterization);
   VN_SET_VK_PROPS_EXT(props, &local_props.multi_draw, exts->EXT_multi_draw);
   VN_SET_VK_PROPS_EXT(props, &local_props.pci_bus_info, exts->EXT_pci_bus_info);
   VN_SET_VK_PROPS_EXT(props, &local_props.provoking_vertex, exts->EXT_provoking_vertex);
   VN_SET_VK_PROPS_EXT(props, &local_props.robustness_2, exts->EXT_robustness2);
   VN_SET_VK_PROPS_EXT(props, &local_props.transform_feedback, exts->EXT_transform_feedback);
   VN_SET_VK_PROPS_EXT(props, &local_props.vertex_attribute_divisor, exts->EXT_vertex_attribute_divisor);

   /* clang-format on */

   /* initialize native properties */

   /* VK_EXT_physical_device_drm */
   VN_SET_VK_PROPS(props, &renderer_info->drm.props);

   /* VK_EXT_pci_bus_info */
   if (renderer_info->pci.has_bus_info)
      VN_SET_VK_PROPS(props, &renderer_info->pci.props);

#if DETECT_OS_ANDROID
   /* VK_ANDROID_native_buffer */
   if (vn_android_gralloc_get_shared_present_usage())
      props->sharedImage = true;
#endif

   /* TODO: Fix sparse binding on lavapipe. */
   if (props->driverID == VK_DRIVER_ID_MESA_LLVMPIPE)
      physical_dev->sparse_binding_disabled = true;

   vn_physical_device_sanitize_properties(physical_dev);
}

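/**
 * Query queue family properties and filter out families that only support
 * sparse binding, since Venus also needs the queue to accept feedback
 * command submissions.
 */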
static VkResult
vn_physical_device_init_queue_family_properties(
   struct vn_physical_device *physical_dev)
{
   struct vn_instance *instance = physical_dev->instance;
   struct vn_ring *ring = instance->ring.ring;
   const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
   uint32_t count;

   vn_call_vkGetPhysicalDeviceQueueFamilyProperties2(
      ring, vn_physical_device_to_handle(physical_dev), &count, NULL);

   VkQueueFamilyProperties2 *props =
      vk_alloc(alloc, sizeof(*props) * count, VN_DEFAULT_ALIGN,
               VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
   if (!props)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   for (uint32_t i = 0; i < count; i++) {
      props[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2;
      props[i].pNext = NULL;
   }
   vn_call_vkGetPhysicalDeviceQueueFamilyProperties2(
      ring, vn_physical_device_to_handle(physical_dev), &count, props);

   /* Filter out queue families that exclusively support sparse binding, as
    * we need additional support for submitting feedback commands.
    */
   uint32_t sparse_count = 0;
   uint32_t non_sparse_only_count = 0;
   for (uint32_t i = 0; i < count; i++) {
      if (props[i].queueFamilyProperties.queueFlags &
          ~VK_QUEUE_SPARSE_BINDING_BIT) {
         props[non_sparse_only_count++].queueFamilyProperties =
            props[i].queueFamilyProperties;
      }
      if (props[i].queueFamilyProperties.queueFlags &
          VK_QUEUE_SPARSE_BINDING_BIT) {
         sparse_count++;
      }
   }

   if (VN_DEBUG(NO_SPARSE) ||
       (sparse_count && non_sparse_only_count + sparse_count == count))
      physical_dev->sparse_binding_disabled = true;

   physical_dev->queue_family_properties = props;
   physical_dev->queue_family_count = non_sparse_only_count;

   return VK_SUCCESS;
}

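/**
 * Query memory properties from the renderer and adjust memory type flags to
 * match what guest mappings can actually provide.
 */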
static void
vn_physical_device_init_memory_properties(
   struct vn_physical_device *physical_dev)
{
   struct vn_instance *instance = physical_dev->instance;
   struct vn_ring *ring = instance->ring.ring;
   VkPhysicalDeviceMemoryProperties2 props2 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
   };
   vn_call_vkGetPhysicalDeviceMemoryProperties2(
      ring, vn_physical_device_to_handle(physical_dev), &props2);

   physical_dev->memory_properties = props2.memoryProperties;

   /* The kernel makes every mapping coherent. If a memory type is truly
    * incoherent, it's better to remove the host-visible flag than to
    * silently make it coherent. However, for app compatibility purposes,
    * when a coherent-cached memory type is unavailable, we append the
    * cached bit to the first coherent memory type.
    */
   bool has_coherent_cached = false;
   uint32_t first_coherent = VK_MAX_MEMORY_TYPES;
   VkPhysicalDeviceMemoryProperties *props = &physical_dev->memory_properties;
   for (uint32_t i = 0; i < props->memoryTypeCount; i++) {
      VkMemoryPropertyFlags *flags = &props->memoryTypes[i].propertyFlags;
      const bool coherent = *flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
      const bool cached = *flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
      if (coherent) {
         if (first_coherent == VK_MAX_MEMORY_TYPES)
            first_coherent = i;
         if (cached)
            has_coherent_cached = true;
      } else if (cached) {
         *flags &= ~(VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
                     VK_MEMORY_PROPERTY_HOST_CACHED_BIT);
      }
   }

   if (!has_coherent_cached) {
      props->memoryTypes[first_coherent].propertyFlags |=
         VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
   }
}

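/**
 * Determine which external memory handle types can be supported, based on
 * the external memory extensions advertised by the renderer.
 */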
static void
vn_physical_device_init_external_memory(
   struct vn_physical_device *physical_dev)
{
   /* When a renderer VkDeviceMemory is exportable, we can create a
    * vn_renderer_bo from it. The vn_renderer_bo can be freely exported as an
    * opaque fd or a dma-buf.
    *
    * When external memory can be imported as a vn_renderer_bo, that bo might
    * be imported as a renderer side VkDeviceMemory.
    *
    * However, to know whether a renderer VkDeviceMemory is exportable or
    * whether a bo can be imported as a renderer VkDeviceMemory, we have to
    * start from the physical device external image and external buffer
    * properties queries, which in turn require knowing the external handle
    * types the renderer supports. That info can be reliably retrieved from
    * the external memory extensions advertised by the renderer.
    *
    * We require VK_EXT_external_memory_dma_buf to expose driver side
    * external memory support for a renderer running on Linux. As a
    * comparison, when the renderer runs on Windows,
    * VK_KHR_external_memory_win32 might be required for the same.
    *
    * For vtest, the protocol does not support external memory import. So we
    * only mask out the importable bit so that wsi over vtest can be
    * supported.
    */
   if (physical_dev->renderer_extensions.EXT_external_memory_dma_buf) {
      physical_dev->external_memory.renderer_handle_type =
         VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;

#if DETECT_OS_ANDROID
      physical_dev->external_memory.supported_handle_types |=
         VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
#else  /* DETECT_OS_ANDROID */
      physical_dev->external_memory.supported_handle_types =
         VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT |
         VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
#endif /* DETECT_OS_ANDROID */
   }
}

static void
vn_physical_device_init_external_fence_handles(
   struct vn_physical_device *physical_dev)
{
   /* The current code manipulates the host-side VkFence directly.
    * vkWaitForFences is translated to repeated vkGetFenceStatus.
    *
    * External fence is not possible currently. Instead, we cheat by
    * translating vkGetFenceFdKHR to an empty renderer submission for the
    * out fence, along with a venus protocol command to fix the renderer
    * side fence payload.
    *
    * We would like to create a vn_renderer_sync from a host-side VkFence,
    * similar to how a vn_renderer_bo is created from a host-side
    * VkDeviceMemory. That would require kernel support and tons of work on
    * the host side. If we had that, and we kept both the vn_renderer_sync
    * and the host-side VkFence in sync, we would have the freedom to use
    * either of them depending on the occasion, and support external fences
    * and idle waiting.
    */
   if (physical_dev->renderer_extensions.KHR_external_fence_fd) {
      struct vn_ring *ring = physical_dev->instance->ring.ring;
      const VkPhysicalDeviceExternalFenceInfo info = {
         .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
         .handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
      };
      VkExternalFenceProperties props = {
         .sType = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
      };
      vn_call_vkGetPhysicalDeviceExternalFenceProperties(
         ring, vn_physical_device_to_handle(physical_dev), &info, &props);

      physical_dev->renderer_sync_fd.fence_exportable =
         props.externalFenceFeatures &
         VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT;
   }

   physical_dev->external_fence_handles = 0;

   if (physical_dev->instance->renderer->info.has_external_sync) {
      physical_dev->external_fence_handles =
         VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT;
   }
}

static void
vn_physical_device_init_external_semaphore_handles(
   struct vn_physical_device *physical_dev)
{
   /* The current code manipulates the host-side VkSemaphore directly. It
    * works very well for binary semaphores because there is no CPU
    * operation. But for timeline semaphores, the situation is similar to
    * that of fences. vkWaitSemaphores is translated to repeated
    * vkGetSemaphoreCounterValue.
    *
    * External semaphore is not possible currently. Instead, we cheat when
    * the semaphore is binary and the handle type is sync file. We do an
    * empty renderer submission for the out fence, along with a venus
    * protocol command to fix the renderer side semaphore payload.
    *
    * We would like to create a vn_renderer_sync from a host-side
    * VkSemaphore, similar to how a vn_renderer_bo is created from a
    * host-side VkDeviceMemory. The reasoning is the same as that for
    * fences. Additionally, we would like the sync file exported from the
    * vn_renderer_sync to carry the necessary information to identify the
    * host-side VkSemaphore. That would allow the consumers to wait on the
    * host side rather than the guest side.
    */
   if (physical_dev->renderer_extensions.KHR_external_semaphore_fd) {
      struct vn_ring *ring = physical_dev->instance->ring.ring;
      const VkPhysicalDeviceExternalSemaphoreInfo info = {
         .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
         .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
      };
      VkExternalSemaphoreProperties props = {
         .sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
      };
      vn_call_vkGetPhysicalDeviceExternalSemaphoreProperties(
         ring, vn_physical_device_to_handle(physical_dev), &info, &props);

      physical_dev->renderer_sync_fd.semaphore_exportable =
         props.externalSemaphoreFeatures &
         VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT;
      physical_dev->renderer_sync_fd.semaphore_importable =
         props.externalSemaphoreFeatures &
         VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT;
   }

   physical_dev->external_binary_semaphore_handles = 0;
   physical_dev->external_timeline_semaphore_handles = 0;

   if (physical_dev->instance->renderer->info.has_external_sync) {
      physical_dev->external_binary_semaphore_handles =
         VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
   }
}

static inline bool
vn_physical_device_get_external_memory_support(
   const struct vn_physical_device *physical_dev)
{
   if (!physical_dev->external_memory.renderer_handle_type)
      return false;

   /* see vn_physical_device_init_external_memory */
   if (physical_dev->external_memory.renderer_handle_type ==
       VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT) {
      const struct vk_device_extension_table *renderer_exts =
         &physical_dev->renderer_extensions;
      return renderer_exts->EXT_image_drm_format_modifier &&
             renderer_exts->EXT_queue_family_foreign;
   }

   /* expand support once the renderer can run on non-Linux platforms */
   return false;
}

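/**
 * Collect extensions implemented natively by Venus rather than passed
 * through to the renderer.
 */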
static void
vn_physical_device_get_native_extensions(
   const struct vn_physical_device *physical_dev,
   struct vk_device_extension_table *exts)
{
   memset(exts, 0, sizeof(*exts));

   if (physical_dev->instance->renderer->info.has_external_sync &&
       physical_dev->renderer_sync_fd.fence_exportable)
      exts->KHR_external_fence_fd = true;

   if (physical_dev->instance->renderer->info.has_external_sync &&
       physical_dev->renderer_sync_fd.semaphore_importable &&
       physical_dev->renderer_sync_fd.semaphore_exportable)
      exts->KHR_external_semaphore_fd = true;

   const bool can_external_mem =
      vn_physical_device_get_external_memory_support(physical_dev);
   if (can_external_mem) {
#if DETECT_OS_ANDROID
      exts->ANDROID_external_memory_android_hardware_buffer = true;

      /* For wsi, we require the renderer to support:
       * - semaphore sync fd import for queue submission to skip scrubbing
       *   the wsi wait semaphores.
       * - fence sync fd export for QueueSignalReleaseImageANDROID to export
       *   a sync fd.
       *
       * TODO: relax these requirements by:
       * - properly scrubbing wsi wait semaphores
       * - not creating an external fence but exporting the sync fd directly
       */
      if (physical_dev->renderer_sync_fd.semaphore_importable &&
          physical_dev->renderer_sync_fd.fence_exportable)
         exts->ANDROID_native_buffer = true;
#else  /* DETECT_OS_ANDROID */
      exts->KHR_external_memory_fd = true;
      exts->EXT_external_memory_dma_buf = true;
#endif /* DETECT_OS_ANDROID */
   }

#ifdef VN_USE_WSI_PLATFORM
   if (can_external_mem &&
       physical_dev->renderer_sync_fd.semaphore_importable) {
      exts->KHR_incremental_present = true;
      exts->KHR_swapchain = true;
      exts->KHR_swapchain_mutable_format = true;
   }

   /* VK_EXT_pci_bus_info is required by common wsi to decide whether the
    * native image or the prime blit path is used. Meanwhile, venus must
    * stay on the native image path for proper fencing.
    * - For virtgpu, VK_EXT_pci_bus_info is natively supported.
    * - For vtest, pci bus info must be queried from the renderer side
    *   physical device to be compared against the render node opened by
    *   common wsi.
    */
   exts->EXT_pci_bus_info =
      physical_dev->instance->renderer->info.pci.has_bus_info ||
      physical_dev->renderer_extensions.EXT_pci_bus_info;
#endif

   exts->EXT_physical_device_drm = true;
   /* use common implementation */
   exts->EXT_tooling_info = true;
   exts->EXT_device_memory_report = true;
}

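/**
 * Collect extensions that can be passed through to the renderer when the
 * renderer advertises them.
 */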
static void
vn_physical_device_get_passthrough_extensions(
   const struct vn_physical_device *physical_dev,
   struct vk_device_extension_table *exts)
{
   *exts = (struct vk_device_extension_table){
      /* promoted to VK_VERSION_1_1 */
      .KHR_16bit_storage = true,
      .KHR_bind_memory2 = true,
      .KHR_dedicated_allocation = true,
      .KHR_descriptor_update_template = true,
      .KHR_device_group = true,
      .KHR_external_fence = true,
      .KHR_external_memory = true,
      .KHR_external_semaphore = true,
      .KHR_get_memory_requirements2 = true,
      .KHR_maintenance1 = true,
      .KHR_maintenance2 = true,
      .KHR_maintenance3 = true,
      .KHR_multiview = true,
      .KHR_relaxed_block_layout = true,
      .KHR_sampler_ycbcr_conversion = true,
      .KHR_shader_draw_parameters = true,
      .KHR_storage_buffer_storage_class = true,
      .KHR_variable_pointers = true,

      /* promoted to VK_VERSION_1_2 */
      .KHR_8bit_storage = true,
      .KHR_buffer_device_address = true,
      .KHR_create_renderpass2 = true,
      .KHR_depth_stencil_resolve = true,
      .KHR_draw_indirect_count = true,
      .KHR_driver_properties = true,
      .KHR_image_format_list = true,
      .KHR_imageless_framebuffer = true,
      .KHR_sampler_mirror_clamp_to_edge = true,
      .KHR_separate_depth_stencil_layouts = true,
      .KHR_shader_atomic_int64 = true,
      .KHR_shader_float16_int8 = true,
      .KHR_shader_float_controls = true,
      .KHR_shader_subgroup_extended_types = true,
      .KHR_spirv_1_4 = true,
      .KHR_timeline_semaphore = true,
      .KHR_uniform_buffer_standard_layout = true,
      .KHR_vulkan_memory_model = true,
      .EXT_descriptor_indexing = true,
      .EXT_host_query_reset = true,
      .EXT_sampler_filter_minmax = true,
      .EXT_scalar_block_layout = true,
      .EXT_separate_stencil_usage = true,
      .EXT_shader_viewport_index_layer = true,

      /* promoted to VK_VERSION_1_3 */
      .KHR_copy_commands2 = true,
      .KHR_dynamic_rendering = true,
      .KHR_format_feature_flags2 = true,
      .KHR_maintenance4 = true,
      .KHR_shader_integer_dot_product = true,
      .KHR_shader_non_semantic_info = true,
      .KHR_shader_terminate_invocation = true,
      /* Our implementation requires semaphore sync fd import
       * for VK_KHR_synchronization2.
       */
      .KHR_synchronization2 =
         physical_dev->renderer_sync_fd.semaphore_importable,
      .KHR_zero_initialize_workgroup_memory = true,
      .EXT_4444_formats = true,
      .EXT_extended_dynamic_state = true,
      .EXT_extended_dynamic_state2 = true,
      .EXT_image_robustness = true,
      .EXT_inline_uniform_block = true,
      .EXT_pipeline_creation_cache_control = true,
      /* hide behind renderer support to allow structs passing through */
      .EXT_pipeline_creation_feedback = true,
      .EXT_shader_demote_to_helper_invocation = true,
      .EXT_subgroup_size_control = true,
      .EXT_texel_buffer_alignment = true,
      .EXT_texture_compression_astc_hdr = true,
      .EXT_ycbcr_2plane_444_formats = true,

      /* KHR */
      .KHR_fragment_shading_rate = true,
      .KHR_maintenance5 = true,
      .KHR_pipeline_library = true,
      .KHR_push_descriptor = true,
      .KHR_shader_clock = true,
      .KHR_shader_expect_assume = true,

      /* EXT */
      .EXT_attachment_feedback_loop_layout = true,
      .EXT_border_color_swizzle = true,
      .EXT_calibrated_timestamps = true,
      .EXT_color_write_enable = true,
      .EXT_conditional_rendering = true,
      .EXT_conservative_rasterization = true,
      .EXT_custom_border_color = true,
      .EXT_depth_clip_control = true,
      .EXT_depth_clip_enable = true,
      .EXT_extended_dynamic_state3 = true,
      .EXT_dynamic_rendering_unused_attachments = true,
      .EXT_external_memory_acquire_unmodified = true,
      .EXT_fragment_shader_interlock = true,
      .EXT_graphics_pipeline_library = !VN_DEBUG(NO_GPL),
      .EXT_image_2d_view_of_3d = true,
      .EXT_image_drm_format_modifier = true,
      .EXT_image_view_min_lod = true,
      .EXT_index_type_uint8 = true,
      .EXT_line_rasterization = true,
      .EXT_load_store_op_none = true,
      /* TODO: re-enable after generic app compat issues are resolved */
      .EXT_memory_budget = false,
      .EXT_multi_draw = true,
      .EXT_mutable_descriptor_type = true,
      .EXT_non_seamless_cube_map = true,
      .EXT_primitive_topology_list_restart = true,
      .EXT_primitives_generated_query = true,
      /* hide behind renderer support to allow structs passing through */
      .EXT_private_data = true,
      .EXT_provoking_vertex = true,
      .EXT_queue_family_foreign = true,
      .EXT_rasterization_order_attachment_access = true,
      .EXT_robustness2 = true,
      .EXT_shader_stencil_export = true,
      .EXT_shader_subgroup_ballot = true,
      .EXT_transform_feedback = true,
      .EXT_vertex_attribute_divisor = true,
      .EXT_vertex_input_dynamic_state = true,

      /* vendor */
      .VALVE_mutable_descriptor_type = true,
   };
}

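/**
 * Merge the native and passthrough extension tables into the set of
 * supported extensions, tracking the spec version advertised for each.
 */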
static void
vn_physical_device_init_supported_extensions(
   struct vn_physical_device *physical_dev)
{
   struct vk_device_extension_table native;
   struct vk_device_extension_table passthrough;
   vn_physical_device_get_native_extensions(physical_dev, &native);
   vn_physical_device_get_passthrough_extensions(physical_dev, &passthrough);

   for (uint32_t i = 0; i < VK_DEVICE_EXTENSION_COUNT; i++) {
      const VkExtensionProperties *props = &vk_device_extensions[i];

#ifdef ANDROID_STRICT
      if (!vk_android_allowed_device_extensions.extensions[i])
         continue;
#endif

      if (native.extensions[i]) {
         physical_dev->base.base.supported_extensions.extensions[i] = true;
         physical_dev->extension_spec_versions[i] = props->specVersion;
      } else if (passthrough.extensions[i] &&
                 physical_dev->renderer_extensions.extensions[i]) {
         physical_dev->base.base.supported_extensions.extensions[i] = true;
         physical_dev->extension_spec_versions[i] = MIN2(
            physical_dev->extension_spec_versions[i], props->specVersion);
      }
   }
}

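/**
 * Enumerate renderer device extensions and record those the venus protocol
 * encoder also supports, capping each at the encoder's spec version.
 */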
1138 static VkResult
vn_physical_device_init_renderer_extensions(struct vn_physical_device * physical_dev)1139 vn_physical_device_init_renderer_extensions(
1140 struct vn_physical_device *physical_dev)
1141 {
1142 struct vn_instance *instance = physical_dev->instance;
1143 struct vn_ring *ring = instance->ring.ring;
1144 const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
1145
1146 /* get renderer extensions */
1147 uint32_t count;
1148 VkResult result = vn_call_vkEnumerateDeviceExtensionProperties(
1149 ring, vn_physical_device_to_handle(physical_dev), NULL, &count, NULL);
1150 if (result != VK_SUCCESS)
1151 return result;
1152
1153 VkExtensionProperties *exts = NULL;
1154 if (count) {
1155 exts = vk_alloc(alloc, sizeof(*exts) * count, VN_DEFAULT_ALIGN,
1156 VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
1157 if (!exts)
1158 return VK_ERROR_OUT_OF_HOST_MEMORY;
1159
1160 result = vn_call_vkEnumerateDeviceExtensionProperties(
1161 ring, vn_physical_device_to_handle(physical_dev), NULL, &count,
1162 exts);
1163 if (result < VK_SUCCESS) {
1164 vk_free(alloc, exts);
1165 return result;
1166 }
1167 }
1168
1169 physical_dev->extension_spec_versions =
1170 vk_zalloc(alloc,
1171 sizeof(*physical_dev->extension_spec_versions) *
1172 VK_DEVICE_EXTENSION_COUNT,
1173 VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
1174 if (!physical_dev->extension_spec_versions) {
1175 vk_free(alloc, exts);
1176 return VK_ERROR_OUT_OF_HOST_MEMORY;
1177 }
1178
1179 for (uint32_t i = 0; i < VK_DEVICE_EXTENSION_COUNT; i++) {
1180 const VkExtensionProperties *props = &vk_device_extensions[i];
1181 for (uint32_t j = 0; j < count; j++) {
1182 if (strcmp(props->extensionName, exts[j].extensionName))
1183 continue;
1184
1185 /* check encoder support */
1186 const uint32_t enc_ext_spec_version =
1187 vn_extension_get_spec_version(props->extensionName);
1188 if (!enc_ext_spec_version)
1189 continue;
1190
1191 physical_dev->renderer_extensions.extensions[i] = true;
1192 physical_dev->extension_spec_versions[i] =
1193 MIN2(exts[j].specVersion, enc_ext_spec_version);
1194
1195 break;
1196 }
1197 }
1198
1199 vk_free(alloc, exts);
1200
1201 return VK_SUCCESS;
1202 }
1203
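/* A worked sketch of the specVersion clamping above, with hypothetical
 * values: if the renderer reports specVersion 3 for an extension but the
 * generated encoder only understands revision 2, the driver advertises 2.
 */
static inline uint32_t
example_clamped_spec_version(uint32_t renderer_spec_version,
                             uint32_t encoder_spec_version)
{
   /* never advertise an extension revision the encoder cannot marshal */
   return MIN2(renderer_spec_version, encoder_spec_version);
}
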
1204 static VkResult
1205 vn_physical_device_init_renderer_version(
1206 struct vn_physical_device *physical_dev)
1207 {
1208 struct vn_instance *instance = physical_dev->instance;
1209 struct vn_ring *ring = instance->ring.ring;
1210
1211 /*
1212 * We either check and enable VK_KHR_get_physical_device_properties2, or we
1213 * must use vkGetPhysicalDeviceProperties to get the device-level version.
1214 */
1215 VkPhysicalDeviceProperties props;
1216 vn_call_vkGetPhysicalDeviceProperties(
1217 ring, vn_physical_device_to_handle(physical_dev), &props);
1218 if (props.apiVersion < VN_MIN_RENDERER_VERSION) {
1219 if (VN_DEBUG(INIT)) {
1220 vn_log(instance, "%s has unsupported renderer device version %d.%d",
1221 props.deviceName, VK_VERSION_MAJOR(props.apiVersion),
1222 VK_VERSION_MINOR(props.apiVersion));
1223 }
1224 return VK_ERROR_INITIALIZATION_FAILED;
1225 }
1226
1227 /* device version for internal use is capped */
1228 physical_dev->renderer_version =
1229 MIN3(props.apiVersion, instance->renderer_api_version,
1230 instance->renderer->info.vk_xml_version);
1231
1232 return VK_SUCCESS;
1233 }
1234
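/* An illustration of the MIN3 cap above under assumed inputs: a renderer
 * device reporting 1.3, a renderer instance created with 1.2, and a
 * protocol generated from a 1.2 vk.xml yield an internal version of 1.2.
 */
static inline uint32_t
example_capped_renderer_version(void)
{
   const uint32_t device_version = VK_MAKE_API_VERSION(0, 1, 3, 0);
   const uint32_t instance_version = VK_MAKE_API_VERSION(0, 1, 2, 0);
   const uint32_t vk_xml_version = VK_MAKE_API_VERSION(0, 1, 2, 0);
   return MIN3(device_version, instance_version, vk_xml_version); /* 1.2 */
}
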
1235 static void
1236 vn_image_format_cache_debug_dump(
1237 struct vn_image_format_properties_cache *cache)
1238 {
1239 vn_log(NULL, " hit %u\n", cache->debug.cache_hit_count);
1240 vn_log(NULL, " miss %u\n", cache->debug.cache_miss_count);
1241 vn_log(NULL, " skip %u\n", cache->debug.cache_skip_count);
1242 }
1243
1244 static void
1245 vn_image_format_cache_init(struct vn_physical_device *physical_dev)
1246 {
1247 struct vn_image_format_properties_cache *cache =
1248 &physical_dev->image_format_cache;
1249
1250 if (VN_PERF(NO_ASYNC_IMAGE_FORMAT))
1251 return;
1252
1253 cache->ht = _mesa_hash_table_create(NULL, vn_cache_key_hash_function,
1254 vn_cache_key_equal_function);
1255 if (!cache->ht)
1256 return;
1257
1258 simple_mtx_init(&cache->mutex, mtx_plain);
1259 list_inithead(&cache->lru);
1260 }
1261
1262 static void
1263 vn_image_format_cache_fini(struct vn_physical_device *physical_dev)
1264 {
1265 const VkAllocationCallbacks *alloc =
1266 &physical_dev->base.base.instance->alloc;
1267 struct vn_image_format_properties_cache *cache =
1268 &physical_dev->image_format_cache;
1269
1270 if (!cache->ht)
1271 return;
1272
1273 hash_table_foreach(cache->ht, hash_entry) {
1274 struct vn_image_format_cache_entry *cache_entry = hash_entry->data;
1275 list_del(&cache_entry->head);
1276 vk_free(alloc, cache_entry);
1277 }
1278 assert(list_is_empty(&cache->lru));
1279
1280 _mesa_hash_table_destroy(cache->ht, NULL);
1281
1282 simple_mtx_destroy(&cache->mutex);
1283
1284 if (VN_DEBUG(CACHE))
1285 vn_image_format_cache_debug_dump(cache);
1286 }
1287
1288 static void
1289 vn_physical_device_disable_sparse_binding(
1290 struct vn_physical_device *physical_dev)
1291 {
1292 /* To support sparse binding with feedback, we require sparse binding queue
1293     * families to also support submitting feedback commands. Any queue
1294     * family that exclusively supports sparse binding is filtered out. If a
1295     * device supports sparse binding only through such exclusive queue
1296     * families, they all get filtered out and the feature must be disabled.
1297     */
1298
1299 struct vk_features *feats = &physical_dev->base.base.supported_features;
1300 VN_SET_CORE_VALUE(feats, sparseBinding, false);
1301 VN_SET_CORE_VALUE(feats, sparseResidencyBuffer, false);
1302 VN_SET_CORE_VALUE(feats, sparseResidencyImage2D, false);
1303 VN_SET_CORE_VALUE(feats, sparseResidencyImage3D, false);
1304 VN_SET_CORE_VALUE(feats, sparseResidency2Samples, false);
1305 VN_SET_CORE_VALUE(feats, sparseResidency4Samples, false);
1306 VN_SET_CORE_VALUE(feats, sparseResidency8Samples, false);
1307 VN_SET_CORE_VALUE(feats, sparseResidency16Samples, false);
1308 VN_SET_CORE_VALUE(feats, sparseResidencyAliased, false);
1309
1310 struct vk_properties *props = &physical_dev->base.base.properties;
1311 VN_SET_CORE_VALUE(props, sparseAddressSpaceSize, 0);
1312 VN_SET_CORE_VALUE(props, sparseResidencyStandard2DBlockShape, 0);
1313 VN_SET_CORE_VALUE(props, sparseResidencyStandard2DMultisampleBlockShape,
1314 0);
1315 VN_SET_CORE_VALUE(props, sparseResidencyStandard3DBlockShape, 0);
1316 VN_SET_CORE_VALUE(props, sparseResidencyAlignedMipSize, 0);
1317 VN_SET_CORE_VALUE(props, sparseResidencyNonResidentStrict, 0);
1318 }
1319
1320 static VkResult
1321 vn_physical_device_init(struct vn_physical_device *physical_dev)
1322 {
1323 struct vn_instance *instance = physical_dev->instance;
1324 const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
1325 VkResult result;
1326
1327 result = vn_physical_device_init_renderer_extensions(physical_dev);
1328 if (result != VK_SUCCESS)
1329 return result;
1330
1331 vn_physical_device_init_external_memory(physical_dev);
1332 vn_physical_device_init_external_fence_handles(physical_dev);
1333 vn_physical_device_init_external_semaphore_handles(physical_dev);
1334
1335 vn_physical_device_init_supported_extensions(physical_dev);
1336
1337 result = vn_physical_device_init_queue_family_properties(physical_dev);
1338 if (result != VK_SUCCESS)
1339 goto fail;
1340
1341 /* TODO query all caps with minimal round trips */
1342 vn_physical_device_init_features(physical_dev);
1343 vn_physical_device_init_properties(physical_dev);
1344 if (physical_dev->sparse_binding_disabled)
1345 vn_physical_device_disable_sparse_binding(physical_dev);
1346
1347 vn_physical_device_init_memory_properties(physical_dev);
1348
1349 result = vn_wsi_init(physical_dev);
1350 if (result != VK_SUCCESS)
1351 goto fail;
1352
1353 simple_mtx_init(&physical_dev->format_update_mutex, mtx_plain);
1354 util_sparse_array_init(&physical_dev->format_properties,
1355 sizeof(struct vn_format_properties_entry), 64);
1356
1357 vn_image_format_cache_init(physical_dev);
1358
1359 return VK_SUCCESS;
1360
1361 fail:
1362 vk_free(alloc, physical_dev->extension_spec_versions);
1363 vk_free(alloc, physical_dev->queue_family_properties);
1364 return result;
1365 }
1366
1367 void
1368 vn_physical_device_fini(struct vn_physical_device *physical_dev)
1369 {
1370 struct vn_instance *instance = physical_dev->instance;
1371 const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
1372
1373 vn_image_format_cache_fini(physical_dev);
1374
1375 simple_mtx_destroy(&physical_dev->format_update_mutex);
1376 util_sparse_array_finish(&physical_dev->format_properties);
1377
1378 vn_wsi_fini(physical_dev);
1379 vk_free(alloc, physical_dev->extension_spec_versions);
1380 vk_free(alloc, physical_dev->queue_family_properties);
1381
1382 vn_physical_device_base_fini(&physical_dev->base);
1383 }
1384
1385 static struct vn_physical_device *
1386 find_physical_device(struct vn_physical_device *physical_devs,
1387 uint32_t count,
1388 vn_object_id id)
1389 {
1390 for (uint32_t i = 0; i < count; i++) {
1391 if (physical_devs[i].base.id == id)
1392 return &physical_devs[i];
1393 }
1394 return NULL;
1395 }
1396
1397 static VkResult
1398 vn_instance_enumerate_physical_device_groups_locked(
1399 struct vn_instance *instance,
1400 struct vn_physical_device *physical_devs,
1401 uint32_t physical_dev_count)
1402 {
1403 VkInstance instance_handle = vn_instance_to_handle(instance);
1404 struct vn_ring *ring = instance->ring.ring;
1405 const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
1406 VkResult result;
1407
1408 uint32_t count;
1409 result = vn_call_vkEnumeratePhysicalDeviceGroups(ring, instance_handle,
1410 &count, NULL);
1411 if (result != VK_SUCCESS)
1412 return result;
1413
1414 VkPhysicalDeviceGroupProperties *groups =
1415 vk_alloc(alloc, sizeof(*groups) * count, VN_DEFAULT_ALIGN,
1416 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
1417 if (!groups)
1418 return VK_ERROR_OUT_OF_HOST_MEMORY;
1419
1420 /* VkPhysicalDeviceGroupProperties::physicalDevices is treated as an input
1421 * by the encoder. Each VkPhysicalDevice must point to a valid object.
1422 * Each object must have id 0 as well, which is interpreted as a query by
1423 * the renderer.
1424 */
1425 struct vn_physical_device_base *temp_objs =
1426 vk_zalloc(alloc, sizeof(*temp_objs) * VK_MAX_DEVICE_GROUP_SIZE * count,
1427 VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
1428 if (!temp_objs) {
1429 vk_free(alloc, groups);
1430 return VK_ERROR_OUT_OF_HOST_MEMORY;
1431 }
1432
1433 for (uint32_t i = 0; i < count; i++) {
1434 VkPhysicalDeviceGroupProperties *group = &groups[i];
1435 group->sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES;
1436 group->pNext = NULL;
1437 for (uint32_t j = 0; j < VK_MAX_DEVICE_GROUP_SIZE; j++) {
1438 struct vn_physical_device_base *temp_obj =
1439 &temp_objs[VK_MAX_DEVICE_GROUP_SIZE * i + j];
1440 temp_obj->base.base.type = VK_OBJECT_TYPE_PHYSICAL_DEVICE;
1441 group->physicalDevices[j] = (VkPhysicalDevice)temp_obj;
1442 }
1443 }
1444
1445 result = vn_call_vkEnumeratePhysicalDeviceGroups(ring, instance_handle,
1446 &count, groups);
1447 if (result != VK_SUCCESS) {
1448 vk_free(alloc, groups);
1449 vk_free(alloc, temp_objs);
1450 return result;
1451 }
1452
1453 /* fix VkPhysicalDeviceGroupProperties::physicalDevices to point to
1454 * physical_devs and discard unsupported ones
1455 */
1456 uint32_t supported_count = 0;
1457 for (uint32_t i = 0; i < count; i++) {
1458 VkPhysicalDeviceGroupProperties *group = &groups[i];
1459
1460 uint32_t group_physical_dev_count = 0;
1461 for (uint32_t j = 0; j < group->physicalDeviceCount; j++) {
1462 struct vn_physical_device_base *temp_obj =
1463 (struct vn_physical_device_base *)group->physicalDevices[j];
1464 struct vn_physical_device *physical_dev = find_physical_device(
1465 physical_devs, physical_dev_count, temp_obj->id);
1466 if (!physical_dev)
1467 continue;
1468
1469 group->physicalDevices[group_physical_dev_count++] =
1470 vn_physical_device_to_handle(physical_dev);
1471 }
1472
1473 group->physicalDeviceCount = group_physical_dev_count;
1474 if (!group->physicalDeviceCount)
1475 continue;
1476
1477 if (supported_count < i)
1478 groups[supported_count] = *group;
1479 supported_count++;
1480 }
1481
1482 count = supported_count;
1483 assert(count);
1484
1485 vk_free(alloc, temp_objs);
1486
1487 instance->physical_device.groups = groups;
1488 instance->physical_device.group_count = count;
1489
1490 return VK_SUCCESS;
1491 }
1492
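/* The group filtering above (and filter_physical_devices() below) uses a
 * stable in-place compaction. A standalone sketch over a hypothetical int
 * array, keeping non-zero elements:
 */
static inline uint32_t
example_compact_in_place(int *vals, uint32_t count)
{
   uint32_t kept = 0;
   for (uint32_t i = 0; i < count; i++) {
      if (!vals[i])
         continue;
      /* only copy when a gap has formed */
      if (kept < i)
         vals[kept] = vals[i];
      kept++;
   }
   return kept;
}
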
1493 static VkResult
1494 enumerate_physical_devices(struct vn_instance *instance,
1495 struct vn_physical_device **out_physical_devs,
1496 uint32_t *out_count)
1497 {
1498 const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
1499 struct vn_ring *ring = instance->ring.ring;
1500 struct vn_physical_device *physical_devs = NULL;
1501 VkResult result;
1502
1503 uint32_t count = 0;
1504 result = vn_call_vkEnumeratePhysicalDevices(
1505 ring, vn_instance_to_handle(instance), &count, NULL);
1506 if (result != VK_SUCCESS || !count)
1507 return result;
1508
1509 physical_devs =
1510 vk_zalloc(alloc, sizeof(*physical_devs) * count, VN_DEFAULT_ALIGN,
1511 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
1512 if (!physical_devs)
1513 return VK_ERROR_OUT_OF_HOST_MEMORY;
1514
1515 STACK_ARRAY(VkPhysicalDevice, handles, count);
1516
1517 for (uint32_t i = 0; i < count; i++) {
1518 struct vn_physical_device *physical_dev = &physical_devs[i];
1519
1520 struct vk_physical_device_dispatch_table dispatch_table;
1521 vk_physical_device_dispatch_table_from_entrypoints(
1522 &dispatch_table, &vn_physical_device_entrypoints, true);
1523 vk_physical_device_dispatch_table_from_entrypoints(
1524 &dispatch_table, &wsi_physical_device_entrypoints, false);
1525 result = vn_physical_device_base_init(
1526 &physical_dev->base, &instance->base, NULL, &dispatch_table);
1527 if (result != VK_SUCCESS) {
1528 count = i;
1529 goto fail;
1530 }
1531
1532 physical_dev->instance = instance;
1533
1534 handles[i] = vn_physical_device_to_handle(physical_dev);
1535 }
1536
1537 result = vn_call_vkEnumeratePhysicalDevices(
1538 ring, vn_instance_to_handle(instance), &count, handles);
1539 if (result != VK_SUCCESS)
1540 goto fail;
1541
1542 STACK_ARRAY_FINISH(handles);
1543 *out_physical_devs = physical_devs;
1544 *out_count = count;
1545
1546 return VK_SUCCESS;
1547
1548 fail:
1549 for (uint32_t i = 0; i < count; i++)
1550 vn_physical_device_base_fini(&physical_devs[i].base);
1551 vk_free(alloc, physical_devs);
1552 STACK_ARRAY_FINISH(handles);
1553 return result;
1554 }
1555
1556 static uint32_t
1557 filter_physical_devices(struct vn_physical_device *physical_devs,
1558 uint32_t count)
1559 {
1560 uint32_t supported_count = 0;
1561 for (uint32_t i = 0; i < count; i++) {
1562 struct vn_physical_device *physical_dev = &physical_devs[i];
1563
1564 /* init renderer version and discard unsupported devices */
1565 VkResult result =
1566 vn_physical_device_init_renderer_version(physical_dev);
1567 if (result != VK_SUCCESS) {
1568 vn_physical_device_base_fini(&physical_dev->base);
1569 continue;
1570 }
1571
1572 if (supported_count < i)
1573 physical_devs[supported_count] = *physical_dev;
1574 supported_count++;
1575 }
1576
1577 return supported_count;
1578 }
1579
1580 static VkResult
1581 vn_instance_enumerate_physical_devices_and_groups(struct vn_instance *instance)
1582 {
1583 const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
1584 struct vn_physical_device *physical_devs = NULL;
1585 uint32_t count = 0;
1586 VkResult result = VK_SUCCESS;
1587
1588 mtx_lock(&instance->physical_device.mutex);
1589
1590 if (instance->physical_device.initialized)
1591 goto unlock;
1592 instance->physical_device.initialized = true;
1593
1594 result = enumerate_physical_devices(instance, &physical_devs, &count);
1595 if (result != VK_SUCCESS)
1596 goto unlock;
1597
1598 count = filter_physical_devices(physical_devs, count);
1599 if (!count) {
1600 vk_free(alloc, physical_devs);
1601 goto unlock;
1602 }
1603
1604 /* fully initialize physical devices */
1605 for (uint32_t i = 0; i < count; i++) {
1606 struct vn_physical_device *physical_dev = &physical_devs[i];
1607
1608 result = vn_physical_device_init(physical_dev);
1609 if (result != VK_SUCCESS) {
1610 for (uint32_t j = 0; j < i; j++)
1611 vn_physical_device_fini(&physical_devs[j]);
1612 for (uint32_t j = i; j < count; j++)
1613 vn_physical_device_base_fini(&physical_devs[j].base);
1614 vk_free(alloc, physical_devs);
1615 goto unlock;
1616 }
1617 }
1618
1619 result = vn_instance_enumerate_physical_device_groups_locked(
1620 instance, physical_devs, count);
1621 if (result != VK_SUCCESS) {
1622 for (uint32_t i = 0; i < count; i++)
1623 vn_physical_device_fini(&physical_devs[i]);
1624 vk_free(alloc, physical_devs);
1625 goto unlock;
1626 }
1627
1628 instance->physical_device.devices = physical_devs;
1629 instance->physical_device.device_count = count;
1630
1631 unlock:
1632 mtx_unlock(&instance->physical_device.mutex);
1633 return result;
1634 }
1635
1636 /* physical device commands */
1637
1638 VkResult
1639 vn_EnumeratePhysicalDevices(VkInstance _instance,
1640 uint32_t *pPhysicalDeviceCount,
1641 VkPhysicalDevice *pPhysicalDevices)
1642 {
1643 struct vn_instance *instance = vn_instance_from_handle(_instance);
1644
1645 VkResult result =
1646 vn_instance_enumerate_physical_devices_and_groups(instance);
1647 if (result != VK_SUCCESS)
1648 return vn_error(instance, result);
1649
1650 VK_OUTARRAY_MAKE_TYPED(VkPhysicalDevice, out, pPhysicalDevices,
1651 pPhysicalDeviceCount);
1652 for (uint32_t i = 0; i < instance->physical_device.device_count; i++) {
1653 vk_outarray_append_typed(VkPhysicalDevice, &out, physical_dev) {
1654 *physical_dev = vn_physical_device_to_handle(
1655 &instance->physical_device.devices[i]);
1656 }
1657 }
1658
1659 return vk_outarray_status(&out);
1660 }
1661
1662 VkResult
1663 vn_EnumeratePhysicalDeviceGroups(
1664 VkInstance _instance,
1665 uint32_t *pPhysicalDeviceGroupCount,
1666 VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties)
1667 {
1668 struct vn_instance *instance = vn_instance_from_handle(_instance);
1669
1670 VkResult result =
1671 vn_instance_enumerate_physical_devices_and_groups(instance);
1672 if (result != VK_SUCCESS)
1673 return vn_error(instance, result);
1674
1675 VK_OUTARRAY_MAKE_TYPED(VkPhysicalDeviceGroupProperties, out,
1676 pPhysicalDeviceGroupProperties,
1677 pPhysicalDeviceGroupCount);
1678 for (uint32_t i = 0; i < instance->physical_device.group_count; i++) {
1679 vk_outarray_append_typed(VkPhysicalDeviceGroupProperties, &out, props) {
1680 *props = instance->physical_device.groups[i];
1681 }
1682 }
1683
1684 return vk_outarray_status(&out);
1685 }
1686
1687 VkResult
1688 vn_EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
1689 const char *pLayerName,
1690 uint32_t *pPropertyCount,
1691 VkExtensionProperties *pProperties)
1692 {
1693 struct vn_physical_device *physical_dev =
1694 vn_physical_device_from_handle(physicalDevice);
1695
1696 if (pLayerName)
1697 return vn_error(physical_dev->instance, VK_ERROR_LAYER_NOT_PRESENT);
1698
1699 VK_OUTARRAY_MAKE_TYPED(VkExtensionProperties, out, pProperties,
1700 pPropertyCount);
1701 for (uint32_t i = 0; i < VK_DEVICE_EXTENSION_COUNT; i++) {
1702 if (physical_dev->base.base.supported_extensions.extensions[i]) {
1703 vk_outarray_append_typed(VkExtensionProperties, &out, prop) {
1704 *prop = vk_device_extensions[i];
1705 prop->specVersion = physical_dev->extension_spec_versions[i];
1706 }
1707 }
1708 }
1709
1710 return vk_outarray_status(&out);
1711 }
1712
1713 VkResult
1714 vn_EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,
1715 uint32_t *pPropertyCount,
1716 VkLayerProperties *pProperties)
1717 {
1718 *pPropertyCount = 0;
1719 return VK_SUCCESS;
1720 }
1721
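/* The enumeration entrypoints above rely on the vk_outarray helpers to
 * implement Vulkan's count/fill contract: when the output array is NULL
 * only the count is written back, otherwise up to *pCount elements are
 * filled and VK_INCOMPLETE is returned if the array was too small. A
 * condensed sketch over a hypothetical int source:
 */
static inline VkResult
example_enumerate_ints(const int *src, uint32_t src_count,
                       uint32_t *pCount, int *pValues)
{
   VK_OUTARRAY_MAKE_TYPED(int, out, pValues, pCount);
   for (uint32_t i = 0; i < src_count; i++) {
      vk_outarray_append_typed(int, &out, dst) {
         *dst = src[i];
      }
   }
   return vk_outarray_status(&out);
}
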
1722 static struct vn_format_properties_entry *
1723 vn_physical_device_get_format_properties(
1724 struct vn_physical_device *physical_dev, VkFormat format)
1725 {
1726 return util_sparse_array_get(&physical_dev->format_properties, format);
1727 }
1728
1729 static void
1730 vn_physical_device_add_format_properties(
1731 struct vn_physical_device *physical_dev,
1732 struct vn_format_properties_entry *entry,
1733 const VkFormatProperties *props,
1734 const VkFormatProperties3 *props3)
1735 {
1736 simple_mtx_lock(&physical_dev->format_update_mutex);
1737 if (!entry->valid) {
1738 entry->properties = *props;
1739 entry->valid = true;
1740 }
1741
1742 if (props3 && !entry->props3_valid) {
1743 entry->properties3 = *props3;
1744 entry->props3_valid = true;
1745 }
1746
1747 simple_mtx_unlock(&physical_dev->format_update_mutex);
1748 }
1749
1750 void
1751 vn_GetPhysicalDeviceQueueFamilyProperties2(
1752 VkPhysicalDevice physicalDevice,
1753 uint32_t *pQueueFamilyPropertyCount,
1754 VkQueueFamilyProperties2 *pQueueFamilyProperties)
1755 {
1756 struct vn_physical_device *physical_dev =
1757 vn_physical_device_from_handle(physicalDevice);
1758
1759 VK_OUTARRAY_MAKE_TYPED(VkQueueFamilyProperties2, out,
1760 pQueueFamilyProperties, pQueueFamilyPropertyCount);
1761 for (uint32_t i = 0; i < physical_dev->queue_family_count; i++) {
1762 vk_outarray_append_typed(VkQueueFamilyProperties2, &out, props) {
1763 *props = physical_dev->queue_family_properties[i];
1764 }
1765 }
1766 }
1767
1768 void
1769 vn_GetPhysicalDeviceMemoryProperties2(
1770 VkPhysicalDevice physicalDevice,
1771 VkPhysicalDeviceMemoryProperties2 *pMemoryProperties)
1772 {
1773 struct vn_physical_device *physical_dev =
1774 vn_physical_device_from_handle(physicalDevice);
1775 struct vn_ring *ring = physical_dev->instance->ring.ring;
1776 VkPhysicalDeviceMemoryBudgetPropertiesEXT *memory_budget = NULL;
1777
1778 /* Don't waste time searching for unsupported structs. */
1779 if (physical_dev->base.base.supported_extensions.EXT_memory_budget) {
1780 memory_budget =
1781 vk_find_struct(pMemoryProperties->pNext,
1782 PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT);
1783 }
1784
1785 /* When the app queries invariant memory properties, we return a cached
1786 * copy. For dynamic properties, we must query the server.
1787 */
1788 if (memory_budget) {
1789 vn_call_vkGetPhysicalDeviceMemoryProperties2(ring, physicalDevice,
1790 pMemoryProperties);
1791 }
1792
1793 /* Even when we query the server for memory properties, we must still
1794 * overwrite the invariant memory properties returned from the server with
1795 * our cached version. Our cached version may differ from the server's
1796 * version due to workarounds.
1797 */
1798 pMemoryProperties->memoryProperties = physical_dev->memory_properties;
1799 }
1800
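/* A sketch of the decision above (hypothetical helper): only the dynamic
 * memory budget query forces a renderer round trip; the invariant
 * properties are always served from the local cache afterwards.
 */
static inline bool
example_needs_renderer_roundtrip(
   const VkPhysicalDeviceMemoryProperties2 *props, bool has_ext_memory_budget)
{
   return has_ext_memory_budget &&
          vk_find_struct_const(props->pNext,
                               PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT) !=
             NULL;
}
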
1801 void
1802 vn_GetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice,
1803 VkFormat format,
1804 VkFormatProperties2 *pFormatProperties)
1805 {
1806 struct vn_physical_device *physical_dev =
1807 vn_physical_device_from_handle(physicalDevice);
1808 struct vn_ring *ring = physical_dev->instance->ring.ring;
1809
1810    /* VkFormatProperties3 is cached only when it is the sole struct in pNext */
1811 VkFormatProperties3 *props3 = NULL;
1812 if (pFormatProperties->pNext) {
1813 const VkBaseOutStructure *base = pFormatProperties->pNext;
1814 if (base->sType == VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3 &&
1815 base->pNext == NULL) {
1816 props3 = (VkFormatProperties3 *)base;
1817 }
1818 }
1819
1820 struct vn_format_properties_entry *entry = NULL;
1821 if (!pFormatProperties->pNext || props3) {
1822 entry = vn_physical_device_get_format_properties(physical_dev, format);
1823 if (entry->valid) {
1824 const bool has_valid_props3 = props3 && entry->props3_valid;
1825 if (has_valid_props3)
1826 *props3 = entry->properties3;
1827
1828 /* Make the host call if our cache doesn't have props3 but the app
1829 * now requests it.
1830 */
1831 if (!props3 || has_valid_props3) {
1832 pFormatProperties->formatProperties = entry->properties;
1833 pFormatProperties->pNext = props3;
1834 return;
1835 }
1836 }
1837 }
1838
1839 vn_call_vkGetPhysicalDeviceFormatProperties2(ring, physicalDevice, format,
1840 pFormatProperties);
1841
1842 if (entry) {
1843 vn_physical_device_add_format_properties(
1844 physical_dev, entry, &pFormatProperties->formatProperties, props3);
1845 }
1846 }
1847
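/* A sketch of the cacheability test used above: a format query result is
 * cached only when the pNext chain is empty or holds exactly one
 * VkFormatProperties3.
 */
static inline bool
example_format_query_is_cacheable(const VkFormatProperties2 *props)
{
   const VkBaseOutStructure *base = props->pNext;
   return !base || (base->sType == VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3 &&
                    !base->pNext);
}
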
1848 struct vn_physical_device_image_format_info {
1849 VkPhysicalDeviceImageFormatInfo2 format;
1850 VkPhysicalDeviceExternalImageFormatInfo external;
1851 VkImageFormatListCreateInfo list;
1852 VkImageStencilUsageCreateInfo stencil_usage;
1853 VkPhysicalDeviceImageDrmFormatModifierInfoEXT modifier;
1854 };
1855
1856 static const VkPhysicalDeviceImageFormatInfo2 *
1857 vn_physical_device_fix_image_format_info(
1858 const VkPhysicalDeviceImageFormatInfo2 *info,
1859 const VkExternalMemoryHandleTypeFlagBits renderer_handle_type,
1860 struct vn_physical_device_image_format_info *local_info)
1861 {
1862 local_info->format = *info;
1863 VkBaseOutStructure *dst = (void *)&local_info->format;
1864
1865 bool is_ahb = false;
1866 bool has_format_list = false;
1867 /* we should generate deep copy functions... */
1868 vk_foreach_struct_const(src, info->pNext) {
1869 void *pnext = NULL;
1870 switch (src->sType) {
1871 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO:
1872 memcpy(&local_info->external, src, sizeof(local_info->external));
1873 is_ahb =
1874 local_info->external.handleType ==
1875 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
1876 local_info->external.handleType = renderer_handle_type;
1877 pnext = &local_info->external;
1878 break;
1879 case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
1880 has_format_list = true;
1881 memcpy(&local_info->list, src, sizeof(local_info->list));
1882 pnext = &local_info->list;
1883 break;
1884 case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
1885 memcpy(&local_info->stencil_usage, src,
1886 sizeof(local_info->stencil_usage));
1887 pnext = &local_info->stencil_usage;
1888 break;
1889 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT:
1890 memcpy(&local_info->modifier, src, sizeof(local_info->modifier));
1891 pnext = &local_info->modifier;
1892 break;
1893 default:
1894 break;
1895 }
1896
1897 if (pnext) {
1898 dst->pNext = pnext;
1899 dst = pnext;
1900 }
1901 }
1902
1903 if (is_ahb) {
1904 assert(local_info->format.tiling !=
1905 VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT);
1906 local_info->format.tiling = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT;
1907 if (!vn_android_get_drm_format_modifier_info(&local_info->format,
1908 &local_info->modifier))
1909 return NULL;
1910
1911 dst->pNext = (void *)&local_info->modifier;
1912 dst = dst->pNext;
1913
1914 if ((info->flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT) &&
1915 (!has_format_list || !local_info->list.viewFormatCount)) {
1916 /* 12.3. Images
1917 *
1918 * If tiling is VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT and flags
1919 * contains VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, then the pNext chain
1920 * must include a VkImageFormatListCreateInfo structure with non-zero
1921 * viewFormatCount.
1922 */
1923 VkImageFormatListCreateInfo *list = &local_info->list;
1924 uint32_t vcount = 0;
1925 const VkFormat *vformats =
1926 vn_android_format_to_view_formats(info->format, &vcount);
1927 if (!vformats) {
1928 /* local_info persists through the image format query call */
1929 vformats = &local_info->format.format;
1930 vcount = 1;
1931 }
1932
1933 list->sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO;
1934 list->viewFormatCount = vcount;
1935 list->pViewFormats = vformats;
1936
1937 if (!has_format_list) {
1938 dst->pNext = (void *)list;
1939 dst = dst->pNext;
1940 }
1941 }
1942 }
1943
1944 dst->pNext = NULL;
1945
1946 return &local_info->format;
1947 }
1948
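/* The deep copy above rebuilds the pNext chain one struct at a time. The
 * append step, shown standalone (hypothetical helper):
 */
static inline void
example_chain_append(VkBaseOutStructure **tail, void *copied_struct)
{
   /* link the local copy behind the current tail and advance the tail */
   (*tail)->pNext = copied_struct;
   *tail = copied_struct;
}
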
1949 static uint32_t
1950 vn_modifier_plane_count(struct vn_physical_device *physical_dev,
1951 VkFormat format,
1952 uint64_t modifier)
1953 {
1954 VkPhysicalDevice physical_dev_handle =
1955 vn_physical_device_to_handle(physical_dev);
1956
1957 VkDrmFormatModifierPropertiesListEXT modifier_list = {
1958 .sType = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
1959 .pDrmFormatModifierProperties = NULL,
1960 };
1961 VkFormatProperties2 format_props = {
1962 .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
1963 .pNext = &modifier_list,
1964 };
1965 vn_GetPhysicalDeviceFormatProperties2(physical_dev_handle, format,
1966 &format_props);
1967
1968 STACK_ARRAY(VkDrmFormatModifierPropertiesEXT, modifier_props,
1969 modifier_list.drmFormatModifierCount);
1970 if (!modifier_props)
1971 return 0;
1972 modifier_list.pDrmFormatModifierProperties = modifier_props;
1973
1974 vn_GetPhysicalDeviceFormatProperties2(physical_dev_handle, format,
1975 &format_props);
1976
1977 uint32_t plane_count = 0;
1978 for (uint32_t i = 0; i < modifier_list.drmFormatModifierCount; i++) {
1979 const struct VkDrmFormatModifierPropertiesEXT *props =
1980 &modifier_list.pDrmFormatModifierProperties[i];
1981 if (modifier == props->drmFormatModifier) {
1982 plane_count = props->drmFormatModifierPlaneCount;
1983 break;
1984 }
1985 }
1986
1987 STACK_ARRAY_FINISH(modifier_props);
1988 return plane_count;
1989 }
1990
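/* A sketch of how the helper above feeds the wsi gate in
 * vn_GetPhysicalDeviceImageFormatProperties2() below (hypothetical
 * wrapper): only modifiers with exactly one memory plane are accepted when
 * multi-plane wsi modifiers are not enabled.
 */
static inline bool
example_wsi_accepts_modifier(struct vn_physical_device *physical_dev,
                             VkFormat format, uint64_t modifier)
{
   return vn_modifier_plane_count(physical_dev, format, modifier) == 1;
}
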
1991 static bool
1992 vn_image_get_image_format_key(
1993 struct vn_physical_device *physical_dev,
1994 const VkPhysicalDeviceImageFormatInfo2 *format_info,
1995 const VkImageFormatProperties2 *format_props,
1996 uint8_t *key)
1997 {
1998 struct mesa_sha1 sha1_ctx;
1999
2000 if (!physical_dev->image_format_cache.ht)
2001 return false;
2002
2003 _mesa_sha1_init(&sha1_ctx);
2004
2005 /* VUID-VkPhysicalDeviceImageFormatInfo2-pNext-pNext
2006 * Each pNext member of any structure (including this one) in the pNext
2007 * chain must be either NULL or a pointer to a valid instance of
2008 * VkImageCompressionControlEXT, VkImageFormatListCreateInfo,
2009 * VkImageStencilUsageCreateInfo, VkOpticalFlowImageFormatInfoNV,
2010 * VkPhysicalDeviceExternalImageFormatInfo,
2011 * VkPhysicalDeviceImageDrmFormatModifierInfoEXT,
2012 * VkPhysicalDeviceImageViewImageFormatInfoEXT, or VkVideoProfileListInfoKHR
2013 *
2014 * Exclude VkOpticalFlowImageFormatInfoNV and VkVideoProfileListInfoKHR
2015 */
2016 if (format_info->pNext) {
2017 vk_foreach_struct_const(src, format_info->pNext) {
2018 switch (src->sType) {
2019 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT: {
2020 struct VkImageCompressionControlEXT *compression_control =
2021 (struct VkImageCompressionControlEXT *)src;
2022 _mesa_sha1_update(&sha1_ctx, &compression_control->flags,
2023 sizeof(VkImageCompressionFlagsEXT));
2024 _mesa_sha1_update(
2025 &sha1_ctx, compression_control->pFixedRateFlags,
2026 sizeof(uint32_t) *
2027 compression_control->compressionControlPlaneCount);
2028 break;
2029 }
2030 case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO: {
2031 struct VkImageFormatListCreateInfo *format_list =
2032 (struct VkImageFormatListCreateInfo *)src;
2033 _mesa_sha1_update(
2034 &sha1_ctx, format_list->pViewFormats,
2035 sizeof(VkFormat) * format_list->viewFormatCount);
2036
2037 break;
2038 }
2039 case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO: {
2040 struct VkImageStencilUsageCreateInfo *stencil_usage =
2041 (struct VkImageStencilUsageCreateInfo *)src;
2042 _mesa_sha1_update(&sha1_ctx, &stencil_usage->stencilUsage,
2043 sizeof(VkImageUsageFlags));
2044 break;
2045 }
2046 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO: {
2047 struct VkPhysicalDeviceExternalImageFormatInfo *ext_image =
2048 (struct VkPhysicalDeviceExternalImageFormatInfo *)src;
2049 _mesa_sha1_update(&sha1_ctx, &ext_image->handleType,
2050 sizeof(VkExternalMemoryHandleTypeFlagBits));
2051 break;
2052 }
2053 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT: {
2054 struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT
2055 *modifier_info =
2056 (struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT *)src;
2057 _mesa_sha1_update(&sha1_ctx, &modifier_info->drmFormatModifier,
2058 sizeof(uint64_t));
2059 if (modifier_info->sharingMode == VK_SHARING_MODE_CONCURRENT) {
2060 _mesa_sha1_update(
2061 &sha1_ctx, modifier_info->pQueueFamilyIndices,
2062 sizeof(uint32_t) * modifier_info->queueFamilyIndexCount);
2063 }
2064 break;
2065 }
2066 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT: {
2067 struct VkPhysicalDeviceImageViewImageFormatInfoEXT *view_image =
2068 (struct VkPhysicalDeviceImageViewImageFormatInfoEXT *)src;
2069 _mesa_sha1_update(&sha1_ctx, &view_image->imageViewType,
2070 sizeof(VkImageViewType));
2071 break;
2072 }
2073 default:
2074 physical_dev->image_format_cache.debug.cache_skip_count++;
2075 return false;
2076 }
2077 }
2078 }
2079
2080 /* Hash pImageFormatProperties pNext as well since some of them are
2081 * optional in that they can be attached without a corresponding pNext
2082 * in pImageFormatInfo.
2083 *
2084 * VUID-VkImageFormatProperties2-pNext-pNext
2085 * Each pNext member of any structure (including this one) in the pNext
2086 * chain must be either NULL or a pointer to a valid instance of
2087 * VkAndroidHardwareBufferUsageANDROID, VkExternalImageFormatProperties,
2088 * VkFilterCubicImageViewImageFormatPropertiesEXT,
2089 * VkHostImageCopyDevicePerformanceQueryEXT,
2090 * VkImageCompressionPropertiesEXT,
2091 * VkSamplerYcbcrConversionImageFormatProperties, or
2092 * VkTextureLODGatherFormatPropertiesAMD
2093 *
2094 * VkAndroidHardwareBufferUsageANDROID is handled outside of the cache.
2095 * VkFilterCubicImageViewImageFormatPropertiesEXT,
2096     * VkHostImageCopyDevicePerformanceQueryEXT, and
2098     * VkTextureLODGatherFormatPropertiesAMD are not supported.
2099 */
2100 if (format_props->pNext) {
2101 vk_foreach_struct_const(src, format_props->pNext) {
2102 switch (src->sType) {
2103 case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES:
2104 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT:
2105 case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES:
2106 _mesa_sha1_update(&sha1_ctx, &src->sType,
2107 sizeof(VkStructureType));
2108 break;
2109 default:
2110 physical_dev->image_format_cache.debug.cache_skip_count++;
2111 return false;
2112 }
2113 }
2114 }
2115
2116 static const size_t format_info_2_hash_block_size =
2117 sizeof(VkFormat) + sizeof(VkImageType) + sizeof(VkImageTiling) +
2118 sizeof(VkImageUsageFlags) + sizeof(VkImageCreateFlags);
2119
2120 _mesa_sha1_update(&sha1_ctx, &format_info->format,
2121 format_info_2_hash_block_size);
2122 _mesa_sha1_final(&sha1_ctx, key);
2123
2124 return true;
2125 }
2126
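/* The final block hash above covers format, type, tiling, usage and flags
 * as one contiguous byte range, relying on their adjacent layout in
 * VkPhysicalDeviceImageFormatInfo2. A layout-independent equivalent would
 * hash each member separately (sketch, hypothetical helper):
 */
static inline void
example_hash_format_info_members(
   struct mesa_sha1 *ctx, const VkPhysicalDeviceImageFormatInfo2 *info)
{
   _mesa_sha1_update(ctx, &info->format, sizeof(info->format));
   _mesa_sha1_update(ctx, &info->type, sizeof(info->type));
   _mesa_sha1_update(ctx, &info->tiling, sizeof(info->tiling));
   _mesa_sha1_update(ctx, &info->usage, sizeof(info->usage));
   _mesa_sha1_update(ctx, &info->flags, sizeof(info->flags));
}
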
2127 static bool
2128 vn_image_init_format_from_cache(
2129 struct vn_physical_device *physical_dev,
2130 struct VkImageFormatProperties2 *pImageFormatProperties,
2131 VkResult *cached_result,
2132 uint8_t *key)
2133 {
2134 struct vn_image_format_properties_cache *cache =
2135 &physical_dev->image_format_cache;
2136
2137 assert(cache->ht);
2138
2139 simple_mtx_lock(&cache->mutex);
2140 struct hash_entry *hash_entry = _mesa_hash_table_search(cache->ht, key);
2141 if (hash_entry) {
2142 struct vn_image_format_cache_entry *cache_entry = hash_entry->data;
2143
2144 /* Copy the properties even if the cached_result is not supported.
2145 * Per spec 1.3.275 "If the combination of parameters to
2146 * vkGetPhysicalDeviceImageFormatProperties2 is not supported by the
2147 * implementation for use in vkCreateImage, then all members of
2148 * imageFormatProperties will be filled with zero."
2149 */
2150 pImageFormatProperties->imageFormatProperties =
2151 cache_entry->properties.format.imageFormatProperties;
2152 *cached_result = cache_entry->properties.cached_result;
2153
2154 if (pImageFormatProperties->pNext) {
2155 vk_foreach_struct_const(src, pImageFormatProperties->pNext) {
2156 switch (src->sType) {
2157 case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES: {
2158 struct VkExternalImageFormatProperties *ext_image =
2159 (struct VkExternalImageFormatProperties *)src;
2160 ext_image->externalMemoryProperties =
2161 cache_entry->properties.ext_image.externalMemoryProperties;
2162 break;
2163 }
2164 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT: {
2165 struct VkImageCompressionPropertiesEXT *compression =
2166 (struct VkImageCompressionPropertiesEXT *)src;
2167 compression->imageCompressionFlags =
2168 cache_entry->properties.compression.imageCompressionFlags;
2169 compression->imageCompressionFixedRateFlags =
2170 cache_entry->properties.compression
2171 .imageCompressionFixedRateFlags;
2172 break;
2173 }
2174 case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES: {
2175 struct VkSamplerYcbcrConversionImageFormatProperties
2176 *ycbcr_conversion =
2177 (struct VkSamplerYcbcrConversionImageFormatProperties *)
2178 src;
2179 ycbcr_conversion->combinedImageSamplerDescriptorCount =
2180 cache_entry->properties.ycbcr_conversion
2181 .combinedImageSamplerDescriptorCount;
2182 break;
2183 }
2184 default:
2185 unreachable("unexpected format props pNext");
2186 }
2187 }
2188 }
2189
2190 list_move_to(&cache_entry->head, &cache->lru);
2191 p_atomic_inc(&cache->debug.cache_hit_count);
2192 } else {
2193 p_atomic_inc(&cache->debug.cache_miss_count);
2194 }
2195 simple_mtx_unlock(&cache->mutex);
2196
2197 return !!hash_entry;
2198 }
2199
2200 static void
2201 vn_image_store_format_in_cache(
2202 struct vn_physical_device *physical_dev,
2203 uint8_t *key,
2204 struct VkImageFormatProperties2 *pImageFormatProperties,
2205 VkResult cached_result)
2206 {
2207 const VkAllocationCallbacks *alloc =
2208 &physical_dev->base.base.instance->alloc;
2209 struct vn_image_format_properties_cache *cache =
2210 &physical_dev->image_format_cache;
2211 struct vn_image_format_cache_entry *cache_entry = NULL;
2212
2213 assert(cache->ht);
2214
2215 simple_mtx_lock(&cache->mutex);
2216
2217    /* Check if another thread added the entry before we took the lock */
2218 if (_mesa_hash_table_search(cache->ht, key)) {
2219 simple_mtx_unlock(&cache->mutex);
2220 return;
2221 }
2222
2223 if (_mesa_hash_table_num_entries(cache->ht) ==
2224 IMAGE_FORMAT_CACHE_MAX_ENTRIES) {
2225       /* Evict the least recently used entry and reuse it for the new one */
2226 cache_entry = list_last_entry(&cache->lru,
2227 struct vn_image_format_cache_entry, head);
2228
2229 _mesa_hash_table_remove_key(cache->ht, cache_entry->key);
2230 list_del(&cache_entry->head);
2231 } else {
2232 cache_entry = vk_zalloc(alloc, sizeof(*cache_entry), VN_DEFAULT_ALIGN,
2233 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2234 if (!cache_entry) {
2235 simple_mtx_unlock(&cache->mutex);
2236 return;
2237 }
2238 }
2239
2240 if (pImageFormatProperties->pNext) {
2241 vk_foreach_struct_const(src, pImageFormatProperties->pNext) {
2242 switch (src->sType) {
2243 case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES: {
2244 cache_entry->properties.ext_image =
2245 *((struct VkExternalImageFormatProperties *)src);
2246 break;
2247 }
2248 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT: {
2249 cache_entry->properties.compression =
2250 *((struct VkImageCompressionPropertiesEXT *)src);
2251 break;
2252 }
2253 case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES: {
2254 cache_entry->properties.ycbcr_conversion =
2255 *((struct VkSamplerYcbcrConversionImageFormatProperties *)src);
2256 break;
2257 }
2258 default:
2259 unreachable("unexpected format props pNext");
2260 }
2261 }
2262 }
2263
2264 cache_entry->properties.format = *pImageFormatProperties;
2265 cache_entry->properties.cached_result = cached_result;
2266
2267 memcpy(cache_entry->key, key, SHA1_DIGEST_LENGTH);
2268
2269 _mesa_hash_table_insert(cache->ht, cache_entry->key, cache_entry);
2270 list_add(&cache_entry->head, &cache->lru);
2271
2272 simple_mtx_unlock(&cache->mutex);
2273 }
2274
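/* A condensed trace of the LRU discipline shared by the lookup and store
 * paths above (list helpers are from util/list.h): a hit moves the entry
 * to the front, a store adds at the front, and eviction takes from the
 * back once IMAGE_FORMAT_CACHE_MAX_ENTRIES is reached. Hypothetical
 * helpers:
 */
static inline void
example_lru_touch(struct list_head *lru, struct list_head *entry)
{
   /* most recently used entries live at the head of the list */
   list_move_to(entry, lru);
}

static inline struct vn_image_format_cache_entry *
example_lru_evict(struct list_head *lru)
{
   /* the tail is the least recently used entry */
   return list_last_entry(lru, struct vn_image_format_cache_entry, head);
}
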
2275 VkResult
2276 vn_GetPhysicalDeviceImageFormatProperties2(
2277 VkPhysicalDevice physicalDevice,
2278 const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
2279 VkImageFormatProperties2 *pImageFormatProperties)
2280 {
2281 struct vn_physical_device *physical_dev =
2282 vn_physical_device_from_handle(physicalDevice);
2283 struct vn_ring *ring = physical_dev->instance->ring.ring;
2284 const VkExternalMemoryHandleTypeFlagBits renderer_handle_type =
2285 physical_dev->external_memory.renderer_handle_type;
2286 const VkExternalMemoryHandleTypeFlags supported_handle_types =
2287 physical_dev->external_memory.supported_handle_types;
2288
2289 const struct wsi_image_create_info *wsi_info = vk_find_struct_const(
2290 pImageFormatInfo->pNext, WSI_IMAGE_CREATE_INFO_MESA);
2291 const VkPhysicalDeviceImageDrmFormatModifierInfoEXT *modifier_info =
2292 vk_find_struct_const(
2293 pImageFormatInfo->pNext,
2294 PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT);
2295
2296 /* force common wsi into choosing DRM_FORMAT_MOD_LINEAR or else fall back
2297 * to the legacy path, for which Venus also forces LINEAR for wsi images.
2298 */
2299 if (VN_PERF(NO_TILED_WSI_IMAGE)) {
2300 if (wsi_info && modifier_info &&
2301 modifier_info->drmFormatModifier != DRM_FORMAT_MOD_LINEAR) {
2302 if (VN_DEBUG(WSI)) {
2303 vn_log(physical_dev->instance,
2304 "rejecting non-linear wsi image format modifier %" PRIu64,
2305 modifier_info->drmFormatModifier);
2306 }
2307 return vn_error(physical_dev->instance,
2308 VK_ERROR_FORMAT_NOT_SUPPORTED);
2309 }
2310 }
2311
2312 /* Integration with Xwayland (using virgl-backed gbm) may only use
2313 * modifiers for which `memory_plane_count == format_plane_count` with the
2314 * distinction defined in the spec for VkDrmFormatModifierPropertiesEXT.
2315 *
2316 * The spec also states that:
2317 * If an image is non-linear, then the partition of the image’s memory
2318 * into memory planes is implementation-specific and may be unrelated to
2319 * the partition of the image’s content into format planes.
2320 *
2321 * A modifier like I915_FORMAT_MOD_Y_TILED_CCS with an extra CCS
2322 * metadata-only _memory_ plane is not supported by virgl. In general,
2323 * since the partition of format planes into memory planes (even when their
2324     * counts match) cannot be reliably known, the safest option is to limit
2325 * both plane counts to 1 while virgl may be involved.
2326 */
2327 if (wsi_info && modifier_info &&
2328 !physical_dev->instance->enable_wsi_multi_plane_modifiers &&
2329 modifier_info->drmFormatModifier != DRM_FORMAT_MOD_LINEAR) {
2330 const uint32_t plane_count =
2331 vn_modifier_plane_count(physical_dev, pImageFormatInfo->format,
2332 modifier_info->drmFormatModifier);
2333 if (plane_count != 1) {
2334 if (VN_DEBUG(WSI)) {
2335 vn_log(physical_dev->instance,
2336 "rejecting multi-plane (%u) modifier %" PRIu64
2337 " for wsi image with format %u",
2338 plane_count, modifier_info->drmFormatModifier,
2339 pImageFormatInfo->format);
2340 }
2341 return vn_error(physical_dev->instance,
2342 VK_ERROR_FORMAT_NOT_SUPPORTED);
2343 }
2344 }
2345
2346 const VkPhysicalDeviceExternalImageFormatInfo *external_info =
2347 vk_find_struct_const(pImageFormatInfo->pNext,
2348 PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO);
2349 if (external_info && !external_info->handleType)
2350 external_info = NULL;
2351
2352 struct vn_physical_device_image_format_info local_info;
2353 if (external_info) {
2354 if (!(external_info->handleType & supported_handle_types)) {
2355 return vn_error(physical_dev->instance,
2356 VK_ERROR_FORMAT_NOT_SUPPORTED);
2357 }
2358
2359 /* Check the image tiling against the renderer handle type:
2360 * - No need to check for AHB since the tiling will either be forwarded
2361 * or overwritten based on the renderer external memory type.
2362     * - For opaque fd and dma_buf fd handle types, pass the tiling through
2363     *   when the renderer external memory is dma_buf. Then we can avoid
2364 * reconstructing the structs to support drm format modifier tiling
2365 * like how we support AHB.
2366 */
2367 if (external_info->handleType !=
2368 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) {
2369 if (renderer_handle_type ==
2370 VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT &&
2371 pImageFormatInfo->tiling !=
2372 VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
2373 return vn_error(physical_dev->instance,
2374 VK_ERROR_FORMAT_NOT_SUPPORTED);
2375 }
2376 }
2377
2378 if (external_info->handleType != renderer_handle_type) {
2379 pImageFormatInfo = vn_physical_device_fix_image_format_info(
2380 pImageFormatInfo, renderer_handle_type, &local_info);
2381 if (!pImageFormatInfo) {
2382 return vn_error(physical_dev->instance,
2383 VK_ERROR_FORMAT_NOT_SUPPORTED);
2384 }
2385 }
2386 }
2387
2388 /* Since venus-protocol doesn't pass the wsi_image_create_info struct, we
2389 * must remove the ALIAS_BIT here and in vn_wsi_create_image().
2390 * ANV rejects the bit for external+nonlinear images that don't have WSI
2391 * info chained.
2392 */
2393 if (wsi_info && physical_dev->renderer_driver_id ==
2394 VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA) {
2395 if (pImageFormatInfo != &local_info.format) {
2396 local_info.format = *pImageFormatInfo;
2397 pImageFormatInfo = &local_info.format;
2398 }
2399 local_info.format.flags &= ~VK_IMAGE_CREATE_ALIAS_BIT;
2400 }
2401
2402 /* Check if image format props is in the cache. */
2403 uint8_t key[SHA1_DIGEST_LENGTH] = { 0 };
2404 const bool cacheable = vn_image_get_image_format_key(
2405 physical_dev, pImageFormatInfo, pImageFormatProperties, key);
2406
2407 VkResult result = VK_SUCCESS;
2408 if (!(cacheable &&
2409 vn_image_init_format_from_cache(physical_dev, pImageFormatProperties,
2410 &result, key))) {
2411 result = vn_call_vkGetPhysicalDeviceImageFormatProperties2(
2412 ring, physicalDevice, pImageFormatInfo, pImageFormatProperties);
2413
2414 /* If cacheable, cache successful and unsupported results. */
2415 if (cacheable &&
2416 (result == VK_SUCCESS || result == VK_ERROR_FORMAT_NOT_SUPPORTED ||
2417 result == VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR)) {
2418 vn_image_store_format_in_cache(physical_dev, key,
2419 pImageFormatProperties, result);
2420 }
2421 }
2422
2423 if (result != VK_SUCCESS || !external_info)
2424 return vn_result(physical_dev->instance, result);
2425
2426 if (external_info->handleType ==
2427 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) {
2428 VkAndroidHardwareBufferUsageANDROID *ahb_usage =
2429 vk_find_struct(pImageFormatProperties->pNext,
2430 ANDROID_HARDWARE_BUFFER_USAGE_ANDROID);
2431 if (ahb_usage) {
2432 ahb_usage->androidHardwareBufferUsage = vk_image_usage_to_ahb_usage(
2433 pImageFormatInfo->flags, pImageFormatInfo->usage);
2434 }
2435
2436 /* AHBs with mipmap usage will ignore this property */
2437 pImageFormatProperties->imageFormatProperties.maxMipLevels = 1;
2438 }
2439
2440 VkExternalImageFormatProperties *img_props = vk_find_struct(
2441 pImageFormatProperties->pNext, EXTERNAL_IMAGE_FORMAT_PROPERTIES);
2442 if (!img_props)
2443 return VK_SUCCESS;
2444
2445 VkExternalMemoryProperties *mem_props =
2446 &img_props->externalMemoryProperties;
2447
2448 if (renderer_handle_type ==
2449 VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT &&
2450 !physical_dev->instance->renderer->info.has_dma_buf_import) {
2451 mem_props->externalMemoryFeatures &=
2452 ~VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
2453 }
2454
2455 if (external_info->handleType ==
2456 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) {
2457       /* An AHB-backed image requires the renderer to support the import bit */
2458 if (!(mem_props->externalMemoryFeatures &
2459 VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT))
2460 return vn_error(physical_dev->instance,
2461 VK_ERROR_FORMAT_NOT_SUPPORTED);
2462
2463 mem_props->externalMemoryFeatures =
2464 VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT |
2465 VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT |
2466 VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
2467 mem_props->exportFromImportedHandleTypes =
2468 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
2469 mem_props->compatibleHandleTypes =
2470 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
2471 } else {
2472 mem_props->compatibleHandleTypes = supported_handle_types;
2473 mem_props->exportFromImportedHandleTypes =
2474 (mem_props->exportFromImportedHandleTypes & renderer_handle_type)
2475 ? supported_handle_types
2476 : 0;
2477 }
2478
2479 return VK_SUCCESS;
2480 }
2481
2482 void
2483 vn_GetPhysicalDeviceSparseImageFormatProperties2(
2484 VkPhysicalDevice physicalDevice,
2485 const VkPhysicalDeviceSparseImageFormatInfo2 *pFormatInfo,
2486 uint32_t *pPropertyCount,
2487 VkSparseImageFormatProperties2 *pProperties)
2488 {
2489
2490 struct vn_physical_device *physical_dev =
2491 vn_physical_device_from_handle(physicalDevice);
2492 struct vn_ring *ring = physical_dev->instance->ring.ring;
2493 /* If VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT is not supported for the given
2494 * arguments, pPropertyCount will be set to zero upon return, and no data
2495 * will be written to pProperties.
2496 */
2497 if (physical_dev->sparse_binding_disabled) {
2498 *pPropertyCount = 0;
2499 return;
2500 }
2501
2502 /* TODO per-device cache */
2503 vn_call_vkGetPhysicalDeviceSparseImageFormatProperties2(
2504 ring, physicalDevice, pFormatInfo, pPropertyCount, pProperties);
2505 }
2506
2507 void
2508 vn_GetPhysicalDeviceExternalBufferProperties(
2509 VkPhysicalDevice physicalDevice,
2510 const VkPhysicalDeviceExternalBufferInfo *pExternalBufferInfo,
2511 VkExternalBufferProperties *pExternalBufferProperties)
2512 {
2513 struct vn_physical_device *physical_dev =
2514 vn_physical_device_from_handle(physicalDevice);
2515 struct vn_ring *ring = physical_dev->instance->ring.ring;
2516 const VkExternalMemoryHandleTypeFlagBits renderer_handle_type =
2517 physical_dev->external_memory.renderer_handle_type;
2518 const VkExternalMemoryHandleTypeFlags supported_handle_types =
2519 physical_dev->external_memory.supported_handle_types;
2520 const bool is_ahb =
2521 pExternalBufferInfo->handleType ==
2522 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
2523
2524 VkExternalMemoryProperties *props =
2525 &pExternalBufferProperties->externalMemoryProperties;
2526 if (!(pExternalBufferInfo->handleType & supported_handle_types)) {
2527 props->compatibleHandleTypes = pExternalBufferInfo->handleType;
2528 props->exportFromImportedHandleTypes = 0;
2529 props->externalMemoryFeatures = 0;
2530 return;
2531 }
2532
2533 VkPhysicalDeviceExternalBufferInfo local_info;
2534 if (pExternalBufferInfo->handleType != renderer_handle_type) {
2535 local_info = *pExternalBufferInfo;
2536 local_info.handleType = renderer_handle_type;
2537 pExternalBufferInfo = &local_info;
2538 }
2539
2540 /* TODO per-device cache */
2541 vn_call_vkGetPhysicalDeviceExternalBufferProperties(
2542 ring, physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
2543
2544 if (renderer_handle_type ==
2545 VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT &&
2546 !physical_dev->instance->renderer->info.has_dma_buf_import) {
2547 props->externalMemoryFeatures &=
2548 ~VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
2549 }
2550
2551 if (is_ahb) {
2552 props->compatibleHandleTypes =
2553 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
2554       /* An AHB-backed buffer requires the renderer to support the import
2555        * bit and to not advertise the dedicated-only bit.
2556 */
2557 if (!(props->externalMemoryFeatures &
2558 VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT) ||
2559 (props->externalMemoryFeatures &
2560 VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT)) {
2561 props->externalMemoryFeatures = 0;
2562 props->exportFromImportedHandleTypes = 0;
2563 return;
2564 }
2565 props->externalMemoryFeatures =
2566 VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT |
2567 VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
2568 props->exportFromImportedHandleTypes =
2569 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
2570 } else {
2571 props->compatibleHandleTypes = supported_handle_types;
2572 props->exportFromImportedHandleTypes =
2573 (props->exportFromImportedHandleTypes & renderer_handle_type)
2574 ? supported_handle_types
2575 : 0;
2576 }
2577 }
2578
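/* A sketch of the handle-type fixup applied in the non-AHB paths above
 * (hypothetical helper): re-export is advertised for all handle types
 * venus supports only when the renderer reported it for its own handle
 * type, otherwise for none.
 */
static inline VkExternalMemoryHandleTypeFlags
example_export_from_imported(
   VkExternalMemoryHandleTypeFlags renderer_reported_types,
   VkExternalMemoryHandleTypeFlagBits renderer_handle_type,
   VkExternalMemoryHandleTypeFlags supported_handle_types)
{
   return (renderer_reported_types & renderer_handle_type)
             ? supported_handle_types
             : 0;
}
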
2579 void
2580 vn_GetPhysicalDeviceExternalFenceProperties(
2581 VkPhysicalDevice physicalDevice,
2582 const VkPhysicalDeviceExternalFenceInfo *pExternalFenceInfo,
2583 VkExternalFenceProperties *pExternalFenceProperties)
2584 {
2585 struct vn_physical_device *physical_dev =
2586 vn_physical_device_from_handle(physicalDevice);
2587
2588 if (pExternalFenceInfo->handleType &
2589 physical_dev->external_fence_handles) {
2590 pExternalFenceProperties->compatibleHandleTypes =
2591 physical_dev->external_fence_handles;
2592 pExternalFenceProperties->exportFromImportedHandleTypes =
2593 physical_dev->external_fence_handles;
2594 pExternalFenceProperties->externalFenceFeatures =
2595 VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT |
2596 VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT;
2597 } else {
2598 pExternalFenceProperties->compatibleHandleTypes = 0;
2599 pExternalFenceProperties->exportFromImportedHandleTypes = 0;
2600 pExternalFenceProperties->externalFenceFeatures = 0;
2601 }
2602 }
2603
2604 void
2605 vn_GetPhysicalDeviceExternalSemaphoreProperties(
2606 VkPhysicalDevice physicalDevice,
2607 const VkPhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo,
2608 VkExternalSemaphoreProperties *pExternalSemaphoreProperties)
2609 {
2610 struct vn_physical_device *physical_dev =
2611 vn_physical_device_from_handle(physicalDevice);
2612
2613 const VkSemaphoreTypeCreateInfo *type_info = vk_find_struct_const(
2614 pExternalSemaphoreInfo->pNext, SEMAPHORE_TYPE_CREATE_INFO);
2615 const VkSemaphoreType sem_type =
2616 type_info ? type_info->semaphoreType : VK_SEMAPHORE_TYPE_BINARY;
2617 const VkExternalSemaphoreHandleTypeFlags valid_handles =
2618 sem_type == VK_SEMAPHORE_TYPE_BINARY
2619 ? physical_dev->external_binary_semaphore_handles
2620 : physical_dev->external_timeline_semaphore_handles;
2621 if (pExternalSemaphoreInfo->handleType & valid_handles) {
2622 pExternalSemaphoreProperties->compatibleHandleTypes = valid_handles;
2623 pExternalSemaphoreProperties->exportFromImportedHandleTypes =
2624 valid_handles;
2625 pExternalSemaphoreProperties->externalSemaphoreFeatures =
2626 VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT |
2627 VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT;
2628 } else {
2629 pExternalSemaphoreProperties->compatibleHandleTypes = 0;
2630 pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0;
2631 pExternalSemaphoreProperties->externalSemaphoreFeatures = 0;
2632 }
2633 }
2634
2635 VkResult
2636 vn_GetPhysicalDeviceCalibrateableTimeDomainsEXT(
2637 VkPhysicalDevice physicalDevice,
2638 uint32_t *pTimeDomainCount,
2639 VkTimeDomainEXT *pTimeDomains)
2640 {
2641 struct vn_physical_device *physical_dev =
2642 vn_physical_device_from_handle(physicalDevice);
2643 struct vn_ring *ring = physical_dev->instance->ring.ring;
2644
2645 return vn_call_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
2646 ring, physicalDevice, pTimeDomainCount, pTimeDomains);
2647 }
2648
2649 VkResult
vn_GetPhysicalDeviceFragmentShadingRatesKHR(VkPhysicalDevice physicalDevice,uint32_t * pFragmentShadingRateCount,VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates)2650 vn_GetPhysicalDeviceFragmentShadingRatesKHR(
2651 VkPhysicalDevice physicalDevice,
2652 uint32_t *pFragmentShadingRateCount,
2653 VkPhysicalDeviceFragmentShadingRateKHR *pFragmentShadingRates)
2654 {
2655 struct vn_physical_device *physical_dev =
2656 vn_physical_device_from_handle(physicalDevice);
2657 struct vn_ring *ring = physical_dev->instance->ring.ring;
2658
2659 return vn_call_vkGetPhysicalDeviceFragmentShadingRatesKHR(
2660 ring, physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates);
2661 }
2662