/*
 * Copyright © 2022 Collabora Ltd. and Red Hat Inc.
 * SPDX-License-Identifier: MIT
 */
#include "nvk_format.h"

#include "nvk_buffer_view.h"
#include "nvk_entrypoints.h"
#include "nvk_image.h"
#include "nvk_physical_device.h"

#include "vk_enum_defines.h"
#include "vk_format.h"

#include "nvtypes.h"
#include "cl902d.h"
#include "cl9097.h"
#include "cl90c0.h"

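/* Builds one nvk_va_format table entry, keyed by VkFormat, from the class
 * 9097 (3D) SET_VERTEX_ATTRIBUTE_A encodings: the component bit widths,
 * whether to swap the R and B channels, and the numerical type.
 */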
#define VA_FMT(vk_fmt, widths, swap_rb, type) \
   [VK_FORMAT_##vk_fmt] = \
      { NV9097_SET_VERTEX_ATTRIBUTE_A_COMPONENT_BIT_WIDTHS_##widths, \
        NV9097_SET_VERTEX_ATTRIBUTE_A_SWAP_R_AND_B_##swap_rb, \
        NV9097_SET_VERTEX_ATTRIBUTE_A_NUMERICAL_TYPE_NUM_##type }

static const struct nvk_va_format nvk_vf_formats[] = {
   VA_FMT(R8_UNORM, R8, FALSE, UNORM),
   VA_FMT(R8_SNORM, R8, FALSE, SNORM),
   VA_FMT(R8_USCALED, R8, FALSE, USCALED),
   VA_FMT(R8_SSCALED, R8, FALSE, SSCALED),
   VA_FMT(R8_UINT, R8, FALSE, UINT),
   VA_FMT(R8_SINT, R8, FALSE, SINT),

   VA_FMT(R8G8_UNORM, R8_G8, FALSE, UNORM),
   VA_FMT(R8G8_SNORM, R8_G8, FALSE, SNORM),
   VA_FMT(R8G8_USCALED, R8_G8, FALSE, USCALED),
   VA_FMT(R8G8_SSCALED, R8_G8, FALSE, SSCALED),
   VA_FMT(R8G8_UINT, R8_G8, FALSE, UINT),
   VA_FMT(R8G8_SINT, R8_G8, FALSE, SINT),

   VA_FMT(R8G8B8_UNORM, R8_G8_B8, FALSE, UNORM),
   VA_FMT(R8G8B8_SNORM, R8_G8_B8, FALSE, SNORM),
   VA_FMT(R8G8B8_USCALED, R8_G8_B8, FALSE, USCALED),
   VA_FMT(R8G8B8_SSCALED, R8_G8_B8, FALSE, SSCALED),
   VA_FMT(R8G8B8_UINT, R8_G8_B8, FALSE, UINT),
   VA_FMT(R8G8B8_SINT, R8_G8_B8, FALSE, SINT),

   VA_FMT(B8G8R8_UNORM, R8_G8_B8, TRUE, UNORM),
   VA_FMT(B8G8R8_SNORM, R8_G8_B8, TRUE, SNORM),
   VA_FMT(B8G8R8_USCALED, R8_G8_B8, TRUE, USCALED),
   VA_FMT(B8G8R8_SSCALED, R8_G8_B8, TRUE, SSCALED),
   VA_FMT(B8G8R8_UINT, R8_G8_B8, TRUE, UINT),
   VA_FMT(B8G8R8_SINT, R8_G8_B8, TRUE, SINT),

   VA_FMT(R8G8B8A8_UNORM, R8_G8_B8_A8, FALSE, UNORM),
   VA_FMT(R8G8B8A8_SNORM, R8_G8_B8_A8, FALSE, SNORM),
   VA_FMT(R8G8B8A8_USCALED, R8_G8_B8_A8, FALSE, USCALED),
   VA_FMT(R8G8B8A8_SSCALED, R8_G8_B8_A8, FALSE, SSCALED),
   VA_FMT(R8G8B8A8_UINT, R8_G8_B8_A8, FALSE, UINT),
   VA_FMT(R8G8B8A8_SINT, R8_G8_B8_A8, FALSE, SINT),

   VA_FMT(B8G8R8A8_UNORM, R8_G8_B8_A8, TRUE, UNORM),
   VA_FMT(B8G8R8A8_SNORM, R8_G8_B8_A8, TRUE, SNORM),
   VA_FMT(B8G8R8A8_USCALED, R8_G8_B8_A8, TRUE, USCALED),
   VA_FMT(B8G8R8A8_SSCALED, R8_G8_B8_A8, TRUE, SSCALED),
   VA_FMT(B8G8R8A8_UINT, R8_G8_B8_A8, TRUE, UINT),
   VA_FMT(B8G8R8A8_SINT, R8_G8_B8_A8, TRUE, SINT),

   VA_FMT(A8B8G8R8_UNORM_PACK32, R8_G8_B8_A8, FALSE, UNORM),
   VA_FMT(A8B8G8R8_SNORM_PACK32, R8_G8_B8_A8, FALSE, SNORM),
   VA_FMT(A8B8G8R8_USCALED_PACK32, R8_G8_B8_A8, FALSE, USCALED),
   VA_FMT(A8B8G8R8_SSCALED_PACK32, R8_G8_B8_A8, FALSE, SSCALED),
   VA_FMT(A8B8G8R8_UINT_PACK32, R8_G8_B8_A8, FALSE, UINT),
   VA_FMT(A8B8G8R8_SINT_PACK32, R8_G8_B8_A8, FALSE, SINT),

   VA_FMT(A2R10G10B10_UNORM_PACK32, A2B10G10R10, TRUE, UNORM),
   VA_FMT(A2R10G10B10_SNORM_PACK32, A2B10G10R10, TRUE, SNORM),
   VA_FMT(A2R10G10B10_USCALED_PACK32, A2B10G10R10, TRUE, USCALED),
   VA_FMT(A2R10G10B10_SSCALED_PACK32, A2B10G10R10, TRUE, SSCALED),
   VA_FMT(A2R10G10B10_UINT_PACK32, A2B10G10R10, TRUE, UINT),
   VA_FMT(A2R10G10B10_SINT_PACK32, A2B10G10R10, TRUE, SINT),

   VA_FMT(A2B10G10R10_UNORM_PACK32, A2B10G10R10, FALSE, UNORM),
   VA_FMT(A2B10G10R10_SNORM_PACK32, A2B10G10R10, FALSE, SNORM),
   VA_FMT(A2B10G10R10_USCALED_PACK32, A2B10G10R10, FALSE, USCALED),
   VA_FMT(A2B10G10R10_SSCALED_PACK32, A2B10G10R10, FALSE, SSCALED),
   VA_FMT(A2B10G10R10_UINT_PACK32, A2B10G10R10, FALSE, UINT),
   VA_FMT(A2B10G10R10_SINT_PACK32, A2B10G10R10, FALSE, SINT),

   VA_FMT(B10G11R11_UFLOAT_PACK32, B10G11R11, FALSE, FLOAT),

   VA_FMT(R16_UNORM, R16, FALSE, UNORM),
   VA_FMT(R16_SNORM, R16, FALSE, SNORM),
   VA_FMT(R16_USCALED, R16, FALSE, USCALED),
   VA_FMT(R16_SSCALED, R16, FALSE, SSCALED),
   VA_FMT(R16_UINT, R16, FALSE, UINT),
   VA_FMT(R16_SINT, R16, FALSE, SINT),
   VA_FMT(R16_SFLOAT, R16, FALSE, FLOAT),

   VA_FMT(R16G16_UNORM, R16_G16, FALSE, UNORM),
   VA_FMT(R16G16_SNORM, R16_G16, FALSE, SNORM),
   VA_FMT(R16G16_USCALED, R16_G16, FALSE, USCALED),
   VA_FMT(R16G16_SSCALED, R16_G16, FALSE, SSCALED),
   VA_FMT(R16G16_UINT, R16_G16, FALSE, UINT),
   VA_FMT(R16G16_SINT, R16_G16, FALSE, SINT),
   VA_FMT(R16G16_SFLOAT, R16_G16, FALSE, FLOAT),

   VA_FMT(R16G16B16_UNORM, R16_G16_B16, FALSE, UNORM),
   VA_FMT(R16G16B16_SNORM, R16_G16_B16, FALSE, SNORM),
   VA_FMT(R16G16B16_USCALED, R16_G16_B16, FALSE, USCALED),
   VA_FMT(R16G16B16_SSCALED, R16_G16_B16, FALSE, SSCALED),
   VA_FMT(R16G16B16_UINT, R16_G16_B16, FALSE, UINT),
   VA_FMT(R16G16B16_SINT, R16_G16_B16, FALSE, SINT),
   VA_FMT(R16G16B16_SFLOAT, R16_G16_B16, FALSE, FLOAT),

   VA_FMT(R16G16B16A16_UNORM, R16_G16_B16_A16, FALSE, UNORM),
   VA_FMT(R16G16B16A16_SNORM, R16_G16_B16_A16, FALSE, SNORM),
   VA_FMT(R16G16B16A16_USCALED, R16_G16_B16_A16, FALSE, USCALED),
   VA_FMT(R16G16B16A16_SSCALED, R16_G16_B16_A16, FALSE, SSCALED),
   VA_FMT(R16G16B16A16_UINT, R16_G16_B16_A16, FALSE, UINT),
   VA_FMT(R16G16B16A16_SINT, R16_G16_B16_A16, FALSE, SINT),
   VA_FMT(R16G16B16A16_SFLOAT, R16_G16_B16_A16, FALSE, FLOAT),

   VA_FMT(R32_UINT, R32, FALSE, UINT),
   VA_FMT(R32_SINT, R32, FALSE, SINT),
   VA_FMT(R32_SFLOAT, R32, FALSE, FLOAT),

   VA_FMT(R32G32_UINT, R32_G32, FALSE, UINT),
   VA_FMT(R32G32_SINT, R32_G32, FALSE, SINT),
   VA_FMT(R32G32_SFLOAT, R32_G32, FALSE, FLOAT),

   VA_FMT(R32G32B32_UINT, R32_G32_B32, FALSE, UINT),
   VA_FMT(R32G32B32_SINT, R32_G32_B32, FALSE, SINT),
   VA_FMT(R32G32B32_SFLOAT, R32_G32_B32, FALSE, FLOAT),

   VA_FMT(R32G32B32A32_UINT, R32_G32_B32_A32, FALSE, UINT),
   VA_FMT(R32G32B32A32_SINT, R32_G32_B32_A32, FALSE, SINT),
   VA_FMT(R32G32B32A32_SFLOAT, R32_G32_B32_A32, FALSE, FLOAT),
};

#undef VA_FMT

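/* Looks up the vertex attribute format for a VkFormat.  Returns NULL if the
 * format has no VA_FMT() entry above; unlisted entries in nvk_vf_formats are
 * zero-initialized, so a bit_widths of zero marks an unsupported format.
 */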
const struct nvk_va_format *
nvk_get_va_format(const struct nvk_physical_device *pdev, VkFormat format)
{
   if (format >= ARRAY_SIZE(nvk_vf_formats))
      return NULL;

   if (nvk_vf_formats[format].bit_widths == 0)
      return NULL;

   return &nvk_vf_formats[format];
}

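/* Implements vkGetPhysicalDeviceFormatProperties2.  The 64-bit
 * VkFormatFeatureFlags2 are queried once per image tiling (and once for
 * buffers), then narrowed to the legacy 32-bit flags for VkFormatProperties;
 * the pNext chain is walked afterwards so that extension structs such as
 * VkFormatProperties3 receive the full 64-bit values.
 */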
VKAPI_ATTR void VKAPI_CALL
nvk_GetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice,
                                       VkFormat format,
                                       VkFormatProperties2 *pFormatProperties)
{
   VK_FROM_HANDLE(nvk_physical_device, pdevice, physicalDevice);

   VkFormatFeatureFlags2 linear2, optimal2, buffer2;
   linear2 = nvk_get_image_format_features(pdevice, format,
                                           VK_IMAGE_TILING_LINEAR, 0);
   optimal2 = nvk_get_image_format_features(pdevice, format,
                                            VK_IMAGE_TILING_OPTIMAL, 0);
   buffer2 = nvk_get_buffer_format_features(pdevice, format);

   pFormatProperties->formatProperties = (VkFormatProperties) {
      .linearTilingFeatures = vk_format_features2_to_features(linear2),
      .optimalTilingFeatures = vk_format_features2_to_features(optimal2),
      .bufferFeatures = vk_format_features2_to_features(buffer2),
   };

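   /* Fill out any extension structs chained onto pFormatProperties->pNext. */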
   vk_foreach_struct(ext, pFormatProperties->pNext) {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3: {
         VkFormatProperties3 *p = (void *)ext;
         p->linearTilingFeatures = linear2;
         p->optimalTilingFeatures = optimal2;
         p->bufferFeatures = buffer2;
         break;
      }

      case VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT:
      case VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT:
         nvk_get_drm_format_modifier_properties_list(pdevice, format, ext);
         break;

      default:
         vk_debug_ignored_stype(ext->sType);
         break;
      }
   }
}