1 /*
2 * Copyright © 2016 Rob Clark <[email protected]>
3 * SPDX-License-Identifier: MIT
4 *
5 * Authors:
6 * Rob Clark <[email protected]>
7 */
8
9 #include "pipe/p_defines.h"
10 #include "util/format/u_format.h"
11
12 #include "fd5_format.h"
13
14 /* Specifies the table of all the formats and their features. Also supplies
15 * the helpers that look up various data in those tables.
16 */
17
/* One table entry per PIPE_FORMAT, giving the a5xx hw format enum for each
 * unit that consumes formats (vertex fetch, texture sampler, render backend).
 */
struct fd5_format {
   enum a5xx_vtx_fmt vtx;     /* vertex-fetch format, VFMT5_NONE if not vertex-capable */
   enum a5xx_tex_fmt tex;     /* texture-sampler format, TFMT5_NONE if not texturable */
   enum a5xx_color_fmt rb;    /* render-target/copy-dest format, RB5_NONE if not renderable */
   enum a3xx_color_swap swap; /* channel swap (WZYX/WXYZ/XYZW/ZYXW) applied on top of the format */
   bool present;              /* set for every table entry; lookups treat !present as unsupported */
};
25
/* Initializer helpers for the format table below.  Each expands to a
 * designated initializer for formats[PIPE_FORMAT_<pipe>]:
 *   pipe    - PIPE_FORMAT_ suffix (the array index)
 *   fmt     - VFMT5_/TFMT5_ enum suffix for the vertex/texture units
 *   rbfmt   - RB5_ enum suffix for the render backend (NONE if not renderable)
 *   swapfmt - a3xx_color_swap value, used as-is
 */

/* vertex + texture */
#define VT(pipe, fmt, rbfmt, swapfmt) \
[PIPE_FORMAT_##pipe] = {.present = 1, \
.vtx = VFMT5_##fmt, \
.tex = TFMT5_##fmt, \
.rb = RB5_##rbfmt, \
.swap = swapfmt}

/* texture-only */
#define _T(pipe, fmt, rbfmt, swapfmt) \
[PIPE_FORMAT_##pipe] = {.present = 1, \
.vtx = VFMT5_NONE, \
.tex = TFMT5_##fmt, \
.rb = RB5_##rbfmt, \
.swap = swapfmt}

/* vertex-only */
#define V_(pipe, fmt, rbfmt, swapfmt) \
[PIPE_FORMAT_##pipe] = {.present = 1, \
.vtx = VFMT5_##fmt, \
.tex = TFMT5_NONE, \
.rb = RB5_##rbfmt, \
.swap = swapfmt}
49
/* clang-format off */
/* Format table, indexed by PIPE_FORMAT; entries not listed here are
 * zero-initialized (present == false) and treated as unsupported.
 */
static struct fd5_format formats[PIPE_FORMAT_COUNT] = {
   /* for blitting, treat PIPE_FORMAT_NONE as 8bit R8: */
   _T(R8_UINT,   8_UINT,  R8_UINT, WZYX),

   /* 8-bit */
   VT(R8_UNORM,   8_UNORM, R8_UNORM, WZYX),
   _T(R8_SRGB,    8_UNORM, R8_UNORM, WZYX),
   VT(R8_SNORM,   8_SNORM, R8_SNORM, WZYX),
   VT(R8_UINT,    8_UINT,  R8_UINT,  WZYX),
   VT(R8_SINT,    8_SINT,  R8_SINT,  WZYX),
   V_(R8_USCALED, 8_UINT,  NONE,     WZYX),
   V_(R8_SSCALED, 8_SINT,  NONE,     WZYX),

   _T(A8_UNORM, 8_UNORM, A8_UNORM, WZYX),
   _T(L8_UNORM, 8_UNORM, R8_UNORM, WZYX),
   _T(L8_SNORM, 8_SNORM, R8_SNORM, WZYX),
   _T(I8_UNORM, 8_UNORM, NONE,     WZYX),
   _T(I8_SNORM, 8_SNORM, NONE,     WZYX),

   _T(A8_UINT, 8_UINT, NONE, WZYX),
   _T(A8_SINT, 8_SINT, NONE, WZYX),
   _T(L8_UINT, 8_UINT, NONE, WZYX),
   _T(L8_SINT, 8_SINT, NONE, WZYX),
   _T(I8_UINT, 8_UINT, NONE, WZYX),
   _T(I8_SINT, 8_SINT, NONE, WZYX),

   _T(S8_UINT, 8_UINT, R8_UNORM, WZYX),

   /* 16-bit */
   VT(R16_UNORM,   16_UNORM, R16_UNORM, WZYX),
   VT(R16_SNORM,   16_SNORM, R16_SNORM, WZYX),
   VT(R16_UINT,    16_UINT,  R16_UINT,  WZYX),
   VT(R16_SINT,    16_SINT,  R16_SINT,  WZYX),
   V_(R16_USCALED, 16_UINT,  NONE,      WZYX),
   V_(R16_SSCALED, 16_SINT,  NONE,      WZYX),
   VT(R16_FLOAT,   16_FLOAT, R16_FLOAT, WZYX),
   _T(Z16_UNORM,   16_UNORM, R16_UNORM, WZYX),

   _T(A16_UNORM, 16_UNORM, NONE, WZYX),
   _T(A16_SNORM, 16_SNORM, NONE, WZYX),
   _T(A16_UINT,  16_UINT,  NONE, WZYX),
   _T(A16_SINT,  16_SINT,  NONE, WZYX),
   _T(A16_FLOAT, 16_FLOAT, NONE, WZYX),
   _T(L16_UNORM, 16_UNORM, NONE, WZYX),
   _T(L16_SNORM, 16_SNORM, NONE, WZYX),
   _T(L16_UINT,  16_UINT,  NONE, WZYX),
   _T(L16_SINT,  16_SINT,  NONE, WZYX),
   _T(L16_FLOAT, 16_FLOAT, NONE, WZYX),
   _T(I16_UNORM, 16_UNORM, NONE, WZYX),
   _T(I16_SNORM, 16_SNORM, NONE, WZYX),
   _T(I16_UINT,  16_UINT,  NONE, WZYX),
   _T(I16_SINT,  16_SINT,  NONE, WZYX),
   _T(I16_FLOAT, 16_FLOAT, NONE, WZYX),

   VT(R8G8_UNORM,   8_8_UNORM, R8G8_UNORM, WZYX),
   _T(R8G8_SRGB,    8_8_UNORM, R8G8_UNORM, WZYX),
   VT(R8G8_SNORM,   8_8_SNORM, R8G8_SNORM, WZYX),
   VT(R8G8_UINT,    8_8_UINT,  R8G8_UINT,  WZYX),
   VT(R8G8_SINT,    8_8_SINT,  R8G8_SINT,  WZYX),
   V_(R8G8_USCALED, 8_8_UINT,  NONE,       WZYX),
   V_(R8G8_SSCALED, 8_8_SINT,  NONE,       WZYX),

   _T(L8A8_UINT,  8_8_UINT,  NONE, WZYX),
   _T(L8A8_SINT,  8_8_SINT,  NONE, WZYX),
   _T(L8A8_UNORM, 8_8_UNORM, NONE, WZYX),

   _T(B5G6R5_UNORM,   5_6_5_UNORM,   R5G6B5_UNORM,   WXYZ),
   _T(B5G5R5A1_UNORM, 5_5_5_1_UNORM, R5G5B5A1_UNORM, WXYZ),
   _T(B5G5R5X1_UNORM, 5_5_5_1_UNORM, R5G5B5A1_UNORM, WXYZ),
   _T(B4G4R4A4_UNORM, 4_4_4_4_UNORM, R4G4B4A4_UNORM, WXYZ),

   /* 24-bit */
   V_(R8G8B8_UNORM,   8_8_8_UNORM, NONE, WZYX),
   V_(R8G8B8_SNORM,   8_8_8_SNORM, NONE, WZYX),
   V_(R8G8B8_UINT,    8_8_8_UINT,  NONE, WZYX),
   V_(R8G8B8_SINT,    8_8_8_SINT,  NONE, WZYX),
   V_(R8G8B8_USCALED, 8_8_8_UINT,  NONE, WZYX),
   V_(R8G8B8_SSCALED, 8_8_8_SINT,  NONE, WZYX),

   /* 32-bit */
   VT(R32_UINT,    32_UINT,  R32_UINT,  WZYX),
   VT(R32_SINT,    32_SINT,  R32_SINT,  WZYX),
   V_(R32_USCALED, 32_UINT,  NONE,      WZYX),
   V_(R32_SSCALED, 32_SINT,  NONE,      WZYX),
   VT(R32_FLOAT,   32_FLOAT, R32_FLOAT, WZYX),
   V_(R32_FIXED,   32_FIXED, NONE,      WZYX),

   _T(A32_UINT,  32_UINT,  NONE, WZYX),
   _T(A32_SINT,  32_SINT,  NONE, WZYX),
   _T(A32_FLOAT, 32_FLOAT, NONE, WZYX),
   _T(L32_UINT,  32_UINT,  NONE, WZYX),
   _T(L32_SINT,  32_SINT,  NONE, WZYX),
   _T(L32_FLOAT, 32_FLOAT, NONE, WZYX),
   _T(I32_UINT,  32_UINT,  NONE, WZYX),
   _T(I32_SINT,  32_SINT,  NONE, WZYX),
   _T(I32_FLOAT, 32_FLOAT, NONE, WZYX),

   VT(R16G16_UNORM,   16_16_UNORM, R16G16_UNORM, WZYX),
   VT(R16G16_SNORM,   16_16_SNORM, R16G16_SNORM, WZYX),
   VT(R16G16_UINT,    16_16_UINT,  R16G16_UINT,  WZYX),
   VT(R16G16_SINT,    16_16_SINT,  R16G16_SINT,  WZYX),
   VT(R16G16_USCALED, 16_16_UINT,  NONE,         WZYX),
   VT(R16G16_SSCALED, 16_16_SINT,  NONE,         WZYX),
   VT(R16G16_FLOAT,   16_16_FLOAT, R16G16_FLOAT, WZYX),

   _T(L16A16_UNORM, 16_16_UNORM, NONE, WZYX),
   _T(L16A16_SNORM, 16_16_SNORM, NONE, WZYX),
   _T(L16A16_UINT,  16_16_UINT,  NONE, WZYX),
   _T(L16A16_SINT,  16_16_SINT,  NONE, WZYX),
   _T(L16A16_FLOAT, 16_16_FLOAT, NONE, WZYX),

   VT(R8G8B8A8_UNORM,   8_8_8_8_UNORM, R8G8B8A8_UNORM, WZYX),
   _T(R8G8B8X8_UNORM,   8_8_8_8_UNORM, R8G8B8A8_UNORM, WZYX),
   _T(R8G8B8A8_SRGB,    8_8_8_8_UNORM, R8G8B8A8_UNORM, WZYX),
   _T(R8G8B8X8_SRGB,    8_8_8_8_UNORM, R8G8B8A8_UNORM, WZYX),
   VT(R8G8B8A8_SNORM,   8_8_8_8_SNORM, R8G8B8A8_SNORM, WZYX),
   VT(R8G8B8A8_UINT,    8_8_8_8_UINT,  R8G8B8A8_UINT,  WZYX),
   VT(R8G8B8A8_SINT,    8_8_8_8_SINT,  R8G8B8A8_SINT,  WZYX),
   V_(R8G8B8A8_USCALED, 8_8_8_8_UINT,  NONE,           WZYX),
   V_(R8G8B8A8_SSCALED, 8_8_8_8_SINT,  NONE,           WZYX),

   VT(B8G8R8A8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, WXYZ),
   _T(B8G8R8X8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, WXYZ),
   VT(B8G8R8A8_SRGB,  8_8_8_8_UNORM, R8G8B8A8_UNORM, WXYZ),
   _T(B8G8R8X8_SRGB,  8_8_8_8_UNORM, R8G8B8A8_UNORM, WXYZ),

   VT(A8B8G8R8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, XYZW),
   _T(X8B8G8R8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, XYZW),
   _T(A8B8G8R8_SRGB,  8_8_8_8_UNORM, R8G8B8A8_UNORM, XYZW),
   _T(X8B8G8R8_SRGB,  8_8_8_8_UNORM, R8G8B8A8_UNORM, XYZW),

   VT(A8R8G8B8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, ZYXW),
   _T(X8R8G8B8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, ZYXW),
   _T(A8R8G8B8_SRGB,  8_8_8_8_UNORM, R8G8B8A8_UNORM, ZYXW),
   _T(X8R8G8B8_SRGB,  8_8_8_8_UNORM, R8G8B8A8_UNORM, ZYXW),

   VT(R10G10B10A2_UNORM,   10_10_10_2_UNORM, R10G10B10A2_UNORM, WZYX),
   VT(B10G10R10A2_UNORM,   10_10_10_2_UNORM, R10G10B10A2_UNORM, WXYZ),
   _T(B10G10R10X2_UNORM,   10_10_10_2_UNORM, R10G10B10A2_UNORM, WXYZ),
   V_(R10G10B10A2_SNORM,   10_10_10_2_SNORM, NONE,              WZYX),
   V_(B10G10R10A2_SNORM,   10_10_10_2_SNORM, NONE,              WXYZ),
   VT(R10G10B10A2_UINT,    10_10_10_2_UINT,  R10G10B10A2_UINT,  WZYX),
   VT(B10G10R10A2_UINT,    10_10_10_2_UINT,  R10G10B10A2_UINT,  WXYZ),
   V_(R10G10B10A2_USCALED, 10_10_10_2_UINT,  NONE,              WZYX),
   V_(B10G10R10A2_USCALED, 10_10_10_2_UINT,  NONE,              WXYZ),
   V_(R10G10B10A2_SSCALED, 10_10_10_2_SINT,  NONE,              WZYX),
   V_(B10G10R10A2_SSCALED, 10_10_10_2_SINT,  NONE,              WXYZ),

   VT(R11G11B10_FLOAT, 11_11_10_FLOAT, R11G11B10_FLOAT, WZYX),
   _T(R9G9B9E5_FLOAT,  9_9_9_E5_FLOAT, NONE,            WZYX),

   _T(Z24X8_UNORM,          X8Z24_UNORM,  R8G8B8A8_UNORM, WZYX),
   _T(X24S8_UINT,           8_8_8_8_UINT, R8G8B8A8_UINT,  XYZW),
   _T(Z24_UNORM_S8_UINT,    X8Z24_UNORM,  R8G8B8A8_UNORM, WZYX),
   _T(Z32_FLOAT,            32_FLOAT,     R8G8B8A8_UNORM, WZYX),
   _T(Z32_FLOAT_S8X24_UINT, 32_FLOAT,     R8G8B8A8_UNORM, WZYX),
   _T(X32_S8X24_UINT,       8_UINT,       R8_UINT,        WZYX),

   /* 48-bit */
   V_(R16G16B16_UNORM,   16_16_16_UNORM, NONE, WZYX),
   V_(R16G16B16_SNORM,   16_16_16_SNORM, NONE, WZYX),
   V_(R16G16B16_UINT,    16_16_16_UINT,  NONE, WZYX),
   V_(R16G16B16_SINT,    16_16_16_SINT,  NONE, WZYX),
   V_(R16G16B16_USCALED, 16_16_16_UINT,  NONE, WZYX),
   V_(R16G16B16_SSCALED, 16_16_16_SINT,  NONE, WZYX),
   V_(R16G16B16_FLOAT,   16_16_16_FLOAT, NONE, WZYX),

   /* 64-bit */
   VT(R16G16B16A16_UNORM,   16_16_16_16_UNORM, R16G16B16A16_UNORM, WZYX),
   VT(R16G16B16X16_UNORM,   16_16_16_16_UNORM, R16G16B16A16_UNORM, WZYX),
   VT(R16G16B16A16_SNORM,   16_16_16_16_SNORM, R16G16B16A16_SNORM, WZYX),
   VT(R16G16B16X16_SNORM,   16_16_16_16_SNORM, R16G16B16A16_SNORM, WZYX),
   VT(R16G16B16A16_UINT,    16_16_16_16_UINT,  R16G16B16A16_UINT,  WZYX),
   VT(R16G16B16X16_UINT,    16_16_16_16_UINT,  R16G16B16A16_UINT,  WZYX),
   VT(R16G16B16A16_SINT,    16_16_16_16_SINT,  R16G16B16A16_SINT,  WZYX),
   VT(R16G16B16X16_SINT,    16_16_16_16_SINT,  R16G16B16A16_SINT,  WZYX),
   VT(R16G16B16A16_USCALED, 16_16_16_16_UINT,  NONE,               WZYX),
   VT(R16G16B16A16_SSCALED, 16_16_16_16_SINT,  NONE,               WZYX),
   VT(R16G16B16A16_FLOAT,   16_16_16_16_FLOAT, R16G16B16A16_FLOAT, WZYX),
   VT(R16G16B16X16_FLOAT,   16_16_16_16_FLOAT, R16G16B16A16_FLOAT, WZYX),

   VT(R32G32_UINT,    32_32_UINT,  R32G32_UINT,  WZYX),
   VT(R32G32_SINT,    32_32_SINT,  R32G32_SINT,  WZYX),
   V_(R32G32_USCALED, 32_32_UINT,  NONE,         WZYX),
   V_(R32G32_SSCALED, 32_32_SINT,  NONE,         WZYX),
   VT(R32G32_FLOAT,   32_32_FLOAT, R32G32_FLOAT, WZYX),
   V_(R32G32_FIXED,   32_32_FIXED, NONE,         WZYX),

   _T(L32A32_UINT,  32_32_UINT,  NONE, WZYX),
   _T(L32A32_SINT,  32_32_SINT,  NONE, WZYX),
   _T(L32A32_FLOAT, 32_32_FLOAT, NONE, WZYX),

   /* 96-bit */
   VT(R32G32B32_UINT,    32_32_32_UINT,  NONE, WZYX),
   VT(R32G32B32_SINT,    32_32_32_SINT,  NONE, WZYX),
   V_(R32G32B32_USCALED, 32_32_32_UINT,  NONE, WZYX),
   V_(R32G32B32_SSCALED, 32_32_32_SINT,  NONE, WZYX),
   VT(R32G32B32_FLOAT,   32_32_32_FLOAT, NONE, WZYX),
   V_(R32G32B32_FIXED,   32_32_32_FIXED, NONE, WZYX),

   /* 128-bit */
   VT(R32G32B32A32_UINT,    32_32_32_32_UINT,  R32G32B32A32_UINT,  WZYX),
   _T(R32G32B32X32_UINT,    32_32_32_32_UINT,  R32G32B32A32_UINT,  WZYX),
   VT(R32G32B32A32_SINT,    32_32_32_32_SINT,  R32G32B32A32_SINT,  WZYX),
   _T(R32G32B32X32_SINT,    32_32_32_32_SINT,  R32G32B32A32_SINT,  WZYX),
   V_(R32G32B32A32_USCALED, 32_32_32_32_UINT,  NONE,               WZYX),
   V_(R32G32B32A32_SSCALED, 32_32_32_32_SINT,  NONE,               WZYX),
   VT(R32G32B32A32_FLOAT,   32_32_32_32_FLOAT, R32G32B32A32_FLOAT, WZYX),
   _T(R32G32B32X32_FLOAT,   32_32_32_32_FLOAT, R32G32B32A32_FLOAT, WZYX),
   V_(R32G32B32A32_FIXED,   32_32_32_32_FIXED, NONE,               WZYX),

   /* compressed */
   _T(ETC1_RGB8,       ETC1,            NONE, WZYX),
   _T(ETC2_RGB8,       ETC2_RGB8,       NONE, WZYX),
   _T(ETC2_SRGB8,      ETC2_RGB8,       NONE, WZYX),
   _T(ETC2_RGB8A1,     ETC2_RGB8A1,     NONE, WZYX),
   _T(ETC2_SRGB8A1,    ETC2_RGB8A1,     NONE, WZYX),
   _T(ETC2_RGBA8,      ETC2_RGBA8,      NONE, WZYX),
   _T(ETC2_SRGBA8,     ETC2_RGBA8,      NONE, WZYX),
   _T(ETC2_R11_UNORM,  ETC2_R11_UNORM,  NONE, WZYX),
   _T(ETC2_R11_SNORM,  ETC2_R11_SNORM,  NONE, WZYX),
   _T(ETC2_RG11_UNORM, ETC2_RG11_UNORM, NONE, WZYX),
   _T(ETC2_RG11_SNORM, ETC2_RG11_SNORM, NONE, WZYX),

   _T(DXT1_RGB,   DXT1, NONE, WZYX),
   _T(DXT1_SRGB,  DXT1, NONE, WZYX),
   _T(DXT1_RGBA,  DXT1, NONE, WZYX),
   _T(DXT1_SRGBA, DXT1, NONE, WZYX),
   _T(DXT3_RGBA,  DXT3, NONE, WZYX),
   _T(DXT3_SRGBA, DXT3, NONE, WZYX),
   _T(DXT5_RGBA,  DXT5, NONE, WZYX),
   _T(DXT5_SRGBA, DXT5, NONE, WZYX),

   _T(BPTC_RGBA_UNORM, BPTC,        NONE, WZYX),
   _T(BPTC_SRGBA,      BPTC,        NONE, WZYX),
   _T(BPTC_RGB_FLOAT,  BPTC_FLOAT,  NONE, WZYX),
   _T(BPTC_RGB_UFLOAT, BPTC_UFLOAT, NONE, WZYX),

   _T(RGTC1_UNORM, RGTC1_UNORM, NONE, WZYX),
   _T(RGTC1_SNORM, RGTC1_SNORM, NONE, WZYX),
   _T(RGTC2_UNORM, RGTC2_UNORM, NONE, WZYX),
   _T(RGTC2_SNORM, RGTC2_SNORM, NONE, WZYX),
   _T(LATC1_UNORM, RGTC1_UNORM, NONE, WZYX),
   _T(LATC1_SNORM, RGTC1_SNORM, NONE, WZYX),
   _T(LATC2_UNORM, RGTC2_UNORM, NONE, WZYX),
   _T(LATC2_SNORM, RGTC2_SNORM, NONE, WZYX),

   _T(ASTC_4x4,   ASTC_4x4,   NONE, WZYX),
   _T(ASTC_5x4,   ASTC_5x4,   NONE, WZYX),
   _T(ASTC_5x5,   ASTC_5x5,   NONE, WZYX),
   _T(ASTC_6x5,   ASTC_6x5,   NONE, WZYX),
   _T(ASTC_6x6,   ASTC_6x6,   NONE, WZYX),
   _T(ASTC_8x5,   ASTC_8x5,   NONE, WZYX),
   _T(ASTC_8x6,   ASTC_8x6,   NONE, WZYX),
   _T(ASTC_8x8,   ASTC_8x8,   NONE, WZYX),
   _T(ASTC_10x5,  ASTC_10x5,  NONE, WZYX),
   _T(ASTC_10x6,  ASTC_10x6,  NONE, WZYX),
   _T(ASTC_10x8,  ASTC_10x8,  NONE, WZYX),
   _T(ASTC_10x10, ASTC_10x10, NONE, WZYX),
   _T(ASTC_12x10, ASTC_12x10, NONE, WZYX),
   _T(ASTC_12x12, ASTC_12x12, NONE, WZYX),

   _T(ASTC_4x4_SRGB,   ASTC_4x4,   NONE, WZYX),
   _T(ASTC_5x4_SRGB,   ASTC_5x4,   NONE, WZYX),
   _T(ASTC_5x5_SRGB,   ASTC_5x5,   NONE, WZYX),
   _T(ASTC_6x5_SRGB,   ASTC_6x5,   NONE, WZYX),
   _T(ASTC_6x6_SRGB,   ASTC_6x6,   NONE, WZYX),
   _T(ASTC_8x5_SRGB,   ASTC_8x5,   NONE, WZYX),
   _T(ASTC_8x6_SRGB,   ASTC_8x6,   NONE, WZYX),
   _T(ASTC_8x8_SRGB,   ASTC_8x8,   NONE, WZYX),
   _T(ASTC_10x5_SRGB,  ASTC_10x5,  NONE, WZYX),
   _T(ASTC_10x6_SRGB,  ASTC_10x6,  NONE, WZYX),
   _T(ASTC_10x8_SRGB,  ASTC_10x8,  NONE, WZYX),
   _T(ASTC_10x10_SRGB, ASTC_10x10, NONE, WZYX),
   _T(ASTC_12x10_SRGB, ASTC_12x10, NONE, WZYX),
   _T(ASTC_12x12_SRGB, ASTC_12x12, NONE, WZYX),
};
/* clang-format on */
329
330 /* convert pipe format to vertex buffer format: */
331 enum a5xx_vtx_fmt
fd5_pipe2vtx(enum pipe_format format)332 fd5_pipe2vtx(enum pipe_format format)
333 {
334 if (!formats[format].present)
335 return VFMT5_NONE;
336 return formats[format].vtx;
337 }
338
339 /* convert pipe format to texture sampler format: */
340 enum a5xx_tex_fmt
fd5_pipe2tex(enum pipe_format format)341 fd5_pipe2tex(enum pipe_format format)
342 {
343 if (!formats[format].present)
344 return TFMT5_NONE;
345 return formats[format].tex;
346 }
347
348 /* convert pipe format to MRT / copydest format used for render-target: */
349 enum a5xx_color_fmt
fd5_pipe2color(enum pipe_format format)350 fd5_pipe2color(enum pipe_format format)
351 {
352 if (!formats[format].present)
353 return RB5_NONE;
354 return formats[format].rb;
355 }
356
357 enum a3xx_color_swap
fd5_pipe2swap(enum pipe_format format)358 fd5_pipe2swap(enum pipe_format format)
359 {
360 if (!formats[format].present)
361 return WZYX;
362 return formats[format].swap;
363 }
364
365 enum a5xx_depth_format
fd5_pipe2depth(enum pipe_format format)366 fd5_pipe2depth(enum pipe_format format)
367 {
368 switch (format) {
369 case PIPE_FORMAT_Z16_UNORM:
370 return DEPTH5_16;
371 case PIPE_FORMAT_Z24X8_UNORM:
372 case PIPE_FORMAT_Z24_UNORM_S8_UINT:
373 case PIPE_FORMAT_X8Z24_UNORM:
374 case PIPE_FORMAT_S8_UINT_Z24_UNORM:
375 return DEPTH5_24_8;
376 case PIPE_FORMAT_Z32_FLOAT:
377 case PIPE_FORMAT_Z32_FLOAT_S8X24_UINT:
378 return DEPTH5_32;
379 default:
380 return ~0;
381 }
382 }
383
384 static inline enum a5xx_tex_swiz
tex_swiz(unsigned swiz)385 tex_swiz(unsigned swiz)
386 {
387 switch (swiz) {
388 default:
389 case PIPE_SWIZZLE_X:
390 return A5XX_TEX_X;
391 case PIPE_SWIZZLE_Y:
392 return A5XX_TEX_Y;
393 case PIPE_SWIZZLE_Z:
394 return A5XX_TEX_Z;
395 case PIPE_SWIZZLE_W:
396 return A5XX_TEX_W;
397 case PIPE_SWIZZLE_0:
398 return A5XX_TEX_ZERO;
399 case PIPE_SWIZZLE_1:
400 return A5XX_TEX_ONE;
401 }
402 }
403
404 uint32_t
fd5_tex_swiz(enum pipe_format format,unsigned swizzle_r,unsigned swizzle_g,unsigned swizzle_b,unsigned swizzle_a)405 fd5_tex_swiz(enum pipe_format format, unsigned swizzle_r, unsigned swizzle_g,
406 unsigned swizzle_b, unsigned swizzle_a)
407 {
408 const struct util_format_description *desc = util_format_description(format);
409 unsigned char swiz[4] = {
410 swizzle_r,
411 swizzle_g,
412 swizzle_b,
413 swizzle_a,
414 }, rswiz[4];
415
416 util_format_compose_swizzles(desc->swizzle, swiz, rswiz);
417
418 return A5XX_TEX_CONST_0_SWIZ_X(tex_swiz(rswiz[0])) |
419 A5XX_TEX_CONST_0_SWIZ_Y(tex_swiz(rswiz[1])) |
420 A5XX_TEX_CONST_0_SWIZ_Z(tex_swiz(rswiz[2])) |
421 A5XX_TEX_CONST_0_SWIZ_W(tex_swiz(rswiz[3]));
422 }
423