1 /*
2 * Copyright © 2018 Red Hat Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #ifndef NIR_BUILTIN_BUILDER_H
25 #define NIR_BUILTIN_BUILDER_H
26
27 #include "util/u_math.h"
28 #include "nir_builder.h"
29
30 #ifdef __cplusplus
31 extern "C" {
32 #endif
33
34 /*
35 * Functions are sorted alphabetically with removed type and "fast" prefix.
36 * Definitions for functions in the C file come first.
37 */
38
/* Builtins with non-trivial expansions; bodies live in the companion C file. */
nir_def *nir_cross3(nir_builder *b, nir_def *x, nir_def *y);
nir_def *nir_cross4(nir_builder *b, nir_def *x, nir_def *y);
nir_def *nir_fast_length(nir_builder *b, nir_def *vec);
nir_def *nir_nextafter(nir_builder *b, nir_def *x, nir_def *y);
nir_def *nir_normalize(nir_builder *b, nir_def *vec);
nir_def *nir_smoothstep(nir_builder *b, nir_def *edge0,
                        nir_def *edge1, nir_def *x);
nir_def *nir_upsample(nir_builder *b, nir_def *hi, nir_def *lo);
nir_def *nir_atan(nir_builder *b, nir_def *y_over_x);
nir_def *nir_atan2(nir_builder *b, nir_def *y, nir_def *x);

/* Emits a texture query (texop) derived from an existing tex instruction. */
nir_def *
nir_build_texture_query(nir_builder *b, nir_tex_instr *tex, nir_texop texop,
                        unsigned components, nir_alu_type dest_type,
                        bool include_coord, bool include_lod);

nir_def *
nir_get_texture_lod(nir_builder *b, nir_tex_instr *tex);

nir_def *
nir_get_texture_size(nir_builder *b, nir_tex_instr *tex);
60
61 static inline nir_def *
nir_fisnan(nir_builder * b,nir_def * x)62 nir_fisnan(nir_builder *b, nir_def *x)
63 {
64 bool old_exact = b->exact;
65 b->exact = true;
66 nir_def *res = nir_fneu(b, x, x);
67 b->exact = old_exact;
68 return res;
69 }
70
71 static inline nir_def *
nir_nan_check2(nir_builder * b,nir_def * x,nir_def * y,nir_def * res)72 nir_nan_check2(nir_builder *b, nir_def *x, nir_def *y, nir_def *res)
73 {
74 return nir_bcsel(b, nir_fisnan(b, x), x, nir_bcsel(b, nir_fisnan(b, y), y, res));
75 }
76
77 static inline nir_def *
nir_fmax_abs_vec_comp(nir_builder * b,nir_def * vec)78 nir_fmax_abs_vec_comp(nir_builder *b, nir_def *vec)
79 {
80 nir_def *abs = nir_fabs(b, vec);
81 nir_def *res = nir_channel(b, abs, 0);
82 for (unsigned i = 1; i < vec->num_components; ++i)
83 res = nir_fmax(b, res, nir_channel(b, abs, i));
84 return res;
85 }
86
87 static inline nir_def *
nir_iabs_diff(nir_builder * b,nir_def * x,nir_def * y)88 nir_iabs_diff(nir_builder *b, nir_def *x, nir_def *y)
89 {
90 nir_def *cond = nir_ige(b, x, y);
91 nir_def *res0 = nir_isub(b, x, y);
92 nir_def *res1 = nir_isub(b, y, x);
93 return nir_bcsel(b, cond, res0, res1);
94 }
95
96 static inline nir_def *
nir_uabs_diff(nir_builder * b,nir_def * x,nir_def * y)97 nir_uabs_diff(nir_builder *b, nir_def *x, nir_def *y)
98 {
99 nir_def *cond = nir_uge(b, x, y);
100 nir_def *res0 = nir_isub(b, x, y);
101 nir_def *res1 = nir_isub(b, y, x);
102 return nir_bcsel(b, cond, res0, res1);
103 }
104
105 static inline nir_def *
nir_fexp(nir_builder * b,nir_def * x)106 nir_fexp(nir_builder *b, nir_def *x)
107 {
108 return nir_fexp2(b, nir_fmul_imm(b, x, M_LOG2E));
109 }
110
111 static inline nir_def *
nir_flog(nir_builder * b,nir_def * x)112 nir_flog(nir_builder *b, nir_def *x)
113 {
114 return nir_fmul_imm(b, nir_flog2(b, x), 1.0 / M_LOG2E);
115 }
116
117 static inline nir_def *
nir_imad24(nir_builder * b,nir_def * x,nir_def * y,nir_def * z)118 nir_imad24(nir_builder *b, nir_def *x, nir_def *y, nir_def *z)
119 {
120 nir_def *temp = nir_imul24(b, x, y);
121 return nir_iadd(b, temp, z);
122 }
123
124 static inline nir_def *
nir_imad_hi(nir_builder * b,nir_def * x,nir_def * y,nir_def * z)125 nir_imad_hi(nir_builder *b, nir_def *x, nir_def *y, nir_def *z)
126 {
127 nir_def *temp = nir_imul_high(b, x, y);
128 return nir_iadd(b, temp, z);
129 }
130
131 static inline nir_def *
nir_umad_hi(nir_builder * b,nir_def * x,nir_def * y,nir_def * z)132 nir_umad_hi(nir_builder *b, nir_def *x, nir_def *y, nir_def *z)
133 {
134 nir_def *temp = nir_umul_high(b, x, y);
135 return nir_iadd(b, temp, z);
136 }
137
138 static inline nir_def *
nir_bitselect(nir_builder * b,nir_def * x,nir_def * y,nir_def * s)139 nir_bitselect(nir_builder *b, nir_def *x, nir_def *y, nir_def *s)
140 {
141 return nir_ior(b, nir_iand(b, nir_inot(b, s), x), nir_iand(b, s, y));
142 }
143
144 static inline nir_def *
nir_copysign(nir_builder * b,nir_def * x,nir_def * y)145 nir_copysign(nir_builder *b, nir_def *x, nir_def *y)
146 {
147 if (b->shader->options->no_integers) {
148 /* Unlike the integer path, this is not signed zero correct. We assume
149 * integerless backends don't care.
150 */
151 nir_def *abs = nir_fabs(b, x);
152 return nir_bcsel(b, nir_flt_imm(b, y, 0.0), nir_fneg(b, abs), abs);
153 } else {
154 uint64_t masks = 1ull << (x->bit_size - 1);
155 uint64_t maskv = ~masks;
156
157 nir_def *s = nir_imm_intN_t(b, masks, x->bit_size);
158 nir_def *v = nir_imm_intN_t(b, maskv, x->bit_size);
159
160 return nir_ior(b, nir_iand(b, x, v), nir_iand(b, y, s));
161 }
162 }
163
164 static inline nir_def *
nir_degrees(nir_builder * b,nir_def * val)165 nir_degrees(nir_builder *b, nir_def *val)
166 {
167 return nir_fmul_imm(b, val, 180.0 / M_PI);
168 }
169
170 static inline nir_def *
nir_fdim(nir_builder * b,nir_def * x,nir_def * y)171 nir_fdim(nir_builder *b, nir_def *x, nir_def *y)
172 {
173 nir_def *cond = nir_flt(b, y, x);
174 nir_def *res = nir_fsub(b, x, y);
175 nir_def *zero = nir_imm_floatN_t(b, 0.0, x->bit_size);
176
177 // return NaN if either x or y are NaN, else x-y if x>y, else +0.0
178 return nir_nan_check2(b, x, y, nir_bcsel(b, cond, res, zero));
179 }
180
181 static inline nir_def *
nir_fast_distance(nir_builder * b,nir_def * x,nir_def * y)182 nir_fast_distance(nir_builder *b, nir_def *x, nir_def *y)
183 {
184 return nir_fast_length(b, nir_fsub(b, x, y));
185 }
186
187 static inline nir_def *
nir_fast_normalize(nir_builder * b,nir_def * vec)188 nir_fast_normalize(nir_builder *b, nir_def *vec)
189 {
190 return nir_fdiv(b, vec, nir_fast_length(b, vec));
191 }
192
193 static inline nir_def *
nir_fmad(nir_builder * b,nir_def * x,nir_def * y,nir_def * z)194 nir_fmad(nir_builder *b, nir_def *x, nir_def *y, nir_def *z)
195 {
196 return nir_fadd(b, nir_fmul(b, x, y), z);
197 }
198
199 static inline nir_def *
nir_maxmag(nir_builder * b,nir_def * x,nir_def * y)200 nir_maxmag(nir_builder *b, nir_def *x, nir_def *y)
201 {
202 nir_def *xabs = nir_fabs(b, x);
203 nir_def *yabs = nir_fabs(b, y);
204
205 nir_def *condy = nir_flt(b, xabs, yabs);
206 nir_def *condx = nir_flt(b, yabs, xabs);
207
208 return nir_bcsel(b, condy, y, nir_bcsel(b, condx, x, nir_fmax(b, x, y)));
209 }
210
211 static inline nir_def *
nir_minmag(nir_builder * b,nir_def * x,nir_def * y)212 nir_minmag(nir_builder *b, nir_def *x, nir_def *y)
213 {
214 nir_def *xabs = nir_fabs(b, x);
215 nir_def *yabs = nir_fabs(b, y);
216
217 nir_def *condx = nir_flt(b, xabs, yabs);
218 nir_def *condy = nir_flt(b, yabs, xabs);
219
220 return nir_bcsel(b, condy, y, nir_bcsel(b, condx, x, nir_fmin(b, x, y)));
221 }
222
223 static inline nir_def *
nir_nan(nir_builder * b,nir_def * x)224 nir_nan(nir_builder *b, nir_def *x)
225 {
226 nir_def *nan = nir_imm_floatN_t(b, NAN, x->bit_size);
227 if (x->num_components == 1)
228 return nan;
229
230 nir_def *nans[NIR_MAX_VEC_COMPONENTS];
231 for (unsigned i = 0; i < x->num_components; ++i)
232 nans[i] = nan;
233
234 return nir_vec(b, nans, x->num_components);
235 }
236
237 static inline nir_def *
nir_radians(nir_builder * b,nir_def * val)238 nir_radians(nir_builder *b, nir_def *val)
239 {
240 return nir_fmul_imm(b, val, M_PI / 180.0);
241 }
242
243 static inline nir_def *
nir_select(nir_builder * b,nir_def * x,nir_def * y,nir_def * s)244 nir_select(nir_builder *b, nir_def *x, nir_def *y, nir_def *s)
245 {
246 if (s->num_components != 1) {
247 uint64_t mask = 1ull << (s->bit_size - 1);
248 s = nir_iand_imm(b, s, mask);
249 }
250 return nir_bcsel(b, nir_ieq_imm(b, s, 0), x, y);
251 }
252
253 static inline nir_def *
nir_ftan(nir_builder * b,nir_def * x)254 nir_ftan(nir_builder *b, nir_def *x)
255 {
256 return nir_fdiv(b, nir_fsin(b, x), nir_fcos(b, x));
257 }
258
259 static inline nir_def *
nir_clz_u(nir_builder * b,nir_def * a)260 nir_clz_u(nir_builder *b, nir_def *a)
261 {
262 nir_def *val;
263 val = nir_isub_imm(b, a->bit_size - 1,
264 nir_ufind_msb(b, nir_u2uN(b, a,
265 MAX2(a->bit_size, 32))));
266 return nir_u2uN(b, val, a->bit_size);
267 }
268
269 static inline nir_def *
nir_ctz_u(nir_builder * b,nir_def * a)270 nir_ctz_u(nir_builder *b, nir_def *a)
271 {
272 nir_def *cond = nir_ieq_imm(b, a, 0);
273
274 return nir_bcsel(b, cond,
275 nir_imm_intN_t(b, a->bit_size, a->bit_size),
276 nir_u2uN(b, nir_find_lsb(b, a), a->bit_size));
277 }
278
279 #ifdef __cplusplus
280 }
281 #endif
282
283 #endif /* NIR_BUILTIN_BUILDER_H */
284