/*
 * Copyright © 2019 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "lvp_private.h"
#include "nir.h"
#include "nir_builder.h"
#include "lvp_lower_vulkan_resource.h"

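/* Match callback for nir_shader_lower_instructions(): select every
 * instruction that still references a Vulkan descriptor, i.e. the resource
 * index/reindex/descriptor intrinsics, the deref-based image intrinsics,
 * get_ssbo_size, and all texture instructions.
 */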
static bool
lower_vulkan_resource_index(const nir_instr *instr, const void *data_cb)
{
   if (instr->type == nir_instr_type_intrinsic) {
      nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
      switch (intrin->intrinsic) {
      case nir_intrinsic_vulkan_resource_index:
      case nir_intrinsic_vulkan_resource_reindex:
      case nir_intrinsic_load_vulkan_descriptor:
      case nir_intrinsic_get_ssbo_size:
      case nir_intrinsic_image_deref_sparse_load:
      case nir_intrinsic_image_deref_load:
      case nir_intrinsic_image_deref_store:
      case nir_intrinsic_image_deref_atomic:
      case nir_intrinsic_image_deref_atomic_swap:
      case nir_intrinsic_image_deref_size:
      case nir_intrinsic_image_deref_samples:
         return true;
      default:
         return false;
      }
   }
   if (instr->type == nir_instr_type_tex) {
      return true;
   }
   return false;
}

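/* Lower vulkan_resource_index to a (set, index, offset) vec3.  The set is
 * biased by 1 (slot 0 is presumably reserved for push constants;
 * lower_load_ubo below applies the same bias), and the dynamic array index
 * in src[0] is rebased onto the binding's first descriptor.
 */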
static nir_def *lower_vri_intrin_vri(struct nir_builder *b,
                                     nir_instr *instr, void *data_cb)
{
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   unsigned desc_set_idx = nir_intrinsic_desc_set(intrin);
   unsigned binding_idx = nir_intrinsic_binding(intrin);
   const struct lvp_descriptor_set_binding_layout *binding =
      get_binding_layout(data_cb, desc_set_idx, binding_idx);

   return nir_vec3(b, nir_imm_int(b, desc_set_idx + 1),
                   nir_iadd_imm(b, intrin->src[0].ssa, binding->descriptor_index),
                   nir_imm_int(b, 0));
}

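/* vulkan_resource_reindex: add the array delta to the index component and
 * pass the set and offset components through unchanged.
 */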
static nir_def *lower_vri_intrin_vrri(struct nir_builder *b,
                                      nir_instr *instr, void *data_cb)
{
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   nir_def *old_index = intrin->src[0].ssa;
   nir_def *delta = intrin->src[1].ssa;
   return nir_vec3(b, nir_channel(b, old_index, 0),
                   nir_iadd(b, nir_channel(b, old_index, 1), delta),
                   nir_channel(b, old_index, 2));
}

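/* load_vulkan_descriptor: by this point src[0] already holds the lowered
 * (set, index, offset) vec3, so simply forward it.
 */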
static nir_def *lower_vri_intrin_lvd(struct nir_builder *b,
                                     nir_instr *instr, void *data_cb)
{
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   return intrin->src[0].ssa;
}

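/* Walk an (array-of-arrays) deref chain down to the underlying variable and
 * build the same (set + 1, index, 0) vec3 as lower_vri_intrin_vri: the array
 * indices are flattened, scaled by the binding's descriptor stride, and
 * added to the binding's first descriptor plus the requested plane.
 */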
static nir_def *
vulkan_resource_from_deref(nir_builder *b, nir_deref_instr *deref,
                           const struct lvp_pipeline_layout *layout,
                           unsigned plane)
{
   nir_def *index = nir_imm_int(b, 0);

   while (deref->deref_type != nir_deref_type_var) {
      assert(deref->deref_type == nir_deref_type_array);
      unsigned array_size = MAX2(glsl_get_aoa_size(deref->type), 1);

      index = nir_iadd(b, index, nir_imul_imm(b, deref->arr.index.ssa, array_size));

      deref = nir_deref_instr_parent(deref);
   }

   nir_variable *var = deref->var;

   const struct lvp_descriptor_set_binding_layout *binding =
      get_binding_layout(layout, var->data.descriptor_set, var->data.binding);
   uint32_t binding_base = binding->descriptor_index + plane;
   index = nir_imul_imm(b, index, binding->stride);

   return nir_vec3(b, nir_imm_int(b, var->data.descriptor_set + 1),
                   nir_iadd_imm(b, index, binding_base),
                   nir_imm_int(b, 0));
}

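/* Rewrite the texture/sampler deref sources of a tex instruction into
 * bindless handle sources.  A nir_tex_src_plane source, if present, is
 * consumed here and folded into the descriptor index.
 */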
static void lower_vri_instr_tex(struct nir_builder *b,
                                nir_tex_instr *tex, void *data_cb)
{
   struct lvp_pipeline_layout *layout = data_cb;
   nir_def *plane_ssa = nir_steal_tex_src(tex, nir_tex_src_plane);
   const uint32_t plane =
      plane_ssa ? nir_src_as_uint(nir_src_for_ssa(plane_ssa)) : 0;

   for (unsigned i = 0; i < tex->num_srcs; i++) {
      nir_deref_instr *deref;
      switch (tex->src[i].src_type) {
      case nir_tex_src_texture_deref:
         tex->src[i].src_type = nir_tex_src_texture_handle;
         deref = nir_src_as_deref(tex->src[i].src);
         break;
      case nir_tex_src_sampler_deref:
         tex->src[i].src_type = nir_tex_src_sampler_handle;
         deref = nir_src_as_deref(tex->src[i].src);
         break;
      default:
         continue;
      }

      nir_def *resource = vulkan_resource_from_deref(b, deref, layout, plane);
      nir_src_rewrite(&tex->src[i].src, resource);
   }
}

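/* Replace an image intrinsic's deref source with a bindless resource handle
 * (the 'true' argument to nir_rewrite_image_intrinsic selects the bindless
 * form).
 */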
static void
lower_image_intrinsic(nir_builder *b,
                      nir_intrinsic_instr *intrin,
                      void *data_cb)
{
   const struct lvp_pipeline_layout *layout = data_cb;

   nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);

   nir_def *resource = vulkan_resource_from_deref(b, deref, layout, 0);
   nir_rewrite_image_intrinsic(intrin, resource, true);
}

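/* Loads from VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK bindings read data
 * stored in the descriptor set itself: point the load at the set (with the
 * usual +1 bias) and add the block's offset within the set to the load
 * offset.
 */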
static bool
lower_load_ubo(nir_builder *b, nir_intrinsic_instr *intrin, void *data_cb)
{
   if (intrin->intrinsic != nir_intrinsic_load_ubo)
      return false;

   nir_binding binding = nir_chase_binding(intrin->src[0]);
   /* If binding.success=false, then this is a variable pointer, which we
    * don't support with VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK.
    */
   if (!binding.success)
      return false;

   const struct lvp_descriptor_set_binding_layout *bind_layout =
      get_binding_layout(data_cb, binding.desc_set, binding.binding);
   if (bind_layout->type != VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
      return false;

   b->cursor = nir_before_instr(&intrin->instr);

   nir_src_rewrite(&intrin->src[0], nir_imm_int(b, binding.desc_set + 1));

   nir_def *offset = nir_iadd_imm(b, intrin->src[1].ssa, bind_layout->uniform_block_offset);
   nir_src_rewrite(&intrin->src[1], offset);

   return true;
}

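/* Lowering callback: return the replacement SSA def for intrinsics that
 * produce one; image and texture instructions are rewritten in place and
 * NULL is returned to keep them.
 */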
static nir_def *lower_vri_instr(struct nir_builder *b,
                                nir_instr *instr, void *data_cb)
{
   if (instr->type == nir_instr_type_intrinsic) {
      nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
      switch (intrin->intrinsic) {
      case nir_intrinsic_vulkan_resource_index:
         return lower_vri_intrin_vri(b, instr, data_cb);

      case nir_intrinsic_vulkan_resource_reindex:
         return lower_vri_intrin_vrri(b, instr, data_cb);

      case nir_intrinsic_load_vulkan_descriptor:
         return lower_vri_intrin_lvd(b, instr, data_cb);

      case nir_intrinsic_get_ssbo_size: {
         /* Ignore the offset component. */
         b->cursor = nir_before_instr(instr);
         nir_def *resource = intrin->src[0].ssa;
         nir_src_rewrite(&intrin->src[0], resource);
         return NULL;
      }
      case nir_intrinsic_image_deref_sparse_load:
      case nir_intrinsic_image_deref_load:
      case nir_intrinsic_image_deref_store:
      case nir_intrinsic_image_deref_atomic:
      case nir_intrinsic_image_deref_atomic_swap:
      case nir_intrinsic_image_deref_size:
      case nir_intrinsic_image_deref_samples:
         b->cursor = nir_before_instr(instr);
         lower_image_intrinsic(b, intrin, data_cb);
         return NULL;

      default:
         return NULL;
      }
   }

   if (instr->type == nir_instr_type_tex) {
      b->cursor = nir_before_instr(instr);
      lower_vri_instr_tex(b, nir_instr_as_tex(instr), data_cb);
   }

   return NULL;
}

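/* Entry point: rewrite inline-uniform-block UBO loads first, then lower all
 * remaining descriptor references in a single pass over the shader.
 */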
void lvp_lower_pipeline_layout(const struct lvp_device *device,
                               struct lvp_pipeline_layout *layout,
                               nir_shader *shader)
{
   nir_shader_intrinsics_pass(shader, lower_load_ubo,
                              nir_metadata_control_flow,
                              layout);
   nir_shader_lower_instructions(shader, lower_vulkan_resource_index,
                                 lower_vri_instr, layout);
}