1 /*
2 * Copyright © 2019 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include "nir.h"
25 #include "nir_builder.h"
26
/* Per-function worker for nir_lower_array_deref_of_vec().
 *
 * Walks every instruction in @impl looking for load/store/interp intrinsics
 * whose deref source is an array deref of a vector, and lowers each one
 * according to @options:
 *
 *  - loads (and interp_deref_at_*) are widened into full-vector operations
 *    followed by a nir_vector_extract of the addressed component;
 *  - stores are rewritten into write-masked vector stores on the parent
 *    vector deref (one per possible component for indirect indices).
 *
 * @modes   restricts lowering to derefs that can only be of these variable
 *          modes; derefs that might touch other modes are skipped.
 * @filter  optional per-variable predicate; lowering is skipped when it
 *          returns false for the deref's variable.
 *
 * Returns true if any instruction was lowered.
 */
static bool
nir_lower_array_deref_of_vec_impl(nir_function_impl *impl,
                                  nir_variable_mode modes,
                                  bool (*filter)(nir_variable *),
                                  nir_lower_array_deref_of_vec_options options)
{
   bool progress = false;
   /* Tracks whether we emitted nir_build_write_masked_stores(), which
    * inserts control flow and therefore invalidates more metadata below.
    */
   bool has_indirect_store = false;

   nir_builder b = nir_builder_create(impl);

   nir_foreach_block(block, impl) {
      /* _safe iteration: we may remove the current instruction below */
      nir_foreach_instr_safe(instr, block) {
         if (instr->type != nir_instr_type_intrinsic)
            continue;

         nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
         /* This pass assumes copy_deref instructions were already lowered */
         assert(intrin->intrinsic != nir_intrinsic_copy_deref);

         if (intrin->intrinsic != nir_intrinsic_load_deref &&
             intrin->intrinsic != nir_intrinsic_interp_deref_at_centroid &&
             intrin->intrinsic != nir_intrinsic_interp_deref_at_sample &&
             intrin->intrinsic != nir_intrinsic_interp_deref_at_offset &&
             intrin->intrinsic != nir_intrinsic_interp_deref_at_vertex &&
             intrin->intrinsic != nir_intrinsic_store_deref)
            continue;

         nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);

         /* We choose to be conservative here.  If the deref contains any
          * modes which weren't specified, we bail and don't bother lowering.
          */
         if (!nir_deref_mode_must_be(deref, modes))
            continue;

         /* We only care about array derefs that act on vectors */
         if (deref->deref_type != nir_deref_type_array)
            continue;

         nir_deref_instr *vec_deref = nir_deref_instr_parent(deref);
         if (!glsl_type_is_vector(vec_deref->type))
            continue;

         if (filter && !filter(nir_deref_instr_get_variable(deref)))
            continue;

         /* An array deref of a vector addresses a single scalar component */
         assert(intrin->num_components == 1);
         unsigned num_components = glsl_get_components(vec_deref->type);
         assert(num_components > 1 && num_components <= NIR_MAX_VEC_COMPONENTS);

         /* New instructions go right after the one being lowered */
         b.cursor = nir_after_instr(&intrin->instr);

         if (intrin->intrinsic == nir_intrinsic_store_deref) {
            nir_def *value = intrin->src[1].ssa;

            if (nir_src_is_const(deref->arr.index)) {
               if (!(options & nir_lower_direct_array_deref_of_vec_store))
                  continue;

               unsigned index = nir_src_as_uint(deref->arr.index);
               /* If index is OOB, we throw the old store away and don't
                * replace it with anything.
                */
               if (index < num_components)
                  nir_build_write_masked_store(&b, vec_deref, value, index);
            } else {
               if (!(options & nir_lower_indirect_array_deref_of_vec_store))
                  continue;

               /* Dynamic index: emit a masked store per candidate component
                * (this inserts control flow — see has_indirect_store).
                */
               nir_def *index = deref->arr.index.ssa;
               nir_build_write_masked_stores(&b, vec_deref, value, index,
                                             0, num_components);

               has_indirect_store = true;
            }
            /* The original scalar store is fully replaced (or dropped) */
            nir_instr_remove(&intrin->instr);

            progress = true;
         } else {
            if (nir_src_is_const(deref->arr.index)) {
               if (!(options & nir_lower_direct_array_deref_of_vec_load))
                  continue;
            } else {
               if (!(options & nir_lower_indirect_array_deref_of_vec_load))
                  continue;
            }

            /* Turn the load into a vector load */
            nir_src_rewrite(&intrin->src[0], &vec_deref->def);
            intrin->def.num_components = num_components;
            intrin->num_components = num_components;

            /* Extract the originally-addressed component from the vector */
            nir_def *index = deref->arr.index.ssa;
            nir_def *scalar =
               nir_vector_extract(&b, &intrin->def, index);
            if (scalar->parent_instr->type == nir_instr_type_undef) {
               /* Extraction folded to undef (e.g. OOB constant index), so
                * the widened load is dead — replace and remove it.
                */
               nir_def_replace(&intrin->def, scalar);
            } else {
               /* Keep the vector load; uses after the extract now read the
                * extracted scalar instead.
                */
               nir_def_rewrite_uses_after(&intrin->def,
                                          scalar,
                                          scalar->parent_instr);
            }
            progress = true;
         }
      }
   }

   if (progress) {
      /* indirect store lower will change control flow */
      nir_metadata_preserve(impl, has_indirect_store ? nir_metadata_none : nir_metadata_control_flow);
   } else {
      nir_metadata_preserve(impl, nir_metadata_all);
   }

   return progress;
}
143
144 /* Lowers away array dereferences on vectors
145 *
146 * These are allowed on certain variable types such as SSBOs and TCS outputs.
147 * However, not everyone can actually handle them everywhere. There are also
148 * cases where we want to lower them for performance reasons.
149 *
150 * This patch assumes that copy_deref instructions have already been lowered.
151 */
152 bool
nir_lower_array_deref_of_vec(nir_shader * shader,nir_variable_mode modes,bool (* filter)(nir_variable *),nir_lower_array_deref_of_vec_options options)153 nir_lower_array_deref_of_vec(nir_shader *shader, nir_variable_mode modes,
154 bool (*filter)(nir_variable *),
155 nir_lower_array_deref_of_vec_options options)
156 {
157 bool progress = false;
158
159 nir_foreach_function_impl(impl, shader) {
160 if (nir_lower_array_deref_of_vec_impl(impl, modes, filter, options))
161 progress = true;
162 }
163
164 return progress;
165 }
166