/*
 * Copyright © 2014 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "nir.h"
#include "nir_vla.h"
#include "nir_worklist.h"

/*
 * Basic liveness analysis. This works only in SSA form.
 *
 * This liveness pass treats phi nodes as being melded to the space between
 * blocks so that the destination of a phi is in the live_in of the block
 * in which it resides and the sources are in the live_out of the
 * corresponding predecessor blocks. By formulating the liveness information
 * in this way, we ensure that the definition of any variable dominates its
 * entire live range. This is true because the only way that the definition
 * of an SSA value may not dominate a use is if the use is in a phi node, and
 * the uses in phi nodes are in the live_out of the corresponding predecessor
 * block but not in the live_in of the block containing the phi node.
 */
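
/* For illustration, consider a hypothetical CFG (not taken from any real
 * shader):
 *
 *    block b1:  ssa_1 = ...
 *    block b2:  ssa_2 = ...
 *    block b3:  ssa_3 = phi(b1: ssa_1, b2: ssa_2)
 *               ... use of ssa_3 ...
 *
 * With the formulation above, ssa_1 lands in the live_out of b1 (but not of
 * b2), ssa_2 lands in the live_out of b2 (but not of b1), and ssa_3 appears
 * in the live_in of b3. Each value's live range therefore stays inside the
 * region dominated by its definition.
 */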

struct live_defs_state {
   unsigned bitset_words;

   /* Used in propagate_across_edge() */
   BITSET_WORD *tmp_live;

   nir_block_worklist worklist;
};

/* Initialize the liveness data to zero and add the given block to the
 * worklist.
 */
static void
init_liveness_block(nir_block *block,
                    struct live_defs_state *state)
{
   block->live_in = reralloc(block, block->live_in, BITSET_WORD,
                             state->bitset_words);
   memset(block->live_in, 0, state->bitset_words * sizeof(BITSET_WORD));

   block->live_out = reralloc(block, block->live_out, BITSET_WORD,
                              state->bitset_words);
   memset(block->live_out, 0, state->bitset_words * sizeof(BITSET_WORD));

   nir_block_worklist_push_head(&state->worklist, block);
}

static bool
set_src_live(nir_src *src, void *void_live)
{
   BITSET_WORD *live = void_live;

   if (nir_src_is_undef(*src))
      return true; /* undefined variables are never live */

   BITSET_SET(live, src->ssa->index);

   return true;
}

static bool
set_ssa_def_dead(nir_def *def, void *void_live)
{
   BITSET_WORD *live = void_live;

   BITSET_CLEAR(live, def->index);

   return true;
}

/** Propagates the live_in of succ across the edge to the live_out of pred
 *
 * Phi nodes exist "between" blocks and all the phi nodes at the start of a
 * block act "in parallel". When we propagate from the live_in of one
 * block to the live_out of the other, we have to kill any writes from phis
 * and make live any sources.
 *
 * Returns true if updating the live_out of pred added anything.
 */
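/* As a concrete sketch using the hypothetical phi from the example near the
 * top of this file: when propagating from b3 back across the b1 edge, ssa_3
 * (the phi destination) is cleared from the temporary copy of b3's live_in
 * and ssa_1 (the source associated with the b1 edge) is set, before the
 * result is OR'd into b1's live_out.
 */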
static bool
propagate_across_edge(nir_block *pred, nir_block *succ,
                      struct live_defs_state *state)
{
   BITSET_WORD *live = state->tmp_live;
   memcpy(live, succ->live_in, state->bitset_words * sizeof *live);

   nir_foreach_phi(phi, succ) {
      set_ssa_def_dead(&phi->def, live);
   }

   nir_foreach_phi(phi, succ) {
      nir_foreach_phi_src(src, phi) {
         if (src->pred == pred) {
            set_src_live(&src->src, live);
            break;
         }
      }
   }

   BITSET_WORD progress = 0;
   for (unsigned i = 0; i < state->bitset_words; ++i) {
      progress |= live[i] & ~pred->live_out[i];
      pred->live_out[i] |= live[i];
   }
   return progress != 0;
}

void
nir_live_defs_impl(nir_function_impl *impl)
{
   struct live_defs_state state = {
      .bitset_words = BITSET_WORDS(impl->ssa_alloc),
   };
   state.tmp_live = rzalloc_array(impl, BITSET_WORD, state.bitset_words);

   /* Number the instructions so we can do cheap interference tests using the
    * instruction index.
    */
   nir_metadata_require(impl, nir_metadata_instr_index);

   nir_block_worklist_init(&state.worklist, impl->num_blocks, NULL);

   /* Allocate live_in and live_out sets and add all of the blocks to the
    * worklist.
    */
   nir_foreach_block(block, impl) {
      init_liveness_block(block, &state);
   }

   /* We're now ready to work through the worklist and update the liveness
    * sets of each of the blocks. By the time we get to this point, every
    * block in the function implementation has been pushed onto the
    * worklist in reverse order. As long as we keep the worklist
    * up-to-date as we go, everything will get covered.
    */
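   /* In dataflow terms this is the standard backward liveness fixed-point,
    * sketched here for reference (def(B)/use(B) are the per-block kill/gen
    * sets, with the phi handling described above folded into the edges):
    *
    *    live_out(B) = union over successors S of live_in(S)
    *    live_in(B)  = (live_out(B) - def(B)) | use(B)
    *
    * iterated until nothing changes.
    */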
   while (!nir_block_worklist_is_empty(&state.worklist)) {
      /* We pop them off in the reverse order we pushed them on. This way
       * the first walk of the instructions is backwards so we only walk
       * once in the case of no control flow.
       */
      nir_block *block = nir_block_worklist_pop_head(&state.worklist);

      memcpy(block->live_in, block->live_out,
             state.bitset_words * sizeof(BITSET_WORD));

      nir_if *following_if = nir_block_get_following_if(block);
      if (following_if)
         set_src_live(&following_if->condition, block->live_in);

      nir_foreach_instr_reverse(instr, block) {
         /* Phi nodes are handled separately so we want to skip them. Since
          * we are going backwards and they are at the beginning, we can just
          * break as soon as we see one.
          */
         if (instr->type == nir_instr_type_phi)
            break;

         nir_foreach_def(instr, set_ssa_def_dead, block->live_in);
         nir_foreach_src(instr, set_src_live, block->live_in);
      }

      /* Walk over all of the predecessors of the current block updating
       * their live out with the live in of this one. If anything has
       * changed, add the predecessor to the work list so that we ensure
       * that the new information is used.
       */
      set_foreach(block->predecessors, entry) {
         nir_block *pred = (nir_block *)entry->key;
         if (propagate_across_edge(pred, block, &state))
            nir_block_worklist_push_tail(&state.worklist, pred);
      }
   }

   ralloc_free(state.tmp_live);
   nir_block_worklist_fini(&state.worklist);
}

/** Return the live set at a cursor
 *
 * Note: The bitset returned may be the live_in or live_out from the block in
 * which the instruction lives. Do not ralloc_free() it directly; instead,
 * provide a mem_ctx and free that.
 */
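/* A minimal usage sketch (instr and some_def here are hypothetical, and
 * nir_metadata_live_defs is assumed to be valid on the impl):
 *
 *    void *mem_ctx = ralloc_context(NULL);
 *    const BITSET_WORD *live = nir_get_live_defs(nir_before_instr(instr),
 *                                                mem_ctx);
 *    if (BITSET_TEST(live, some_def->index)) {
 *       ... some_def is live immediately before instr ...
 *    }
 *    ralloc_free(mem_ctx);
 */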
const BITSET_WORD *
nir_get_live_defs(nir_cursor cursor, void *mem_ctx)
{
   nir_block *block = nir_cursor_current_block(cursor);
   nir_function_impl *impl = nir_cf_node_get_function(&block->cf_node);
   assert(impl->valid_metadata & nir_metadata_live_defs);

   switch (cursor.option) {
   case nir_cursor_before_block:
      return cursor.block->live_in;

   case nir_cursor_after_block:
      return cursor.block->live_out;

   case nir_cursor_before_instr:
      if (cursor.instr == nir_block_first_instr(cursor.instr->block))
         return cursor.instr->block->live_in;
      break;

   case nir_cursor_after_instr:
      if (cursor.instr == nir_block_last_instr(cursor.instr->block))
         return cursor.instr->block->live_out;
      break;
   }

   /* If we got here, we're an instruction cursor mid-block */
   const unsigned bitset_words = BITSET_WORDS(impl->ssa_alloc);
   BITSET_WORD *live = ralloc_array(mem_ctx, BITSET_WORD, bitset_words);
   memcpy(live, block->live_out, bitset_words * sizeof(BITSET_WORD));

   nir_foreach_instr_reverse(instr, block) {
      if (cursor.option == nir_cursor_after_instr && instr == cursor.instr)
         break;

      /* If someone asked for liveness in the middle of a bunch of phis,
       * that's an error. Since we are going backwards and they are at the
       * beginning, we can just blow up as soon as we see one.
       */
      assert(instr->type != nir_instr_type_phi);
      if (instr->type == nir_instr_type_phi)
         break;

      nir_foreach_def(instr, set_ssa_def_dead, live);
      nir_foreach_src(instr, set_src_live, live);

      if (cursor.option == nir_cursor_before_instr && instr == cursor.instr)
         break;
   }

   return live;
}

static bool
src_does_not_use_def(nir_src *src, void *def)
{
   return src->ssa != (nir_def *)def;
}

static bool
search_for_use_after_instr(nir_instr *start, nir_def *def)
{
   /* Only look for a use strictly after the given instruction */
   struct exec_node *node = start->node.next;
   while (!exec_node_is_tail_sentinel(node)) {
      nir_instr *instr = exec_node_data(nir_instr, node, node);
      if (!nir_foreach_src(instr, src_does_not_use_def, def))
         return true;
      node = node->next;
   }

   /* If-condition uses are considered to be in the block immediately
    * preceding the if, so we also need to check the following if
    * condition, if any.
    */
   nir_if *following_if = nir_block_get_following_if(start->block);
   if (following_if && following_if->condition.ssa == def)
      return true;

   return false;
}

/* Returns true if def is live at instr assuming that def comes before
 * instr in a pre-order DFS of the dominance tree.
 */
static bool
nir_def_is_live_at(nir_def *def, nir_instr *instr)
{
   if (BITSET_TEST(instr->block->live_out, def->index)) {
      /* Since def dominates instr, if def is in the live_out of the block,
       * it's live at instr
       */
      return true;
   } else {
      if (BITSET_TEST(instr->block->live_in, def->index) ||
          def->parent_instr->block == instr->block) {
         /* In this case it is either live coming into instr's block or it
          * is defined in the same block. In this case, we simply need to
          * see if it is used after instr.
          */
         return search_for_use_after_instr(instr, def);
      } else {
         return false;
      }
   }
}

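/* A minimal usage sketch (a and b are hypothetical defs; the liveness and
 * instruction-index metadata computed above must be valid):
 *
 *    if (nir_defs_interfere(a, b)) {
 *       ... a and b cannot share a register or storage location ...
 *    }
 */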
bool
nir_defs_interfere(nir_def *a, nir_def *b)
{
   if (a->parent_instr == b->parent_instr) {
      /* Two variables defined at the same time interfere assuming at
       * least one isn't dead.
       */
      return true;
   } else if (a->parent_instr->type == nir_instr_type_undef ||
              b->parent_instr->type == nir_instr_type_undef) {
      /* If either variable is an ssa_undef, then there's no interference */
      return false;
   } else if (a->parent_instr->index < b->parent_instr->index) {
      return nir_def_is_live_at(a, b->parent_instr);
   } else {
      return nir_def_is_live_at(b, a->parent_instr);
   }
}