/*
 * Copyright © 2014 Rob Clark <[email protected]>
 * SPDX-License-Identifier: MIT
 *
 * Authors:
 *    Rob Clark <[email protected]>
 */

#include "util/dag.h"
#include "util/u_math.h"

#include "ir3.h"
#include "ir3_compiler.h"

#if MESA_DEBUG
#define SCHED_DEBUG (ir3_shader_debug & IR3_DBG_SCHEDMSGS)
#else
#define SCHED_DEBUG 0
#endif
#define d(fmt, ...)                                                            \
   do {                                                                        \
      if (SCHED_DEBUG) {                                                       \
         mesa_logi("SCHED: " fmt, ##__VA_ARGS__);                              \
      }                                                                        \
   } while (0)

#define di(instr, fmt, ...)                                                    \
   do {                                                                        \
      if (SCHED_DEBUG) {                                                       \
         struct log_stream *stream = mesa_log_streami();                       \
         mesa_log_stream_printf(stream, "SCHED: " fmt ": ", ##__VA_ARGS__);    \
         ir3_print_instr_stream(stream, instr);                                \
         mesa_log_stream_destroy(stream);                                      \
      }                                                                        \
   } while (0)

/*
 * Instruction Scheduling:
 *
 * A block-level pre-RA scheduler, which works by creating a DAG of
 * instruction dependencies, and heuristically picking a DAG head
 * (instruction with no unscheduled dependencies).
 *
 * Where possible, it tries to pick instructions that avoid nop delay
 * slots, but it will prefer to pick instructions that reduce (or do
 * not increase) the number of live values.
 *
 * If the only possible choices are instructions that increase the
 * number of live values, it will try to pick the one with the earliest
 * consumer (based on pre-sched program order).
 *
 * There are a few special cases that need to be handled, since sched
 * is currently independent of register allocation.  Usages of address
 * register (a0.x) or predicate register (p0.x) must be serialized.  Ie.
 * if you have two pairs of instructions that write the same special
 * register and then read it, then those pairs cannot be interleaved.
 * To solve this, when we are in such a scheduling "critical section",
 * and we encounter a conflicting write to a special register, we try
 * to schedule any remaining instructions that use that value first.
 *
 * TODO we can detect too-large live_values here.. would be a good place
 * to "spill" cheap things, like move from uniform/immed.  (Constructing a
 * list of ssa def consumers before the sched pass would make this easier.)
 * Also, in general it might be best not to re-use load_immed across
 * blocks.
 *
 * TODO we can use (abs)/(neg) src modifiers in a lot of cases to reduce
 * the # of immediates in play (or at least that would help with
 * dEQP-GLES31.functional.ubo.random.all_per_block_buffers.*).. probably
 * do this in a nir pass that inserts fneg/etc?  The cp pass should fold
 * these into src modifiers..
 */

struct ir3_sched_ctx {
   struct ir3_compiler *compiler;
   struct ir3_block *block; /* the current block */
   struct dag *dag;

   struct list_head unscheduled_list; /* unscheduled instructions */
   struct ir3_instruction *scheduled; /* last scheduled instr */
   struct ir3_instruction *addr0;     /* current a0.x user, if any */
   struct ir3_instruction *addr1;     /* current a1.x user, if any */

   struct ir3_instruction *split; /* most-recently-split a0/a1 producer */

   int remaining_kills;
   int remaining_tex;

   bool error;

   unsigned ip;

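   /* Remaining "soft" delay cycles from the most recently scheduled
    * (sy)/(ss) producer, decremented as other instructions are scheduled.
    * While non-zero, should_defer() tries to avoid scheduling a consumer
    * that would sync on it.
    */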
   int sy_delay;
   int ss_delay;

   /* We order the scheduled (sy)/(ss) producers, and keep track of the
    * index of the last waited on instruction, so we can know which
    * instructions are still outstanding (and therefore would require us to
    * wait for all outstanding instructions before scheduling a use).
    */
   int sy_index, first_outstanding_sy_index;
   int ss_index, first_outstanding_ss_index;
};

struct ir3_sched_node {
   struct dag_node dag; /* must be first for util_dynarray_foreach */
   struct ir3_instruction *instr;

   unsigned delay;
   unsigned max_delay;

   unsigned sy_index;
   unsigned ss_index;

   /* For a ready instruction, the earliest possible ip at which it could
    * be scheduled.
    */
   unsigned earliest_ip;

   /* For instructions that are a meta:collect src, once we schedule
    * the first src of the collect, the entire vecN is live (at least
    * from the PoV of the first RA pass.. the 2nd scalar pass can fill
    * in some of the gaps, but often not all).  So we want to help out
    * RA, and realize that as soon as we schedule the first collect
    * src, there is no penalty to schedule the remainder (ie. they
    * don't make additional values live).  In fact we'd prefer to
    * schedule the rest ASAP to minimize the live range of the vecN.
    *
    * For instructions that are the src of a collect, we track the
    * corresponding collect, and mark them as partially live as soon
    * as any one of the src's is scheduled.
    */
   struct ir3_instruction *collect;
   bool partially_live;

   /* Is this instruction a direct or indirect dependency for a kill?
    * If so, we should prioritize it when possible
    */
   bool kill_path;

   /* This node represents a shader output.  A semi-common pattern in
    * shaders is something along the lines of:
    *
    *    fragcolor.w = 1.0
    *
    * Which we'd prefer to schedule as late as possible, since it
    * produces a live value that is never killed/consumed.  So detect
    * outputs up-front, and avoid scheduling them unless they reduce
    * register pressure (or are at least neutral)
    */
   bool output;
};

#define foreach_sched_node(__n, __list)                                        \
   list_for_each_entry (struct ir3_sched_node, __n, __list, dag.link)

static void sched_node_init(struct ir3_sched_ctx *ctx,
                            struct ir3_instruction *instr);
static void sched_node_add_dep(struct ir3_sched_ctx *ctx,
                               struct ir3_instruction *instr,
                               struct ir3_instruction *src, int i);

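/* Scheduled instructions are tagged with the IR3_INSTR_MARK flag, which is
 * set in schedule().
 */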
static bool
is_scheduled(struct ir3_instruction *instr)
{
   return !!(instr->flags & IR3_INSTR_MARK);
}

/* check_src_cond() passing the user and ir3_sched_ctx. */
static bool
sched_check_src_cond(struct ir3_instruction *instr,
                     bool (*cond)(struct ir3_instruction *,
                                  struct ir3_instruction *,
                                  struct ir3_sched_ctx *),
                     struct ir3_sched_ctx *ctx)
{
   foreach_ssa_src (src, instr) {
      /* meta:split/collect aren't real instructions, the thing that
       * we actually care about is *their* srcs
       */
      if ((src->opc == OPC_META_SPLIT) || (src->opc == OPC_META_COLLECT)) {
         if (sched_check_src_cond(src, cond, ctx))
            return true;
      } else {
         if (cond(src, instr, ctx))
            return true;
      }
   }

   return false;
}

/* Is this a sy producer that hasn't been waited on yet? */

static bool
is_outstanding_sy(struct ir3_instruction *instr, struct ir3_instruction *use,
                  struct ir3_sched_ctx *ctx)
{
   if (!is_sy_producer(instr))
      return false;

   /* The sched node is only valid within the same block, we cannot
    * really say anything about src's from other blocks
    */
   if (instr->block != ctx->block)
      return true;

   struct ir3_sched_node *n = instr->data;
   return n->sy_index >= ctx->first_outstanding_sy_index;
}

static bool
is_outstanding_ss(struct ir3_instruction *instr, struct ir3_instruction *use,
                  struct ir3_sched_ctx *ctx)
{
   if (!needs_ss(ctx->compiler, instr, use))
      return false;

   /* The sched node is only valid within the same block, we cannot
    * really say anything about src's from other blocks
    */
   if (instr->block != ctx->block)
      return true;

   struct ir3_sched_node *n = instr->data;
   return n->ss_index >= ctx->first_outstanding_ss_index;
}

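/* Rough count of the cycles an instruction contributes once scheduled,
 * used to decay the soft (ss)/(sy) delay counters.  Meta instructions
 * are free, except that a collect is assumed to expand to one mov per
 * immed/const src.
 */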
static unsigned
cycle_count(struct ir3_instruction *instr)
{
   if (instr->opc == OPC_META_COLLECT) {
      /* Assume that only immed/const sources produce moves */
      unsigned n = 0;
      foreach_src (src, instr) {
         if (src->flags & (IR3_REG_IMMED | IR3_REG_CONST))
            n++;
      }
      return n;
   } else if (is_meta(instr)) {
      return 0;
   } else {
      return 1;
   }
}

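/* Commit an instruction: move it from the unscheduled list to the end of
 * the block, update addr0/addr1 and kill bookkeeping, advance the ip past
 * any required delay, propagate earliest_ip to DAG children, and update
 * the (ss)/(sy) tracking state.
 */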
static void
schedule(struct ir3_sched_ctx *ctx, struct ir3_instruction *instr)
{
   assert(ctx->block == instr->block);

   /* remove from depth list:
    */
   list_delinit(&instr->node);

   if (writes_addr0(instr)) {
      assert(ctx->addr0 == NULL);
      ctx->addr0 = instr;
   }

   if (writes_addr1(instr)) {
      assert(ctx->addr1 == NULL);
      ctx->addr1 = instr;
   }

   instr->flags |= IR3_INSTR_MARK;

   di(instr, "schedule");

   list_addtail(&instr->node, &instr->block->instr_list);
   ctx->scheduled = instr;

   if (is_kill_or_demote(instr)) {
      assert(ctx->remaining_kills > 0);
      ctx->remaining_kills--;
   }

   struct ir3_sched_node *n = instr->data;

   /* If this instruction is a meta:collect src, mark the remaining
    * collect srcs as partially live.
    */
   if (n->collect) {
      foreach_ssa_src (src, n->collect) {
         if (src->block != instr->block)
            continue;
         struct ir3_sched_node *sn = src->data;
         sn->partially_live = true;
      }
   }

   bool counts_for_delay = is_alu(instr) || is_flow(instr);

   /* TODO: switch to "cycles". For now try to match ir3_delay. */
   unsigned delay_cycles = counts_for_delay ? 1 + instr->repeat : 0;

   /* We insert any nop's needed to get to earliest_ip, then advance
    * delay_cycles by scheduling the instruction.
    */
   ctx->ip = MAX2(ctx->ip, n->earliest_ip) + delay_cycles;

   util_dynarray_foreach (&n->dag.edges, struct dag_edge, edge) {
      unsigned delay = (unsigned)(uintptr_t)edge->data;
      struct ir3_sched_node *child =
         container_of(edge->child, struct ir3_sched_node, dag);
      child->earliest_ip = MAX2(child->earliest_ip, ctx->ip + delay);
   }

   dag_prune_head(ctx->dag, &n->dag);

   unsigned cycles = cycle_count(instr);

   if (is_ss_producer(instr)) {
      ctx->ss_delay = soft_ss_delay(instr);
      n->ss_index = ctx->ss_index++;
   } else if (!is_meta(instr) &&
              sched_check_src_cond(instr, is_outstanding_ss, ctx)) {
      ctx->ss_delay = 0;
      ctx->first_outstanding_ss_index = ctx->ss_index;
   } else if (ctx->ss_delay > 0) {
      ctx->ss_delay -= MIN2(cycles, ctx->ss_delay);
   }

   if (is_sy_producer(instr)) {
      /* NOTE that this isn't an attempt to hide texture fetch latency,
       * but an attempt to hide the cost of switching to another warp.
       * If we can, we'd like to try to schedule another texture fetch
       * before scheduling something that would sync.
       */
      ctx->sy_delay = soft_sy_delay(instr, ctx->block->shader);
      assert(ctx->remaining_tex > 0);
      ctx->remaining_tex--;
      n->sy_index = ctx->sy_index++;
   } else if (!is_meta(instr) &&
              sched_check_src_cond(instr, is_outstanding_sy, ctx)) {
      ctx->sy_delay = 0;
      ctx->first_outstanding_sy_index = ctx->sy_index;
   } else if (ctx->sy_delay > 0) {
      ctx->sy_delay -= MIN2(cycles, ctx->sy_delay);
   }
}

struct ir3_sched_notes {
   /* there is at least one kill which could be scheduled, except
    * for unscheduled bary.f's:
    */
   bool blocked_kill;
   /* there is at least one instruction that could be scheduled,
    * except for conflicting address register usage:
    */
   bool addr0_conflict, addr1_conflict;
};

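/* Instructions that are ready but that we'd rather not pick right now:
 * currently just expensive tex/mem accesses while there are still
 * unscheduled kills that don't depend on them.
 */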
static bool
should_skip(struct ir3_sched_ctx *ctx, struct ir3_instruction *instr)
{
   if (ctx->remaining_kills && (is_tex(instr) || is_mem(instr))) {
      /* avoid texture/memory access if we have unscheduled kills
       * that could make the expensive operation unnecessary.  By
       * definition, if there are remaining kills, and this instr
       * is not a dependency of a kill, there are other instructions
       * that we can choose from.
       */
      struct ir3_sched_node *n = instr->data;
      if (!n->kill_path)
         return true;
   }

   return false;
}

/* could an instruction be scheduled if specified ssa src was scheduled? */
static bool
could_sched(struct ir3_sched_ctx *ctx,
            struct ir3_instruction *instr, struct ir3_instruction *src)
{
   foreach_ssa_src (other_src, instr) {
      /* if dependency not scheduled, we aren't ready yet: */
      if ((src != other_src) && !is_scheduled(other_src)) {
         return false;
      }
   }

   /* Instructions not in the current block can never be scheduled.
    */
   if (instr->block != src->block)
      return false;

   return !should_skip(ctx, instr);
}

/* Check if instruction is ok to schedule.  Make sure it is not blocked
 * by use of addr/predicate register, etc.
 */
static bool
check_instr(struct ir3_sched_ctx *ctx, struct ir3_sched_notes *notes,
            struct ir3_instruction *instr)
{
   assert(!is_scheduled(instr));

   if (instr == ctx->split) {
      /* Don't schedule instructions created by splitting an a0.x/a1.x/p0.x
       * write until another "normal" instruction has been scheduled.
       */
      return false;
   }

   if (should_skip(ctx, instr))
      return false;

   /* For instructions that write address register we need to
    * make sure there is at least one instruction that uses the
    * addr value which is otherwise ready.
    *
    * NOTE if any instructions use pred register and have other
    * src args, we would need to do the same for writes_pred()..
    */
   if (writes_addr0(instr)) {
      struct ir3 *ir = instr->block->shader;
      bool ready = false;
      for (unsigned i = 0; (i < ir->a0_users_count) && !ready; i++) {
         struct ir3_instruction *indirect = ir->a0_users[i];
         if (!indirect)
            continue;
         if (indirect->address->def != instr->dsts[0])
            continue;
         ready = could_sched(ctx, indirect, instr);
      }

      /* nothing could be scheduled, so keep looking: */
      if (!ready)
         return false;
   }

   if (writes_addr1(instr)) {
      struct ir3 *ir = instr->block->shader;
      bool ready = false;
      for (unsigned i = 0; (i < ir->a1_users_count) && !ready; i++) {
         struct ir3_instruction *indirect = ir->a1_users[i];
         if (!indirect)
            continue;
         if (indirect->address->def != instr->dsts[0])
            continue;
         ready = could_sched(ctx, indirect, instr);
      }

      /* nothing could be scheduled, so keep looking: */
      if (!ready)
         return false;
   }

   /* if this is a write to address/predicate register, and that
    * register is currently in use, we need to defer until it is
    * free:
    */
   if (writes_addr0(instr) && ctx->addr0) {
      assert(ctx->addr0 != instr);
      notes->addr0_conflict = true;
      return false;
   }

   if (writes_addr1(instr) && ctx->addr1) {
      assert(ctx->addr1 != instr);
      notes->addr1_conflict = true;
      return false;
   }

   /* if the instruction is a kill, we need to ensure *every*
    * bary.f is scheduled.  The hw seems unhappy if the thread
    * gets killed before the end-input (ei) flag is hit.
    *
    * We could do this by adding each bary.f instruction as
    * virtual ssa src for the kill instruction.  But we have
    * fixed length instr->srcs[].
    *
    * TODO we could handle this by false-deps now, probably.
    */
   if (is_kill_or_demote(instr)) {
      struct ir3 *ir = instr->block->shader;

      for (unsigned i = 0; i < ir->baryfs_count; i++) {
         struct ir3_instruction *baryf = ir->baryfs[i];
         if (baryf->flags & IR3_INSTR_UNUSED)
            continue;
         if (!is_scheduled(baryf)) {
            notes->blocked_kill = true;
            return false;
         }
      }
   }

   return true;
}

/* Find the instr->ip of the closest use of an instruction, in
 * pre-sched order.  This isn't going to be the same as post-sched
 * order, but it is a reasonable approximation to limit scheduling
 * instructions *too* early.  This is mostly to prevent bad behavior
 * in cases where we have a large number of possible instructions
 * to choose, to avoid creating too much parallelism (ie. blowing
 * up register pressure)
 *
 * See
 * dEQP-GLES31.functional.atomic_counter.layout.reverse_offset.inc_dec.8_counters_5_calls_1_thread
 */
static int
nearest_use(struct ir3_instruction *instr)
{
   unsigned nearest = ~0;
   foreach_ssa_use (use, instr)
      if (!is_scheduled(use))
         nearest = MIN2(nearest, use->ip);

   /* slight hack.. this heuristic tends to push bary.f's to later
    * in the shader, closer to their uses.  But we actually would
    * prefer to get these scheduled earlier, to unlock varying
    * storage for more VS jobs:
    */
   if (is_input(instr))
      nearest /= 2;

   return nearest;
}

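/* Is 'use' the only remaining unscheduled user of 'instr', ie. would
 * scheduling it free up instr's value?
 */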
static bool
is_only_nonscheduled_use(struct ir3_instruction *instr,
                         struct ir3_instruction *use)
{
   foreach_ssa_use (other_use, instr) {
      if (other_use != use && !is_scheduled(other_use))
         return false;
   }

   return true;
}

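/* Number of GPR components written by this instruction's destinations
 * (non-GPR dests, e.g. address/predicate, are not counted).
 */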
static unsigned
new_regs(struct ir3_instruction *instr)
{
   unsigned regs = 0;

   foreach_dst (dst, instr) {
      if (!is_dest_gpr(dst))
         continue;
      regs += reg_elems(dst);
   }

   return regs;
}

/* find net change to live values if instruction were scheduled: */
static int
live_effect(struct ir3_instruction *instr)
{
   struct ir3_sched_node *n = instr->data;
   int new_live =
      (n->partially_live || !instr->uses || instr->uses->entries == 0)
         ? 0
         : new_regs(instr);
   int freed_live = 0;

   /* if we schedule something that causes a vecN to be live,
    * then count all its other components too:
    */
   if (n->collect)
      new_live *= n->collect->srcs_count;

   foreach_ssa_src_n (src, n, instr) {
      if (__is_false_dep(instr, n))
         continue;

      if (instr->block != src->block)
         continue;

      if (is_only_nonscheduled_use(src, instr))
         freed_live += new_regs(src);
   }

   return new_live - freed_live;
}

/* Determine if this is an instruction that we'd prefer not to schedule
 * yet, in order to avoid an (ss)/(sy) sync.  This is limited by the
 * ss_delay/sy_delay counters (ie. the more cycles it has been since
 * the last SFU/tex, the less costly a sync would be) and by the number
 * of outstanding SFU/tex instructions, to prevent a blowup in register
 * pressure.
 */
static bool
should_defer(struct ir3_sched_ctx *ctx, struct ir3_instruction *instr)
{
   if (ctx->ss_delay) {
      if (sched_check_src_cond(instr, is_outstanding_ss, ctx))
         return true;
   }

   /* We mostly just want to try to schedule another texture fetch
    * before scheduling something that would (sy) sync, so we can
    * limit this rule to cases where there are remaining texture
    * fetches
    */
   if (ctx->sy_delay && ctx->remaining_tex) {
      if (sched_check_src_cond(instr, is_outstanding_sy, ctx))
         return true;
   }

   /* Avoid scheduling too many outstanding texture or sfu instructions at
    * once by deferring further tex/SFU instructions. This both prevents
    * stalls when the queue of texture/sfu instructions becomes too large,
    * and prevents unacceptably large increases in register pressure from too
    * many outstanding texture instructions.
    */
   if (ctx->sy_index - ctx->first_outstanding_sy_index >= 8 && is_sy_producer(instr))
      return true;

   if (ctx->ss_index - ctx->first_outstanding_ss_index >= 8 && is_ss_producer(instr))
      return true;

   return false;
}

static struct ir3_sched_node *choose_instr_inc(struct ir3_sched_ctx *ctx,
                                               struct ir3_sched_notes *notes,
                                               bool defer, bool avoid_output);

enum choose_instr_dec_rank {
   DEC_NEUTRAL,
   DEC_NEUTRAL_READY,
   DEC_FREED,
   DEC_FREED_READY,
};

static const char *
dec_rank_name(enum choose_instr_dec_rank rank)
{
   switch (rank) {
   case DEC_NEUTRAL:
      return "neutral";
   case DEC_NEUTRAL_READY:
      return "neutral+ready";
   case DEC_FREED:
      return "freed";
   case DEC_FREED_READY:
      return "freed+ready";
   default:
      return NULL;
   }
}

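/* Number of nop cycles that would be needed before this node could be
 * scheduled at the current ip (zero means it is ready).
 */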
static unsigned
node_delay(struct ir3_sched_ctx *ctx, struct ir3_sched_node *n)
{
   return MAX2(n->earliest_ip, ctx->ip) - ctx->ip;
}

/**
 * Chooses an instruction to schedule using the Goodman/Hsu (1988) CSR (Code
 * Scheduling for Register pressure) heuristic.
 *
 * Only handles the case of choosing instructions that reduce register pressure
 * or are even.
 */
static struct ir3_sched_node *
choose_instr_dec(struct ir3_sched_ctx *ctx, struct ir3_sched_notes *notes,
                 bool defer)
{
   const char *mode = defer ? "-d" : "";
   struct ir3_sched_node *chosen = NULL;
   enum choose_instr_dec_rank chosen_rank = DEC_NEUTRAL;

   foreach_sched_node (n, &ctx->dag->heads) {
      if (defer && should_defer(ctx, n->instr))
         continue;

      unsigned d = node_delay(ctx, n);

      int live = live_effect(n->instr);
      if (live > 0)
         continue;

      if (!check_instr(ctx, notes, n->instr))
         continue;

      enum choose_instr_dec_rank rank;
      if (live < 0) {
         /* Prioritize instrs which free up regs and can be scheduled with no
          * delay.
          */
         if (d == 0)
            rank = DEC_FREED_READY;
         else
            rank = DEC_FREED;
      } else {
         /* Contra the paper, pick a leader with no effect on used regs.  This
          * may open up new opportunities, as otherwise a single-operand instr
          * consuming a value will tend to block finding the instr that frees
          * that value.  This had a massive effect on reducing spilling on V3D.
          *
          * XXX: Should this prioritize ready?
          */
         if (d == 0)
            rank = DEC_NEUTRAL_READY;
         else
            rank = DEC_NEUTRAL;
      }

      /* Prefer higher-ranked instructions, or in the case of a rank tie, the
       * highest latency-to-end-of-program instruction.
       */
      if (!chosen || rank > chosen_rank ||
          (rank == chosen_rank && chosen->max_delay < n->max_delay)) {
         chosen = n;
         chosen_rank = rank;
      }
   }

   if (chosen) {
      di(chosen->instr, "dec%s: chose (%s)", mode, dec_rank_name(chosen_rank));
      return chosen;
   }

   return choose_instr_inc(ctx, notes, defer, true);
}

725*61046927SAndroid Build Coastguard Worker enum choose_instr_inc_rank {
726*61046927SAndroid Build Coastguard Worker    INC_DISTANCE,
727*61046927SAndroid Build Coastguard Worker    INC_DISTANCE_READY,
728*61046927SAndroid Build Coastguard Worker };
729*61046927SAndroid Build Coastguard Worker 
730*61046927SAndroid Build Coastguard Worker static const char *
731*61046927SAndroid Build Coastguard Worker inc_rank_name(enum choose_instr_inc_rank rank)
732*61046927SAndroid Build Coastguard Worker {
733*61046927SAndroid Build Coastguard Worker    switch (rank) {
734*61046927SAndroid Build Coastguard Worker    case INC_DISTANCE:
735*61046927SAndroid Build Coastguard Worker       return "distance";
736*61046927SAndroid Build Coastguard Worker    case INC_DISTANCE_READY:
737*61046927SAndroid Build Coastguard Worker       return "distance+ready";
738*61046927SAndroid Build Coastguard Worker    default:
739*61046927SAndroid Build Coastguard Worker       return NULL;
740*61046927SAndroid Build Coastguard Worker    }
741*61046927SAndroid Build Coastguard Worker }
742*61046927SAndroid Build Coastguard Worker 
743*61046927SAndroid Build Coastguard Worker /**
744*61046927SAndroid Build Coastguard Worker  * When we can't choose an instruction that reduces register pressure or
745*61046927SAndroid Build Coastguard Worker  * is neutral, we end up here to try to pick the least bad option.
746*61046927SAndroid Build Coastguard Worker  */
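/* Editor's note (inferred from the loop below): among the candidates we still
 * prefer ready (zero-delay) instructions and, within a rank, the one whose
 * result is consumed soonest (smallest nearest_use() distance), so the added
 * pressure is short-lived.
 */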
747*61046927SAndroid Build Coastguard Worker static struct ir3_sched_node *
748*61046927SAndroid Build Coastguard Worker choose_instr_inc(struct ir3_sched_ctx *ctx, struct ir3_sched_notes *notes,
749*61046927SAndroid Build Coastguard Worker                  bool defer, bool avoid_output)
750*61046927SAndroid Build Coastguard Worker {
751*61046927SAndroid Build Coastguard Worker    const char *mode = defer ? "-d" : "";
752*61046927SAndroid Build Coastguard Worker    struct ir3_sched_node *chosen = NULL;
753*61046927SAndroid Build Coastguard Worker    enum choose_instr_inc_rank chosen_rank = INC_DISTANCE;
754*61046927SAndroid Build Coastguard Worker 
755*61046927SAndroid Build Coastguard Worker    /*
756*61046927SAndroid Build Coastguard Worker     * From here on out, we are picking something that increases
757*61046927SAndroid Build Coastguard Worker     * register pressure.  So try to pick something which will
758*61046927SAndroid Build Coastguard Worker     * be consumed soon:
759*61046927SAndroid Build Coastguard Worker     */
760*61046927SAndroid Build Coastguard Worker    unsigned chosen_distance = 0;
761*61046927SAndroid Build Coastguard Worker 
762*61046927SAndroid Build Coastguard Worker    /* Pick the max delay of the remaining ready set. */
763*61046927SAndroid Build Coastguard Worker    foreach_sched_node (n, &ctx->dag->heads) {
764*61046927SAndroid Build Coastguard Worker       if (avoid_output && n->output)
765*61046927SAndroid Build Coastguard Worker          continue;
766*61046927SAndroid Build Coastguard Worker 
767*61046927SAndroid Build Coastguard Worker       if (defer && should_defer(ctx, n->instr))
768*61046927SAndroid Build Coastguard Worker          continue;
769*61046927SAndroid Build Coastguard Worker 
770*61046927SAndroid Build Coastguard Worker       if (!check_instr(ctx, notes, n->instr))
771*61046927SAndroid Build Coastguard Worker          continue;
772*61046927SAndroid Build Coastguard Worker 
773*61046927SAndroid Build Coastguard Worker       unsigned d = node_delay(ctx, n);
774*61046927SAndroid Build Coastguard Worker 
775*61046927SAndroid Build Coastguard Worker       enum choose_instr_inc_rank rank;
776*61046927SAndroid Build Coastguard Worker       if (d == 0)
777*61046927SAndroid Build Coastguard Worker          rank = INC_DISTANCE_READY;
778*61046927SAndroid Build Coastguard Worker       else
779*61046927SAndroid Build Coastguard Worker          rank = INC_DISTANCE;
780*61046927SAndroid Build Coastguard Worker 
781*61046927SAndroid Build Coastguard Worker       unsigned distance = nearest_use(n->instr);
782*61046927SAndroid Build Coastguard Worker 
783*61046927SAndroid Build Coastguard Worker       if (!chosen || rank > chosen_rank ||
784*61046927SAndroid Build Coastguard Worker           (rank == chosen_rank && distance < chosen_distance)) {
785*61046927SAndroid Build Coastguard Worker          chosen = n;
786*61046927SAndroid Build Coastguard Worker          chosen_distance = distance;
787*61046927SAndroid Build Coastguard Worker          chosen_rank = rank;
788*61046927SAndroid Build Coastguard Worker       }
789*61046927SAndroid Build Coastguard Worker    }
790*61046927SAndroid Build Coastguard Worker 
791*61046927SAndroid Build Coastguard Worker    if (chosen) {
792*61046927SAndroid Build Coastguard Worker       di(chosen->instr, "inc%s: chose (%s)", mode, inc_rank_name(chosen_rank));
793*61046927SAndroid Build Coastguard Worker       return chosen;
794*61046927SAndroid Build Coastguard Worker    }
795*61046927SAndroid Build Coastguard Worker 
796*61046927SAndroid Build Coastguard Worker    return NULL;
797*61046927SAndroid Build Coastguard Worker }
798*61046927SAndroid Build Coastguard Worker 
799*61046927SAndroid Build Coastguard Worker /* Handles instruction selection for instructions we want to prioritize
800*61046927SAndroid Build Coastguard Worker  * even if csp/csr would not pick them.
801*61046927SAndroid Build Coastguard Worker  */
802*61046927SAndroid Build Coastguard Worker static struct ir3_sched_node *
803*61046927SAndroid Build Coastguard Worker choose_instr_prio(struct ir3_sched_ctx *ctx, struct ir3_sched_notes *notes)
804*61046927SAndroid Build Coastguard Worker {
805*61046927SAndroid Build Coastguard Worker    struct ir3_sched_node *chosen = NULL;
806*61046927SAndroid Build Coastguard Worker 
807*61046927SAndroid Build Coastguard Worker    foreach_sched_node (n, &ctx->dag->heads) {
808*61046927SAndroid Build Coastguard Worker       /*
809*61046927SAndroid Build Coastguard Worker        * - phi nodes and inputs must be scheduled first
810*61046927SAndroid Build Coastguard Worker        * - split should be scheduled first, so that the vector value is
811*61046927SAndroid Build Coastguard Worker        *   killed as soon as possible. RA cannot split up the vector and
812*61046927SAndroid Build Coastguard Worker        *   reuse its components until the whole vector has been killed.
813*61046927SAndroid Build Coastguard Worker        * - collect, on the other hand, should be treated as a "normal"
814*61046927SAndroid Build Coastguard Worker        *   instruction, and may add to register pressure if its sources are
815*61046927SAndroid Build Coastguard Worker        *   part of another vector or immediates.
816*61046927SAndroid Build Coastguard Worker        */
817*61046927SAndroid Build Coastguard Worker       if (!is_meta(n->instr) || n->instr->opc == OPC_META_COLLECT)
818*61046927SAndroid Build Coastguard Worker          continue;
819*61046927SAndroid Build Coastguard Worker 
820*61046927SAndroid Build Coastguard Worker       if (!chosen || (chosen->max_delay < n->max_delay))
821*61046927SAndroid Build Coastguard Worker          chosen = n;
822*61046927SAndroid Build Coastguard Worker    }
823*61046927SAndroid Build Coastguard Worker 
824*61046927SAndroid Build Coastguard Worker    if (chosen) {
825*61046927SAndroid Build Coastguard Worker       di(chosen->instr, "prio: chose (meta)");
826*61046927SAndroid Build Coastguard Worker       return chosen;
827*61046927SAndroid Build Coastguard Worker    }
828*61046927SAndroid Build Coastguard Worker 
829*61046927SAndroid Build Coastguard Worker    return NULL;
830*61046927SAndroid Build Coastguard Worker }
831*61046927SAndroid Build Coastguard Worker 
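/* Debug helper: with SCHED_DEBUG enabled, print every ready DAG head along
 * with its max_delay, live-register effect and remaining delay, followed by
 * each of its consumers (children) and their remaining parent counts.
 */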
832*61046927SAndroid Build Coastguard Worker static void
833*61046927SAndroid Build Coastguard Worker dump_state(struct ir3_sched_ctx *ctx)
834*61046927SAndroid Build Coastguard Worker {
835*61046927SAndroid Build Coastguard Worker    if (!SCHED_DEBUG)
836*61046927SAndroid Build Coastguard Worker       return;
837*61046927SAndroid Build Coastguard Worker 
838*61046927SAndroid Build Coastguard Worker    foreach_sched_node (n, &ctx->dag->heads) {
839*61046927SAndroid Build Coastguard Worker       di(n->instr, "maxdel=%3d le=%d del=%u ", n->max_delay,
840*61046927SAndroid Build Coastguard Worker          live_effect(n->instr), node_delay(ctx, n));
841*61046927SAndroid Build Coastguard Worker 
842*61046927SAndroid Build Coastguard Worker       util_dynarray_foreach (&n->dag.edges, struct dag_edge, edge) {
843*61046927SAndroid Build Coastguard Worker          struct ir3_sched_node *child = (struct ir3_sched_node *)edge->child;
844*61046927SAndroid Build Coastguard Worker 
845*61046927SAndroid Build Coastguard Worker          di(child->instr, " -> (%d parents) ", child->dag.parent_count);
846*61046927SAndroid Build Coastguard Worker       }
847*61046927SAndroid Build Coastguard Worker    }
848*61046927SAndroid Build Coastguard Worker }
849*61046927SAndroid Build Coastguard Worker 
850*61046927SAndroid Build Coastguard Worker /* find instruction to schedule: */
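/* The fallback order below is: priority (meta) instructions, then
 * instructions that decrease or leave register pressure unchanged (first
 * skipping candidates that should_defer() wants to hold back, then
 * considering them too), and finally pressure-increasing instructions as a
 * last resort.
 */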
851*61046927SAndroid Build Coastguard Worker static struct ir3_instruction *
852*61046927SAndroid Build Coastguard Worker choose_instr(struct ir3_sched_ctx *ctx, struct ir3_sched_notes *notes)
853*61046927SAndroid Build Coastguard Worker {
854*61046927SAndroid Build Coastguard Worker    struct ir3_sched_node *chosen;
855*61046927SAndroid Build Coastguard Worker 
856*61046927SAndroid Build Coastguard Worker    dump_state(ctx);
857*61046927SAndroid Build Coastguard Worker 
858*61046927SAndroid Build Coastguard Worker    chosen = choose_instr_prio(ctx, notes);
859*61046927SAndroid Build Coastguard Worker    if (chosen)
860*61046927SAndroid Build Coastguard Worker       return chosen->instr;
861*61046927SAndroid Build Coastguard Worker 
862*61046927SAndroid Build Coastguard Worker    chosen = choose_instr_dec(ctx, notes, true);
863*61046927SAndroid Build Coastguard Worker    if (chosen)
864*61046927SAndroid Build Coastguard Worker       return chosen->instr;
865*61046927SAndroid Build Coastguard Worker 
866*61046927SAndroid Build Coastguard Worker    chosen = choose_instr_dec(ctx, notes, false);
867*61046927SAndroid Build Coastguard Worker    if (chosen)
868*61046927SAndroid Build Coastguard Worker       return chosen->instr;
869*61046927SAndroid Build Coastguard Worker 
870*61046927SAndroid Build Coastguard Worker    chosen = choose_instr_inc(ctx, notes, false, false);
871*61046927SAndroid Build Coastguard Worker    if (chosen)
872*61046927SAndroid Build Coastguard Worker       return chosen->instr;
873*61046927SAndroid Build Coastguard Worker 
874*61046927SAndroid Build Coastguard Worker    return NULL;
875*61046927SAndroid Build Coastguard Worker }
876*61046927SAndroid Build Coastguard Worker 
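/* Clone an instruction and give the clone its own scheduler node, so that
 * remaining unscheduled users can be remapped to it (see split_addr() below).
 */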
877*61046927SAndroid Build Coastguard Worker static struct ir3_instruction *
878*61046927SAndroid Build Coastguard Worker split_instr(struct ir3_sched_ctx *ctx, struct ir3_instruction *orig_instr)
879*61046927SAndroid Build Coastguard Worker {
880*61046927SAndroid Build Coastguard Worker    struct ir3_instruction *new_instr = ir3_instr_clone(orig_instr);
881*61046927SAndroid Build Coastguard Worker    di(new_instr, "split instruction");
882*61046927SAndroid Build Coastguard Worker    sched_node_init(ctx, new_instr);
883*61046927SAndroid Build Coastguard Worker    return new_instr;
884*61046927SAndroid Build Coastguard Worker }
885*61046927SAndroid Build Coastguard Worker 
886*61046927SAndroid Build Coastguard Worker /* "spill" the address registers by remapping any unscheduled
887*61046927SAndroid Build Coastguard Worker  * instructions which depend on the current address register
888*61046927SAndroid Build Coastguard Worker  * to a clone of the instruction which wrote the address reg.
889*61046927SAndroid Build Coastguard Worker  */
890*61046927SAndroid Build Coastguard Worker static struct ir3_instruction *
891*61046927SAndroid Build Coastguard Worker split_addr(struct ir3_sched_ctx *ctx, struct ir3_instruction **addr,
892*61046927SAndroid Build Coastguard Worker            struct ir3_instruction **users, unsigned users_count)
893*61046927SAndroid Build Coastguard Worker {
894*61046927SAndroid Build Coastguard Worker    struct ir3_instruction *new_addr = NULL;
895*61046927SAndroid Build Coastguard Worker    unsigned i;
896*61046927SAndroid Build Coastguard Worker 
897*61046927SAndroid Build Coastguard Worker    assert(*addr);
898*61046927SAndroid Build Coastguard Worker 
899*61046927SAndroid Build Coastguard Worker    for (i = 0; i < users_count; i++) {
900*61046927SAndroid Build Coastguard Worker       struct ir3_instruction *indirect = users[i];
901*61046927SAndroid Build Coastguard Worker 
902*61046927SAndroid Build Coastguard Worker       if (!indirect)
903*61046927SAndroid Build Coastguard Worker          continue;
904*61046927SAndroid Build Coastguard Worker 
905*61046927SAndroid Build Coastguard Worker       /* skip instructions already scheduled: */
906*61046927SAndroid Build Coastguard Worker       if (is_scheduled(indirect))
907*61046927SAndroid Build Coastguard Worker          continue;
908*61046927SAndroid Build Coastguard Worker 
909*61046927SAndroid Build Coastguard Worker       /* remap remaining instructions using current addr
910*61046927SAndroid Build Coastguard Worker        * to new addr:
911*61046927SAndroid Build Coastguard Worker        */
912*61046927SAndroid Build Coastguard Worker       if (indirect->address->def == (*addr)->dsts[0]) {
913*61046927SAndroid Build Coastguard Worker          if (!new_addr) {
914*61046927SAndroid Build Coastguard Worker             new_addr = split_instr(ctx, *addr);
915*61046927SAndroid Build Coastguard Worker             /* original addr is scheduled, but new one isn't: */
916*61046927SAndroid Build Coastguard Worker             new_addr->flags &= ~IR3_INSTR_MARK;
917*61046927SAndroid Build Coastguard Worker          }
918*61046927SAndroid Build Coastguard Worker          indirect->address->def = new_addr->dsts[0];
919*61046927SAndroid Build Coastguard Worker          /* don't need to remove old dag edge since old addr is
920*61046927SAndroid Build Coastguard Worker           * already scheduled:
921*61046927SAndroid Build Coastguard Worker           */
922*61046927SAndroid Build Coastguard Worker          sched_node_add_dep(ctx, indirect, new_addr, 0);
923*61046927SAndroid Build Coastguard Worker          di(indirect, "new address");
924*61046927SAndroid Build Coastguard Worker       }
925*61046927SAndroid Build Coastguard Worker    }
926*61046927SAndroid Build Coastguard Worker 
927*61046927SAndroid Build Coastguard Worker    /* all remaining indirects remapped to new addr: */
928*61046927SAndroid Build Coastguard Worker    *addr = NULL;
929*61046927SAndroid Build Coastguard Worker 
930*61046927SAndroid Build Coastguard Worker    return new_addr;
931*61046927SAndroid Build Coastguard Worker }
932*61046927SAndroid Build Coastguard Worker 
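/* Allocate the scheduler DAG node for an instruction and link the two
 * through instr->data.
 */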
933*61046927SAndroid Build Coastguard Worker static void
934*61046927SAndroid Build Coastguard Worker sched_node_init(struct ir3_sched_ctx *ctx, struct ir3_instruction *instr)
935*61046927SAndroid Build Coastguard Worker {
936*61046927SAndroid Build Coastguard Worker    struct ir3_sched_node *n = rzalloc(ctx->dag, struct ir3_sched_node);
937*61046927SAndroid Build Coastguard Worker 
938*61046927SAndroid Build Coastguard Worker    dag_init_node(ctx->dag, &n->dag);
939*61046927SAndroid Build Coastguard Worker 
940*61046927SAndroid Build Coastguard Worker    n->instr = instr;
941*61046927SAndroid Build Coastguard Worker    instr->data = n;
942*61046927SAndroid Build Coastguard Worker }
943*61046927SAndroid Build Coastguard Worker 
944*61046927SAndroid Build Coastguard Worker static void
945*61046927SAndroid Build Coastguard Worker sched_node_add_dep(struct ir3_sched_ctx *ctx,
946*61046927SAndroid Build Coastguard Worker                    struct ir3_instruction *instr, struct ir3_instruction *src,
947*61046927SAndroid Build Coastguard Worker                    int i)
948*61046927SAndroid Build Coastguard Worker {
949*61046927SAndroid Build Coastguard Worker    /* don't consider dependencies in other blocks: */
950*61046927SAndroid Build Coastguard Worker    if (src->block != instr->block)
951*61046927SAndroid Build Coastguard Worker       return;
952*61046927SAndroid Build Coastguard Worker 
953*61046927SAndroid Build Coastguard Worker    /* we could have false-deps that end up unused: */
954*61046927SAndroid Build Coastguard Worker    if (src->flags & IR3_INSTR_UNUSED) {
955*61046927SAndroid Build Coastguard Worker       assert(__is_false_dep(instr, i));
956*61046927SAndroid Build Coastguard Worker       return;
957*61046927SAndroid Build Coastguard Worker    }
958*61046927SAndroid Build Coastguard Worker 
959*61046927SAndroid Build Coastguard Worker    struct ir3_sched_node *n = instr->data;
960*61046927SAndroid Build Coastguard Worker    struct ir3_sched_node *sn = src->data;
961*61046927SAndroid Build Coastguard Worker 
962*61046927SAndroid Build Coastguard Worker    /* If src is consumed by a collect, track that to realize that once
963*61046927SAndroid Build Coastguard Worker     * any of the collect srcs are live, we should hurry up and schedule
964*61046927SAndroid Build Coastguard Worker     * the rest.
965*61046927SAndroid Build Coastguard Worker     */
966*61046927SAndroid Build Coastguard Worker    if (instr->opc == OPC_META_COLLECT)
967*61046927SAndroid Build Coastguard Worker       sn->collect = instr;
968*61046927SAndroid Build Coastguard Worker 
969*61046927SAndroid Build Coastguard Worker    unsigned d_soft = ir3_delayslots(ctx->compiler, src, instr, i, true);
970*61046927SAndroid Build Coastguard Worker    unsigned d = ir3_delayslots(ctx->compiler, src, instr, i, false);
971*61046927SAndroid Build Coastguard Worker 
972*61046927SAndroid Build Coastguard Worker    /* delays from (ss) and (sy) are considered separately and more accurately in
973*61046927SAndroid Build Coastguard Worker     * the scheduling heuristic, so ignore them when calculating the ip of
974*61046927SAndroid Build Coastguard Worker     * instructions, but do consider them when prioritizing which instructions to
975*61046927SAndroid Build Coastguard Worker     * schedule.
976*61046927SAndroid Build Coastguard Worker     */
977*61046927SAndroid Build Coastguard Worker    dag_add_edge_max_data(&sn->dag, &n->dag, (uintptr_t)d);
978*61046927SAndroid Build Coastguard Worker 
979*61046927SAndroid Build Coastguard Worker    n->delay = MAX2(n->delay, d_soft);
980*61046927SAndroid Build Coastguard Worker }
981*61046927SAndroid Build Coastguard Worker 
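/* Recursively mark an instruction and its same-block SSA sources as being on
 * the path to a kill/input, so they can be prioritized (all inputs must be
 * scheduled before a kill, see sched_node_add_deps()).
 */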
982*61046927SAndroid Build Coastguard Worker static void
983*61046927SAndroid Build Coastguard Worker mark_kill_path(struct ir3_instruction *instr)
984*61046927SAndroid Build Coastguard Worker {
985*61046927SAndroid Build Coastguard Worker    struct ir3_sched_node *n = instr->data;
986*61046927SAndroid Build Coastguard Worker 
987*61046927SAndroid Build Coastguard Worker    if (n->kill_path) {
988*61046927SAndroid Build Coastguard Worker       return;
989*61046927SAndroid Build Coastguard Worker    }
990*61046927SAndroid Build Coastguard Worker 
991*61046927SAndroid Build Coastguard Worker    n->kill_path = true;
992*61046927SAndroid Build Coastguard Worker 
993*61046927SAndroid Build Coastguard Worker    foreach_ssa_src (src, instr) {
994*61046927SAndroid Build Coastguard Worker       if (src->block != instr->block)
995*61046927SAndroid Build Coastguard Worker          continue;
996*61046927SAndroid Build Coastguard Worker       mark_kill_path(src);
997*61046927SAndroid Build Coastguard Worker    }
998*61046927SAndroid Build Coastguard Worker }
999*61046927SAndroid Build Coastguard Worker 
1000*61046927SAndroid Build Coastguard Worker /* Is it a collect whose only uses are the shader outputs (end/chmask)? */
1001*61046927SAndroid Build Coastguard Worker static bool
1002*61046927SAndroid Build Coastguard Worker is_output_collect(struct ir3_instruction *instr)
1003*61046927SAndroid Build Coastguard Worker {
1004*61046927SAndroid Build Coastguard Worker    if (instr->opc != OPC_META_COLLECT)
1005*61046927SAndroid Build Coastguard Worker       return false;
1006*61046927SAndroid Build Coastguard Worker 
1007*61046927SAndroid Build Coastguard Worker    foreach_ssa_use (use, instr) {
1008*61046927SAndroid Build Coastguard Worker       if (use->opc != OPC_END && use->opc != OPC_CHMASK)
1009*61046927SAndroid Build Coastguard Worker          return false;
1010*61046927SAndroid Build Coastguard Worker    }
1011*61046927SAndroid Build Coastguard Worker 
1012*61046927SAndroid Build Coastguard Worker    return true;
1013*61046927SAndroid Build Coastguard Worker }
1014*61046927SAndroid Build Coastguard Worker 
1015*61046927SAndroid Build Coastguard Worker /* Is its only use as an output? */
1016*61046927SAndroid Build Coastguard Worker static bool
1017*61046927SAndroid Build Coastguard Worker is_output_only(struct ir3_instruction *instr)
1018*61046927SAndroid Build Coastguard Worker {
1019*61046927SAndroid Build Coastguard Worker    foreach_ssa_use (use, instr)
1020*61046927SAndroid Build Coastguard Worker       if (!is_output_collect(use))
1021*61046927SAndroid Build Coastguard Worker          return false;
1022*61046927SAndroid Build Coastguard Worker 
1023*61046927SAndroid Build Coastguard Worker    return true;
1024*61046927SAndroid Build Coastguard Worker }
1025*61046927SAndroid Build Coastguard Worker 
1026*61046927SAndroid Build Coastguard Worker static void
1027*61046927SAndroid Build Coastguard Worker sched_node_add_deps(struct ir3_sched_ctx *ctx, struct ir3_instruction *instr)
1028*61046927SAndroid Build Coastguard Worker {
1029*61046927SAndroid Build Coastguard Worker    /* There's nothing to do for phi nodes, since they always go first. And
1030*61046927SAndroid Build Coastguard Worker     * phi nodes can reference sources later in the same block, so handling
1031*61046927SAndroid Build Coastguard Worker     * sources is not only unnecessary but could cause problems.
1032*61046927SAndroid Build Coastguard Worker     */
1033*61046927SAndroid Build Coastguard Worker    if (instr->opc == OPC_META_PHI)
1034*61046927SAndroid Build Coastguard Worker       return;
1035*61046927SAndroid Build Coastguard Worker 
1036*61046927SAndroid Build Coastguard Worker    /* Since foreach_ssa_src() already handles false-deps, we can construct
1037*61046927SAndroid Build Coastguard Worker     * the DAG easily in a single pass.
1038*61046927SAndroid Build Coastguard Worker     */
1039*61046927SAndroid Build Coastguard Worker    foreach_ssa_src_n (src, i, instr) {
1040*61046927SAndroid Build Coastguard Worker       sched_node_add_dep(ctx, instr, src, i);
1041*61046927SAndroid Build Coastguard Worker    }
1042*61046927SAndroid Build Coastguard Worker 
1043*61046927SAndroid Build Coastguard Worker    /* NOTE that all inputs must be scheduled before a kill, so
1044*61046927SAndroid Build Coastguard Worker     * mark these to be prioritized as well:
1045*61046927SAndroid Build Coastguard Worker     */
1046*61046927SAndroid Build Coastguard Worker    if (is_kill_or_demote(instr) || is_input(instr)) {
1047*61046927SAndroid Build Coastguard Worker       mark_kill_path(instr);
1048*61046927SAndroid Build Coastguard Worker    }
1049*61046927SAndroid Build Coastguard Worker 
1050*61046927SAndroid Build Coastguard Worker    if (is_output_only(instr)) {
1051*61046927SAndroid Build Coastguard Worker       struct ir3_sched_node *n = instr->data;
1052*61046927SAndroid Build Coastguard Worker       n->output = true;
1053*61046927SAndroid Build Coastguard Worker    }
1054*61046927SAndroid Build Coastguard Worker }
1055*61046927SAndroid Build Coastguard Worker 
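/* Bottom-up traversal callback: a node's max_delay becomes its own delay plus
 * the largest max_delay among its consumers, i.e. a rough estimate of the
 * latency from this instruction to the end of the block.
 */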
1056*61046927SAndroid Build Coastguard Worker static void
1057*61046927SAndroid Build Coastguard Worker sched_dag_max_delay_cb(struct dag_node *node, void *state)
1058*61046927SAndroid Build Coastguard Worker {
1059*61046927SAndroid Build Coastguard Worker    struct ir3_sched_node *n = (struct ir3_sched_node *)node;
1060*61046927SAndroid Build Coastguard Worker    uint32_t max_delay = 0;
1061*61046927SAndroid Build Coastguard Worker 
1062*61046927SAndroid Build Coastguard Worker    util_dynarray_foreach (&n->dag.edges, struct dag_edge, edge) {
1063*61046927SAndroid Build Coastguard Worker       struct ir3_sched_node *child = (struct ir3_sched_node *)edge->child;
1064*61046927SAndroid Build Coastguard Worker       max_delay = MAX2(child->max_delay, max_delay);
1065*61046927SAndroid Build Coastguard Worker    }
1066*61046927SAndroid Build Coastguard Worker 
1067*61046927SAndroid Build Coastguard Worker    n->max_delay = MAX2(n->max_delay, max_delay + n->delay);
1068*61046927SAndroid Build Coastguard Worker }
1069*61046927SAndroid Build Coastguard Worker 
1070*61046927SAndroid Build Coastguard Worker #ifndef NDEBUG
1071*61046927SAndroid Build Coastguard Worker static void
1072*61046927SAndroid Build Coastguard Worker sched_dag_validate_cb(const struct dag_node *node, void *data)
1073*61046927SAndroid Build Coastguard Worker {
1074*61046927SAndroid Build Coastguard Worker    struct ir3_sched_node *n = (struct ir3_sched_node *)node;
1075*61046927SAndroid Build Coastguard Worker 
1076*61046927SAndroid Build Coastguard Worker    ir3_print_instr(n->instr);
1077*61046927SAndroid Build Coastguard Worker }
1078*61046927SAndroid Build Coastguard Worker #endif
1079*61046927SAndroid Build Coastguard Worker 
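/* Build the scheduling DAG for the current block: create a node per
 * unscheduled instruction, add dependency edges, and compute each node's
 * max_delay bottom-up.
 */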
1080*61046927SAndroid Build Coastguard Worker static void
1081*61046927SAndroid Build Coastguard Worker sched_dag_init(struct ir3_sched_ctx *ctx)
1082*61046927SAndroid Build Coastguard Worker {
1083*61046927SAndroid Build Coastguard Worker    ctx->dag = dag_create(ctx);
1084*61046927SAndroid Build Coastguard Worker 
1085*61046927SAndroid Build Coastguard Worker    foreach_instr (instr, &ctx->unscheduled_list)
1086*61046927SAndroid Build Coastguard Worker       sched_node_init(ctx, instr);
1087*61046927SAndroid Build Coastguard Worker 
1088*61046927SAndroid Build Coastguard Worker #ifndef NDEBUG
1089*61046927SAndroid Build Coastguard Worker    dag_validate(ctx->dag, sched_dag_validate_cb, NULL);
1090*61046927SAndroid Build Coastguard Worker #endif
1091*61046927SAndroid Build Coastguard Worker 
1092*61046927SAndroid Build Coastguard Worker    foreach_instr (instr, &ctx->unscheduled_list)
1093*61046927SAndroid Build Coastguard Worker       sched_node_add_deps(ctx, instr);
1094*61046927SAndroid Build Coastguard Worker 
1095*61046927SAndroid Build Coastguard Worker    dag_traverse_bottom_up(ctx->dag, sched_dag_max_delay_cb, NULL);
1096*61046927SAndroid Build Coastguard Worker }
1097*61046927SAndroid Build Coastguard Worker 
1098*61046927SAndroid Build Coastguard Worker static void
1099*61046927SAndroid Build Coastguard Worker sched_dag_destroy(struct ir3_sched_ctx *ctx)
1100*61046927SAndroid Build Coastguard Worker {
1101*61046927SAndroid Build Coastguard Worker    ralloc_free(ctx->dag);
1102*61046927SAndroid Build Coastguard Worker    ctx->dag = NULL;
1103*61046927SAndroid Build Coastguard Worker }
1104*61046927SAndroid Build Coastguard Worker 
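/* Schedule a single block: move its instructions to the unscheduled list,
 * build the DAG, then repeatedly pick an instruction with choose_instr().
 * If nothing can be scheduled because of an address-register conflict, the
 * writer of that register is cloned (split_addr()) to break the deadlock.
 */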
1105*61046927SAndroid Build Coastguard Worker static void
1106*61046927SAndroid Build Coastguard Worker sched_block(struct ir3_sched_ctx *ctx, struct ir3_block *block)
1107*61046927SAndroid Build Coastguard Worker {
1108*61046927SAndroid Build Coastguard Worker    ctx->block = block;
1109*61046927SAndroid Build Coastguard Worker 
1110*61046927SAndroid Build Coastguard Worker    /* addr/pred writes are per-block: */
1111*61046927SAndroid Build Coastguard Worker    ctx->addr0 = NULL;
1112*61046927SAndroid Build Coastguard Worker    ctx->addr1 = NULL;
1113*61046927SAndroid Build Coastguard Worker    ctx->sy_delay = 0;
1114*61046927SAndroid Build Coastguard Worker    ctx->ss_delay = 0;
1115*61046927SAndroid Build Coastguard Worker    ctx->sy_index = ctx->first_outstanding_sy_index = 0;
1116*61046927SAndroid Build Coastguard Worker    ctx->ss_index = ctx->first_outstanding_ss_index = 0;
1117*61046927SAndroid Build Coastguard Worker 
1118*61046927SAndroid Build Coastguard Worker    /* The terminator has to stay at the end. Instead of trying to set up
1119*61046927SAndroid Build Coastguard Worker     * dependencies to achieve this, it's easier to just remove it now and add it
1120*61046927SAndroid Build Coastguard Worker     * back after scheduling.
1121*61046927SAndroid Build Coastguard Worker     */
1122*61046927SAndroid Build Coastguard Worker    struct ir3_instruction *terminator = ir3_block_take_terminator(block);
1123*61046927SAndroid Build Coastguard Worker 
1124*61046927SAndroid Build Coastguard Worker    /* move all instructions to the unscheduled list, and
1125*61046927SAndroid Build Coastguard Worker     * empty the block's instruction list (to which we will
1126*61046927SAndroid Build Coastguard Worker     * be inserting).
1127*61046927SAndroid Build Coastguard Worker     */
1128*61046927SAndroid Build Coastguard Worker    list_replace(&block->instr_list, &ctx->unscheduled_list);
1129*61046927SAndroid Build Coastguard Worker    list_inithead(&block->instr_list);
1130*61046927SAndroid Build Coastguard Worker 
1131*61046927SAndroid Build Coastguard Worker    sched_dag_init(ctx);
1132*61046927SAndroid Build Coastguard Worker 
1133*61046927SAndroid Build Coastguard Worker    ctx->remaining_kills = 0;
1134*61046927SAndroid Build Coastguard Worker    ctx->remaining_tex = 0;
1135*61046927SAndroid Build Coastguard Worker    foreach_instr_safe (instr, &ctx->unscheduled_list) {
1136*61046927SAndroid Build Coastguard Worker       if (is_kill_or_demote(instr))
1137*61046927SAndroid Build Coastguard Worker          ctx->remaining_kills++;
1138*61046927SAndroid Build Coastguard Worker       if (is_sy_producer(instr))
1139*61046927SAndroid Build Coastguard Worker          ctx->remaining_tex++;
1140*61046927SAndroid Build Coastguard Worker    }
1141*61046927SAndroid Build Coastguard Worker 
1142*61046927SAndroid Build Coastguard Worker    /* First schedule all meta:input and meta:phi instructions, followed by
1143*61046927SAndroid Build Coastguard Worker     * tex-prefetch.  We want all of the instructions that load values into
1144*61046927SAndroid Build Coastguard Worker     * registers before the shader starts to go before any other instructions.
1145*61046927SAndroid Build Coastguard Worker     * But in particular we want inputs to come before prefetches.  This is
1146*61046927SAndroid Build Coastguard Worker     * because a FS's bary_ij input may not actually be live in the shader,
1147*61046927SAndroid Build Coastguard Worker     * but it should not be scheduled on top of any other input (but can be
1148*61046927SAndroid Build Coastguard Worker     *   overwritten by a tex prefetch).
1149*61046927SAndroid Build Coastguard Worker     *
1150*61046927SAndroid Build Coastguard Worker     * Note: Because the first block cannot have predecessors, meta:input and
1151*61046927SAndroid Build Coastguard Worker     * meta:phi cannot exist in the same block.
1152*61046927SAndroid Build Coastguard Worker     */
1153*61046927SAndroid Build Coastguard Worker    foreach_instr_safe (instr, &ctx->unscheduled_list)
1154*61046927SAndroid Build Coastguard Worker       if (instr->opc == OPC_META_INPUT || instr->opc == OPC_META_PHI)
1155*61046927SAndroid Build Coastguard Worker          schedule(ctx, instr);
1156*61046927SAndroid Build Coastguard Worker 
1157*61046927SAndroid Build Coastguard Worker    foreach_instr_safe (instr, &ctx->unscheduled_list)
1158*61046927SAndroid Build Coastguard Worker       if (instr->opc == OPC_META_TEX_PREFETCH)
1159*61046927SAndroid Build Coastguard Worker          schedule(ctx, instr);
1160*61046927SAndroid Build Coastguard Worker 
1161*61046927SAndroid Build Coastguard Worker    foreach_instr_safe (instr, &ctx->unscheduled_list)
1162*61046927SAndroid Build Coastguard Worker       if (instr->opc == OPC_PUSH_CONSTS_LOAD_MACRO)
1163*61046927SAndroid Build Coastguard Worker          schedule(ctx, instr);
1164*61046927SAndroid Build Coastguard Worker 
1165*61046927SAndroid Build Coastguard Worker    while (!list_is_empty(&ctx->unscheduled_list)) {
1166*61046927SAndroid Build Coastguard Worker       struct ir3_sched_notes notes = {0};
1167*61046927SAndroid Build Coastguard Worker       struct ir3_instruction *instr;
1168*61046927SAndroid Build Coastguard Worker 
1169*61046927SAndroid Build Coastguard Worker       instr = choose_instr(ctx, &notes);
1170*61046927SAndroid Build Coastguard Worker       if (instr) {
1171*61046927SAndroid Build Coastguard Worker          unsigned delay = node_delay(ctx, instr->data);
1172*61046927SAndroid Build Coastguard Worker          d("delay=%u", delay);
1173*61046927SAndroid Build Coastguard Worker 
1174*61046927SAndroid Build Coastguard Worker          assert(delay <= 6);
1175*61046927SAndroid Build Coastguard Worker 
1176*61046927SAndroid Build Coastguard Worker          schedule(ctx, instr);
1177*61046927SAndroid Build Coastguard Worker 
1178*61046927SAndroid Build Coastguard Worker          /* Since we've scheduled a "real" instruction, we can now
1179*61046927SAndroid Build Coastguard Worker           * schedule any split instruction created by the scheduler again.
1180*61046927SAndroid Build Coastguard Worker           */
1181*61046927SAndroid Build Coastguard Worker          ctx->split = NULL;
1182*61046927SAndroid Build Coastguard Worker       } else {
1183*61046927SAndroid Build Coastguard Worker          struct ir3_instruction *new_instr = NULL;
1184*61046927SAndroid Build Coastguard Worker          struct ir3 *ir = block->shader;
1185*61046927SAndroid Build Coastguard Worker 
1186*61046927SAndroid Build Coastguard Worker          /* nothing available to schedule.. if we are blocked on
1187*61046927SAndroid Build Coastguard Worker           * address/predicate register conflict, then break the
1188*61046927SAndroid Build Coastguard Worker           * deadlock by cloning the instruction that wrote that
1189*61046927SAndroid Build Coastguard Worker           * reg:
1190*61046927SAndroid Build Coastguard Worker           */
1191*61046927SAndroid Build Coastguard Worker          if (notes.addr0_conflict) {
1192*61046927SAndroid Build Coastguard Worker             new_instr =
1193*61046927SAndroid Build Coastguard Worker                split_addr(ctx, &ctx->addr0, ir->a0_users, ir->a0_users_count);
1194*61046927SAndroid Build Coastguard Worker          } else if (notes.addr1_conflict) {
1195*61046927SAndroid Build Coastguard Worker             new_instr =
1196*61046927SAndroid Build Coastguard Worker                split_addr(ctx, &ctx->addr1, ir->a1_users, ir->a1_users_count);
1197*61046927SAndroid Build Coastguard Worker          } else {
1198*61046927SAndroid Build Coastguard Worker             d("unscheduled_list:");
1199*61046927SAndroid Build Coastguard Worker             foreach_instr (instr, &ctx->unscheduled_list)
1200*61046927SAndroid Build Coastguard Worker                di(instr, "unscheduled: ");
1201*61046927SAndroid Build Coastguard Worker             assert(0);
1202*61046927SAndroid Build Coastguard Worker             ctx->error = true;
1203*61046927SAndroid Build Coastguard Worker             return;
1204*61046927SAndroid Build Coastguard Worker          }
1205*61046927SAndroid Build Coastguard Worker 
1206*61046927SAndroid Build Coastguard Worker          if (new_instr) {
1207*61046927SAndroid Build Coastguard Worker             list_delinit(&new_instr->node);
1208*61046927SAndroid Build Coastguard Worker             list_addtail(&new_instr->node, &ctx->unscheduled_list);
1209*61046927SAndroid Build Coastguard Worker          }
1210*61046927SAndroid Build Coastguard Worker 
1211*61046927SAndroid Build Coastguard Worker          /* If we produced a new instruction, do not schedule it next, to
1212*61046927SAndroid Build Coastguard Worker           * guarantee progress.
1213*61046927SAndroid Build Coastguard Worker           */
1214*61046927SAndroid Build Coastguard Worker          ctx->split = new_instr;
1215*61046927SAndroid Build Coastguard Worker       }
1216*61046927SAndroid Build Coastguard Worker    }
1217*61046927SAndroid Build Coastguard Worker 
1218*61046927SAndroid Build Coastguard Worker    sched_dag_destroy(ctx);
1219*61046927SAndroid Build Coastguard Worker 
1220*61046927SAndroid Build Coastguard Worker    if (terminator)
1221*61046927SAndroid Build Coastguard Worker       list_addtail(&terminator->node, &block->instr_list);
1222*61046927SAndroid Build Coastguard Worker }
1223*61046927SAndroid Build Coastguard Worker 
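/* Pre-RA scheduling entry point: clears per-instruction scheduler state,
 * recomputes instruction counts and SSA uses, and schedules each block in
 * turn.  Returns -1 if the scheduler hit an error, 0 otherwise.
 */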
1224*61046927SAndroid Build Coastguard Worker int
1225*61046927SAndroid Build Coastguard Worker ir3_sched(struct ir3 *ir)
1226*61046927SAndroid Build Coastguard Worker {
1227*61046927SAndroid Build Coastguard Worker    struct ir3_sched_ctx *ctx = rzalloc(NULL, struct ir3_sched_ctx);
1228*61046927SAndroid Build Coastguard Worker 
1229*61046927SAndroid Build Coastguard Worker    ctx->compiler = ir->compiler;
1230*61046927SAndroid Build Coastguard Worker 
1231*61046927SAndroid Build Coastguard Worker    foreach_block (block, &ir->block_list) {
1232*61046927SAndroid Build Coastguard Worker       foreach_instr (instr, &block->instr_list) {
1233*61046927SAndroid Build Coastguard Worker          instr->data = NULL;
1234*61046927SAndroid Build Coastguard Worker       }
1235*61046927SAndroid Build Coastguard Worker    }
1236*61046927SAndroid Build Coastguard Worker 
1237*61046927SAndroid Build Coastguard Worker    ir3_count_instructions_sched(ir);
1238*61046927SAndroid Build Coastguard Worker    ir3_clear_mark(ir);
1239*61046927SAndroid Build Coastguard Worker    ir3_find_ssa_uses(ir, ctx, false);
1240*61046927SAndroid Build Coastguard Worker 
1241*61046927SAndroid Build Coastguard Worker    foreach_block (block, &ir->block_list) {
1242*61046927SAndroid Build Coastguard Worker       sched_block(ctx, block);
1243*61046927SAndroid Build Coastguard Worker    }
1244*61046927SAndroid Build Coastguard Worker 
1245*61046927SAndroid Build Coastguard Worker    int ret = ctx->error ? -1 : 0;
1246*61046927SAndroid Build Coastguard Worker 
1247*61046927SAndroid Build Coastguard Worker    ralloc_free(ctx);
1248*61046927SAndroid Build Coastguard Worker 
1249*61046927SAndroid Build Coastguard Worker    return ret;
1250*61046927SAndroid Build Coastguard Worker }
1251*61046927SAndroid Build Coastguard Worker 
1252*61046927SAndroid Build Coastguard Worker static unsigned
1253*61046927SAndroid Build Coastguard Worker get_array_id(struct ir3_instruction *instr)
1254*61046927SAndroid Build Coastguard Worker {
1255*61046927SAndroid Build Coastguard Worker    /* The expectation is that there is only a single array
1256*61046927SAndroid Build Coastguard Worker     * src or dst, ir3_cp should enforce this.
1257*61046927SAndroid Build Coastguard Worker     */
1258*61046927SAndroid Build Coastguard Worker 
1259*61046927SAndroid Build Coastguard Worker    foreach_dst (dst, instr)
1260*61046927SAndroid Build Coastguard Worker       if (dst->flags & IR3_REG_ARRAY)
1261*61046927SAndroid Build Coastguard Worker          return dst->array.id;
1262*61046927SAndroid Build Coastguard Worker    foreach_src (src, instr)
1263*61046927SAndroid Build Coastguard Worker       if (src->flags & IR3_REG_ARRAY)
1264*61046927SAndroid Build Coastguard Worker          return src->array.id;
1265*61046927SAndroid Build Coastguard Worker 
1266*61046927SAndroid Build Coastguard Worker    unreachable("this was unexpected");
1267*61046927SAndroid Build Coastguard Worker }
1268*61046927SAndroid Build Coastguard Worker 
1269*61046927SAndroid Build Coastguard Worker /* does instruction 'prior' need to be scheduled before 'instr'? */
1270*61046927SAndroid Build Coastguard Worker static bool
1271*61046927SAndroid Build Coastguard Worker depends_on(struct ir3_instruction *instr, struct ir3_instruction *prior)
1272*61046927SAndroid Build Coastguard Worker {
1273*61046927SAndroid Build Coastguard Worker    /* TODO for dependencies that are related to a specific object, ie
1274*61046927SAndroid Build Coastguard Worker     * a specific SSBO/image/array, we could relax this constraint to
1275*61046927SAndroid Build Coastguard Worker     * make accesses to unrelated objects not depend on each other (at
1276*61046927SAndroid Build Coastguard Worker     * least as long as not declared coherent)
1277*61046927SAndroid Build Coastguard Worker     */
1278*61046927SAndroid Build Coastguard Worker    if (((instr->barrier_class & IR3_BARRIER_EVERYTHING) &&
1279*61046927SAndroid Build Coastguard Worker         prior->barrier_class) ||
1280*61046927SAndroid Build Coastguard Worker        ((prior->barrier_class & IR3_BARRIER_EVERYTHING) &&
1281*61046927SAndroid Build Coastguard Worker         instr->barrier_class))
1282*61046927SAndroid Build Coastguard Worker       return true;
1283*61046927SAndroid Build Coastguard Worker 
1284*61046927SAndroid Build Coastguard Worker    if (instr->barrier_class & prior->barrier_conflict) {
1285*61046927SAndroid Build Coastguard Worker       if (!(instr->barrier_class &
1286*61046927SAndroid Build Coastguard Worker             ~(IR3_BARRIER_ARRAY_R | IR3_BARRIER_ARRAY_W))) {
1287*61046927SAndroid Build Coastguard Worker          /* if only array barrier, then we can further limit false-deps
1288*61046927SAndroid Build Coastguard Worker           * by considering the array-id, ie reads/writes to different
1289*61046927SAndroid Build Coastguard Worker           * arrays do not depend on each other (no aliasing)
1290*61046927SAndroid Build Coastguard Worker           */
1291*61046927SAndroid Build Coastguard Worker          if (get_array_id(instr) != get_array_id(prior)) {
1292*61046927SAndroid Build Coastguard Worker             return false;
1293*61046927SAndroid Build Coastguard Worker          }
1294*61046927SAndroid Build Coastguard Worker       }
1295*61046927SAndroid Build Coastguard Worker 
1296*61046927SAndroid Build Coastguard Worker       return true;
1297*61046927SAndroid Build Coastguard Worker    }
1298*61046927SAndroid Build Coastguard Worker 
1299*61046927SAndroid Build Coastguard Worker    return false;
1300*61046927SAndroid Build Coastguard Worker }
1301*61046927SAndroid Build Coastguard Worker 
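/* Add false dependencies between a barrier-classed instruction and the
 * surrounding instructions it must not be reordered across, scanning
 * backwards and forwards until another instruction of the same barrier class
 * (or the block boundary) is reached.
 */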
1302*61046927SAndroid Build Coastguard Worker static void
1303*61046927SAndroid Build Coastguard Worker add_barrier_deps(struct ir3_block *block, struct ir3_instruction *instr)
1304*61046927SAndroid Build Coastguard Worker {
1305*61046927SAndroid Build Coastguard Worker    struct list_head *prev = instr->node.prev;
1306*61046927SAndroid Build Coastguard Worker    struct list_head *next = instr->node.next;
1307*61046927SAndroid Build Coastguard Worker 
1308*61046927SAndroid Build Coastguard Worker    /* add dependencies on previous instructions that must be scheduled
1309*61046927SAndroid Build Coastguard Worker     * prior to the current instruction
1310*61046927SAndroid Build Coastguard Worker     */
1311*61046927SAndroid Build Coastguard Worker    while (prev != &block->instr_list) {
1312*61046927SAndroid Build Coastguard Worker       struct ir3_instruction *pi =
1313*61046927SAndroid Build Coastguard Worker          list_entry(prev, struct ir3_instruction, node);
1314*61046927SAndroid Build Coastguard Worker 
1315*61046927SAndroid Build Coastguard Worker       prev = prev->prev;
1316*61046927SAndroid Build Coastguard Worker 
1317*61046927SAndroid Build Coastguard Worker       if (is_meta(pi))
1318*61046927SAndroid Build Coastguard Worker          continue;
1319*61046927SAndroid Build Coastguard Worker 
1320*61046927SAndroid Build Coastguard Worker       if (instr->barrier_class == pi->barrier_class) {
1321*61046927SAndroid Build Coastguard Worker          ir3_instr_add_dep(instr, pi);
1322*61046927SAndroid Build Coastguard Worker          break;
1323*61046927SAndroid Build Coastguard Worker       }
1324*61046927SAndroid Build Coastguard Worker 
1325*61046927SAndroid Build Coastguard Worker       if (depends_on(instr, pi))
1326*61046927SAndroid Build Coastguard Worker          ir3_instr_add_dep(instr, pi);
1327*61046927SAndroid Build Coastguard Worker    }
1328*61046927SAndroid Build Coastguard Worker 
1329*61046927SAndroid Build Coastguard Worker    /* add dependencies on this instruction to following instructions
1330*61046927SAndroid Build Coastguard Worker     * that must be scheduled after the current instruction:
1331*61046927SAndroid Build Coastguard Worker     */
1332*61046927SAndroid Build Coastguard Worker    while (next != &block->instr_list) {
1333*61046927SAndroid Build Coastguard Worker       struct ir3_instruction *ni =
1334*61046927SAndroid Build Coastguard Worker          list_entry(next, struct ir3_instruction, node);
1335*61046927SAndroid Build Coastguard Worker 
1336*61046927SAndroid Build Coastguard Worker       next = next->next;
1337*61046927SAndroid Build Coastguard Worker 
1338*61046927SAndroid Build Coastguard Worker       if (is_meta(ni))
1339*61046927SAndroid Build Coastguard Worker          continue;
1340*61046927SAndroid Build Coastguard Worker 
1341*61046927SAndroid Build Coastguard Worker       if (instr->barrier_class == ni->barrier_class) {
1342*61046927SAndroid Build Coastguard Worker          ir3_instr_add_dep(ni, instr);
1343*61046927SAndroid Build Coastguard Worker          break;
1344*61046927SAndroid Build Coastguard Worker       }
1345*61046927SAndroid Build Coastguard Worker 
1346*61046927SAndroid Build Coastguard Worker       if (depends_on(ni, instr))
1347*61046927SAndroid Build Coastguard Worker          ir3_instr_add_dep(ni, instr);
1348*61046927SAndroid Build Coastguard Worker    }
1349*61046927SAndroid Build Coastguard Worker }
1350*61046927SAndroid Build Coastguard Worker 
1351*61046927SAndroid Build Coastguard Worker /* before scheduling a block, we need to add any necessary false-dependencies
1352*61046927SAndroid Build Coastguard Worker  * to ensure that:
1353*61046927SAndroid Build Coastguard Worker  *
1354*61046927SAndroid Build Coastguard Worker  *  (1) barriers are scheduled in the right order wrt instructions related
1355*61046927SAndroid Build Coastguard Worker  *      to the barrier
1356*61046927SAndroid Build Coastguard Worker  *
1357*61046927SAndroid Build Coastguard Worker  *  (2) reads that come before a write actually get scheduled before the
1358*61046927SAndroid Build Coastguard Worker  *      write
1359*61046927SAndroid Build Coastguard Worker  */
1360*61046927SAndroid Build Coastguard Worker bool
1361*61046927SAndroid Build Coastguard Worker ir3_sched_add_deps(struct ir3 *ir)
1362*61046927SAndroid Build Coastguard Worker {
1363*61046927SAndroid Build Coastguard Worker    bool progress = false;
1364*61046927SAndroid Build Coastguard Worker 
1365*61046927SAndroid Build Coastguard Worker    foreach_block (block, &ir->block_list) {
1366*61046927SAndroid Build Coastguard Worker       foreach_instr (instr, &block->instr_list) {
1367*61046927SAndroid Build Coastguard Worker          if (instr->barrier_class) {
1368*61046927SAndroid Build Coastguard Worker             add_barrier_deps(block, instr);
1369*61046927SAndroid Build Coastguard Worker             progress = true;
1370*61046927SAndroid Build Coastguard Worker          }
1371*61046927SAndroid Build Coastguard Worker       }
1372*61046927SAndroid Build Coastguard Worker    }
1373*61046927SAndroid Build Coastguard Worker 
1374*61046927SAndroid Build Coastguard Worker    return progress;
1375*61046927SAndroid Build Coastguard Worker }
1376