xref: /aosp_15_r20/external/mesa3d/src/imagination/rogue/rogue.c (revision 6104692788411f58d303aa86923a9ff6ecaded22)
1 /*
2  * Copyright © 2022 Imagination Technologies Ltd.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a copy
5  * of this software and associated documentation files (the "Software"), to deal
6  * in the Software without restriction, including without limitation the rights
7  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8  * copies of the Software, and to permit persons to whom the Software is
9  * furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18  * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21  * SOFTWARE.
22  */
23 
24 #include "compiler/glsl_types.h"
25 #include "rogue.h"
26 #include "util/list.h"
27 #include "util/macros.h"
28 #include "util/ralloc.h"
29 #include "util/sparse_array.h"
30 
31 #include <stdbool.h>
32 
33 /**
34  * \file rogue.c
35  *
36  * \brief Contains general Rogue IR functions.
37  */
38 
39 /* TODO: Tweak these? */
40 #define ROGUE_REG_CACHE_NODE_SIZE 512
41 #define ROGUE_REGARRAY_CACHE_NODE_SIZE 512
42 
/**
 * \brief Sets an existing register to a (new) class and/or index.
 *
 * Also updates the shader's per-class register list, the per-class register
 * cache, and (for classes with a fixed register count) the usage bitset.
 *
 * \param[in] shader The shader containing the register.
 * \param[in] reg The register being changed.
 * \param[in] class The new register class.
 * \param[in] index The new register index.
 * \return True if the register was updated, else false.
 */
PUBLIC
bool rogue_reg_set(rogue_shader *shader,
                   rogue_reg *reg,
                   enum rogue_reg_class class,
                   unsigned index)
{
   /* Report "unchanged" if the class and index are already as requested;
    * note that the bookkeeping below is still (re-)applied in that case.
    */
   bool changed = true;

   if (reg->class == class && reg->index == index)
      changed = false;

   const rogue_reg_info *info = &rogue_reg_infos[class];

   /* Only classes with a fixed number of registers track per-index usage. */
   if (info->num) {
      assert(index < info->num);
      rogue_set_reg_use(shader, class, index);
   }

   /* Move the register onto the list of its new class. */
   if (reg->class != class) {
      list_del(&reg->link);
      list_addtail(&reg->link, &shader->regs[class]);
   }

   reg->class = class;
   reg->index = index;
   reg->dirty = true;

   /* Clear the old cache entry. */
   if (reg->cached && *reg->cached == reg)
      *reg->cached = NULL;

   /* Set new cache entry. */
   rogue_reg **reg_cached =
      util_sparse_array_get(&shader->reg_cache[class], index);
   *reg_cached = reg;
   reg->cached = reg_cached;

   return changed;
}
91 
92 /**
93  * \brief Sets an existing register to a (new) class and/or index, and updates
94  * its usage bitset.
95  *
96  * \param[in] shader The shader containing the register.
97  * \param[in] reg The register being changed.
98  * \param[in] class The new register class.
99  * \param[in] index The new register index.
100  * \return True if the register was updated, else false.
101  */
102 PUBLIC
rogue_reg_rewrite(rogue_shader * shader,rogue_reg * reg,enum rogue_reg_class class,unsigned index)103 bool rogue_reg_rewrite(rogue_shader *shader,
104                        rogue_reg *reg,
105                        enum rogue_reg_class class,
106                        unsigned index)
107 {
108    const rogue_reg_info *info = &rogue_reg_infos[reg->class];
109    if (info->num) {
110       assert(rogue_reg_is_used(shader, reg->class, reg->index) &&
111              "Register not in use!");
112       rogue_clear_reg_use(shader, reg->class, reg->index);
113    }
114 
115    return rogue_reg_set(shader, reg, class, index);
116 }
117 
118 PUBLIC
rogue_regarray_set(rogue_shader * shader,rogue_regarray * regarray,enum rogue_reg_class class,unsigned base_index,bool set_regs)119 bool rogue_regarray_set(rogue_shader *shader,
120                         rogue_regarray *regarray,
121                         enum rogue_reg_class class,
122                         unsigned base_index,
123                         bool set_regs)
124 {
125    bool updated = true;
126 
127    if (set_regs) {
128       for (unsigned u = 0; u < regarray->size; ++u) {
129          updated &=
130             rogue_reg_set(shader, regarray->regs[u], class, base_index + u);
131       }
132    }
133 
134    if (regarray->cached && *regarray->cached == regarray)
135       *regarray->cached = NULL;
136 
137    uint64_t key =
138       rogue_regarray_cache_key(regarray->size, class, base_index, false, 0);
139 
140    rogue_regarray **regarray_cached =
141       util_sparse_array_get(&shader->regarray_cache, key);
142    assert(*regarray_cached == NULL);
143 
144    *regarray_cached = regarray;
145    regarray->cached = regarray_cached;
146 
147    return updated;
148 }
149 
rogue_regarray_rewrite(rogue_shader * shader,rogue_regarray * regarray,enum rogue_reg_class class,unsigned base_index)150 bool rogue_regarray_rewrite(rogue_shader *shader,
151                             rogue_regarray *regarray,
152                             enum rogue_reg_class class,
153                             unsigned base_index)
154 {
155    bool progress = true;
156 
157    enum rogue_reg_class orig_class = regarray->regs[0]->class;
158    unsigned orig_base_index = regarray->regs[0]->index;
159    const rogue_reg_info *info = &rogue_reg_infos[orig_class];
160 
161    assert(!regarray->parent);
162 
163    if (info->num) {
164       for (unsigned u = 0; u < regarray->size; ++u) {
165          assert(rogue_reg_is_used(shader, orig_class, orig_base_index) &&
166                 "Register not in use!");
167          rogue_clear_reg_use(shader, orig_class, orig_base_index);
168       }
169    }
170 
171    progress &= rogue_regarray_set(shader, regarray, class, base_index, true);
172 
173    rogue_foreach_subarray (subarray, regarray) {
174       unsigned idx_offset = subarray->regs[0]->index - regarray->regs[0]->index;
175       progress &= rogue_regarray_set(shader,
176                                      subarray,
177                                      class,
178                                      base_index + idx_offset,
179                                      false);
180    }
181 
182    assert(progress);
183    return progress;
184 }
185 
rogue_shader_destructor(void * ptr)186 static void rogue_shader_destructor(void *ptr)
187 {
188    rogue_shader *shader = ptr;
189    for (unsigned u = 0; u < ARRAY_SIZE(shader->reg_cache); ++u)
190       util_sparse_array_finish(&shader->reg_cache[u]);
191 
192    util_sparse_array_finish(&shader->regarray_cache);
193 }
194 
/**
 * \brief Allocates and initializes a new rogue_shader object.
 *
 * The shader is ralloc'd under mem_ctx; a destructor is registered so the
 * sparse-array caches are finished when the shader is freed.
 *
 * \param[in] mem_ctx The new shader's memory context.
 * \param[in] stage The new shader's stage.
 * \return The new shader.
 */
PUBLIC
rogue_shader *rogue_shader_create(void *mem_ctx, gl_shader_stage stage)
{
   rogue_debug_init();

   rogue_shader *shader = rzalloc_size(mem_ctx, sizeof(*shader));

   shader->stage = stage;

   list_inithead(&shader->blocks);

   /* Per-class register lists and, for classes with a fixed register count,
    * a usage bitset sized to that count.
    */
   for (enum rogue_reg_class class = 0; class < ROGUE_REG_CLASS_COUNT;
        ++class) {
      list_inithead(&shader->regs[class]);

      const rogue_reg_info *info = &rogue_reg_infos[class];
      if (info->num) {
         unsigned bitset_size =
            sizeof(*shader->regs_used[class]) * BITSET_WORDS(info->num);
         shader->regs_used[class] = rzalloc_size(shader, bitset_size);
      }
   }

   /* One register cache per class. */
   for (unsigned u = 0; u < ARRAY_SIZE(shader->reg_cache); ++u)
      util_sparse_array_init(&shader->reg_cache[u],
                             sizeof(rogue_reg *),
                             ROGUE_REG_CACHE_NODE_SIZE);

   list_inithead(&shader->regarrays);

   /* Single regarray cache shared by all classes (class is in the key). */
   util_sparse_array_init(&shader->regarray_cache,
                          sizeof(rogue_regarray *),
                          ROGUE_REGARRAY_CACHE_NODE_SIZE);

   for (unsigned u = 0; u < ARRAY_SIZE(shader->drc_trxns); ++u)
      list_inithead(&shader->drc_trxns[u]);

   list_inithead(&shader->imm_uses);

   ralloc_set_destructor(shader, rogue_shader_destructor);

   return shader;
}
245 
/**
 * \brief Allocates and initializes a new rogue_reg object.
 *
 * \param[in] shader The shader which will contain the register.
 * \param[in] class The register class.
 * \param[in] index The register index.
 * \param[in] reg_cached The shader register cache slot for this register.
 * \return The new register.
 */
static rogue_reg *rogue_reg_create(rogue_shader *shader,
                                   enum rogue_reg_class class,
                                   uint32_t index,
                                   rogue_reg **reg_cached)
{
   rogue_reg *reg = rzalloc_size(shader, sizeof(*reg));

   reg->shader = shader;
   reg->class = class;
   reg->index = index;
   /* Back-pointer to the cache slot so it can be invalidated on delete or
    * rewrite.
    */
   reg->cached = reg_cached;

   list_addtail(&reg->link, &shader->regs[class]);
   list_inithead(&reg->writes);
   list_inithead(&reg->uses);

   /* Mark the register in use for classes with a fixed register count. */
   const rogue_reg_info *info = &rogue_reg_infos[class];
   if (info->num) {
      assert(index < info->num);
      assert(!rogue_reg_is_used(shader, class, index) &&
             "Register already in use!");
      rogue_set_reg_use(shader, class, index);
   }

   return reg;
}
281 
282 /**
283  * \brief Deletes and frees a Rogue register.
284  *
285  * \param[in] reg The register to delete.
286  */
287 PUBLIC
rogue_reg_delete(rogue_reg * reg)288 void rogue_reg_delete(rogue_reg *reg)
289 {
290    assert(rogue_reg_is_unused(reg));
291    const rogue_reg_info *info = &rogue_reg_infos[reg->class];
292    if (info->num) {
293       assert(rogue_reg_is_used(reg->shader, reg->class, reg->index) &&
294              "Register not in use!");
295       rogue_clear_reg_use(reg->shader, reg->class, reg->index);
296    }
297 
298    if (reg->cached && *reg->cached == reg)
299       *reg->cached = NULL;
300 
301    list_del(&reg->link);
302    ralloc_free(reg);
303 }
304 
/* Looks up a register in the per-class cache, creating it on a miss.
 *
 * NOTE: the cache key (which encodes index, component, and the vec flag) is
 * also used as the register index for newly-created registers, so vector
 * element registers carry encoded indices.
 */
static inline rogue_reg *rogue_reg_cached_common(rogue_shader *shader,
                                                 enum rogue_reg_class class,
                                                 uint32_t index,
                                                 uint8_t component,
                                                 bool vec)
{
   uint32_t key = rogue_reg_cache_key(index, vec, component);

   rogue_reg **reg_cached =
      util_sparse_array_get(&shader->reg_cache[class], key);
   if (!*reg_cached)
      *reg_cached = rogue_reg_create(shader, class, key, reg_cached);

   return *reg_cached;
}
320 
rogue_reg_cached(rogue_shader * shader,enum rogue_reg_class class,uint32_t index)321 static inline rogue_reg *rogue_reg_cached(rogue_shader *shader,
322                                           enum rogue_reg_class class,
323                                           uint32_t index)
324 {
325    return rogue_reg_cached_common(shader, class, index, 0, false);
326 }
327 
rogue_vec_reg_cached(rogue_shader * shader,enum rogue_reg_class class,unsigned index,unsigned component)328 static inline rogue_reg *rogue_vec_reg_cached(rogue_shader *shader,
329                                               enum rogue_reg_class class,
330                                               unsigned index,
331                                               unsigned component)
332 {
333    return rogue_reg_cached_common(shader, class, index, component, true);
334 }
335 
336 /* TODO: Static inline in rogue.h? */
337 PUBLIC
rogue_ssa_reg(rogue_shader * shader,unsigned index)338 rogue_reg *rogue_ssa_reg(rogue_shader *shader, unsigned index)
339 {
340    return rogue_reg_cached(shader, ROGUE_REG_CLASS_SSA, index);
341 }
342 
343 PUBLIC
rogue_temp_reg(rogue_shader * shader,unsigned index)344 rogue_reg *rogue_temp_reg(rogue_shader *shader, unsigned index)
345 {
346    return rogue_reg_cached(shader, ROGUE_REG_CLASS_TEMP, index);
347 }
348 
349 PUBLIC
rogue_coeff_reg(rogue_shader * shader,unsigned index)350 rogue_reg *rogue_coeff_reg(rogue_shader *shader, unsigned index)
351 {
352    return rogue_reg_cached(shader, ROGUE_REG_CLASS_COEFF, index);
353 }
354 
355 PUBLIC
rogue_shared_reg(rogue_shader * shader,unsigned index)356 rogue_reg *rogue_shared_reg(rogue_shader *shader, unsigned index)
357 {
358    return rogue_reg_cached(shader, ROGUE_REG_CLASS_SHARED, index);
359 }
360 
361 PUBLIC
rogue_const_reg(rogue_shader * shader,unsigned index)362 rogue_reg *rogue_const_reg(rogue_shader *shader, unsigned index)
363 {
364    return rogue_reg_cached(shader, ROGUE_REG_CLASS_CONST, index);
365 }
366 
367 PUBLIC
rogue_pixout_reg(rogue_shader * shader,unsigned index)368 rogue_reg *rogue_pixout_reg(rogue_shader *shader, unsigned index)
369 {
370    return rogue_reg_cached(shader, ROGUE_REG_CLASS_PIXOUT, index);
371 }
372 
373 PUBLIC
rogue_special_reg(rogue_shader * shader,unsigned index)374 rogue_reg *rogue_special_reg(rogue_shader *shader, unsigned index)
375 {
376    return rogue_reg_cached(shader, ROGUE_REG_CLASS_SPECIAL, index);
377 }
378 
379 PUBLIC
rogue_vtxin_reg(rogue_shader * shader,unsigned index)380 rogue_reg *rogue_vtxin_reg(rogue_shader *shader, unsigned index)
381 {
382    return rogue_reg_cached(shader, ROGUE_REG_CLASS_VTXIN, index);
383 }
384 
385 PUBLIC
rogue_vtxout_reg(rogue_shader * shader,unsigned index)386 rogue_reg *rogue_vtxout_reg(rogue_shader *shader, unsigned index)
387 {
388    return rogue_reg_cached(shader, ROGUE_REG_CLASS_VTXOUT, index);
389 }
390 
391 PUBLIC
392 rogue_reg *
rogue_ssa_vec_reg(rogue_shader * shader,unsigned index,unsigned component)393 rogue_ssa_vec_reg(rogue_shader *shader, unsigned index, unsigned component)
394 {
395    return rogue_vec_reg_cached(shader, ROGUE_REG_CLASS_SSA, index, component);
396 }
397 
/* Finds the existing regarray (if any) that already owns registers of the
 * new regarray.
 *
 * When one is found:
 * - *is_parent is set to whether the new regarray is the larger of the two
 *   (i.e. would become the parent).
 * - *parent_regptr is pointed at the slot in the parent's regs array where
 *   the child's first register lives.
 *
 * Overlapping (non-nested) regarrays are unsupported and hit unreachable().
 */
static rogue_regarray *rogue_find_common_regarray(rogue_regarray *regarray,
                                                  bool *is_parent,
                                                  rogue_reg ***parent_regptr)
{
   rogue_regarray *common_regarray = NULL;

   /* Every member register that already belongs to a regarray must belong
    * to the same one.
    */
   for (unsigned u = 0; u < regarray->size; ++u) {
      if (regarray->regs[u]->regarray) {
         if (common_regarray && regarray->regs[u]->regarray != common_regarray)
            unreachable("Can't have overlapping regarrays.");
         else if (!common_regarray)
            common_regarray = regarray->regs[u]->regarray;
      }
   }

   if (common_regarray) {
      /* Inclusive index ranges covered by the new and existing regarrays. */
      unsigned min_index = regarray->regs[0]->index;
      unsigned max_index = min_index + regarray->size - 1;

      unsigned min_common_index = common_regarray->regs[0]->index;
      unsigned max_common_index = min_common_index + common_regarray->size - 1;

      /* TODO: Create a new parent array that encapsulates both ranges? */
      /* Ensure that the new regarray doesn't occupy only part of its parent,
       * and also registers *beyond* its parent. */
      if ((min_index > min_common_index && max_index > max_common_index) ||
          (min_index < min_common_index && max_index < max_common_index))
         unreachable("Can't have overflowing partial regarrays.");

      *is_parent = regarray->size > common_regarray->size;
      const rogue_regarray *parent_regarray = *is_parent ? regarray
                                                         : common_regarray;
      const rogue_regarray *child_regarray = *is_parent ? common_regarray
                                                        : regarray;

      /* Locate the child's first register within the parent's regs array. */
      for (unsigned u = 0; u < parent_regarray->size; ++u) {
         if (child_regarray->regs[0]->index ==
             parent_regarray->regs[u]->index) {
            *parent_regptr = &parent_regarray->regs[u];
            break;
         }
      }
   }

   return common_regarray;
}
444 
/* Allocates and initializes a new regarray of `size` registers starting at
 * (class, start_index), linking it into the shader and into any existing
 * parent/child regarray that shares its registers.
 *
 * `regarray_cached` is the cache slot that will own the new regarray so it
 * can be invalidated later.
 */
static rogue_regarray *rogue_regarray_create(rogue_shader *shader,
                                             unsigned size,
                                             enum rogue_reg_class class,
                                             unsigned start_index,
                                             uint8_t component,
                                             bool vec,
                                             rogue_regarray **regarray_cached)
{
   rogue_regarray *regarray = rzalloc_size(shader, sizeof(*regarray));
   regarray->regs = rzalloc_size(regarray, sizeof(*regarray->regs) * size);
   regarray->size = size;
   regarray->cached = regarray_cached;
   list_inithead(&regarray->children);
   list_inithead(&regarray->writes);
   list_inithead(&regarray->uses);

   /* Populate the member registers from the per-class register cache. */
   for (unsigned u = 0; u < size; ++u) {
      regarray->regs[u] =
         vec ? rogue_vec_reg_cached(shader, class, start_index, component + u)
             : rogue_reg_cached(shader, class, start_index + u);
   }

   bool is_parent = false;
   rogue_reg **parent_regptr = NULL;
   rogue_regarray *common_regarray =
      rogue_find_common_regarray(regarray, &is_parent, &parent_regptr);

   if (!common_regarray) {
      /* We don't share any registers with another regarray. */
      for (unsigned u = 0; u < size; ++u)
         regarray->regs[u]->regarray = regarray;
   } else {
      if (is_parent) {
         /* We share registers with another regarray, and it is a subset of us.
          */
         for (unsigned u = 0; u < common_regarray->size; ++u)
            common_regarray->regs[u]->regarray = regarray;

         /* Steal its children. */
         rogue_foreach_subarray_safe (subarray, common_regarray) {
            unsigned parent_index = common_regarray->regs[0]->index;
            unsigned child_index = subarray->regs[0]->index;
            assert(child_index >= parent_index);

            /* Re-point the child at the corresponding slice of the new
             * parent's regs array.
             */
            subarray->parent = regarray;
            subarray->regs = &parent_regptr[child_index - parent_index];

            list_del(&subarray->child_link);
            list_addtail(&subarray->child_link, &regarray->children);
         }

         /* The old regarray becomes a child, aliasing our regs storage. */
         common_regarray->parent = regarray;
         ralloc_free(common_regarray->regs);
         common_regarray->regs = parent_regptr;
         list_addtail(&common_regarray->child_link, &regarray->children);
      } else {
         /* We share registers with another regarray, and we are a subset of it.
          */
         regarray->parent = common_regarray;
         ralloc_free(regarray->regs);
         regarray->regs = parent_regptr;
         assert(list_is_empty(&regarray->children));
         list_addtail(&regarray->child_link, &common_regarray->children);
      }
   }

   list_addtail(&regarray->link, &shader->regarrays);

   return regarray;
}
515 
516 static inline rogue_regarray *
rogue_regarray_cached_common(rogue_shader * shader,unsigned size,enum rogue_reg_class class,uint32_t start_index,uint8_t component,bool vec)517 rogue_regarray_cached_common(rogue_shader *shader,
518                              unsigned size,
519                              enum rogue_reg_class class,
520                              uint32_t start_index,
521                              uint8_t component,
522                              bool vec)
523 {
524    uint64_t key =
525       rogue_regarray_cache_key(size, class, start_index, vec, component);
526 
527    rogue_regarray **regarray_cached =
528       util_sparse_array_get(&shader->regarray_cache, key);
529    if (!*regarray_cached)
530       *regarray_cached = rogue_regarray_create(shader,
531                                                size,
532                                                class,
533                                                start_index,
534                                                component,
535                                                vec,
536                                                regarray_cached);
537 
538    return *regarray_cached;
539 }
540 
541 PUBLIC
rogue_regarray_cached(rogue_shader * shader,unsigned size,enum rogue_reg_class class,uint32_t start_index)542 rogue_regarray *rogue_regarray_cached(rogue_shader *shader,
543                                       unsigned size,
544                                       enum rogue_reg_class class,
545                                       uint32_t start_index)
546 {
547    return rogue_regarray_cached_common(shader,
548                                        size,
549                                        class,
550                                        start_index,
551                                        0,
552                                        false);
553 }
554 
555 PUBLIC
rogue_vec_regarray_cached(rogue_shader * shader,unsigned size,enum rogue_reg_class class,uint32_t start_index,uint8_t component)556 rogue_regarray *rogue_vec_regarray_cached(rogue_shader *shader,
557                                           unsigned size,
558                                           enum rogue_reg_class class,
559                                           uint32_t start_index,
560                                           uint8_t component)
561 {
562    return rogue_regarray_cached_common(shader,
563                                        size,
564                                        class,
565                                        start_index,
566                                        component,
567                                        true);
568 }
569 
570 PUBLIC
571 rogue_regarray *
rogue_ssa_regarray(rogue_shader * shader,unsigned size,unsigned start_index)572 rogue_ssa_regarray(rogue_shader *shader, unsigned size, unsigned start_index)
573 {
574    return rogue_regarray_cached(shader, size, ROGUE_REG_CLASS_SSA, start_index);
575 }
576 
577 PUBLIC
578 rogue_regarray *
rogue_temp_regarray(rogue_shader * shader,unsigned size,unsigned start_index)579 rogue_temp_regarray(rogue_shader *shader, unsigned size, unsigned start_index)
580 {
581    return rogue_regarray_cached(shader, size, ROGUE_REG_CLASS_TEMP, start_index);
582 }
583 
584 PUBLIC
585 rogue_regarray *
rogue_coeff_regarray(rogue_shader * shader,unsigned size,unsigned start_index)586 rogue_coeff_regarray(rogue_shader *shader, unsigned size, unsigned start_index)
587 {
588    return rogue_regarray_cached(shader,
589                                 size,
590                                 ROGUE_REG_CLASS_COEFF,
591                                 start_index);
592 }
593 
594 PUBLIC
595 rogue_regarray *
rogue_shared_regarray(rogue_shader * shader,unsigned size,unsigned start_index)596 rogue_shared_regarray(rogue_shader *shader, unsigned size, unsigned start_index)
597 {
598    return rogue_regarray_cached(shader,
599                                 size,
600                                 ROGUE_REG_CLASS_SHARED,
601                                 start_index);
602 }
603 
604 PUBLIC
rogue_ssa_vec_regarray(rogue_shader * shader,unsigned size,unsigned start_index,unsigned component)605 rogue_regarray *rogue_ssa_vec_regarray(rogue_shader *shader,
606                                        unsigned size,
607                                        unsigned start_index,
608                                        unsigned component)
609 {
610    return rogue_vec_regarray_cached(shader,
611                                     size,
612                                     ROGUE_REG_CLASS_SSA,
613                                     start_index,
614                                     component);
615 }
616 
617 /**
618  * \brief Allocates and initializes a new rogue_block object.
619  *
620  * \param[in] shader The shader that the new block belongs to.
621  * \param[in] label The (optional) block label.
622  * \return The new block.
623  */
624 PUBLIC
rogue_block_create(rogue_shader * shader,const char * label)625 rogue_block *rogue_block_create(rogue_shader *shader, const char *label)
626 {
627    rogue_block *block = rzalloc_size(shader, sizeof(*block));
628 
629    block->shader = shader;
630    list_inithead(&block->instrs);
631    list_inithead(&block->uses);
632    block->index = shader->next_block++;
633    block->label = ralloc_strdup(block, label);
634 
635    return block;
636 }
637 
638 /**
639  * \brief Initialises a Rogue instruction.
640  *
641  * \param[in] instr The instruction to initialise.
642  * \param[in] type The instruction type.
643  * \param[in] block The block which will contain the instruction.
644  */
rogue_instr_init(rogue_instr * instr,enum rogue_instr_type type,rogue_block * block)645 static inline void rogue_instr_init(rogue_instr *instr,
646                                     enum rogue_instr_type type,
647                                     rogue_block *block)
648 {
649    instr->type = type;
650    instr->exec_cond = ROGUE_EXEC_COND_PE_TRUE;
651    instr->repeat = 1;
652    instr->index = block->shader->next_instr++;
653    instr->block = block;
654 }
655 
656 /**
657  * \brief Allocates and initializes a new rogue_alu_instr object.
658  *
659  * \param[in] block The block that the new ALU instruction belongs to.
660  * \param[in] op The ALU operation.
661  * \return The new ALU instruction.
662  */
663 PUBLIC
rogue_alu_instr_create(rogue_block * block,enum rogue_alu_op op)664 rogue_alu_instr *rogue_alu_instr_create(rogue_block *block,
665                                         enum rogue_alu_op op)
666 {
667    rogue_alu_instr *alu = rzalloc_size(block, sizeof(*alu));
668    rogue_instr_init(&alu->instr, ROGUE_INSTR_TYPE_ALU, block);
669    alu->op = op;
670 
671    return alu;
672 }
673 
674 /**
675  * \brief Allocates and initializes a new rogue_backend_instr object.
676  *
677  * \param[in] block The block that the new backend instruction belongs to.
678  * \param[in] op The backend operation.
679  * \return The new backend instruction.
680  */
681 PUBLIC
rogue_backend_instr_create(rogue_block * block,enum rogue_backend_op op)682 rogue_backend_instr *rogue_backend_instr_create(rogue_block *block,
683                                                 enum rogue_backend_op op)
684 {
685    rogue_backend_instr *backend = rzalloc_size(block, sizeof(*backend));
686    rogue_instr_init(&backend->instr, ROGUE_INSTR_TYPE_BACKEND, block);
687    backend->op = op;
688 
689    return backend;
690 }
691 
692 /**
693  * \brief Allocates and initializes a new rogue_ctrl_instr object.
694  *
695  * \param[in] block The block that the new control instruction belongs to.
696  * \param[in] op The control operation.
697  * \return The new control instruction.
698  */
699 PUBLIC
rogue_ctrl_instr_create(rogue_block * block,enum rogue_ctrl_op op)700 rogue_ctrl_instr *rogue_ctrl_instr_create(rogue_block *block,
701                                           enum rogue_ctrl_op op)
702 {
703    rogue_ctrl_instr *ctrl = rzalloc_size(block, sizeof(*ctrl));
704    rogue_instr_init(&ctrl->instr, ROGUE_INSTR_TYPE_CTRL, block);
705    ctrl->op = op;
706 
707    return ctrl;
708 }
709 
710 /**
711  * \brief Allocates and initializes a new rogue_bitwise_instr object.
712  *
713  * \param[in] block The block that the new bitwise instruction belongs to.
714  * \param[in] op The bitwise operation.
715  * \return The new bitwise instruction.
716  */
717 PUBLIC
rogue_bitwise_instr_create(rogue_block * block,enum rogue_bitwise_op op)718 rogue_bitwise_instr *rogue_bitwise_instr_create(rogue_block *block,
719                                                 enum rogue_bitwise_op op)
720 {
721    rogue_bitwise_instr *bitwise = rzalloc_size(block, sizeof(*bitwise));
722    rogue_instr_init(&bitwise->instr, ROGUE_INSTR_TYPE_BITWISE, block);
723    bitwise->op = op;
724 
725    return bitwise;
726 }
727 
728 /**
729  * \brief Tracks/links objects that are written to/modified by an instruction.
730  *
731  * \param[in] instr The instruction.
732  */
733 PUBLIC
rogue_link_instr_write(rogue_instr * instr)734 void rogue_link_instr_write(rogue_instr *instr)
735 {
736    switch (instr->type) {
737    case ROGUE_INSTR_TYPE_ALU: {
738       rogue_alu_instr *alu = rogue_instr_as_alu(instr);
739       const unsigned num_dsts = rogue_alu_op_infos[alu->op].num_dsts;
740 
741       for (unsigned i = 0; i < num_dsts; ++i) {
742          if (rogue_ref_is_reg(&alu->dst[i].ref)) {
743             rogue_reg_write *write = &alu->dst_write[i].reg;
744             rogue_reg *reg = alu->dst[i].ref.reg;
745             rogue_link_instr_write_reg(instr, write, reg, i);
746          } else if (rogue_ref_is_regarray(&alu->dst[i].ref)) {
747             rogue_regarray_write *write = &alu->dst_write[i].regarray;
748             rogue_regarray *regarray = alu->dst[i].ref.regarray;
749             rogue_link_instr_write_regarray(instr, write, regarray, i);
750          } else if (rogue_ref_is_io(&alu->dst[i].ref)) { /* TODO: check WHICH IO
751                                                             IT IS */
752          } else {
753             unreachable("Unsupported destination reference type.");
754          }
755       }
756 
757       break;
758    }
759 
760    case ROGUE_INSTR_TYPE_BACKEND: {
761       rogue_backend_instr *backend = rogue_instr_as_backend(instr);
762       const unsigned num_dsts = rogue_backend_op_infos[backend->op].num_dsts;
763 
764       for (unsigned i = 0; i < num_dsts; ++i) {
765          if (rogue_ref_is_reg(&backend->dst[i].ref)) {
766             rogue_reg_write *write = &backend->dst_write[i].reg;
767             rogue_reg *reg = backend->dst[i].ref.reg;
768             rogue_link_instr_write_reg(instr, write, reg, i);
769          } else if (rogue_ref_is_regarray(&backend->dst[i].ref)) {
770             rogue_regarray_write *write = &backend->dst_write[i].regarray;
771             rogue_regarray *regarray = backend->dst[i].ref.regarray;
772             rogue_link_instr_write_regarray(instr, write, regarray, i);
773          } else if (rogue_ref_is_io(&backend->dst[i].ref)) { /* TODO: check
774                                                                 WHICH IO IT IS
775                                                               */
776          } else {
777             unreachable("Unsupported destination reference type.");
778          }
779       }
780 
781       break;
782    }
783 
784    case ROGUE_INSTR_TYPE_CTRL: {
785       rogue_ctrl_instr *ctrl = rogue_instr_as_ctrl(instr);
786       const unsigned num_dsts = rogue_ctrl_op_infos[ctrl->op].num_dsts;
787 
788       for (unsigned i = 0; i < num_dsts; ++i) {
789          if (rogue_ref_is_reg(&ctrl->dst[i].ref)) {
790             rogue_reg_write *write = &ctrl->dst_write[i].reg;
791             rogue_reg *reg = ctrl->dst[i].ref.reg;
792             rogue_link_instr_write_reg(instr, write, reg, i);
793          } else if (rogue_ref_is_regarray(&ctrl->dst[i].ref)) {
794             rogue_regarray_write *write = &ctrl->dst_write[i].regarray;
795             rogue_regarray *regarray = ctrl->dst[i].ref.regarray;
796             rogue_link_instr_write_regarray(instr, write, regarray, i);
797          } else if (rogue_ref_is_io(&ctrl->dst[i].ref)) { /* TODO: check WHICH
798                                                              IO IT IS */
799          } else {
800             unreachable("Unsupported destination reference type.");
801          }
802       }
803 
804       break;
805    }
806 
807    case ROGUE_INSTR_TYPE_BITWISE: {
808       rogue_bitwise_instr *bitwise = rogue_instr_as_bitwise(instr);
809       const unsigned num_dsts = rogue_bitwise_op_infos[bitwise->op].num_dsts;
810 
811       for (unsigned i = 0; i < num_dsts; ++i) {
812          if (rogue_ref_is_reg(&bitwise->dst[i].ref)) {
813             rogue_reg_write *write = &bitwise->dst_write[i].reg;
814             rogue_reg *reg = bitwise->dst[i].ref.reg;
815             rogue_link_instr_write_reg(instr, write, reg, i);
816          } else if (rogue_ref_is_regarray(&bitwise->dst[i].ref)) {
817             rogue_regarray_write *write = &bitwise->dst_write[i].regarray;
818             rogue_regarray *regarray = bitwise->dst[i].ref.regarray;
819             rogue_link_instr_write_regarray(instr, write, regarray, i);
820          } else if (rogue_ref_is_io(&bitwise->dst[i].ref)) { /* TODO: check
821                                                                 WHICH IO IT IS
822                                                               */
823          } else {
824             unreachable("Unsupported destination reference type.");
825          }
826       }
827 
828       break;
829    }
830 
831    default:
832       unreachable("Unsupported instruction type.");
833    }
834 }
835 
836 /**
837  * \brief Tracks/links objects that are used by/read from an instruction.
838  *
839  * \param[in] instr The instruction.
840  */
PUBLIC
void rogue_link_instr_use(rogue_instr *instr)
{
   switch (instr->type) {
   case ROGUE_INSTR_TYPE_ALU: {
      rogue_alu_instr *alu = rogue_instr_as_alu(instr);
      const unsigned num_srcs = rogue_alu_op_infos[alu->op].num_srcs;

      /* Link each source operand to the object it reads from. */
      for (unsigned i = 0; i < num_srcs; ++i) {
         if (rogue_ref_is_reg(&alu->src[i].ref)) {
            rogue_reg_use *use = &alu->src_use[i].reg;
            rogue_reg *reg = alu->src[i].ref.reg;
            rogue_link_instr_use_reg(instr, use, reg, i);
         } else if (rogue_ref_is_regarray(&alu->src[i].ref)) {
            rogue_regarray_use *use = &alu->src_use[i].regarray;
            rogue_regarray *regarray = alu->src[i].ref.regarray;
            rogue_link_instr_use_regarray(instr, use, regarray, i);
         } else if (rogue_ref_is_imm(&alu->src[i].ref)) {
            /* Immediate uses are tracked at the shader level. */
            rogue_link_imm_use(instr->block->shader,
                               instr,
                               i,
                               rogue_ref_get_imm(&alu->src[i].ref));
         } else if (rogue_ref_is_io(&alu->src[i].ref)) {
            /* I/O sources are not tracked. TODO: check which I/O it is. */
         } else if (rogue_ref_is_val(&alu->src[i].ref)) {
            /* Plain values carry no linkage; nothing to track. */
         } else {
            unreachable("Unsupported source reference type.");
         }
      }

      break;
   }

   case ROGUE_INSTR_TYPE_BACKEND: {
      rogue_backend_instr *backend = rogue_instr_as_backend(instr);
      const unsigned num_srcs = rogue_backend_op_infos[backend->op].num_srcs;

      for (unsigned i = 0; i < num_srcs; ++i) {
         if (rogue_ref_is_reg(&backend->src[i].ref)) {
            rogue_reg_use *use = &backend->src_use[i].reg;
            rogue_reg *reg = backend->src[i].ref.reg;
            rogue_link_instr_use_reg(instr, use, reg, i);
         } else if (rogue_ref_is_regarray(&backend->src[i].ref)) {
            rogue_regarray_use *use = &backend->src_use[i].regarray;
            rogue_regarray *regarray = backend->src[i].ref.regarray;
            rogue_link_instr_use_regarray(instr, use, regarray, i);
         } else if (rogue_ref_is_drc(&backend->src[i].ref)) {
            /* DRC (dependent read counter) transactions are tracked at the
             * shader level.
             */
            rogue_link_drc_trxn(instr->block->shader,
                                instr,
                                rogue_ref_get_drc(&backend->src[i].ref));
         } else if (rogue_ref_is_io(&backend->src[i].ref)) {
            /* I/O sources are not tracked. TODO: check which I/O it is. */
         } else if (rogue_ref_is_val(&backend->src[i].ref)) {
            /* Plain values carry no linkage; nothing to track. */
         } else {
            unreachable("Unsupported source reference type.");
         }
      }

      break;
   }

   case ROGUE_INSTR_TYPE_CTRL: {
      rogue_ctrl_instr *ctrl = rogue_instr_as_ctrl(instr);
      const unsigned num_srcs = rogue_ctrl_op_infos[ctrl->op].num_srcs;

      /* Branch instruction: no sources, but a target block to link. */
      if (!num_srcs && ctrl->target_block) {
         rogue_link_instr_use_block(instr,
                                    &ctrl->block_use,
                                    ctrl->target_block);
         break;
      }

      for (unsigned i = 0; i < num_srcs; ++i) {
         if (rogue_ref_is_reg(&ctrl->src[i].ref)) {
            rogue_reg_use *use = &ctrl->src_use[i].reg;
            rogue_reg *reg = ctrl->src[i].ref.reg;
            rogue_link_instr_use_reg(instr, use, reg, i);
         } else if (rogue_ref_is_regarray(&ctrl->src[i].ref)) {
            rogue_regarray_use *use = &ctrl->src_use[i].regarray;
            rogue_regarray *regarray = ctrl->src[i].ref.regarray;
            rogue_link_instr_use_regarray(instr, use, regarray, i);
         } else if (rogue_ref_is_drc(&ctrl->src[i].ref)) {
            /* WDF instructions consume/release drcs, handled independently. */
            if (ctrl->op != ROGUE_CTRL_OP_WDF)
               rogue_link_drc_trxn(instr->block->shader,
                                   instr,
                                   rogue_ref_get_drc(&ctrl->src[i].ref));
         } else if (rogue_ref_is_io(&ctrl->src[i].ref)) {
            /* I/O sources are not tracked. TODO: check which I/O it is. */
         } else if (rogue_ref_is_val(&ctrl->src[i].ref)) {
            /* Plain values carry no linkage; nothing to track. */
         } else {
            unreachable("Unsupported source reference type.");
         }
      }

      break;
   }

   case ROGUE_INSTR_TYPE_BITWISE: {
      rogue_bitwise_instr *bitwise = rogue_instr_as_bitwise(instr);
      const unsigned num_srcs = rogue_bitwise_op_infos[bitwise->op].num_srcs;

      for (unsigned i = 0; i < num_srcs; ++i) {
         if (rogue_ref_is_reg(&bitwise->src[i].ref)) {
            rogue_reg_use *use = &bitwise->src_use[i].reg;
            rogue_reg *reg = bitwise->src[i].ref.reg;
            rogue_link_instr_use_reg(instr, use, reg, i);
         } else if (rogue_ref_is_regarray(&bitwise->src[i].ref)) {
            rogue_regarray_use *use = &bitwise->src_use[i].regarray;
            rogue_regarray *regarray = bitwise->src[i].ref.regarray;
            rogue_link_instr_use_regarray(instr, use, regarray, i);
         } else if (rogue_ref_is_drc(&bitwise->src[i].ref)) {
            rogue_link_drc_trxn(instr->block->shader,
                                instr,
                                rogue_ref_get_drc(&bitwise->src[i].ref));
         } else if (rogue_ref_is_io(&bitwise->src[i].ref)) {
            /* I/O sources are not tracked. TODO: check which I/O it is. */
         } else if (rogue_ref_is_val(&bitwise->src[i].ref)) {
            /* Plain values carry no linkage; nothing to track. */
         } else {
            unreachable("Unsupported source reference type.");
         }
      }

      break;
   }

   default:
      unreachable("Unsupported instruction type.");
   }
}
974 
975 /**
976  * \brief Untracks/unlinks objects that are written to/modified by an
977  * instruction.
978  *
979  * \param[in] instr The instruction.
980  */
981 PUBLIC
rogue_unlink_instr_write(rogue_instr * instr)982 void rogue_unlink_instr_write(rogue_instr *instr)
983 {
984    switch (instr->type) {
985    case ROGUE_INSTR_TYPE_ALU: {
986       rogue_alu_instr *alu = rogue_instr_as_alu(instr);
987       const unsigned num_dsts = rogue_alu_op_infos[alu->op].num_dsts;
988 
989       for (unsigned i = 0; i < num_dsts; ++i) {
990          if (rogue_ref_is_reg(&alu->dst[i].ref)) {
991             rogue_reg_write *write = &alu->dst_write[i].reg;
992             rogue_unlink_instr_write_reg(instr, write);
993          } else if (rogue_ref_is_regarray(&alu->dst[i].ref)) {
994             rogue_regarray_write *write = &alu->dst_write[i].regarray;
995             rogue_unlink_instr_write_regarray(instr, write);
996          } else if (rogue_ref_is_io(&alu->dst[i].ref)) { /* TODO: check WHICH IO
997                                                             IT IS */
998          } else {
999             unreachable("Unsupported destination reference type.");
1000          }
1001       }
1002 
1003       break;
1004    }
1005 
1006    case ROGUE_INSTR_TYPE_BACKEND: {
1007       rogue_backend_instr *backend = rogue_instr_as_backend(instr);
1008       const unsigned num_dsts = rogue_backend_op_infos[backend->op].num_dsts;
1009 
1010       for (unsigned i = 0; i < num_dsts; ++i) {
1011          if (rogue_ref_is_reg(&backend->dst[i].ref)) {
1012             rogue_reg_write *write = &backend->dst_write[i].reg;
1013             rogue_unlink_instr_write_reg(instr, write);
1014          } else if (rogue_ref_is_regarray(&backend->dst[i].ref)) {
1015             rogue_regarray_write *write = &backend->dst_write[i].regarray;
1016             rogue_unlink_instr_write_regarray(instr, write);
1017          } else if (rogue_ref_is_io(&backend->dst[i].ref)) { /* TODO: check
1018                                                                 WHICH IO IT IS
1019                                                               */
1020          } else {
1021             unreachable("Unsupported destination reference type.");
1022          }
1023       }
1024 
1025       break;
1026    }
1027 
1028    case ROGUE_INSTR_TYPE_CTRL: {
1029       rogue_ctrl_instr *ctrl = rogue_instr_as_ctrl(instr);
1030       const unsigned num_dsts = rogue_ctrl_op_infos[ctrl->op].num_dsts;
1031 
1032       for (unsigned i = 0; i < num_dsts; ++i) {
1033          if (rogue_ref_is_reg(&ctrl->dst[i].ref)) {
1034             rogue_reg_write *write = &ctrl->dst_write[i].reg;
1035             rogue_unlink_instr_write_reg(instr, write);
1036          } else if (rogue_ref_is_regarray(&ctrl->dst[i].ref)) {
1037             rogue_regarray_write *write = &ctrl->dst_write[i].regarray;
1038             rogue_unlink_instr_write_regarray(instr, write);
1039          } else if (rogue_ref_is_io(&ctrl->dst[i].ref)) { /* TODO: check WHICH
1040                                                              IO IT IS */
1041          } else {
1042             unreachable("Unsupported destination reference type.");
1043          }
1044       }
1045 
1046       break;
1047    }
1048 
1049    case ROGUE_INSTR_TYPE_BITWISE: {
1050       rogue_bitwise_instr *bitwise = rogue_instr_as_bitwise(instr);
1051       const unsigned num_dsts = rogue_bitwise_op_infos[bitwise->op].num_dsts;
1052 
1053       for (unsigned i = 0; i < num_dsts; ++i) {
1054          if (rogue_ref_is_reg(&bitwise->dst[i].ref)) {
1055             rogue_reg_write *write = &bitwise->dst_write[i].reg;
1056             rogue_unlink_instr_write_reg(instr, write);
1057          } else if (rogue_ref_is_regarray(&bitwise->dst[i].ref)) {
1058             rogue_regarray_write *write = &bitwise->dst_write[i].regarray;
1059             rogue_unlink_instr_write_regarray(instr, write);
1060          } else {
1061             unreachable("Invalid destination reference type.");
1062          }
1063       }
1064 
1065       break;
1066    }
1067 
1068    default:
1069       unreachable("Unsupported instruction type.");
1070    }
1071 }
1072 
1073 /**
1074  * \brief Untracks/unlinks objects that are used by/read from an instruction.
1075  *
1076  * \param[in] instr The instruction.
1077  */
PUBLIC
void rogue_unlink_instr_use(rogue_instr *instr)
{
   switch (instr->type) {
   case ROGUE_INSTR_TYPE_ALU: {
      rogue_alu_instr *alu = rogue_instr_as_alu(instr);
      const unsigned num_srcs = rogue_alu_op_infos[alu->op].num_srcs;

      /* Unlink each source operand from the object it reads from. */
      for (unsigned i = 0; i < num_srcs; ++i) {
         if (rogue_ref_is_reg(&alu->src[i].ref)) {
            rogue_reg_use *use = &alu->src_use[i].reg;
            rogue_unlink_instr_use_reg(instr, use);
         } else if (rogue_ref_is_regarray(&alu->src[i].ref)) {
            rogue_regarray_use *use = &alu->src_use[i].regarray;
            rogue_unlink_instr_use_regarray(instr, use);
         } else if (rogue_ref_is_imm(&alu->src[i].ref)) {
            rogue_unlink_imm_use(instr,
                                 &rogue_ref_get_imm(&alu->src[i].ref)->use);
         } else if (rogue_ref_is_io(&alu->src[i].ref)) {
            /* I/O sources are not tracked; nothing to unlink.
             * TODO: check which I/O it is.
             */
         } else if (rogue_ref_is_val(&alu->src[i].ref)) {
            /* Plain values carry no linkage; nothing to unlink. */
         } else {
            unreachable("Unsupported source reference type.");
         }
      }

      break;
   }

   case ROGUE_INSTR_TYPE_BACKEND: {
      rogue_backend_instr *backend = rogue_instr_as_backend(instr);
      const unsigned num_srcs = rogue_backend_op_infos[backend->op].num_srcs;

      for (unsigned i = 0; i < num_srcs; ++i) {
         if (rogue_ref_is_reg(&backend->src[i].ref)) {
            rogue_reg_use *use = &backend->src_use[i].reg;
            rogue_unlink_instr_use_reg(instr, use);
         } else if (rogue_ref_is_regarray(&backend->src[i].ref)) {
            rogue_regarray_use *use = &backend->src_use[i].regarray;
            rogue_unlink_instr_use_regarray(instr, use);
         } else if (rogue_ref_is_drc(&backend->src[i].ref)) {
            /* DRC transactions are tracked at the shader level. */
            rogue_unlink_drc_trxn(instr->block->shader,
                                  instr,
                                  rogue_ref_get_drc(&backend->src[i].ref));
         } else if (rogue_ref_is_io(&backend->src[i].ref)) {
            /* I/O sources are not tracked; nothing to unlink.
             * TODO: check which I/O it is.
             */
         } else if (rogue_ref_is_val(&backend->src[i].ref)) {
            /* Plain values carry no linkage; nothing to unlink. */
         } else {
            unreachable("Unsupported source reference type.");
         }
      }

      break;
   }

   case ROGUE_INSTR_TYPE_CTRL: {
      rogue_ctrl_instr *ctrl = rogue_instr_as_ctrl(instr);
      const unsigned num_srcs = rogue_ctrl_op_infos[ctrl->op].num_srcs;

      /* Branch instruction: no sources, but a target block to unlink. */
      if (!num_srcs && ctrl->target_block) {
         rogue_unlink_instr_use_block(instr, &ctrl->block_use);
         break;
      }

      for (unsigned i = 0; i < num_srcs; ++i) {
         if (rogue_ref_is_reg(&ctrl->src[i].ref)) {
            rogue_reg_use *use = &ctrl->src_use[i].reg;
            rogue_unlink_instr_use_reg(instr, use);
         } else if (rogue_ref_is_regarray(&ctrl->src[i].ref)) {
            rogue_regarray_use *use = &ctrl->src_use[i].regarray;
            rogue_unlink_instr_use_regarray(instr, use);
         } else if (rogue_ref_is_drc(&ctrl->src[i].ref)) {
            /* WDF instructions consume/release drcs, handled independently. */
            if (ctrl->op != ROGUE_CTRL_OP_WDF)
               rogue_unlink_drc_trxn(instr->block->shader,
                                     instr,
                                     rogue_ref_get_drc(&ctrl->src[i].ref));
         } else if (rogue_ref_is_io(&ctrl->src[i].ref)) {
            /* I/O sources are not tracked; nothing to unlink.
             * TODO: check which I/O it is.
             */
         } else if (rogue_ref_is_val(&ctrl->src[i].ref)) {
            /* Plain values carry no linkage; nothing to unlink. */
         } else {
            unreachable("Unsupported source reference type.");
         }
      }

      break;
   }

   case ROGUE_INSTR_TYPE_BITWISE: {
      rogue_bitwise_instr *bitwise = rogue_instr_as_bitwise(instr);
      const unsigned num_srcs = rogue_bitwise_op_infos[bitwise->op].num_srcs;

      for (unsigned i = 0; i < num_srcs; ++i) {
         if (rogue_ref_is_reg(&bitwise->src[i].ref)) {
            rogue_reg_use *use = &bitwise->src_use[i].reg;
            rogue_unlink_instr_use_reg(instr, use);
         } else if (rogue_ref_is_regarray(&bitwise->src[i].ref)) {
            rogue_regarray_use *use = &bitwise->src_use[i].regarray;
            rogue_unlink_instr_use_regarray(instr, use);
         } else if (rogue_ref_is_drc(&bitwise->src[i].ref)) {
            rogue_unlink_drc_trxn(instr->block->shader,
                                  instr,
                                  rogue_ref_get_drc(&bitwise->src[i].ref));
         } else if (rogue_ref_is_io(&bitwise->src[i].ref)) {
            /* I/O sources are not tracked; nothing to unlink.
             * TODO: check which I/O it is.
             */
         } else if (rogue_ref_is_val(&bitwise->src[i].ref)) {
            /* Plain values carry no linkage; nothing to unlink. */
         } else {
            unreachable("Unsupported source reference type.");
         }
      }

      break;
   }

   default:
      unreachable("Unsupported instruction type.");
   }
}
1199 
/* ralloc destructor for the compiler context: drops the reference taken on
 * the glsl_type singleton in rogue_compiler_create().
 */
static void rogue_compiler_destructor(UNUSED void *ptr)
{
   glsl_type_singleton_decref();
}
1204 
1205 /**
1206  * \brief Creates and sets up a Rogue compiler context.
1207  *
1208  * \param[in] dev_info Device info pointer.
1209  * \return A pointer to the new compiler context, or NULL on failure.
1210  */
1211 PUBLIC
rogue_compiler_create(const struct pvr_device_info * dev_info)1212 rogue_compiler *rogue_compiler_create(const struct pvr_device_info *dev_info)
1213 {
1214    rogue_compiler *compiler;
1215 
1216    rogue_debug_init();
1217 
1218    compiler = rzalloc_size(NULL, sizeof(*compiler));
1219    if (!compiler)
1220       return NULL;
1221 
1222    compiler->dev_info = dev_info;
1223 
1224    /* TODO: Additional compiler setup (e.g. number of internal registers, BRNs,
1225     * and other hw-specific info). */
1226 
1227    glsl_type_singleton_init_or_ref();
1228 
1229    ralloc_set_destructor(compiler, rogue_compiler_destructor);
1230 
1231    return compiler;
1232 }
1233 
1234 /**
1235  * \brief Creates and sets up a shared multi-stage build context.
1236  *
1237  * \param[in] compiler The compiler context.
1238  * \return A pointer to the new build context, or NULL on failure.
1239  */
1240 PUBLIC
1241 rogue_build_ctx *
rogue_build_context_create(rogue_compiler * compiler,struct pvr_pipeline_layout * pipeline_layout)1242 rogue_build_context_create(rogue_compiler *compiler,
1243                            struct pvr_pipeline_layout *pipeline_layout)
1244 {
1245    rogue_build_ctx *ctx;
1246 
1247    ctx = rzalloc_size(NULL, sizeof(*ctx));
1248    if (!ctx)
1249       return NULL;
1250 
1251    ctx->compiler = compiler;
1252    ctx->pipeline_layout = pipeline_layout;
1253 
1254    /* nir/rogue/binary shaders need to be default-zeroed;
1255     * this is taken care of by rzalloc_size.
1256     */
1257 
1258    /* Setup non-zero defaults. */
1259    ctx->stage_data.fs.msaa_mode = ROGUE_MSAA_MODE_PIXEL;
1260 
1261    return ctx;
1262 }
1263