/*
 * Copyright © 2014 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jason Ekstrand (jason@jlekstrand.net)
 *
 */

#include "nir.h"
#include "nir_builder.h"
#include "nir_deref.h"
#include "nir_phi_builder.h"
#include "nir_vla.h"

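/* One node in the tree of derefs rooted at each local variable.
 *
 * Children are keyed by struct member index or constant array index; array
 * derefs with a wildcard or non-constant index get the separate "wildcard"
 * and "indirect" children below.
 */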
struct deref_node {
   struct deref_node *parent;
   const struct glsl_type *type;

   bool lower_to_ssa;

   /* Only valid for things that end up in the direct list.
    * Note that multiple nir_deref_instrs may correspond to this node, but
    * they will all be equivalent, so any is as good as the other.
    */
   nir_deref_path path;
   struct exec_node direct_derefs_link;

   struct set *loads;
   struct set *stores;
   struct set *copies;

   struct nir_phi_builder_value *pb_value;

   /* True if this node is fully direct.  If set, it must be in the children
    * array of its parent.
    */
   bool is_direct;

   /* Set on a root node for a variable to indicate that variable is used by a
    * cast or passed through some other sequence of instructions that are not
    * derefs.
    */
   bool has_complex_use;

   struct deref_node *wildcard;
   struct deref_node *indirect;
   struct deref_node *children[0];
};

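/* Sentinel value for a deref that is statically known to be out-of-bounds.
 * During renaming, loads through such a deref are replaced with undefs and
 * stores through it are simply removed.
 */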
#define UNDEF_NODE ((struct deref_node *)(uintptr_t)1)

struct lower_variables_state {
   nir_shader *shader;
   void *dead_ctx;
   nir_function_impl *impl;

   /* A hash table mapping variables to deref_node data */
   struct hash_table *deref_var_nodes;

   /* A list of the deref_nodes for fully-qualified direct dereferences,
    * i.e. dereferences with no indirect or wildcard array dereferences.
    *
    * At the moment, we only lower loads, stores, and copies that can be
    * trivially lowered to loads and stores, i.e. copies with no indirects
    * and no wildcards.  If a part of a variable that is being loaded from
    * and/or stored into is also involved in a copy operation with
    * wildcards, then we lower that copy operation to loads and stores, but
    * otherwise we leave copies with wildcards alone.  Since the only derefs
    * used in these loads, stores, and trivial copies are ones with no
    * wildcards and no indirects, these are precisely the derefs that we
    * can actually consider lowering.
    */
   struct exec_list direct_deref_nodes;

   /* Controls whether get_deref_node will add nodes to the
    * direct_deref_nodes list.  This is turned on when we are initially
    * scanning for load/store instructions.  It is then turned off so we
    * don't accidentally change the direct_deref_nodes list while we're
    * iterating through it.
    */
   bool add_to_direct_deref_nodes;

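   /* Phi builder used during renaming to construct phi nodes and to track
    * the current SSA def of each lowered deref on a per-block basis.
    */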
   struct nir_phi_builder *phi_builder;
};

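/* Allocates a deref_node, including trailing storage for one child pointer
 * per struct member or array element of the given type.
 */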
static struct deref_node *
deref_node_create(struct deref_node *parent,
                  const struct glsl_type *type,
                  bool is_direct, void *mem_ctx)
{
   size_t size = sizeof(struct deref_node) +
                 glsl_get_length(type) * sizeof(struct deref_node *);

   struct deref_node *node = rzalloc_size(mem_ctx, size);
   node->type = type;
   node->parent = parent;
   exec_node_init(&node->direct_derefs_link);
   node->is_direct = is_direct;

   return node;
}

/* Returns the deref node associated with the given variable.  This will be
 * the root of the tree representing all of the derefs of the given variable.
 */
static struct deref_node *
get_deref_node_for_var(nir_variable *var, struct lower_variables_state *state)
{
   struct deref_node *node;

   struct hash_entry *var_entry =
      _mesa_hash_table_search(state->deref_var_nodes, var);

   if (var_entry) {
      return var_entry->data;
   } else {
      node = deref_node_create(NULL, var->type, true, state->dead_ctx);
      _mesa_hash_table_insert(state->deref_var_nodes, var, node);
      return node;
   }
}

/* Gets the deref_node for the given deref chain and creates it if it
 * doesn't yet exist.  If the deref is fully-qualified and direct and
 * state->add_to_direct_deref_nodes is true, it will be added to the list
 * of fully-qualified direct derefs.
 */
static struct deref_node *
get_deref_node_recur(nir_deref_instr *deref,
                     struct lower_variables_state *state)
{
   if (deref->deref_type == nir_deref_type_var)
      return get_deref_node_for_var(deref->var, state);

   if (deref->deref_type == nir_deref_type_cast)
      return NULL;

   struct deref_node *parent =
      get_deref_node_recur(nir_deref_instr_parent(deref), state);
   if (parent == NULL)
      return NULL;

   if (parent == UNDEF_NODE)
      return UNDEF_NODE;

   switch (deref->deref_type) {
   case nir_deref_type_struct:
      assert(glsl_type_is_struct_or_ifc(parent->type));
      assert(deref->strct.index < glsl_get_length(parent->type));

      if (parent->children[deref->strct.index] == NULL) {
         parent->children[deref->strct.index] =
            deref_node_create(parent, deref->type, parent->is_direct,
                              state->dead_ctx);
      }

      return parent->children[deref->strct.index];

   case nir_deref_type_array: {
      if (nir_src_is_const(deref->arr.index)) {
         uint32_t index = nir_src_as_uint(deref->arr.index);
         /* This is possible if a loop unrolls and generates an
          * out-of-bounds offset.  We need to handle this at least
          * somewhat gracefully.
          */
         if (index >= glsl_get_length(parent->type))
            return UNDEF_NODE;

         if (parent->children[index] == NULL) {
            parent->children[index] =
               deref_node_create(parent, deref->type, parent->is_direct,
                                 state->dead_ctx);
         }

         return parent->children[index];
      } else {
         if (parent->indirect == NULL) {
            parent->indirect =
               deref_node_create(parent, deref->type, false, state->dead_ctx);
         }

         return parent->indirect;
      }
      break;
   }

   case nir_deref_type_array_wildcard:
      if (parent->wildcard == NULL) {
         parent->wildcard =
            deref_node_create(parent, deref->type, false, state->dead_ctx);
      }

      return parent->wildcard;

   default:
      unreachable("Invalid deref type");
   }
}

static struct deref_node *
get_deref_node(nir_deref_instr *deref, struct lower_variables_state *state)
{
   /* This pass only works on local variables.  Just ignore any derefs with
    * a non-local mode.
    */
   if (!nir_deref_mode_must_be(deref, nir_var_function_temp))
      return NULL;

   struct deref_node *node = get_deref_node_recur(deref, state);
   if (!node)
      return NULL;

   /* Insert the node in the direct derefs list.  We only do this if it's not
    * already in the list and we only bother for deref nodes which are used
    * directly in a load or store.
    */
   if (node != UNDEF_NODE && node->is_direct &&
       state->add_to_direct_deref_nodes &&
       node->direct_derefs_link.next == NULL) {
      nir_deref_path_init(&node->path, deref, state->dead_ctx);
      assert(deref->var != NULL);
      exec_list_push_tail(&state->direct_deref_nodes,
                          &node->direct_derefs_link);
   }

   return node;
}

/* \sa foreach_deref_node_match */
static void
foreach_deref_node_worker(struct deref_node *node, nir_deref_instr **path,
                          void (*cb)(struct deref_node *node,
                                     struct lower_variables_state *state),
                          struct lower_variables_state *state)
{
   if (*path == NULL) {
      cb(node, state);
      return;
   }

   switch ((*path)->deref_type) {
   case nir_deref_type_struct:
      if (node->children[(*path)->strct.index]) {
         foreach_deref_node_worker(node->children[(*path)->strct.index],
                                   path + 1, cb, state);
      }
      return;

   case nir_deref_type_array: {
      uint32_t index = nir_src_as_uint((*path)->arr.index);

      if (node->children[index]) {
         foreach_deref_node_worker(node->children[index],
                                   path + 1, cb, state);
      }

      if (node->wildcard) {
         foreach_deref_node_worker(node->wildcard,
                                   path + 1, cb, state);
      }
      return;
   }

   default:
      unreachable("Unsupported deref type");
   }
}

/* Walks over every "matching" deref_node and calls the callback.  A node
 * is considered to "match" if it either refers to that deref or matches up
 * to a wildcard.  In other words, the following would match a[6].foo[3].bar:
 *
 * a[6].foo[3].bar
 * a[*].foo[3].bar
 * a[6].foo[*].bar
 * a[*].foo[*].bar
 *
 * The given deref must be a full-length and fully qualified (no wildcards
 * or indirects) deref chain.
 */
static void
foreach_deref_node_match(nir_deref_path *path,
                         void (*cb)(struct deref_node *node,
                                    struct lower_variables_state *state),
                         struct lower_variables_state *state)
{
   assert(path->path[0]->deref_type == nir_deref_type_var);
   struct deref_node *node = get_deref_node_for_var(path->path[0]->var, state);

   if (node == NULL)
      return;

   foreach_deref_node_worker(node, &path->path[1], cb, state);
}

/* \sa deref_may_be_aliased */
static bool
path_may_be_aliased_node(struct deref_node *node, nir_deref_instr **path,
                         struct lower_variables_state *state)
{
   if (*path == NULL)
      return false;

   switch ((*path)->deref_type) {
   case nir_deref_type_struct:
      if (node->children[(*path)->strct.index]) {
         return path_may_be_aliased_node(node->children[(*path)->strct.index],
                                         path + 1, state);
      } else {
         return false;
      }

   case nir_deref_type_array: {
      if (!nir_src_is_const((*path)->arr.index))
         return true;

      uint32_t index = nir_src_as_uint((*path)->arr.index);

      /* If there is an indirect at this level, we're aliased. */
      if (node->indirect)
         return true;

      if (node->children[index] &&
          path_may_be_aliased_node(node->children[index],
                                   path + 1, state))
         return true;

      if (node->wildcard &&
          path_may_be_aliased_node(node->wildcard, path + 1, state))
         return true;

      return false;
   }

   default:
      unreachable("Unsupported deref type");
   }
}

/* Returns true if there are no indirects that can ever touch this deref.
 *
 * For example, if the given deref is a[6].foo, then any uses of a[i].foo
 * would cause this to return false, but a[i].bar would not affect it
 * because it's a different structure member.  A var_copy involving
 * a[*].bar also doesn't affect it because that can be lowered to entirely
 * direct load/stores.
 *
 * We only support asking this question about fully-qualified derefs.
 * Obviously, it's pointless to ask this about indirects, but we also
 * rule out wildcards.  Handling wildcard dereferences would involve
 * checking each array index to make sure that there aren't any indirect
 * references.
 */
static bool
path_may_be_aliased(nir_deref_path *path,
                    struct lower_variables_state *state)
{
   assert(path->path[0]->deref_type == nir_deref_type_var);
   nir_variable *var = path->path[0]->var;
   struct deref_node *var_node = get_deref_node_for_var(var, state);

   /* First see if this variable is ever used by anything other than a
    * load/store.  If there's even so much as a cast in the way, we have to
    * assume aliasing and bail.
    */
   if (var_node->has_complex_use)
      return true;

   return path_may_be_aliased_node(var_node, &path->path[1], state);
}

static void
register_complex_use(nir_deref_instr *deref,
                     struct lower_variables_state *state)
{
   assert(deref->deref_type == nir_deref_type_var);
   struct deref_node *node = get_deref_node_for_var(deref->var, state);
   if (node == NULL)
      return;

   node->has_complex_use = true;
}

static void
register_load_instr(nir_intrinsic_instr *load_instr,
                    struct lower_variables_state *state)
{
   nir_deref_instr *deref = nir_src_as_deref(load_instr->src[0]);
   struct deref_node *node = get_deref_node(deref, state);
   if (node == NULL || node == UNDEF_NODE)
      return;

   if (node->loads == NULL)
      node->loads = _mesa_pointer_set_create(state->dead_ctx);

   _mesa_set_add(node->loads, load_instr);
}

static void
register_store_instr(nir_intrinsic_instr *store_instr,
                     struct lower_variables_state *state)
{
   nir_deref_instr *deref = nir_src_as_deref(store_instr->src[0]);
   struct deref_node *node = get_deref_node(deref, state);
   if (node == NULL || node == UNDEF_NODE)
      return;

   if (node->stores == NULL)
      node->stores = _mesa_pointer_set_create(state->dead_ctx);

   _mesa_set_add(node->stores, store_instr);
}

static void
register_copy_instr(nir_intrinsic_instr *copy_instr,
                    struct lower_variables_state *state)
{
   for (unsigned idx = 0; idx < 2; idx++) {
      nir_deref_instr *deref = nir_src_as_deref(copy_instr->src[idx]);
      struct deref_node *node = get_deref_node(deref, state);
      if (node == NULL || node == UNDEF_NODE)
         continue;

      if (node->copies == NULL)
         node->copies = _mesa_pointer_set_create(state->dead_ctx);

      _mesa_set_add(node->copies, copy_instr);
   }
}

static void
register_variable_uses(nir_function_impl *impl,
                       struct lower_variables_state *state)
{
   nir_foreach_block(block, impl) {
      nir_foreach_instr_safe(instr, block) {
         switch (instr->type) {
         case nir_instr_type_deref: {
            nir_deref_instr *deref = nir_instr_as_deref(instr);

            if (deref->deref_type == nir_deref_type_var &&
                nir_deref_instr_has_complex_use(deref))
               register_complex_use(deref, state);

            break;
         }

         case nir_instr_type_intrinsic: {
            nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);

            switch (intrin->intrinsic) {
            case nir_intrinsic_load_deref:
               register_load_instr(intrin, state);
               break;

            case nir_intrinsic_store_deref:
               register_store_instr(intrin, state);
               break;

            case nir_intrinsic_copy_deref:
               register_copy_instr(intrin, state);
               break;

            default:
               continue;
            }
            break;
         }

         default:
            break;
         }
      }
   }
}

/* Walks over all of the copy instructions to or from the given deref_node
 * and lowers them to load/store intrinsics.
 */
static void
lower_copies_to_load_store(struct deref_node *node,
                           struct lower_variables_state *state)
{
   if (!node->copies)
      return;

   nir_builder b;
   nir_builder_init(&b, state->impl);

   set_foreach(node->copies, copy_entry) {
      nir_intrinsic_instr *copy = (void *)copy_entry->key;

      nir_lower_deref_copy_instr(&b, copy);

      for (unsigned i = 0; i < 2; ++i) {
         nir_deref_instr *arg_deref = nir_src_as_deref(copy->src[i]);
         struct deref_node *arg_node = get_deref_node(arg_deref, state);

         /* Only bother removing copy entries for other nodes */
         if (arg_node == NULL || arg_node == node)
            continue;

         struct set_entry *arg_entry = _mesa_set_search(arg_node->copies, copy);
         assert(arg_entry);
         _mesa_set_remove(arg_node->copies, arg_entry);
      }

      nir_instr_remove(&copy->instr);
   }

   node->copies = NULL;
}

/* Performs variable renaming.
 *
 * This algorithm is very similar to the one outlined in "Efficiently
 * Computing Static Single Assignment Form and the Control Dependence
 * Graph" by Cytron et al.  The primary difference is that we only put one
 * SSA def on the stack per block.
 */
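/* As a rough sketch (illustrative pseudo-NIR, not exact syntax):
 *
 *    store_deref(var, x)    deletes the store and registers x (or, for a
 *                           partial writemask, a vec combining x with the
 *                           unwritten channels of the current def) as the
 *                           block's def with the phi builder;
 *    y = load_deref(var)    becomes a mov from whatever def the phi
 *                           builder reports for this block.
 *
 * Loads through out-of-bounds derefs become undefs; out-of-bounds stores
 * are removed.
 */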
static bool
rename_variables(struct lower_variables_state *state)
{
   nir_builder b;
   nir_builder_init(&b, state->impl);

   nir_foreach_block(block, state->impl) {
      nir_foreach_instr_safe(instr, block) {
         if (instr->type != nir_instr_type_intrinsic)
            continue;

         nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);

         switch (intrin->intrinsic) {
         case nir_intrinsic_load_deref: {
            nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
            if (!nir_deref_mode_must_be(deref, nir_var_function_temp))
               continue;

            struct deref_node *node = get_deref_node(deref, state);
            if (node == NULL)
               continue;

            if (node == UNDEF_NODE) {
               /* If we hit this path then we are referencing an invalid
                * value.  Most likely, we unrolled something and are
                * reading past the end of some array.  In any case, this
                * should result in an undefined value.
                */
               nir_ssa_undef_instr *undef =
                  nir_ssa_undef_instr_create(state->shader,
                                             intrin->num_components,
                                             intrin->dest.ssa.bit_size);

               nir_instr_insert_before(&intrin->instr, &undef->instr);
               nir_instr_remove(&intrin->instr);

               nir_ssa_def_rewrite_uses(&intrin->dest.ssa,
                                        &undef->def);
               continue;
            }

            if (!node->lower_to_ssa)
               continue;

            nir_alu_instr *mov = nir_alu_instr_create(state->shader,
                                                      nir_op_mov);
            mov->src[0].src = nir_src_for_ssa(
               nir_phi_builder_value_get_block_def(node->pb_value, block));
            for (unsigned i = intrin->num_components; i < NIR_MAX_VEC_COMPONENTS; i++)
               mov->src[0].swizzle[i] = 0;

            assert(intrin->dest.is_ssa);

            mov->dest.write_mask = (1 << intrin->num_components) - 1;
            nir_ssa_dest_init(&mov->instr, &mov->dest.dest,
                              intrin->num_components,
                              intrin->dest.ssa.bit_size, NULL);

            nir_instr_insert_before(&intrin->instr, &mov->instr);
            nir_instr_remove(&intrin->instr);

            nir_ssa_def_rewrite_uses(&intrin->dest.ssa,
                                     &mov->dest.dest.ssa);
            break;
         }

         case nir_intrinsic_store_deref: {
            nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
            if (!nir_deref_mode_must_be(deref, nir_var_function_temp))
               continue;

            struct deref_node *node = get_deref_node(deref, state);
            if (node == NULL)
               continue;

            assert(intrin->src[1].is_ssa);
            nir_ssa_def *value = intrin->src[1].ssa;

            if (node == UNDEF_NODE) {
               /* Probably an out-of-bounds array store.  That should be a
                * no-op. */
               nir_instr_remove(&intrin->instr);
               continue;
            }

            if (!node->lower_to_ssa)
               continue;

            assert(intrin->num_components ==
                   glsl_get_vector_elements(node->type));

            nir_ssa_def *new_def;
            b.cursor = nir_before_instr(&intrin->instr);

            unsigned wrmask = nir_intrinsic_write_mask(intrin);
            if (wrmask == (1 << intrin->num_components) - 1) {
               /* Whole variable store - just copy the source.  Note that
                * intrin->num_components and value->num_components
                * may differ.
                */
               unsigned swiz[NIR_MAX_VEC_COMPONENTS];
               for (unsigned i = 0; i < NIR_MAX_VEC_COMPONENTS; i++)
                  swiz[i] = i < intrin->num_components ? i : 0;

               new_def = nir_swizzle(&b, value, swiz,
                                     intrin->num_components);
            } else {
               nir_ssa_def *old_def =
                  nir_phi_builder_value_get_block_def(node->pb_value, block);
               /* For writemasked store_var intrinsics, we combine the newly
                * written values with the existing contents of unwritten
                * channels, creating a new SSA value for the whole vector.
                */
               nir_ssa_scalar srcs[NIR_MAX_VEC_COMPONENTS];
               for (unsigned i = 0; i < intrin->num_components; i++) {
                  if (wrmask & (1 << i)) {
                     srcs[i] = nir_get_ssa_scalar(value, i);
                  } else {
                     srcs[i] = nir_get_ssa_scalar(old_def, i);
                  }
               }
               new_def = nir_vec_scalars(&b, srcs, intrin->num_components);
            }

            assert(new_def->num_components == intrin->num_components);

            nir_phi_builder_value_set_block_def(node->pb_value, block, new_def);
            nir_instr_remove(&intrin->instr);
            break;
         }

         default:
            break;
         }
      }
   }

   return true;
}

/** Implements a pass to lower variable uses to SSA values
 *
 * This pass walks the list of instructions and tries to lower as many
 * local variable load/store operations to SSA defs and uses as it can.
 * The process involves four passes:
 *
 *  1) Iterate over all of the instructions and mark where each local
 *     variable deref is used in a load, store, or copy.  While we're at
 *     it, we keep track of all of the fully-qualified (no wildcards) and
 *     fully-direct references we see and store them in the
 *     direct_deref_nodes list.
 *
 *  2) Walk over the list of fully-qualified direct derefs generated in
 *     the previous pass.  For each deref, we determine if it can ever be
 *     aliased, i.e. if there is an indirect reference anywhere that may
 *     refer to it.  If it cannot be aliased, we mark it for lowering to an
 *     SSA value.  At this point, we lower any var_copy instructions that
 *     use the given deref to load/store operations.
 *
 *  3) Walk over the list of derefs we plan to lower to SSA values and
 *     insert phi nodes as needed.
 *
 *  4) Perform "variable renaming" by replacing the load/store instructions
 *     with SSA definitions and SSA uses.
 */
static bool
nir_lower_vars_to_ssa_impl(nir_function_impl *impl)
{
   struct lower_variables_state state;

   state.shader = impl->function->shader;
   state.dead_ctx = ralloc_context(state.shader);
   state.impl = impl;

   state.deref_var_nodes = _mesa_pointer_hash_table_create(state.dead_ctx);
   exec_list_make_empty(&state.direct_deref_nodes);

   /* Build the initial deref structures and direct_deref_nodes list */
   state.add_to_direct_deref_nodes = true;

   register_variable_uses(impl, &state);

   bool progress = false;

   nir_metadata_require(impl, nir_metadata_block_index);

   /* We're about to iterate through direct_deref_nodes.  Don't modify it. */
   state.add_to_direct_deref_nodes = false;

   foreach_list_typed_safe(struct deref_node, node, direct_derefs_link,
                           &state.direct_deref_nodes) {
      nir_deref_path *path = &node->path;

      assert(path->path[0]->deref_type == nir_deref_type_var);

      /* We don't build deref nodes for non-local variables */
      assert(path->path[0]->var->data.mode == nir_var_function_temp);

      if (path_may_be_aliased(path, &state)) {
         exec_node_remove(&node->direct_derefs_link);
         continue;
      }

      node->lower_to_ssa = true;
      progress = true;

      foreach_deref_node_match(path, lower_copies_to_load_store, &state);
   }

   if (!progress) {
      nir_metadata_preserve(impl, nir_metadata_all);
      return false;
   }

   nir_metadata_require(impl, nir_metadata_dominance);

   /* We may have lowered some copy instructions to load/store
    * instructions.  The uses from the copy instructions have already been
    * removed but we need to rescan to ensure that the uses from the newly
    * added load/store instructions are registered.  We need this
    * information for phi node insertion below.
    */
   register_variable_uses(impl, &state);

   state.phi_builder = nir_phi_builder_create(state.impl);

   BITSET_WORD *store_blocks =
      ralloc_array(state.dead_ctx, BITSET_WORD,
                   BITSET_WORDS(state.impl->num_blocks));
   foreach_list_typed(struct deref_node, node, direct_derefs_link,
                      &state.direct_deref_nodes) {
      if (!node->lower_to_ssa)
         continue;

      memset(store_blocks, 0,
             BITSET_WORDS(state.impl->num_blocks) * sizeof(*store_blocks));

      assert(node->path.path[0]->var->constant_initializer == NULL &&
             node->path.path[0]->var->pointer_initializer == NULL);

      if (node->stores) {
         set_foreach(node->stores, store_entry) {
            nir_intrinsic_instr *store =
               (nir_intrinsic_instr *)store_entry->key;
            BITSET_SET(store_blocks, store->instr.block->index);
         }
      }

      node->pb_value =
         nir_phi_builder_add_value(state.phi_builder,
                                   glsl_get_vector_elements(node->type),
                                   glsl_get_bit_size(node->type),
                                   store_blocks);
   }

   rename_variables(&state);

   nir_phi_builder_finish(state.phi_builder);

   nir_metadata_preserve(impl, nir_metadata_block_index |
                               nir_metadata_dominance);

   ralloc_free(state.dead_ctx);

   return progress;
}

bool
nir_lower_vars_to_ssa(nir_shader *shader)
{
   bool progress = false;

   nir_foreach_function(function, shader) {
      if (function->impl)
         progress |= nir_lower_vars_to_ssa_impl(function->impl);
   }

   return progress;
}