1 /*
2  * Copyright © 2010 Luca Barbieri
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21  * DEALINGS IN THE SOFTWARE.
22  */
23 
24 /**
25  * \file lower_jumps.cpp
26  *
27  * This pass lowers jumps (break, continue, and return) to if/else structures.
28  *
29  * It can be asked to:
30  * 1. Pull jumps out of ifs where possible
31  * 2. Remove all "continue"s, replacing them with an "execute flag"
32  * 3. Replace all "break" with a single conditional one at the end of the loop
33  * 4. Replace all "return"s with a single return at the end of the function,
34  *    for the main function and/or other functions
35  *
36  * Applying this pass gives several benefits:
37  * 1. All functions can be inlined.
38  * 2. nv40 and other pre-DX10 chips without "continue" can be supported
39  * 3. nv30 and other pre-DX10 chips with no control flow at all are better
40  *    supported
41  *
42  * Continues are lowered by adding a per-loop "execute flag", initialized to
43  * true, that when cleared inhibits all execution until the end of the loop.
44  *
 * Breaks are lowered to continues, plus setting a "break flag" that is checked
 * at the end of the loop and, when set, triggers the single lowered "break".
47  *
48  * Returns are lowered to breaks/continues, plus adding a "return flag" that
49  * causes loops to break again out of their enclosing loops until all the
50  * loops are exited: then the "execute flag" logic will ignore everything
51  * until the end of the function.
52  *
53  * Note that "continue" and "return" can also be implemented by adding
54  * a dummy loop and using break.
55  * However, this is bad for hardware with limited nesting depth, and
56  * prevents further optimization, and thus is not currently performed.
57  */
58 
59 #include "glsl_types.h"
60 #include <string.h>
61 #include "ir.h"
62 
63 /**
64  * Enum recording the result of analyzing how control flow might exit
65  * an IR node.
66  *
67  * Each possible value of jump_strength indicates a strictly stronger
68  * guarantee on control flow than the previous value.
69  *
70  * The ordering of strengths roughly reflects the way jumps are
71  * lowered: jumps with higher strength tend to be lowered to jumps of
72  * lower strength.  Accordingly, strength is used as a heuristic to
73  * determine which lowering to perform first.
74  *
75  * This enum is also used by get_jump_strength() to categorize
76  * instructions as either break, continue, return, or other.  When
77  * used in this fashion, strength_always_clears_execute_flag is not
78  * used.
79  *
80  * The control flow analysis made by this optimization pass makes two
81  * simplifying assumptions:
82  *
83  * - It ignores discard instructions, since they are lowered by a
84  *   separate pass (lower_discard.cpp).
85  *
86  * - It assumes it is always possible for control to flow from a loop
87  *   to the instruction immediately following it.  Technically, this
88  *   is not true (since all execution paths through the loop might
89  *   jump back to the top, or return from the function).
90  *
 * Both of these simplifying assumptions are safe, since they can never
92  * cause reachable code to be incorrectly classified as unreachable;
93  * they can only do the opposite.
94  */
enum jump_strength
{
   /* Note: the numeric order of these enumerators is significant.  The
    * pass compares strengths with relational operators (e.g. when
    * choosing which jump to lower first, or when merging the analysis
    * of two if-branches), so new values must preserve the
    * weakest-to-strongest ordering described in the comment above.
    */

   /**
    * Analysis has produced no guarantee on how control flow might
    * exit this IR node.  It might fall out the bottom (with or
    * without clearing the execute flag, if present), or it might
    * continue to the top of the innermost enclosing loop, break out
    * of it, or return from the function.
    */
   strength_none,

   /**
    * The only way control can fall out the bottom of this node is
    * through a code path that clears the execute flag.  It might also
    * continue to the top of the innermost enclosing loop, break out
    * of it, or return from the function.
    */
   strength_always_clears_execute_flag,

   /**
    * Control cannot fall out the bottom of this node.  It might
    * continue to the top of the innermost enclosing loop, break out
    * of it, or return from the function.
    */
   strength_continue,

   /**
    * Control cannot fall out the bottom of this node, or continue the
    * top of the innermost enclosing loop.  It can only break out of
    * it or return from the function.
    */
   strength_break,

   /**
    * Control cannot fall out the bottom of this node, continue to the
    * top of the innermost enclosing loop, or break out of it.  It can
    * only return from the function.
    */
   strength_return
};
135 
136 namespace {
137 
138 struct block_record
139 {
140    /* minimum jump strength (of lowered IR, not pre-lowering IR)
141     *
142     * If the block ends with a jump, must be the strength of the jump.
143     * Otherwise, the jump would be dead and have been deleted before)
144     *
145     * If the block doesn't end with a jump, it can be different than strength_none if all paths before it lead to some jump
146     * (e.g. an if with a return in one branch, and a break in the other, while not lowering them)
147     * Note that identical jumps are usually unified though.
148     */
149    jump_strength min_strength;
150 
151    /* can anything clear the execute flag? */
152    bool may_clear_execute_flag;
153 
block_record__anon081a8e010111::block_record154    block_record()
155    {
156       this->min_strength = strength_none;
157       this->may_clear_execute_flag = false;
158    }
159 };
160 
161 struct loop_record
162 {
163    ir_function_signature* signature;
164    ir_loop* loop;
165 
166    /* used to avoid lowering the break used to represent lowered breaks */
167    unsigned nesting_depth;
168    bool in_if_at_the_end_of_the_loop;
169 
170    bool may_set_return_flag;
171 
172    ir_variable* break_flag;
173    ir_variable* execute_flag; /* cleared to emulate continue */
174 
loop_record__anon081a8e010111::loop_record175    loop_record(ir_function_signature* p_signature = 0, ir_loop* p_loop = 0)
176    {
177       this->signature = p_signature;
178       this->loop = p_loop;
179       this->nesting_depth = 0;
180       this->in_if_at_the_end_of_the_loop = false;
181       this->may_set_return_flag = false;
182       this->break_flag = 0;
183       this->execute_flag = 0;
184    }
185 
get_execute_flag__anon081a8e010111::loop_record186    ir_variable* get_execute_flag()
187    {
188       /* also supported for the "function loop" */
189       if(!this->execute_flag) {
190          exec_list& list = this->loop ? this->loop->body_instructions : signature->body;
191          this->execute_flag = new(this->signature) ir_variable(glsl_type::bool_type, "execute_flag", ir_var_temporary, glsl_precision_low);
192          list.push_head(new(this->signature) ir_assignment(new(this->signature) ir_dereference_variable(execute_flag), new(this->signature) ir_constant(true), 0));
193          list.push_head(this->execute_flag);
194       }
195       return this->execute_flag;
196    }
197 
get_break_flag__anon081a8e010111::loop_record198    ir_variable* get_break_flag()
199    {
200       assert(this->loop);
201       if(!this->break_flag) {
202          this->break_flag = new(this->signature) ir_variable(glsl_type::bool_type, "break_flag", ir_var_temporary, glsl_precision_low);
203          this->loop->insert_before(this->break_flag);
204          this->loop->insert_before(new(this->signature) ir_assignment(new(this->signature) ir_dereference_variable(break_flag), new(this->signature) ir_constant(false), 0));
205       }
206       return this->break_flag;
207    }
208 };
209 
210 struct function_record
211 {
212    ir_function_signature* signature;
213    ir_variable* return_flag; /* used to break out of all loops and then jump to the return instruction */
214    ir_variable* return_value;
215    bool lower_return;
216    unsigned nesting_depth;
217 
function_record__anon081a8e010111::function_record218    function_record(ir_function_signature* p_signature = 0,
219                    bool lower_return = false)
220    {
221       this->signature = p_signature;
222       this->return_flag = 0;
223       this->return_value = 0;
224       this->nesting_depth = 0;
225       this->lower_return = lower_return;
226    }
227 
get_return_flag__anon081a8e010111::function_record228    ir_variable* get_return_flag()
229    {
230       if(!this->return_flag) {
231          this->return_flag = new(this->signature) ir_variable(glsl_type::bool_type, "return_flag", ir_var_temporary, glsl_precision_low);
232          this->signature->body.push_head(new(this->signature) ir_assignment(new(this->signature) ir_dereference_variable(return_flag), new(this->signature) ir_constant(false), 0));
233          this->signature->body.push_head(this->return_flag);
234       }
235       return this->return_flag;
236    }
237 
get_return_value__anon081a8e010111::function_record238    ir_variable* get_return_value()
239    {
240       if(!this->return_value) {
241          assert(!this->signature->return_type->is_void());
242          return_value = new(this->signature) ir_variable(this->signature->return_type, "return_value", ir_var_temporary, this->signature->precision);
243          this->signature->body.push_head(this->return_value);
244       }
245       return this->return_value;
246    }
247 };
248 
249 struct ir_lower_jumps_visitor : public ir_control_flow_visitor {
250    /* Postconditions: on exit of any visit() function:
251     *
252     * ANALYSIS: this->block.min_strength,
253     * this->block.may_clear_execute_flag, and
254     * this->loop.may_set_return_flag are updated to reflect the
255     * characteristics of the visited statement.
256     *
257     * DEAD_CODE_ELIMINATION: If this->block.min_strength is not
258     * strength_none, the visited node is at the end of its exec_list.
259     * In other words, any unreachable statements that follow the
260     * visited statement in its exec_list have been removed.
261     *
262     * CONTAINED_JUMPS_LOWERED: If the visited statement contains other
263     * statements, then should_lower_jump() is false for all of the
264     * return, break, or continue statements it contains.
265     *
266     * Note that visiting a jump does not lower it.  That is the
267     * responsibility of the statement (or function signature) that
268     * contains the jump.
269     */
270 
271    bool progress;
272 
273    struct function_record function;
274    struct loop_record loop;
275    struct block_record block;
276 
277    bool pull_out_jumps;
278    bool lower_continue;
279    bool lower_break;
280    bool lower_sub_return;
281    bool lower_main_return;
282 
ir_lower_jumps_visitor__anon081a8e010111::ir_lower_jumps_visitor283    ir_lower_jumps_visitor()
284       : progress(false),
285         pull_out_jumps(false),
286         lower_continue(false),
287         lower_break(false),
288         lower_sub_return(false),
289         lower_main_return(false)
290    {
291    }
292 
truncate_after_instruction__anon081a8e010111::ir_lower_jumps_visitor293    void truncate_after_instruction(exec_node *ir)
294    {
295       if (!ir)
296          return;
297 
298       while (!ir->get_next()->is_tail_sentinel()) {
299          ((ir_instruction *)ir->get_next())->remove();
300          this->progress = true;
301       }
302    }
303 
move_outer_block_inside__anon081a8e010111::ir_lower_jumps_visitor304    void move_outer_block_inside(ir_instruction *ir, exec_list *inner_block)
305    {
306       while (!ir->get_next()->is_tail_sentinel()) {
307          ir_instruction *move_ir = (ir_instruction *)ir->get_next();
308 
309          move_ir->remove();
310          inner_block->push_tail(move_ir);
311       }
312    }
313 
314    /**
315     * Insert the instructions necessary to lower a return statement,
316     * before the given return instruction.
317     */
insert_lowered_return__anon081a8e010111::ir_lower_jumps_visitor318    void insert_lowered_return(ir_return *ir)
319    {
320       ir_variable* return_flag = this->function.get_return_flag();
321       if(!this->function.signature->return_type->is_void()) {
322          ir_variable* return_value = this->function.get_return_value();
323          ir->insert_before(
324             new(ir) ir_assignment(
325                new (ir) ir_dereference_variable(return_value),
326                ir->value));
327       }
328       ir->insert_before(
329          new(ir) ir_assignment(
330             new (ir) ir_dereference_variable(return_flag),
331             new (ir) ir_constant(true)));
332       this->loop.may_set_return_flag = true;
333    }
334 
335    /**
336     * If the given instruction is a return, lower it to instructions
337     * that store the return value (if there is one), set the return
338     * flag, and then break.
339     *
340     * It is safe to pass NULL to this function.
341     */
lower_return_unconditionally__anon081a8e010111::ir_lower_jumps_visitor342    void lower_return_unconditionally(ir_instruction *ir)
343    {
344       if (get_jump_strength(ir) != strength_return) {
345          return;
346       }
347       insert_lowered_return((ir_return*)ir);
348       ir->replace_with(new(ir) ir_loop_jump(ir_loop_jump::jump_break));
349    }
350 
351    /**
352     * Create the necessary instruction to replace a break instruction.
353     */
create_lowered_break__anon081a8e010111::ir_lower_jumps_visitor354    ir_instruction *create_lowered_break()
355    {
356       void *ctx = this->function.signature;
357       return new(ctx) ir_assignment(
358           new(ctx) ir_dereference_variable(this->loop.get_break_flag()),
359           new(ctx) ir_constant(true),
360           0);
361    }
362 
363    /**
364     * If the given instruction is a break, lower it to an instruction
365     * that sets the break flag, without consulting
366     * should_lower_jump().
367     *
368     * It is safe to pass NULL to this function.
369     */
lower_break_unconditionally__anon081a8e010111::ir_lower_jumps_visitor370    void lower_break_unconditionally(ir_instruction *ir)
371    {
372       if (get_jump_strength(ir) != strength_break) {
373          return;
374       }
375       ir->replace_with(create_lowered_break());
376    }
377 
378    /**
379     * If the block ends in a conditional or unconditional break, lower
380     * it, even though should_lower_jump() says it needn't be lowered.
381     */
lower_final_breaks__anon081a8e010111::ir_lower_jumps_visitor382    void lower_final_breaks(exec_list *block)
383    {
384       ir_instruction *ir = (ir_instruction *) block->get_tail();
385       lower_break_unconditionally(ir);
386       ir_if *ir_if = ir->as_if();
387       if (ir_if) {
388           lower_break_unconditionally(
389               (ir_instruction *) ir_if->then_instructions.get_tail());
390           lower_break_unconditionally(
391               (ir_instruction *) ir_if->else_instructions.get_tail());
392       }
393    }
394 
visit__anon081a8e010111::ir_lower_jumps_visitor395    virtual void visit(class ir_loop_jump * ir)
396    {
397       /* Eliminate all instructions after each one, since they are
398        * unreachable.  This satisfies the DEAD_CODE_ELIMINATION
399        * postcondition.
400        */
401       truncate_after_instruction(ir);
402 
403       /* Set this->block.min_strength based on this instruction.  This
404        * satisfies the ANALYSIS postcondition.  It is not necessary to
405        * update this->block.may_clear_execute_flag or
406        * this->loop.may_set_return_flag, because an unlowered jump
407        * instruction can't change any flags.
408        */
409       this->block.min_strength = ir->is_break() ? strength_break : strength_continue;
410 
411       /* The CONTAINED_JUMPS_LOWERED postcondition is already
412        * satisfied, because jump statements can't contain other
413        * statements.
414        */
415    }
416 
visit__anon081a8e010111::ir_lower_jumps_visitor417    virtual void visit(class ir_return * ir)
418    {
419       /* Eliminate all instructions after each one, since they are
420        * unreachable.  This satisfies the DEAD_CODE_ELIMINATION
421        * postcondition.
422        */
423       truncate_after_instruction(ir);
424 
425       /* Set this->block.min_strength based on this instruction.  This
426        * satisfies the ANALYSIS postcondition.  It is not necessary to
427        * update this->block.may_clear_execute_flag or
428        * this->loop.may_set_return_flag, because an unlowered return
429        * instruction can't change any flags.
430        */
431       this->block.min_strength = strength_return;
432 
433       /* The CONTAINED_JUMPS_LOWERED postcondition is already
434        * satisfied, because jump statements can't contain other
435        * statements.
436        */
437    }
438 
visit__anon081a8e010111::ir_lower_jumps_visitor439    virtual void visit(class ir_discard * ir)
440    {
441       /* Nothing needs to be done.  The ANALYSIS and
442        * DEAD_CODE_ELIMINATION postconditions are already satisfied,
443        * because discard statements are ignored by this optimization
444        * pass.  The CONTAINED_JUMPS_LOWERED postcondition is already
445        * satisfied, because discard statements can't contain other
446        * statements.
447        */
448       (void) ir;
449    }
450 
visit__anon081a8e010111::ir_lower_jumps_visitor451    virtual void visit(class ir_precision_statement * ir)
452    {
453       /* Nothing needs to be done. */
454    }
455 
visit__anon081a8e010111::ir_lower_jumps_visitor456    virtual void visit(class ir_typedecl_statement * ir)
457    {
458       /* Nothing needs to be done. */
459    }
460 
get_jump_strength__anon081a8e010111::ir_lower_jumps_visitor461    enum jump_strength get_jump_strength(ir_instruction* ir)
462    {
463       if(!ir)
464          return strength_none;
465       else if(ir->ir_type == ir_type_loop_jump) {
466          if(((ir_loop_jump*)ir)->is_break())
467             return strength_break;
468          else
469             return strength_continue;
470       } else if(ir->ir_type == ir_type_return)
471          return strength_return;
472       else
473          return strength_none;
474    }
475 
should_lower_jump__anon081a8e010111::ir_lower_jumps_visitor476    bool should_lower_jump(ir_jump* ir)
477    {
478       unsigned strength = get_jump_strength(ir);
479       bool lower;
480       switch(strength)
481       {
482       case strength_none:
483          lower = false; /* don't change this, code relies on it */
484          break;
485       case strength_continue:
486          lower = lower_continue;
487          break;
488       case strength_break:
489          assert(this->loop.loop);
490          /* never lower "canonical break" */
491          if(ir->get_next()->is_tail_sentinel() && (this->loop.nesting_depth == 0
492                || (this->loop.nesting_depth == 1 && this->loop.in_if_at_the_end_of_the_loop)))
493             lower = false;
494          else
495             lower = lower_break;
496          break;
497       case strength_return:
498          /* never lower return at the end of a this->function */
499          if(this->function.nesting_depth == 0 && ir->get_next()->is_tail_sentinel())
500             lower = false;
501          else
502             lower = this->function.lower_return;
503          break;
504       }
505       return lower;
506    }
507 
visit_block__anon081a8e010111::ir_lower_jumps_visitor508    block_record visit_block(exec_list* list)
509    {
510       /* Note: since visiting a node may change that node's next
511        * pointer, we can't use visit_exec_list(), because
512        * visit_exec_list() caches the node's next pointer before
513        * visiting it.  So we use foreach_in_list() instead.
514        *
515        * foreach_in_list() isn't safe if the node being visited gets
516        * removed, but fortunately this visitor doesn't do that.
517        */
518 
519       block_record saved_block = this->block;
520       this->block = block_record();
521       foreach_in_list(ir_instruction, node, list) {
522          node->accept(this);
523       }
524       block_record ret = this->block;
525       this->block = saved_block;
526       return ret;
527    }
528 
visit__anon081a8e010111::ir_lower_jumps_visitor529    virtual void visit(ir_if *ir)
530    {
531       if(this->loop.nesting_depth == 0 && ir->get_next()->is_tail_sentinel())
532          this->loop.in_if_at_the_end_of_the_loop = true;
533 
534       ++this->function.nesting_depth;
535       ++this->loop.nesting_depth;
536 
537       block_record block_records[2];
538       ir_jump* jumps[2];
539 
540       /* Recursively lower nested jumps.  This satisfies the
541        * CONTAINED_JUMPS_LOWERED postcondition, except in the case of
542        * unconditional jumps at the end of ir->then_instructions and
543        * ir->else_instructions, which are handled below.
544        */
545       block_records[0] = visit_block(&ir->then_instructions);
546       block_records[1] = visit_block(&ir->else_instructions);
547 
548 retry: /* we get here if we put code after the if inside a branch */
549 
550       /* Determine which of ir->then_instructions and
551        * ir->else_instructions end with an unconditional jump.
552        */
553       for(unsigned i = 0; i < 2; ++i) {
554          exec_list& list = i ? ir->else_instructions : ir->then_instructions;
555          jumps[i] = 0;
556          if(!list.is_empty() && get_jump_strength((ir_instruction*)list.get_tail()))
557             jumps[i] = (ir_jump*)list.get_tail();
558       }
559 
560       /* Loop until we have satisfied the CONTAINED_JUMPS_LOWERED
561        * postcondition by lowering jumps in both then_instructions and
562        * else_instructions.
563        */
564       for(;;) {
565          /* Determine the types of the jumps that terminate
566           * ir->then_instructions and ir->else_instructions.
567           */
568          jump_strength jump_strengths[2];
569 
570          for(unsigned i = 0; i < 2; ++i) {
571             if(jumps[i]) {
572                jump_strengths[i] = block_records[i].min_strength;
573                assert(jump_strengths[i] == get_jump_strength(jumps[i]));
574             } else
575                jump_strengths[i] = strength_none;
576          }
577 
578          /* If both code paths end in a jump, and the jumps are the
579           * same, and we are pulling out jumps, replace them with a
580           * single jump that comes after the if instruction.  The new
581           * jump will be visited next, and it will be lowered if
582           * necessary by the loop or conditional that encloses it.
583           */
584          if(pull_out_jumps && jump_strengths[0] == jump_strengths[1]) {
585             bool unify = true;
586             if(jump_strengths[0] == strength_continue)
587                ir->insert_after(new(ir) ir_loop_jump(ir_loop_jump::jump_continue));
588             else if(jump_strengths[0] == strength_break)
589                ir->insert_after(new(ir) ir_loop_jump(ir_loop_jump::jump_break));
590             /* FINISHME: unify returns with identical expressions */
591             else if(jump_strengths[0] == strength_return && this->function.signature->return_type->is_void())
592                ir->insert_after(new(ir) ir_return(NULL));
593 	    else
594 	       unify = false;
595 
596             if(unify) {
597                jumps[0]->remove();
598                jumps[1]->remove();
599                this->progress = true;
600 
601                /* Update jumps[] to reflect the fact that the jumps
602                 * are gone, and update block_records[] to reflect the
603                 * fact that control can now flow to the next
604                 * instruction.
605                 */
606                jumps[0] = 0;
607                jumps[1] = 0;
608                block_records[0].min_strength = strength_none;
609                block_records[1].min_strength = strength_none;
610 
611                /* The CONTAINED_JUMPS_LOWERED postcondition is now
612                 * satisfied, so we can break out of the loop.
613                 */
614                break;
615             }
616          }
617 
618          /* lower a jump: if both need to lowered, start with the strongest one, so that
619           * we might later unify the lowered version with the other one
620           */
621          bool should_lower[2];
622          for(unsigned i = 0; i < 2; ++i)
623             should_lower[i] = should_lower_jump(jumps[i]);
624 
625          int lower;
626          if(should_lower[1] && should_lower[0])
627             lower = jump_strengths[1] > jump_strengths[0];
628          else if(should_lower[0])
629             lower = 0;
630          else if(should_lower[1])
631             lower = 1;
632          else
633             /* Neither code path ends in a jump that needs to be
634              * lowered, so the CONTAINED_JUMPS_LOWERED postcondition
635              * is satisfied and we can break out of the loop.
636              */
637             break;
638 
639          if(jump_strengths[lower] == strength_return) {
640             /* To lower a return, we create a return flag (if the
641              * function doesn't have one already) and add instructions
642              * that: 1. store the return value (if this function has a
643              * non-void return) and 2. set the return flag
644              */
645             insert_lowered_return((ir_return*)jumps[lower]);
646             if(this->loop.loop) {
647                /* If we are in a loop, replace the return instruction
648                 * with a break instruction, and then loop so that the
649                 * break instruction can be lowered if necessary.
650                 */
651                ir_loop_jump* lowered = 0;
652                lowered = new(ir) ir_loop_jump(ir_loop_jump::jump_break);
653                /* Note: we must update block_records and jumps to
654                 * reflect the fact that the control path has been
655                 * altered from a return to a break.
656                 */
657                block_records[lower].min_strength = strength_break;
658                jumps[lower]->replace_with(lowered);
659                jumps[lower] = lowered;
660             } else {
661                /* If we are not in a loop, we then proceed as we would
662                 * for a continue statement (set the execute flag to
663                 * false to prevent the rest of the function from
664                 * executing).
665                 */
666                goto lower_continue;
667             }
668             this->progress = true;
669          } else if(jump_strengths[lower] == strength_break) {
670             /* To lower a break, we create a break flag (if the loop
671              * doesn't have one already) and add an instruction that
672              * sets it.
673              *
674              * Then we proceed as we would for a continue statement
675              * (set the execute flag to false to prevent the rest of
676              * the loop body from executing).
677              *
678              * The visit() function for the loop will ensure that the
679              * break flag is checked after executing the loop body.
680              */
681             jumps[lower]->insert_before(create_lowered_break());
682             goto lower_continue;
683          } else if(jump_strengths[lower] == strength_continue) {
684 lower_continue:
685             /* To lower a continue, we create an execute flag (if the
686              * loop doesn't have one already) and replace the continue
687              * with an instruction that clears it.
688              *
689              * Note that this code path gets exercised when lowering
690              * return statements that are not inside a loop, so
691              * this->loop must be initialized even outside of loops.
692              */
693             ir_variable* execute_flag = this->loop.get_execute_flag();
694             jumps[lower]->replace_with(new(ir) ir_assignment(new (ir) ir_dereference_variable(execute_flag), new (ir) ir_constant(false), 0));
695             /* Note: we must update block_records and jumps to reflect
696              * the fact that the control path has been altered to an
697              * instruction that clears the execute flag.
698              */
699             jumps[lower] = 0;
700             block_records[lower].min_strength = strength_always_clears_execute_flag;
701             block_records[lower].may_clear_execute_flag = true;
702             this->progress = true;
703 
704             /* Let the loop run again, in case the other branch of the
705              * if needs to be lowered too.
706              */
707          }
708       }
709 
710       /* move out a jump out if possible */
711       if(pull_out_jumps) {
712          /* If one of the branches ends in a jump, and control cannot
713           * fall out the bottom of the other branch, then we can move
714           * the jump after the if.
715           *
716           * Set move_out to the branch we are moving a jump out of.
717           */
718          int move_out = -1;
719          if(jumps[0] && block_records[1].min_strength >= strength_continue)
720             move_out = 0;
721          else if(jumps[1] && block_records[0].min_strength >= strength_continue)
722             move_out = 1;
723 
724          if(move_out >= 0)
725          {
726             jumps[move_out]->remove();
727             ir->insert_after(jumps[move_out]);
728             /* Note: we must update block_records and jumps to reflect
729              * the fact that the jump has been moved out of the if.
730              */
731             jumps[move_out] = 0;
732             block_records[move_out].min_strength = strength_none;
733             this->progress = true;
734          }
735       }
736 
737       /* Now satisfy the ANALYSIS postcondition by setting
738        * this->block.min_strength and
739        * this->block.may_clear_execute_flag based on the
740        * characteristics of the two branches.
741        */
742       if(block_records[0].min_strength < block_records[1].min_strength)
743          this->block.min_strength = block_records[0].min_strength;
744       else
745          this->block.min_strength = block_records[1].min_strength;
746       this->block.may_clear_execute_flag = this->block.may_clear_execute_flag || block_records[0].may_clear_execute_flag || block_records[1].may_clear_execute_flag;
747 
748       /* Now we need to clean up the instructions that follow the
749        * if.
750        *
751        * If those instructions are unreachable, then satisfy the
752        * DEAD_CODE_ELIMINATION postcondition by eliminating them.
753        * Otherwise that postcondition is already satisfied.
754        */
755       if(this->block.min_strength)
756          truncate_after_instruction(ir);
757       else if(this->block.may_clear_execute_flag)
758       {
759          /* If the "if" instruction might clear the execute flag, then
760           * we need to guard any instructions that follow so that they
761           * are only executed if the execute flag is set.
762           *
763           * If one of the branches of the "if" always clears the
764           * execute flag, and the other branch never clears it, then
765           * this is easy: just move all the instructions following the
766           * "if" into the branch that never clears it.
767           */
768          int move_into = -1;
769          if(block_records[0].min_strength && !block_records[1].may_clear_execute_flag)
770             move_into = 1;
771          else if(block_records[1].min_strength && !block_records[0].may_clear_execute_flag)
772             move_into = 0;
773 
774          if(move_into >= 0) {
775             assert(!block_records[move_into].min_strength && !block_records[move_into].may_clear_execute_flag); /* otherwise, we just truncated */
776 
777             exec_list* list = move_into ? &ir->else_instructions : &ir->then_instructions;
778             exec_node* next = ir->get_next();
779             if(!next->is_tail_sentinel()) {
780                move_outer_block_inside(ir, list);
781 
782                /* If any instructions moved, then we need to visit
783                 * them (since they are now inside the "if").  Since
784                 * block_records[move_into] is in its default state
785                 * (see assertion above), we can safely replace
786                 * block_records[move_into] with the result of this
787                 * analysis.
788                 */
789                exec_list list;
790                list.head = next;
791                block_records[move_into] = visit_block(&list);
792 
793                /*
794                 * Then we need to re-start our jump lowering, since one
795                 * of the instructions we moved might be a jump that
796                 * needs to be lowered.
797                 */
798                this->progress = true;
799                goto retry;
800             }
801          } else {
802             /* If we get here, then the simple case didn't apply; we
803              * need to actually guard the instructions that follow.
804              *
805              * To avoid creating unnecessarily-deep nesting, first
806              * look through the instructions that follow and unwrap
807              * any instructions that that are already wrapped in the
808              * appropriate guard.
809              */
810             ir_instruction* ir_after;
811             for(ir_after = (ir_instruction*)ir->get_next(); !ir_after->is_tail_sentinel();)
812             {
813                ir_if* ir_if = ir_after->as_if();
814                if(ir_if && ir_if->else_instructions.is_empty()) {
815                   ir_dereference_variable* ir_if_cond_deref = ir_if->condition->as_dereference_variable();
816                   if(ir_if_cond_deref && ir_if_cond_deref->var == this->loop.execute_flag) {
817                      ir_instruction* ir_next = (ir_instruction*)ir_after->get_next();
818                      ir_after->insert_before(&ir_if->then_instructions);
819                      ir_after->remove();
820                      ir_after = ir_next;
821                      continue;
822                   }
823                }
824                ir_after = (ir_instruction*)ir_after->get_next();
825 
826                /* only set this if we find any unprotected instruction */
827                this->progress = true;
828             }
829 
830             /* Then, wrap all the instructions that follow in a single
831              * guard.
832              */
833             if(!ir->get_next()->is_tail_sentinel()) {
834                assert(this->loop.execute_flag);
835                ir_if* if_execute = new(ir) ir_if(new(ir) ir_dereference_variable(this->loop.execute_flag));
836                move_outer_block_inside(ir, &if_execute->then_instructions);
837                ir->insert_after(if_execute);
838             }
839          }
840       }
841       --this->loop.nesting_depth;
842       --this->function.nesting_depth;
843    }
844 
visit__anon081a8e010111::ir_lower_jumps_visitor845    virtual void visit(ir_loop *ir)
846    {
847       /* Visit the body of the loop, with a fresh data structure in
848        * this->loop so that the analysis we do here won't bleed into
849        * enclosing loops.
850        *
851        * We assume that all code after a loop is reachable from the
852        * loop (see comments on enum jump_strength), so the
853        * DEAD_CODE_ELIMINATION postcondition is automatically
854        * satisfied, as is the block.min_strength portion of the
855        * ANALYSIS postcondition.
856        *
857        * The block.may_clear_execute_flag portion of the ANALYSIS
858        * postcondition is automatically satisfied because execute
859        * flags do not propagate outside of loops.
860        *
861        * The loop.may_set_return_flag portion of the ANALYSIS
862        * postcondition is handled below.
863        */
864       ++this->function.nesting_depth;
865       loop_record saved_loop = this->loop;
866       this->loop = loop_record(this->function.signature, ir);
867 
868       /* Recursively lower nested jumps.  This satisfies the
869        * CONTAINED_JUMPS_LOWERED postcondition, except in the case of
870        * an unconditional continue or return at the bottom of the
871        * loop, which are handled below.
872        */
873       block_record body = visit_block(&ir->body_instructions);
874 
875       /* If the loop ends in an unconditional continue, eliminate it
876        * because it is redundant.
877        */
878       ir_instruction *ir_last
879          = (ir_instruction *) ir->body_instructions.get_tail();
880       if (get_jump_strength(ir_last) == strength_continue) {
881          ir_last->remove();
882       }
883 
884       /* If the loop ends in an unconditional return, and we are
885        * lowering returns, lower it.
886        */
887       if (this->function.lower_return)
888          lower_return_unconditionally(ir_last);
889 
890       if(body.min_strength >= strength_break) {
891          /* FINISHME: If the min_strength of the loop body is
892           * strength_break or strength_return, that means that it
893           * isn't a loop at all, since control flow always leaves the
894           * body of the loop via break or return.  In principle the
895           * loop could be eliminated in this case.  This optimization
896           * is not implemented yet.
897           */
898       }
899 
900       if(this->loop.break_flag) {
901          /* We only get here if we are lowering breaks */
902          assert (lower_break);
903 
904          /* If a break flag was generated while visiting the body of
905           * the loop, then at least one break was lowered, so we need
906           * to generate an if statement at the end of the loop that
907           * does a "break" if the break flag is set.  The break we
908           * generate won't violate the CONTAINED_JUMPS_LOWERED
909           * postcondition, because should_lower_jump() always returns
910           * false for a break that happens at the end of a loop.
911           *
912           * However, if the loop already ends in a conditional or
913           * unconditional break, then we need to lower that break,
914           * because it won't be at the end of the loop anymore.
915           */
916          lower_final_breaks(&ir->body_instructions);
917 
918          ir_if* break_if = new(ir) ir_if(new(ir) ir_dereference_variable(this->loop.break_flag));
919          break_if->then_instructions.push_tail(new(ir) ir_loop_jump(ir_loop_jump::jump_break));
920          ir->body_instructions.push_tail(break_if);
921       }
922 
923       /* If the body of the loop may set the return flag, then at
924        * least one return was lowered to a break, so we need to ensure
925        * that the return flag is checked after the body of the loop is
926        * executed.
927        */
928       if(this->loop.may_set_return_flag) {
929          assert(this->function.return_flag);
930          /* Generate the if statement to check the return flag */
931          ir_if* return_if = new(ir) ir_if(new(ir) ir_dereference_variable(this->function.return_flag));
932          /* Note: we also need to propagate the knowledge that the
933           * return flag may get set to the outer context.  This
934           * satisfies the loop.may_set_return_flag part of the
935           * ANALYSIS postcondition.
936           */
937          saved_loop.may_set_return_flag = true;
938          if(saved_loop.loop)
939             /* If this loop is nested inside another one, then the if
940              * statement that we generated should break out of that
941              * loop if the return flag is set.  Caller will lower that
942              * break statement if necessary.
943              */
944             return_if->then_instructions.push_tail(new(ir) ir_loop_jump(ir_loop_jump::jump_break));
945          else
946             /* Otherwise, all we need to do is ensure that the
947              * instructions that follow are only executed if the
948              * return flag is clear.  We can do that by moving those
949              * instructions into the else clause of the generated if
950              * statement.
951              */
952             move_outer_block_inside(ir, &return_if->else_instructions);
953          ir->insert_after(return_if);
954       }
955 
956       this->loop = saved_loop;
957       --this->function.nesting_depth;
958    }
959 
visit__anon081a8e010111::ir_lower_jumps_visitor960    virtual void visit(ir_function_signature *ir)
961    {
962       /* these are not strictly necessary */
963       assert(!this->function.signature);
964       assert(!this->loop.loop);
965 
966       bool lower_return;
967       if (strcmp(ir->function_name(), "main") == 0)
968          lower_return = lower_main_return;
969       else
970          lower_return = lower_sub_return;
971 
972       function_record saved_function = this->function;
973       loop_record saved_loop = this->loop;
974       this->function = function_record(ir, lower_return);
975       this->loop = loop_record(ir);
976 
977       assert(!this->loop.loop);
978 
979       /* Visit the body of the function to lower any jumps that occur
980        * in it, except possibly an unconditional return statement at
981        * the end of it.
982        */
983       visit_block(&ir->body);
984 
985       /* If the body ended in an unconditional return of non-void,
986        * then we don't need to lower it because it's the one canonical
987        * return.
988        *
989        * If the body ended in a return of void, eliminate it because
990        * it is redundant.
991        */
992       if (ir->return_type->is_void() &&
993           get_jump_strength((ir_instruction *) ir->body.get_tail())) {
994          ir_jump *jump = (ir_jump *) ir->body.get_tail();
995          assert (jump->ir_type == ir_type_return);
996          jump->remove();
997       }
998 
999       if(this->function.return_value)
1000          ir->body.push_tail(new(ir) ir_return(new (ir) ir_dereference_variable(this->function.return_value)));
1001 
1002       this->loop = saved_loop;
1003       this->function = saved_function;
1004    }
1005 
   virtual void visit(class ir_function * ir)
   {
      /* A function is just a container for its signatures; lower
       * jumps in each signature in turn.
       */
      visit_block(&ir->signatures);
   }
1010 };
1011 
1012 } /* anonymous namespace */
1013 
1014 bool
do_lower_jumps(exec_list * instructions,bool pull_out_jumps,bool lower_sub_return,bool lower_main_return,bool lower_continue,bool lower_break)1015 do_lower_jumps(exec_list *instructions, bool pull_out_jumps, bool lower_sub_return, bool lower_main_return, bool lower_continue, bool lower_break)
1016 {
1017    ir_lower_jumps_visitor v;
1018    v.pull_out_jumps = pull_out_jumps;
1019    v.lower_continue = lower_continue;
1020    v.lower_break = lower_break;
1021    v.lower_sub_return = lower_sub_return;
1022    v.lower_main_return = lower_main_return;
1023 
1024    bool progress_ever = false;
1025    do {
1026       v.progress = false;
1027       visit_exec_list(instructions, &v);
1028       progress_ever = v.progress || progress_ever;
1029    } while (v.progress);
1030 
1031    return progress_ever;
1032 }
1033