1 /* Nested function decomposition for GIMPLE.
2    Copyright (C) 2004-2013 Free Software Foundation, Inc.
3 
4    This file is part of GCC.
5 
6    GCC is free software; you can redistribute it and/or modify
7    it under the terms of the GNU General Public License as published by
8    the Free Software Foundation; either version 3, or (at your option)
9    any later version.
10 
11    GCC is distributed in the hope that it will be useful,
12    but WITHOUT ANY WARRANTY; without even the implied warranty of
13    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14    GNU General Public License for more details.
15 
16    You should have received a copy of the GNU General Public License
17    along with GCC; see the file COPYING3.  If not see
18    <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "tm_p.h"
26 #include "function.h"
27 #include "tree-dump.h"
28 #include "tree-inline.h"
29 #include "gimple.h"
30 #include "tree-iterator.h"
31 #include "tree-flow.h"
32 #include "cgraph.h"
33 #include "expr.h"	/* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL.  */
34 #include "langhooks.h"
35 #include "pointer-set.h"
36 
37 
38 /* The object of this pass is to lower the representation of a set of nested
39    functions in order to expose all of the gory details of the various
40    nonlocal references.  We want to do this sooner rather than later, in
41    order to give us more freedom in emitting all of the functions in question.
42 
43    Back in olden times, when gcc was young, we developed an insanely
44    complicated scheme whereby variables which were referenced nonlocally
45    were forced to live in the stack of the declaring function, and then
46    the nested functions magically discovered where these variables were
47    placed.  In order for this scheme to function properly, it required
48    that the outer function be partially expanded, then we switch to
49    compiling the inner function, and once done with those we switch back
50    to compiling the outer function.  Such delicate ordering requirements
51    makes it difficult to do whole translation unit optimizations
52    involving such functions.
53 
54    The implementation here is much more direct.  Everything that can be
55    referenced by an inner function is a member of an explicitly created
56    structure herein called the "nonlocal frame struct".  The incoming
57    static chain for a nested function is a pointer to this struct in
58    the parent.  In this way, we settle on known offsets from a known
59    base, and so are decoupled from the logic that places objects in the
60    function's stack frame.  More importantly, we don't have to wait for
61    that to happen -- since the compilation of the inner function is no
62    longer tied to a real stack frame, the nonlocal frame struct can be
63    allocated anywhere.  Which means that the outer function is now
64    inlinable.
65 
66    Theory of operation here is very simple.  Iterate over all the
67    statements in all the functions (depth first) several times,
68    allocating structures and fields on demand.  In general we want to
69    examine inner functions first, so that we can avoid making changes
70    to outer functions which are unnecessary.
71 
72    The order of the passes matters a bit, in that later passes will be
73    skipped if it is discovered that the functions don't actually interact
74    at all.  That is, they're nested in the lexical sense but could have
75    been written as independent functions without change.  */
76 
77 
/* State tracked per function while lowering the nesting tree.  One of
   these is allocated for every function by create_nesting_tree.  */

struct nesting_info
{
  /* Links forming the nesting tree: enclosing function's info, first
     immediately-nested function's info, and next sibling at the same
     nesting level (see create_nesting_tree).  */
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  /* Map from a nonlocally-referenced DECL to its FIELD_DECL in the
     frame struct (see lookup_field_for_decl).  */
  struct pointer_map_t *field_map;
  /* Map keyed by decl, used both for trampoline fields of nested
     function decls (lookup_tramp_for_decl) and for local debug
     VAR_DECLs of nonlocal variables (get_nonlocal_debug_decl).  */
  struct pointer_map_t *var_map;
  /* NOTE(review): set of memory-reference trees; populated outside
     this chunk — confirm exact use in the conversion callbacks.  */
  struct pointer_set_t *mem_refs;
  /* DECL_UIDs whose expansion to a frame reference is suppressed
     (consulted in convert_nonlocal_reference_op).  */
  bitmap suppress_expansion;

  /* The FUNCTION_DECL this info describes.  */
  tree context;
  /* Chain of temporaries created by create_tmp_var_for, to be
     registered in this function later.  */
  tree new_local_var_chain;
  /* Chain of debug decls created by get_nonlocal_debug_decl.  */
  tree debug_var_chain;
  /* The RECORD_TYPE of the nonlocal frame struct and the local
     VAR_DECL instantiating it (see get_frame_type).  */
  tree frame_type;
  tree frame_decl;
  /* The "__chain" FIELD_DECL in frame_type pointing to the parent's
     frame (get_chain_field).  */
  tree chain_field;
  /* The incoming static chain, represented as a PARM_DECL
     (get_chain_decl).  */
  tree chain_decl;
  /* The "__nl_goto_buf" FIELD_DECL for nonlocal gotos
     (get_nl_goto_field).  */
  tree nl_goto_field;

  /* Set when a PARM_DECL was remapped into the frame struct.  */
  bool any_parm_remapped;
  /* Set when a trampoline field was created for a nested function.  */
  bool any_tramp_created;
  /* NOTE(review): flag bits recorded when static chains are added;
     set/read outside this chunk — verify against the callers.  */
  char static_chain_added;
};
102 
103 
104 /* Iterate over the nesting tree, starting with ROOT, depth first.  */
105 
106 static inline struct nesting_info *
iter_nestinfo_start(struct nesting_info * root)107 iter_nestinfo_start (struct nesting_info *root)
108 {
109   while (root->inner)
110     root = root->inner;
111   return root;
112 }
113 
114 static inline struct nesting_info *
iter_nestinfo_next(struct nesting_info * node)115 iter_nestinfo_next (struct nesting_info *node)
116 {
117   if (node->next)
118     return iter_nestinfo_start (node->next);
119   return node->outer;
120 }
121 
122 #define FOR_EACH_NEST_INFO(I, ROOT) \
123   for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
124 
125 /* Obstack used for the bitmaps in the struct above.  */
126 static struct bitmap_obstack nesting_info_bitmap_obstack;
127 
128 
129 /* We're working in so many different function contexts simultaneously,
130    that create_tmp_var is dangerous.  Prevent mishap.  */
131 #define create_tmp_var cant_use_create_tmp_var_here_dummy
132 
133 /* Like create_tmp_var, except record the variable for registration at
134    the given nesting level.  */
135 
136 static tree
create_tmp_var_for(struct nesting_info * info,tree type,const char * prefix)137 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
138 {
139   tree tmp_var;
140 
141   /* If the type is of variable size or a type which must be created by the
142      frontend, something is wrong.  Note that we explicitly allow
143      incomplete types here, since we create them ourselves here.  */
144   gcc_assert (!TREE_ADDRESSABLE (type));
145   gcc_assert (!TYPE_SIZE_UNIT (type)
146 	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
147 
148   tmp_var = create_tmp_var_raw (type, prefix);
149   DECL_CONTEXT (tmp_var) = info->context;
150   DECL_CHAIN (tmp_var) = info->new_local_var_chain;
151   DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
152   if (TREE_CODE (type) == COMPLEX_TYPE
153       || TREE_CODE (type) == VECTOR_TYPE)
154     DECL_GIMPLE_REG_P (tmp_var) = 1;
155 
156   info->new_local_var_chain = tmp_var;
157 
158   return tmp_var;
159 }
160 
161 /* Take the address of EXP to be used within function CONTEXT.
162    Mark it for addressability as necessary.  */
163 
164 tree
build_addr(tree exp,tree context)165 build_addr (tree exp, tree context)
166 {
167   tree base = exp;
168   tree save_context;
169   tree retval;
170 
171   while (handled_component_p (base))
172     base = TREE_OPERAND (base, 0);
173 
174   if (DECL_P (base))
175     TREE_ADDRESSABLE (base) = 1;
176 
177   /* Building the ADDR_EXPR will compute a set of properties for
178      that ADDR_EXPR.  Those properties are unfortunately context
179      specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
180 
181      Temporarily set CURRENT_FUNCTION_DECL to the desired context,
182      build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL.  That
183      way the properties are for the ADDR_EXPR are computed properly.  */
184   save_context = current_function_decl;
185   current_function_decl = context;
186   retval = build_fold_addr_expr (exp);
187   current_function_decl = save_context;
188   return retval;
189 }
190 
191 /* Insert FIELD into TYPE, sorted by alignment requirements.  */
192 
193 void
insert_field_into_struct(tree type,tree field)194 insert_field_into_struct (tree type, tree field)
195 {
196   tree *p;
197 
198   DECL_CONTEXT (field) = type;
199 
200   for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
201     if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
202       break;
203 
204   DECL_CHAIN (field) = *p;
205   *p = field;
206 
207   /* Set correct alignment for frame struct type.  */
208   if (TYPE_ALIGN (type) < DECL_ALIGN (field))
209     TYPE_ALIGN (type) = DECL_ALIGN (field);
210 }
211 
212 /* Build or return the RECORD_TYPE that describes the frame state that is
213    shared between INFO->CONTEXT and its nested functions.  This record will
214    not be complete until finalize_nesting_tree; up until that point we'll
215    be adding fields as necessary.
216 
217    We also build the DECL that represents this frame in the function.  */
218 
219 static tree
get_frame_type(struct nesting_info * info)220 get_frame_type (struct nesting_info *info)
221 {
222   tree type = info->frame_type;
223   if (!type)
224     {
225       char *name;
226 
227       type = make_node (RECORD_TYPE);
228 
229       name = concat ("FRAME.",
230 		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
231 		     NULL);
232       TYPE_NAME (type) = get_identifier (name);
233       free (name);
234 
235       info->frame_type = type;
236       info->frame_decl = create_tmp_var_for (info, type, "FRAME");
237       DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
238 
239       /* ??? Always make it addressable for now, since it is meant to
240 	 be pointed to by the static chain pointer.  This pessimizes
241 	 when it turns out that no static chains are needed because
242 	 the nested functions referencing non-local variables are not
243 	 reachable, but the true pessimization is to create the non-
244 	 local frame structure in the first place.  */
245       TREE_ADDRESSABLE (info->frame_decl) = 1;
246     }
247   return type;
248 }
249 
250 /* Return true if DECL should be referenced by pointer in the non-local
251    frame structure.  */
252 
253 static bool
use_pointer_in_frame(tree decl)254 use_pointer_in_frame (tree decl)
255 {
256   if (TREE_CODE (decl) == PARM_DECL)
257     {
258       /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
259          sized decls, and inefficient to copy large aggregates.  Don't bother
260          moving anything but scalar variables.  */
261       return AGGREGATE_TYPE_P (TREE_TYPE (decl));
262     }
263   else
264     {
265       /* Variable sized types make things "interesting" in the frame.  */
266       return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
267     }
268 }
269 
270 /* Given DECL, a non-locally accessed variable, find or create a field
271    in the non-local frame structure for the given nesting context.  */
272 
273 static tree
lookup_field_for_decl(struct nesting_info * info,tree decl,enum insert_option insert)274 lookup_field_for_decl (struct nesting_info *info, tree decl,
275 		       enum insert_option insert)
276 {
277   void **slot;
278 
279   if (insert == NO_INSERT)
280     {
281       slot = pointer_map_contains (info->field_map, decl);
282       return slot ? (tree) *slot : NULL_TREE;
283     }
284 
285   slot = pointer_map_insert (info->field_map, decl);
286   if (!*slot)
287     {
288       tree field = make_node (FIELD_DECL);
289       DECL_NAME (field) = DECL_NAME (decl);
290 
291       if (use_pointer_in_frame (decl))
292 	{
293 	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
294 	  DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
295 	  DECL_NONADDRESSABLE_P (field) = 1;
296 	}
297       else
298 	{
299           TREE_TYPE (field) = TREE_TYPE (decl);
300           DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
301           DECL_ALIGN (field) = DECL_ALIGN (decl);
302           DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
303           TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
304           DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
305           TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
306 	}
307 
308       insert_field_into_struct (get_frame_type (info), field);
309       *slot = field;
310 
311       if (TREE_CODE (decl) == PARM_DECL)
312 	info->any_parm_remapped = true;
313     }
314 
315   return (tree) *slot;
316 }
317 
318 /* Build or return the variable that holds the static chain within
319    INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */
320 
321 static tree
get_chain_decl(struct nesting_info * info)322 get_chain_decl (struct nesting_info *info)
323 {
324   tree decl = info->chain_decl;
325 
326   if (!decl)
327     {
328       tree type;
329 
330       type = get_frame_type (info->outer);
331       type = build_pointer_type (type);
332 
333       /* Note that this variable is *not* entered into any BIND_EXPR;
334 	 the construction of this variable is handled specially in
335 	 expand_function_start and initialize_inlined_parameters.
336 	 Note also that it's represented as a parameter.  This is more
337 	 close to the truth, since the initial value does come from
338 	 the caller.  */
339       decl = build_decl (DECL_SOURCE_LOCATION (info->context),
340 			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
341       DECL_ARTIFICIAL (decl) = 1;
342       DECL_IGNORED_P (decl) = 1;
343       TREE_USED (decl) = 1;
344       DECL_CONTEXT (decl) = info->context;
345       DECL_ARG_TYPE (decl) = type;
346 
347       /* Tell tree-inline.c that we never write to this variable, so
348 	 it can copy-prop the replacement value immediately.  */
349       TREE_READONLY (decl) = 1;
350 
351       info->chain_decl = decl;
352 
353       if (dump_file
354           && (dump_flags & TDF_DETAILS)
355 	  && !DECL_STATIC_CHAIN (info->context))
356 	fprintf (dump_file, "Setting static-chain for %s\n",
357 		 lang_hooks.decl_printable_name (info->context, 2));
358 
359       DECL_STATIC_CHAIN (info->context) = 1;
360     }
361   return decl;
362 }
363 
364 /* Build or return the field within the non-local frame state that holds
365    the static chain for INFO->CONTEXT.  This is the way to walk back up
366    multiple nesting levels.  */
367 
368 static tree
get_chain_field(struct nesting_info * info)369 get_chain_field (struct nesting_info *info)
370 {
371   tree field = info->chain_field;
372 
373   if (!field)
374     {
375       tree type = build_pointer_type (get_frame_type (info->outer));
376 
377       field = make_node (FIELD_DECL);
378       DECL_NAME (field) = get_identifier ("__chain");
379       TREE_TYPE (field) = type;
380       DECL_ALIGN (field) = TYPE_ALIGN (type);
381       DECL_NONADDRESSABLE_P (field) = 1;
382 
383       insert_field_into_struct (get_frame_type (info), field);
384 
385       info->chain_field = field;
386 
387       if (dump_file
388           && (dump_flags & TDF_DETAILS)
389 	  && !DECL_STATIC_CHAIN (info->context))
390 	fprintf (dump_file, "Setting static-chain for %s\n",
391 		 lang_hooks.decl_printable_name (info->context, 2));
392 
393       DECL_STATIC_CHAIN (info->context) = 1;
394     }
395   return field;
396 }
397 
398 /* Initialize a new temporary with the GIMPLE_CALL STMT.  */
399 
400 static tree
init_tmp_var_with_call(struct nesting_info * info,gimple_stmt_iterator * gsi,gimple call)401 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
402 		        gimple call)
403 {
404   tree t;
405 
406   t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
407   gimple_call_set_lhs (call, t);
408   if (! gsi_end_p (*gsi))
409     gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
410   gsi_insert_before (gsi, call, GSI_SAME_STMT);
411 
412   return t;
413 }
414 
415 
416 /* Copy EXP into a temporary.  Allocate the temporary in the context of
417    INFO and insert the initialization statement before GSI.  */
418 
419 static tree
init_tmp_var(struct nesting_info * info,tree exp,gimple_stmt_iterator * gsi)420 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
421 {
422   tree t;
423   gimple stmt;
424 
425   t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
426   stmt = gimple_build_assign (t, exp);
427   if (! gsi_end_p (*gsi))
428     gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
429   gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
430 
431   return t;
432 }
433 
434 
435 /* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */
436 
437 static tree
gsi_gimplify_val(struct nesting_info * info,tree exp,gimple_stmt_iterator * gsi)438 gsi_gimplify_val (struct nesting_info *info, tree exp,
439 		  gimple_stmt_iterator *gsi)
440 {
441   if (is_gimple_val (exp))
442     return exp;
443   else
444     return init_tmp_var (info, exp, gsi);
445 }
446 
447 /* Similarly, but copy from the temporary and insert the statement
448    after the iterator.  */
449 
450 static tree
save_tmp_var(struct nesting_info * info,tree exp,gimple_stmt_iterator * gsi)451 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
452 {
453   tree t;
454   gimple stmt;
455 
456   t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
457   stmt = gimple_build_assign (exp, t);
458   if (! gsi_end_p (*gsi))
459     gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
460   gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
461 
462   return t;
463 }
464 
465 /* Build or return the type used to represent a nested function trampoline.  */
466 
467 static GTY(()) tree trampoline_type;
468 
469 static tree
get_trampoline_type(struct nesting_info * info)470 get_trampoline_type (struct nesting_info *info)
471 {
472   unsigned align, size;
473   tree t;
474 
475   if (trampoline_type)
476     return trampoline_type;
477 
478   align = TRAMPOLINE_ALIGNMENT;
479   size = TRAMPOLINE_SIZE;
480 
481   /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
482      then allocate extra space so that we can do dynamic alignment.  */
483   if (align > STACK_BOUNDARY)
484     {
485       size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
486       align = STACK_BOUNDARY;
487     }
488 
489   t = build_index_type (size_int (size - 1));
490   t = build_array_type (char_type_node, t);
491   t = build_decl (DECL_SOURCE_LOCATION (info->context),
492 		  FIELD_DECL, get_identifier ("__data"), t);
493   DECL_ALIGN (t) = align;
494   DECL_USER_ALIGN (t) = 1;
495 
496   trampoline_type = make_node (RECORD_TYPE);
497   TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
498   TYPE_FIELDS (trampoline_type) = t;
499   layout_type (trampoline_type);
500   DECL_CONTEXT (t) = trampoline_type;
501 
502   return trampoline_type;
503 }
504 
505 /* Given DECL, a nested function, find or create a field in the non-local
506    frame structure for a trampoline for this function.  */
507 
508 static tree
lookup_tramp_for_decl(struct nesting_info * info,tree decl,enum insert_option insert)509 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
510 		       enum insert_option insert)
511 {
512   void **slot;
513 
514   if (insert == NO_INSERT)
515     {
516       slot = pointer_map_contains (info->var_map, decl);
517       return slot ? (tree) *slot : NULL_TREE;
518     }
519 
520   slot = pointer_map_insert (info->var_map, decl);
521   if (!*slot)
522     {
523       tree field = make_node (FIELD_DECL);
524       DECL_NAME (field) = DECL_NAME (decl);
525       TREE_TYPE (field) = get_trampoline_type (info);
526       TREE_ADDRESSABLE (field) = 1;
527 
528       insert_field_into_struct (get_frame_type (info), field);
529       *slot = field;
530 
531       info->any_tramp_created = true;
532     }
533 
534   return (tree) *slot;
535 }
536 
537 /* Build or return the field within the non-local frame state that holds
538    the non-local goto "jmp_buf".  The buffer itself is maintained by the
539    rtl middle-end as dynamic stack space is allocated.  */
540 
541 static tree
get_nl_goto_field(struct nesting_info * info)542 get_nl_goto_field (struct nesting_info *info)
543 {
544   tree field = info->nl_goto_field;
545   if (!field)
546     {
547       unsigned size;
548       tree type;
549 
550       /* For __builtin_nonlocal_goto, we need N words.  The first is the
551 	 frame pointer, the rest is for the target's stack pointer save
552 	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
553 	 not the best interface, but it'll do for now.  */
554       if (Pmode == ptr_mode)
555 	type = ptr_type_node;
556       else
557 	type = lang_hooks.types.type_for_mode (Pmode, 1);
558 
559       size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
560       size = size / GET_MODE_SIZE (Pmode);
561       size = size + 1;
562 
563       type = build_array_type
564 	(type, build_index_type (size_int (size)));
565 
566       field = make_node (FIELD_DECL);
567       DECL_NAME (field) = get_identifier ("__nl_goto_buf");
568       TREE_TYPE (field) = type;
569       DECL_ALIGN (field) = TYPE_ALIGN (type);
570       TREE_ADDRESSABLE (field) = 1;
571 
572       insert_field_into_struct (get_frame_type (info), field);
573 
574       info->nl_goto_field = field;
575     }
576 
577   return field;
578 }
579 
580 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ.  */
581 
582 static void
walk_body(walk_stmt_fn callback_stmt,walk_tree_fn callback_op,struct nesting_info * info,gimple_seq * pseq)583 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
584 	   struct nesting_info *info, gimple_seq *pseq)
585 {
586   struct walk_stmt_info wi;
587 
588   memset (&wi, 0, sizeof (wi));
589   wi.info = info;
590   wi.val_only = true;
591   walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
592 }
593 
594 
595 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */
596 
597 static inline void
walk_function(walk_stmt_fn callback_stmt,walk_tree_fn callback_op,struct nesting_info * info)598 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
599 	       struct nesting_info *info)
600 {
601   gimple_seq body = gimple_body (info->context);
602   walk_body (callback_stmt, callback_op, info, &body);
603   gimple_set_body (info->context, body);
604 }
605 
606 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */
607 
static void
walk_gimple_omp_for (gimple for_stmt,
    		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
    		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  /* Walk the clause operands with an iterator positioned on an
     initially empty sequence, so that any statements the callback
     inserts accumulate there instead of inside FOR_STMT itself.  */
  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      /* The index is written, and not necessarily a plain value.  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      /* Initial and final bounds are read as gimple values.  */
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      /* The increment is a binary expression; walk its two operands
	 separately, the first like the index, the second as a value.  */
      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* Any statements the callback emitted become part of the pre-body.  */
  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
658 
659 /* Similarly for ROOT and all functions nested underneath, depth first.  */
660 
661 static void
walk_all_functions(walk_stmt_fn callback_stmt,walk_tree_fn callback_op,struct nesting_info * root)662 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
663 		    struct nesting_info *root)
664 {
665   struct nesting_info *n;
666   FOR_EACH_NEST_INFO (n, root)
667     walk_function (callback_stmt, callback_op, n);
668 }
669 
670 
/* We have to check for a fairly pathological case.  The operands of a
   nested function are to be interpreted in the context of the enclosing
673    function.  So if any are variably-sized, they will get remapped when the
674    enclosing function is inlined.  But that remapping would also have to be
675    done in the types of the PARM_DECLs of the nested function, meaning the
676    argument types of that function will disagree with the arguments in the
677    calls to that function.  So we'd either have to make a copy of the nested
678    function corresponding to each time the enclosing function was inlined or
679    add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
680    function.  The former is not practical.  The latter would still require
681    detecting this case to know when to add the conversions.  So, for now at
682    least, we don't inline such an enclosing function.
683 
684    We have to do that check recursively, so here return indicating whether
685    FNDECL has such a nested function.  ORIG_FN is the function we were
686    trying to inline to use for checking whether any argument is variably
687    modified by anything in it.
688 
689    It would be better to do this in tree-inline.c so that we could give
690    the appropriate warning for why a function can't be inlined, but that's
691    too late since the nesting structure has already been flattened and
692    adding a flag just to record this fact seems a waste of a flag.  */
693 
static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
  struct cgraph_node *cgn = cgraph_get_node (fndecl);
  tree arg;

  /* Look at each function nested immediately within FNDECL, and via
     the recursive call below, everything nested more deeply.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      /* Any argument whose type is variably modified by something in
	 ORIG_FNDECL would be remapped on inlining; bail out.  */
      for (arg = DECL_ARGUMENTS (cgn->symbol.decl); arg; arg = DECL_CHAIN (arg))
	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
	  return true;

      if (check_for_nested_with_variably_modified (cgn->symbol.decl,
						   orig_fndecl))
	return true;
    }

  return false;
}
713 
714 /* Construct our local datastructure describing the function nesting
715    tree rooted by CGN.  */
716 
717 static struct nesting_info *
create_nesting_tree(struct cgraph_node * cgn)718 create_nesting_tree (struct cgraph_node *cgn)
719 {
720   struct nesting_info *info = XCNEW (struct nesting_info);
721   info->field_map = pointer_map_create ();
722   info->var_map = pointer_map_create ();
723   info->mem_refs = pointer_set_create ();
724   info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
725   info->context = cgn->symbol.decl;
726 
727   for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
728     {
729       struct nesting_info *sub = create_nesting_tree (cgn);
730       sub->outer = info;
731       sub->next = info->inner;
732       info->inner = sub;
733     }
734 
735   /* See discussion at check_for_nested_with_variably_modified for a
736      discussion of why this has to be here.  */
737   if (check_for_nested_with_variably_modified (info->context, info->context))
738     DECL_UNINLINABLE (info->context) = true;
739 
740   return info;
741 }
742 
743 /* Return an expression computing the static chain for TARGET_CONTEXT
744    from INFO->CONTEXT.  Insert any necessary computations before TSI.  */
745 
746 static tree
get_static_chain(struct nesting_info * info,tree target_context,gimple_stmt_iterator * gsi)747 get_static_chain (struct nesting_info *info, tree target_context,
748 		  gimple_stmt_iterator *gsi)
749 {
750   struct nesting_info *i;
751   tree x;
752 
753   if (info->context == target_context)
754     {
755       x = build_addr (info->frame_decl, target_context);
756     }
757   else
758     {
759       x = get_chain_decl (info);
760 
761       for (i = info->outer; i->context != target_context; i = i->outer)
762 	{
763 	  tree field = get_chain_field (i);
764 
765 	  x = build_simple_mem_ref (x);
766 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
767 	  x = init_tmp_var (info, x, gsi);
768 	}
769     }
770 
771   return x;
772 }
773 
774 
775 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
776    frame as seen from INFO->CONTEXT.  Insert any necessary computations
777    before GSI.  */
778 
779 static tree
get_frame_field(struct nesting_info * info,tree target_context,tree field,gimple_stmt_iterator * gsi)780 get_frame_field (struct nesting_info *info, tree target_context,
781 		 tree field, gimple_stmt_iterator *gsi)
782 {
783   struct nesting_info *i;
784   tree x;
785 
786   if (info->context == target_context)
787     {
788       /* Make sure frame_decl gets created.  */
789       (void) get_frame_type (info);
790       x = info->frame_decl;
791     }
792   else
793     {
794       x = get_chain_decl (info);
795 
796       for (i = info->outer; i->context != target_context; i = i->outer)
797 	{
798 	  tree field = get_chain_field (i);
799 
800 	  x = build_simple_mem_ref (x);
801 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
802 	  x = init_tmp_var (info, x, gsi);
803 	}
804 
805       x = build_simple_mem_ref (x);
806     }
807 
808   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
809   return x;
810 }
811 
812 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
813 
814 /* A subroutine of convert_nonlocal_reference_op.  Create a local variable
815    in the nested function with DECL_VALUE_EXPR set to reference the true
816    variable in the parent function.  This is used both for debug info
817    and in OpenMP lowering.  */
818 
819 static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  /* Build (or reuse) a debug-only VAR_DECL in INFO->CONTEXT whose
     DECL_VALUE_EXPR expands to DECL's slot in the owning function's
     frame, reached through the static chain.  Results are memoized
     in INFO->VAR_MAP.  */
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;
  void **slot;

  slot = pointer_map_insert (info->var_map, decl);

  /* Reuse a replacement decl created on an earlier call.  */
  if (*slot)
    return (tree) *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
    }
  else
    {
      /* Walk up the static chain, one CHAIN->__chain dereference per
	 nesting level, until we reach DECL's frame.  */
      x = get_chain_decl (info);
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build_simple_mem_ref (x);
    }

  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  /* If the frame holds only a pointer to DECL, dereference it.  */
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref (x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  /* Carry the original decl's attributes over to the replacement.  */
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  /* The frame access expression stands in for the variable's value.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  /* Memoize the result and chain it onto the list of debug decls that
     will be declared in the function later.  */
  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* At -O0, also create debug decls for any nonlocal VLA bounds
     referenced by DECL's type.  */
  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
890 
891 
892 /* Callback for walk_gimple_stmt, rewrite all references to VAR
893    and PARM_DECLs that belong to outer functions.
894 
895    The rewrite will involve some number of structure accesses back up
896    the static chain.  E.g. for a variable FOO up one nesting level it'll
897    be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
898    indirections apply to decls for which use_pointer_in_frame is true.  */
899 
static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  /* Assume the node and its children are fully handled here unless a
     case below explicitly requests recursion.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
	{
	  tree x;
	  wi->changed = true;

	  /* Always create the debug replacement decl; unless expansion
	     of T is suppressed (e.g. because an OpenMP clause already
	     remapped it), rewrite the reference into an explicit walk
	     up the static chain to the owner's frame field.  */
	  x = get_nonlocal_debug_decl (info, t);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    {
	      tree target_context = decl_function_context (t);
	      struct nesting_info *i;
	      for (i = info->outer; i->context != target_context; i = i->outer)
		continue;
	      x = lookup_field_for_decl (i, t, INSERT);
	      x = get_frame_field (info, target_context, x, &wi->gsi);
	      if (use_pointer_in_frame (t))
		{
		  /* The frame holds only a pointer to T; load it into a
		     temporary and dereference.  */
		  x = init_tmp_var (info, x, &wi->gsi);
		  x = build_simple_mem_ref (x);
		}
	    }

	  /* In a context that wants a bare value, funnel the access
	     through a temporary on the appropriate side.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
        FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	/* The operand of an ADDR_EXPR must stay an lvalue, so walk it
	   in !val_only mode.  */
	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  /* Only the variable operands (field offsets, array indices
	     and element sizes) are walked here, as values.  */
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      /* The base object itself must remain an lvalue.  */
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      /* For any other expression, recurse into the operands in a plain
	 value context.  */
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
          wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1040 
1041 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1042 					     struct walk_stmt_info *);
1043 
1044 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1045    and PARM_DECLs that belong to outer functions.  */
1046 
static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap; it becomes the new
     INFO->suppress_expansion once all clauses have been scanned.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* Reduction placeholders carry init/merge GIMPLE sequences
	     that must be walked in the second pass below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* Non-automatic variables are never rewritten.  */
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      /* Substitute the debug replacement decl and suppress the
		 normal chain-walk expansion of DECL inside the region.  */
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	  /* These clauses carry a scalar expression operand; rewrite
	     any nonlocal references it contains, as a value.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
	                                 &dummy, wi);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	  /* No rewritable operands.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to reduction and
     lastprivate clauses.  The reduction placeholder is temporarily
     reparented to INFO->CONTEXT so decl_function_context tests inside
     the walk treat it as local.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_chain;
}
1154 
1155 /* Create nonlocal debug decls for nonlocal VLA array bounds.  */
1156 
static void
note_nonlocal_vla_type (struct nesting_info *info, tree type)
{
  /* Look through unnamed pointer types.  */
  while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
    type = TREE_TYPE (type);

  /* If the type came from a typedef, work on the original type.  */
  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
    type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));

  /* Strip pointer, vector and function/method layers to reach any
     array type underneath.  */
  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == VECTOR_TYPE
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == METHOD_TYPE)
    type = TREE_TYPE (type);

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree domain, t;

      /* Recurse into the element type first, for multi-dimensional
	 VLAs.  */
      note_nonlocal_vla_type (info, TREE_TYPE (type));
      domain = TYPE_DOMAIN (type);
      if (domain)
	{
	  /* Create debug decls for any nonlocal VAR_DECL/PARM_DECL
	     used as an array bound.  */
	  t = TYPE_MIN_VALUE (domain);
	  if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
	      && decl_function_context (t) != info->context)
	    get_nonlocal_debug_decl (info, t);
	  t = TYPE_MAX_VALUE (domain);
	  if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
	      && decl_function_context (t) != info->context)
	    get_nonlocal_debug_decl (info, t);
	}
    }
}
1193 
1194 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1195    in BLOCK.  */
1196 
1197 static void
note_nonlocal_block_vlas(struct nesting_info * info,tree block)1198 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1199 {
1200   tree var;
1201 
1202   for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1203     if (TREE_CODE (var) == VAR_DECL
1204 	&& variably_modified_type_p (TREE_TYPE (var), NULL)
1205 	&& DECL_HAS_VALUE_EXPR_P (var)
1206 	&& decl_function_context (var) != info->context)
1207       note_nonlocal_vla_type (info, TREE_TYPE (var));
1208 }
1209 
1210 /* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
1211    PARM_DECLs that belong to outer functions.  This handles statements
1212    that are not handled via the standard recursion done in
1213    walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
1214    convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
1215    operands of STMT have been handled by this function.  */
1216 
static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = true;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
	                                wi))
	{
	  /* Some clause needs the static chain: pass the chain decl
	     into the region via an added firstprivate clause.  */
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Collect temporaries created while walking the region body on a
	 fresh chain, so they get declared inside the region rather
	 than the enclosing function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
	              gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      /* Walk the loop header expressions as well as the body.  */
      walk_gimple_omp_for (stmt, convert_nonlocal_reference_stmt,
	  		   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
	  	 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
      /* Clauseless regions: just walk the body.  */
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_BIND:
      /* At -O0, pick up nonlocal VLA bounds from the bind's block so
	 they get debug decls too.  */
      if (!optimize && gimple_bind_block (stmt))
	note_nonlocal_block_vlas (info, gimple_bind_block (stmt));

      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_COND:
      /* Condition operands are always used as bare values.  */
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
1325 
1326 
1327 /* A subroutine of convert_local_reference.  Create a local variable
1328    in the parent function with DECL_VALUE_EXPR set to reference the
1329    field in FRAME.  This is used both for debug info and in OpenMP
1330    lowering.  */
1331 
1332 static tree
get_local_debug_decl(struct nesting_info * info,tree decl,tree field)1333 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1334 {
1335   tree x, new_decl;
1336   void **slot;
1337 
1338   slot = pointer_map_insert (info->var_map, decl);
1339   if (*slot)
1340     return (tree) *slot;
1341 
1342   /* Make sure frame_decl gets created.  */
1343   (void) get_frame_type (info);
1344   x = info->frame_decl;
1345   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1346 
1347   new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1348 			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1349   DECL_CONTEXT (new_decl) = info->context;
1350   DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1351   DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1352   TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1353   TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1354   TREE_READONLY (new_decl) = TREE_READONLY (decl);
1355   TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1356   DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1357   if ((TREE_CODE (decl) == PARM_DECL
1358        || TREE_CODE (decl) == RESULT_DECL
1359        || TREE_CODE (decl) == VAR_DECL)
1360       && DECL_BY_REFERENCE (decl))
1361     DECL_BY_REFERENCE (new_decl) = 1;
1362 
1363   SET_DECL_VALUE_EXPR (new_decl, x);
1364   DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1365   *slot = new_decl;
1366 
1367   DECL_CHAIN (new_decl) = info->debug_var_chain;
1368   info->debug_var_chain = new_decl;
1369 
1370   /* Do not emit debug info twice.  */
1371   DECL_IGNORED_P (decl) = 1;
1372 
1373   return new_decl;
1374 }
1375 
1376 
1377 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1378    and PARM_DECLs that were referenced by inner nested functions.
1379    The rewrite will be a structure reference to the local frame variable.  */
1380 
1381 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1382 
static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  /* Assume the node and its children are fully handled here unless a
     case below explicitly requests recursion.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Use the debug replacement when expansion is suppressed
	     (OpenMP clause handling); otherwise an explicit FRAME.FIELD
	     reference.  */
	  x = get_local_debug_decl (info, t, field);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  /* In a value context, funnel the access through a temporary
	     on the appropriate side.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      /* The operand of an ADDR_EXPR must stay an lvalue, so walk it in
	 !val_only mode.  */
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  /* Only the variable operands (field offsets, array indices
	     and element sizes) are walked here, as values.  */
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      /* The base object itself must remain an lvalue.  */
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
		 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
	 part of a ADDR_EXPR address are not allowed.  But we cannot
	 fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	pointer_set_insert (info->mem_refs, tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      /* For any other expression, recurse into the operands in a plain
	 value context.  */
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1527 
1528 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1529 					  struct walk_stmt_info *);
1530 
1531 /* Helper for convert_local_reference.  Convert all the references in
1532    the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.  */
1533 
static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_frame = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap; it becomes the new
     INFO->suppress_expansion once all clauses have been scanned.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* Reduction placeholders carry init/merge GIMPLE sequences
	     that must be walked in the second pass below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* Non-automatic variables are never rewritten.  */
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  /* Only local decls that were moved into the frame (not kept
	     as a pointer) need rewriting.  */
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  /* Substitute the debug replacement decl and suppress
		     the normal frame expansion inside the region.  */
		  bitmap_set_bit (new_suppress, DECL_UID (decl));
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	  /* These clauses carry a scalar expression operand; rewrite
	     any frame-variable references it contains, as a value.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
				      wi);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	  /* No rewritable operands.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to reduction and
     lastprivate clauses.  The reduction placeholder is temporarily
     reparented to INFO->CONTEXT so decl_function_context tests inside
     the walk treat it as local.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_frame;
}
1646 
1647 
1648 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1649    and PARM_DECLs that were referenced by inner nested functions.
1650    The rewrite will be a structure reference to the local frame variable.  */
1651 
1652 static tree
convert_local_reference_stmt(gimple_stmt_iterator * gsi,bool * handled_ops_p,struct walk_stmt_info * wi)1653 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1654 			      struct walk_stmt_info *wi)
1655 {
1656   struct nesting_info *info = (struct nesting_info *) wi->info;
1657   tree save_local_var_chain;
1658   bitmap save_suppress;
1659   gimple stmt = gsi_stmt (*gsi);
1660 
1661   switch (gimple_code (stmt))
1662     {
1663     case GIMPLE_OMP_PARALLEL:
1664     case GIMPLE_OMP_TASK:
1665       save_suppress = info->suppress_expansion;
1666       if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1667 	                             wi))
1668 	{
1669 	  tree c;
1670 	  (void) get_frame_type (info);
1671 	  c = build_omp_clause (gimple_location (stmt),
1672 				OMP_CLAUSE_SHARED);
1673 	  OMP_CLAUSE_DECL (c) = info->frame_decl;
1674 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1675 	  gimple_omp_taskreg_set_clauses (stmt, c);
1676 	}
1677 
1678       save_local_var_chain = info->new_local_var_chain;
1679       info->new_local_var_chain = NULL;
1680 
1681       walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1682 	         gimple_omp_body_ptr (stmt));
1683 
1684       if (info->new_local_var_chain)
1685 	declare_vars (info->new_local_var_chain,
1686 		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1687       info->new_local_var_chain = save_local_var_chain;
1688       info->suppress_expansion = save_suppress;
1689       break;
1690 
1691     case GIMPLE_OMP_FOR:
1692       save_suppress = info->suppress_expansion;
1693       convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1694       walk_gimple_omp_for (stmt, convert_local_reference_stmt,
1695 			   convert_local_reference_op, info);
1696       walk_body (convert_local_reference_stmt, convert_local_reference_op,
1697 		 info, gimple_omp_body_ptr (stmt));
1698       info->suppress_expansion = save_suppress;
1699       break;
1700 
1701     case GIMPLE_OMP_SECTIONS:
1702       save_suppress = info->suppress_expansion;
1703       convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1704       walk_body (convert_local_reference_stmt, convert_local_reference_op,
1705 		 info, gimple_omp_body_ptr (stmt));
1706       info->suppress_expansion = save_suppress;
1707       break;
1708 
1709     case GIMPLE_OMP_SINGLE:
1710       save_suppress = info->suppress_expansion;
1711       convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1712       walk_body (convert_local_reference_stmt, convert_local_reference_op,
1713 		 info, gimple_omp_body_ptr (stmt));
1714       info->suppress_expansion = save_suppress;
1715       break;
1716 
1717     case GIMPLE_OMP_SECTION:
1718     case GIMPLE_OMP_MASTER:
1719     case GIMPLE_OMP_ORDERED:
1720       walk_body (convert_local_reference_stmt, convert_local_reference_op,
1721 		 info, gimple_omp_body_ptr (stmt));
1722       break;
1723 
1724     case GIMPLE_COND:
1725       wi->val_only = true;
1726       wi->is_lhs = false;
1727       *handled_ops_p = false;
1728       return NULL_TREE;
1729 
1730     case GIMPLE_ASSIGN:
1731       if (gimple_clobber_p (stmt))
1732 	{
1733 	  tree lhs = gimple_assign_lhs (stmt);
1734 	  if (!use_pointer_in_frame (lhs)
1735 	      && lookup_field_for_decl (info, lhs, NO_INSERT))
1736 	    {
1737 	      gsi_replace (gsi, gimple_build_nop (), true);
1738 	      break;
1739 	    }
1740 	}
1741       *handled_ops_p = false;
1742       return NULL_TREE;
1743 
1744     default:
1745       /* For every other statement that we are not interested in
1746 	 handling here, let the walker traverse the operands.  */
1747       *handled_ops_p = false;
1748       return NULL_TREE;
1749     }
1750 
1751   /* Indicate that we have handled all the operands ourselves.  */
1752   *handled_ops_p = true;
1753   return NULL_TREE;
1754 }
1755 
1756 
1757 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
1758    that reference labels from outer functions.  The rewrite will be a
1759    call to __builtin_nonlocal_goto.  */
1760 
1761 static tree
convert_nl_goto_reference(gimple_stmt_iterator * gsi,bool * handled_ops_p,struct walk_stmt_info * wi)1762 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1763 			   struct walk_stmt_info *wi)
1764 {
1765   struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
1766   tree label, new_label, target_context, x, field;
1767   void **slot;
1768   gimple call;
1769   gimple stmt = gsi_stmt (*gsi);
1770 
1771   if (gimple_code (stmt) != GIMPLE_GOTO)
1772     {
1773       *handled_ops_p = false;
1774       return NULL_TREE;
1775     }
1776 
1777   label = gimple_goto_dest (stmt);
1778   if (TREE_CODE (label) != LABEL_DECL)
1779     {
1780       *handled_ops_p = false;
1781       return NULL_TREE;
1782     }
1783 
1784   target_context = decl_function_context (label);
1785   if (target_context == info->context)
1786     {
1787       *handled_ops_p = false;
1788       return NULL_TREE;
1789     }
1790 
1791   for (i = info->outer; target_context != i->context; i = i->outer)
1792     continue;
1793 
1794   /* The original user label may also be use for a normal goto, therefore
1795      we must create a new label that will actually receive the abnormal
1796      control transfer.  This new label will be marked LABEL_NONLOCAL; this
1797      mark will trigger proper behavior in the cfg, as well as cause the
1798      (hairy target-specific) non-local goto receiver code to be generated
1799      when we expand rtl.  Enter this association into var_map so that we
1800      can insert the new label into the IL during a second pass.  */
1801   slot = pointer_map_insert (i->var_map, label);
1802   if (*slot == NULL)
1803     {
1804       new_label = create_artificial_label (UNKNOWN_LOCATION);
1805       DECL_NONLOCAL (new_label) = 1;
1806       *slot = new_label;
1807     }
1808   else
1809     new_label = (tree) *slot;
1810 
1811   /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
1812   field = get_nl_goto_field (i);
1813   x = get_frame_field (info, target_context, field, gsi);
1814   x = build_addr (x, target_context);
1815   x = gsi_gimplify_val (info, x, gsi);
1816   call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
1817 			    2, build_addr (new_label, target_context), x);
1818   gsi_replace (gsi, call, false);
1819 
1820   /* We have handled all of STMT's operands, no need to keep going.  */
1821   *handled_ops_p = true;
1822   return NULL_TREE;
1823 }
1824 
1825 
1826 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
1827    are referenced via nonlocal goto from a nested function.  The rewrite
1828    will involve installing a newly generated DECL_NONLOCAL label, and
1829    (potentially) a branch around the rtl gunk that is assumed to be
1830    attached to such a label.  */
1831 
1832 static tree
convert_nl_goto_receiver(gimple_stmt_iterator * gsi,bool * handled_ops_p,struct walk_stmt_info * wi)1833 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1834 			  struct walk_stmt_info *wi)
1835 {
1836   struct nesting_info *const info = (struct nesting_info *) wi->info;
1837   tree label, new_label;
1838   gimple_stmt_iterator tmp_gsi;
1839   void **slot;
1840   gimple stmt = gsi_stmt (*gsi);
1841 
1842   if (gimple_code (stmt) != GIMPLE_LABEL)
1843     {
1844       *handled_ops_p = false;
1845       return NULL_TREE;
1846     }
1847 
1848   label = gimple_label_label (stmt);
1849 
1850   slot = pointer_map_contains (info->var_map, label);
1851   if (!slot)
1852     {
1853       *handled_ops_p = false;
1854       return NULL_TREE;
1855     }
1856 
1857   /* If there's any possibility that the previous statement falls through,
1858      then we must branch around the new non-local label.  */
1859   tmp_gsi = wi->gsi;
1860   gsi_prev (&tmp_gsi);
1861   if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
1862     {
1863       gimple stmt = gimple_build_goto (label);
1864       gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1865     }
1866 
1867   new_label = (tree) *slot;
1868   stmt = gimple_build_label (new_label);
1869   gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1870 
1871   *handled_ops_p = true;
1872   return NULL_TREE;
1873 }
1874 
1875 
/* Called via walk_function+walk_stmt, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference to a trampoline generated for the occasion.  */
1879 
1880 static tree
convert_tramp_reference_op(tree * tp,int * walk_subtrees,void * data)1881 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
1882 {
1883   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1884   struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
1885   tree t = *tp, decl, target_context, x, builtin;
1886   gimple call;
1887 
1888   *walk_subtrees = 0;
1889   switch (TREE_CODE (t))
1890     {
1891     case ADDR_EXPR:
1892       /* Build
1893 	   T.1 = &CHAIN->tramp;
1894 	   T.2 = __builtin_adjust_trampoline (T.1);
1895 	   T.3 = (func_type)T.2;
1896       */
1897 
1898       decl = TREE_OPERAND (t, 0);
1899       if (TREE_CODE (decl) != FUNCTION_DECL)
1900 	break;
1901 
1902       /* Only need to process nested functions.  */
1903       target_context = decl_function_context (decl);
1904       if (!target_context)
1905 	break;
1906 
1907       /* If the nested function doesn't use a static chain, then
1908 	 it doesn't need a trampoline.  */
1909       if (!DECL_STATIC_CHAIN (decl))
1910 	break;
1911 
1912       /* If we don't want a trampoline, then don't build one.  */
1913       if (TREE_NO_TRAMPOLINE (t))
1914 	break;
1915 
1916       /* Lookup the immediate parent of the callee, as that's where
1917 	 we need to insert the trampoline.  */
1918       for (i = info; i->context != target_context; i = i->outer)
1919 	continue;
1920       x = lookup_tramp_for_decl (i, decl, INSERT);
1921 
1922       /* Compute the address of the field holding the trampoline.  */
1923       x = get_frame_field (info, target_context, x, &wi->gsi);
1924       x = build_addr (x, target_context);
1925       x = gsi_gimplify_val (info, x, &wi->gsi);
1926 
1927       /* Do machine-specific ugliness.  Normally this will involve
1928 	 computing extra alignment, but it can really be anything.  */
1929       builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
1930       call = gimple_build_call (builtin, 1, x);
1931       x = init_tmp_var_with_call (info, &wi->gsi, call);
1932 
1933       /* Cast back to the proper function type.  */
1934       x = build1 (NOP_EXPR, TREE_TYPE (t), x);
1935       x = init_tmp_var (info, x, &wi->gsi);
1936 
1937       *tp = x;
1938       break;
1939 
1940     default:
1941       if (!IS_TYPE_OR_DECL_P (t))
1942 	*walk_subtrees = 1;
1943       break;
1944     }
1945 
1946   return NULL_TREE;
1947 }
1948 
1949 
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference to a trampoline
   generated for the occasion.  */
1954 
1955 static tree
convert_tramp_reference_stmt(gimple_stmt_iterator * gsi,bool * handled_ops_p,struct walk_stmt_info * wi)1956 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1957 			      struct walk_stmt_info *wi)
1958 {
1959   struct nesting_info *info = (struct nesting_info *) wi->info;
1960   gimple stmt = gsi_stmt (*gsi);
1961 
1962   switch (gimple_code (stmt))
1963     {
1964     case GIMPLE_CALL:
1965       {
1966 	/* Only walk call arguments, lest we generate trampolines for
1967 	   direct calls.  */
1968 	unsigned long i, nargs = gimple_call_num_args (stmt);
1969 	for (i = 0; i < nargs; i++)
1970 	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
1971 		     wi, NULL);
1972 	break;
1973       }
1974 
1975     case GIMPLE_OMP_PARALLEL:
1976     case GIMPLE_OMP_TASK:
1977       {
1978 	tree save_local_var_chain;
1979         walk_gimple_op (stmt, convert_tramp_reference_op, wi);
1980 	save_local_var_chain = info->new_local_var_chain;
1981 	info->new_local_var_chain = NULL;
1982         walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
1983 		   info, gimple_omp_body_ptr (stmt));
1984 	if (info->new_local_var_chain)
1985 	  declare_vars (info->new_local_var_chain,
1986 			gimple_seq_first_stmt (gimple_omp_body (stmt)),
1987 			false);
1988 	info->new_local_var_chain = save_local_var_chain;
1989       }
1990       break;
1991 
1992     default:
1993       *handled_ops_p = false;
1994       return NULL_TREE;
1995       break;
1996     }
1997 
1998   *handled_ops_p = true;
1999   return NULL_TREE;
2000 }
2001 
2002 
2003 
2004 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2005    that reference nested functions to make sure that the static chain
2006    is set up properly for the call.  */
2007 
2008 static tree
convert_gimple_call(gimple_stmt_iterator * gsi,bool * handled_ops_p,struct walk_stmt_info * wi)2009 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2010                      struct walk_stmt_info *wi)
2011 {
2012   struct nesting_info *const info = (struct nesting_info *) wi->info;
2013   tree decl, target_context;
2014   char save_static_chain_added;
2015   int i;
2016   gimple stmt = gsi_stmt (*gsi);
2017 
2018   switch (gimple_code (stmt))
2019     {
2020     case GIMPLE_CALL:
2021       if (gimple_call_chain (stmt))
2022 	break;
2023       decl = gimple_call_fndecl (stmt);
2024       if (!decl)
2025 	break;
2026       target_context = decl_function_context (decl);
2027       if (target_context && DECL_STATIC_CHAIN (decl))
2028 	{
2029 	  gimple_call_set_chain (stmt, get_static_chain (info, target_context,
2030 							 &wi->gsi));
2031 	  info->static_chain_added |= (1 << (info->context != target_context));
2032 	}
2033       break;
2034 
2035     case GIMPLE_OMP_PARALLEL:
2036     case GIMPLE_OMP_TASK:
2037       save_static_chain_added = info->static_chain_added;
2038       info->static_chain_added = 0;
2039       walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2040       for (i = 0; i < 2; i++)
2041 	{
2042 	  tree c, decl;
2043 	  if ((info->static_chain_added & (1 << i)) == 0)
2044 	    continue;
2045 	  decl = i ? get_chain_decl (info) : info->frame_decl;
2046 	  /* Don't add CHAIN.* or FRAME.* twice.  */
2047 	  for (c = gimple_omp_taskreg_clauses (stmt);
2048 	       c;
2049 	       c = OMP_CLAUSE_CHAIN (c))
2050 	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2051 		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2052 		&& OMP_CLAUSE_DECL (c) == decl)
2053 	      break;
2054 	  if (c == NULL)
2055 	    {
2056 	      c = build_omp_clause (gimple_location (stmt),
2057 				    i ? OMP_CLAUSE_FIRSTPRIVATE
2058 				    : OMP_CLAUSE_SHARED);
2059 	      OMP_CLAUSE_DECL (c) = decl;
2060 	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2061 	      gimple_omp_taskreg_set_clauses (stmt, c);
2062 	    }
2063 	}
2064       info->static_chain_added |= save_static_chain_added;
2065       break;
2066 
2067     case GIMPLE_OMP_FOR:
2068       walk_body (convert_gimple_call, NULL, info,
2069 	  	 gimple_omp_for_pre_body_ptr (stmt));
2070       /* FALLTHRU */
2071     case GIMPLE_OMP_SECTIONS:
2072     case GIMPLE_OMP_SECTION:
2073     case GIMPLE_OMP_SINGLE:
2074     case GIMPLE_OMP_MASTER:
2075     case GIMPLE_OMP_ORDERED:
2076     case GIMPLE_OMP_CRITICAL:
2077       walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2078       break;
2079 
2080     default:
2081       /* Keep looking for other operands.  */
2082       *handled_ops_p = false;
2083       return NULL_TREE;
2084     }
2085 
2086   *handled_ops_p = true;
2087   return NULL_TREE;
2088 }
2089 
2090 /* Walk the nesting tree starting with ROOT.  Convert all trampolines and
2091    call expressions.  At the same time, determine if a nested function
2092    actually uses its static chain; if not, remember that.  */
2093 
2094 static void
convert_all_function_calls(struct nesting_info * root)2095 convert_all_function_calls (struct nesting_info *root)
2096 {
2097   unsigned int chain_count = 0, old_chain_count, iter_count;
2098   struct nesting_info *n;
2099 
2100   /* First, optimistically clear static_chain for all decls that haven't
2101      used the static chain already for variable access.  */
2102   FOR_EACH_NEST_INFO (n, root)
2103     {
2104       tree decl = n->context;
2105       if (!n->outer || (!n->chain_decl && !n->chain_field))
2106 	{
2107 	  DECL_STATIC_CHAIN (decl) = 0;
2108 	  if (dump_file && (dump_flags & TDF_DETAILS))
2109 	    fprintf (dump_file, "Guessing no static-chain for %s\n",
2110 		     lang_hooks.decl_printable_name (decl, 2));
2111 	}
2112       else
2113 	DECL_STATIC_CHAIN (decl) = 1;
2114       chain_count += DECL_STATIC_CHAIN (decl);
2115     }
2116 
2117   /* Walk the functions and perform transformations.  Note that these
2118      transformations can induce new uses of the static chain, which in turn
2119      require re-examining all users of the decl.  */
2120   /* ??? It would make sense to try to use the call graph to speed this up,
2121      but the call graph hasn't really been built yet.  Even if it did, we
2122      would still need to iterate in this loop since address-of references
2123      wouldn't show up in the callgraph anyway.  */
2124   iter_count = 0;
2125   do
2126     {
2127       old_chain_count = chain_count;
2128       chain_count = 0;
2129       iter_count++;
2130 
2131       if (dump_file && (dump_flags & TDF_DETAILS))
2132 	fputc ('\n', dump_file);
2133 
2134       FOR_EACH_NEST_INFO (n, root)
2135 	{
2136 	  tree decl = n->context;
2137 	  walk_function (convert_tramp_reference_stmt,
2138 			 convert_tramp_reference_op, n);
2139 	  walk_function (convert_gimple_call, NULL, n);
2140 	  chain_count += DECL_STATIC_CHAIN (decl);
2141 	}
2142     }
2143   while (chain_count != old_chain_count);
2144 
2145   if (dump_file && (dump_flags & TDF_DETAILS))
2146     fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2147 	     iter_count);
2148 }
2149 
/* Context passed through the copy_body machinery when remapping types
   and decls for debug_var_chain / VLA value expressions.  CB must be
   the first member: nesting_copy_decl casts the copy_body_data pointer
   it receives back to this structure.  */
struct nesting_copy_body_data
{
  copy_body_data cb;
  /* The nesting level whose var_map drives the remapping.  */
  struct nesting_info *root;
};
2155 
2156 /* A helper subroutine for debug_var_chain type remapping.  */
2157 
2158 static tree
nesting_copy_decl(tree decl,copy_body_data * id)2159 nesting_copy_decl (tree decl, copy_body_data *id)
2160 {
2161   struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2162   void **slot = pointer_map_contains (nid->root->var_map, decl);
2163 
2164   if (slot)
2165     return (tree) *slot;
2166 
2167   if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2168     {
2169       tree new_decl = copy_decl_no_change (decl, id);
2170       DECL_ORIGINAL_TYPE (new_decl)
2171 	= remap_type (DECL_ORIGINAL_TYPE (decl), id);
2172       return new_decl;
2173     }
2174 
2175   if (TREE_CODE (decl) == VAR_DECL
2176       || TREE_CODE (decl) == PARM_DECL
2177       || TREE_CODE (decl) == RESULT_DECL)
2178     return decl;
2179 
2180   return copy_decl_no_change (decl, id);
2181 }
2182 
2183 /* A helper function for remap_vla_decls.  See if *TP contains
2184    some remapped variables.  */
2185 
2186 static tree
contains_remapped_vars(tree * tp,int * walk_subtrees,void * data)2187 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2188 {
2189   struct nesting_info *root = (struct nesting_info *) data;
2190   tree t = *tp;
2191   void **slot;
2192 
2193   if (DECL_P (t))
2194     {
2195       *walk_subtrees = 0;
2196       slot = pointer_map_contains (root->var_map, t);
2197 
2198       if (slot)
2199 	return (tree) *slot;
2200     }
2201   return NULL;
2202 }
2203 
2204 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2205    involved.  */
2206 
static void
remap_vla_decls(tree block,struct nesting_info * root)2208 remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Recurse into all subblocks first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* First pass: look for a VLA-typed variable whose DECL_VALUE_EXPR
     mentions a remapped variable; exit the loop (VAR non-NULL) as soon
     as one is found.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (pointer_map_contains (root->var_map, TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  /* Nothing in this block needs remapping.  */
  if (var == NULL_TREE)
    return;

  /* Set up a copy_body context that remaps decls through
     nesting_copy_decl, which consults ROOT->var_map.  */
  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = pointer_map_create ();
  id.root = root;

  /* Second pass: rewrite the types and value expressions of every
     affected variable from VAR onwards.  */
  for (; var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;
	void **slot;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	slot = pointer_map_contains (root->var_map, TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Find the nesting level that declares VAR; skip the variable
	   if none in this chain does.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
        if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	/* Remap the type, then peel unnamed pointer layers off both the
	   old and new types in lockstep so the TYPE_NAME comparison
	   below looks at the underlying types.  */
	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  pointer_map_destroy (id.cb.decl_map);
}
2302 
2303 /* Fold the MEM_REF *E.  */
2304 static bool
fold_mem_refs(const void * e,void * data ATTRIBUTE_UNUSED)2305 fold_mem_refs (const void *e, void *data ATTRIBUTE_UNUSED)
2306 {
2307   tree *ref_p = CONST_CAST2(tree *, const tree *, (const tree *)e);
2308   *ref_p = fold (*ref_p);
2309   return true;
2310 }
2311 
2312 /* Do "everything else" to clean up or complete state collected by the
2313    various walking passes -- lay out the types and decls, generate code
2314    to initialize the frame decl, store critical expressions in the
2315    struct function for rtl to find.  */
2316 
static void
finalize_nesting_tree_1(struct nesting_info * root)2318 finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list;
  gimple stmt;
  tree context = root->context;
  struct function *sf;

  /* Accumulates initialization statements to be prepended to
     CONTEXT's body at the end.  */
  stmt_list = NULL;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      tree *adjust;

      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Remove root->frame_decl from root->new_local_var_chain, so
	 that we can declare it also in the lexical blocks, which
	 helps ensure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs().  */
      for (adjust = &root->new_local_var_chain;
	   *adjust != root->frame_decl;
	   adjust = &DECL_CHAIN (*adjust))
	gcc_assert (DECL_CHAIN (*adjust));
      *adjust = DECL_CHAIN (*adjust);

      DECL_CHAIN (root->frame_decl) = NULL_TREE;
      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  /* Store either the parameter's address or its value into the
	     corresponding frame field.  */
	  if (use_pointer_in_frame (p))
	    x = build_addr (p, context);
	  else
	    x = p;

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	      gimple_assign_set_rhs1 (stmt, x);
	    }
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree arg1, arg2, arg3, x, field;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  gcc_assert (DECL_STATIC_CHAIN (i->context));
	  /* __builtin_init_trampoline (&tramp_field, &nested_fn, &frame).  */
	  arg3 = build_addr (root->frame_decl, context);

	  arg2 = build_addr (i->context, context);

	  x = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  arg1 = build_addr (x, context);

	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	  stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gimple bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      /* Prepend the accumulated statements to the outermost bind's body.  */
      bind = gimple_seq_first_stmt (gimple_body (context));
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gimple scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      /* Check whether any debug decl has a variably modified type.  */
      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = pointer_map_create ();
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Find the innermost nesting level the type depends on;
		   skip the decl if there is none.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		/* Remap the type, then peel unnamed pointer layers off
		   both the old and new types in lockstep before the
		   TYPE_NAME comparison below.  */
		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  pointer_map_destroy (id.cb.decl_map);
	}

      /* Attach the debug decls to the outermost bind's block if it has
	 one, otherwise to the function's own BLOCK.  */
      scope = gimple_seq_first_stmt (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }

  /* Fold the rewritten MEM_REF trees.  */
  pointer_set_traverse (root->mem_refs, fold_mem_refs, NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
2536 
2537 static void
finalize_nesting_tree(struct nesting_info * root)2538 finalize_nesting_tree (struct nesting_info *root)
2539 {
2540   struct nesting_info *n;
2541   FOR_EACH_NEST_INFO (n, root)
2542     finalize_nesting_tree_1 (n);
2543 }
2544 
2545 /* Unnest the nodes and pass them to cgraph.  */
2546 
2547 static void
unnest_nesting_tree_1(struct nesting_info * root)2548 unnest_nesting_tree_1 (struct nesting_info *root)
2549 {
2550   struct cgraph_node *node = cgraph_get_node (root->context);
2551 
2552   /* For nested functions update the cgraph to reflect unnesting.
2553      We also delay finalizing of these functions up to this point.  */
2554   if (node->origin)
2555     {
2556        cgraph_unnest_node (node);
2557        cgraph_finalize_function (root->context, true);
2558     }
2559 }
2560 
2561 static void
unnest_nesting_tree(struct nesting_info * root)2562 unnest_nesting_tree (struct nesting_info *root)
2563 {
2564   struct nesting_info *n;
2565   FOR_EACH_NEST_INFO (n, root)
2566     unnest_nesting_tree_1 (n);
2567 }
2568 
2569 /* Free the data structures allocated during this pass.  */
2570 
2571 static void
free_nesting_tree(struct nesting_info * root)2572 free_nesting_tree (struct nesting_info *root)
2573 {
2574   struct nesting_info *node, *next;
2575 
2576   node = iter_nestinfo_start (root);
2577   do
2578     {
2579       next = iter_nestinfo_next (node);
2580       pointer_map_destroy (node->var_map);
2581       pointer_map_destroy (node->field_map);
2582       pointer_set_destroy (node->mem_refs);
2583       free (node);
2584       node = next;
2585     }
2586   while (node);
2587 }
2588 
2589 /* Gimplify a function and all its nested functions.  */
2590 static void
gimplify_all_functions(struct cgraph_node * root)2591 gimplify_all_functions (struct cgraph_node *root)
2592 {
2593   struct cgraph_node *iter;
2594   if (!gimple_body (root->symbol.decl))
2595     gimplify_function_tree (root->symbol.decl);
2596   for (iter = root->nested; iter; iter = iter->next_nested)
2597     gimplify_all_functions (iter);
2598 }
2599 
2600 /* Main entry point for this pass.  Process FNDECL and all of its nested
2601    subroutines and turn them into something less tightly bound.  */
2602 
2603 void
lower_nested_functions(tree fndecl)2604 lower_nested_functions (tree fndecl)
2605 {
2606   struct cgraph_node *cgn;
2607   struct nesting_info *root;
2608 
2609   /* If there are no nested functions, there's nothing to do.  */
2610   cgn = cgraph_get_node (fndecl);
2611   if (!cgn->nested)
2612     return;
2613 
2614   gimplify_all_functions (cgn);
2615 
2616   dump_file = dump_begin (TDI_nested, &dump_flags);
2617   if (dump_file)
2618     fprintf (dump_file, "\n;; Function %s\n\n",
2619 	     lang_hooks.decl_printable_name (fndecl, 2));
2620 
2621   bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
2622   root = create_nesting_tree (cgn);
2623 
2624   walk_all_functions (convert_nonlocal_reference_stmt,
2625                       convert_nonlocal_reference_op,
2626 		      root);
2627   walk_all_functions (convert_local_reference_stmt,
2628                       convert_local_reference_op,
2629 		      root);
2630   walk_all_functions (convert_nl_goto_reference, NULL, root);
2631   walk_all_functions (convert_nl_goto_receiver, NULL, root);
2632 
2633   convert_all_function_calls (root);
2634   finalize_nesting_tree (root);
2635   unnest_nesting_tree (root);
2636 
2637   free_nesting_tree (root);
2638   bitmap_obstack_release (&nesting_info_bitmap_obstack);
2639 
2640   if (dump_file)
2641     {
2642       dump_end (TDI_nested, dump_file);
2643       dump_file = NULL;
2644     }
2645 }
2646 
2647 #include "gt-tree-nested.h"
2648