1 /* Nested function decomposition for GIMPLE.
2    Copyright (C) 2004-2018 Free Software Foundation, Inc.
3 
4    This file is part of GCC.
5 
6    GCC is free software; you can redistribute it and/or modify
7    it under the terms of the GNU General Public License as published by
8    the Free Software Foundation; either version 3, or (at your option)
9    any later version.
10 
11    GCC is distributed in the hope that it will be useful,
12    but WITHOUT ANY WARRANTY; without even the implied warranty of
13    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14    GNU General Public License for more details.
15 
16    You should have received a copy of the GNU General Public License
17    along with GCC; see the file COPYING3.  If not see
18    <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "cgraph.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "dumpfile.h"
35 #include "tree-inline.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "tree-cfg.h"
40 #include "explow.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 
45 
46 /* The object of this pass is to lower the representation of a set of nested
47    functions in order to expose all of the gory details of the various
48    nonlocal references.  We want to do this sooner rather than later, in
49    order to give us more freedom in emitting all of the functions in question.
50 
51    Back in olden times, when gcc was young, we developed an insanely
52    complicated scheme whereby variables which were referenced nonlocally
53    were forced to live in the stack of the declaring function, and then
54    the nested functions magically discovered where these variables were
55    placed.  In order for this scheme to function properly, it required
56    that the outer function be partially expanded, that we then switch to
57    compiling the inner functions, and that once done with those we switch
58    back to compiling the outer function.  Such delicate ordering requirements
59    make it difficult to do whole translation unit optimizations
60    involving such functions.
61 
62    The implementation here is much more direct.  Everything that can be
63    referenced by an inner function is a member of an explicitly created
64    structure herein called the "nonlocal frame struct".  The incoming
65    static chain for a nested function is a pointer to this struct in
66    the parent.  In this way, we settle on known offsets from a known
67    base, and so are decoupled from the logic that places objects in the
68    function's stack frame.  More importantly, we don't have to wait for
69    that to happen -- since the compilation of the inner function is no
70    longer tied to a real stack frame, the nonlocal frame struct can be
71    allocated anywhere.  Which means that the outer function is now
72    inlinable.
73 
74    Theory of operation here is very simple.  Iterate over all the
75    statements in all the functions (depth first) several times,
76    allocating structures and fields on demand.  In general we want to
77    examine inner functions first, so that we can avoid making
78    unnecessary changes to outer functions.
79 
80    The order of the passes matters a bit, in that later passes will be
81    skipped if it is discovered that the functions don't actually interact
82    at all.  That is, they're nested in the lexical sense but could have
83    been written as independent functions without change.  */
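
/* As a purely illustrative sketch (not code emitted verbatim by this pass;
   the struct and symbol names below are invented), a GNU C nested function
   such as

     int outer (int x)
     {
       int inner (void) { return x + 1; }
       return inner ();
     }

   is conceptually lowered so that the shared variable X lives in the
   nonlocal frame struct and the static chain points at it:

     struct FRAME_outer { int x; };

     static int inner (struct FRAME_outer *chain) { return chain->x + 1; }

     int outer (int x)
     {
       struct FRAME_outer frame;
       frame.x = x;
       return inner (&frame);
     }

   The passes below build the FRAME.* record types and rewrite nonlocal
   references roughly along these lines.  */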
84 
85 
86 struct nesting_info
87 {
88   struct nesting_info *outer;
89   struct nesting_info *inner;
90   struct nesting_info *next;
91 
92   hash_map<tree, tree> *field_map;
93   hash_map<tree, tree> *var_map;
94   hash_set<tree *> *mem_refs;
95   bitmap suppress_expansion;
96 
97   tree context;
98   tree new_local_var_chain;
99   tree debug_var_chain;
100   tree frame_type;
101   tree frame_decl;
102   tree chain_field;
103   tree chain_decl;
104   tree nl_goto_field;
105 
106   bool any_parm_remapped;
107   bool any_tramp_created;
108   bool any_descr_created;
109   char static_chain_added;
110 };
111 
112 
113 /* Iterate over the nesting tree, starting with ROOT, depth first.  */
114 
115 static inline struct nesting_info *
116 iter_nestinfo_start (struct nesting_info *root)
117 {
118   while (root->inner)
119     root = root->inner;
120   return root;
121 }
122 
123 static inline struct nesting_info *
124 iter_nestinfo_next (struct nesting_info *node)
125 {
126   if (node->next)
127     return iter_nestinfo_start (node->next);
128   return node->outer;
129 }
130 
131 #define FOR_EACH_NEST_INFO(I, ROOT) \
132   for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
133 
134 /* Obstack used for the bitmaps in the struct above.  */
135 static struct bitmap_obstack nesting_info_bitmap_obstack;
136 
137 
138 /* We're working in so many different function contexts simultaneously
139    that create_tmp_var is dangerous.  Prevent mishap.  */
140 #define create_tmp_var cant_use_create_tmp_var_here_dummy
141 
142 /* Like create_tmp_var, except record the variable for registration at
143    the given nesting level.  */
144 
145 static tree
146 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
147 {
148   tree tmp_var;
149 
150   /* If the type is of variable size or a type which must be created by the
151      frontend, something is wrong.  Note that we explicitly allow
152      incomplete types here, since we create them ourselves.  */
153   gcc_assert (!TREE_ADDRESSABLE (type));
154   gcc_assert (!TYPE_SIZE_UNIT (type)
155 	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
156 
157   tmp_var = create_tmp_var_raw (type, prefix);
158   DECL_CONTEXT (tmp_var) = info->context;
159   DECL_CHAIN (tmp_var) = info->new_local_var_chain;
160   DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
161   if (TREE_CODE (type) == COMPLEX_TYPE
162       || TREE_CODE (type) == VECTOR_TYPE)
163     DECL_GIMPLE_REG_P (tmp_var) = 1;
164 
165   info->new_local_var_chain = tmp_var;
166 
167   return tmp_var;
168 }
169 
170 /* Take the address of EXP and mark it for addressability as
171    necessary.  */
172 
173 tree
174 build_addr (tree exp)
175 {
176   mark_addressable (exp);
177   return build_fold_addr_expr (exp);
178 }
179 
180 /* Insert FIELD into TYPE, sorted by alignment requirements.  */
181 
182 void
183 insert_field_into_struct (tree type, tree field)
184 {
185   tree *p;
186 
187   DECL_CONTEXT (field) = type;
188 
189   for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
190     if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
191       break;
192 
193   DECL_CHAIN (field) = *p;
194   *p = field;
195 
196   /* Set correct alignment for frame struct type.  */
197   if (TYPE_ALIGN (type) < DECL_ALIGN (field))
198     SET_TYPE_ALIGN (type, DECL_ALIGN (field));
199 }
200 
201 /* Build or return the RECORD_TYPE that describes the frame state that is
202    shared between INFO->CONTEXT and its nested functions.  This record will
203    not be complete until finalize_nesting_tree; up until that point we'll
204    be adding fields as necessary.
205 
206    We also build the DECL that represents this frame in the function.  */
207 
208 static tree
209 get_frame_type (struct nesting_info *info)
210 {
211   tree type = info->frame_type;
212   if (!type)
213     {
214       char *name;
215 
216       type = make_node (RECORD_TYPE);
217 
218       name = concat ("FRAME.",
219 		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
220 		     NULL);
221       TYPE_NAME (type) = get_identifier (name);
222       free (name);
223 
224       info->frame_type = type;
225       info->frame_decl = create_tmp_var_for (info, type, "FRAME");
226       DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
227 
228       /* ??? Always make it addressable for now, since it is meant to
229 	 be pointed to by the static chain pointer.  This pessimizes
230 	 when it turns out that no static chains are needed because
231 	 the nested functions referencing non-local variables are not
232 	 reachable, but the true pessimization is to create the non-
233 	 local frame structure in the first place.  */
234       TREE_ADDRESSABLE (info->frame_decl) = 1;
235     }
236   return type;
237 }
238 
239 /* Return true if DECL should be referenced by pointer in the non-local
240    frame structure.  */
241 
242 static bool
243 use_pointer_in_frame (tree decl)
244 {
245   if (TREE_CODE (decl) == PARM_DECL)
246     {
247       /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
248          sized decls, and inefficient to copy large aggregates.  Don't bother
249          moving anything but scalar variables.  */
250       return AGGREGATE_TYPE_P (TREE_TYPE (decl));
251     }
252   else
253     {
254       /* Variable sized types make things "interesting" in the frame.  */
255       return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
256     }
257 }
258 
259 /* Given DECL, a non-locally accessed variable, find or create a field
260    in the non-local frame structure for the given nesting context.  */
261 
262 static tree
263 lookup_field_for_decl (struct nesting_info *info, tree decl,
264 		       enum insert_option insert)
265 {
266   if (insert == NO_INSERT)
267     {
268       tree *slot = info->field_map->get (decl);
269       return slot ? *slot : NULL_TREE;
270     }
271 
272   tree *slot = &info->field_map->get_or_insert (decl);
273   if (!*slot)
274     {
275       tree field = make_node (FIELD_DECL);
276       DECL_NAME (field) = DECL_NAME (decl);
277 
278       if (use_pointer_in_frame (decl))
279 	{
280 	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
281 	  SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
282 	  DECL_NONADDRESSABLE_P (field) = 1;
283 	}
284       else
285 	{
286           TREE_TYPE (field) = TREE_TYPE (decl);
287           DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
288           SET_DECL_ALIGN (field, DECL_ALIGN (decl));
289           DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
290           TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
291           DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
292           TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
293 	}
294 
295       insert_field_into_struct (get_frame_type (info), field);
296       *slot = field;
297 
298       if (TREE_CODE (decl) == PARM_DECL)
299 	info->any_parm_remapped = true;
300     }
301 
302   return *slot;
303 }
304 
305 /* Build or return the variable that holds the static chain within
306    INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */
307 
308 static tree
309 get_chain_decl (struct nesting_info *info)
310 {
311   tree decl = info->chain_decl;
312 
313   if (!decl)
314     {
315       tree type;
316 
317       type = get_frame_type (info->outer);
318       type = build_pointer_type (type);
319 
320       /* Note that this variable is *not* entered into any BIND_EXPR;
321 	 the construction of this variable is handled specially in
322 	 expand_function_start and initialize_inlined_parameters.
323 	 Note also that it's represented as a parameter.  This is
324 	 closer to the truth, since the initial value does come from
325 	 the caller.  */
326       decl = build_decl (DECL_SOURCE_LOCATION (info->context),
327 			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
328       DECL_ARTIFICIAL (decl) = 1;
329       DECL_IGNORED_P (decl) = 1;
330       TREE_USED (decl) = 1;
331       DECL_CONTEXT (decl) = info->context;
332       DECL_ARG_TYPE (decl) = type;
333 
334       /* Tell tree-inline.c that we never write to this variable, so
335 	 it can copy-prop the replacement value immediately.  */
336       TREE_READONLY (decl) = 1;
337 
338       info->chain_decl = decl;
339 
340       if (dump_file
341           && (dump_flags & TDF_DETAILS)
342 	  && !DECL_STATIC_CHAIN (info->context))
343 	fprintf (dump_file, "Setting static-chain for %s\n",
344 		 lang_hooks.decl_printable_name (info->context, 2));
345 
346       DECL_STATIC_CHAIN (info->context) = 1;
347     }
348   return decl;
349 }
350 
351 /* Build or return the field within the non-local frame state that holds
352    the static chain for INFO->CONTEXT.  This is the way to walk back up
353    multiple nesting levels.  */
354 
355 static tree
356 get_chain_field (struct nesting_info *info)
357 {
358   tree field = info->chain_field;
359 
360   if (!field)
361     {
362       tree type = build_pointer_type (get_frame_type (info->outer));
363 
364       field = make_node (FIELD_DECL);
365       DECL_NAME (field) = get_identifier ("__chain");
366       TREE_TYPE (field) = type;
367       SET_DECL_ALIGN (field, TYPE_ALIGN (type));
368       DECL_NONADDRESSABLE_P (field) = 1;
369 
370       insert_field_into_struct (get_frame_type (info), field);
371 
372       info->chain_field = field;
373 
374       if (dump_file
375           && (dump_flags & TDF_DETAILS)
376 	  && !DECL_STATIC_CHAIN (info->context))
377 	fprintf (dump_file, "Setting static-chain for %s\n",
378 		 lang_hooks.decl_printable_name (info->context, 2));
379 
380       DECL_STATIC_CHAIN (info->context) = 1;
381     }
382   return field;
383 }
384 
385 /* Initialize a new temporary with the GIMPLE_CALL STMT.  */
386 
387 static tree
388 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
389 		        gcall *call)
390 {
391   tree t;
392 
393   t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
394   gimple_call_set_lhs (call, t);
395   if (! gsi_end_p (*gsi))
396     gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
397   gsi_insert_before (gsi, call, GSI_SAME_STMT);
398 
399   return t;
400 }
401 
402 
403 /* Copy EXP into a temporary.  Allocate the temporary in the context of
404    INFO and insert the initialization statement before GSI.  */
405 
406 static tree
407 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
408 {
409   tree t;
410   gimple *stmt;
411 
412   t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
413   stmt = gimple_build_assign (t, exp);
414   if (! gsi_end_p (*gsi))
415     gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
416   gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
417 
418   return t;
419 }
420 
421 
422 /* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */
423 
424 static tree
425 gsi_gimplify_val (struct nesting_info *info, tree exp,
426 		  gimple_stmt_iterator *gsi)
427 {
428   if (is_gimple_val (exp))
429     return exp;
430   else
431     return init_tmp_var (info, exp, gsi);
432 }
433 
434 /* Similarly, but copy from the temporary and insert the statement
435    after the iterator.  */
436 
437 static tree
438 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
439 {
440   tree t;
441   gimple *stmt;
442 
443   t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
444   stmt = gimple_build_assign (exp, t);
445   if (! gsi_end_p (*gsi))
446     gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
447   gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
448 
449   return t;
450 }
451 
452 /* Build or return the type used to represent a nested function trampoline.  */
453 
454 static GTY(()) tree trampoline_type;
455 
456 static tree
457 get_trampoline_type (struct nesting_info *info)
458 {
459   unsigned align, size;
460   tree t;
461 
462   if (trampoline_type)
463     return trampoline_type;
464 
465   align = TRAMPOLINE_ALIGNMENT;
466   size = TRAMPOLINE_SIZE;
467 
468   /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
469      then allocate extra space so that we can do dynamic alignment.  */
470   if (align > STACK_BOUNDARY)
471     {
472       size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
473       align = STACK_BOUNDARY;
474     }
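
  /* For example (illustrative numbers only): with TRAMPOLINE_ALIGNMENT of
     128 bits (16 bytes) and STACK_BOUNDARY of 64 bits (8 bytes), the
     adjustment above adds (16 - 1) & -8 == 8 extra bytes, enough slack to
     realign the buffer dynamically at run time.  */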
475 
476   t = build_index_type (size_int (size - 1));
477   t = build_array_type (char_type_node, t);
478   t = build_decl (DECL_SOURCE_LOCATION (info->context),
479 		  FIELD_DECL, get_identifier ("__data"), t);
480   SET_DECL_ALIGN (t, align);
481   DECL_USER_ALIGN (t) = 1;
482 
483   trampoline_type = make_node (RECORD_TYPE);
484   TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
485   TYPE_FIELDS (trampoline_type) = t;
486   layout_type (trampoline_type);
487   DECL_CONTEXT (t) = trampoline_type;
488 
489   return trampoline_type;
490 }
491 
492 /* Build or return the type used to represent a nested function descriptor.  */
493 
494 static GTY(()) tree descriptor_type;
495 
496 static tree
497 get_descriptor_type (struct nesting_info *info)
498 {
499   /* The base alignment is that of a function.  */
500   const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
501   tree t;
502 
503   if (descriptor_type)
504     return descriptor_type;
505 
506   t = build_index_type (integer_one_node);
507   t = build_array_type (ptr_type_node, t);
508   t = build_decl (DECL_SOURCE_LOCATION (info->context),
509 		  FIELD_DECL, get_identifier ("__data"), t);
510   SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
511   DECL_USER_ALIGN (t) = 1;
512 
513   descriptor_type = make_node (RECORD_TYPE);
514   TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
515   TYPE_FIELDS (descriptor_type) = t;
516   layout_type (descriptor_type);
517   DECL_CONTEXT (t) = descriptor_type;
518 
519   return descriptor_type;
520 }
521 
522 /* Given DECL, a nested function, find or create an element in the
523    var map for this function.  */
524 
525 static tree
526 lookup_element_for_decl (struct nesting_info *info, tree decl,
527 			 enum insert_option insert)
528 {
529   if (insert == NO_INSERT)
530     {
531       tree *slot = info->var_map->get (decl);
532       return slot ? *slot : NULL_TREE;
533     }
534 
535   tree *slot = &info->var_map->get_or_insert (decl);
536   if (!*slot)
537     *slot = build_tree_list (NULL_TREE, NULL_TREE);
538 
539   return (tree) *slot;
540 }
541 
542 /* Given DECL, a nested function, create a field in the non-local
543    frame structure for this function.  */
544 
545 static tree
546 create_field_for_decl (struct nesting_info *info, tree decl, tree type)
547 {
548   tree field = make_node (FIELD_DECL);
549   DECL_NAME (field) = DECL_NAME (decl);
550   TREE_TYPE (field) = type;
551   TREE_ADDRESSABLE (field) = 1;
552   insert_field_into_struct (get_frame_type (info), field);
553   return field;
554 }
555 
556 /* Given DECL, a nested function, find or create a field in the non-local
557    frame structure for a trampoline for this function.  */
558 
559 static tree
560 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
561 		       enum insert_option insert)
562 {
563   tree elt, field;
564 
565   elt = lookup_element_for_decl (info, decl, insert);
566   if (!elt)
567     return NULL_TREE;
568 
569   field = TREE_PURPOSE (elt);
570 
571   if (!field && insert == INSERT)
572     {
573       field = create_field_for_decl (info, decl, get_trampoline_type (info));
574       TREE_PURPOSE (elt) = field;
575       info->any_tramp_created = true;
576     }
577 
578   return field;
579 }
580 
581 /* Given DECL, a nested function, find or create a field in the non-local
582    frame structure for a descriptor for this function.  */
583 
584 static tree
585 lookup_descr_for_decl (struct nesting_info *info, tree decl,
586 		       enum insert_option insert)
587 {
588   tree elt, field;
589 
590   elt = lookup_element_for_decl (info, decl, insert);
591   if (!elt)
592     return NULL_TREE;
593 
594   field = TREE_VALUE (elt);
595 
596   if (!field && insert == INSERT)
597     {
598       field = create_field_for_decl (info, decl, get_descriptor_type (info));
599       TREE_VALUE (elt) = field;
600       info->any_descr_created = true;
601     }
602 
603   return field;
604 }
605 
606 /* Build or return the field within the non-local frame state that holds
607    the non-local goto "jmp_buf".  The buffer itself is maintained by the
608    rtl middle-end as dynamic stack space is allocated.  */
609 
610 static tree
611 get_nl_goto_field (struct nesting_info *info)
612 {
613   tree field = info->nl_goto_field;
614   if (!field)
615     {
616       unsigned size;
617       tree type;
618 
619       /* For __builtin_nonlocal_goto, we need N words.  The first is the
620 	 frame pointer, the rest is for the target's stack pointer save
621 	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
622 	 not the best interface, but it'll do for now.  */
623       if (Pmode == ptr_mode)
624 	type = ptr_type_node;
625       else
626 	type = lang_hooks.types.type_for_mode (Pmode, 1);
627 
628       scalar_int_mode mode
629 	= as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
630       size = GET_MODE_SIZE (mode);
631       size = size / GET_MODE_SIZE (Pmode);
632       size = size + 1;
633 
634       type = build_array_type
635 	(type, build_index_type (size_int (size)));
636 
637       field = make_node (FIELD_DECL);
638       DECL_NAME (field) = get_identifier ("__nl_goto_buf");
639       TREE_TYPE (field) = type;
640       SET_DECL_ALIGN (field, TYPE_ALIGN (type));
641       TREE_ADDRESSABLE (field) = 1;
642 
643       insert_field_into_struct (get_frame_type (info), field);
644 
645       info->nl_goto_field = field;
646     }
647 
648   return field;
649 }
650 
651 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ.  */
652 
653 static void
654 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
655 	   struct nesting_info *info, gimple_seq *pseq)
656 {
657   struct walk_stmt_info wi;
658 
659   memset (&wi, 0, sizeof (wi));
660   wi.info = info;
661   wi.val_only = true;
662   walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
663 }
664 
665 
666 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */
667 
668 static inline void
669 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
670 	       struct nesting_info *info)
671 {
672   gimple_seq body = gimple_body (info->context);
673   walk_body (callback_stmt, callback_op, info, &body);
674   gimple_set_body (info->context, body);
675 }
676 
677 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */
678 
679 static void
680 walk_gimple_omp_for (gomp_for *for_stmt,
681     		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
682     		     struct nesting_info *info)
683 {
684   struct walk_stmt_info wi;
685   gimple_seq seq;
686   tree t;
687   size_t i;
688 
689   walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
690 
691   seq = NULL;
692   memset (&wi, 0, sizeof (wi));
693   wi.info = info;
694   wi.gsi = gsi_last (seq);
695 
696   for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
697     {
698       wi.val_only = false;
699       walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
700 		 &wi, NULL);
701       wi.val_only = true;
702       wi.is_lhs = false;
703       walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
704 		 &wi, NULL);
705 
706       wi.val_only = true;
707       wi.is_lhs = false;
708       walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
709 		 &wi, NULL);
710 
711       t = gimple_omp_for_incr (for_stmt, i);
712       gcc_assert (BINARY_CLASS_P (t));
713       wi.val_only = false;
714       walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
715       wi.val_only = true;
716       wi.is_lhs = false;
717       walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
718     }
719 
720   seq = gsi_seq (wi.gsi);
721   if (!gimple_seq_empty_p (seq))
722     {
723       gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
724       annotate_all_with_location (seq, gimple_location (for_stmt));
725       gimple_seq_add_seq (&pre_body, seq);
726       gimple_omp_for_set_pre_body (for_stmt, pre_body);
727     }
728 }
729 
730 /* Similarly for ROOT and all functions nested underneath, depth first.  */
731 
732 static void
733 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
734 		    struct nesting_info *root)
735 {
736   struct nesting_info *n;
737   FOR_EACH_NEST_INFO (n, root)
738     walk_function (callback_stmt, callback_op, n);
739 }
740 
741 
742 /* We have to check for a fairly pathological case.  The operands of a
743    nested function are to be interpreted in the context of the enclosing
744    function.  So if any are variably-sized, they will get remapped when the
745    enclosing function is inlined.  But that remapping would also have to be
746    done in the types of the PARM_DECLs of the nested function, meaning the
747    argument types of that function will disagree with the arguments in the
748    calls to that function.  So we'd either have to make a copy of the nested
749    function corresponding to each time the enclosing function was inlined or
750    add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
751    function.  The former is not practical.  The latter would still require
752    detecting this case to know when to add the conversions.  So, for now at
753    least, we don't inline such an enclosing function.
754 
755    We have to do that check recursively, so here we return an indication of
756    whether FNDECL has such a nested function.  ORIG_FNDECL is the function
757    we were trying to inline; it is used for checking whether any argument
758    is variably modified by anything in it.
759 
760    It would be better to do this in tree-inline.c so that we could give
761    the appropriate warning for why a function can't be inlined, but that's
762    too late since the nesting structure has already been flattened and
763    adding a flag just to record this fact seems a waste of a flag.  */
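
/* For example (GNU C, purely illustrative; the names F, G, N and X are
   invented for this sketch):

     void f (int n)
     {
       void g (int (*a)[n]) { (*a)[0] = 0; }
       int x[2][n];
       g (x);
     }

   The parameter type of G depends on F's N.  If F were inlined somewhere,
   N would be remapped in the inlined body, but the PARM_DECL type of G
   would still refer to the original N, so the argument type at the call
   to G would no longer agree with G's declared parameter type.  That is
   why such an F is marked DECL_UNINLINABLE below.  */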
764 
765 static bool
766 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
767 {
768   struct cgraph_node *cgn = cgraph_node::get (fndecl);
769   tree arg;
770 
771   for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
772     {
773       for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
774 	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
775 	  return true;
776 
777       if (check_for_nested_with_variably_modified (cgn->decl,
778 						   orig_fndecl))
779 	return true;
780     }
781 
782   return false;
783 }
784 
785 /* Construct our local data structure describing the function nesting
786    tree rooted by CGN.  */
787 
788 static struct nesting_info *
789 create_nesting_tree (struct cgraph_node *cgn)
790 {
791   struct nesting_info *info = XCNEW (struct nesting_info);
792   info->field_map = new hash_map<tree, tree>;
793   info->var_map = new hash_map<tree, tree>;
794   info->mem_refs = new hash_set<tree *>;
795   info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
796   info->context = cgn->decl;
797 
798   for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
799     {
800       struct nesting_info *sub = create_nesting_tree (cgn);
801       sub->outer = info;
802       sub->next = info->inner;
803       info->inner = sub;
804     }
805 
806   /* See the comment above check_for_nested_with_variably_modified for a
807      discussion of why this has to be here.  */
808   if (check_for_nested_with_variably_modified (info->context, info->context))
809     DECL_UNINLINABLE (info->context) = true;
810 
811   return info;
812 }
813 
814 /* Return an expression computing the static chain for TARGET_CONTEXT
815    from INFO->CONTEXT.  Insert any necessary computations before GSI.  */
816 
817 static tree
818 get_static_chain (struct nesting_info *info, tree target_context,
819 		  gimple_stmt_iterator *gsi)
820 {
821   struct nesting_info *i;
822   tree x;
823 
824   if (info->context == target_context)
825     {
826       x = build_addr (info->frame_decl);
827       info->static_chain_added |= 1;
828     }
829   else
830     {
831       x = get_chain_decl (info);
832       info->static_chain_added |= 2;
833 
834       for (i = info->outer; i->context != target_context; i = i->outer)
835 	{
836 	  tree field = get_chain_field (i);
837 
838 	  x = build_simple_mem_ref (x);
839 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
840 	  x = init_tmp_var (info, x, gsi);
841 	}
842     }
843 
844   return x;
845 }
846 
847 
848 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
849    frame as seen from INFO->CONTEXT.  Insert any necessary computations
850    before GSI.  */
851 
852 static tree
853 get_frame_field (struct nesting_info *info, tree target_context,
854 		 tree field, gimple_stmt_iterator *gsi)
855 {
856   struct nesting_info *i;
857   tree x;
858 
859   if (info->context == target_context)
860     {
861       /* Make sure frame_decl gets created.  */
862       (void) get_frame_type (info);
863       x = info->frame_decl;
864       info->static_chain_added |= 1;
865     }
866   else
867     {
868       x = get_chain_decl (info);
869       info->static_chain_added |= 2;
870 
871       for (i = info->outer; i->context != target_context; i = i->outer)
872 	{
873 	  tree field = get_chain_field (i);
874 
875 	  x = build_simple_mem_ref (x);
876 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
877 	  x = init_tmp_var (info, x, gsi);
878 	}
879 
880       x = build_simple_mem_ref (x);
881     }
882 
883   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
884   return x;
885 }
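
/* As a rough sketch (GIMPLE-like pseudocode with an invented temporary),
   fetching field FOO from a frame two nesting levels above INFO->CONTEXT
   via get_frame_field produces approximately

     tmp = CHAIN->__chain;
     ... tmp->FOO ...

   where CHAIN is the static chain PARM_DECL built by get_chain_decl and
   __chain is the frame field built by get_chain_field.  Each additional
   nesting level adds one more load of a __chain field.  */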
886 
887 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
888 
889 /* A subroutine of convert_nonlocal_reference_op.  Create a local variable
890    in the nested function with DECL_VALUE_EXPR set to reference the true
891    variable in the parent function.  This is used both for debug info
892    and in OMP lowering.  */
893 
894 static tree
895 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
896 {
897   tree target_context;
898   struct nesting_info *i;
899   tree x, field, new_decl;
900 
901   tree *slot = &info->var_map->get_or_insert (decl);
902 
903   if (*slot)
904     return *slot;
905 
906   target_context = decl_function_context (decl);
907 
908   /* A copy of the code in get_frame_field, but without the temporaries.  */
909   if (info->context == target_context)
910     {
911       /* Make sure frame_decl gets created.  */
912       (void) get_frame_type (info);
913       x = info->frame_decl;
914       i = info;
915       info->static_chain_added |= 1;
916     }
917   else
918     {
919       x = get_chain_decl (info);
920       info->static_chain_added |= 2;
921       for (i = info->outer; i->context != target_context; i = i->outer)
922 	{
923 	  field = get_chain_field (i);
924 	  x = build_simple_mem_ref (x);
925 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
926 	}
927       x = build_simple_mem_ref (x);
928     }
929 
930   field = lookup_field_for_decl (i, decl, INSERT);
931   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
932   if (use_pointer_in_frame (decl))
933     x = build_simple_mem_ref (x);
934 
935   /* ??? We should be remapping types as well, surely.  */
936   new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
937 			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
938   DECL_CONTEXT (new_decl) = info->context;
939   DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
940   DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
941   TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
942   TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
943   TREE_READONLY (new_decl) = TREE_READONLY (decl);
944   TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
945   DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
946   if ((TREE_CODE (decl) == PARM_DECL
947        || TREE_CODE (decl) == RESULT_DECL
948        || VAR_P (decl))
949       && DECL_BY_REFERENCE (decl))
950     DECL_BY_REFERENCE (new_decl) = 1;
951 
952   SET_DECL_VALUE_EXPR (new_decl, x);
953   DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
954 
955   *slot = new_decl;
956   DECL_CHAIN (new_decl) = info->debug_var_chain;
957   info->debug_var_chain = new_decl;
958 
959   if (!optimize
960       && info->context != target_context
961       && variably_modified_type_p (TREE_TYPE (decl), NULL))
962     note_nonlocal_vla_type (info, TREE_TYPE (decl));
963 
964   return new_decl;
965 }
966 
967 
968 /* Callback for walk_gimple_stmt, rewrite all references to VAR
969    and PARM_DECLs that belong to outer functions.
970 
971    The rewrite will involve some number of structure accesses back up
972    the static chain.  E.g. for a variable FOO up one nesting level it'll
973    be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
974    indirections apply to decls for which use_pointer_in_frame is true.  */
975 
976 static tree
977 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
978 {
979   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
980   struct nesting_info *const info = (struct nesting_info *) wi->info;
981   tree t = *tp;
982 
983   *walk_subtrees = 0;
984   switch (TREE_CODE (t))
985     {
986     case VAR_DECL:
987       /* Non-automatic variables are never processed.  */
988       if (TREE_STATIC (t) || DECL_EXTERNAL (t))
989 	break;
990       /* FALLTHRU */
991 
992     case PARM_DECL:
993       if (decl_function_context (t) != info->context)
994 	{
995 	  tree x;
996 	  wi->changed = true;
997 
998 	  x = get_nonlocal_debug_decl (info, t);
999 	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1000 	    {
1001 	      tree target_context = decl_function_context (t);
1002 	      struct nesting_info *i;
1003 	      for (i = info->outer; i->context != target_context; i = i->outer)
1004 		continue;
1005 	      x = lookup_field_for_decl (i, t, INSERT);
1006 	      x = get_frame_field (info, target_context, x, &wi->gsi);
1007 	      if (use_pointer_in_frame (t))
1008 		{
1009 		  x = init_tmp_var (info, x, &wi->gsi);
1010 		  x = build_simple_mem_ref (x);
1011 		}
1012 	    }
1013 
1014 	  if (wi->val_only)
1015 	    {
1016 	      if (wi->is_lhs)
1017 		x = save_tmp_var (info, x, &wi->gsi);
1018 	      else
1019 		x = init_tmp_var (info, x, &wi->gsi);
1020 	    }
1021 
1022 	  *tp = x;
1023 	}
1024       break;
1025 
1026     case LABEL_DECL:
1027       /* We're taking the address of a label from a parent function, but
1028 	 this is not itself a non-local goto.  Mark the label such that it
1029 	 will not be deleted, much as we would with a label address in
1030 	 static storage.  */
1031       if (decl_function_context (t) != info->context)
1032         FORCED_LABEL (t) = 1;
1033       break;
1034 
1035     case ADDR_EXPR:
1036       {
1037 	bool save_val_only = wi->val_only;
1038 
1039 	wi->val_only = false;
1040 	wi->is_lhs = false;
1041 	wi->changed = false;
1042 	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
1043 	wi->val_only = true;
1044 
1045 	if (wi->changed)
1046 	  {
1047 	    tree save_context;
1048 
1049 	    /* If we changed anything, we might no longer be directly
1050 	       referencing a decl.  */
1051 	    save_context = current_function_decl;
1052 	    current_function_decl = info->context;
1053 	    recompute_tree_invariant_for_addr_expr (t);
1054 	    current_function_decl = save_context;
1055 
1056 	    /* If the callback converted the address argument in a context
1057 	       where we only accept variables (and min_invariant, presumably),
1058 	       then compute the address into a temporary.  */
1059 	    if (save_val_only)
1060 	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1061 				      t, &wi->gsi);
1062 	  }
1063       }
1064       break;
1065 
1066     case REALPART_EXPR:
1067     case IMAGPART_EXPR:
1068     case COMPONENT_REF:
1069     case ARRAY_REF:
1070     case ARRAY_RANGE_REF:
1071     case BIT_FIELD_REF:
1072       /* Go down this entire nest and just look at the final prefix and
1073 	 anything that describes the references.  Otherwise, we lose track
1074 	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
1075       wi->val_only = true;
1076       wi->is_lhs = false;
1077       for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1078 	{
1079 	  if (TREE_CODE (t) == COMPONENT_REF)
1080 	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1081 		       NULL);
1082 	  else if (TREE_CODE (t) == ARRAY_REF
1083 		   || TREE_CODE (t) == ARRAY_RANGE_REF)
1084 	    {
1085 	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1086 			 wi, NULL);
1087 	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1088 			 wi, NULL);
1089 	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1090 			 wi, NULL);
1091 	    }
1092 	}
1093       wi->val_only = false;
1094       walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1095       break;
1096 
1097     case VIEW_CONVERT_EXPR:
1098       /* Just request to look at the subtrees, leaving val_only and lhs
1099 	 untouched.  This might actually be for !val_only + lhs, in which
1100 	 case we don't want to force a replacement by a temporary.  */
1101       *walk_subtrees = 1;
1102       break;
1103 
1104     default:
1105       if (!IS_TYPE_OR_DECL_P (t))
1106 	{
1107 	  *walk_subtrees = 1;
1108           wi->val_only = true;
1109 	  wi->is_lhs = false;
1110 	}
1111       break;
1112     }
1113 
1114   return NULL_TREE;
1115 }
1116 
1117 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1118 					     struct walk_stmt_info *);
1119 
1120 /* Helper for convert_nonlocal_reference_stmt: rewrite all references to VAR
1121    and PARM_DECLs that belong to outer functions.  */
1122 
1123 static bool
1124 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1125 {
1126   struct nesting_info *const info = (struct nesting_info *) wi->info;
1127   bool need_chain = false, need_stmts = false;
1128   tree clause, decl;
1129   int dummy;
1130   bitmap new_suppress;
1131 
1132   new_suppress = BITMAP_GGC_ALLOC ();
1133   bitmap_copy (new_suppress, info->suppress_expansion);
1134 
1135   for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1136     {
1137       switch (OMP_CLAUSE_CODE (clause))
1138 	{
1139 	case OMP_CLAUSE_REDUCTION:
1140 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1141 	    need_stmts = true;
1142 	  goto do_decl_clause;
1143 
1144 	case OMP_CLAUSE_LASTPRIVATE:
1145 	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1146 	    need_stmts = true;
1147 	  goto do_decl_clause;
1148 
1149 	case OMP_CLAUSE_LINEAR:
1150 	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1151 	    need_stmts = true;
1152 	  wi->val_only = true;
1153 	  wi->is_lhs = false;
1154 	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1155 					 &dummy, wi);
1156 	  goto do_decl_clause;
1157 
1158 	case OMP_CLAUSE_PRIVATE:
1159 	case OMP_CLAUSE_FIRSTPRIVATE:
1160 	case OMP_CLAUSE_COPYPRIVATE:
1161 	case OMP_CLAUSE_SHARED:
1162 	case OMP_CLAUSE_TO_DECLARE:
1163 	case OMP_CLAUSE_LINK:
1164 	case OMP_CLAUSE_USE_DEVICE_PTR:
1165 	case OMP_CLAUSE_IS_DEVICE_PTR:
1166 	do_decl_clause:
1167 	  decl = OMP_CLAUSE_DECL (clause);
1168 	  if (VAR_P (decl)
1169 	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1170 	    break;
1171 	  if (decl_function_context (decl) != info->context)
1172 	    {
1173 	      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1174 		OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1175 	      bitmap_set_bit (new_suppress, DECL_UID (decl));
1176 	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1177 	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1178 		need_chain = true;
1179 	    }
1180 	  break;
1181 
1182 	case OMP_CLAUSE_SCHEDULE:
1183 	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1184 	    break;
1185 	  /* FALLTHRU */
1186 	case OMP_CLAUSE_FINAL:
1187 	case OMP_CLAUSE_IF:
1188 	case OMP_CLAUSE_NUM_THREADS:
1189 	case OMP_CLAUSE_DEPEND:
1190 	case OMP_CLAUSE_DEVICE:
1191 	case OMP_CLAUSE_NUM_TEAMS:
1192 	case OMP_CLAUSE_THREAD_LIMIT:
1193 	case OMP_CLAUSE_SAFELEN:
1194 	case OMP_CLAUSE_SIMDLEN:
1195 	case OMP_CLAUSE_PRIORITY:
1196 	case OMP_CLAUSE_GRAINSIZE:
1197 	case OMP_CLAUSE_NUM_TASKS:
1198 	case OMP_CLAUSE_HINT:
1199 	case OMP_CLAUSE_NUM_GANGS:
1200 	case OMP_CLAUSE_NUM_WORKERS:
1201 	case OMP_CLAUSE_VECTOR_LENGTH:
1202 	case OMP_CLAUSE_GANG:
1203 	case OMP_CLAUSE_WORKER:
1204 	case OMP_CLAUSE_VECTOR:
1205 	case OMP_CLAUSE_ASYNC:
1206 	case OMP_CLAUSE_WAIT:
1207 	  /* Several OpenACC clauses have optional arguments.  Check if they
1208 	     are present.  */
1209 	  if (OMP_CLAUSE_OPERAND (clause, 0))
1210 	    {
1211 	      wi->val_only = true;
1212 	      wi->is_lhs = false;
1213 	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1214 					     &dummy, wi);
1215 	    }
1216 
1217 	  /* The gang clause accepts two arguments.  */
1218 	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1219 	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1220 	    {
1221 		wi->val_only = true;
1222 		wi->is_lhs = false;
1223 		convert_nonlocal_reference_op
1224 		  (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1225 	    }
1226 	  break;
1227 
1228 	case OMP_CLAUSE_DIST_SCHEDULE:
1229 	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1230 	    {
1231 	      wi->val_only = true;
1232 	      wi->is_lhs = false;
1233 	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1234 					     &dummy, wi);
1235 	    }
1236 	  break;
1237 
1238 	case OMP_CLAUSE_MAP:
1239 	case OMP_CLAUSE_TO:
1240 	case OMP_CLAUSE_FROM:
1241 	  if (OMP_CLAUSE_SIZE (clause))
1242 	    {
1243 	      wi->val_only = true;
1244 	      wi->is_lhs = false;
1245 	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1246 					     &dummy, wi);
1247 	    }
1248 	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
1249 	    goto do_decl_clause;
1250 	  wi->val_only = true;
1251 	  wi->is_lhs = false;
1252 	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1253 		     wi, NULL);
1254 	  break;
1255 
1256 	case OMP_CLAUSE_ALIGNED:
1257 	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1258 	    {
1259 	      wi->val_only = true;
1260 	      wi->is_lhs = false;
1261 	      convert_nonlocal_reference_op
1262 		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1263 	    }
1264 	  /* Like do_decl_clause, but don't add any suppression.  */
1265 	  decl = OMP_CLAUSE_DECL (clause);
1266 	  if (VAR_P (decl)
1267 	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1268 	    break;
1269 	  if (decl_function_context (decl) != info->context)
1270 	    {
1271 	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1272 	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1273 		need_chain = true;
1274 	    }
1275 	  break;
1276 
1277 	case OMP_CLAUSE_NOWAIT:
1278 	case OMP_CLAUSE_ORDERED:
1279 	case OMP_CLAUSE_DEFAULT:
1280 	case OMP_CLAUSE_COPYIN:
1281 	case OMP_CLAUSE_COLLAPSE:
1282 	case OMP_CLAUSE_TILE:
1283 	case OMP_CLAUSE_UNTIED:
1284 	case OMP_CLAUSE_MERGEABLE:
1285 	case OMP_CLAUSE_PROC_BIND:
1286 	case OMP_CLAUSE_NOGROUP:
1287 	case OMP_CLAUSE_THREADS:
1288 	case OMP_CLAUSE_SIMD:
1289 	case OMP_CLAUSE_DEFAULTMAP:
1290 	case OMP_CLAUSE_SEQ:
1291 	case OMP_CLAUSE_INDEPENDENT:
1292 	case OMP_CLAUSE_AUTO:
1293 	  break;
1294 
1295 	  /* The following clause belongs to the OpenACC cache directive, which
1296 	     is discarded during gimplification.  */
1297 	case OMP_CLAUSE__CACHE_:
1298 	  /* The following clauses are only allowed in the OpenMP declare simd
1299 	     directive, so not seen here.  */
1300 	case OMP_CLAUSE_UNIFORM:
1301 	case OMP_CLAUSE_INBRANCH:
1302 	case OMP_CLAUSE_NOTINBRANCH:
1303 	  /* The following clauses are only allowed on OpenMP cancel and
1304 	     cancellation point directives, which at this point have already
1305 	     been lowered into a function call.  */
1306 	case OMP_CLAUSE_FOR:
1307 	case OMP_CLAUSE_PARALLEL:
1308 	case OMP_CLAUSE_SECTIONS:
1309 	case OMP_CLAUSE_TASKGROUP:
1310 	  /* The following clauses are only added during OMP lowering; nested
1311 	     function decomposition happens before that.  */
1312 	case OMP_CLAUSE__LOOPTEMP_:
1313 	case OMP_CLAUSE__SIMDUID_:
1314 	case OMP_CLAUSE__GRIDDIM_:
1315 	  /* Anything else.  */
1316 	default:
1317 	  gcc_unreachable ();
1318 	}
1319     }
1320 
1321   info->suppress_expansion = new_suppress;
1322 
1323   if (need_stmts)
1324     for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1325       switch (OMP_CLAUSE_CODE (clause))
1326 	{
1327 	case OMP_CLAUSE_REDUCTION:
1328 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1329 	    {
1330 	      tree old_context
1331 		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1332 	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1333 		= info->context;
1334 	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1335 		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1336 		  = info->context;
1337 	      walk_body (convert_nonlocal_reference_stmt,
1338 			 convert_nonlocal_reference_op, info,
1339 			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1340 	      walk_body (convert_nonlocal_reference_stmt,
1341 			 convert_nonlocal_reference_op, info,
1342 			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1343 	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1344 		= old_context;
1345 	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1346 		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1347 		  = old_context;
1348 	    }
1349 	  break;
1350 
1351 	case OMP_CLAUSE_LASTPRIVATE:
1352 	  walk_body (convert_nonlocal_reference_stmt,
1353 		     convert_nonlocal_reference_op, info,
1354 		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1355 	  break;
1356 
1357 	case OMP_CLAUSE_LINEAR:
1358 	  walk_body (convert_nonlocal_reference_stmt,
1359 		     convert_nonlocal_reference_op, info,
1360 		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1361 	  break;
1362 
1363 	default:
1364 	  break;
1365 	}
1366 
1367   return need_chain;
1368 }
1369 
1370 /* Create nonlocal debug decls for nonlocal VLA array bounds.  */
1371 
1372 static void
1373 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1374 {
1375   while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1376     type = TREE_TYPE (type);
1377 
1378   if (TYPE_NAME (type)
1379       && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1380       && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1381     type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1382 
1383   while (POINTER_TYPE_P (type)
1384 	 || TREE_CODE (type) == VECTOR_TYPE
1385 	 || TREE_CODE (type) == FUNCTION_TYPE
1386 	 || TREE_CODE (type) == METHOD_TYPE)
1387     type = TREE_TYPE (type);
1388 
1389   if (TREE_CODE (type) == ARRAY_TYPE)
1390     {
1391       tree domain, t;
1392 
1393       note_nonlocal_vla_type (info, TREE_TYPE (type));
1394       domain = TYPE_DOMAIN (type);
1395       if (domain)
1396 	{
1397 	  t = TYPE_MIN_VALUE (domain);
1398 	  if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1399 	      && decl_function_context (t) != info->context)
1400 	    get_nonlocal_debug_decl (info, t);
1401 	  t = TYPE_MAX_VALUE (domain);
1402 	  if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1403 	      && decl_function_context (t) != info->context)
1404 	    get_nonlocal_debug_decl (info, t);
1405 	}
1406     }
1407 }
1408 
1409 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1410    in BLOCK.  */
1411 
1412 static void
1413 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1414 {
1415   tree var;
1416 
1417   for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1418     if (VAR_P (var)
1419 	&& variably_modified_type_p (TREE_TYPE (var), NULL)
1420 	&& DECL_HAS_VALUE_EXPR_P (var)
1421 	&& decl_function_context (var) != info->context)
1422       note_nonlocal_vla_type (info, TREE_TYPE (var));
1423 }
1424 
1425 /* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
1426    PARM_DECLs that belong to outer functions.  This handles statements
1427    that are not handled via the standard recursion done in
1428    walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
1429    convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
1430    operands of STMT have been handled by this function.  */
1431 
1432 static tree
1433 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1434 				 struct walk_stmt_info *wi)
1435 {
1436   struct nesting_info *info = (struct nesting_info *) wi->info;
1437   tree save_local_var_chain;
1438   bitmap save_suppress;
1439   gimple *stmt = gsi_stmt (*gsi);
1440 
1441   switch (gimple_code (stmt))
1442     {
1443     case GIMPLE_GOTO:
1444       /* Don't walk non-local gotos for now.  */
1445       if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1446 	{
1447 	  wi->val_only = true;
1448 	  wi->is_lhs = false;
1449 	  *handled_ops_p = false;
1450 	  return NULL_TREE;
1451 	}
1452       break;
1453 
1454     case GIMPLE_OMP_PARALLEL:
1455     case GIMPLE_OMP_TASK:
1456       save_suppress = info->suppress_expansion;
1457       if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1458 	                                wi))
1459 	{
1460 	  tree c, decl;
1461 	  decl = get_chain_decl (info);
1462 	  c = build_omp_clause (gimple_location (stmt),
1463 				OMP_CLAUSE_FIRSTPRIVATE);
1464 	  OMP_CLAUSE_DECL (c) = decl;
1465 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1466 	  gimple_omp_taskreg_set_clauses (stmt, c);
1467 	}
1468 
1469       save_local_var_chain = info->new_local_var_chain;
1470       info->new_local_var_chain = NULL;
1471 
1472       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1473 	         info, gimple_omp_body_ptr (stmt));
1474 
1475       if (info->new_local_var_chain)
1476 	declare_vars (info->new_local_var_chain,
1477 	              gimple_seq_first_stmt (gimple_omp_body (stmt)),
1478 		      false);
1479       info->new_local_var_chain = save_local_var_chain;
1480       info->suppress_expansion = save_suppress;
1481       break;
1482 
1483     case GIMPLE_OMP_FOR:
1484       save_suppress = info->suppress_expansion;
1485       convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1486       walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1487 			   convert_nonlocal_reference_stmt,
1488 	  		   convert_nonlocal_reference_op, info);
1489       walk_body (convert_nonlocal_reference_stmt,
1490 	  	 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1491       info->suppress_expansion = save_suppress;
1492       break;
1493 
1494     case GIMPLE_OMP_SECTIONS:
1495       save_suppress = info->suppress_expansion;
1496       convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1497       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1498 	         info, gimple_omp_body_ptr (stmt));
1499       info->suppress_expansion = save_suppress;
1500       break;
1501 
1502     case GIMPLE_OMP_SINGLE:
1503       save_suppress = info->suppress_expansion;
1504       convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1505       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1506 	         info, gimple_omp_body_ptr (stmt));
1507       info->suppress_expansion = save_suppress;
1508       break;
1509 
1510     case GIMPLE_OMP_TARGET:
1511       if (!is_gimple_omp_offloaded (stmt))
1512 	{
1513 	  save_suppress = info->suppress_expansion;
1514 	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1515 					wi);
1516 	  info->suppress_expansion = save_suppress;
1517 	  walk_body (convert_nonlocal_reference_stmt,
1518 		     convert_nonlocal_reference_op, info,
1519 		     gimple_omp_body_ptr (stmt));
1520 	  break;
1521 	}
1522       save_suppress = info->suppress_expansion;
1523       if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1524 					wi))
1525 	{
1526 	  tree c, decl;
1527 	  decl = get_chain_decl (info);
1528 	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1529 	  OMP_CLAUSE_DECL (c) = decl;
1530 	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
1531 	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1532 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1533 	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1534 	}
1535 
1536       save_local_var_chain = info->new_local_var_chain;
1537       info->new_local_var_chain = NULL;
1538 
1539       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1540 		 info, gimple_omp_body_ptr (stmt));
1541 
1542       if (info->new_local_var_chain)
1543 	declare_vars (info->new_local_var_chain,
1544 		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
1545 		      false);
1546       info->new_local_var_chain = save_local_var_chain;
1547       info->suppress_expansion = save_suppress;
1548       break;
1549 
1550     case GIMPLE_OMP_TEAMS:
1551       save_suppress = info->suppress_expansion;
1552       convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1553       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1554 		 info, gimple_omp_body_ptr (stmt));
1555       info->suppress_expansion = save_suppress;
1556       break;
1557 
1558     case GIMPLE_OMP_SECTION:
1559     case GIMPLE_OMP_MASTER:
1560     case GIMPLE_OMP_TASKGROUP:
1561     case GIMPLE_OMP_ORDERED:
1562       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1563 	         info, gimple_omp_body_ptr (stmt));
1564       break;
1565 
1566     case GIMPLE_BIND:
1567       {
1568       gbind *bind_stmt = as_a <gbind *> (stmt);
1569       if (!optimize && gimple_bind_block (bind_stmt))
1570 	note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));
1571 
1572       for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
1573 	if (TREE_CODE (var) == NAMELIST_DECL)
1574 	  {
1575 	    /* Adjust decls mentioned in NAMELIST_DECL.  */
1576 	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1577 	    tree decl;
1578 	    unsigned int i;
1579 
1580 	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1581 	      {
1582 		if (VAR_P (decl)
1583 		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1584 		  continue;
1585 		if (decl_function_context (decl) != info->context)
1586 		  CONSTRUCTOR_ELT (decls, i)->value
1587 		    = get_nonlocal_debug_decl (info, decl);
1588 	      }
1589 	  }
1590 
1591       *handled_ops_p = false;
1592       return NULL_TREE;
1593       }
1594     case GIMPLE_COND:
1595       wi->val_only = true;
1596       wi->is_lhs = false;
1597       *handled_ops_p = false;
1598       return NULL_TREE;
1599 
1600     default:
1601       /* For every other statement that we are not interested in
1602 	 handling here, let the walker traverse the operands.  */
1603       *handled_ops_p = false;
1604       return NULL_TREE;
1605     }
1606 
1607   /* We have handled all of STMT's operands, no need to traverse them.  */
1608   *handled_ops_p = true;
1609   return NULL_TREE;
1610 }
1611 
1612 
1613 /* A subroutine of convert_local_reference.  Create a local variable
1614    in the parent function with DECL_VALUE_EXPR set to reference the
1615    field in FRAME.  This is used both for debug info and in OMP
1616    lowering.  */
1617 
1618 static tree
1619 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1620 {
1621   tree x, new_decl;
1622 
1623   tree *slot = &info->var_map->get_or_insert (decl);
1624   if (*slot)
1625     return *slot;
1626 
1627   /* Make sure frame_decl gets created.  */
1628   (void) get_frame_type (info);
1629   x = info->frame_decl;
1630   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1631 
1632   new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1633 			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1634   DECL_CONTEXT (new_decl) = info->context;
1635   DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1636   DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1637   TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1638   TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1639   TREE_READONLY (new_decl) = TREE_READONLY (decl);
1640   TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1641   DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1642   if ((TREE_CODE (decl) == PARM_DECL
1643        || TREE_CODE (decl) == RESULT_DECL
1644        || VAR_P (decl))
1645       && DECL_BY_REFERENCE (decl))
1646     DECL_BY_REFERENCE (new_decl) = 1;
1647 
1648   SET_DECL_VALUE_EXPR (new_decl, x);
1649   DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1650   *slot = new_decl;
1651 
1652   DECL_CHAIN (new_decl) = info->debug_var_chain;
1653   info->debug_var_chain = new_decl;
1654 
1655   /* Do not emit debug info twice.  */
1656   DECL_IGNORED_P (decl) = 1;
1657 
1658   return new_decl;
1659 }
1660 
1661 
1662 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1663    and PARM_DECLs that were referenced by inner nested functions.
1664    The rewrite will be a structure reference to the local frame variable.  */
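/* Informal before/after sketch (decl names invented): a parent-function use
   of a local that was moved into the frame,

     x = v;

   is rewritten here into roughly

     x = FRAME.<N>.v;

   possibly via a temporary when the use site only accepts a register value
   (wi->val_only) or is a store (wi->is_lhs).  */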
1665 
1666 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1667 
1668 static tree
1669 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1670 {
1671   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1672   struct nesting_info *const info = (struct nesting_info *) wi->info;
1673   tree t = *tp, field, x;
1674   bool save_val_only;
1675 
1676   *walk_subtrees = 0;
1677   switch (TREE_CODE (t))
1678     {
1679     case VAR_DECL:
1680       /* Non-automatic variables are never processed.  */
1681       if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1682 	break;
1683       /* FALLTHRU */
1684 
1685     case PARM_DECL:
1686       if (decl_function_context (t) == info->context)
1687 	{
1688 	  /* If we copied a pointer to the frame, then the original decl
1689 	     is used unchanged in the parent function.  */
1690 	  if (use_pointer_in_frame (t))
1691 	    break;
1692 
1693 	  /* No need to transform anything if no child references the
1694 	     variable.  */
1695 	  field = lookup_field_for_decl (info, t, NO_INSERT);
1696 	  if (!field)
1697 	    break;
1698 	  wi->changed = true;
1699 
1700 	  x = get_local_debug_decl (info, t, field);
1701 	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1702 	    x = get_frame_field (info, info->context, field, &wi->gsi);
1703 
1704 	  if (wi->val_only)
1705 	    {
1706 	      if (wi->is_lhs)
1707 		x = save_tmp_var (info, x, &wi->gsi);
1708 	      else
1709 		x = init_tmp_var (info, x, &wi->gsi);
1710 	    }
1711 
1712 	  *tp = x;
1713 	}
1714       break;
1715 
1716     case ADDR_EXPR:
1717       save_val_only = wi->val_only;
1718       wi->val_only = false;
1719       wi->is_lhs = false;
1720       wi->changed = false;
1721       walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1722       wi->val_only = save_val_only;
1723 
1724       /* If we converted anything ... */
1725       if (wi->changed)
1726 	{
1727 	  tree save_context;
1728 
1729 	  /* Then the frame decl is now addressable.  */
1730 	  TREE_ADDRESSABLE (info->frame_decl) = 1;
1731 
1732 	  save_context = current_function_decl;
1733 	  current_function_decl = info->context;
1734 	  recompute_tree_invariant_for_addr_expr (t);
1735 	  current_function_decl = save_context;
1736 
1737 	  /* If we are in a context where we only accept values, then
1738 	     compute the address into a temporary.  */
1739 	  if (save_val_only)
1740 	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1741 				    t, &wi->gsi);
1742 	}
1743       break;
1744 
1745     case REALPART_EXPR:
1746     case IMAGPART_EXPR:
1747     case COMPONENT_REF:
1748     case ARRAY_REF:
1749     case ARRAY_RANGE_REF:
1750     case BIT_FIELD_REF:
1751       /* Go down this entire nest and just look at the final prefix and
1752 	 anything that describes the references.  Otherwise, we lose track
1753 	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
1754       save_val_only = wi->val_only;
1755       wi->val_only = true;
1756       wi->is_lhs = false;
1757       for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1758 	{
1759 	  if (TREE_CODE (t) == COMPONENT_REF)
1760 	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1761 		       NULL);
1762 	  else if (TREE_CODE (t) == ARRAY_REF
1763 		   || TREE_CODE (t) == ARRAY_RANGE_REF)
1764 	    {
1765 	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1766 			 NULL);
1767 	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1768 			 NULL);
1769 	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1770 			 NULL);
1771 	    }
1772 	}
1773       wi->val_only = false;
1774       walk_tree (tp, convert_local_reference_op, wi, NULL);
1775       wi->val_only = save_val_only;
1776       break;
1777 
1778     case MEM_REF:
1779       save_val_only = wi->val_only;
1780       wi->val_only = true;
1781       wi->is_lhs = false;
1782       walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
1783 		 wi, NULL);
1784       /* We need to re-fold the MEM_REF, as component references that are
1785 	 part of an ADDR_EXPR address are not allowed.  But we cannot
1786 	 fold here, as the chain record type is not yet finalized.  */
1787       if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1788 	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
1789 	info->mem_refs->add (tp);
1790       wi->val_only = save_val_only;
1791       break;
1792 
1793     case VIEW_CONVERT_EXPR:
1794       /* Just request to look at the subtrees, leaving val_only and lhs
1795 	 untouched.  This might actually be for !val_only + lhs, in which
1796 	 case we don't want to force a replacement by a temporary.  */
1797       *walk_subtrees = 1;
1798       break;
1799 
1800     default:
1801       if (!IS_TYPE_OR_DECL_P (t))
1802 	{
1803 	  *walk_subtrees = 1;
1804 	  wi->val_only = true;
1805 	  wi->is_lhs = false;
1806 	}
1807       break;
1808     }
1809 
1810   return NULL_TREE;
1811 }
1812 
1813 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1814 					  struct walk_stmt_info *);
1815 
1816 /* Helper for convert_local_reference.  Convert all the references in
1817    the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.  */
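/* Informal example (directive shown in source form purely for illustration):
   for "#pragma omp parallel shared(v)" in the parent, where V now lives in
   the FRAME record, the clause's decl is replaced by the frame-based debug
   decl, further expansion of V inside the region is suppressed, and the
   return value tells the caller to add a shared(FRAME.<N>) clause so the
   frame object itself becomes available in the region.  */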
1818 
1819 static bool
1820 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1821 {
1822   struct nesting_info *const info = (struct nesting_info *) wi->info;
1823   bool need_frame = false, need_stmts = false;
1824   tree clause, decl;
1825   int dummy;
1826   bitmap new_suppress;
1827 
1828   new_suppress = BITMAP_GGC_ALLOC ();
1829   bitmap_copy (new_suppress, info->suppress_expansion);
1830 
1831   for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1832     {
1833       switch (OMP_CLAUSE_CODE (clause))
1834 	{
1835 	case OMP_CLAUSE_REDUCTION:
1836 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1837 	    need_stmts = true;
1838 	  goto do_decl_clause;
1839 
1840 	case OMP_CLAUSE_LASTPRIVATE:
1841 	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1842 	    need_stmts = true;
1843 	  goto do_decl_clause;
1844 
1845 	case OMP_CLAUSE_LINEAR:
1846 	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1847 	    need_stmts = true;
1848 	  wi->val_only = true;
1849 	  wi->is_lhs = false;
1850 	  convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1851 				      wi);
1852 	  goto do_decl_clause;
1853 
1854 	case OMP_CLAUSE_PRIVATE:
1855 	case OMP_CLAUSE_FIRSTPRIVATE:
1856 	case OMP_CLAUSE_COPYPRIVATE:
1857 	case OMP_CLAUSE_SHARED:
1858 	case OMP_CLAUSE_TO_DECLARE:
1859 	case OMP_CLAUSE_LINK:
1860 	case OMP_CLAUSE_USE_DEVICE_PTR:
1861 	case OMP_CLAUSE_IS_DEVICE_PTR:
1862 	do_decl_clause:
1863 	  decl = OMP_CLAUSE_DECL (clause);
1864 	  if (VAR_P (decl)
1865 	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1866 	    break;
1867 	  if (decl_function_context (decl) == info->context
1868 	      && !use_pointer_in_frame (decl))
1869 	    {
1870 	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1871 	      if (field)
1872 		{
1873 		  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1874 		    OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1875 		  bitmap_set_bit (new_suppress, DECL_UID (decl));
1876 		  OMP_CLAUSE_DECL (clause)
1877 		    = get_local_debug_decl (info, decl, field);
1878 		  need_frame = true;
1879 		}
1880 	    }
1881 	  break;
1882 
1883 	case OMP_CLAUSE_SCHEDULE:
1884 	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1885 	    break;
1886 	  /* FALLTHRU */
1887 	case OMP_CLAUSE_FINAL:
1888 	case OMP_CLAUSE_IF:
1889 	case OMP_CLAUSE_NUM_THREADS:
1890 	case OMP_CLAUSE_DEPEND:
1891 	case OMP_CLAUSE_DEVICE:
1892 	case OMP_CLAUSE_NUM_TEAMS:
1893 	case OMP_CLAUSE_THREAD_LIMIT:
1894 	case OMP_CLAUSE_SAFELEN:
1895 	case OMP_CLAUSE_SIMDLEN:
1896 	case OMP_CLAUSE_PRIORITY:
1897 	case OMP_CLAUSE_GRAINSIZE:
1898 	case OMP_CLAUSE_NUM_TASKS:
1899 	case OMP_CLAUSE_HINT:
1900 	case OMP_CLAUSE_NUM_GANGS:
1901 	case OMP_CLAUSE_NUM_WORKERS:
1902 	case OMP_CLAUSE_VECTOR_LENGTH:
1903 	case OMP_CLAUSE_GANG:
1904 	case OMP_CLAUSE_WORKER:
1905 	case OMP_CLAUSE_VECTOR:
1906 	case OMP_CLAUSE_ASYNC:
1907 	case OMP_CLAUSE_WAIT:
1908 	  /* Several OpenACC clauses have optional arguments.  Check if they
1909 	     are present.  */
1910 	  if (OMP_CLAUSE_OPERAND (clause, 0))
1911 	    {
1912 	      wi->val_only = true;
1913 	      wi->is_lhs = false;
1914 	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1915 					  &dummy, wi);
1916 	    }
1917 
1918 	  /* The gang clause accepts two arguments.  */
1919 	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1920 	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1921 	    {
1922 		wi->val_only = true;
1923 		wi->is_lhs = false;
1924 		convert_local_reference_op
1925 		  (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1926 	    }
1927 	  break;
1928 
1929 	case OMP_CLAUSE_DIST_SCHEDULE:
1930 	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1931 	    {
1932 	      wi->val_only = true;
1933 	      wi->is_lhs = false;
1934 	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1935 					  &dummy, wi);
1936 	    }
1937 	  break;
1938 
1939 	case OMP_CLAUSE_MAP:
1940 	case OMP_CLAUSE_TO:
1941 	case OMP_CLAUSE_FROM:
1942 	  if (OMP_CLAUSE_SIZE (clause))
1943 	    {
1944 	      wi->val_only = true;
1945 	      wi->is_lhs = false;
1946 	      convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
1947 					  &dummy, wi);
1948 	    }
1949 	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
1950 	    goto do_decl_clause;
1951 	  wi->val_only = true;
1952 	  wi->is_lhs = false;
1953 	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
1954 		     wi, NULL);
1955 	  break;
1956 
1957 	case OMP_CLAUSE_ALIGNED:
1958 	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1959 	    {
1960 	      wi->val_only = true;
1961 	      wi->is_lhs = false;
1962 	      convert_local_reference_op
1963 		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1964 	    }
1965 	  /* Like do_decl_clause, but don't add any suppression.  */
1966 	  decl = OMP_CLAUSE_DECL (clause);
1967 	  if (VAR_P (decl)
1968 	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1969 	    break;
1970 	  if (decl_function_context (decl) == info->context
1971 	      && !use_pointer_in_frame (decl))
1972 	    {
1973 	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1974 	      if (field)
1975 		{
1976 		  OMP_CLAUSE_DECL (clause)
1977 		    = get_local_debug_decl (info, decl, field);
1978 		  need_frame = true;
1979 		}
1980 	    }
1981 	  break;
1982 
1983 	case OMP_CLAUSE_NOWAIT:
1984 	case OMP_CLAUSE_ORDERED:
1985 	case OMP_CLAUSE_DEFAULT:
1986 	case OMP_CLAUSE_COPYIN:
1987 	case OMP_CLAUSE_COLLAPSE:
1988 	case OMP_CLAUSE_TILE:
1989 	case OMP_CLAUSE_UNTIED:
1990 	case OMP_CLAUSE_MERGEABLE:
1991 	case OMP_CLAUSE_PROC_BIND:
1992 	case OMP_CLAUSE_NOGROUP:
1993 	case OMP_CLAUSE_THREADS:
1994 	case OMP_CLAUSE_SIMD:
1995 	case OMP_CLAUSE_DEFAULTMAP:
1996 	case OMP_CLAUSE_SEQ:
1997 	case OMP_CLAUSE_INDEPENDENT:
1998 	case OMP_CLAUSE_AUTO:
1999 	  break;
2000 
2001 	  /* The following clause belongs to the OpenACC cache directive, which
2002 	     is discarded during gimplification.  */
2003 	case OMP_CLAUSE__CACHE_:
2004 	  /* The following clauses are only allowed in the OpenMP declare simd
2005 	     directive, so not seen here.  */
2006 	case OMP_CLAUSE_UNIFORM:
2007 	case OMP_CLAUSE_INBRANCH:
2008 	case OMP_CLAUSE_NOTINBRANCH:
2009 	  /* The following clauses are only allowed on OpenMP cancel and
2010 	     cancellation point directives, which at this point have already
2011 	     been lowered into a function call.  */
2012 	case OMP_CLAUSE_FOR:
2013 	case OMP_CLAUSE_PARALLEL:
2014 	case OMP_CLAUSE_SECTIONS:
2015 	case OMP_CLAUSE_TASKGROUP:
2016 	  /* The following clauses are only added during OMP lowering; nested
2017 	     function decomposition happens before that.  */
2018 	case OMP_CLAUSE__LOOPTEMP_:
2019 	case OMP_CLAUSE__SIMDUID_:
2020 	case OMP_CLAUSE__GRIDDIM_:
2021 	  /* Anything else.  */
2022 	default:
2023 	  gcc_unreachable ();
2024 	}
2025     }
2026 
2027   info->suppress_expansion = new_suppress;
2028 
2029   if (need_stmts)
2030     for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2031       switch (OMP_CLAUSE_CODE (clause))
2032 	{
2033 	case OMP_CLAUSE_REDUCTION:
2034 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2035 	    {
2036 	      tree old_context
2037 		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
2038 	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2039 		= info->context;
2040 	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2041 		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2042 		  = info->context;
2043 	      walk_body (convert_local_reference_stmt,
2044 			 convert_local_reference_op, info,
2045 			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
2046 	      walk_body (convert_local_reference_stmt,
2047 			 convert_local_reference_op, info,
2048 			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
2049 	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2050 		= old_context;
2051 	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2052 		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2053 		  = old_context;
2054 	    }
2055 	  break;
2056 
2057 	case OMP_CLAUSE_LASTPRIVATE:
2058 	  walk_body (convert_local_reference_stmt,
2059 		     convert_local_reference_op, info,
2060 		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
2061 	  break;
2062 
2063 	case OMP_CLAUSE_LINEAR:
2064 	  walk_body (convert_local_reference_stmt,
2065 		     convert_local_reference_op, info,
2066 		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
2067 	  break;
2068 
2069 	default:
2070 	  break;
2071 	}
2072 
2073   return need_frame;
2074 }
2075 
2076 
2077 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
2078    and PARM_DECLs that were referenced by inner nested functions.
2079    The rewrite will be a structure reference to the local frame variable.  */
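/* Rough sketch of the OMP cases below (body elided, names illustrative):
   when the body of a region refers to the FRAME object, the region gains
   an extra clause that carries the frame across the boundary, conceptually

     #pragma omp parallel shared(FRAME.<N>)        for parallel/task
     #pragma omp target map(tofrom: FRAME.<N>)     for offloaded target

   so the rewritten references keep working inside the region.  */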
2080 
2081 static tree
2082 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2083 			      struct walk_stmt_info *wi)
2084 {
2085   struct nesting_info *info = (struct nesting_info *) wi->info;
2086   tree save_local_var_chain;
2087   bitmap save_suppress;
2088   char save_static_chain_added;
2089   bool frame_decl_added;
2090   gimple *stmt = gsi_stmt (*gsi);
2091 
2092   switch (gimple_code (stmt))
2093     {
2094     case GIMPLE_OMP_PARALLEL:
2095     case GIMPLE_OMP_TASK:
2096       save_suppress = info->suppress_expansion;
2097       frame_decl_added = false;
2098       if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
2099 	                             wi))
2100 	{
2101 	  tree c = build_omp_clause (gimple_location (stmt),
2102 				     OMP_CLAUSE_SHARED);
2103 	  (void) get_frame_type (info);
2104 	  OMP_CLAUSE_DECL (c) = info->frame_decl;
2105 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2106 	  gimple_omp_taskreg_set_clauses (stmt, c);
2107 	  info->static_chain_added |= 4;
2108 	  frame_decl_added = true;
2109 	}
2110 
2111       save_local_var_chain = info->new_local_var_chain;
2112       save_static_chain_added = info->static_chain_added;
2113       info->new_local_var_chain = NULL;
2114       info->static_chain_added = 0;
2115 
2116       walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2117 	         gimple_omp_body_ptr (stmt));
2118 
2119       if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
2120 	{
2121 	  tree c = build_omp_clause (gimple_location (stmt),
2122 				     OMP_CLAUSE_SHARED);
2123 	  (void) get_frame_type (info);
2124 	  OMP_CLAUSE_DECL (c) = info->frame_decl;
2125 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2126 	  info->static_chain_added |= 4;
2127 	  gimple_omp_taskreg_set_clauses (stmt, c);
2128 	}
2129       if (info->new_local_var_chain)
2130 	declare_vars (info->new_local_var_chain,
2131 		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2132       info->new_local_var_chain = save_local_var_chain;
2133       info->suppress_expansion = save_suppress;
2134       info->static_chain_added |= save_static_chain_added;
2135       break;
2136 
2137     case GIMPLE_OMP_FOR:
2138       save_suppress = info->suppress_expansion;
2139       convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
2140       walk_gimple_omp_for (as_a <gomp_for *> (stmt),
2141 			   convert_local_reference_stmt,
2142 			   convert_local_reference_op, info);
2143       walk_body (convert_local_reference_stmt, convert_local_reference_op,
2144 		 info, gimple_omp_body_ptr (stmt));
2145       info->suppress_expansion = save_suppress;
2146       break;
2147 
2148     case GIMPLE_OMP_SECTIONS:
2149       save_suppress = info->suppress_expansion;
2150       convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
2151       walk_body (convert_local_reference_stmt, convert_local_reference_op,
2152 		 info, gimple_omp_body_ptr (stmt));
2153       info->suppress_expansion = save_suppress;
2154       break;
2155 
2156     case GIMPLE_OMP_SINGLE:
2157       save_suppress = info->suppress_expansion;
2158       convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
2159       walk_body (convert_local_reference_stmt, convert_local_reference_op,
2160 		 info, gimple_omp_body_ptr (stmt));
2161       info->suppress_expansion = save_suppress;
2162       break;
2163 
2164     case GIMPLE_OMP_TARGET:
2165       if (!is_gimple_omp_offloaded (stmt))
2166 	{
2167 	  save_suppress = info->suppress_expansion;
2168 	  convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
2169 	  info->suppress_expansion = save_suppress;
2170 	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
2171 		     info, gimple_omp_body_ptr (stmt));
2172 	  break;
2173 	}
2174       save_suppress = info->suppress_expansion;
2175       frame_decl_added = false;
2176       if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
2177 	{
2178 	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2179 	  (void) get_frame_type (info);
2180 	  OMP_CLAUSE_DECL (c) = info->frame_decl;
2181 	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2182 	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2183 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2184 	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2185 	  info->static_chain_added |= 4;
2186 	  frame_decl_added = true;
2187 	}
2188 
2189       save_local_var_chain = info->new_local_var_chain;
2190       save_static_chain_added = info->static_chain_added;
2191       info->new_local_var_chain = NULL;
2192       info->static_chain_added = 0;
2193 
2194       walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2195 		 gimple_omp_body_ptr (stmt));
2196 
2197       if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
2198 	{
2199 	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2200 	  (void) get_frame_type (info);
2201 	  OMP_CLAUSE_DECL (c) = info->frame_decl;
2202 	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2203 	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2204 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2205 	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2206 	  info->static_chain_added |= 4;
2207 	}
2208 
2209       if (info->new_local_var_chain)
2210 	declare_vars (info->new_local_var_chain,
2211 		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2212       info->new_local_var_chain = save_local_var_chain;
2213       info->suppress_expansion = save_suppress;
2214       info->static_chain_added |= save_static_chain_added;
2215       break;
2216 
2217     case GIMPLE_OMP_TEAMS:
2218       save_suppress = info->suppress_expansion;
2219       convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
2220       walk_body (convert_local_reference_stmt, convert_local_reference_op,
2221 		 info, gimple_omp_body_ptr (stmt));
2222       info->suppress_expansion = save_suppress;
2223       break;
2224 
2225     case GIMPLE_OMP_SECTION:
2226     case GIMPLE_OMP_MASTER:
2227     case GIMPLE_OMP_TASKGROUP:
2228     case GIMPLE_OMP_ORDERED:
2229       walk_body (convert_local_reference_stmt, convert_local_reference_op,
2230 		 info, gimple_omp_body_ptr (stmt));
2231       break;
2232 
2233     case GIMPLE_COND:
2234       wi->val_only = true;
2235       wi->is_lhs = false;
2236       *handled_ops_p = false;
2237       return NULL_TREE;
2238 
2239     case GIMPLE_ASSIGN:
2240       if (gimple_clobber_p (stmt))
2241 	{
2242 	  tree lhs = gimple_assign_lhs (stmt);
2243 	  if (!use_pointer_in_frame (lhs)
2244 	      && lookup_field_for_decl (info, lhs, NO_INSERT))
2245 	    {
2246 	      gsi_replace (gsi, gimple_build_nop (), true);
2247 	      break;
2248 	    }
2249 	}
2250       *handled_ops_p = false;
2251       return NULL_TREE;
2252 
2253     case GIMPLE_BIND:
2254       for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
2255 	   var;
2256 	   var = DECL_CHAIN (var))
2257 	if (TREE_CODE (var) == NAMELIST_DECL)
2258 	  {
2259 	    /* Adjust decls mentioned in NAMELIST_DECL.  */
2260 	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2261 	    tree decl;
2262 	    unsigned int i;
2263 
2264 	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2265 	      {
2266 		if (VAR_P (decl)
2267 		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2268 		  continue;
2269 		if (decl_function_context (decl) == info->context
2270 		    && !use_pointer_in_frame (decl))
2271 		  {
2272 		    tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2273 		    if (field)
2274 		      {
2275 			CONSTRUCTOR_ELT (decls, i)->value
2276 			  = get_local_debug_decl (info, decl, field);
2277 		      }
2278 		  }
2279 	      }
2280 	  }
2281 
2282       *handled_ops_p = false;
2283       return NULL_TREE;
2284 
2285     default:
2286       /* For every other statement that we are not interested in
2287 	 handling here, let the walker traverse the operands.  */
2288       *handled_ops_p = false;
2289       return NULL_TREE;
2290     }
2291 
2292   /* Indicate that we have handled all the operands ourselves.  */
2293   *handled_ops_p = true;
2294   return NULL_TREE;
2295 }
2296 
2297 
2298 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2299    that reference labels from outer functions.  The rewrite will be a
2300    call to __builtin_nonlocal_goto.  */
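/* Informal before/after sketch (label and field names invented): in the
   nested function,

     goto outer_lab;

   becomes roughly

     __builtin_nonlocal_goto (&new_outer_lab, &CHAIN->nl_goto_field);

   where NEW_OUTER_LAB is a fresh DECL_NONLOCAL label that the companion
   pass convert_nl_goto_receiver later installs next to OUTER_LAB in the
   parent function.  */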
2301 
2302 static tree
2303 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2304 			   struct walk_stmt_info *wi)
2305 {
2306   struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2307   tree label, new_label, target_context, x, field;
2308   gcall *call;
2309   gimple *stmt = gsi_stmt (*gsi);
2310 
2311   if (gimple_code (stmt) != GIMPLE_GOTO)
2312     {
2313       *handled_ops_p = false;
2314       return NULL_TREE;
2315     }
2316 
2317   label = gimple_goto_dest (stmt);
2318   if (TREE_CODE (label) != LABEL_DECL)
2319     {
2320       *handled_ops_p = false;
2321       return NULL_TREE;
2322     }
2323 
2324   target_context = decl_function_context (label);
2325   if (target_context == info->context)
2326     {
2327       *handled_ops_p = false;
2328       return NULL_TREE;
2329     }
2330 
2331   for (i = info->outer; target_context != i->context; i = i->outer)
2332     continue;
2333 
2334   /* The original user label may also be used for a normal goto, therefore
2335      we must create a new label that will actually receive the abnormal
2336      control transfer.  This new label will be marked LABEL_NONLOCAL; this
2337      mark will trigger proper behavior in the cfg, as well as cause the
2338      (hairy target-specific) non-local goto receiver code to be generated
2339      when we expand rtl.  Enter this association into var_map so that we
2340      can insert the new label into the IL during a second pass.  */
2341   tree *slot = &i->var_map->get_or_insert (label);
2342   if (*slot == NULL)
2343     {
2344       new_label = create_artificial_label (UNKNOWN_LOCATION);
2345       DECL_NONLOCAL (new_label) = 1;
2346       *slot = new_label;
2347     }
2348   else
2349     new_label = *slot;
2350 
2351   /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
2352   field = get_nl_goto_field (i);
2353   x = get_frame_field (info, target_context, field, gsi);
2354   x = build_addr (x);
2355   x = gsi_gimplify_val (info, x, gsi);
2356   call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2357 			    2, build_addr (new_label), x);
2358   gsi_replace (gsi, call, false);
2359 
2360   /* We have handled all of STMT's operands, no need to keep going.  */
2361   *handled_ops_p = true;
2362   return NULL_TREE;
2363 }
2364 
2365 
2366 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2367    are referenced via nonlocal goto from a nested function.  The rewrite
2368    will involve installing a newly generated DECL_NONLOCAL label, and
2369    (potentially) a branch around the rtl gunk that is assumed to be
2370    attached to such a label.  */
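/* Informal sketch (names invented): a parent-function label

     outer_lab:  ...

   that is the target of a nonlocal goto is rewritten into roughly

     goto outer_lab;
     new_outer_lab:
     outer_lab:  ...

   where NEW_OUTER_LAB is the DECL_NONLOCAL label recorded in var_map and
   the leading goto is emitted only when the previous statement may fall
   through.  */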
2371 
2372 static tree
2373 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2374 			  struct walk_stmt_info *wi)
2375 {
2376   struct nesting_info *const info = (struct nesting_info *) wi->info;
2377   tree label, new_label;
2378   gimple_stmt_iterator tmp_gsi;
2379   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2380 
2381   if (!stmt)
2382     {
2383       *handled_ops_p = false;
2384       return NULL_TREE;
2385     }
2386 
2387   label = gimple_label_label (stmt);
2388 
2389   tree *slot = info->var_map->get (label);
2390   if (!slot)
2391     {
2392       *handled_ops_p = false;
2393       return NULL_TREE;
2394     }
2395 
2396   /* If there's any possibility that the previous statement falls through,
2397      then we must branch around the new non-local label.  */
2398   tmp_gsi = wi->gsi;
2399   gsi_prev (&tmp_gsi);
2400   if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2401     {
2402       gimple *stmt = gimple_build_goto (label);
2403       gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2404     }
2405 
2406   new_label = (tree) *slot;
2407   stmt = gimple_build_label (new_label);
2408   gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2409 
2410   *handled_ops_p = true;
2411   return NULL_TREE;
2412 }
2413 
2414 
2415 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2416    of nested functions that require the use of trampolines.  The rewrite
2417    will involve a reference to a trampoline generated for the occasion.  */
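/* A hedged note on placement (function names invented): given

     int outer (void) { int inner (void) { ... }  ...  &inner  ...  }

   the trampoline (or descriptor) field lives in the FRAME record of
   INNER's immediate parent, OUTER, even if the address is taken in a more
   deeply nested function; the ADDR_EXPR case below then fetches that
   field's address through the static chain.  */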
2418 
2419 static tree
2420 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2421 {
2422   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2423   struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2424   tree t = *tp, decl, target_context, x, builtin;
2425   bool descr;
2426   gcall *call;
2427 
2428   *walk_subtrees = 0;
2429   switch (TREE_CODE (t))
2430     {
2431     case ADDR_EXPR:
2432       /* Build
2433 	   T.1 = &CHAIN->tramp;
2434 	   T.2 = __builtin_adjust_trampoline (T.1);
2435 	   T.3 = (func_type)T.2;
2436       */
2437 
2438       decl = TREE_OPERAND (t, 0);
2439       if (TREE_CODE (decl) != FUNCTION_DECL)
2440 	break;
2441 
2442       /* Only need to process nested functions.  */
2443       target_context = decl_function_context (decl);
2444       if (!target_context)
2445 	break;
2446 
2447       /* If the nested function doesn't use a static chain, then
2448 	 it doesn't need a trampoline.  */
2449       if (!DECL_STATIC_CHAIN (decl))
2450 	break;
2451 
2452       /* If we don't want a trampoline, then don't build one.  */
2453       if (TREE_NO_TRAMPOLINE (t))
2454 	break;
2455 
2456       /* Look up the immediate parent of the callee, as that's where
2457 	 we need to insert the trampoline.  */
2458       for (i = info; i->context != target_context; i = i->outer)
2459 	continue;
2460 
2461       /* Decide whether to generate a descriptor or a trampoline. */
2462       descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;
2463 
2464       if (descr)
2465 	x = lookup_descr_for_decl (i, decl, INSERT);
2466       else
2467 	x = lookup_tramp_for_decl (i, decl, INSERT);
2468 
2469       /* Compute the address of the field holding the trampoline.  */
2470       x = get_frame_field (info, target_context, x, &wi->gsi);
2471       x = build_addr (x);
2472       x = gsi_gimplify_val (info, x, &wi->gsi);
2473 
2474       /* Do machine-specific ugliness.  Normally this will involve
2475 	 computing extra alignment, but it can really be anything.  */
2476       if (descr)
2477 	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
2478       else
2479 	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2480       call = gimple_build_call (builtin, 1, x);
2481       x = init_tmp_var_with_call (info, &wi->gsi, call);
2482 
2483       /* Cast back to the proper function type.  */
2484       x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2485       x = init_tmp_var (info, x, &wi->gsi);
2486 
2487       *tp = x;
2488       break;
2489 
2490     default:
2491       if (!IS_TYPE_OR_DECL_P (t))
2492 	*walk_subtrees = 1;
2493       break;
2494     }
2495 
2496   return NULL_TREE;
2497 }
2498 
2499 
2500 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2501    to addresses of nested functions that require the use of
2502    trampolines.  The rewrite will involve a reference to a trampoline
2503    generated for the occasion.  */
2504 
2505 static tree
2506 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2507 			      struct walk_stmt_info *wi)
2508 {
2509   struct nesting_info *info = (struct nesting_info *) wi->info;
2510   gimple *stmt = gsi_stmt (*gsi);
2511 
2512   switch (gimple_code (stmt))
2513     {
2514     case GIMPLE_CALL:
2515       {
2516 	/* Only walk call arguments, lest we generate trampolines for
2517 	   direct calls.  */
2518 	unsigned long i, nargs = gimple_call_num_args (stmt);
2519 	for (i = 0; i < nargs; i++)
2520 	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2521 		     wi, NULL);
2522 	break;
2523       }
2524 
2525     case GIMPLE_OMP_TARGET:
2526       if (!is_gimple_omp_offloaded (stmt))
2527 	{
2528 	  *handled_ops_p = false;
2529 	  return NULL_TREE;
2530 	}
2531       /* FALLTHRU */
2532     case GIMPLE_OMP_PARALLEL:
2533     case GIMPLE_OMP_TASK:
2534       {
2535 	tree save_local_var_chain = info->new_local_var_chain;
2536         walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2537 	info->new_local_var_chain = NULL;
2538 	char save_static_chain_added = info->static_chain_added;
2539 	info->static_chain_added = 0;
2540         walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2541 		   info, gimple_omp_body_ptr (stmt));
2542 	if (info->new_local_var_chain)
2543 	  declare_vars (info->new_local_var_chain,
2544 			gimple_seq_first_stmt (gimple_omp_body (stmt)),
2545 			false);
2546 	for (int i = 0; i < 2; i++)
2547 	  {
2548 	    tree c, decl;
2549 	    if ((info->static_chain_added & (1 << i)) == 0)
2550 	      continue;
2551 	    decl = i ? get_chain_decl (info) : info->frame_decl;
2552 	    /* Don't add CHAIN.* or FRAME.* twice.  */
2553 	    for (c = gimple_omp_taskreg_clauses (stmt);
2554 		 c;
2555 		 c = OMP_CLAUSE_CHAIN (c))
2556 	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2557 		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2558 		  && OMP_CLAUSE_DECL (c) == decl)
2559 		break;
2560 	    if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
2561 	      {
2562 		c = build_omp_clause (gimple_location (stmt),
2563 				      i ? OMP_CLAUSE_FIRSTPRIVATE
2564 				      : OMP_CLAUSE_SHARED);
2565 		OMP_CLAUSE_DECL (c) = decl;
2566 		OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2567 		gimple_omp_taskreg_set_clauses (stmt, c);
2568 	      }
2569 	    else if (c == NULL)
2570 	      {
2571 		c = build_omp_clause (gimple_location (stmt),
2572 				      OMP_CLAUSE_MAP);
2573 		OMP_CLAUSE_DECL (c) = decl;
2574 		OMP_CLAUSE_SET_MAP_KIND (c,
2575 					 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2576 		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2577 		OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2578 		gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2579 					       c);
2580 	      }
2581 	  }
2582 	info->new_local_var_chain = save_local_var_chain;
2583 	info->static_chain_added |= save_static_chain_added;
2584       }
2585       break;
2586 
2587     default:
2588       *handled_ops_p = false;
2589       return NULL_TREE;
2590     }
2591 
2592   *handled_ops_p = true;
2593   return NULL_TREE;
2594 }
2595 
2596 
2597 
2598 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2599    that reference nested functions to make sure that the static chain
2600    is set up properly for the call.  */
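/* Informal sketch (names invented): a call to a chain-using nested function,

     inner (x);

   is annotated here as roughly

     inner (x) [static chain: &FRAME.<N>];

   where the chain value comes from get_static_chain and, for deeper
   nestings, may itself be loaded through one or more CHAIN fields.  */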
2601 
2602 static tree
2603 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2604                      struct walk_stmt_info *wi)
2605 {
2606   struct nesting_info *const info = (struct nesting_info *) wi->info;
2607   tree decl, target_context;
2608   char save_static_chain_added;
2609   int i;
2610   gimple *stmt = gsi_stmt (*gsi);
2611 
2612   switch (gimple_code (stmt))
2613     {
2614     case GIMPLE_CALL:
2615       if (gimple_call_chain (stmt))
2616 	break;
2617       decl = gimple_call_fndecl (stmt);
2618       if (!decl)
2619 	break;
2620       target_context = decl_function_context (decl);
2621       if (target_context && DECL_STATIC_CHAIN (decl))
2622 	{
2623 	  gimple_call_set_chain (as_a <gcall *> (stmt),
2624 				 get_static_chain (info, target_context,
2625 						   &wi->gsi));
2626 	  info->static_chain_added |= (1 << (info->context != target_context));
2627 	}
2628       break;
2629 
2630     case GIMPLE_OMP_PARALLEL:
2631     case GIMPLE_OMP_TASK:
2632       save_static_chain_added = info->static_chain_added;
2633       info->static_chain_added = 0;
2634       walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2635       for (i = 0; i < 2; i++)
2636 	{
2637 	  tree c, decl;
2638 	  if ((info->static_chain_added & (1 << i)) == 0)
2639 	    continue;
2640 	  decl = i ? get_chain_decl (info) : info->frame_decl;
2641 	  /* Don't add CHAIN.* or FRAME.* twice.  */
2642 	  for (c = gimple_omp_taskreg_clauses (stmt);
2643 	       c;
2644 	       c = OMP_CLAUSE_CHAIN (c))
2645 	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2646 		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2647 		&& OMP_CLAUSE_DECL (c) == decl)
2648 	      break;
2649 	  if (c == NULL)
2650 	    {
2651 	      c = build_omp_clause (gimple_location (stmt),
2652 				    i ? OMP_CLAUSE_FIRSTPRIVATE
2653 				    : OMP_CLAUSE_SHARED);
2654 	      OMP_CLAUSE_DECL (c) = decl;
2655 	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2656 	      gimple_omp_taskreg_set_clauses (stmt, c);
2657 	    }
2658 	}
2659       info->static_chain_added |= save_static_chain_added;
2660       break;
2661 
2662     case GIMPLE_OMP_TARGET:
2663       if (!is_gimple_omp_offloaded (stmt))
2664 	{
2665 	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2666 	  break;
2667 	}
2668       save_static_chain_added = info->static_chain_added;
2669       info->static_chain_added = 0;
2670       walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2671       for (i = 0; i < 2; i++)
2672 	{
2673 	  tree c, decl;
2674 	  if ((info->static_chain_added & (1 << i)) == 0)
2675 	    continue;
2676 	  decl = i ? get_chain_decl (info) : info->frame_decl;
2677 	  /* Don't add CHAIN.* or FRAME.* twice.  */
2678 	  for (c = gimple_omp_target_clauses (stmt);
2679 	       c;
2680 	       c = OMP_CLAUSE_CHAIN (c))
2681 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
2682 		&& OMP_CLAUSE_DECL (c) == decl)
2683 	      break;
2684 	  if (c == NULL)
2685 	    {
2686 	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2687 	      OMP_CLAUSE_DECL (c) = decl;
2688 	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2689 	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2690 	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2691 	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2692 					     c);
2693 	    }
2694 	}
2695       info->static_chain_added |= save_static_chain_added;
2696       break;
2697 
2698     case GIMPLE_OMP_FOR:
2699       walk_body (convert_gimple_call, NULL, info,
2700 	  	 gimple_omp_for_pre_body_ptr (stmt));
2701       /* FALLTHRU */
2702     case GIMPLE_OMP_SECTIONS:
2703     case GIMPLE_OMP_SECTION:
2704     case GIMPLE_OMP_SINGLE:
2705     case GIMPLE_OMP_TEAMS:
2706     case GIMPLE_OMP_MASTER:
2707     case GIMPLE_OMP_TASKGROUP:
2708     case GIMPLE_OMP_ORDERED:
2709     case GIMPLE_OMP_CRITICAL:
2710       walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2711       break;
2712 
2713     default:
2714       /* Keep looking for other operands.  */
2715       *handled_ops_p = false;
2716       return NULL_TREE;
2717     }
2718 
2719   *handled_ops_p = true;
2720   return NULL_TREE;
2721 }
2722 
2723 /* Walk the nesting tree starting with ROOT.  Convert all trampolines and
2724    call expressions.  At the same time, determine if a nested function
2725    actually uses its static chain; if not, remember that.  */
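/* A hedged note on why a fixed point is needed below: rewriting a call or
   a trampoline reference may have to thread the static chain through
   intermediate nesting levels (get_chain_decl), turning DECL_STATIC_CHAIN
   back on for functions we optimistically cleared it for; calls to those
   functions then need fixing up in turn, so we re-walk until the number of
   chain-using functions stops changing.  */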
2726 
2727 static void
2728 convert_all_function_calls (struct nesting_info *root)
2729 {
2730   unsigned int chain_count = 0, old_chain_count, iter_count;
2731   struct nesting_info *n;
2732 
2733   /* First, optimistically clear static_chain for all decls that haven't
2734      used the static chain already for variable access.  But always create
2735      it if not optimizing.  This makes it possible to reconstruct the static
2736      nesting tree at run time and thus to resolve up-level references from
2737      within the debugger.  */
2738   FOR_EACH_NEST_INFO (n, root)
2739     {
2740       tree decl = n->context;
2741       if (!optimize)
2742 	{
2743 	  if (n->inner)
2744 	    (void) get_frame_type (n);
2745 	  if (n->outer)
2746 	    (void) get_chain_decl (n);
2747 	}
2748       else if (!n->outer || (!n->chain_decl && !n->chain_field))
2749 	{
2750 	  DECL_STATIC_CHAIN (decl) = 0;
2751 	  if (dump_file && (dump_flags & TDF_DETAILS))
2752 	    fprintf (dump_file, "Guessing no static-chain for %s\n",
2753 		     lang_hooks.decl_printable_name (decl, 2));
2754 	}
2755       else
2756 	DECL_STATIC_CHAIN (decl) = 1;
2757       chain_count += DECL_STATIC_CHAIN (decl);
2758     }
2759 
2760   /* Walk the functions and perform transformations.  Note that these
2761      transformations can induce new uses of the static chain, which in turn
2762      require re-examining all users of the decl.  */
2763   /* ??? It would make sense to try to use the call graph to speed this up,
2764      but the call graph hasn't really been built yet.  Even if it had, we
2765      would still need to iterate in this loop since address-of references
2766      wouldn't show up in the callgraph anyway.  */
2767   iter_count = 0;
2768   do
2769     {
2770       old_chain_count = chain_count;
2771       chain_count = 0;
2772       iter_count++;
2773 
2774       if (dump_file && (dump_flags & TDF_DETAILS))
2775 	fputc ('\n', dump_file);
2776 
2777       FOR_EACH_NEST_INFO (n, root)
2778 	{
2779 	  tree decl = n->context;
2780 	  walk_function (convert_tramp_reference_stmt,
2781 			 convert_tramp_reference_op, n);
2782 	  walk_function (convert_gimple_call, NULL, n);
2783 	  chain_count += DECL_STATIC_CHAIN (decl);
2784 	}
2785     }
2786   while (chain_count != old_chain_count);
2787 
2788   if (dump_file && (dump_flags & TDF_DETAILS))
2789     fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2790 	     iter_count);
2791 }
2792 
2793 struct nesting_copy_body_data
2794 {
2795   copy_body_data cb;
2796   struct nesting_info *root;
2797 };
2798 
2799 /* A helper subroutine for debug_var_chain type remapping.  */
2800 
2801 static tree
2802 nesting_copy_decl (tree decl, copy_body_data *id)
2803 {
2804   struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2805   tree *slot = nid->root->var_map->get (decl);
2806 
2807   if (slot)
2808     return (tree) *slot;
2809 
2810   if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2811     {
2812       tree new_decl = copy_decl_no_change (decl, id);
2813       DECL_ORIGINAL_TYPE (new_decl)
2814 	= remap_type (DECL_ORIGINAL_TYPE (decl), id);
2815       return new_decl;
2816     }
2817 
2818   if (VAR_P (decl)
2819       || TREE_CODE (decl) == PARM_DECL
2820       || TREE_CODE (decl) == RESULT_DECL)
2821     return decl;
2822 
2823   return copy_decl_no_change (decl, id);
2824 }
2825 
2826 /* A helper function for remap_vla_decls.  See if *TP contains
2827    some remapped variables.  */
2828 
2829 static tree
2830 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2831 {
2832   struct nesting_info *root = (struct nesting_info *) data;
2833   tree t = *tp;
2834 
2835   if (DECL_P (t))
2836     {
2837       *walk_subtrees = 0;
2838       tree *slot = root->var_map->get (t);
2839 
2840       if (slot)
2841 	return *slot;
2842     }
2843   return NULL;
2844 }
2845 
2846 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2847    involved.  */
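/* Informal example (names invented): a VLA "char a[n]" is gimplified into
   a pointer with DECL_VALUE_EXPR (a) == *a.N, and the bounds of its type
   refer to saved size decls.  If any of those helper decls were remapped
   by this pass (i.e. appear in var_map), the value expression and the
   variably modified type are remapped here as well so that debug info
   follows them.  */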
2848 
2849 static void
2850 remap_vla_decls (tree block, struct nesting_info *root)
2851 {
2852   tree var, subblock, val, type;
2853   struct nesting_copy_body_data id;
2854 
2855   for (subblock = BLOCK_SUBBLOCKS (block);
2856        subblock;
2857        subblock = BLOCK_CHAIN (subblock))
2858     remap_vla_decls (subblock, root);
2859 
2860   for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
2861     if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
2862       {
2863 	val = DECL_VALUE_EXPR (var);
2864 	type = TREE_TYPE (var);
2865 
2866 	if (!(TREE_CODE (val) == INDIRECT_REF
2867 	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2868 	      && variably_modified_type_p (type, NULL)))
2869 	  continue;
2870 
2871 	if (root->var_map->get (TREE_OPERAND (val, 0))
2872 	    || walk_tree (&type, contains_remapped_vars, root, NULL))
2873 	  break;
2874       }
2875 
2876   if (var == NULL_TREE)
2877     return;
2878 
2879   memset (&id, 0, sizeof (id));
2880   id.cb.copy_decl = nesting_copy_decl;
2881   id.cb.decl_map = new hash_map<tree, tree>;
2882   id.root = root;
2883 
2884   for (; var; var = DECL_CHAIN (var))
2885     if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
2886       {
2887 	struct nesting_info *i;
2888 	tree newt, context;
2889 
2890 	val = DECL_VALUE_EXPR (var);
2891 	type = TREE_TYPE (var);
2892 
2893 	if (!(TREE_CODE (val) == INDIRECT_REF
2894 	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2895 	      && variably_modified_type_p (type, NULL)))
2896 	  continue;
2897 
2898 	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
2899 	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
2900 	  continue;
2901 
2902 	context = decl_function_context (var);
2903 	for (i = root; i; i = i->outer)
2904 	  if (i->context == context)
2905 	    break;
2906 
2907 	if (i == NULL)
2908 	  continue;
2909 
2910 	/* Fully expand value expressions, to avoid debug variables that are
2911 	   referenced only from them and could be swept away during GC.  */
2912         if (slot)
2913 	  {
2914 	    tree t = (tree) *slot;
2915 	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
2916 	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
2917 	  }
2918 
2919 	id.cb.src_fn = i->context;
2920 	id.cb.dst_fn = i->context;
2921 	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2922 
2923 	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
2924 	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2925 	  {
2926 	    newt = TREE_TYPE (newt);
2927 	    type = TREE_TYPE (type);
2928 	  }
2929 	if (TYPE_NAME (newt)
2930 	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2931 	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2932 	    && newt != type
2933 	    && TYPE_NAME (newt) == TYPE_NAME (type))
2934 	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2935 
2936 	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2937 	if (val != DECL_VALUE_EXPR (var))
2938 	  SET_DECL_VALUE_EXPR (var, val);
2939       }
2940 
2941   delete id.cb.decl_map;
2942 }
2943 
2944 /* Fold the MEM_REF *E.  */
2945 bool
2946 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2947 {
2948   tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2949   *ref_p = fold (*ref_p);
2950   return true;
2951 }
2952 
2953 /* Given DECL, a nested function, build an initialization call for FIELD,
2954    the trampoline or descriptor for DECL, using FUNC as the function.  */
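/* Schematically (field and function names illustrative), the call built
   here is

     FUNC (&FRAME.<N>.tramp_field, &nested_fn, &FRAME.<N>);

   i.e. the address of the field to initialize, the nested function's
   address, and the static chain value to encode into it.  */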
2955 
2956 static gcall *
2957 build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
2958 		      tree func)
2959 {
2960   tree arg1, arg2, arg3, x;
2961 
2962   gcc_assert (DECL_STATIC_CHAIN (decl));
2963   arg3 = build_addr (info->frame_decl);
2964 
2965   arg2 = build_addr (decl);
2966 
2967   x = build3 (COMPONENT_REF, TREE_TYPE (field),
2968 	      info->frame_decl, field, NULL_TREE);
2969   arg1 = build_addr (x);
2970 
2971   return gimple_build_call (func, 3, arg1, arg2, arg3);
2972 }
2973 
2974 /* Do "everything else" to clean up or complete state collected by the various
2975    walking passes -- create a field to hold the frame base address, lay out the
2976    types and decls, generate code to initialize the frame decl, store critical
2977    expressions in the struct function for rtl to find.  */
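/* Schematically, the initialization sequence assembled below looks like
   (all names illustrative, each line only present when applicable):

     FRAME.<N>.FRAME_BASE.PARENT = __builtin_dwarf_cfa (0);
     FRAME.<N>.parm = parm;
     FRAME.<N>.CHAIN = CHAIN.<M>;
     __builtin_init_trampoline (&FRAME.<N>.t, &inner_fn, &FRAME.<N>);

   and is spliced in front of the body of the outermost GIMPLE_BIND.  */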
2978 
2979 static void
2980 finalize_nesting_tree_1 (struct nesting_info *root)
2981 {
2982   gimple_seq stmt_list;
2983   gimple *stmt;
2984   tree context = root->context;
2985   struct function *sf;
2986 
2987   stmt_list = NULL;
2988 
2989   /* If we created a non-local frame type or decl, we need to lay them
2990      out at this time.  */
2991   if (root->frame_type)
2992     {
2993       /* Debugging information needs to compute the frame base address of the
2994 	 parent frame out of the static chain from the nested frame.
2995 
2996 	 The static chain is the address of the FRAME record, so one could
2997 	 imagine it would be possible to compute the frame base address just
2998 	 adding a constant offset to this address.  Unfortunately, this is not
2999 	 possible: if the FRAME object has alignment constraints that are
3000 	 stronger than the stack, then the offset between the frame base and
3001 	 the FRAME object will be dynamic.
3002 
3003 	 What we do instead is to append a field to the FRAME object that holds
3004 	 the frame base address: then debug info just has to fetch this
3005 	 field.  */
3006 
3007       /* Debugging information will refer to the CFA as the frame base
3008 	 address: we will do the same here.  */
3009       const tree frame_addr_fndecl
3010         = builtin_decl_explicit (BUILT_IN_DWARF_CFA);
3011 
3012       /* Create a field in the FRAME record to hold the frame base address for
3013 	 this stack frame.  Since it will be used only by the debugger, put it
3014 	 at the end of the record in order not to shift all other offsets.  */
3015       tree fb_decl = make_node (FIELD_DECL);
3016 
3017       DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
3018       TREE_TYPE (fb_decl) = ptr_type_node;
3019       TREE_ADDRESSABLE (fb_decl) = 1;
3020       DECL_CONTEXT (fb_decl) = root->frame_type;
3021       TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
3022 						fb_decl);
3023 
3024       /* In some cases the frame type will trigger the -Wpadded warning.
3025 	 This is not helpful; suppress it. */
3026       int save_warn_padded = warn_padded;
3027       warn_padded = 0;
3028       layout_type (root->frame_type);
3029       warn_padded = save_warn_padded;
3030       layout_decl (root->frame_decl, 0);
3031 
3032       /* Initialize the frame base address field.  If the builtin we need is
3033 	 not available, set it to NULL so that debugging information does not
3034 	 reference junk.  */
3035       tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
3036 			    root->frame_decl, fb_decl, NULL_TREE);
3037       tree fb_tmp;
3038 
3039       if (frame_addr_fndecl != NULL_TREE)
3040 	{
3041 	  gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
3042 						integer_zero_node);
3043 	  gimple_stmt_iterator gsi = gsi_last (stmt_list);
3044 
3045 	  fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
3046 	}
3047       else
3048 	fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
3049       gimple_seq_add_stmt (&stmt_list,
3050 			   gimple_build_assign (fb_ref, fb_tmp));
3051 
3052       /* Remove root->frame_decl from root->new_local_var_chain, so
3053 	 that we can declare it also in the lexical blocks, which
3054 	 helps ensure virtual regs that end up appearing in its RTL
3055 	 expression get substituted in instantiate_virtual_regs().  */
3056       tree *adjust;
3057       for (adjust = &root->new_local_var_chain;
3058 	   *adjust != root->frame_decl;
3059 	   adjust = &DECL_CHAIN (*adjust))
3060 	gcc_assert (DECL_CHAIN (*adjust));
3061       *adjust = DECL_CHAIN (*adjust);
3062 
3063       DECL_CHAIN (root->frame_decl) = NULL_TREE;
3064       declare_vars (root->frame_decl,
3065 		    gimple_seq_first_stmt (gimple_body (context)), true);
3066     }
3067 
3068   /* If any parameters were referenced non-locally, then we need to
3069      insert a copy.  Likewise, if any variables were referenced by
3070      pointer, we need to initialize the address.  */
3071   if (root->any_parm_remapped)
3072     {
3073       tree p;
3074       for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
3075 	{
3076 	  tree field, x, y;
3077 
3078 	  field = lookup_field_for_decl (root, p, NO_INSERT);
3079 	  if (!field)
3080 	    continue;
3081 
3082 	  if (use_pointer_in_frame (p))
3083 	    x = build_addr (p);
3084 	  else
3085 	    x = p;
3086 
3087 	  /* If the assignment is from a non-register the stmt is
3088 	     not valid gimple.  Make it so by using a temporary instead.  */
3089 	  if (!is_gimple_reg (x)
3090 	      && is_gimple_reg_type (TREE_TYPE (x)))
3091 	    {
3092 	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
3093 	      x = init_tmp_var (root, x, &gsi);
3094 	    }
3095 
3096 	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
3097 		      root->frame_decl, field, NULL_TREE);
3098 	  stmt = gimple_build_assign (y, x);
3099 	  gimple_seq_add_stmt (&stmt_list, stmt);
3100 	}
3101     }
3102 
3103   /* If a chain_field was created, then it needs to be initialized
3104      from chain_decl.  */
3105   if (root->chain_field)
3106     {
3107       tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
3108 		       root->frame_decl, root->chain_field, NULL_TREE);
3109       stmt = gimple_build_assign (x, get_chain_decl (root));
3110       gimple_seq_add_stmt (&stmt_list, stmt);
3111     }
3112 
3113   /* If trampolines were created, then we need to initialize them.  */
3114   if (root->any_tramp_created)
3115     {
3116       struct nesting_info *i;
3117       for (i = root->inner; i ; i = i->next)
3118 	{
3119 	  tree field, x;
3120 
3121 	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
3122 	  if (!field)
3123 	    continue;
3124 
3125 	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
3126 	  stmt = build_init_call_stmt (root, i->context, field, x);
3127 	  gimple_seq_add_stmt (&stmt_list, stmt);
3128 	}
3129     }
3130 
3131   /* If descriptors were created, then we need to initialize them.  */
3132   if (root->any_descr_created)
3133     {
3134       struct nesting_info *i;
3135       for (i = root->inner; i ; i = i->next)
3136 	{
3137 	  tree field, x;
3138 
3139 	  field = lookup_descr_for_decl (root, i->context, NO_INSERT);
3140 	  if (!field)
3141 	    continue;
3142 
3143 	  x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
3144 	  stmt = build_init_call_stmt (root, i->context, field, x);
3145 	  gimple_seq_add_stmt (&stmt_list, stmt);
3146 	}
3147     }
3148 
3149   /* If we created initialization statements, insert them.  */
3150   if (stmt_list)
3151     {
3152       gbind *bind;
3153       annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
3154       bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
3155       gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
3156       gimple_bind_set_body (bind, stmt_list);
3157     }
3158 
3159   /* If a chain_decl was created, then it needs to be registered with
3160      struct function so that it gets initialized from the static chain
3161      register at the beginning of the function.  */
3162   sf = DECL_STRUCT_FUNCTION (root->context);
3163   sf->static_chain_decl = root->chain_decl;
3164 
3165   /* Similarly for the non-local goto save area.  */
3166   if (root->nl_goto_field)
3167     {
3168       sf->nonlocal_goto_save_area
3169 	= get_frame_field (root, context, root->nl_goto_field, NULL);
3170       sf->has_nonlocal_label = 1;
3171     }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}

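/* Run finalize_nesting_tree_1 on each function in the nesting tree
   rooted at ROOT.  */
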
static void
finalize_nesting_tree (struct nesting_info *root)
{
  struct nesting_info *n;
  FOR_EACH_NEST_INFO (n, root)
    finalize_nesting_tree_1 (n);
}

/* Unnest the nodes and pass them to cgraph.  */

static void
unnest_nesting_tree_1 (struct nesting_info *root)
{
  struct cgraph_node *node = cgraph_node::get (root->context);

  /* For nested functions, update the cgraph to reflect unnesting.
     We also delay finalizing these functions until this point.  */
  if (node->origin)
    {
      node->unnest ();
      cgraph_node::finalize_function (root->context, true);
    }
}

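/* Run unnest_nesting_tree_1 on each function in the nesting tree
   rooted at ROOT.  */
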
static void
unnest_nesting_tree (struct nesting_info *root)
{
  struct nesting_info *n;
  FOR_EACH_NEST_INFO (n, root)
    unnest_nesting_tree_1 (n);
}

/* Free the data structures allocated during this pass.  */

static void
free_nesting_tree (struct nesting_info *root)
{
  struct nesting_info *node, *next;

  node = iter_nestinfo_start (root);
  do
    {
      next = iter_nestinfo_next (node);
      delete node->var_map;
      delete node->field_map;
      delete node->mem_refs;
      free (node);
      node = next;
    }
  while (node);
}

/* Gimplify a function and all its nested functions.  */

static void
gimplify_all_functions (struct cgraph_node *root)
{
  struct cgraph_node *iter;
  if (!gimple_body (root->decl))
    gimplify_function_tree (root->decl);
  for (iter = root->nested; iter; iter = iter->next_nested)
    gimplify_all_functions (iter);
}

/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */
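
/* For illustration only (names invented for the example), given

       int f (int x)
       {
	 int g (void) { return x; }
	 return g ();
       }

   this pass leaves the body of g referring to x through a field of an
   explicitly built frame object in f, arranges for the address of that
   frame to reach g as its static chain, and hands g to the cgraph as an
   independent, unnested function.  */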

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node::get (fndecl);
  if (!cgn->nested)
    return;

  gimplify_all_functions (cgn);

  dump_file = dump_begin (TDI_nested, &dump_flags);
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
	     lang_hooks.decl_printable_name (fndecl, 2));
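  /* Illustrative note: dump_file is non-null here when the "nested" dump
     (-fdump-tree-nested) was requested.  */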

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  walk_all_functions (convert_nonlocal_reference_stmt,
		      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
		      convert_local_reference_op,
		      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      dump_file = NULL;
    }
}

#include "gt-tree-nested.h"