1 /* Nested function decomposition for GIMPLE.
2    Copyright (C) 2004-2020 Free Software Foundation, Inc.
3 
4    This file is part of GCC.
5 
6    GCC is free software; you can redistribute it and/or modify
7    it under the terms of the GNU General Public License as published by
8    the Free Software Foundation; either version 3, or (at your option)
9    any later version.
10 
11    GCC is distributed in the hope that it will be useful,
12    but WITHOUT ANY WARRANTY; without even the implied warranty of
13    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14    GNU General Public License for more details.
15 
16    You should have received a copy of the GNU General Public License
17    along with GCC; see the file COPYING3.  If not see
18    <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "cgraph.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "dumpfile.h"
35 #include "tree-inline.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "tree-cfg.h"
40 #include "explow.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 #include "diagnostic.h"
45 
46 
47 /* The object of this pass is to lower the representation of a set of nested
48    functions in order to expose all of the gory details of the various
49    nonlocal references.  We want to do this sooner rather than later, in
50    order to give us more freedom in emitting all of the functions in question.
51 
52    Back in olden times, when gcc was young, we developed an insanely
53    complicated scheme whereby variables which were referenced nonlocally
54    were forced to live in the stack of the declaring function, and then
55    the nested functions magically discovered where these variables were
56    placed.  In order for this scheme to function properly, it required
57    that the outer function be partially expanded, then we switch to
58    compiling the inner function, and once done with those we switch back
59    to compiling the outer function.  Such delicate ordering requirements
60    makes it difficult to do whole translation unit optimizations
61    involving such functions.
62 
63    The implementation here is much more direct.  Everything that can be
64    referenced by an inner function is a member of an explicitly created
65    structure herein called the "nonlocal frame struct".  The incoming
66    static chain for a nested function is a pointer to this struct in
67    the parent.  In this way, we settle on known offsets from a known
68    base, and so are decoupled from the logic that places objects in the
69    function's stack frame.  More importantly, we don't have to wait for
70    that to happen -- since the compilation of the inner function is no
71    longer tied to a real stack frame, the nonlocal frame struct can be
72    allocated anywhere.  Which means that the outer function is now
73    inlinable.
74 
75    Theory of operation here is very simple.  Iterate over all the
76    statements in all the functions (depth first) several times,
77    allocating structures and fields on demand.  In general we want to
78    examine inner functions first, so that we can avoid making changes
79    to outer functions which are unnecessary.
80 
81    The order of the passes matters a bit, in that later passes will be
82    skipped if it is discovered that the functions don't actually interact
83    at all.  That is, they're nested in the lexical sense but could have
84    been written as independent functions without change.  */
85 
86 
/* Per-function record in the nesting tree; the tree mirrors the lexical
   nesting of the functions being lowered.  */

struct nesting_info
{
  /* Links forming the nesting tree: enclosing function, first nested
     function, and next sibling at the same nesting level.  */
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  /* Maps a non-locally referenced DECL of CONTEXT to its FIELD_DECL in
     the frame struct (see lookup_field_for_decl).  */
  hash_map<tree, tree> *field_map;
  /* Maps a nested FUNCTION_DECL to a TREE_LIST whose TREE_PURPOSE is its
     trampoline field and TREE_VALUE its descriptor field (see
     lookup_element_for_decl and its two wrappers).  */
  hash_map<tree, tree> *var_map;
  /* NOTE(review): populated by walkers outside this chunk; appears to
     record already-rewritten memory-reference slots -- confirm.  */
  hash_set<tree *> *mem_refs;
  /* Bitmap allocated on nesting_info_bitmap_obstack; NOTE(review): its
     consumers are outside this chunk -- presumably DECLs whose expansion
     must be suppressed; confirm against the statement walkers.  */
  bitmap suppress_expansion;

  /* The FUNCTION_DECL this record describes.  */
  tree context;
  /* Chain of temporaries made by create_tmp_var_for, still to be
     declared in CONTEXT.  */
  tree new_local_var_chain;
  /* NOTE(review): built later in the file; looks like debug-only
     replacement variables -- confirm against finalize_nesting_tree.  */
  tree debug_var_chain;
  /* RECORD_TYPE of the non-local frame struct and the VAR_DECL
     instantiating it in CONTEXT (see get_frame_type).  */
  tree frame_type;
  tree frame_decl;
  /* Field in FRAME_TYPE holding the parent's static chain pointer
     (see get_chain_field).  */
  tree chain_field;
  /* Parameter-like decl holding the incoming static chain
     (see get_chain_decl).  */
  tree chain_decl;
  /* Field in FRAME_TYPE holding the nonlocal-goto save area
     (see get_nl_goto_field).  */
  tree nl_goto_field;

  /* True if CONTEXT is a thunk rather than a regular function body.  */
  bool thunk_p;
  /* True if some PARM_DECL of CONTEXT was remapped into the frame.  */
  bool any_parm_remapped;
  /* True if a trampoline (resp. descriptor) field was created for some
     function nested in CONTEXT.  */
  bool any_tramp_created;
  bool any_descr_created;
  /* Bit 0: CONTEXT's own frame had its address taken; bit 1: the
     incoming static chain was used (see get_static_chain).  */
  char static_chain_added;
};
113 
114 
115 /* Iterate over the nesting tree, starting with ROOT, depth first.  */
116 
117 static inline struct nesting_info *
iter_nestinfo_start(struct nesting_info * root)118 iter_nestinfo_start (struct nesting_info *root)
119 {
120   while (root->inner)
121     root = root->inner;
122   return root;
123 }
124 
125 static inline struct nesting_info *
iter_nestinfo_next(struct nesting_info * node)126 iter_nestinfo_next (struct nesting_info *node)
127 {
128   if (node->next)
129     return iter_nestinfo_start (node->next);
130   return node->outer;
131 }
132 
/* Iterate I over every nesting_info reachable from ROOT, visiting inner
   functions before the functions that enclose them.  */
#define FOR_EACH_NEST_INFO(I, ROOT) \
  for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))

/* Obstack used for the bitmaps in the struct above.  */
static struct bitmap_obstack nesting_info_bitmap_obstack;
138 
139 
140 /* We're working in so many different function contexts simultaneously,
141    that create_tmp_var is dangerous.  Prevent mishap.  */
142 #define create_tmp_var cant_use_create_tmp_var_here_dummy
143 
144 /* Like create_tmp_var, except record the variable for registration at
145    the given nesting level.  */
146 
147 static tree
create_tmp_var_for(struct nesting_info * info,tree type,const char * prefix)148 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
149 {
150   tree tmp_var;
151 
152   /* If the type is of variable size or a type which must be created by the
153      frontend, something is wrong.  Note that we explicitly allow
154      incomplete types here, since we create them ourselves here.  */
155   gcc_assert (!TREE_ADDRESSABLE (type));
156   gcc_assert (!TYPE_SIZE_UNIT (type)
157 	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
158 
159   tmp_var = create_tmp_var_raw (type, prefix);
160   DECL_CONTEXT (tmp_var) = info->context;
161   DECL_CHAIN (tmp_var) = info->new_local_var_chain;
162   DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
163   if (TREE_CODE (type) == COMPLEX_TYPE
164       || TREE_CODE (type) == VECTOR_TYPE)
165     DECL_GIMPLE_REG_P (tmp_var) = 1;
166 
167   info->new_local_var_chain = tmp_var;
168 
169   return tmp_var;
170 }
171 
172 /* Like build_simple_mem_ref, but set TREE_THIS_NOTRAP on the result.  */
173 
174 static tree
build_simple_mem_ref_notrap(tree ptr)175 build_simple_mem_ref_notrap (tree ptr)
176 {
177   tree t = build_simple_mem_ref (ptr);
178   TREE_THIS_NOTRAP (t) = 1;
179   return t;
180 }
181 
182 /* Take the address of EXP to be used within function CONTEXT.
183    Mark it for addressability as necessary.  */
184 
185 tree
build_addr(tree exp)186 build_addr (tree exp)
187 {
188   mark_addressable (exp);
189   return build_fold_addr_expr (exp);
190 }
191 
192 /* Insert FIELD into TYPE, sorted by alignment requirements.  */
193 
194 void
insert_field_into_struct(tree type,tree field)195 insert_field_into_struct (tree type, tree field)
196 {
197   tree *p;
198 
199   DECL_CONTEXT (field) = type;
200 
201   for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
202     if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
203       break;
204 
205   DECL_CHAIN (field) = *p;
206   *p = field;
207 
208   /* Set correct alignment for frame struct type.  */
209   if (TYPE_ALIGN (type) < DECL_ALIGN (field))
210     SET_TYPE_ALIGN (type, DECL_ALIGN (field));
211 }
212 
213 /* Build or return the RECORD_TYPE that describes the frame state that is
214    shared between INFO->CONTEXT and its nested functions.  This record will
215    not be complete until finalize_nesting_tree; up until that point we'll
216    be adding fields as necessary.
217 
218    We also build the DECL that represents this frame in the function.  */
219 
static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      /* Name the record "FRAME.<function>" so dumps are readable.  */
      name = concat ("FRAME.",
		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
		     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      /* Cache the type before creating the decl, so recursive uses of
	 INFO see a consistent state.  */
      info->frame_type = type;

      /* Do not put info->frame_decl on info->new_local_var_chain,
	 so that we can declare it in the lexical blocks, which
	 makes sure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs.  */
      info->frame_decl = create_tmp_var_raw (type, "FRAME");
      DECL_CONTEXT (info->frame_decl) = info->context;
      DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
      DECL_SEEN_IN_BIND_EXPR_P (info->frame_decl) = 1;

      /* ??? Always make it addressable for now, since it is meant to
	 be pointed to by the static chain pointer.  This pessimizes
	 when it turns out that no static chains are needed because
	 the nested functions referencing non-local variables are not
	 reachable, but the true pessimization is to create the non-
	 local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }

  return type;
}
258 
259 /* Return true if DECL should be referenced by pointer in the non-local frame
260    structure.  */
261 
262 static bool
use_pointer_in_frame(tree decl)263 use_pointer_in_frame (tree decl)
264 {
265   if (TREE_CODE (decl) == PARM_DECL)
266     {
267       /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
268 	 sized DECLs, and inefficient to copy large aggregates.  Don't bother
269 	 moving anything but scalar parameters.  */
270       return AGGREGATE_TYPE_P (TREE_TYPE (decl));
271     }
272   else
273     {
274       /* Variable-sized DECLs can only come from OMP clauses at this point
275 	 since the gimplifier has already turned the regular variables into
276 	 pointers.  Do the same as the gimplifier.  */
277       return !DECL_SIZE (decl) || TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST;
278     }
279 }
280 
281 /* Given DECL, a non-locally accessed variable, find or create a field
282    in the non-local frame structure for the given nesting context.  */
283 
static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  /* DECL must belong to the function INFO describes.  */
  gcc_checking_assert (decl_function_context (decl) == info->context);

  if (insert == NO_INSERT)
    {
      tree *slot = info->field_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->field_map->get_or_insert (decl);
  if (!*slot)
    {
      tree type = get_frame_type (info);
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  /* The frame field only holds a pointer to the object.  */
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  /* The frame field holds the object itself; mirror the
	     relevant attributes of DECL onto the field.  */
          TREE_TYPE (field) = TREE_TYPE (decl);
          DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
          SET_DECL_ALIGN (field, DECL_ALIGN (decl));
          DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
          TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
          DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
          TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);

	  /* Declare the transformation and adjust the original DECL.  For a
	     variable or for a parameter when not optimizing, we make it point
	     to the field in the frame directly.  For a parameter, we don't do
	     it when optimizing because the variable tracking pass will already
	     do the job.  */
	  if (VAR_P (decl) || !optimize)
	    {
	      tree x
		= build3 (COMPONENT_REF, TREE_TYPE (field), info->frame_decl,
			  field, NULL_TREE);

	      /* If the next declaration is a PARM_DECL pointing to the DECL,
		 we need to adjust its VALUE_EXPR directly, since chains of
		 VALUE_EXPRs run afoul of garbage collection.  This occurs
		 in Ada for Out parameters that aren't copied in.  */
	      tree next = DECL_CHAIN (decl);
	      if (next
		  && TREE_CODE (next) == PARM_DECL
		  && DECL_HAS_VALUE_EXPR_P (next)
		  && DECL_VALUE_EXPR (next) == decl)
		SET_DECL_VALUE_EXPR (next, x);

	      SET_DECL_VALUE_EXPR (decl, x);
	      DECL_HAS_VALUE_EXPR_P (decl) = 1;
	    }
	}

      insert_field_into_struct (type, field);
      *slot = field;

      /* Remember that a parameter now lives (also) in the frame.  */
      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return *slot;
}
355 
356 /* Build or return the variable that holds the static chain within
357    INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */
358 
static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  if (!decl)
    {
      tree type;

      /* The chain is a pointer to the enclosing function's frame.  */
      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is more
	 close to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      /* Report only the first time CONTEXT gains a static chain; the
	 DECL_STATIC_CHAIN check must therefore precede setting it.  */
      if (dump_file
          && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}
401 
402 /* Build or return the field within the non-local frame state that holds
403    the static chain for INFO->CONTEXT.  This is the way to walk back up
404    multiple nesting levels.  */
405 
static tree
get_chain_field (struct nesting_info *info)
{
  tree field = info->chain_field;

  if (!field)
    {
      /* The field holds a pointer to the enclosing function's frame.  */
      tree type = build_pointer_type (get_frame_type (info->outer));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__chain");
      TREE_TYPE (field) = type;
      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
      DECL_NONADDRESSABLE_P (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->chain_field = field;

      /* Report only the first time CONTEXT gains a static chain; the
	 DECL_STATIC_CHAIN check must therefore precede setting it.  */
      if (dump_file
          && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return field;
}
435 
436 /* Initialize a new temporary with the GIMPLE_CALL STMT.  */
437 
438 static tree
init_tmp_var_with_call(struct nesting_info * info,gimple_stmt_iterator * gsi,gcall * call)439 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
440 		        gcall *call)
441 {
442   tree t;
443 
444   t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
445   gimple_call_set_lhs (call, t);
446   if (! gsi_end_p (*gsi))
447     gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
448   gsi_insert_before (gsi, call, GSI_SAME_STMT);
449 
450   return t;
451 }
452 
453 
454 /* Copy EXP into a temporary.  Allocate the temporary in the context of
455    INFO and insert the initialization statement before GSI.  */
456 
457 static tree
init_tmp_var(struct nesting_info * info,tree exp,gimple_stmt_iterator * gsi)458 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
459 {
460   tree t;
461   gimple *stmt;
462 
463   t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
464   stmt = gimple_build_assign (t, exp);
465   if (! gsi_end_p (*gsi))
466     gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
467   gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
468 
469   return t;
470 }
471 
472 
473 /* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */
474 
475 static tree
gsi_gimplify_val(struct nesting_info * info,tree exp,gimple_stmt_iterator * gsi)476 gsi_gimplify_val (struct nesting_info *info, tree exp,
477 		  gimple_stmt_iterator *gsi)
478 {
479   if (is_gimple_val (exp))
480     return exp;
481   else
482     return init_tmp_var (info, exp, gsi);
483 }
484 
485 /* Similarly, but copy from the temporary and insert the statement
486    after the iterator.  */
487 
488 static tree
save_tmp_var(struct nesting_info * info,tree exp,gimple_stmt_iterator * gsi)489 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
490 {
491   tree t;
492   gimple *stmt;
493 
494   t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
495   stmt = gimple_build_assign (exp, t);
496   if (! gsi_end_p (*gsi))
497     gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
498   gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
499 
500   return t;
501 }
502 
503 /* Build or return the type used to represent a nested function trampoline.  */
504 
505 static GTY(()) tree trampoline_type;
506 
static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  /* The layout is target-determined, not per-function, so the type is
     computed once and cached for the whole translation unit.  */
  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  /* Model the trampoline as a record containing a single char array
     named __data, aligned as computed above.  */
  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  SET_DECL_ALIGN (t, align);
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
542 
543 /* Build or return the type used to represent a nested function descriptor.  */
544 
545 static GTY(()) tree descriptor_type;
546 
static tree
get_descriptor_type (struct nesting_info *info)
{
  /* The base alignment is that of a function.  */
  const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
  tree t;

  /* Target-determined layout, so computed once and cached.  */
  if (descriptor_type)
    return descriptor_type;

  /* Model the descriptor as a record containing a two-element array of
     pointers named __data (index type 0..1).  */
  t = build_index_type (integer_one_node);
  t = build_array_type (ptr_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
  DECL_USER_ALIGN (t) = 1;

  descriptor_type = make_node (RECORD_TYPE);
  TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
  TYPE_FIELDS (descriptor_type) = t;
  layout_type (descriptor_type);
  DECL_CONTEXT (t) = descriptor_type;

  return descriptor_type;
}
572 
573 /* Given DECL, a nested function, find or create an element in the
574    var map for this function.  */
575 
576 static tree
lookup_element_for_decl(struct nesting_info * info,tree decl,enum insert_option insert)577 lookup_element_for_decl (struct nesting_info *info, tree decl,
578 			 enum insert_option insert)
579 {
580   if (insert == NO_INSERT)
581     {
582       tree *slot = info->var_map->get (decl);
583       return slot ? *slot : NULL_TREE;
584     }
585 
586   tree *slot = &info->var_map->get_or_insert (decl);
587   if (!*slot)
588     *slot = build_tree_list (NULL_TREE, NULL_TREE);
589 
590   return (tree) *slot;
591 }
592 
593 /* Given DECL, a nested function, create a field in the non-local
594    frame structure for this function.  */
595 
596 static tree
create_field_for_decl(struct nesting_info * info,tree decl,tree type)597 create_field_for_decl (struct nesting_info *info, tree decl, tree type)
598 {
599   tree field = make_node (FIELD_DECL);
600   DECL_NAME (field) = DECL_NAME (decl);
601   TREE_TYPE (field) = type;
602   TREE_ADDRESSABLE (field) = 1;
603   insert_field_into_struct (get_frame_type (info), field);
604   return field;
605 }
606 
607 /* Given DECL, a nested function, find or create a field in the non-local
608    frame structure for a trampoline for this function.  */
609 
610 static tree
lookup_tramp_for_decl(struct nesting_info * info,tree decl,enum insert_option insert)611 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
612 		       enum insert_option insert)
613 {
614   tree elt, field;
615 
616   elt = lookup_element_for_decl (info, decl, insert);
617   if (!elt)
618     return NULL_TREE;
619 
620   field = TREE_PURPOSE (elt);
621 
622   if (!field && insert == INSERT)
623     {
624       field = create_field_for_decl (info, decl, get_trampoline_type (info));
625       TREE_PURPOSE (elt) = field;
626       info->any_tramp_created = true;
627     }
628 
629   return field;
630 }
631 
632 /* Given DECL, a nested function, find or create a field in the non-local
633    frame structure for a descriptor for this function.  */
634 
635 static tree
lookup_descr_for_decl(struct nesting_info * info,tree decl,enum insert_option insert)636 lookup_descr_for_decl (struct nesting_info *info, tree decl,
637 		       enum insert_option insert)
638 {
639   tree elt, field;
640 
641   elt = lookup_element_for_decl (info, decl, insert);
642   if (!elt)
643     return NULL_TREE;
644 
645   field = TREE_VALUE (elt);
646 
647   if (!field && insert == INSERT)
648     {
649       field = create_field_for_decl (info, decl, get_descriptor_type (info));
650       TREE_VALUE (elt) = field;
651       info->any_descr_created = true;
652     }
653 
654   return field;
655 }
656 
657 /* Build or return the field within the non-local frame state that holds
658    the non-local goto "jmp_buf".  The buffer itself is maintained by the
659    rtl middle-end as dynamic stack space is allocated.  */
660 
static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      /* Convert the save-area size from bytes to Pmode-sized words,
	 plus one word for the frame pointer.  */
      scalar_int_mode mode
	= as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = GET_MODE_SIZE (mode);
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
      /* The buffer is filled in by the rtl middle-end through its
	 address, so it must be addressable.  */
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
701 
702 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ.  */
703 
704 static void
walk_body(walk_stmt_fn callback_stmt,walk_tree_fn callback_op,struct nesting_info * info,gimple_seq * pseq)705 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
706 	   struct nesting_info *info, gimple_seq *pseq)
707 {
708   struct walk_stmt_info wi;
709 
710   memset (&wi, 0, sizeof (wi));
711   wi.info = info;
712   wi.val_only = true;
713   walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
714 }
715 
716 
717 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */
718 
719 static inline void
walk_function(walk_stmt_fn callback_stmt,walk_tree_fn callback_op,struct nesting_info * info)720 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
721 	       struct nesting_info *info)
722 {
723   gimple_seq body = gimple_body (info->context);
724   walk_body (callback_stmt, callback_op, info, &body);
725   gimple_set_body (info->context, body);
726 }
727 
728 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */
729 
static void
walk_gimple_omp_for (gomp_for *for_stmt,
    		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
    		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  /* Walk the loop-control expressions with an iterator over an
     initially empty sequence: any statements the callbacks insert
     accumulate there and are appended to the pre-body below.  */
  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      /* The index is written to, so it may not be replaced by a
	 value-only copy.  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      /* The increment is a binary expression whose first operand is
	 the index itself.  */
      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* Whatever the callbacks generated becomes part of the pre-body.  */
  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
780 
781 /* Similarly for ROOT and all functions nested underneath, depth first.  */
782 
783 static void
walk_all_functions(walk_stmt_fn callback_stmt,walk_tree_fn callback_op,struct nesting_info * root)784 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
785 		    struct nesting_info *root)
786 {
787   struct nesting_info *n;
788   FOR_EACH_NEST_INFO (n, root)
789     walk_function (callback_stmt, callback_op, n);
790 }
791 
792 
793 /* We have to check for a fairly pathological case.  The operands of function
794    nested function are to be interpreted in the context of the enclosing
795    function.  So if any are variably-sized, they will get remapped when the
796    enclosing function is inlined.  But that remapping would also have to be
797    done in the types of the PARM_DECLs of the nested function, meaning the
798    argument types of that function will disagree with the arguments in the
799    calls to that function.  So we'd either have to make a copy of the nested
800    function corresponding to each time the enclosing function was inlined or
801    add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
802    function.  The former is not practical.  The latter would still require
803    detecting this case to know when to add the conversions.  So, for now at
804    least, we don't inline such an enclosing function.
805 
806    We have to do that check recursively, so here return indicating whether
807    FNDECL has such a nested function.  ORIG_FN is the function we were
808    trying to inline to use for checking whether any argument is variably
809    modified by anything in it.
810 
811    It would be better to do this in tree-inline.c so that we could give
812    the appropriate warning for why a function can't be inlined, but that's
813    too late since the nesting structure has already been flattened and
814    adding a flag just to record this fact seems a waste of a flag.  */
815 
816 static bool
check_for_nested_with_variably_modified(tree fndecl,tree orig_fndecl)817 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
818 {
819   struct cgraph_node *cgn = cgraph_node::get (fndecl);
820   tree arg;
821 
822   for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
823     {
824       for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
825 	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
826 	  return true;
827 
828       if (check_for_nested_with_variably_modified (cgn->decl,
829 						   orig_fndecl))
830 	return true;
831     }
832 
833   return false;
834 }
835 
836 /* Construct our local datastructure describing the function nesting
837    tree rooted by CGN.  */
838 
static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = new hash_map<tree, tree>;
  info->var_map = new hash_map<tree, tree>;
  info->mem_refs = new hash_set<tree *>;
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;
  info->thunk_p = cgn->thunk.thunk_p;

  /* Recurse into lexically nested functions; each child is pushed onto
     the front of info->inner, so siblings end up in reverse order.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See check_for_nested_with_variably_modified for why this check
     has to be done here instead of in tree-inline.c.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}
865 
866 /* Return an expression computing the static chain for TARGET_CONTEXT
867    from INFO->CONTEXT.  Insert any necessary computations before TSI.  */
868 
static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* The target frame lives in this very function: its address is
	 the chain.  Record the direct frame use in bit 0.  */
      x = build_addr (info->frame_decl);
      info->static_chain_added |= 1;
    }
  else
    {
      /* Start from our incoming static chain (bit 1) and dereference
	 one __chain field per intervening nesting level.  */
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref_notrap (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}
    }

  return x;
}
898 
899 
900 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
901    frame as seen from INFO->CONTEXT.  Insert any necessary computations
902    before GSI.  */
903 
904 static tree
get_frame_field(struct nesting_info * info,tree target_context,tree field,gimple_stmt_iterator * gsi)905 get_frame_field (struct nesting_info *info, tree target_context,
906 		 tree field, gimple_stmt_iterator *gsi)
907 {
908   struct nesting_info *i;
909   tree x;
910 
911   if (info->context == target_context)
912     {
913       /* Make sure frame_decl gets created.  */
914       (void) get_frame_type (info);
915       x = info->frame_decl;
916       info->static_chain_added |= 1;
917     }
918   else
919     {
920       x = get_chain_decl (info);
921       info->static_chain_added |= 2;
922 
923       for (i = info->outer; i->context != target_context; i = i->outer)
924 	{
925 	  tree field = get_chain_field (i);
926 
927 	  x = build_simple_mem_ref_notrap (x);
928 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
929 	  x = init_tmp_var (info, x, gsi);
930 	}
931 
932       x = build_simple_mem_ref_notrap (x);
933     }
934 
935   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
936   return x;
937 }
938 
939 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
940 
941 /* A subroutine of convert_nonlocal_reference_op.  Create a local variable
942    in the nested function with DECL_VALUE_EXPR set to reference the true
943    variable in the parent function.  This is used both for debug info
944    and in OMP lowering.  */
945 
946 static tree
get_nonlocal_debug_decl(struct nesting_info * info,tree decl)947 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
948 {
949   tree target_context;
950   struct nesting_info *i;
951   tree x, field, new_decl;
952 
953   tree *slot = &info->var_map->get_or_insert (decl);
954 
955   if (*slot)
956     return *slot;
957 
958   target_context = decl_function_context (decl);
959 
960   /* A copy of the code in get_frame_field, but without the temporaries.  */
961   if (info->context == target_context)
962     {
963       /* Make sure frame_decl gets created.  */
964       (void) get_frame_type (info);
965       x = info->frame_decl;
966       i = info;
967       info->static_chain_added |= 1;
968     }
969   else
970     {
971       x = get_chain_decl (info);
972       info->static_chain_added |= 2;
973       for (i = info->outer; i->context != target_context; i = i->outer)
974 	{
975 	  field = get_chain_field (i);
976 	  x = build_simple_mem_ref_notrap (x);
977 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
978 	}
979       x = build_simple_mem_ref_notrap (x);
980     }
981 
982   field = lookup_field_for_decl (i, decl, INSERT);
983   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
984   if (use_pointer_in_frame (decl))
985     x = build_simple_mem_ref_notrap (x);
986 
987   /* ??? We should be remapping types as well, surely.  */
988   new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
989 			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
990   DECL_CONTEXT (new_decl) = info->context;
991   DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
992   DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
993   TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
994   TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
995   TREE_READONLY (new_decl) = TREE_READONLY (decl);
996   TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
997   DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
998   if ((TREE_CODE (decl) == PARM_DECL
999        || TREE_CODE (decl) == RESULT_DECL
1000        || VAR_P (decl))
1001       && DECL_BY_REFERENCE (decl))
1002     DECL_BY_REFERENCE (new_decl) = 1;
1003 
1004   SET_DECL_VALUE_EXPR (new_decl, x);
1005   DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1006 
1007   *slot = new_decl;
1008   DECL_CHAIN (new_decl) = info->debug_var_chain;
1009   info->debug_var_chain = new_decl;
1010 
1011   if (!optimize
1012       && info->context != target_context
1013       && variably_modified_type_p (TREE_TYPE (decl), NULL))
1014     note_nonlocal_vla_type (info, TREE_TYPE (decl));
1015 
1016   return new_decl;
1017 }
1018 
1019 
1020 /* Callback for walk_gimple_stmt, rewrite all references to VAR
1021    and PARM_DECLs that belong to outer functions.
1022 
1023    The rewrite will involve some number of structure accesses back up
1024    the static chain.  E.g. for a variable FOO up one nesting level it'll
1025    be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
1026    indirections apply to decls for which use_pointer_in_frame is true.  */
1027 
static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  /* By default do not recurse; each case re-enables recursion or walks
     sub-trees manually as needed.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      {
	tree x, target_context = decl_function_context (t);

	/* Local references need no rewriting.  */
	if (info->context == target_context)
	  break;

	wi->changed = true;

	if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	  /* OMP-lowering asked us to leave this decl alone; use the
	     debug decl with its DECL_VALUE_EXPR instead.  */
	  x = get_nonlocal_debug_decl (info, t);
	else
	  {
	    struct nesting_info *i = info;
	    while (i && i->context != target_context)
	      i = i->outer;
	    /* If none of the outer contexts is the target context, this means
	       that the VAR or PARM_DECL is referenced in a wrong context.  */
	    if (!i)
	      internal_error ("%s from %s referenced in %s",
			      IDENTIFIER_POINTER (DECL_NAME (t)),
			      IDENTIFIER_POINTER (DECL_NAME (target_context)),
			      IDENTIFIER_POINTER (DECL_NAME (info->context)));

	    /* Rewrite the reference into a walk of the static chain:
	       CHAIN->...->FOO, with an extra dereference for decls kept
	       in the frame by pointer.  */
	    x = lookup_field_for_decl (i, t, INSERT);
	    x = get_frame_field (info, target_context, x, &wi->gsi);
	    if (use_pointer_in_frame (t))
	      {
		x = init_tmp_var (info, x, &wi->gsi);
		x = build_simple_mem_ref_notrap (x);
	      }
	  }

	if (wi->val_only)
	  {
	    /* The context requires a simple value: spill through a
	       temporary (save for stores, init for loads).  */
	    if (wi->is_lhs)
	      x = save_tmp_var (info, x, &wi->gsi);
	    else
	      x = init_tmp_var (info, x, &wi->gsi);
	  }

	*tp = x;
      }
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
        FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	/* Walk the operand in address context (no value spilling).  */
	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Walk index, low bound and element size operands.  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  /* Generic expression: recurse in value context.  */
	  *walk_subtrees = 1;
          wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1179 
1180 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1181 					     struct walk_stmt_info *);
1182 
1183 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1184    and PARM_DECLs that belong to outer functions.  */
1185 
static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl, *pdecl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap; the caller restores the
     original when it is done with this construct.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  /* First pass: rewrite clause decls and operands, and note which
     clauses carry GIMPLE sequences that need a second pass.  */
  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      pdecl = NULL;
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  /* For array reductions the decl is buried inside a MEM_REF;
	     dig down to the underlying decl.  */
	  if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
	    {
	      pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
	      if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
		pdecl = &TREE_OPERAND (*pdecl, 0);
	      if (TREE_CODE (*pdecl) == INDIRECT_REF
		  || TREE_CODE (*pdecl) == ADDR_EXPR)
		pdecl = &TREE_OPERAND (*pdecl, 0);
	    }
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
					 &dummy, wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	do_decl_clause:
	  /* Common handling for clauses whose operand is a decl:
	     nonlocal decls are replaced by their debug decl and marked
	     so convert_nonlocal_reference_op leaves them alone.  */
	  if (pdecl == NULL)
	    pdecl = &OMP_CLAUSE_DECL (clause);
	  decl = *pdecl;
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
		OMP_CLAUSE_SHARED_READONLY (clause) = 0;
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      *pdecl = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	  /* Several OpenACC clauses have optional arguments.  Check if they
	     are present.  */
	  if (OMP_CLAUSE_OPERAND (clause, 0))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }

	  /* The gang clause accepts two arguments.  */
	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
	    {
		wi->val_only = true;
		wi->is_lhs = false;
		convert_nonlocal_reference_op
		  (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  /* Rewrite the size operand, then either treat the decl like
	     any other decl clause or walk a non-decl operand fully.  */
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
					     &dummy, wi);
	    }
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_NONTEMPORAL:
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE__CONDTEMP_:
	case OMP_CLAUSE__SCANTEMP_:
	  /* No operands that could reference nonlocal decls.  */
	  break;

	  /* The following clause belongs to the OpenACC cache directive, which
	     is discarded during gimplification.  */
	case OMP_CLAUSE__CACHE_:
	  /* The following clauses are only allowed in the OpenMP declare simd
	     directive, so not seen here.  */
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	  /* The following clauses are only allowed on OpenMP cancel and
	     cancellation point directives, which at this point have already
	     been lowered into a function call.  */
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	  /* The following clauses are only added during OMP lowering; nested
	     function decomposition happens before that.  */
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  /* Anything else.  */
	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences embedded in reduction,
     lastprivate and linear clauses, declaring any new temporaries at
     the head of each sequence.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      /* Temporarily give the placeholders this function's
		 context while walking the init/merge sequences.  */
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = info->context;
	      tree save_local_var_chain = info->new_local_var_chain;
	      info->new_local_var_chain = NULL;
	      gimple_seq *seq = &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause);
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info, seq);
	      if (info->new_local_var_chain)
		declare_vars (info->new_local_var_chain,
			      gimple_seq_first_stmt (*seq), false);
	      info->new_local_var_chain = NULL;
	      seq = &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause);
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info, seq);
	      if (info->new_local_var_chain)
		declare_vars (info->new_local_var_chain,
			      gimple_seq_first_stmt (*seq), false);
	      info->new_local_var_chain = save_local_var_chain;
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  {
	    tree save_local_var_chain = info->new_local_var_chain;
	    info->new_local_var_chain = NULL;
	    gimple_seq *seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause);
	    walk_body (convert_nonlocal_reference_stmt,
		       convert_nonlocal_reference_op, info, seq);
	    if (info->new_local_var_chain)
	      declare_vars (info->new_local_var_chain,
			    gimple_seq_first_stmt (*seq), false);
	    info->new_local_var_chain = save_local_var_chain;
	  }
	  break;

	case OMP_CLAUSE_LINEAR:
	  {
	    tree save_local_var_chain = info->new_local_var_chain;
	    info->new_local_var_chain = NULL;
	    gimple_seq *seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause);
	    walk_body (convert_nonlocal_reference_stmt,
		       convert_nonlocal_reference_op, info, seq);
	    if (info->new_local_var_chain)
	      declare_vars (info->new_local_var_chain,
			    gimple_seq_first_stmt (*seq), false);
	    info->new_local_var_chain = save_local_var_chain;
	  }
	  break;

	default:
	  break;
	}

  return need_chain;
}
1483 
1484 /* Create nonlocal debug decls for nonlocal VLA array bounds.  */
1485 
1486 static void
note_nonlocal_vla_type(struct nesting_info * info,tree type)1487 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1488 {
1489   while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1490     type = TREE_TYPE (type);
1491 
1492   if (TYPE_NAME (type)
1493       && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1494       && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1495     type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1496 
1497   while (POINTER_TYPE_P (type)
1498 	 || TREE_CODE (type) == VECTOR_TYPE
1499 	 || TREE_CODE (type) == FUNCTION_TYPE
1500 	 || TREE_CODE (type) == METHOD_TYPE)
1501     type = TREE_TYPE (type);
1502 
1503   if (TREE_CODE (type) == ARRAY_TYPE)
1504     {
1505       tree domain, t;
1506 
1507       note_nonlocal_vla_type (info, TREE_TYPE (type));
1508       domain = TYPE_DOMAIN (type);
1509       if (domain)
1510 	{
1511 	  t = TYPE_MIN_VALUE (domain);
1512 	  if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1513 	      && decl_function_context (t) != info->context)
1514 	    get_nonlocal_debug_decl (info, t);
1515 	  t = TYPE_MAX_VALUE (domain);
1516 	  if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1517 	      && decl_function_context (t) != info->context)
1518 	    get_nonlocal_debug_decl (info, t);
1519 	}
1520     }
1521 }
1522 
1523 /* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
1524    PARM_DECLs that belong to outer functions.  This handles statements
1525    that are not handled via the standard recursion done in
1526    walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
1527    convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
1528    operands of STMT have been handled by this function.  */
1529 
static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_TEAMS:
      /* Non-host teams are handled like other OMP constructs below;
	 host teams behave like parallel/task (see fallthrough).  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt),
					wi);
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  info->suppress_expansion = save_suppress;
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      /* If any clause needed the static chain, pass it into the region
	 as an explicit firstprivate.  */
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
	                                wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));

      /* Temporaries created inside the region must be declared there,
	 not in the enclosing function.  */
      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
	              gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_nonlocal_reference_stmt,
	  		   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
	  	 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TASKGROUP:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions just get their clauses and body
	 walked; offloaded ones additionally map the static chain.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
      /* Clause-less constructs: only the body needs walking.  */
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_BIND:
      {
      gbind *bind_stmt = as_a <gbind *> (stmt);

      for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (VAR_P (decl)
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) != info->context)
		  CONSTRUCTOR_ELT (decls, i)->value
		    = get_nonlocal_debug_decl (info, decl);
	      }
	  }

      /* Let the walker descend into the bind body.  */
      *handled_ops_p = false;
      return NULL_TREE;
      }
    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
	{
	  /* A clobber of a nonlocal automatic decl is meaningless once
	     the decl has moved into the frame; drop it.  */
	  tree lhs = gimple_assign_lhs (stmt);
	  if (DECL_P (lhs)
	      && !(TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
	      && decl_function_context (lhs) != info->context)
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
1736 
1737 
1738 /* A subroutine of convert_local_reference.  Create a local variable
1739    in the parent function with DECL_VALUE_EXPR set to reference the
1740    field in FRAME.  This is used both for debug info and in OMP
1741    lowering.  */
1742 
1743 static tree
get_local_debug_decl(struct nesting_info * info,tree decl,tree field)1744 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1745 {
1746   tree x, new_decl;
1747 
1748   tree *slot = &info->var_map->get_or_insert (decl);
1749   if (*slot)
1750     return *slot;
1751 
1752   /* Make sure frame_decl gets created.  */
1753   (void) get_frame_type (info);
1754   x = info->frame_decl;
1755   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1756 
1757   new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1758 			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1759   DECL_CONTEXT (new_decl) = info->context;
1760   DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1761   DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1762   TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1763   TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1764   TREE_READONLY (new_decl) = TREE_READONLY (decl);
1765   TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1766   DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1767   if ((TREE_CODE (decl) == PARM_DECL
1768        || TREE_CODE (decl) == RESULT_DECL
1769        || VAR_P (decl))
1770       && DECL_BY_REFERENCE (decl))
1771     DECL_BY_REFERENCE (new_decl) = 1;
1772 
1773   SET_DECL_VALUE_EXPR (new_decl, x);
1774   DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1775   *slot = new_decl;
1776 
1777   DECL_CHAIN (new_decl) = info->debug_var_chain;
1778   info->debug_var_chain = new_decl;
1779 
1780   /* Do not emit debug info twice.  */
1781   DECL_IGNORED_P (decl) = 1;
1782 
1783   return new_decl;
1784 }
1785 
1786 
1787 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1788    and PARM_DECLs that were referenced by inner nested functions.
1789    The rewrite will be a structure reference to the local frame variable.  */
1790 
1791 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1792 
static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      /* Only decls belonging to this very function (and not the frame
	 decl itself) are candidates for rewriting.  */
      if (t != info->frame_decl && decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* When expansion is suppressed (an OMP clause already remapped
	     the decl), substitute the debug decl whose DECL_VALUE_EXPR is
	     the frame field; otherwise read the frame field directly.  */
	  if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_local_debug_decl (info, t, field);
	  else
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  /* Contexts that require a simple value get the COMPONENT_REF
	     materialized into a temporary.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      /* Walk the operand in address context, then fix up invariant bits
	 if anything underneath was rewritten.  */
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  /* Operand 2 of a COMPONENT_REF is the field offset; operands
	     1-3 of an ARRAY_REF are index, lower bound and stride.  */
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      /* The base object itself need not be a simple value.  */
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
		 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
	 part of a ADDR_EXPR address are not allowed.  But we cannot
	 fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	info->mem_refs->add (tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      /* Any other expression: recurse into operands in value context.  */
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1938 
1939 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1940 					  struct walk_stmt_info *);
1941 
1942 /* Helper for convert_local_reference.  Convert all the references in
1943    the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.  */
1944 
1945 static bool
convert_local_omp_clauses(tree * pclauses,struct walk_stmt_info * wi)1946 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1947 {
1948   struct nesting_info *const info = (struct nesting_info *) wi->info;
1949   bool need_frame = false, need_stmts = false;
1950   tree clause, decl, *pdecl;
1951   int dummy;
1952   bitmap new_suppress;
1953 
1954   new_suppress = BITMAP_GGC_ALLOC ();
1955   bitmap_copy (new_suppress, info->suppress_expansion);
1956 
1957   for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1958     {
1959       pdecl = NULL;
1960       switch (OMP_CLAUSE_CODE (clause))
1961 	{
1962 	case OMP_CLAUSE_REDUCTION:
1963 	case OMP_CLAUSE_IN_REDUCTION:
1964 	case OMP_CLAUSE_TASK_REDUCTION:
1965 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1966 	    need_stmts = true;
1967 	  if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
1968 	    {
1969 	      pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
1970 	      if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
1971 		pdecl = &TREE_OPERAND (*pdecl, 0);
1972 	      if (TREE_CODE (*pdecl) == INDIRECT_REF
1973 		  || TREE_CODE (*pdecl) == ADDR_EXPR)
1974 		pdecl = &TREE_OPERAND (*pdecl, 0);
1975 	    }
1976 	  goto do_decl_clause;
1977 
1978 	case OMP_CLAUSE_LASTPRIVATE:
1979 	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1980 	    need_stmts = true;
1981 	  goto do_decl_clause;
1982 
1983 	case OMP_CLAUSE_LINEAR:
1984 	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1985 	    need_stmts = true;
1986 	  wi->val_only = true;
1987 	  wi->is_lhs = false;
1988 	  convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1989 				      wi);
1990 	  goto do_decl_clause;
1991 
1992 	case OMP_CLAUSE_PRIVATE:
1993 	case OMP_CLAUSE_FIRSTPRIVATE:
1994 	case OMP_CLAUSE_COPYPRIVATE:
1995 	case OMP_CLAUSE_SHARED:
1996 	case OMP_CLAUSE_TO_DECLARE:
1997 	case OMP_CLAUSE_LINK:
1998 	case OMP_CLAUSE_USE_DEVICE_PTR:
1999 	case OMP_CLAUSE_USE_DEVICE_ADDR:
2000 	case OMP_CLAUSE_IS_DEVICE_PTR:
2001 	do_decl_clause:
2002 	  if (pdecl == NULL)
2003 	    pdecl = &OMP_CLAUSE_DECL (clause);
2004 	  decl = *pdecl;
2005 	  if (VAR_P (decl)
2006 	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2007 	    break;
2008 	  if (decl_function_context (decl) == info->context
2009 	      && !use_pointer_in_frame (decl))
2010 	    {
2011 	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2012 	      if (field)
2013 		{
2014 		  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
2015 		    OMP_CLAUSE_SHARED_READONLY (clause) = 0;
2016 		  bitmap_set_bit (new_suppress, DECL_UID (decl));
2017 		  *pdecl = get_local_debug_decl (info, decl, field);
2018 		  need_frame = true;
2019 		}
2020 	    }
2021 	  break;
2022 
2023 	case OMP_CLAUSE_SCHEDULE:
2024 	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
2025 	    break;
2026 	  /* FALLTHRU */
2027 	case OMP_CLAUSE_FINAL:
2028 	case OMP_CLAUSE_IF:
2029 	case OMP_CLAUSE_NUM_THREADS:
2030 	case OMP_CLAUSE_DEPEND:
2031 	case OMP_CLAUSE_DEVICE:
2032 	case OMP_CLAUSE_NUM_TEAMS:
2033 	case OMP_CLAUSE_THREAD_LIMIT:
2034 	case OMP_CLAUSE_SAFELEN:
2035 	case OMP_CLAUSE_SIMDLEN:
2036 	case OMP_CLAUSE_PRIORITY:
2037 	case OMP_CLAUSE_GRAINSIZE:
2038 	case OMP_CLAUSE_NUM_TASKS:
2039 	case OMP_CLAUSE_HINT:
2040 	case OMP_CLAUSE_NUM_GANGS:
2041 	case OMP_CLAUSE_NUM_WORKERS:
2042 	case OMP_CLAUSE_VECTOR_LENGTH:
2043 	case OMP_CLAUSE_GANG:
2044 	case OMP_CLAUSE_WORKER:
2045 	case OMP_CLAUSE_VECTOR:
2046 	case OMP_CLAUSE_ASYNC:
2047 	case OMP_CLAUSE_WAIT:
2048 	  /* Several OpenACC clauses have optional arguments.  Check if they
2049 	     are present.  */
2050 	  if (OMP_CLAUSE_OPERAND (clause, 0))
2051 	    {
2052 	      wi->val_only = true;
2053 	      wi->is_lhs = false;
2054 	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2055 					  &dummy, wi);
2056 	    }
2057 
2058 	  /* The gang clause accepts two arguments.  */
2059 	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
2060 	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
2061 	    {
2062 		wi->val_only = true;
2063 		wi->is_lhs = false;
2064 		convert_nonlocal_reference_op
2065 		  (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
2066 	    }
2067 	  break;
2068 
2069 	case OMP_CLAUSE_DIST_SCHEDULE:
2070 	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
2071 	    {
2072 	      wi->val_only = true;
2073 	      wi->is_lhs = false;
2074 	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2075 					  &dummy, wi);
2076 	    }
2077 	  break;
2078 
2079 	case OMP_CLAUSE_MAP:
2080 	case OMP_CLAUSE_TO:
2081 	case OMP_CLAUSE_FROM:
2082 	  if (OMP_CLAUSE_SIZE (clause))
2083 	    {
2084 	      wi->val_only = true;
2085 	      wi->is_lhs = false;
2086 	      convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
2087 					  &dummy, wi);
2088 	    }
2089 	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
2090 	    goto do_decl_clause;
2091 	  wi->val_only = true;
2092 	  wi->is_lhs = false;
2093 	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
2094 		     wi, NULL);
2095 	  break;
2096 
2097 	case OMP_CLAUSE_ALIGNED:
2098 	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
2099 	    {
2100 	      wi->val_only = true;
2101 	      wi->is_lhs = false;
2102 	      convert_local_reference_op
2103 		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
2104 	    }
2105 	  /* FALLTHRU */
2106 	case OMP_CLAUSE_NONTEMPORAL:
2107 	  /* Like do_decl_clause, but don't add any suppression.  */
2108 	  decl = OMP_CLAUSE_DECL (clause);
2109 	  if (VAR_P (decl)
2110 	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2111 	    break;
2112 	  if (decl_function_context (decl) == info->context
2113 	      && !use_pointer_in_frame (decl))
2114 	    {
2115 	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2116 	      if (field)
2117 		{
2118 		  OMP_CLAUSE_DECL (clause)
2119 		    = get_local_debug_decl (info, decl, field);
2120 		  need_frame = true;
2121 		}
2122 	    }
2123 	  break;
2124 
2125 	case OMP_CLAUSE_NOWAIT:
2126 	case OMP_CLAUSE_ORDERED:
2127 	case OMP_CLAUSE_DEFAULT:
2128 	case OMP_CLAUSE_COPYIN:
2129 	case OMP_CLAUSE_COLLAPSE:
2130 	case OMP_CLAUSE_TILE:
2131 	case OMP_CLAUSE_UNTIED:
2132 	case OMP_CLAUSE_MERGEABLE:
2133 	case OMP_CLAUSE_PROC_BIND:
2134 	case OMP_CLAUSE_NOGROUP:
2135 	case OMP_CLAUSE_THREADS:
2136 	case OMP_CLAUSE_SIMD:
2137 	case OMP_CLAUSE_DEFAULTMAP:
2138 	case OMP_CLAUSE_ORDER:
2139 	case OMP_CLAUSE_SEQ:
2140 	case OMP_CLAUSE_INDEPENDENT:
2141 	case OMP_CLAUSE_AUTO:
2142 	case OMP_CLAUSE_IF_PRESENT:
2143 	case OMP_CLAUSE_FINALIZE:
2144 	case OMP_CLAUSE__CONDTEMP_:
2145 	case OMP_CLAUSE__SCANTEMP_:
2146 	  break;
2147 
2148 	  /* The following clause belongs to the OpenACC cache directive, which
2149 	     is discarded during gimplification.  */
2150 	case OMP_CLAUSE__CACHE_:
2151 	  /* The following clauses are only allowed in the OpenMP declare simd
2152 	     directive, so not seen here.  */
2153 	case OMP_CLAUSE_UNIFORM:
2154 	case OMP_CLAUSE_INBRANCH:
2155 	case OMP_CLAUSE_NOTINBRANCH:
2156 	  /* The following clauses are only allowed on OpenMP cancel and
2157 	     cancellation point directives, which at this point have already
2158 	     been lowered into a function call.  */
2159 	case OMP_CLAUSE_FOR:
2160 	case OMP_CLAUSE_PARALLEL:
2161 	case OMP_CLAUSE_SECTIONS:
2162 	case OMP_CLAUSE_TASKGROUP:
2163 	  /* The following clauses are only added during OMP lowering; nested
2164 	     function decomposition happens before that.  */
2165 	case OMP_CLAUSE__LOOPTEMP_:
2166 	case OMP_CLAUSE__REDUCTEMP_:
2167 	case OMP_CLAUSE__SIMDUID_:
2168 	case OMP_CLAUSE__GRIDDIM_:
2169 	case OMP_CLAUSE__SIMT_:
2170 	  /* Anything else.  */
2171 	default:
2172 	  gcc_unreachable ();
2173 	}
2174     }
2175 
2176   info->suppress_expansion = new_suppress;
2177 
2178   if (need_stmts)
2179     for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2180       switch (OMP_CLAUSE_CODE (clause))
2181 	{
2182 	case OMP_CLAUSE_REDUCTION:
2183 	case OMP_CLAUSE_IN_REDUCTION:
2184 	case OMP_CLAUSE_TASK_REDUCTION:
2185 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2186 	    {
2187 	      tree old_context
2188 		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
2189 	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2190 		= info->context;
2191 	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2192 		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2193 		  = info->context;
2194 	      walk_body (convert_local_reference_stmt,
2195 			 convert_local_reference_op, info,
2196 			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
2197 	      walk_body (convert_local_reference_stmt,
2198 			 convert_local_reference_op, info,
2199 			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
2200 	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2201 		= old_context;
2202 	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2203 		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2204 		  = old_context;
2205 	    }
2206 	  break;
2207 
2208 	case OMP_CLAUSE_LASTPRIVATE:
2209 	  walk_body (convert_local_reference_stmt,
2210 		     convert_local_reference_op, info,
2211 		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
2212 	  break;
2213 
2214 	case OMP_CLAUSE_LINEAR:
2215 	  walk_body (convert_local_reference_stmt,
2216 		     convert_local_reference_op, info,
2217 		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
2218 	  break;
2219 
2220 	default:
2221 	  break;
2222 	}
2223 
2224   return need_frame;
2225 }
2226 
2227 
2228 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
2229    and PARM_DECLs that were referenced by inner nested functions.
2230    The rewrite will be a structure reference to the local frame variable.  */
2231 
static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  char save_static_chain_added;
  bool frame_decl_added;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_TEAMS:
      /* A host teams region is handled like the other simple OMP
	 constructs; an offloaded one falls through to the taskreg path.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  info->suppress_expansion = save_suppress;
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      /* If any clause was remapped into the frame, the frame itself must
	 be shared with the parallel/task region.  */
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
	                             wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      /* Walk the body with fresh new-local-var and static-chain state so
	 we can tell what the body itself introduced.  */
      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
	         gimple_omp_body_ptr (stmt));

      /* The body may have referenced the frame even if no clause did;
	 add the sharing clause now if it is still missing.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  info->static_chain_added |= 4;
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
      /* Temporaries created while rewriting the body belong to it.  */
      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TASKGROUP:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* A non-offloaded target statement only needs its clauses fixed.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  break;
	}
      /* Offloaded target: like the taskreg case above, but the frame is
	 communicated via a tofrom MAP clause instead of SHARED.  */
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	}

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
      /* Clause-less constructs: just recurse into the body.  */
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_COND:
      /* Condition operands must be simple values.  */
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      /* Drop clobbers of decls that have been moved into the frame;
	 clobbering the old decl would be meaningless.  */
      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (DECL_P (lhs)
	      && decl_function_context (lhs) == info->context
	      && !use_pointer_in_frame (lhs)
	      && lookup_field_for_decl (info, lhs, NO_INSERT))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
	   var;
	   var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (VAR_P (decl)
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) == info->context
		    && !use_pointer_in_frame (decl))
		  {
		    tree field = lookup_field_for_decl (info, decl, NO_INSERT);
		    if (field)
		      {
			CONSTRUCTOR_ELT (decls, i)->value
			  = get_local_debug_decl (info, decl, field);
		      }
		  }
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2461 
2462 
2463 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2464    that reference labels from outer functions.  The rewrite will be a
2465    call to __builtin_nonlocal_goto.  */
2466 
2467 static tree
convert_nl_goto_reference(gimple_stmt_iterator * gsi,bool * handled_ops_p,struct walk_stmt_info * wi)2468 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2469 			   struct walk_stmt_info *wi)
2470 {
2471   struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2472   tree label, new_label, target_context, x, field;
2473   gcall *call;
2474   gimple *stmt = gsi_stmt (*gsi);
2475 
2476   if (gimple_code (stmt) != GIMPLE_GOTO)
2477     {
2478       *handled_ops_p = false;
2479       return NULL_TREE;
2480     }
2481 
2482   label = gimple_goto_dest (stmt);
2483   if (TREE_CODE (label) != LABEL_DECL)
2484     {
2485       *handled_ops_p = false;
2486       return NULL_TREE;
2487     }
2488 
2489   target_context = decl_function_context (label);
2490   if (target_context == info->context)
2491     {
2492       *handled_ops_p = false;
2493       return NULL_TREE;
2494     }
2495 
2496   for (i = info->outer; target_context != i->context; i = i->outer)
2497     continue;
2498 
2499   /* The original user label may also be use for a normal goto, therefore
2500      we must create a new label that will actually receive the abnormal
2501      control transfer.  This new label will be marked LABEL_NONLOCAL; this
2502      mark will trigger proper behavior in the cfg, as well as cause the
2503      (hairy target-specific) non-local goto receiver code to be generated
2504      when we expand rtl.  Enter this association into var_map so that we
2505      can insert the new label into the IL during a second pass.  */
2506   tree *slot = &i->var_map->get_or_insert (label);
2507   if (*slot == NULL)
2508     {
2509       new_label = create_artificial_label (UNKNOWN_LOCATION);
2510       DECL_NONLOCAL (new_label) = 1;
2511       *slot = new_label;
2512     }
2513   else
2514     new_label = *slot;
2515 
2516   /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
2517   field = get_nl_goto_field (i);
2518   x = get_frame_field (info, target_context, field, gsi);
2519   x = build_addr (x);
2520   x = gsi_gimplify_val (info, x, gsi);
2521   call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2522 			    2, build_addr (new_label), x);
2523   gsi_replace (gsi, call, false);
2524 
2525   /* We have handled all of STMT's operands, no need to keep going.  */
2526   *handled_ops_p = true;
2527   return NULL_TREE;
2528 }
2529 
2530 
2531 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2532    are referenced via nonlocal goto from a nested function.  The rewrite
2533    will involve installing a newly generated DECL_NONLOCAL label, and
2534    (potentially) a branch around the rtl gunk that is assumed to be
2535    attached to such a label.  */
2536 
2537 static tree
convert_nl_goto_receiver(gimple_stmt_iterator * gsi,bool * handled_ops_p,struct walk_stmt_info * wi)2538 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2539 			  struct walk_stmt_info *wi)
2540 {
2541   struct nesting_info *const info = (struct nesting_info *) wi->info;
2542   tree label, new_label;
2543   gimple_stmt_iterator tmp_gsi;
2544   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2545 
2546   if (!stmt)
2547     {
2548       *handled_ops_p = false;
2549       return NULL_TREE;
2550     }
2551 
2552   label = gimple_label_label (stmt);
2553 
2554   tree *slot = info->var_map->get (label);
2555   if (!slot)
2556     {
2557       *handled_ops_p = false;
2558       return NULL_TREE;
2559     }
2560 
2561   /* If there's any possibility that the previous statement falls through,
2562      then we must branch around the new non-local label.  */
2563   tmp_gsi = wi->gsi;
2564   gsi_prev (&tmp_gsi);
2565   if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2566     {
2567       gimple *stmt = gimple_build_goto (label);
2568       gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2569     }
2570 
2571   new_label = (tree) *slot;
2572   stmt = gimple_build_label (new_label);
2573   gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2574 
2575   *handled_ops_p = true;
2576   return NULL_TREE;
2577 }
2578 
2579 
2580 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2581    of nested functions that require the use of trampolines.  The rewrite
2582    will involve a reference a trampoline generated for the occasion.  */
2583 
static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  bool descr;
  gcall *call;

  /* By default do not descend; re-enabled below for trees that may
     contain further ADDR_EXPRs.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;

      /* Decide whether to generate a descriptor or a trampoline. */
      descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;

      if (descr)
	x = lookup_descr_for_decl (i, decl, INSERT);
      else
	x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      if (descr)
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
      else
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      /* Replace the original &nested_fn with the adjusted temporary.  */
      *tp = x;
      break;

    default:
      /* Types and decls carry no interesting subtrees here; everything
	 else may still contain an ADDR_EXPR, so keep walking.  */
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
2663 
2664 
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference to a trampoline
   generated for the occasion.  */
2669 
static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TEAMS:
      /* Host teams constructs are handled like parallel/task below;
	 anything else falls back to the generic operand walk.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      goto do_parallel;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    do_parallel:
      {
	/* Walk the construct's body with fresh new_local_var_chain and
	   static_chain_added state; the saved values are restored
	   (resp. merged back) once the body has been processed.  */
	tree save_local_var_chain = info->new_local_var_chain;
        walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	info->new_local_var_chain = NULL;
	char save_static_chain_added = info->static_chain_added;
	info->static_chain_added = 0;
        walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
		   info, gimple_omp_body_ptr (stmt));
	/* Temporaries created while rewriting the body must be declared
	   inside the construct, not in the enclosing function.  */
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (gimple_omp_body (stmt)),
			false);
	/* Bit 0 of static_chain_added tracks uses of FRAME.*, bit 1
	   uses of CHAIN.*; add the matching data-sharing clause for
	   whichever the body required.  */
	for (int i = 0; i < 2; i++)
	  {
	    tree c, decl;
	    if ((info->static_chain_added & (1 << i)) == 0)
	      continue;
	    decl = i ? get_chain_decl (info) : info->frame_decl;
	    /* Don't add CHAIN.* or FRAME.* twice.  */
	    for (c = gimple_omp_taskreg_clauses (stmt);
		 c;
		 c = OMP_CLAUSE_CHAIN (c))
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		  && OMP_CLAUSE_DECL (c) == decl)
		break;
	    if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
	      {
		c = build_omp_clause (gimple_location (stmt),
				      i ? OMP_CLAUSE_FIRSTPRIVATE
				      : OMP_CLAUSE_SHARED);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		gimple_omp_taskreg_set_clauses (stmt, c);
	      }
	    else if (c == NULL)
	      {
		/* Offloaded target regions use a map clause instead.  */
		c = build_omp_clause (gimple_location (stmt),
				      OMP_CLAUSE_MAP);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_SET_MAP_KIND (c,
					 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
		OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
		gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					       c);
	      }
	  }
	info->new_local_var_chain = save_local_var_chain;
	info->static_chain_added |= save_static_chain_added;
      }
      break;

    default:
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2769 
2770 
2771 
2772 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2773    that reference nested functions to make sure that the static chain
2774    is set up properly for the call.  */
2775 
static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* A static chain is already attached; nothing more to do.  */
      if (gimple_call_chain (stmt))
	break;
      /* Indirect calls were rewritten via trampolines; only direct
	 calls to nested functions need a chain here.  */
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  struct nesting_info *i = info;
	  while (i && i->context != target_context)
	    i = i->outer;
	  /* If none of the outer contexts is the target context, this means
	     that the function is called in a wrong context.  */
	  if (!i)
	    internal_error ("%s from %s called in %s",
			    IDENTIFIER_POINTER (DECL_NAME (decl)),
			    IDENTIFIER_POINTER (DECL_NAME (target_context)),
			    IDENTIFIER_POINTER (DECL_NAME (info->context)));

	  gimple_call_set_chain (as_a <gcall *> (stmt),
				 get_static_chain (info, target_context,
						   &wi->gsi));
	  /* Bit 0: our own frame was needed; bit 1: an outer chain.  */
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_TEAMS:
      /* Non-host teams bodies need no clause fixups; just recurse.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  walk_body (convert_gimple_call, NULL, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Track chain uses inside the construct's body separately, then
	 add SHARED/FIRSTPRIVATE clauses for FRAME.*/CHAIN.* as needed
	 and merge the saved state back.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
	  break;
	}
      /* Same as above, but an offloaded region needs map clauses
	 rather than data-sharing clauses.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					     c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
	  	 gimple_omp_for_pre_body_ptr (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
      /* These constructs only need their body walked.  */
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2916 
2917 /* Walk the nesting tree starting with ROOT.  Convert all trampolines and
2918    call expressions.  At the same time, determine if a nested function
2919    actually uses its static chain; if not, remember that.  */
2920 
static void
convert_all_function_calls (struct nesting_info *root)
{
  unsigned int chain_count = 0, old_chain_count, iter_count;
  struct nesting_info *n;

  /* First, optimistically clear static_chain for all decls that haven't
     used the static chain already for variable access.  But always create
     it if not optimizing.  This makes it possible to reconstruct the static
     nesting tree at run time and thus to resolve up-level references from
     within the debugger.  */
  FOR_EACH_NEST_INFO (n, root)
    {
      /* Thunks inherit their setting from the aliased function below.  */
      if (n->thunk_p)
	continue;
      tree decl = n->context;
      if (!optimize)
	{
	  if (n->inner)
	    (void) get_frame_type (n);
	  if (n->outer)
	    (void) get_chain_decl (n);
	}
      else if (!n->outer || (!n->chain_decl && !n->chain_field))
	{
	  DECL_STATIC_CHAIN (decl) = 0;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Guessing no static-chain for %s\n",
		     lang_hooks.decl_printable_name (decl, 2));
	}
      else
	DECL_STATIC_CHAIN (decl) = 1;
      chain_count += DECL_STATIC_CHAIN (decl);
    }

  /* Thunks take their static-chain setting from the function they
     forward to.  */
  FOR_EACH_NEST_INFO (n, root)
    if (n->thunk_p)
      {
	tree decl = n->context;
	tree alias = cgraph_node::get (decl)->thunk.alias;
	DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
      }

  /* Walk the functions and perform transformations.  Note that these
     transformations can induce new uses of the static chain, which in turn
     require re-examining all users of the decl.  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it did, we
     would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
  iter_count = 0;
  do
    {
      /* Iterate to a fixed point: stop once a full pass leaves the
	 number of static-chain-using decls unchanged.  */
      old_chain_count = chain_count;
      chain_count = 0;
      iter_count++;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fputc ('\n', dump_file);

      FOR_EACH_NEST_INFO (n, root)
	{
	  if (n->thunk_p)
	    continue;
	  tree decl = n->context;
	  walk_function (convert_tramp_reference_stmt,
			 convert_tramp_reference_op, n);
	  walk_function (convert_gimple_call, NULL, n);
	  chain_count += DECL_STATIC_CHAIN (decl);
	}

      /* Re-propagate the possibly changed settings to thunks.  */
      FOR_EACH_NEST_INFO (n, root)
	if (n->thunk_p)
	  {
	    tree decl = n->context;
	    tree alias = cgraph_node::get (decl)->thunk.alias;
	    DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
	  }
    }
  while (chain_count != old_chain_count);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
	     iter_count);
}
3006 
/* Context passed through the tree-inline copy machinery when remapping
   variably-modified types; pairs the generic copy_body_data with the
   nesting_info whose var_map drives nesting_copy_decl.  */

struct nesting_copy_body_data
{
  copy_body_data cb;		/* Generic tree-inline remapping state.  */
  struct nesting_info *root;	/* Supplies var_map for decl lookups.  */
};
3012 
3013 /* A helper subroutine for debug_var_chain type remapping.  */
3014 
3015 static tree
nesting_copy_decl(tree decl,copy_body_data * id)3016 nesting_copy_decl (tree decl, copy_body_data *id)
3017 {
3018   struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
3019   tree *slot = nid->root->var_map->get (decl);
3020 
3021   if (slot)
3022     return (tree) *slot;
3023 
3024   if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
3025     {
3026       tree new_decl = copy_decl_no_change (decl, id);
3027       DECL_ORIGINAL_TYPE (new_decl)
3028 	= remap_type (DECL_ORIGINAL_TYPE (decl), id);
3029       return new_decl;
3030     }
3031 
3032   if (VAR_P (decl)
3033       || TREE_CODE (decl) == PARM_DECL
3034       || TREE_CODE (decl) == RESULT_DECL)
3035     return decl;
3036 
3037   return copy_decl_no_change (decl, id);
3038 }
3039 
3040 /* A helper function for remap_vla_decls.  See if *TP contains
3041    some remapped variables.  */
3042 
3043 static tree
contains_remapped_vars(tree * tp,int * walk_subtrees,void * data)3044 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
3045 {
3046   struct nesting_info *root = (struct nesting_info *) data;
3047   tree t = *tp;
3048 
3049   if (DECL_P (t))
3050     {
3051       *walk_subtrees = 0;
3052       tree *slot = root->var_map->get (t);
3053 
3054       if (slot)
3055 	return *slot;
3056     }
3057   return NULL;
3058 }
3059 
3060 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
3061    involved.  */
3062 
static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Process subblocks first, depth-first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* Cheap first scan: look for the first variably-modified variable
     whose value expression or type involves a remapped decl.  If there
     is none we can skip setting up the copy_body machinery.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  if (var == NULL_TREE)
    return;

  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  /* Resume from the variable found above and remap each affected
     value expression and type.  */
  for (; var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Find the nesting_info whose context declares VAR; skip the
	   variable if it belongs to none of them.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
        if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	/* Peel unnamed pointer layers in parallel so the TYPE_NAME
	   comparison below looks at corresponding levels.  */
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
3157 
/* Fixup VLA decls in BLOCK and subblocks by fully expanding their value
   expressions, so no debug variable is referenced only through them.  */
3160 
3161 static void
fixup_vla_decls(tree block)3162 fixup_vla_decls (tree block)
3163 {
3164   for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3165     if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3166       {
3167 	tree val = DECL_VALUE_EXPR (var);
3168 
3169 	if (!(TREE_CODE (val) == INDIRECT_REF
3170 	      && VAR_P (TREE_OPERAND (val, 0))
3171 	      && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val, 0))))
3172 	  continue;
3173 
3174 	/* Fully expand value expressions.  This avoids having debug variables
3175 	   only referenced from them and that can be swept during GC.  */
3176 	val = build1 (INDIRECT_REF, TREE_TYPE (val),
3177 		      DECL_VALUE_EXPR (TREE_OPERAND (val, 0)));
3178 	SET_DECL_VALUE_EXPR (var, val);
3179       }
3180 
3181   for (tree sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3182     fixup_vla_decls (sub);
3183 }
3184 
3185 /* Fold the MEM_REF *E.  */
3186 bool
fold_mem_refs(tree * const & e,void * data ATTRIBUTE_UNUSED)3187 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
3188 {
3189   tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
3190   *ref_p = fold (*ref_p);
3191   return true;
3192 }
3193 
3194 /* Given DECL, a nested function, build an initialization call for FIELD,
3195    the trampoline or descriptor for DECL, using FUNC as the function.  */
3196 
3197 static gcall *
build_init_call_stmt(struct nesting_info * info,tree decl,tree field,tree func)3198 build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
3199 		      tree func)
3200 {
3201   tree arg1, arg2, arg3, x;
3202 
3203   gcc_assert (DECL_STATIC_CHAIN (decl));
3204   arg3 = build_addr (info->frame_decl);
3205 
3206   arg2 = build_addr (decl);
3207 
3208   x = build3 (COMPONENT_REF, TREE_TYPE (field),
3209 	      info->frame_decl, field, NULL_TREE);
3210   arg1 = build_addr (x);
3211 
3212   return gimple_build_call (func, 3, arg1, arg2, arg3);
3213 }
3214 
3215 /* Do "everything else" to clean up or complete state collected by the various
3216    walking passes -- create a field to hold the frame base address, lay out the
3217    types and decls, generate code to initialize the frame decl, store critical
3218    expressions in the struct function for rtl to find.  */
3219 
static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list = NULL;
  gimple *stmt;
  tree context = root->context;
  struct function *sf;

  /* Thunks have no body of their own to finalize.  */
  if (root->thunk_p)
    return;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* Debugging information needs to compute the frame base address of the
	 parent frame out of the static chain from the nested frame.

	 The static chain is the address of the FRAME record, so one could
	 imagine it would be possible to compute the frame base address just
	 adding a constant offset to this address.  Unfortunately, this is not
	 possible: if the FRAME object has alignment constraints that are
	 stronger than the stack, then the offset between the frame base and
	 the FRAME object will be dynamic.

	 What we do instead is to append a field to the FRAME object that holds
	 the frame base address: then debug info just has to fetch this
	 field.  */

      /* Debugging information will refer to the CFA as the frame base
	 address: we will do the same here.  */
      const tree frame_addr_fndecl
        = builtin_decl_explicit (BUILT_IN_DWARF_CFA);

      /* Create a field in the FRAME record to hold the frame base address for
	 this stack frame.  Since it will be used only by the debugger, put it
	 at the end of the record in order not to shift all other offsets.  */
      tree fb_decl = make_node (FIELD_DECL);

      DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
      TREE_TYPE (fb_decl) = ptr_type_node;
      TREE_ADDRESSABLE (fb_decl) = 1;
      DECL_CONTEXT (fb_decl) = root->frame_type;
      TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
						fb_decl);

      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Initialize the frame base address field.  If the builtin we need is
	 not available, set it to NULL so that debugging information does not
	 reference junk.  */
      tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
			    root->frame_decl, fb_decl, NULL_TREE);
      tree fb_tmp;

      if (frame_addr_fndecl != NULL_TREE)
	{
	  gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
						integer_zero_node);
	  gimple_stmt_iterator gsi = gsi_last (stmt_list);

	  fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
	}
      else
	fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
      gimple_seq_add_stmt (&stmt_list,
			   gimple_build_assign (fb_ref, fb_tmp));

      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to insert
     a copy or a pointer.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  /* Store either the parameter's address or its value into the
	     corresponding frame field.  */
	  if (use_pointer_in_frame (p))
	    x = build_addr (p);
	  else
	    x = p;

	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If descriptors were created, then we need to initialize them.  */
  if (root->any_descr_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_descr_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gbind *bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
      /* Prepend the initializations to the outermost bind's body.  */
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Find the innermost context whose variables the type
		   depends on; skip the decl if there is none.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		/* Peel unnamed pointer layers in parallel so the
		   TYPE_NAME comparison looks at matching levels.  */
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }
  else
    fixup_vla_decls (DECL_INITIAL (root->context));

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
3488 
3489 static void
finalize_nesting_tree(struct nesting_info * root)3490 finalize_nesting_tree (struct nesting_info *root)
3491 {
3492   struct nesting_info *n;
3493   FOR_EACH_NEST_INFO (n, root)
3494     finalize_nesting_tree_1 (n);
3495 }
3496 
3497 /* Unnest the nodes and pass them to cgraph.  */
3498 
3499 static void
unnest_nesting_tree_1(struct nesting_info * root)3500 unnest_nesting_tree_1 (struct nesting_info *root)
3501 {
3502   struct cgraph_node *node = cgraph_node::get (root->context);
3503 
3504   /* For nested functions update the cgraph to reflect unnesting.
3505      We also delay finalizing of these functions up to this point.  */
3506   if (node->origin)
3507     {
3508        node->unnest ();
3509        if (!root->thunk_p)
3510 	 cgraph_node::finalize_function (root->context, true);
3511     }
3512 }
3513 
3514 static void
unnest_nesting_tree(struct nesting_info * root)3515 unnest_nesting_tree (struct nesting_info *root)
3516 {
3517   struct nesting_info *n;
3518   FOR_EACH_NEST_INFO (n, root)
3519     unnest_nesting_tree_1 (n);
3520 }
3521 
3522 /* Free the data structures allocated during this pass.  */
3523 
3524 static void
free_nesting_tree(struct nesting_info * root)3525 free_nesting_tree (struct nesting_info *root)
3526 {
3527   struct nesting_info *node, *next;
3528 
3529   node = iter_nestinfo_start (root);
3530   do
3531     {
3532       next = iter_nestinfo_next (node);
3533       delete node->var_map;
3534       delete node->field_map;
3535       delete node->mem_refs;
3536       free (node);
3537       node = next;
3538     }
3539   while (node);
3540 }
3541 
3542 /* Gimplify a function and all its nested functions.  */
3543 static void
gimplify_all_functions(struct cgraph_node * root)3544 gimplify_all_functions (struct cgraph_node *root)
3545 {
3546   struct cgraph_node *iter;
3547   if (!gimple_body (root->decl))
3548     gimplify_function_tree (root->decl);
3549   for (iter = root->nested; iter; iter = iter->next_nested)
3550     if (!iter->thunk.thunk_p)
3551       gimplify_all_functions (iter);
3552 }
3553 
/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node::get (fndecl);
  if (!cgn->nested)
    return;

  /* Make sure every function in the nest has a GIMPLE body before the
     statement walks below look at it.  */
  gimplify_all_functions (cgn);

  /* Open the -fdump-tree-nested dump stream, if requested.  */
  set_dump_file (dump_begin (TDI_nested, &dump_flags));
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
	     lang_hooks.decl_printable_name (fndecl, 2));

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  /* Rewrite references to variables of enclosing functions, then
     references within each function to its own escaping variables, then
     nonlocal gotos and their receivers.  NOTE(review): the nonlocal
     walk appears to run before the local walk deliberately — confirm
     against the pass description at the top of this file before
     reordering.  */
  walk_all_functions (convert_nonlocal_reference_stmt,
                      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
                      convert_local_reference_op,
		      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  /* Rewrite call sites to pass static chains, finish per-function
     cleanup, and tell the cgraph the functions are no longer nested.  */
  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  /* All rewriting is done; release the pass's data structures.  */
  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      set_dump_file (NULL);
    }
}
3600 
3601 #include "gt-tree-nested.h"
3602