1 /* Nested function decomposition for GIMPLE.
2    Copyright (C) 2004-2018 Free Software Foundation, Inc.
3 
4    This file is part of GCC.
5 
6    GCC is free software; you can redistribute it and/or modify
7    it under the terms of the GNU General Public License as published by
8    the Free Software Foundation; either version 3, or (at your option)
9    any later version.
10 
11    GCC is distributed in the hope that it will be useful,
12    but WITHOUT ANY WARRANTY; without even the implied warranty of
13    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14    GNU General Public License for more details.
15 
16    You should have received a copy of the GNU General Public License
17    along with GCC; see the file COPYING3.  If not see
18    <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "cgraph.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "dumpfile.h"
35 #include "tree-inline.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "tree-cfg.h"
40 #include "explow.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 
45 
46 /* The object of this pass is to lower the representation of a set of nested
47    functions in order to expose all of the gory details of the various
48    nonlocal references.  We want to do this sooner rather than later, in
49    order to give us more freedom in emitting all of the functions in question.
50 
51    Back in olden times, when gcc was young, we developed an insanely
52    complicated scheme whereby variables which were referenced nonlocally
53    were forced to live in the stack of the declaring function, and then
54    the nested functions magically discovered where these variables were
55    placed.  In order for this scheme to function properly, it required
56    that the outer function be partially expanded, then we switch to
57    compiling the inner function, and once done with those we switch back
58    to compiling the outer function.  Such delicate ordering requirements
59    makes it difficult to do whole translation unit optimizations
60    involving such functions.
61 
62    The implementation here is much more direct.  Everything that can be
63    referenced by an inner function is a member of an explicitly created
64    structure herein called the "nonlocal frame struct".  The incoming
65    static chain for a nested function is a pointer to this struct in
66    the parent.  In this way, we settle on known offsets from a known
67    base, and so are decoupled from the logic that places objects in the
68    function's stack frame.  More importantly, we don't have to wait for
69    that to happen -- since the compilation of the inner function is no
70    longer tied to a real stack frame, the nonlocal frame struct can be
71    allocated anywhere.  Which means that the outer function is now
72    inlinable.
73 
74    Theory of operation here is very simple.  Iterate over all the
75    statements in all the functions (depth first) several times,
76    allocating structures and fields on demand.  In general we want to
77    examine inner functions first, so that we can avoid making changes
78    to outer functions which are unnecessary.
79 
80    The order of the passes matters a bit, in that later passes will be
81    skipped if it is discovered that the functions don't actually interact
82    at all.  That is, they're nested in the lexical sense but could have
83    been written as independent functions without change.  */
84 
85 
struct nesting_info
{
  /* Tree structure mirroring the lexical nesting of functions: the
     enclosing function, the first nested function, and the next
     sibling at the same nesting depth.  */
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  /* Map from a non-locally referenced DECL to the FIELD_DECL created
     for it in this function's frame record.  */
  hash_map<tree, tree> *field_map;
  /* Map from a DECL to its replacement; for nested function decls the
     value is a TREE_LIST whose PURPOSE/VALUE hold the trampoline and
     descriptor fields respectively.  */
  hash_map<tree, tree> *var_map;
  /* Addresses of operands already rewritten, so each is processed at
     most once.  */
  hash_set<tree *> *mem_refs;
  /* Bitmap of variables whose frame expansion is suppressed; presumably
     indexed by DECL_UID — confirm against the walkers using it.  */
  bitmap suppress_expansion;

  /* The FUNCTION_DECL this nesting_info describes.  */
  tree context;
  /* Artificial locals created by create_tmp_var_for, to be declared
     when the nesting tree is finalized.  */
  tree new_local_var_chain;
  /* Debug-only replacement variables, kept on a separate chain.  */
  tree debug_var_chain;
  /* The non-local frame RECORD_TYPE and the local VAR_DECL holding an
     instance of it (the "FRAME" variable).  */
  tree frame_type;
  tree frame_decl;
  /* Field within the frame holding the parent's static chain, and the
     PARM_DECL-like variable holding this function's incoming chain.  */
  tree chain_field;
  tree chain_decl;
  /* Field holding the buffer used for non-local gotos.  */
  tree nl_goto_field;

  /* True if any PARM_DECL was remapped into the frame struct.  */
  bool any_parm_remapped;
  /* True if a trampoline field was created for a nested function.  */
  bool any_tramp_created;
  /* True if a descriptor field was created for a nested function.  */
  bool any_descr_created;
  /* Bitmask: 1 = the address of our own frame was taken,
     2 = the incoming static chain was used (see get_static_chain).  */
  char static_chain_added;
};
111 
112 
113 /* Iterate over the nesting tree, starting with ROOT, depth first.  */
114 
115 static inline struct nesting_info *
iter_nestinfo_start(struct nesting_info * root)116 iter_nestinfo_start (struct nesting_info *root)
117 {
118   while (root->inner)
119     root = root->inner;
120   return root;
121 }
122 
123 static inline struct nesting_info *
iter_nestinfo_next(struct nesting_info * node)124 iter_nestinfo_next (struct nesting_info *node)
125 {
126   if (node->next)
127     return iter_nestinfo_start (node->next);
128   return node->outer;
129 }
130 
131 #define FOR_EACH_NEST_INFO(I, ROOT) \
132   for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
133 
134 /* Obstack used for the bitmaps in the struct above.  */
135 static struct bitmap_obstack nesting_info_bitmap_obstack;
136 
137 
138 /* We're working in so many different function contexts simultaneously,
139    that create_tmp_var is dangerous.  Prevent mishap.  */
140 #define create_tmp_var cant_use_create_tmp_var_here_dummy
141 
142 /* Like create_tmp_var, except record the variable for registration at
143    the given nesting level.  */
144 
static tree
create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
{
  tree tmp_var;

  /* If the type is of variable size or a type which must be created by the
     frontend, something is wrong.  Note that we explicitly allow
     incomplete types here, since we create them ourselves here.  */
  gcc_assert (!TREE_ADDRESSABLE (type));
  gcc_assert (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);

  tmp_var = create_tmp_var_raw (type, prefix);
  /* The temporary belongs to INFO's function, which need not be the
     function currently being processed.  */
  DECL_CONTEXT (tmp_var) = info->context;
  /* Thread it onto INFO's list of new locals; these are declared in
     bulk when the nesting tree is finalized.  */
  DECL_CHAIN (tmp_var) = info->new_local_var_chain;
  DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp_var) = 1;

  info->new_local_var_chain = tmp_var;

  return tmp_var;
}
169 
170 /* Take the address of EXP to be used within function CONTEXT.
171    Mark it for addressability as necessary.  */
172 
173 tree
build_addr(tree exp)174 build_addr (tree exp)
175 {
176   mark_addressable (exp);
177   return build_fold_addr_expr (exp);
178 }
179 
180 /* Insert FIELD into TYPE, sorted by alignment requirements.  */
181 
182 void
insert_field_into_struct(tree type,tree field)183 insert_field_into_struct (tree type, tree field)
184 {
185   tree *p;
186 
187   DECL_CONTEXT (field) = type;
188 
189   for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
190     if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
191       break;
192 
193   DECL_CHAIN (field) = *p;
194   *p = field;
195 
196   /* Set correct alignment for frame struct type.  */
197   if (TYPE_ALIGN (type) < DECL_ALIGN (field))
198     SET_TYPE_ALIGN (type, DECL_ALIGN (field));
199 }
200 
201 /* Build or return the RECORD_TYPE that describes the frame state that is
202    shared between INFO->CONTEXT and its nested functions.  This record will
203    not be complete until finalize_nesting_tree; up until that point we'll
204    be adding fields as necessary.
205 
206    We also build the DECL that represents this frame in the function.  */
207 
static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  /* Lazily create the record and its FRAME variable on first use.  */
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      /* Name the record "FRAME.<function-name>" for dumps/debug.  */
      name = concat ("FRAME.",
		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
		     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      info->frame_type = type;
      info->frame_decl = create_tmp_var_for (info, type, "FRAME");
      DECL_NONLOCAL_FRAME (info->frame_decl) = 1;

      /* ??? Always make it addressable for now, since it is meant to
	 be pointed to by the static chain pointer.  This pessimizes
	 when it turns out that no static chains are needed because
	 the nested functions referencing non-local variables are not
	 reachable, but the true pessimization is to create the non-
	 local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }
  return type;
}
238 
239 /* Return true if DECL should be referenced by pointer in the non-local
240    frame structure.  */
241 
242 static bool
use_pointer_in_frame(tree decl)243 use_pointer_in_frame (tree decl)
244 {
245   if (TREE_CODE (decl) == PARM_DECL)
246     {
247       /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
248          sized decls, and inefficient to copy large aggregates.  Don't bother
249          moving anything but scalar variables.  */
250       return AGGREGATE_TYPE_P (TREE_TYPE (decl));
251     }
252   else
253     {
254       /* Variable sized types make things "interesting" in the frame.  */
255       return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
256     }
257 }
258 
259 /* Given DECL, a non-locally accessed variable, find or create a field
260    in the non-local frame structure for the given nesting context.  */
261 
static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  /* Pure lookup: report the existing field, or NULL_TREE if none.  */
  if (insert == NO_INSERT)
    {
      tree *slot = info->field_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->field_map->get_or_insert (decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  /* The frame holds only a pointer to the object; the pointer
	     itself need not (and must not) be addressable.  */
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  /* The object lives in the frame directly: mirror the decl's
	     type, location, alignment, addressability and volatility.  */
          TREE_TYPE (field) = TREE_TYPE (decl);
          DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
          SET_DECL_ALIGN (field, DECL_ALIGN (decl));
          DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
          TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
          DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
          TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	}

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      /* Remapping a parameter means the prologue must copy it into the
	 frame; note that so finalization can emit the copies.  */
      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return *slot;
}
304 
305 /* Build or return the variable that holds the static chain within
306    INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */
307 
static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  /* Lazily create the "CHAIN" variable on first use.  */
  if (!decl)
    {
      tree type;

      /* The chain points at the enclosing function's frame record.  */
      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is more
	 close to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      /* Only announce the transition the first time the function gains
	 a static chain.  */
      if (dump_file
          && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}
350 
351 /* Build or return the field within the non-local frame state that holds
352    the static chain for INFO->CONTEXT.  This is the way to walk back up
353    multiple nesting levels.  */
354 
static tree
get_chain_field (struct nesting_info *info)
{
  tree field = info->chain_field;

  /* Lazily create the "__chain" field on first use.  */
  if (!field)
    {
      /* The field holds a pointer to the enclosing function's frame,
	 letting code walk back up one nesting level at a time.  */
      tree type = build_pointer_type (get_frame_type (info->outer));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__chain");
      TREE_TYPE (field) = type;
      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
      DECL_NONADDRESSABLE_P (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->chain_field = field;

      /* Only announce the transition the first time the function gains
	 a static chain.  */
      if (dump_file
          && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return field;
}
384 
385 /* Initialize a new temporary with the GIMPLE_CALL STMT.  */
386 
static tree
init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
		        gcall *call)
{
  tree t;

  /* Capture CALL's return value in a fresh temporary registered in
     INFO's context, and insert the call before GSI.  */
  t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
  gimple_call_set_lhs (call, t);
  /* Borrow the location of the statement we insert before, when
     there is one.  */
  if (! gsi_end_p (*gsi))
    gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before (gsi, call, GSI_SAME_STMT);

  return t;
}
401 
402 
403 /* Copy EXP into a temporary.  Allocate the temporary in the context of
404    INFO and insert the initialization statement before GSI.  */
405 
static tree
init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple *stmt;

  /* Build "tmp = EXP" with a temporary owned by INFO, inserted before
     GSI; return the temporary.  */
  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (t, exp);
  /* Borrow the location of the statement we insert before, when
     there is one.  */
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}
420 
421 
422 /* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */
423 
424 static tree
gsi_gimplify_val(struct nesting_info * info,tree exp,gimple_stmt_iterator * gsi)425 gsi_gimplify_val (struct nesting_info *info, tree exp,
426 		  gimple_stmt_iterator *gsi)
427 {
428   if (is_gimple_val (exp))
429     return exp;
430   else
431     return init_tmp_var (info, exp, gsi);
432 }
433 
434 /* Similarly, but copy from the temporary and insert the statement
435    after the iterator.  */
436 
static tree
save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple *stmt;

  /* Mirror image of init_tmp_var: build "EXP = tmp" and insert it
     AFTER GSI, so a store through the temporary lands back in EXP.  */
  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (exp, t);
  /* Borrow the location of the statement we insert after, when
     there is one.  */
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}
451 
452 /* Build or return the type used to represent a nested function trampoline.  */
453 
454 static GTY(()) tree trampoline_type;
455 
static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  /* The type is target-dependent but function-independent, so one
     cached copy serves the whole translation unit.  */
  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  /* Model the trampoline as a record containing one aligned byte
     array "__data" of SIZE bytes.  */
  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  SET_DECL_ALIGN (t, align);
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
491 
492 /* Build or return the type used to represent a nested function descriptor.  */
493 
494 static GTY(()) tree descriptor_type;
495 
static tree
get_descriptor_type (struct nesting_info *info)
{
  /* The base alignment is that of a function.  */
  const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
  tree t;

  /* Like the trampoline type, this is target-dependent but
     function-independent, so cache one copy.  */
  if (descriptor_type)
    return descriptor_type;

  /* Model the descriptor as a record containing a two-element array
     of pointers named "__data".  */
  t = build_index_type (integer_one_node);
  t = build_array_type (ptr_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
  DECL_USER_ALIGN (t) = 1;

  descriptor_type = make_node (RECORD_TYPE);
  TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
  TYPE_FIELDS (descriptor_type) = t;
  layout_type (descriptor_type);
  DECL_CONTEXT (t) = descriptor_type;

  return descriptor_type;
}
521 
522 /* Given DECL, a nested function, find or create an element in the
523    var map for this function.  */
524 
525 static tree
lookup_element_for_decl(struct nesting_info * info,tree decl,enum insert_option insert)526 lookup_element_for_decl (struct nesting_info *info, tree decl,
527 			 enum insert_option insert)
528 {
529   if (insert == NO_INSERT)
530     {
531       tree *slot = info->var_map->get (decl);
532       return slot ? *slot : NULL_TREE;
533     }
534 
535   tree *slot = &info->var_map->get_or_insert (decl);
536   if (!*slot)
537     *slot = build_tree_list (NULL_TREE, NULL_TREE);
538 
539   return (tree) *slot;
540 }
541 
542 /* Given DECL, a nested function, create a field in the non-local
543    frame structure for this function.  */
544 
545 static tree
create_field_for_decl(struct nesting_info * info,tree decl,tree type)546 create_field_for_decl (struct nesting_info *info, tree decl, tree type)
547 {
548   tree field = make_node (FIELD_DECL);
549   DECL_NAME (field) = DECL_NAME (decl);
550   TREE_TYPE (field) = type;
551   TREE_ADDRESSABLE (field) = 1;
552   insert_field_into_struct (get_frame_type (info), field);
553   return field;
554 }
555 
556 /* Given DECL, a nested function, find or create a field in the non-local
557    frame structure for a trampoline for this function.  */
558 
559 static tree
lookup_tramp_for_decl(struct nesting_info * info,tree decl,enum insert_option insert)560 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
561 		       enum insert_option insert)
562 {
563   tree elt, field;
564 
565   elt = lookup_element_for_decl (info, decl, insert);
566   if (!elt)
567     return NULL_TREE;
568 
569   field = TREE_PURPOSE (elt);
570 
571   if (!field && insert == INSERT)
572     {
573       field = create_field_for_decl (info, decl, get_trampoline_type (info));
574       TREE_PURPOSE (elt) = field;
575       info->any_tramp_created = true;
576     }
577 
578   return field;
579 }
580 
581 /* Given DECL, a nested function, find or create a field in the non-local
582    frame structure for a descriptor for this function.  */
583 
584 static tree
lookup_descr_for_decl(struct nesting_info * info,tree decl,enum insert_option insert)585 lookup_descr_for_decl (struct nesting_info *info, tree decl,
586 		       enum insert_option insert)
587 {
588   tree elt, field;
589 
590   elt = lookup_element_for_decl (info, decl, insert);
591   if (!elt)
592     return NULL_TREE;
593 
594   field = TREE_VALUE (elt);
595 
596   if (!field && insert == INSERT)
597     {
598       field = create_field_for_decl (info, decl, get_descriptor_type (info));
599       TREE_VALUE (elt) = field;
600       info->any_descr_created = true;
601     }
602 
603   return field;
604 }
605 
606 /* Build or return the field within the non-local frame state that holds
607    the non-local goto "jmp_buf".  The buffer itself is maintained by the
608    rtl middle-end as dynamic stack space is allocated.  */
609 
static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  /* Lazily create the "__nl_goto_buf" field on first use.  */
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      /* Compute the save-area size in Pmode-sized words, plus one word
	 for the frame pointer.  */
      scalar_int_mode mode
	= as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = GET_MODE_SIZE (mode);
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
      /* The RTL middle-end writes into the buffer through its address.  */
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
650 
651 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ.  */
652 
static void
walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
	   struct nesting_info *info, gimple_seq *pseq)
{
  struct walk_stmt_info wi;

  /* Start each walk from a clean state; the callbacks communicate
     through WI (INFO is passed via wi.info).  */
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  /* By default operands are walked in contexts where any GIMPLE value
     is acceptable; callbacks flip this off for lvalue positions.  */
  wi.val_only = true;
  walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
}
664 
665 
666 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */
667 
668 static inline void
walk_function(walk_stmt_fn callback_stmt,walk_tree_fn callback_op,struct nesting_info * info)669 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
670 	       struct nesting_info *info)
671 {
672   gimple_seq body = gimple_body (info->context);
673   walk_body (callback_stmt, callback_op, info, &body);
674   gimple_set_body (info->context, body);
675 }
676 
677 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */
678 
static void
walk_gimple_omp_for (gomp_for *for_stmt,
    		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
    		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  /* The pre-body is an ordinary statement sequence.  */
  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  /* Walk the loop-control operands with a private iterator over an
     empty sequence, so any statements the callbacks emit are collected
     and can be appended to the pre-body afterward.  */
  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      /* The index is an lvalue: it must remain directly usable.  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      /* Initial and final bounds are rvalues.  */
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      /* The increment is a binary expression "index OP step": the index
	 operand is an lvalue, the step an rvalue.  */
      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* Any statements generated while walking the control operands belong
     in the pre-body, annotated with the loop's location.  */
  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
729 
730 /* Similarly for ROOT and all functions nested underneath, depth first.  */
731 
732 static void
walk_all_functions(walk_stmt_fn callback_stmt,walk_tree_fn callback_op,struct nesting_info * root)733 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
734 		    struct nesting_info *root)
735 {
736   struct nesting_info *n;
737   FOR_EACH_NEST_INFO (n, root)
738     walk_function (callback_stmt, callback_op, n);
739 }
740 
741 
742 /* We have to check for a fairly pathological case.  The operands of function
743    nested function are to be interpreted in the context of the enclosing
744    function.  So if any are variably-sized, they will get remapped when the
745    enclosing function is inlined.  But that remapping would also have to be
746    done in the types of the PARM_DECLs of the nested function, meaning the
747    argument types of that function will disagree with the arguments in the
748    calls to that function.  So we'd either have to make a copy of the nested
749    function corresponding to each time the enclosing function was inlined or
750    add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
751    function.  The former is not practical.  The latter would still require
752    detecting this case to know when to add the conversions.  So, for now at
753    least, we don't inline such an enclosing function.
754 
755    We have to do that check recursively, so here return indicating whether
756    FNDECL has such a nested function.  ORIG_FN is the function we were
757    trying to inline to use for checking whether any argument is variably
758    modified by anything in it.
759 
760    It would be better to do this in tree-inline.c so that we could give
761    the appropriate warning for why a function can't be inlined, but that's
762    too late since the nesting structure has already been flattened and
763    adding a flag just to record this fact seems a waste of a flag.  */
764 
static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  tree arg;

  /* Examine each function nested directly within FNDECL.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      /* Any parameter whose type is variably modified by something in
	 ORIG_FNDECL triggers the problem described above.  */
      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
	  return true;

      /* Recurse into deeper nesting levels, still checking against the
	 original function being considered for inlining.  */
      if (check_for_nested_with_variably_modified (cgn->decl,
						   orig_fndecl))
	return true;
    }

  return false;
}
784 
785 /* Construct our local datastructure describing the function nesting
786    tree rooted by CGN.  */
787 
static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  /* XCNEW zero-fills, so all links and flags start cleared.  */
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = new hash_map<tree, tree>;
  info->var_map = new hash_map<tree, tree>;
  info->mem_refs = new hash_set<tree *>;
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;

  /* Recursively build subtrees for each nested function, pushing each
     onto the front of INFO's inner list.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See discussion at check_for_nested_with_variably_modified for a
     discussion of why this has to be here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}
813 
814 /* Return an expression computing the static chain for TARGET_CONTEXT
815    from INFO->CONTEXT.  Insert any necessary computations before TSI.  */
816 
static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* The target is ourselves: the chain is simply the address of
	 our own frame.  */
      x = build_addr (info->frame_decl);
      info->static_chain_added |= 1;
    }
  else
    {
      /* Start from our incoming static chain and follow the "__chain"
	 field one level at a time until we reach TARGET_CONTEXT,
	 loading each link into a temporary.  */
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}
    }

  return x;
}
846 
847 
848 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
849    frame as seen from INFO->CONTEXT.  Insert any necessary computations
850    before GSI.  */
851 
static tree
get_frame_field (struct nesting_info *info, tree target_context,
		 tree field, gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      /* The field is in our own frame; reference it directly.  */
      x = info->frame_decl;
      info->static_chain_added |= 1;
    }
  else
    {
      /* Walk up the static chain to the target's frame, loading each
	 intermediate link into a temporary (cf. get_static_chain).  */
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}

      /* Dereference the final chain pointer to get the frame itself.  */
      x = build_simple_mem_ref (x);
    }

  /* Select FIELD out of the frame object.  */
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}
886 
887 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
888 
889 /* A subroutine of convert_nonlocal_reference_op.  Create a local variable
890    in the nested function with DECL_VALUE_EXPR set to reference the true
891    variable in the parent function.  This is used both for debug info
892    and in OMP lowering.  */
893 
894 static tree
get_nonlocal_debug_decl(struct nesting_info * info,tree decl)895 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
896 {
897   tree target_context;
898   struct nesting_info *i;
899   tree x, field, new_decl;
900 
901   tree *slot = &info->var_map->get_or_insert (decl);
902 
903   if (*slot)
904     return *slot;
905 
906   target_context = decl_function_context (decl);
907 
908   /* A copy of the code in get_frame_field, but without the temporaries.  */
909   if (info->context == target_context)
910     {
911       /* Make sure frame_decl gets created.  */
912       (void) get_frame_type (info);
913       x = info->frame_decl;
914       i = info;
915       info->static_chain_added |= 1;
916     }
917   else
918     {
919       x = get_chain_decl (info);
920       info->static_chain_added |= 2;
921       for (i = info->outer; i->context != target_context; i = i->outer)
922 	{
923 	  field = get_chain_field (i);
924 	  x = build_simple_mem_ref (x);
925 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
926 	}
927       x = build_simple_mem_ref (x);
928     }
929 
930   field = lookup_field_for_decl (i, decl, INSERT);
931   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
932   if (use_pointer_in_frame (decl))
933     x = build_simple_mem_ref (x);
934 
935   /* ??? We should be remapping types as well, surely.  */
936   new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
937 			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
938   DECL_CONTEXT (new_decl) = info->context;
939   DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
940   DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
941   TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
942   TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
943   TREE_READONLY (new_decl) = TREE_READONLY (decl);
944   TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
945   DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
946   if ((TREE_CODE (decl) == PARM_DECL
947        || TREE_CODE (decl) == RESULT_DECL
948        || VAR_P (decl))
949       && DECL_BY_REFERENCE (decl))
950     DECL_BY_REFERENCE (new_decl) = 1;
951 
952   SET_DECL_VALUE_EXPR (new_decl, x);
953   DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
954 
955   *slot = new_decl;
956   DECL_CHAIN (new_decl) = info->debug_var_chain;
957   info->debug_var_chain = new_decl;
958 
959   if (!optimize
960       && info->context != target_context
961       && variably_modified_type_p (TREE_TYPE (decl), NULL))
962     note_nonlocal_vla_type (info, TREE_TYPE (decl));
963 
964   return new_decl;
965 }
966 
967 
968 /* Callback for walk_gimple_stmt, rewrite all references to VAR
969    and PARM_DECLs that belong to outer functions.
970 
971    The rewrite will involve some number of structure accesses back up
972    the static chain.  E.g. for a variable FOO up one nesting level it'll
973    be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
974    indirections apply to decls for which use_pointer_in_frame is true.  */
975 
976 static tree
convert_nonlocal_reference_op(tree * tp,int * walk_subtrees,void * data)977 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
978 {
979   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
980   struct nesting_info *const info = (struct nesting_info *) wi->info;
981   tree t = *tp;
982 
983   *walk_subtrees = 0;
984   switch (TREE_CODE (t))
985     {
986     case VAR_DECL:
987       /* Non-automatic variables are never processed.  */
988       if (TREE_STATIC (t) || DECL_EXTERNAL (t))
989 	break;
990       /* FALLTHRU */
991 
992     case PARM_DECL:
993       if (decl_function_context (t) != info->context)
994 	{
995 	  tree x;
996 	  wi->changed = true;
997 
998 	  x = get_nonlocal_debug_decl (info, t);
999 	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1000 	    {
1001 	      tree target_context = decl_function_context (t);
1002 	      struct nesting_info *i;
1003 	      for (i = info->outer; i->context != target_context; i = i->outer)
1004 		continue;
1005 	      x = lookup_field_for_decl (i, t, INSERT);
1006 	      x = get_frame_field (info, target_context, x, &wi->gsi);
1007 	      if (use_pointer_in_frame (t))
1008 		{
1009 		  x = init_tmp_var (info, x, &wi->gsi);
1010 		  x = build_simple_mem_ref (x);
1011 		}
1012 	    }
1013 
1014 	  if (wi->val_only)
1015 	    {
1016 	      if (wi->is_lhs)
1017 		x = save_tmp_var (info, x, &wi->gsi);
1018 	      else
1019 		x = init_tmp_var (info, x, &wi->gsi);
1020 	    }
1021 
1022 	  *tp = x;
1023 	}
1024       break;
1025 
1026     case LABEL_DECL:
1027       /* We're taking the address of a label from a parent function, but
1028 	 this is not itself a non-local goto.  Mark the label such that it
1029 	 will not be deleted, much as we would with a label address in
1030 	 static storage.  */
1031       if (decl_function_context (t) != info->context)
1032         FORCED_LABEL (t) = 1;
1033       break;
1034 
1035     case ADDR_EXPR:
1036       {
1037 	bool save_val_only = wi->val_only;
1038 
1039 	wi->val_only = false;
1040 	wi->is_lhs = false;
1041 	wi->changed = false;
1042 	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
1043 	wi->val_only = true;
1044 
1045 	if (wi->changed)
1046 	  {
1047 	    tree save_context;
1048 
1049 	    /* If we changed anything, we might no longer be directly
1050 	       referencing a decl.  */
1051 	    save_context = current_function_decl;
1052 	    current_function_decl = info->context;
1053 	    recompute_tree_invariant_for_addr_expr (t);
1054 	    current_function_decl = save_context;
1055 
1056 	    /* If the callback converted the address argument in a context
1057 	       where we only accept variables (and min_invariant, presumably),
1058 	       then compute the address into a temporary.  */
1059 	    if (save_val_only)
1060 	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1061 				      t, &wi->gsi);
1062 	  }
1063       }
1064       break;
1065 
1066     case REALPART_EXPR:
1067     case IMAGPART_EXPR:
1068     case COMPONENT_REF:
1069     case ARRAY_REF:
1070     case ARRAY_RANGE_REF:
1071     case BIT_FIELD_REF:
1072       /* Go down this entire nest and just look at the final prefix and
1073 	 anything that describes the references.  Otherwise, we lose track
1074 	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
1075       wi->val_only = true;
1076       wi->is_lhs = false;
1077       for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1078 	{
1079 	  if (TREE_CODE (t) == COMPONENT_REF)
1080 	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1081 		       NULL);
1082 	  else if (TREE_CODE (t) == ARRAY_REF
1083 		   || TREE_CODE (t) == ARRAY_RANGE_REF)
1084 	    {
1085 	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1086 			 wi, NULL);
1087 	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1088 			 wi, NULL);
1089 	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1090 			 wi, NULL);
1091 	    }
1092 	}
1093       wi->val_only = false;
1094       walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1095       break;
1096 
1097     case VIEW_CONVERT_EXPR:
1098       /* Just request to look at the subtrees, leaving val_only and lhs
1099 	 untouched.  This might actually be for !val_only + lhs, in which
1100 	 case we don't want to force a replacement by a temporary.  */
1101       *walk_subtrees = 1;
1102       break;
1103 
1104     default:
1105       if (!IS_TYPE_OR_DECL_P (t))
1106 	{
1107 	  *walk_subtrees = 1;
1108           wi->val_only = true;
1109 	  wi->is_lhs = false;
1110 	}
1111       break;
1112     }
1113 
1114   return NULL_TREE;
1115 }
1116 
1117 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1118 					     struct walk_stmt_info *);
1119 
1120 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1121    and PARM_DECLs that belong to outer functions.  */
1122 
1123 static bool
convert_nonlocal_omp_clauses(tree * pclauses,struct walk_stmt_info * wi)1124 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1125 {
1126   struct nesting_info *const info = (struct nesting_info *) wi->info;
1127   bool need_chain = false, need_stmts = false;
1128   tree clause, decl, *pdecl;
1129   int dummy;
1130   bitmap new_suppress;
1131 
1132   new_suppress = BITMAP_GGC_ALLOC ();
1133   bitmap_copy (new_suppress, info->suppress_expansion);
1134 
1135   for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1136     {
1137       pdecl = NULL;
1138       switch (OMP_CLAUSE_CODE (clause))
1139 	{
1140 	case OMP_CLAUSE_REDUCTION:
1141 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1142 	    need_stmts = true;
1143 	  if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
1144 	    {
1145 	      pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
1146 	      if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
1147 		pdecl = &TREE_OPERAND (*pdecl, 0);
1148 	      if (TREE_CODE (*pdecl) == INDIRECT_REF
1149 		  || TREE_CODE (*pdecl) == ADDR_EXPR)
1150 		pdecl = &TREE_OPERAND (*pdecl, 0);
1151 	    }
1152 	  goto do_decl_clause;
1153 
1154 	case OMP_CLAUSE_LASTPRIVATE:
1155 	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1156 	    need_stmts = true;
1157 	  goto do_decl_clause;
1158 
1159 	case OMP_CLAUSE_LINEAR:
1160 	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1161 	    need_stmts = true;
1162 	  wi->val_only = true;
1163 	  wi->is_lhs = false;
1164 	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1165 					 &dummy, wi);
1166 	  goto do_decl_clause;
1167 
1168 	case OMP_CLAUSE_PRIVATE:
1169 	case OMP_CLAUSE_FIRSTPRIVATE:
1170 	case OMP_CLAUSE_COPYPRIVATE:
1171 	case OMP_CLAUSE_SHARED:
1172 	case OMP_CLAUSE_TO_DECLARE:
1173 	case OMP_CLAUSE_LINK:
1174 	case OMP_CLAUSE_USE_DEVICE_PTR:
1175 	case OMP_CLAUSE_IS_DEVICE_PTR:
1176 	do_decl_clause:
1177 	  if (pdecl == NULL)
1178 	    pdecl = &OMP_CLAUSE_DECL (clause);
1179 	  decl = *pdecl;
1180 	  if (VAR_P (decl)
1181 	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1182 	    break;
1183 	  if (decl_function_context (decl) != info->context)
1184 	    {
1185 	      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1186 		OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1187 	      bitmap_set_bit (new_suppress, DECL_UID (decl));
1188 	      *pdecl = get_nonlocal_debug_decl (info, decl);
1189 	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1190 		need_chain = true;
1191 	    }
1192 	  break;
1193 
1194 	case OMP_CLAUSE_SCHEDULE:
1195 	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1196 	    break;
1197 	  /* FALLTHRU */
1198 	case OMP_CLAUSE_FINAL:
1199 	case OMP_CLAUSE_IF:
1200 	case OMP_CLAUSE_NUM_THREADS:
1201 	case OMP_CLAUSE_DEPEND:
1202 	case OMP_CLAUSE_DEVICE:
1203 	case OMP_CLAUSE_NUM_TEAMS:
1204 	case OMP_CLAUSE_THREAD_LIMIT:
1205 	case OMP_CLAUSE_SAFELEN:
1206 	case OMP_CLAUSE_SIMDLEN:
1207 	case OMP_CLAUSE_PRIORITY:
1208 	case OMP_CLAUSE_GRAINSIZE:
1209 	case OMP_CLAUSE_NUM_TASKS:
1210 	case OMP_CLAUSE_HINT:
1211 	case OMP_CLAUSE_NUM_GANGS:
1212 	case OMP_CLAUSE_NUM_WORKERS:
1213 	case OMP_CLAUSE_VECTOR_LENGTH:
1214 	case OMP_CLAUSE_GANG:
1215 	case OMP_CLAUSE_WORKER:
1216 	case OMP_CLAUSE_VECTOR:
1217 	case OMP_CLAUSE_ASYNC:
1218 	case OMP_CLAUSE_WAIT:
1219 	  /* Several OpenACC clauses have optional arguments.  Check if they
1220 	     are present.  */
1221 	  if (OMP_CLAUSE_OPERAND (clause, 0))
1222 	    {
1223 	      wi->val_only = true;
1224 	      wi->is_lhs = false;
1225 	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1226 					     &dummy, wi);
1227 	    }
1228 
1229 	  /* The gang clause accepts two arguments.  */
1230 	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1231 	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1232 	    {
1233 		wi->val_only = true;
1234 		wi->is_lhs = false;
1235 		convert_nonlocal_reference_op
1236 		  (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1237 	    }
1238 	  break;
1239 
1240 	case OMP_CLAUSE_DIST_SCHEDULE:
1241 	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1242 	    {
1243 	      wi->val_only = true;
1244 	      wi->is_lhs = false;
1245 	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1246 					     &dummy, wi);
1247 	    }
1248 	  break;
1249 
1250 	case OMP_CLAUSE_MAP:
1251 	case OMP_CLAUSE_TO:
1252 	case OMP_CLAUSE_FROM:
1253 	  if (OMP_CLAUSE_SIZE (clause))
1254 	    {
1255 	      wi->val_only = true;
1256 	      wi->is_lhs = false;
1257 	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1258 					     &dummy, wi);
1259 	    }
1260 	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
1261 	    goto do_decl_clause;
1262 	  wi->val_only = true;
1263 	  wi->is_lhs = false;
1264 	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1265 		     wi, NULL);
1266 	  break;
1267 
1268 	case OMP_CLAUSE_ALIGNED:
1269 	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1270 	    {
1271 	      wi->val_only = true;
1272 	      wi->is_lhs = false;
1273 	      convert_nonlocal_reference_op
1274 		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1275 	    }
1276 	  /* Like do_decl_clause, but don't add any suppression.  */
1277 	  decl = OMP_CLAUSE_DECL (clause);
1278 	  if (VAR_P (decl)
1279 	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1280 	    break;
1281 	  if (decl_function_context (decl) != info->context)
1282 	    {
1283 	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1284 	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1285 		need_chain = true;
1286 	    }
1287 	  break;
1288 
1289 	case OMP_CLAUSE_NOWAIT:
1290 	case OMP_CLAUSE_ORDERED:
1291 	case OMP_CLAUSE_DEFAULT:
1292 	case OMP_CLAUSE_COPYIN:
1293 	case OMP_CLAUSE_COLLAPSE:
1294 	case OMP_CLAUSE_TILE:
1295 	case OMP_CLAUSE_UNTIED:
1296 	case OMP_CLAUSE_MERGEABLE:
1297 	case OMP_CLAUSE_PROC_BIND:
1298 	case OMP_CLAUSE_NOGROUP:
1299 	case OMP_CLAUSE_THREADS:
1300 	case OMP_CLAUSE_SIMD:
1301 	case OMP_CLAUSE_DEFAULTMAP:
1302 	case OMP_CLAUSE_SEQ:
1303 	case OMP_CLAUSE_INDEPENDENT:
1304 	case OMP_CLAUSE_AUTO:
1305 	  break;
1306 
1307 	  /* The following clause belongs to the OpenACC cache directive, which
1308 	     is discarded during gimplification.  */
1309 	case OMP_CLAUSE__CACHE_:
1310 	  /* The following clauses are only allowed in the OpenMP declare simd
1311 	     directive, so not seen here.  */
1312 	case OMP_CLAUSE_UNIFORM:
1313 	case OMP_CLAUSE_INBRANCH:
1314 	case OMP_CLAUSE_NOTINBRANCH:
1315 	  /* The following clauses are only allowed on OpenMP cancel and
1316 	     cancellation point directives, which at this point have already
1317 	     been lowered into a function call.  */
1318 	case OMP_CLAUSE_FOR:
1319 	case OMP_CLAUSE_PARALLEL:
1320 	case OMP_CLAUSE_SECTIONS:
1321 	case OMP_CLAUSE_TASKGROUP:
1322 	  /* The following clauses are only added during OMP lowering; nested
1323 	     function decomposition happens before that.  */
1324 	case OMP_CLAUSE__LOOPTEMP_:
1325 	case OMP_CLAUSE__SIMDUID_:
1326 	case OMP_CLAUSE__GRIDDIM_:
1327 	  /* Anything else.  */
1328 	default:
1329 	  gcc_unreachable ();
1330 	}
1331     }
1332 
1333   info->suppress_expansion = new_suppress;
1334 
1335   if (need_stmts)
1336     for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1337       switch (OMP_CLAUSE_CODE (clause))
1338 	{
1339 	case OMP_CLAUSE_REDUCTION:
1340 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1341 	    {
1342 	      tree old_context
1343 		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1344 	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1345 		= info->context;
1346 	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1347 		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1348 		  = info->context;
1349 	      tree save_local_var_chain = info->new_local_var_chain;
1350 	      info->new_local_var_chain = NULL;
1351 	      gimple_seq *seq = &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause);
1352 	      walk_body (convert_nonlocal_reference_stmt,
1353 			 convert_nonlocal_reference_op, info, seq);
1354 	      if (info->new_local_var_chain)
1355 		declare_vars (info->new_local_var_chain,
1356 			      gimple_seq_first_stmt (*seq), false);
1357 	      info->new_local_var_chain = NULL;
1358 	      seq = &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause);
1359 	      walk_body (convert_nonlocal_reference_stmt,
1360 			 convert_nonlocal_reference_op, info, seq);
1361 	      if (info->new_local_var_chain)
1362 		declare_vars (info->new_local_var_chain,
1363 			      gimple_seq_first_stmt (*seq), false);
1364 	      info->new_local_var_chain = save_local_var_chain;
1365 	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1366 		= old_context;
1367 	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1368 		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1369 		  = old_context;
1370 	    }
1371 	  break;
1372 
1373 	case OMP_CLAUSE_LASTPRIVATE:
1374 	  {
1375 	    tree save_local_var_chain = info->new_local_var_chain;
1376 	    info->new_local_var_chain = NULL;
1377 	    gimple_seq *seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause);
1378 	    walk_body (convert_nonlocal_reference_stmt,
1379 		       convert_nonlocal_reference_op, info, seq);
1380 	    if (info->new_local_var_chain)
1381 	      declare_vars (info->new_local_var_chain,
1382 			    gimple_seq_first_stmt (*seq), false);
1383 	    info->new_local_var_chain = save_local_var_chain;
1384 	  }
1385 	  break;
1386 
1387 	case OMP_CLAUSE_LINEAR:
1388 	  {
1389 	    tree save_local_var_chain = info->new_local_var_chain;
1390 	    info->new_local_var_chain = NULL;
1391 	    gimple_seq *seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause);
1392 	    walk_body (convert_nonlocal_reference_stmt,
1393 		       convert_nonlocal_reference_op, info, seq);
1394 	    if (info->new_local_var_chain)
1395 	      declare_vars (info->new_local_var_chain,
1396 			    gimple_seq_first_stmt (*seq), false);
1397 	    info->new_local_var_chain = save_local_var_chain;
1398 	  }
1399 	  break;
1400 
1401 	default:
1402 	  break;
1403 	}
1404 
1405   return need_chain;
1406 }
1407 
1408 /* Create nonlocal debug decls for nonlocal VLA array bounds.  */
1409 
1410 static void
note_nonlocal_vla_type(struct nesting_info * info,tree type)1411 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1412 {
1413   while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1414     type = TREE_TYPE (type);
1415 
1416   if (TYPE_NAME (type)
1417       && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1418       && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1419     type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1420 
1421   while (POINTER_TYPE_P (type)
1422 	 || TREE_CODE (type) == VECTOR_TYPE
1423 	 || TREE_CODE (type) == FUNCTION_TYPE
1424 	 || TREE_CODE (type) == METHOD_TYPE)
1425     type = TREE_TYPE (type);
1426 
1427   if (TREE_CODE (type) == ARRAY_TYPE)
1428     {
1429       tree domain, t;
1430 
1431       note_nonlocal_vla_type (info, TREE_TYPE (type));
1432       domain = TYPE_DOMAIN (type);
1433       if (domain)
1434 	{
1435 	  t = TYPE_MIN_VALUE (domain);
1436 	  if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1437 	      && decl_function_context (t) != info->context)
1438 	    get_nonlocal_debug_decl (info, t);
1439 	  t = TYPE_MAX_VALUE (domain);
1440 	  if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1441 	      && decl_function_context (t) != info->context)
1442 	    get_nonlocal_debug_decl (info, t);
1443 	}
1444     }
1445 }
1446 
1447 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1448    in BLOCK.  */
1449 
1450 static void
note_nonlocal_block_vlas(struct nesting_info * info,tree block)1451 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1452 {
1453   tree var;
1454 
1455   for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1456     if (VAR_P (var)
1457 	&& variably_modified_type_p (TREE_TYPE (var), NULL)
1458 	&& DECL_HAS_VALUE_EXPR_P (var)
1459 	&& decl_function_context (var) != info->context)
1460       note_nonlocal_vla_type (info, TREE_TYPE (var));
1461 }
1462 
1463 /* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
1464    PARM_DECLs that belong to outer functions.  This handles statements
1465    that are not handled via the standard recursion done in
1466    walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
1467    convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
1468    operands of STMT have been handled by this function.  */
1469 
1470 static tree
convert_nonlocal_reference_stmt(gimple_stmt_iterator * gsi,bool * handled_ops_p,struct walk_stmt_info * wi)1471 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1472 				 struct walk_stmt_info *wi)
1473 {
1474   struct nesting_info *info = (struct nesting_info *) wi->info;
1475   tree save_local_var_chain;
1476   bitmap save_suppress;
1477   gimple *stmt = gsi_stmt (*gsi);
1478 
1479   switch (gimple_code (stmt))
1480     {
1481     case GIMPLE_GOTO:
1482       /* Don't walk non-local gotos for now.  */
1483       if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1484 	{
1485 	  wi->val_only = true;
1486 	  wi->is_lhs = false;
1487 	  *handled_ops_p = false;
1488 	  return NULL_TREE;
1489 	}
1490       break;
1491 
1492     case GIMPLE_OMP_PARALLEL:
1493     case GIMPLE_OMP_TASK:
1494       save_suppress = info->suppress_expansion;
1495       if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1496 	                                wi))
1497 	{
1498 	  tree c, decl;
1499 	  decl = get_chain_decl (info);
1500 	  c = build_omp_clause (gimple_location (stmt),
1501 				OMP_CLAUSE_FIRSTPRIVATE);
1502 	  OMP_CLAUSE_DECL (c) = decl;
1503 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1504 	  gimple_omp_taskreg_set_clauses (stmt, c);
1505 	}
1506 
1507       save_local_var_chain = info->new_local_var_chain;
1508       info->new_local_var_chain = NULL;
1509 
1510       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1511 	         info, gimple_omp_body_ptr (stmt));
1512 
1513       if (info->new_local_var_chain)
1514 	declare_vars (info->new_local_var_chain,
1515 	              gimple_seq_first_stmt (gimple_omp_body (stmt)),
1516 		      false);
1517       info->new_local_var_chain = save_local_var_chain;
1518       info->suppress_expansion = save_suppress;
1519       break;
1520 
1521     case GIMPLE_OMP_FOR:
1522       save_suppress = info->suppress_expansion;
1523       convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1524       walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1525 			   convert_nonlocal_reference_stmt,
1526 	  		   convert_nonlocal_reference_op, info);
1527       walk_body (convert_nonlocal_reference_stmt,
1528 	  	 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1529       info->suppress_expansion = save_suppress;
1530       break;
1531 
1532     case GIMPLE_OMP_SECTIONS:
1533       save_suppress = info->suppress_expansion;
1534       convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1535       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1536 	         info, gimple_omp_body_ptr (stmt));
1537       info->suppress_expansion = save_suppress;
1538       break;
1539 
1540     case GIMPLE_OMP_SINGLE:
1541       save_suppress = info->suppress_expansion;
1542       convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1543       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1544 	         info, gimple_omp_body_ptr (stmt));
1545       info->suppress_expansion = save_suppress;
1546       break;
1547 
1548     case GIMPLE_OMP_TARGET:
1549       if (!is_gimple_omp_offloaded (stmt))
1550 	{
1551 	  save_suppress = info->suppress_expansion;
1552 	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1553 					wi);
1554 	  info->suppress_expansion = save_suppress;
1555 	  walk_body (convert_nonlocal_reference_stmt,
1556 		     convert_nonlocal_reference_op, info,
1557 		     gimple_omp_body_ptr (stmt));
1558 	  break;
1559 	}
1560       save_suppress = info->suppress_expansion;
1561       if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1562 					wi))
1563 	{
1564 	  tree c, decl;
1565 	  decl = get_chain_decl (info);
1566 	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1567 	  OMP_CLAUSE_DECL (c) = decl;
1568 	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
1569 	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1570 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1571 	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1572 	}
1573 
1574       save_local_var_chain = info->new_local_var_chain;
1575       info->new_local_var_chain = NULL;
1576 
1577       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1578 		 info, gimple_omp_body_ptr (stmt));
1579 
1580       if (info->new_local_var_chain)
1581 	declare_vars (info->new_local_var_chain,
1582 		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
1583 		      false);
1584       info->new_local_var_chain = save_local_var_chain;
1585       info->suppress_expansion = save_suppress;
1586       break;
1587 
1588     case GIMPLE_OMP_TEAMS:
1589       save_suppress = info->suppress_expansion;
1590       convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1591       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1592 		 info, gimple_omp_body_ptr (stmt));
1593       info->suppress_expansion = save_suppress;
1594       break;
1595 
1596     case GIMPLE_OMP_SECTION:
1597     case GIMPLE_OMP_MASTER:
1598     case GIMPLE_OMP_TASKGROUP:
1599     case GIMPLE_OMP_ORDERED:
1600       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1601 	         info, gimple_omp_body_ptr (stmt));
1602       break;
1603 
1604     case GIMPLE_BIND:
1605       {
1606       gbind *bind_stmt = as_a <gbind *> (stmt);
1607       if (!optimize && gimple_bind_block (bind_stmt))
1608 	note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));
1609 
1610       for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
1611 	if (TREE_CODE (var) == NAMELIST_DECL)
1612 	  {
1613 	    /* Adjust decls mentioned in NAMELIST_DECL.  */
1614 	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1615 	    tree decl;
1616 	    unsigned int i;
1617 
1618 	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1619 	      {
1620 		if (VAR_P (decl)
1621 		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1622 		  continue;
1623 		if (decl_function_context (decl) != info->context)
1624 		  CONSTRUCTOR_ELT (decls, i)->value
1625 		    = get_nonlocal_debug_decl (info, decl);
1626 	      }
1627 	  }
1628 
1629       *handled_ops_p = false;
1630       return NULL_TREE;
1631       }
1632     case GIMPLE_COND:
1633       wi->val_only = true;
1634       wi->is_lhs = false;
1635       *handled_ops_p = false;
1636       return NULL_TREE;
1637 
1638     default:
1639       /* For every other statement that we are not interested in
1640 	 handling here, let the walker traverse the operands.  */
1641       *handled_ops_p = false;
1642       return NULL_TREE;
1643     }
1644 
1645   /* We have handled all of STMT operands, no need to traverse the operands.  */
1646   *handled_ops_p = true;
1647   return NULL_TREE;
1648 }
1649 
1650 
1651 /* A subroutine of convert_local_reference.  Create a local variable
1652    in the parent function with DECL_VALUE_EXPR set to reference the
1653    field in FRAME.  This is used both for debug info and in OMP
1654    lowering.  */
1655 
1656 static tree
get_local_debug_decl(struct nesting_info * info,tree decl,tree field)1657 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1658 {
1659   tree x, new_decl;
1660 
1661   tree *slot = &info->var_map->get_or_insert (decl);
1662   if (*slot)
1663     return *slot;
1664 
1665   /* Make sure frame_decl gets created.  */
1666   (void) get_frame_type (info);
1667   x = info->frame_decl;
1668   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1669 
1670   new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1671 			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1672   DECL_CONTEXT (new_decl) = info->context;
1673   DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1674   DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1675   TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1676   TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1677   TREE_READONLY (new_decl) = TREE_READONLY (decl);
1678   TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1679   DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1680   if ((TREE_CODE (decl) == PARM_DECL
1681        || TREE_CODE (decl) == RESULT_DECL
1682        || VAR_P (decl))
1683       && DECL_BY_REFERENCE (decl))
1684     DECL_BY_REFERENCE (new_decl) = 1;
1685 
1686   SET_DECL_VALUE_EXPR (new_decl, x);
1687   DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1688   *slot = new_decl;
1689 
1690   DECL_CHAIN (new_decl) = info->debug_var_chain;
1691   info->debug_var_chain = new_decl;
1692 
1693   /* Do not emit debug info twice.  */
1694   DECL_IGNORED_P (decl) = 1;
1695 
1696   return new_decl;
1697 }
1698 
1699 
1700 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1701    and PARM_DECLs that were referenced by inner nested functions.
1702    The rewrite will be a structure reference to the local frame variable.  */
1703 
1704 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1705 
static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  /* By default we handle each node here ourselves; cases that want the
     generic walker to recurse set *WALK_SUBTREES back to 1.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Default to the debug decl; use the real frame field unless
	     expansion of this decl is currently suppressed (e.g. it was
	     already rewritten by convert_local_omp_clauses).  */
	  x = get_local_debug_decl (info, t, field);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  /* In a value context the frame component ref must be reduced
	     to a simple temporary; the temporary is loaded or stored
	     depending on whether this operand is an lhs.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      /* Walk the operand in address (non-value) context so that a decl
	 rewritten underneath stays a bare reference.  */
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  /* Recompute invariance flags in the context of the function
	     owning the frame, not whatever current_function_decl is.  */
	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    /* Operand 2 is the optional offset field.  */
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Index, lower bound and element size operands.  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      /* Finally process the innermost base object in address context.  */
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
		 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
	 part of a ADDR_EXPR address are not allowed.  But we cannot
	 fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	info->mem_refs->add (tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  /* Any other expression: recurse, and subexpressions are
	     rvalue uses.  */
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1850 
1851 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1852 					  struct walk_stmt_info *);
1853 
1854 /* Helper for convert_local_reference.  Convert all the references in
1855    the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.  */
1856 
1857 static bool
convert_local_omp_clauses(tree * pclauses,struct walk_stmt_info * wi)1858 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1859 {
1860   struct nesting_info *const info = (struct nesting_info *) wi->info;
1861   bool need_frame = false, need_stmts = false;
1862   tree clause, decl, *pdecl;
1863   int dummy;
1864   bitmap new_suppress;
1865 
1866   new_suppress = BITMAP_GGC_ALLOC ();
1867   bitmap_copy (new_suppress, info->suppress_expansion);
1868 
1869   for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1870     {
1871       pdecl = NULL;
1872       switch (OMP_CLAUSE_CODE (clause))
1873 	{
1874 	case OMP_CLAUSE_REDUCTION:
1875 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1876 	    need_stmts = true;
1877 	  if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
1878 	    {
1879 	      pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
1880 	      if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
1881 		pdecl = &TREE_OPERAND (*pdecl, 0);
1882 	      if (TREE_CODE (*pdecl) == INDIRECT_REF
1883 		  || TREE_CODE (*pdecl) == ADDR_EXPR)
1884 		pdecl = &TREE_OPERAND (*pdecl, 0);
1885 	    }
1886 	  goto do_decl_clause;
1887 
1888 	case OMP_CLAUSE_LASTPRIVATE:
1889 	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1890 	    need_stmts = true;
1891 	  goto do_decl_clause;
1892 
1893 	case OMP_CLAUSE_LINEAR:
1894 	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1895 	    need_stmts = true;
1896 	  wi->val_only = true;
1897 	  wi->is_lhs = false;
1898 	  convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1899 				      wi);
1900 	  goto do_decl_clause;
1901 
1902 	case OMP_CLAUSE_PRIVATE:
1903 	case OMP_CLAUSE_FIRSTPRIVATE:
1904 	case OMP_CLAUSE_COPYPRIVATE:
1905 	case OMP_CLAUSE_SHARED:
1906 	case OMP_CLAUSE_TO_DECLARE:
1907 	case OMP_CLAUSE_LINK:
1908 	case OMP_CLAUSE_USE_DEVICE_PTR:
1909 	case OMP_CLAUSE_IS_DEVICE_PTR:
1910 	do_decl_clause:
1911 	  if (pdecl == NULL)
1912 	    pdecl = &OMP_CLAUSE_DECL (clause);
1913 	  decl = *pdecl;
1914 	  if (VAR_P (decl)
1915 	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1916 	    break;
1917 	  if (decl_function_context (decl) == info->context
1918 	      && !use_pointer_in_frame (decl))
1919 	    {
1920 	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1921 	      if (field)
1922 		{
1923 		  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1924 		    OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1925 		  bitmap_set_bit (new_suppress, DECL_UID (decl));
1926 		  *pdecl = get_local_debug_decl (info, decl, field);
1927 		  need_frame = true;
1928 		}
1929 	    }
1930 	  break;
1931 
1932 	case OMP_CLAUSE_SCHEDULE:
1933 	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1934 	    break;
1935 	  /* FALLTHRU */
1936 	case OMP_CLAUSE_FINAL:
1937 	case OMP_CLAUSE_IF:
1938 	case OMP_CLAUSE_NUM_THREADS:
1939 	case OMP_CLAUSE_DEPEND:
1940 	case OMP_CLAUSE_DEVICE:
1941 	case OMP_CLAUSE_NUM_TEAMS:
1942 	case OMP_CLAUSE_THREAD_LIMIT:
1943 	case OMP_CLAUSE_SAFELEN:
1944 	case OMP_CLAUSE_SIMDLEN:
1945 	case OMP_CLAUSE_PRIORITY:
1946 	case OMP_CLAUSE_GRAINSIZE:
1947 	case OMP_CLAUSE_NUM_TASKS:
1948 	case OMP_CLAUSE_HINT:
1949 	case OMP_CLAUSE_NUM_GANGS:
1950 	case OMP_CLAUSE_NUM_WORKERS:
1951 	case OMP_CLAUSE_VECTOR_LENGTH:
1952 	case OMP_CLAUSE_GANG:
1953 	case OMP_CLAUSE_WORKER:
1954 	case OMP_CLAUSE_VECTOR:
1955 	case OMP_CLAUSE_ASYNC:
1956 	case OMP_CLAUSE_WAIT:
1957 	  /* Several OpenACC clauses have optional arguments.  Check if they
1958 	     are present.  */
1959 	  if (OMP_CLAUSE_OPERAND (clause, 0))
1960 	    {
1961 	      wi->val_only = true;
1962 	      wi->is_lhs = false;
1963 	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1964 					  &dummy, wi);
1965 	    }
1966 
1967 	  /* The gang clause accepts two arguments.  */
1968 	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1969 	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1970 	    {
1971 		wi->val_only = true;
1972 		wi->is_lhs = false;
1973 		convert_nonlocal_reference_op
1974 		  (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1975 	    }
1976 	  break;
1977 
1978 	case OMP_CLAUSE_DIST_SCHEDULE:
1979 	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1980 	    {
1981 	      wi->val_only = true;
1982 	      wi->is_lhs = false;
1983 	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1984 					  &dummy, wi);
1985 	    }
1986 	  break;
1987 
1988 	case OMP_CLAUSE_MAP:
1989 	case OMP_CLAUSE_TO:
1990 	case OMP_CLAUSE_FROM:
1991 	  if (OMP_CLAUSE_SIZE (clause))
1992 	    {
1993 	      wi->val_only = true;
1994 	      wi->is_lhs = false;
1995 	      convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
1996 					  &dummy, wi);
1997 	    }
1998 	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
1999 	    goto do_decl_clause;
2000 	  wi->val_only = true;
2001 	  wi->is_lhs = false;
2002 	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
2003 		     wi, NULL);
2004 	  break;
2005 
2006 	case OMP_CLAUSE_ALIGNED:
2007 	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
2008 	    {
2009 	      wi->val_only = true;
2010 	      wi->is_lhs = false;
2011 	      convert_local_reference_op
2012 		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
2013 	    }
2014 	  /* Like do_decl_clause, but don't add any suppression.  */
2015 	  decl = OMP_CLAUSE_DECL (clause);
2016 	  if (VAR_P (decl)
2017 	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2018 	    break;
2019 	  if (decl_function_context (decl) == info->context
2020 	      && !use_pointer_in_frame (decl))
2021 	    {
2022 	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2023 	      if (field)
2024 		{
2025 		  OMP_CLAUSE_DECL (clause)
2026 		    = get_local_debug_decl (info, decl, field);
2027 		  need_frame = true;
2028 		}
2029 	    }
2030 	  break;
2031 
2032 	case OMP_CLAUSE_NOWAIT:
2033 	case OMP_CLAUSE_ORDERED:
2034 	case OMP_CLAUSE_DEFAULT:
2035 	case OMP_CLAUSE_COPYIN:
2036 	case OMP_CLAUSE_COLLAPSE:
2037 	case OMP_CLAUSE_TILE:
2038 	case OMP_CLAUSE_UNTIED:
2039 	case OMP_CLAUSE_MERGEABLE:
2040 	case OMP_CLAUSE_PROC_BIND:
2041 	case OMP_CLAUSE_NOGROUP:
2042 	case OMP_CLAUSE_THREADS:
2043 	case OMP_CLAUSE_SIMD:
2044 	case OMP_CLAUSE_DEFAULTMAP:
2045 	case OMP_CLAUSE_SEQ:
2046 	case OMP_CLAUSE_INDEPENDENT:
2047 	case OMP_CLAUSE_AUTO:
2048 	  break;
2049 
2050 	  /* The following clause belongs to the OpenACC cache directive, which
2051 	     is discarded during gimplification.  */
2052 	case OMP_CLAUSE__CACHE_:
2053 	  /* The following clauses are only allowed in the OpenMP declare simd
2054 	     directive, so not seen here.  */
2055 	case OMP_CLAUSE_UNIFORM:
2056 	case OMP_CLAUSE_INBRANCH:
2057 	case OMP_CLAUSE_NOTINBRANCH:
2058 	  /* The following clauses are only allowed on OpenMP cancel and
2059 	     cancellation point directives, which at this point have already
2060 	     been lowered into a function call.  */
2061 	case OMP_CLAUSE_FOR:
2062 	case OMP_CLAUSE_PARALLEL:
2063 	case OMP_CLAUSE_SECTIONS:
2064 	case OMP_CLAUSE_TASKGROUP:
2065 	  /* The following clauses are only added during OMP lowering; nested
2066 	     function decomposition happens before that.  */
2067 	case OMP_CLAUSE__LOOPTEMP_:
2068 	case OMP_CLAUSE__SIMDUID_:
2069 	case OMP_CLAUSE__GRIDDIM_:
2070 	  /* Anything else.  */
2071 	default:
2072 	  gcc_unreachable ();
2073 	}
2074     }
2075 
2076   info->suppress_expansion = new_suppress;
2077 
2078   if (need_stmts)
2079     for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2080       switch (OMP_CLAUSE_CODE (clause))
2081 	{
2082 	case OMP_CLAUSE_REDUCTION:
2083 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2084 	    {
2085 	      tree old_context
2086 		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
2087 	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2088 		= info->context;
2089 	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2090 		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2091 		  = info->context;
2092 	      walk_body (convert_local_reference_stmt,
2093 			 convert_local_reference_op, info,
2094 			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
2095 	      walk_body (convert_local_reference_stmt,
2096 			 convert_local_reference_op, info,
2097 			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
2098 	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2099 		= old_context;
2100 	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2101 		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2102 		  = old_context;
2103 	    }
2104 	  break;
2105 
2106 	case OMP_CLAUSE_LASTPRIVATE:
2107 	  walk_body (convert_local_reference_stmt,
2108 		     convert_local_reference_op, info,
2109 		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
2110 	  break;
2111 
2112 	case OMP_CLAUSE_LINEAR:
2113 	  walk_body (convert_local_reference_stmt,
2114 		     convert_local_reference_op, info,
2115 		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
2116 	  break;
2117 
2118 	default:
2119 	  break;
2120 	}
2121 
2122   return need_frame;
2123 }
2124 
2125 
2126 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
2127    and PARM_DECLs that were referenced by inner nested functions.
2128    The rewrite will be a structure reference to the local frame variable.  */
2129 
static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  char save_static_chain_added;
  bool frame_decl_added;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Save suppression state; clause conversion below may extend it for
	 the duration of this region's body only.  */
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      /* If any clause referenced the frame, share FRAME.* with the region
	 via an explicit shared clause.  */
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
	                             wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      /* Walk the region body with fresh local-var and chain-added state
	 so we can tell what the body itself introduced.  */
      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
	         gimple_omp_body_ptr (stmt));

      /* The body may have referenced the frame even if no clause did;
	 add the shared clause now if it is still missing.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  info->static_chain_added |= 4;
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
      /* Temporaries created while rewriting the body belong to the body's
	 outermost bind, not to the enclosing function.  */
      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      /* Loop bounds/steps need their own walk helper.  */
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target constructs only need clause conversion and a
	 plain body walk, like the simpler constructs above.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  break;
	}
      /* Offloaded regions mirror the PARALLEL/TASK logic, but the frame
	 must be mapped (GOMP_MAP_TOFROM) rather than shared.  */
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	}

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      /* Clause-less constructs: just recurse into the body.  */
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_COND:
      /* Condition operands are rvalues; let the walker visit them.  */
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      /* A clobber of a variable that was moved into the frame is dropped:
	 the frame object outlives the original decl's scope.  */
      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (!use_pointer_in_frame (lhs)
	      && lookup_field_for_decl (info, lhs, NO_INSERT))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
	   var;
	   var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (VAR_P (decl)
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) == info->context
		    && !use_pointer_in_frame (decl))
		  {
		    tree field = lookup_field_for_decl (info, decl, NO_INSERT);
		    if (field)
		      {
			CONSTRUCTOR_ELT (decls, i)->value
			  = get_local_debug_decl (info, decl, field);
		      }
		  }
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2345 
2346 
2347 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2348    that reference labels from outer functions.  The rewrite will be a
2349    call to __builtin_nonlocal_goto.  */
2350 
2351 static tree
convert_nl_goto_reference(gimple_stmt_iterator * gsi,bool * handled_ops_p,struct walk_stmt_info * wi)2352 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2353 			   struct walk_stmt_info *wi)
2354 {
2355   struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2356   tree label, new_label, target_context, x, field;
2357   gcall *call;
2358   gimple *stmt = gsi_stmt (*gsi);
2359 
2360   if (gimple_code (stmt) != GIMPLE_GOTO)
2361     {
2362       *handled_ops_p = false;
2363       return NULL_TREE;
2364     }
2365 
2366   label = gimple_goto_dest (stmt);
2367   if (TREE_CODE (label) != LABEL_DECL)
2368     {
2369       *handled_ops_p = false;
2370       return NULL_TREE;
2371     }
2372 
2373   target_context = decl_function_context (label);
2374   if (target_context == info->context)
2375     {
2376       *handled_ops_p = false;
2377       return NULL_TREE;
2378     }
2379 
2380   for (i = info->outer; target_context != i->context; i = i->outer)
2381     continue;
2382 
2383   /* The original user label may also be use for a normal goto, therefore
2384      we must create a new label that will actually receive the abnormal
2385      control transfer.  This new label will be marked LABEL_NONLOCAL; this
2386      mark will trigger proper behavior in the cfg, as well as cause the
2387      (hairy target-specific) non-local goto receiver code to be generated
2388      when we expand rtl.  Enter this association into var_map so that we
2389      can insert the new label into the IL during a second pass.  */
2390   tree *slot = &i->var_map->get_or_insert (label);
2391   if (*slot == NULL)
2392     {
2393       new_label = create_artificial_label (UNKNOWN_LOCATION);
2394       DECL_NONLOCAL (new_label) = 1;
2395       *slot = new_label;
2396     }
2397   else
2398     new_label = *slot;
2399 
2400   /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
2401   field = get_nl_goto_field (i);
2402   x = get_frame_field (info, target_context, field, gsi);
2403   x = build_addr (x);
2404   x = gsi_gimplify_val (info, x, gsi);
2405   call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2406 			    2, build_addr (new_label), x);
2407   gsi_replace (gsi, call, false);
2408 
2409   /* We have handled all of STMT's operands, no need to keep going.  */
2410   *handled_ops_p = true;
2411   return NULL_TREE;
2412 }
2413 
2414 
2415 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2416    are referenced via nonlocal goto from a nested function.  The rewrite
2417    will involve installing a newly generated DECL_NONLOCAL label, and
2418    (potentially) a branch around the rtl gunk that is assumed to be
2419    attached to such a label.  */
2420 
2421 static tree
convert_nl_goto_receiver(gimple_stmt_iterator * gsi,bool * handled_ops_p,struct walk_stmt_info * wi)2422 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2423 			  struct walk_stmt_info *wi)
2424 {
2425   struct nesting_info *const info = (struct nesting_info *) wi->info;
2426   tree label, new_label;
2427   gimple_stmt_iterator tmp_gsi;
2428   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2429 
2430   if (!stmt)
2431     {
2432       *handled_ops_p = false;
2433       return NULL_TREE;
2434     }
2435 
2436   label = gimple_label_label (stmt);
2437 
2438   tree *slot = info->var_map->get (label);
2439   if (!slot)
2440     {
2441       *handled_ops_p = false;
2442       return NULL_TREE;
2443     }
2444 
2445   /* If there's any possibility that the previous statement falls through,
2446      then we must branch around the new non-local label.  */
2447   tmp_gsi = wi->gsi;
2448   gsi_prev (&tmp_gsi);
2449   if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2450     {
2451       gimple *stmt = gimple_build_goto (label);
2452       gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2453     }
2454 
2455   new_label = (tree) *slot;
2456   stmt = gimple_build_label (new_label);
2457   gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2458 
2459   *handled_ops_p = true;
2460   return NULL_TREE;
2461 }
2462 
2463 
2464 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2465    of nested functions that require the use of trampolines.  The rewrite
2466    will involve a reference a trampoline generated for the occasion.  */
2467 
static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  bool descr;
  gcall *call;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;

      /* Decide whether to generate a descriptor or a trampoline. */
      descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;

      /* Reserve a frame field for the trampoline/descriptor in the
	 callee's parent, creating it if needed.  */
      if (descr)
	x = lookup_descr_for_decl (i, decl, INSERT);
      else
	x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      if (descr)
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
      else
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      /* Replace the original &nested_fn with the adjusted pointer.  */
      *tp = x;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
2547 
2548 
2549 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2550    to addresses of nested functions that require the use of
2551    trampolines.  The rewrite will involve a reference a trampoline
2552    generated for the occasion.  */
2553 
static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      {
	/* Rewrite the construct's own operands, then its body with fresh
	   local-var and chain-added state so we can see exactly which
	   CHAIN.*/FRAME.* decls the body started using.  */
	tree save_local_var_chain = info->new_local_var_chain;
        walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	info->new_local_var_chain = NULL;
	char save_static_chain_added = info->static_chain_added;
	info->static_chain_added = 0;
        walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
		   info, gimple_omp_body_ptr (stmt));
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (gimple_omp_body (stmt)),
			false);
	/* Bit 0 = frame decl used, bit 1 = chain decl used.  */
	for (int i = 0; i < 2; i++)
	  {
	    tree c, decl;
	    if ((info->static_chain_added & (1 << i)) == 0)
	      continue;
	    decl = i ? get_chain_decl (info) : info->frame_decl;
	    /* Don't add CHAIN.* or FRAME.* twice.  */
	    /* NOTE(review): gimple_omp_taskreg_clauses is also reached here
	       for GIMPLE_OMP_TARGET via the fallthru above — confirm that
	       accessor accepts target statements.  */
	    for (c = gimple_omp_taskreg_clauses (stmt);
		 c;
		 c = OMP_CLAUSE_CHAIN (c))
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		  && OMP_CLAUSE_DECL (c) == decl)
		break;
	    if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
	      {
		/* Parallel/task: chain is firstprivate, frame is shared.  */
		c = build_omp_clause (gimple_location (stmt),
				      i ? OMP_CLAUSE_FIRSTPRIVATE
				      : OMP_CLAUSE_SHARED);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		gimple_omp_taskreg_set_clauses (stmt, c);
	      }
	    else if (c == NULL)
	      {
		/* Offloaded target: map the decl to the device instead.  */
		c = build_omp_clause (gimple_location (stmt),
				      OMP_CLAUSE_MAP);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_SET_MAP_KIND (c,
					 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
		OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
		gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					       c);
	      }
	  }
	info->new_local_var_chain = save_local_var_chain;
	info->static_chain_added |= save_static_chain_added;
      }
      break;

    default:
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2644 
2645 
2646 
2647 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2648    that reference nested functions to make sure that the static chain
2649    is set up properly for the call.  */
2650 
static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* If a static chain operand is already present, leave it alone.  */
      if (gimple_call_chain (stmt))
	break;
      /* Indirect calls have no known callee decl; nothing to do.  */
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  /* The callee is a nested function that uses its static chain:
	     compute the chain value for TARGET_CONTEXT and attach it to
	     the call.  Record which decl that required -- bit 0 for our
	     own FRAME, bit 1 for the chain we received from our parent
	     (i.e. when the target context is not our own) -- so that
	     enclosing OMP regions can add matching sharing clauses.  */
	  gimple_call_set_chain (as_a <gcall *> (stmt),
				 get_static_chain (info, target_context,
						   &wi->gsi));
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Walk the region body with static_chain_added reset so we can
	 observe exactly which chain decls calls inside it required.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  /* Bit 1 corresponds to CHAIN.*, bit 0 to FRAME.*.  */
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      /* CHAIN.* is firstprivatized; FRAME.* is made shared.  */
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions need no map clauses; just recurse.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      /* Offloaded regions use map clauses: CHAIN.* is mapped
		 to-device only, FRAME.* is mapped both ways.  */
	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					     c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
	  	 gimple_omp_for_pre_body_ptr (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      /* Plain lexical OMP constructs: just recurse into the body.  */
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2771 
2772 /* Walk the nesting tree starting with ROOT.  Convert all trampolines and
2773    call expressions.  At the same time, determine if a nested function
2774    actually uses its static chain; if not, remember that.  */
2775 
2776 static void
convert_all_function_calls(struct nesting_info * root)2777 convert_all_function_calls (struct nesting_info *root)
2778 {
2779   unsigned int chain_count = 0, old_chain_count, iter_count;
2780   struct nesting_info *n;
2781 
2782   /* First, optimistically clear static_chain for all decls that haven't
2783      used the static chain already for variable access.  But always create
2784      it if not optimizing.  This makes it possible to reconstruct the static
2785      nesting tree at run time and thus to resolve up-level references from
2786      within the debugger.  */
2787   FOR_EACH_NEST_INFO (n, root)
2788     {
2789       tree decl = n->context;
2790       if (!optimize)
2791 	{
2792 	  if (n->inner)
2793 	    (void) get_frame_type (n);
2794 	  if (n->outer)
2795 	    (void) get_chain_decl (n);
2796 	}
2797       else if (!n->outer || (!n->chain_decl && !n->chain_field))
2798 	{
2799 	  DECL_STATIC_CHAIN (decl) = 0;
2800 	  if (dump_file && (dump_flags & TDF_DETAILS))
2801 	    fprintf (dump_file, "Guessing no static-chain for %s\n",
2802 		     lang_hooks.decl_printable_name (decl, 2));
2803 	}
2804       else
2805 	DECL_STATIC_CHAIN (decl) = 1;
2806       chain_count += DECL_STATIC_CHAIN (decl);
2807     }
2808 
2809   /* Walk the functions and perform transformations.  Note that these
2810      transformations can induce new uses of the static chain, which in turn
2811      require re-examining all users of the decl.  */
2812   /* ??? It would make sense to try to use the call graph to speed this up,
2813      but the call graph hasn't really been built yet.  Even if it did, we
2814      would still need to iterate in this loop since address-of references
2815      wouldn't show up in the callgraph anyway.  */
2816   iter_count = 0;
2817   do
2818     {
2819       old_chain_count = chain_count;
2820       chain_count = 0;
2821       iter_count++;
2822 
2823       if (dump_file && (dump_flags & TDF_DETAILS))
2824 	fputc ('\n', dump_file);
2825 
2826       FOR_EACH_NEST_INFO (n, root)
2827 	{
2828 	  tree decl = n->context;
2829 	  walk_function (convert_tramp_reference_stmt,
2830 			 convert_tramp_reference_op, n);
2831 	  walk_function (convert_gimple_call, NULL, n);
2832 	  chain_count += DECL_STATIC_CHAIN (decl);
2833 	}
2834     }
2835   while (chain_count != old_chain_count);
2836 
2837   if (dump_file && (dump_flags & TDF_DETAILS))
2838     fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2839 	     iter_count);
2840 }
2841 
/* Data passed through the copy_body machinery when remapping debug and
   VLA decls: the standard copy_body_data plus the nesting_info whose
   var_map drives decl replacement (see nesting_copy_decl).  */

struct nesting_copy_body_data
{
  copy_body_data cb;
  struct nesting_info *root;
};
2847 
2848 /* A helper subroutine for debug_var_chain type remapping.  */
2849 
2850 static tree
nesting_copy_decl(tree decl,copy_body_data * id)2851 nesting_copy_decl (tree decl, copy_body_data *id)
2852 {
2853   struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2854   tree *slot = nid->root->var_map->get (decl);
2855 
2856   if (slot)
2857     return (tree) *slot;
2858 
2859   if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2860     {
2861       tree new_decl = copy_decl_no_change (decl, id);
2862       DECL_ORIGINAL_TYPE (new_decl)
2863 	= remap_type (DECL_ORIGINAL_TYPE (decl), id);
2864       return new_decl;
2865     }
2866 
2867   if (VAR_P (decl)
2868       || TREE_CODE (decl) == PARM_DECL
2869       || TREE_CODE (decl) == RESULT_DECL)
2870     return decl;
2871 
2872   return copy_decl_no_change (decl, id);
2873 }
2874 
2875 /* A helper function for remap_vla_decls.  See if *TP contains
2876    some remapped variables.  */
2877 
2878 static tree
contains_remapped_vars(tree * tp,int * walk_subtrees,void * data)2879 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2880 {
2881   struct nesting_info *root = (struct nesting_info *) data;
2882   tree t = *tp;
2883 
2884   if (DECL_P (t))
2885     {
2886       *walk_subtrees = 0;
2887       tree *slot = root->var_map->get (t);
2888 
2889       if (slot)
2890 	return *slot;
2891     }
2892   return NULL;
2893 }
2894 
2895 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2896    involved.  */
2897 
static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Recurse into all lexical subblocks first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* First pass: scan for the first variable in this block whose value
     expression or variably-modified type involves a remapped decl.
     If there is none, this block needs no work.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	/* Only *PTR-style value expressions of variably-modified (VLA)
	   types are of interest here.  */
	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  if (var == NULL_TREE)
    return;

  /* Set up a copy_body_data whose decl hook consults root->var_map
     (see nesting_copy_decl).  */
  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  /* Second pass: starting from the variable found above, rewrite the
     types and value expressions of all affected variables.  */
  for (; var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Locate the nesting_info of the function declaring VAR; skip
	   the variable if it belongs to a function outside this tree.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
	if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	/* Remap the variable's type, then step through matching unnamed
	   pointer layers so NEWT and TYPE line up at the named level.  */
	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	/* If remapping produced a distinct type that still shares the old
	   type's TYPE_DECL name, remap the name decl as well.  */
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	/* Finally rewrite the value expression itself.  */
	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
2992 
2993 /* Fold the MEM_REF *E.  */
2994 bool
fold_mem_refs(tree * const & e,void * data ATTRIBUTE_UNUSED)2995 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2996 {
2997   tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2998   *ref_p = fold (*ref_p);
2999   return true;
3000 }
3001 
3002 /* Given DECL, a nested function, build an initialization call for FIELD,
3003    the trampoline or descriptor for DECL, using FUNC as the function.  */
3004 
3005 static gcall *
build_init_call_stmt(struct nesting_info * info,tree decl,tree field,tree func)3006 build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
3007 		      tree func)
3008 {
3009   tree arg1, arg2, arg3, x;
3010 
3011   gcc_assert (DECL_STATIC_CHAIN (decl));
3012   arg3 = build_addr (info->frame_decl);
3013 
3014   arg2 = build_addr (decl);
3015 
3016   x = build3 (COMPONENT_REF, TREE_TYPE (field),
3017 	      info->frame_decl, field, NULL_TREE);
3018   arg1 = build_addr (x);
3019 
3020   return gimple_build_call (func, 3, arg1, arg2, arg3);
3021 }
3022 
3023 /* Do "everything else" to clean up or complete state collected by the various
3024    walking passes -- create a field to hold the frame base address, lay out the
3025    types and decls, generate code to initialize the frame decl, store critical
3026    expressions in the struct function for rtl to find.  */
3027 
static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list;
  gimple *stmt;
  tree context = root->context;
  struct function *sf;

  /* Initialization statements built below are prepended to the
     function body at the end.  */
  stmt_list = NULL;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* Debugging information needs to compute the frame base address of the
	 parent frame out of the static chain from the nested frame.

	 The static chain is the address of the FRAME record, so one could
	 imagine it would be possible to compute the frame base address just
	 adding a constant offset to this address.  Unfortunately, this is not
	 possible: if the FRAME object has alignment constraints that are
	 stronger than the stack, then the offset between the frame base and
	 the FRAME object will be dynamic.

	 What we do instead is to append a field to the FRAME object that holds
	 the frame base address: then debug info just has to fetch this
	 field.  */

      /* Debugging information will refer to the CFA as the frame base
	 address: we will do the same here.  */
      const tree frame_addr_fndecl
        = builtin_decl_explicit (BUILT_IN_DWARF_CFA);

      /* Create a field in the FRAME record to hold the frame base address for
	 this stack frame.  Since it will be used only by the debugger, put it
	 at the end of the record in order not to shift all other offsets.  */
      tree fb_decl = make_node (FIELD_DECL);

      DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
      TREE_TYPE (fb_decl) = ptr_type_node;
      TREE_ADDRESSABLE (fb_decl) = 1;
      DECL_CONTEXT (fb_decl) = root->frame_type;
      TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
						fb_decl);

      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Initialize the frame base address field.  If the builtin we need is
	 not available, set it to NULL so that debugging information does not
	 reference junk.  */
      tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
			    root->frame_decl, fb_decl, NULL_TREE);
      tree fb_tmp;

      if (frame_addr_fndecl != NULL_TREE)
	{
	  gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
						integer_zero_node);
	  gimple_stmt_iterator gsi = gsi_last (stmt_list);

	  fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
	}
      else
	fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
      gimple_seq_add_stmt (&stmt_list,
			   gimple_build_assign (fb_ref, fb_tmp));

      /* Remove root->frame_decl from root->new_local_var_chain, so
	 that we can declare it also in the lexical blocks, which
	 helps ensure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs().  */
      tree *adjust;
      for (adjust = &root->new_local_var_chain;
	   *adjust != root->frame_decl;
	   adjust = &DECL_CHAIN (*adjust))
	gcc_assert (DECL_CHAIN (*adjust));
      *adjust = DECL_CHAIN (*adjust);

      DECL_CHAIN (root->frame_decl) = NULL_TREE;
      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  /* By-reference parameters store their address in the frame;
	     by-value parameters store a copy of the value.  */
	  if (use_pointer_in_frame (p))
	    x = build_addr (p);
	  else
	    x = p;

	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If descriptors were created, then we need to initialize them.  */
  if (root->any_descr_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_descr_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gbind *bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
      /* Splice the initializers in front of the existing body.  */
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      /* Check whether any debug decl has a variable-length type.  */
      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Find the innermost enclosing function whose decls the
		   type depends on; skip the var if there is none.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		/* Remap the type, then walk down matching unnamed pointer
		   layers so NEWT and T line up at the named level.  */
		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		/* If remapping produced a distinct type still sharing the
		   old TYPE_DECL name, remap the name decl as well.  */
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      /* Attach the debug decls either to the outermost bind's block or,
	 failing that, directly to the function's outermost BLOCK.  */
      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
3306 
3307 static void
finalize_nesting_tree(struct nesting_info * root)3308 finalize_nesting_tree (struct nesting_info *root)
3309 {
3310   struct nesting_info *n;
3311   FOR_EACH_NEST_INFO (n, root)
3312     finalize_nesting_tree_1 (n);
3313 }
3314 
3315 /* Unnest the nodes and pass them to cgraph.  */
3316 
3317 static void
unnest_nesting_tree_1(struct nesting_info * root)3318 unnest_nesting_tree_1 (struct nesting_info *root)
3319 {
3320   struct cgraph_node *node = cgraph_node::get (root->context);
3321 
3322   /* For nested functions update the cgraph to reflect unnesting.
3323      We also delay finalizing of these functions up to this point.  */
3324   if (node->origin)
3325     {
3326        node->unnest ();
3327        cgraph_node::finalize_function (root->context, true);
3328     }
3329 }
3330 
3331 static void
unnest_nesting_tree(struct nesting_info * root)3332 unnest_nesting_tree (struct nesting_info *root)
3333 {
3334   struct nesting_info *n;
3335   FOR_EACH_NEST_INFO (n, root)
3336     unnest_nesting_tree_1 (n);
3337 }
3338 
3339 /* Free the data structures allocated during this pass.  */
3340 
3341 static void
free_nesting_tree(struct nesting_info * root)3342 free_nesting_tree (struct nesting_info *root)
3343 {
3344   struct nesting_info *node, *next;
3345 
3346   node = iter_nestinfo_start (root);
3347   do
3348     {
3349       next = iter_nestinfo_next (node);
3350       delete node->var_map;
3351       delete node->field_map;
3352       delete node->mem_refs;
3353       free (node);
3354       node = next;
3355     }
3356   while (node);
3357 }
3358 
3359 /* Gimplify a function and all its nested functions.  */
3360 static void
gimplify_all_functions(struct cgraph_node * root)3361 gimplify_all_functions (struct cgraph_node *root)
3362 {
3363   struct cgraph_node *iter;
3364   if (!gimple_body (root->decl))
3365     gimplify_function_tree (root->decl);
3366   for (iter = root->nested; iter; iter = iter->next_nested)
3367     gimplify_all_functions (iter);
3368 }
3369 
3370 /* Main entry point for this pass.  Process FNDECL and all of its nested
3371    subroutines and turn them into something less tightly bound.  */
3372 
void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node::get (fndecl);
  if (!cgn->nested)
    return;

  /* Nested function bodies may not have been gimplified yet; do it
     for the whole nest before walking them.  */
  gimplify_all_functions (cgn);

  dump_file = dump_begin (TDI_nested, &dump_flags);
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
	     lang_hooks.decl_printable_name (fndecl, 2));

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  /* Rewrite references to decls of enclosing functions: first as seen
     from inner functions (nonlocal), then within the declaring function
     itself (local), then nonlocal gotos and their receivers.  */
  walk_all_functions (convert_nonlocal_reference_stmt,
                      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
                      convert_local_reference_op,
		      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  /* Rewrite calls and trampoline references, lay out the frame records,
     and hand the now-unnested functions to the cgraph.  */
  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  /* Release all pass-local data.  */
  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      dump_file = NULL;
    }
}
3416 
3417 #include "gt-tree-nested.h"
3418