1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2    references into scalar references, exposing them to the scalar
3    optimizers.
4    Copyright (C) 2008-2018 Free Software Foundation, Inc.
5    Contributed by Martin Jambor <mjambor@suse.cz>
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 /* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
24    twice, once in the early stages of compilation (early SRA) and once in the
25    late stages (late SRA).  The aim of both is to turn references to scalar
26    parts of aggregates into uses of independent scalar variables.
27 
28    The two passes are nearly identical; the only difference is that early SRA
29    does not scalarize unions which are used as the result in a GIMPLE_RETURN
30    statement because together with inlining this can lead to weird type
31    conversions.
32 
33    Both passes operate in four stages:
34 
35    1. The declarations that have properties which make them candidates for
36       scalarization are identified in function find_var_candidates().  The
37       candidates are stored in candidate_bitmap.
38 
39    2. The function body is scanned.  In the process, declarations which are
40       used in a manner that prevents their scalarization are removed from the
41       candidate bitmap.  More importantly, for every access into an aggregate,
42       an access structure (struct access) is created by create_access() and
43       stored in a vector associated with the aggregate.  Among other
44       information, the aggregate declaration, the offset and size of the access
45       and its type are stored in the structure.
46 
47       On a related note, assign_link structures are created for every assign
48       statement between candidate aggregates and attached to the related
49       accesses.
50 
51    3. The vectors of accesses are analyzed.  They are first sorted according to
52       their offset and size and then scanned for partially overlapping accesses
53       (i.e. those which overlap but one is not entirely within another).  Such
54       an access disqualifies the whole aggregate from being scalarized.
55 
56       If there is no such inhibiting overlap, a representative access structure
57       is chosen for every unique combination of offset and size.  Afterwards,
58       the pass builds a set of trees from these structures, in which children
59       of an access are within their parent (in terms of offset and size).
60 
61       Then accesses are propagated whenever possible (i.e. in cases when it
62       does not create a partially overlapping access) across assign_links from
63       the right hand side to the left hand side.
64 
65       Then the set of trees for each declaration is traversed again and those
66       accesses which should be replaced by a scalar are identified.
67 
68    4. The function is traversed again, and for every reference into an
69       aggregate that has some component which is about to be scalarized,
70       statements are amended and new statements are created as necessary.
71       Finally, if a parameter got scalarized, the scalar replacements are
72       initialized with values from respective parameter aggregates.  */
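/* A purely illustrative example (not part of the pass itself): given

     struct pair { int a; int b; };

     int
     sum (void)
     {
       struct pair p;
       p.a = 1;
       p.b = 2;
       return p.a + p.b;
     }

   intraprocedural SRA creates accesses for p.a and p.b, decides that they can
   be replaced, and rewrites the function to use two independent scalar
   replacements (named along the lines of p$a and p$b in dumps), after which
   the aggregate P itself can be removed as dead.  */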
73 
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "backend.h"
78 #include "target.h"
79 #include "rtl.h"
80 #include "tree.h"
81 #include "gimple.h"
82 #include "predict.h"
83 #include "alloc-pool.h"
84 #include "tree-pass.h"
85 #include "ssa.h"
86 #include "cgraph.h"
87 #include "gimple-pretty-print.h"
88 #include "alias.h"
89 #include "fold-const.h"
90 #include "tree-eh.h"
91 #include "stor-layout.h"
92 #include "gimplify.h"
93 #include "gimple-iterator.h"
94 #include "gimplify-me.h"
95 #include "gimple-walk.h"
96 #include "tree-cfg.h"
97 #include "tree-dfa.h"
98 #include "tree-ssa.h"
99 #include "symbol-summary.h"
100 #include "ipa-param-manipulation.h"
101 #include "ipa-prop.h"
102 #include "params.h"
103 #include "dbgcnt.h"
104 #include "tree-inline.h"
105 #include "ipa-fnsummary.h"
106 #include "ipa-utils.h"
107 #include "builtins.h"
108 
109 /* Enumeration of all aggregate reductions we can do.  */
110 enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
111 		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
112 		SRA_MODE_INTRA };     /* late intraprocedural SRA */
113 
114 /* Global variable describing which aggregate reduction we are performing at
115    the moment.  */
116 static enum sra_mode sra_mode;
117 
118 struct assign_link;
119 
120 /* ACCESS represents each access to an aggregate variable (as a whole or a
121    part).  It can also represent a group of accesses that refer to exactly the
122    same fragment of an aggregate (i.e. those that have exactly the same offset
123    and size).  Such representatives for a single aggregate, once determined,
124    are linked in a linked list and have the group fields set.
125 
126    Moreover, when doing intraprocedural SRA, a tree is built from those
127    representatives (by the means of first_child and next_sibling pointers), in
128    which all items in a subtree are "within" the root, i.e. their offset is
129    greater or equal to offset of the root and offset+size is smaller or equal
130    to offset+size of the root.  Children of an access are sorted by offset.
131 
132    Note that accesses to parts of vector and complex number types are always
133    represented by an access to the whole complex number or vector.  It is the
134    duty of the modifying functions to replace them appropriately.  */
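/* An illustrative example (layout assumes 32-bit int, for exposition only):
   for a variable of type

     struct outer { struct { int x; int y; } in; int z; };

   the group representatives might form the following forest:

     offset 0,  size 64    <- access to the whole member "in"
       offset 0,  size 32    <- in.x
       offset 32, size 32    <- in.y
     offset 64, size 32    <- z

   where the accesses for in.x and in.y are children of the access for "in"
   (linked through first_child/next_sibling) and the accesses for "in" and
   "z" are roots chained through next_grp.  */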
135 
136 struct access
137 {
138   /* Values returned by `get_ref_base_and_extent' for each component reference.
139      If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
140      `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
141   HOST_WIDE_INT offset;
142   HOST_WIDE_INT size;
143   tree base;
144 
145   /* Expression.  It is context dependent so do not use it to create new
146      expressions to access the original aggregate.  See PR 42154 for a
147      testcase.  */
148   tree expr;
149   /* Type.  */
150   tree type;
151 
152   /* The statement this access belongs to.  */
153   gimple *stmt;
154 
155   /* Next group representative for this aggregate. */
156   struct access *next_grp;
157 
158   /* Pointer to the group representative.  Pointer to itself if the struct is
159      the representative.  */
160   struct access *group_representative;
161 
162   /* After access tree has been constructed, this points to the parent of the
163      current access, if there is one.  NULL for roots.  */
164   struct access *parent;
165 
166   /* If this access has any children (in terms of the definition above), this
167      points to the first one.  */
168   struct access *first_child;
169 
170   /* In intraprocedural SRA, pointer to the next sibling in the access tree as
171      described above.  In IPA-SRA this is a pointer to the next access
172      belonging to the same group (having the same representative).  */
173   struct access *next_sibling;
174 
175   /* Pointers to the first and last element in the linked list of assign
176      links.  */
177   struct assign_link *first_link, *last_link;
178 
179   /* Pointer to the next access in the work queue.  */
180   struct access *next_queued;
181 
182   /* Replacement variable for this access "region."  Never to be accessed
183      directly, always only by the means of get_access_replacement() and only
184      when grp_to_be_replaced flag is set.  */
185   tree replacement_decl;
186 
187   /* Is this access an access to a non-addressable field? */
188   unsigned non_addressable : 1;
189 
190   /* Is this access made in reverse storage order? */
191   unsigned reverse : 1;
192 
193   /* Is this particular access a write access? */
194   unsigned write : 1;
195 
196   /* Is this access currently in the work queue?  */
197   unsigned grp_queued : 1;
198 
199   /* Does this group contain a write access?  This flag is propagated down the
200      access tree.  */
201   unsigned grp_write : 1;
202 
203   /* Does this group contain a read access?  This flag is propagated down the
204      access tree.  */
205   unsigned grp_read : 1;
206 
207   /* Does this group contain a read access that comes from an assignment
208      statement?  This flag is propagated down the access tree.  */
209   unsigned grp_assignment_read : 1;
210 
211   /* Does this group contain a write access that comes from an assignment
212      statement?  This flag is propagated down the access tree.  */
213   unsigned grp_assignment_write : 1;
214 
215   /* Does this group contain a read access through a scalar type?  This flag is
216      not propagated in the access tree in any direction.  */
217   unsigned grp_scalar_read : 1;
218 
219   /* Does this group contain a write access through a scalar type?  This flag
220      is not propagated in the access tree in any direction.  */
221   unsigned grp_scalar_write : 1;
222 
223   /* Is this access an artificial one created to scalarize some record
224      entirely? */
225   unsigned grp_total_scalarization : 1;
226 
227   /* Other passes of the analysis use this bit to make function
228      analyze_access_subtree create scalar replacements for this group if
229      possible.  */
230   unsigned grp_hint : 1;
231 
232   /* Is the subtree rooted in this access fully covered by scalar
233      replacements?  */
234   unsigned grp_covered : 1;
235 
236   /* If set to true, this access and all below it in an access tree must not be
237      scalarized.  */
238   unsigned grp_unscalarizable_region : 1;
239 
240   /* Whether data have been written to parts of the aggregate covered by this
241      access which are not to be scalarized.  This flag is propagated up the
242      access tree.  */
243   unsigned grp_unscalarized_data : 1;
244 
245   /* Does this access and/or group contain a write access through a
246      BIT_FIELD_REF?  */
247   unsigned grp_partial_lhs : 1;
248 
249   /* Set when a scalar replacement should be created for this variable.  */
250   unsigned grp_to_be_replaced : 1;
251 
252   /* Set when we want a replacement for the sole purpose of having it in
253      generated debug statements.  */
254   unsigned grp_to_be_debug_replaced : 1;
255 
256   /* Should TREE_NO_WARNING of a replacement be set?  */
257   unsigned grp_no_warning : 1;
258 
259   /* Is it possible that the group refers to data which might be (directly or
260      otherwise) modified?  */
261   unsigned grp_maybe_modified : 1;
262 
263   /* Set when this is a representative of a pointer to scalar (i.e. by
264      reference) parameter which we consider for turning into a plain scalar
265      (i.e. a by value parameter).  */
266   unsigned grp_scalar_ptr : 1;
267 
268   /* Set when we discover that this pointer is not safe to dereference in the
269      caller.  */
270   unsigned grp_not_necessarilly_dereferenced : 1;
271 };
272 
273 typedef struct access *access_p;
274 
275 
276 /* Alloc pool for allocating access structures.  */
277 static object_allocator<struct access> access_pool ("SRA accesses");
278 
279 /* A structure linking lhs and rhs accesses from an aggregate assignment.  They
280    are used to propagate subaccesses from rhs to lhs as long as they don't
281    conflict with what is already there.  */
282 struct assign_link
283 {
284   struct access *lacc, *racc;
285   struct assign_link *next;
286 };
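/* For example (illustrative only): for an aggregate assignment "dst = src;"
   where both sides are candidates, one assign_link is allocated with LACC
   describing the store to dst and RACC describing the load from src.  If an
   access to a component such as src.f is later discovered, a matching
   subaccess of dst can be created by propagating it across this link.  */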
287 
288 /* Alloc pool for allocating assign link structures.  */
289 static object_allocator<assign_link> assign_link_pool ("SRA links");
290 
291 /* Base (tree) -> Vector (vec<access_p> *) map.  */
292 static hash_map<tree, auto_vec<access_p> > *base_access_vec;
293 
294 /* Candidate hash table helpers.  */
295 
296 struct uid_decl_hasher : nofree_ptr_hash <tree_node>
297 {
298   static inline hashval_t hash (const tree_node *);
299   static inline bool equal (const tree_node *, const tree_node *);
300 };
301 
302 /* Hash a tree in a uid_decl_map.  */
303 
304 inline hashval_t
305 uid_decl_hasher::hash (const tree_node *item)
306 {
307   return item->decl_minimal.uid;
308 }
309 
310 /* Return true if the DECL_UID in both trees are equal.  */
311 
312 inline bool
313 uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
314 {
315   return (a->decl_minimal.uid == b->decl_minimal.uid);
316 }
317 
318 /* Set of candidates.  */
319 static bitmap candidate_bitmap;
320 static hash_table<uid_decl_hasher> *candidates;
321 
322 /* For a candidate UID return the candidates decl.  */
323 
324 static inline tree
325 candidate (unsigned uid)
326 {
327  tree_node t;
328  t.decl_minimal.uid = uid;
329  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
330 }
331 
332 /* Bitmap of candidates which we should try to entirely scalarize away and
333    those which cannot be (because they are and need to be used as a whole).  */
334 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
335 
336 /* Bitmap of candidates in the constant pool, which cannot be scalarized
337    because this would produce non-constant expressions (e.g. Ada).  */
338 static bitmap disqualified_constants;
339 
340 /* Obstack for creation of fancy names.  */
341 static struct obstack name_obstack;
342 
343 /* Head of a linked list of accesses that need to have their subaccesses
344    propagated to their assignment counterparts. */
345 static struct access *work_queue_head;
346 
347 /* Number of parameters of the analyzed function when doing early ipa SRA.  */
348 static int func_param_count;
349 
350 /* scan_function sets the following to true if it encounters a call to
351    __builtin_apply_args.  */
352 static bool encountered_apply_args;
353 
354 /* Set by scan_function when it finds a recursive call.  */
355 static bool encountered_recursive_call;
356 
357 /* Set by scan_function when it finds a recursive call with fewer actual
358    arguments than formal parameters.  */
359 static bool encountered_unchangable_recursive_call;
360 
361 /* This is a table in which for each basic block and parameter there is a
362    distance (offset + size) in that parameter which is dereferenced and
363    accessed in that BB.  */
364 static HOST_WIDE_INT *bb_dereferences;
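/* For instance, the dereferenced extent of parameter number 2 within basic
   block number 5 is stored at bb_dereferences[5 * func_param_count + 2];
   see mark_parm_dereference below.  */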
365 /* Bitmap of BBs that can cause the function to "stop" progressing by
366    returning, throwing externally, looping infinitely or calling a function
367    which might abort, etc.  */
368 static bitmap final_bbs;
369 
370 /* Representative of no accesses at all. */
371 static struct access  no_accesses_representant;
372 
373 /* Predicate to test the special value.  */
374 
375 static inline bool
376 no_accesses_p (struct access *access)
377 {
378   return access == &no_accesses_representant;
379 }
380 
381 /* Statistics gathered by the SRA pass.  */
384 
385 static struct
386 {
387   /* Number of processed aggregates is readily available in
388      analyze_all_variable_accesses and so is not stored here.  */
389 
390   /* Number of created scalar replacements.  */
391   int replacements;
392 
393   /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
394      expression.  */
395   int exprs;
396 
397   /* Number of statements created by generate_subtree_copies.  */
398   int subtree_copies;
399 
400   /* Number of statements created by load_assign_lhs_subreplacements.  */
401   int subreplacements;
402 
403   /* Number of times sra_modify_assign has deleted a statement.  */
404   int deleted;
405 
406   /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
407      RHS separately due to type conversions or nonexistent matching
408      references.  */
409   int separate_lhs_rhs_handling;
410 
411   /* Number of parameters that were removed because they were unused.  */
412   int deleted_unused_parameters;
413 
414   /* Number of scalars passed as parameters by reference that have been
415      converted to be passed by value.  */
416   int scalar_by_ref_to_by_val;
417 
418   /* Number of aggregate parameters that were replaced by one or more of their
419      components.  */
420   int aggregate_params_reduced;
421 
422   /* Number of components created when splitting aggregate parameters.  */
423   int param_reductions_created;
424 } sra_stats;
425 
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

426 static void
427 dump_access (FILE *f, struct access *access, bool grp)
428 {
429   fprintf (f, "access { ");
430   fprintf (f, "base = (%d)'", DECL_UID (access->base));
431   print_generic_expr (f, access->base);
432   fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
433   fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
434   fprintf (f, ", expr = ");
435   print_generic_expr (f, access->expr);
436   fprintf (f, ", type = ");
437   print_generic_expr (f, access->type);
438   fprintf (f, ", non_addressable = %d, reverse = %d",
439 	   access->non_addressable, access->reverse);
440   if (grp)
441     fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
442 	     "grp_assignment_write = %d, grp_scalar_read = %d, "
443 	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
444 	     "grp_hint = %d, grp_covered = %d, "
445 	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
446 	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
447 	     "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
448 	     "grp_not_necessarilly_dereferenced = %d\n",
449 	     access->grp_read, access->grp_write, access->grp_assignment_read,
450 	     access->grp_assignment_write, access->grp_scalar_read,
451 	     access->grp_scalar_write, access->grp_total_scalarization,
452 	     access->grp_hint, access->grp_covered,
453 	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
454 	     access->grp_partial_lhs, access->grp_to_be_replaced,
455 	     access->grp_to_be_debug_replaced, access->grp_maybe_modified,
456 	     access->grp_not_necessarilly_dereferenced);
457   else
458     fprintf (f, ", write = %d, grp_total_scalarization = %d, "
459 	     "grp_partial_lhs = %d\n",
460 	     access->write, access->grp_total_scalarization,
461 	     access->grp_partial_lhs);
462 }
463 
464 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */
465 
466 static void
467 dump_access_tree_1 (FILE *f, struct access *access, int level)
468 {
469   do
470     {
471       int i;
472 
473       for (i = 0; i < level; i++)
474 	fputs ("* ", f);
475 
476       dump_access (f, access, true);
477 
478       if (access->first_child)
479 	dump_access_tree_1 (f, access->first_child, level + 1);
480 
481       access = access->next_sibling;
482     }
483   while (access);
484 }
485 
486 /* Dump all access trees for a variable, given the pointer to the first root in
487    ACCESS.  */
488 
489 static void
490 dump_access_tree (FILE *f, struct access *access)
491 {
492   for (; access; access = access->next_grp)
493     dump_access_tree_1 (f, access, 0);
494 }
495 
496 /* Return true iff ACC is non-NULL and has subaccesses.  */
497 
498 static inline bool
499 access_has_children_p (struct access *acc)
500 {
501   return acc && acc->first_child;
502 }
503 
504 /* Return true iff ACC is (partly) covered by at least one replacement.  */
505 
506 static bool
507 access_has_replacements_p (struct access *acc)
508 {
509   struct access *child;
510   if (acc->grp_to_be_replaced)
511     return true;
512   for (child = acc->first_child; child; child = child->next_sibling)
513     if (access_has_replacements_p (child))
514       return true;
515   return false;
516 }
517 
518 /* Return a vector of pointers to accesses for the variable given in BASE or
519    NULL if there is none.  */
520 
521 static vec<access_p> *
522 get_base_access_vector (tree base)
523 {
524   return base_access_vec->get (base);
525 }
526 
527 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
528    in ACCESS.  Return NULL if it cannot be found.  */
529 
530 static struct access *
531 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
532 			HOST_WIDE_INT size)
533 {
534   while (access && (access->offset != offset || access->size != size))
535     {
536       struct access *child = access->first_child;
537 
538       while (child && (child->offset + child->size <= offset))
539 	child = child->next_sibling;
540       access = child;
541     }
542 
543   return access;
544 }
545 
546 /* Return the first group representative for DECL or NULL if none exists.  */
547 
548 static struct access *
549 get_first_repr_for_decl (tree base)
550 {
551   vec<access_p> *access_vec;
552 
553   access_vec = get_base_access_vector (base);
554   if (!access_vec)
555     return NULL;
556 
557   return (*access_vec)[0];
558 }
559 
560 /* Find an access representative for the variable BASE and given OFFSET and
561    SIZE.  Requires that access trees have already been built.  Return NULL if
562    it cannot be found.  */
563 
564 static struct access *
565 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
566 				 HOST_WIDE_INT size)
567 {
568   struct access *access;
569 
570   access = get_first_repr_for_decl (base);
571   while (access && (access->offset + access->size <= offset))
572     access = access->next_grp;
573   if (!access)
574     return NULL;
575 
576   return find_access_in_subtree (access, offset, size);
577 }
578 
579 /* Add LINK to the linked list of assign links of RACC.  */
580 static void
581 add_link_to_rhs (struct access *racc, struct assign_link *link)
582 {
583   gcc_assert (link->racc == racc);
584 
585   if (!racc->first_link)
586     {
587       gcc_assert (!racc->last_link);
588       racc->first_link = link;
589     }
590   else
591     racc->last_link->next = link;
592 
593   racc->last_link = link;
594   link->next = NULL;
595 }
596 
597 /* Move all link structures in their linked list in OLD_RACC to the linked list
598    in NEW_RACC.  */
599 static void
600 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
601 {
602   if (!old_racc->first_link)
603     {
604       gcc_assert (!old_racc->last_link);
605       return;
606     }
607 
608   if (new_racc->first_link)
609     {
610       gcc_assert (!new_racc->last_link->next);
611       gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
612 
613       new_racc->last_link->next = old_racc->first_link;
614       new_racc->last_link = old_racc->last_link;
615     }
616   else
617     {
618       gcc_assert (!new_racc->last_link);
619 
620       new_racc->first_link = old_racc->first_link;
621       new_racc->last_link = old_racc->last_link;
622     }
623   old_racc->first_link = old_racc->last_link = NULL;
624 }
625 
626 /* Add ACCESS to the work queue (which is actually a stack).  */
627 
628 static void
629 add_access_to_work_queue (struct access *access)
630 {
631   if (access->first_link && !access->grp_queued)
632     {
633       gcc_assert (!access->next_queued);
634       access->next_queued = work_queue_head;
635       access->grp_queued = 1;
636       work_queue_head = access;
637     }
638 }
639 
640 /* Pop an access from the work queue, and return it, assuming there is one.  */
641 
642 static struct access *
643 pop_access_from_work_queue (void)
644 {
645   struct access *access = work_queue_head;
646 
647   work_queue_head = access->next_queued;
648   access->next_queued = NULL;
649   access->grp_queued = 0;
650   return access;
651 }
652 
653 
654 /* Allocate necessary structures.  */
655 
656 static void
657 sra_initialize (void)
658 {
659   candidate_bitmap = BITMAP_ALLOC (NULL);
660   candidates = new hash_table<uid_decl_hasher>
661     (vec_safe_length (cfun->local_decls) / 2);
662   should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
663   cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
664   disqualified_constants = BITMAP_ALLOC (NULL);
665   gcc_obstack_init (&name_obstack);
666   base_access_vec = new hash_map<tree, auto_vec<access_p> >;
667   memset (&sra_stats, 0, sizeof (sra_stats));
668   encountered_apply_args = false;
669   encountered_recursive_call = false;
670   encountered_unchangable_recursive_call = false;
671 }
672 
673 /* Deallocate all general structures.  */
674 
675 static void
676 sra_deinitialize (void)
677 {
678   BITMAP_FREE (candidate_bitmap);
679   delete candidates;
680   candidates = NULL;
681   BITMAP_FREE (should_scalarize_away_bitmap);
682   BITMAP_FREE (cannot_scalarize_away_bitmap);
683   BITMAP_FREE (disqualified_constants);
684   access_pool.release ();
685   assign_link_pool.release ();
686   obstack_free (&name_obstack, NULL);
687 
688   delete base_access_vec;
689 }
690 
691 /* Return true if DECL is a VAR_DECL in the constant pool, false otherwise.  */
692 
693 static bool constant_decl_p (tree decl)
694 {
695   return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
696 }
697 
698 /* Remove DECL from candidates for SRA and write REASON to the dump file if
699    there is one.  */
700 
701 static void
702 disqualify_candidate (tree decl, const char *reason)
703 {
704   if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
705     candidates->remove_elt_with_hash (decl, DECL_UID (decl));
706   if (constant_decl_p (decl))
707     bitmap_set_bit (disqualified_constants, DECL_UID (decl));
708 
709   if (dump_file && (dump_flags & TDF_DETAILS))
710     {
711       fprintf (dump_file, "! Disqualifying ");
712       print_generic_expr (dump_file, decl);
713       fprintf (dump_file, " - %s\n", reason);
714     }
715 }
716 
717 /* Return true iff the type contains a field or an element which does not allow
718    scalarization.  */
719 
720 static bool
721 type_internals_preclude_sra_p (tree type, const char **msg)
722 {
723   tree fld;
724   tree et;
725 
726   switch (TREE_CODE (type))
727     {
728     case RECORD_TYPE:
729     case UNION_TYPE:
730     case QUAL_UNION_TYPE:
731       for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
732 	if (TREE_CODE (fld) == FIELD_DECL)
733 	  {
734 	    tree ft = TREE_TYPE (fld);
735 
736 	    if (TREE_THIS_VOLATILE (fld))
737 	      {
738 		*msg = "volatile structure field";
739 		return true;
740 	      }
741 	    if (!DECL_FIELD_OFFSET (fld))
742 	      {
743 		*msg = "no structure field offset";
744 		return true;
745 	      }
746 	    if (!DECL_SIZE (fld))
747 	      {
748 		*msg = "zero structure field size";
749 	        return true;
750 	      }
751 	    if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
752 	      {
753 		*msg = "structure field offset not fixed";
754 		return true;
755 	      }
756 	    if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
757 	      {
758 	        *msg = "structure field size not fixed";
759 		return true;
760 	      }
761 	    if (!tree_fits_shwi_p (bit_position (fld)))
762 	      {
763 	        *msg = "structure field size too big";
764 		return true;
765 	      }
766 	    if (AGGREGATE_TYPE_P (ft)
767 		    && int_bit_position (fld) % BITS_PER_UNIT != 0)
768 	      {
769 		*msg = "structure field is bit field";
770 	        return true;
771 	      }
772 
773 	    if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
774 	      return true;
775 	  }
776 
777       return false;
778 
779     case ARRAY_TYPE:
780       et = TREE_TYPE (type);
781 
782       if (TYPE_VOLATILE (et))
783 	{
784 	  *msg = "element type is volatile";
785 	  return true;
786 	}
787 
788       if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
789 	return true;
790 
791       return false;
792 
793     default:
794       return false;
795     }
796 }
797 
798 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
799    base variable if it is.  Return T if it is not an SSA_NAME.  */
800 
801 static tree
802 get_ssa_base_param (tree t)
803 {
804   if (TREE_CODE (t) == SSA_NAME)
805     {
806       if (SSA_NAME_IS_DEFAULT_DEF (t))
807 	return SSA_NAME_VAR (t);
808       else
809 	return NULL_TREE;
810     }
811   return t;
812 }
813 
814 /* Mark a dereference of BASE of distance DIST in a basic block that STMT
815    belongs to, unless the BB has already been marked as potentially
816    final.  */
817 
818 static void
819 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
820 {
821   basic_block bb = gimple_bb (stmt);
822   int idx, parm_index = 0;
823   tree parm;
824 
825   if (bitmap_bit_p (final_bbs, bb->index))
826     return;
827 
828   for (parm = DECL_ARGUMENTS (current_function_decl);
829        parm && parm != base;
830        parm = DECL_CHAIN (parm))
831     parm_index++;
832 
833   gcc_assert (parm_index < func_param_count);
834 
835   idx = bb->index * func_param_count + parm_index;
836   if (bb_dereferences[idx] < dist)
837     bb_dereferences[idx] = dist;
838 }
839 
840 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
841    the three fields.  Also add it to the vector of accesses corresponding to
842    the base.  Finally, return the new access.  */
843 
844 static struct access *
845 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
846 {
847   struct access *access = access_pool.allocate ();
848 
849   memset (access, 0, sizeof (struct access));
850   access->base = base;
851   access->offset = offset;
852   access->size = size;
853 
854   base_access_vec->get_or_insert (base).safe_push (access);
855 
856   return access;
857 }
858 
859 static bool maybe_add_sra_candidate (tree);
860 
861 /* Create and insert access for EXPR. Return created access, or NULL if it is
862    not possible.  Also scan for uses of the constant pool as we go along and add
863    to candidates.  */
864 
865 static struct access *
866 create_access (tree expr, gimple *stmt, bool write)
867 {
868   struct access *access;
869   poly_int64 poffset, psize, pmax_size;
870   HOST_WIDE_INT offset, size, max_size;
871   tree base = expr;
872   bool reverse, ptr, unscalarizable_region = false;
873 
874   base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
875 				  &reverse);
876   if (!poffset.is_constant (&offset)
877       || !psize.is_constant (&size)
878       || !pmax_size.is_constant (&max_size))
879     {
880       disqualify_candidate (base, "Encountered a polynomial-sized access.");
881       return NULL;
882     }
883 
884   if (sra_mode == SRA_MODE_EARLY_IPA
885       && TREE_CODE (base) == MEM_REF)
886     {
887       base = get_ssa_base_param (TREE_OPERAND (base, 0));
888       if (!base)
889 	return NULL;
890       ptr = true;
891     }
892   else
893     ptr = false;
894 
895   /* For constant-pool entries, check we can substitute the constant value.  */
896   if (constant_decl_p (base)
897       && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA))
898     {
899       gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
900       if (expr != base
901 	  && !is_gimple_reg_type (TREE_TYPE (expr))
902 	  && dump_file && (dump_flags & TDF_DETAILS))
903 	{
904 	  /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
905 	     and elements of multidimensional arrays (which are
906 	     multi-element arrays in their own right).  */
907 	  fprintf (dump_file, "Allowing non-reg-type load of part"
908 			      " of constant-pool entry: ");
909 	  print_generic_expr (dump_file, expr);
910 	}
911       maybe_add_sra_candidate (base);
912     }
913 
914   if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
915     return NULL;
916 
917   if (sra_mode == SRA_MODE_EARLY_IPA)
918     {
919       if (size < 0 || size != max_size)
920 	{
921 	  disqualify_candidate (base, "Encountered a variable sized access.");
922 	  return NULL;
923 	}
924       if (TREE_CODE (expr) == COMPONENT_REF
925 	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
926 	{
927 	  disqualify_candidate (base, "Encountered a bit-field access.");
928 	  return NULL;
929 	}
930       gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
931 
932       if (ptr)
933 	mark_parm_dereference (base, offset + size, stmt);
934     }
935   else
936     {
937       if (size != max_size)
938 	{
939 	  size = max_size;
940 	  unscalarizable_region = true;
941 	}
942       if (size < 0)
943 	{
944 	  disqualify_candidate (base, "Encountered an unconstrained access.");
945 	  return NULL;
946 	}
947     }
948 
949   access = create_access_1 (base, offset, size);
950   access->expr = expr;
951   access->type = TREE_TYPE (expr);
952   access->write = write;
953   access->grp_unscalarizable_region = unscalarizable_region;
954   access->stmt = stmt;
955   access->reverse = reverse;
956 
957   if (TREE_CODE (expr) == COMPONENT_REF
958       && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
959     access->non_addressable = 1;
960 
961   return access;
962 }
963 
964 
965 /* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
966    ARRAY_TYPE with fields that are either of gimple register types (excluding
967    bit-fields) or (recursively) scalarizable types.  CONST_DECL must be true if
968    we are considering a decl from constant pool.  If it is false, char arrays
969    will be refused.  */
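/* Illustrative examples (assuming ordinary, non-constant-pool decls):
   struct { int i; double d[4]; } is scalarizable, while
   struct { int n; char buf[16]; } is not, because the char array has
   single-byte elements; types containing bit-fields or variable-length
   arrays are rejected as well.  */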
970 
971 static bool
972 scalarizable_type_p (tree type, bool const_decl)
973 {
974   gcc_assert (!is_gimple_reg_type (type));
975   if (type_contains_placeholder_p (type))
976     return false;
977 
978   switch (TREE_CODE (type))
979   {
980   case RECORD_TYPE:
981     for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
982       if (TREE_CODE (fld) == FIELD_DECL)
983 	{
984 	  tree ft = TREE_TYPE (fld);
985 
986 	  if (DECL_BIT_FIELD (fld))
987 	    return false;
988 
989 	  if (!is_gimple_reg_type (ft)
990 	      && !scalarizable_type_p (ft, const_decl))
991 	    return false;
992 	}
993 
994     return true;
995 
996   case ARRAY_TYPE:
997     {
998       HOST_WIDE_INT min_elem_size;
999       if (const_decl)
1000 	min_elem_size = 0;
1001       else
1002 	min_elem_size = BITS_PER_UNIT;
1003 
1004       if (TYPE_DOMAIN (type) == NULL_TREE
1005 	  || !tree_fits_shwi_p (TYPE_SIZE (type))
1006 	  || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
1007 	  || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= min_elem_size)
1008 	  || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
1009 	return false;
1010       if (tree_to_shwi (TYPE_SIZE (type)) == 0
1011 	  && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
1012 	/* Zero-element array, should not prevent scalarization.  */
1013 	;
1014       else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
1015 	       || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
1016 	/* Variable-length array, do not allow scalarization.  */
1017 	return false;
1018 
1019       tree elem = TREE_TYPE (type);
1020       if (!is_gimple_reg_type (elem)
1021 	  && !scalarizable_type_p (elem, const_decl))
1022 	return false;
1023       return true;
1024     }
1025   default:
1026     return false;
1027   }
1028 }
1029 
1030 static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, bool, tree, tree);
1031 
1032 /* Create total_scalarization accesses for all scalar fields of a member
1033    of type DECL_TYPE conforming to scalarizable_type_p.  BASE
1034    must be the top-most VAR_DECL representing the variable; within that,
1035    OFFSET locates the member and REF must be the memory reference expression for
1036    the member.  */
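/* For example (purely illustrative, assuming 32-bit int): for a variable V
   of type struct { int a; int b[2]; }, this creates total-scalarization
   accesses for V.a at bit offset 0, V.b[0] at offset 32 and V.b[1] at
   offset 64, each with an access expression built on top of REF.  */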
1037 
1038 static void
1039 completely_scalarize (tree base, tree decl_type, HOST_WIDE_INT offset, tree ref)
1040 {
1041   switch (TREE_CODE (decl_type))
1042     {
1043     case RECORD_TYPE:
1044       for (tree fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
1045 	if (TREE_CODE (fld) == FIELD_DECL)
1046 	  {
1047 	    HOST_WIDE_INT pos = offset + int_bit_position (fld);
1048 	    tree ft = TREE_TYPE (fld);
1049 	    tree nref = build3 (COMPONENT_REF, ft, ref, fld, NULL_TREE);
1050 
1051 	    scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)),
1052 			    TYPE_REVERSE_STORAGE_ORDER (decl_type),
1053 			    nref, ft);
1054 	  }
1055       break;
1056     case ARRAY_TYPE:
1057       {
1058 	tree elemtype = TREE_TYPE (decl_type);
1059 	tree elem_size = TYPE_SIZE (elemtype);
1060 	gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
1061 	HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
1062 	gcc_assert (el_size > 0);
1063 
1064 	tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (decl_type));
1065 	gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
1066 	tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (decl_type));
1067 	/* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1.  */
1068 	if (maxidx)
1069 	  {
1070 	    gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
1071 	    tree domain = TYPE_DOMAIN (decl_type);
1072 	    /* MINIDX and MAXIDX are inclusive, and must be interpreted in
1073 	       DOMAIN (e.g. signed int, whereas min/max may be size_int).  */
1074 	    offset_int idx = wi::to_offset (minidx);
1075 	    offset_int max = wi::to_offset (maxidx);
1076 	    if (!TYPE_UNSIGNED (domain))
1077 	      {
1078 		idx = wi::sext (idx, TYPE_PRECISION (domain));
1079 		max = wi::sext (max, TYPE_PRECISION (domain));
1080 	      }
1081 	    for (int el_off = offset; idx <= max; ++idx)
1082 	      {
1083 		tree nref = build4 (ARRAY_REF, elemtype,
1084 				    ref,
1085 				    wide_int_to_tree (domain, idx),
1086 				    NULL_TREE, NULL_TREE);
1087 		scalarize_elem (base, el_off, el_size,
1088 				TYPE_REVERSE_STORAGE_ORDER (decl_type),
1089 				nref, elemtype);
1090 		el_off += el_size;
1091 	      }
1092 	  }
1093       }
1094       break;
1095     default:
1096       gcc_unreachable ();
1097     }
1098 }
1099 
1100 /* Create total_scalarization accesses for a member of type TYPE, which must
1101    satisfy either is_gimple_reg_type or scalarizable_type_p.  BASE must be the
1102    top-most VAR_DECL representing the variable; within that, POS and SIZE locate
1103    the member, REVERSE gives its storage order, and REF must be the reference
1104    expression for it.  */
1105 
1106 static void
1107 scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size, bool reverse,
1108 		tree ref, tree type)
1109 {
1110   if (is_gimple_reg_type (type))
1111   {
1112     struct access *access = create_access_1 (base, pos, size);
1113     access->expr = ref;
1114     access->type = type;
1115     access->grp_total_scalarization = 1;
1116     access->reverse = reverse;
1117     /* Accesses for intraprocedural SRA can have their stmt NULL.  */
1118   }
1119   else
1120     completely_scalarize (base, type, pos, ref);
1121 }
1122 
1123 /* Create a total_scalarization access for VAR as a whole.  VAR must be of a
1124    RECORD_TYPE or ARRAY_TYPE conforming to scalarizable_type_p.  */
1125 
1126 static void
1127 create_total_scalarization_access (tree var)
1128 {
1129   HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
1130   struct access *access;
1131 
1132   access = create_access_1 (var, 0, size);
1133   access->expr = var;
1134   access->type = TREE_TYPE (var);
1135   access->grp_total_scalarization = 1;
1136 }
1137 
1138 /* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */
1139 
1140 static inline bool
1141 contains_view_convert_expr_p (const_tree ref)
1142 {
1143   while (handled_component_p (ref))
1144     {
1145       if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1146 	return true;
1147       ref = TREE_OPERAND (ref, 0);
1148     }
1149 
1150   return false;
1151 }
1152 
1153 /* Return true if REF contains a VIEW_CONVERT_EXPR or a MEM_REF that performs
1154    type conversion or a COMPONENT_REF with a bit-field field declaration.  */
1155 
1156 static bool
1157 contains_vce_or_bfcref_p (const_tree ref)
1158 {
1159   while (handled_component_p (ref))
1160     {
1161       if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
1162 	  || (TREE_CODE (ref) == COMPONENT_REF
1163 	      && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
1164 	return true;
1165       ref = TREE_OPERAND (ref, 0);
1166     }
1167 
1168   if (TREE_CODE (ref) != MEM_REF
1169       || TREE_CODE (TREE_OPERAND (ref, 0)) != ADDR_EXPR)
1170     return false;
1171 
1172   tree mem = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
1173   if (TYPE_MAIN_VARIANT (TREE_TYPE (ref))
1174       != TYPE_MAIN_VARIANT (TREE_TYPE (mem)))
1175     return true;
1176 
1177   return false;
1178 }
1179 
1180 /* Search the given tree for a declaration by skipping handled components and
1181    exclude it from the candidates.  */
1182 
1183 static void
1184 disqualify_base_of_expr (tree t, const char *reason)
1185 {
1186   t = get_base_address (t);
1187   if (sra_mode == SRA_MODE_EARLY_IPA
1188       && TREE_CODE (t) == MEM_REF)
1189     t = get_ssa_base_param (TREE_OPERAND (t, 0));
1190 
1191   if (t && DECL_P (t))
1192     disqualify_candidate (t, reason);
1193 }
1194 
1195 /* Scan expression EXPR and create access structures for all accesses to
1196    candidates for scalarization.  Return the created access or NULL if none is
1197    created.  */
1198 
1199 static struct access *
1200 build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
1201 {
1202   struct access *ret = NULL;
1203   bool partial_ref;
1204 
1205   if (TREE_CODE (expr) == BIT_FIELD_REF
1206       || TREE_CODE (expr) == IMAGPART_EXPR
1207       || TREE_CODE (expr) == REALPART_EXPR)
1208     {
1209       expr = TREE_OPERAND (expr, 0);
1210       partial_ref = true;
1211     }
1212   else
1213     partial_ref = false;
1214 
1215   if (storage_order_barrier_p (expr))
1216     {
1217       disqualify_base_of_expr (expr, "storage order barrier.");
1218       return NULL;
1219     }
1220 
1221   /* We need to dive through V_C_Es in order to get the size of its parameter
1222      and not the result type.  Ada produces such statements.  We are also
1223      capable of handling the topmost V_C_E but not any of those buried in other
1224      handled components.  */
1225   if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1226     expr = TREE_OPERAND (expr, 0);
1227 
1228   if (contains_view_convert_expr_p (expr))
1229     {
1230       disqualify_base_of_expr (expr, "V_C_E under a different handled "
1231 			       "component.");
1232       return NULL;
1233     }
1234   if (TREE_THIS_VOLATILE (expr))
1235     {
1236       disqualify_base_of_expr (expr, "part of a volatile reference.");
1237       return NULL;
1238     }
1239 
1240   switch (TREE_CODE (expr))
1241     {
1242     case MEM_REF:
1243       if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1244 	  && sra_mode != SRA_MODE_EARLY_IPA)
1245 	return NULL;
1246       /* fall through */
1247     case VAR_DECL:
1248     case PARM_DECL:
1249     case RESULT_DECL:
1250     case COMPONENT_REF:
1251     case ARRAY_REF:
1252     case ARRAY_RANGE_REF:
1253       ret = create_access (expr, stmt, write);
1254       break;
1255 
1256     default:
1257       break;
1258     }
1259 
1260   if (write && partial_ref && ret)
1261     ret->grp_partial_lhs = 1;
1262 
1263   return ret;
1264 }
1265 
1266 /* Scan expression EXPR and create access structures for all accesses to
1267    candidates for scalarization.  Return true if any access has been inserted.
1268    STMT must be the statement from which the expression is taken, WRITE must be
1269    true if the expression is a store and false otherwise. */
1270 
1271 static bool
1272 build_access_from_expr (tree expr, gimple *stmt, bool write)
1273 {
1274   struct access *access;
1275 
1276   access = build_access_from_expr_1 (expr, stmt, write);
1277   if (access)
1278     {
1279       /* This means the aggregate is accessed as a whole in a way other than an
1280 	 assign statement and thus cannot be removed even if we had a scalar
1281 	 replacement for everything.  */
1282       if (cannot_scalarize_away_bitmap)
1283 	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1284       return true;
1285     }
1286   return false;
1287 }
1288 
1289 /* Return the single non-EH successor edge of BB or NULL if there is none or
1290    more than one.  */
1291 
1292 static edge
1293 single_non_eh_succ (basic_block bb)
1294 {
1295   edge e, res = NULL;
1296   edge_iterator ei;
1297 
1298   FOR_EACH_EDGE (e, ei, bb->succs)
1299     if (!(e->flags & EDGE_EH))
1300       {
1301 	if (res)
1302 	  return NULL;
1303 	res = e;
1304       }
1305 
1306   return res;
1307 }
1308 
1309 /* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1310    there is no alternative spot where to put statements SRA might need to
1311    generate after it.  The spot we are looking for is an edge leading to a
1312    single non-EH successor, if it exists and is indeed single.  RHS may be
1313    NULL, in that case ignore it.  */
1314 
1315 static bool
1316 disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
1317 {
1318   if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1319       && stmt_ends_bb_p (stmt))
1320     {
1321       if (single_non_eh_succ (gimple_bb (stmt)))
1322 	return false;
1323 
1324       disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1325       if (rhs)
1326 	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1327       return true;
1328     }
1329   return false;
1330 }
1331 
1332 /* Return true if the nature of BASE is such that it contains data even if
1333    there is no write to it in the function.  */
1334 
1335 static bool
1336 comes_initialized_p (tree base)
1337 {
1338   return TREE_CODE (base) == PARM_DECL || constant_decl_p (base);
1339 }
1340 
1341 /* Scan expressions occurring in STMT, create access structures for all accesses
1342    to candidates for scalarization and remove those candidates which occur in
1343    statements or expressions that prevent them from being split apart.  Return
1344    true if any access has been inserted.  */
1345 
1346 static bool
1347 build_accesses_from_assign (gimple *stmt)
1348 {
1349   tree lhs, rhs;
1350   struct access *lacc, *racc;
1351 
1352   if (!gimple_assign_single_p (stmt)
1353       /* Scope clobbers don't influence scalarization.  */
1354       || gimple_clobber_p (stmt))
1355     return false;
1356 
1357   lhs = gimple_assign_lhs (stmt);
1358   rhs = gimple_assign_rhs1 (stmt);
1359 
1360   if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
1361     return false;
1362 
1363   racc = build_access_from_expr_1 (rhs, stmt, false);
1364   lacc = build_access_from_expr_1 (lhs, stmt, true);
1365 
1366   if (lacc)
1367     {
1368       lacc->grp_assignment_write = 1;
1369       if (storage_order_barrier_p (rhs))
1370 	lacc->grp_unscalarizable_region = 1;
1371     }
1372 
1373   if (racc)
1374     {
1375       racc->grp_assignment_read = 1;
1376       if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1377 	  && !is_gimple_reg_type (racc->type))
1378 	{
1379 	  if (contains_vce_or_bfcref_p (rhs))
1380 	    bitmap_set_bit (cannot_scalarize_away_bitmap,
1381 			    DECL_UID (racc->base));
1382 	  else
1383 	    bitmap_set_bit (should_scalarize_away_bitmap,
1384 			    DECL_UID (racc->base));
1385 	}
1386       if (storage_order_barrier_p (lhs))
1387 	racc->grp_unscalarizable_region = 1;
1388     }
1389 
1390   if (lacc && racc
1391       && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1392       && !lacc->grp_unscalarizable_region
1393       && !racc->grp_unscalarizable_region
1394       && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1395       && lacc->size == racc->size
1396       && useless_type_conversion_p (lacc->type, racc->type))
1397     {
1398       struct assign_link *link;
1399 
1400       link = assign_link_pool.allocate ();
1401       memset (link, 0, sizeof (struct assign_link));
1402 
1403       link->lacc = lacc;
1404       link->racc = racc;
1405       add_link_to_rhs (racc, link);
1406       add_access_to_work_queue (racc);
1407 
1408       /* Let's delay marking the areas as written until propagation of accesses
1409 	 across link, unless the nature of rhs tells us that its data comes
1410 	 from elsewhere.  */
1411       if (!comes_initialized_p (racc->base))
1412 	lacc->write = false;
1413     }
1414 
1415   return lacc || racc;
1416 }
1417 
1418 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1419    GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */
1420 
1421 static bool
1422 asm_visit_addr (gimple *, tree op, tree, void *)
1423 {
1424   op = get_base_address (op);
1425   if (op
1426       && DECL_P (op))
1427     disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1428 
1429   return false;
1430 }
1431 
1432 /* Return true iff callsite CALL has at least as many actual arguments as there
1433    are formal parameters of the function currently processed by IPA-SRA and
1434    that their types match.  */
1435 
1436 static inline bool
1437 callsite_arguments_match_p (gimple *call)
1438 {
1439   if (gimple_call_num_args (call) < (unsigned) func_param_count)
1440     return false;
1441 
1442   tree parm;
1443   int i;
1444   for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
1445        parm;
1446        parm = DECL_CHAIN (parm), i++)
1447     {
1448       tree arg = gimple_call_arg (call, i);
1449       if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
1450 	return false;
1451     }
1452   return true;
1453 }
1454 
1455 /* Scan function and look for interesting expressions and create access
1456    structures for them.  Return true iff any access is created.  */
1457 
1458 static bool
1459 scan_function (void)
1460 {
1461   basic_block bb;
1462   bool ret = false;
1463 
1464   FOR_EACH_BB_FN (bb, cfun)
1465     {
1466       gimple_stmt_iterator gsi;
1467       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1468 	{
1469 	  gimple *stmt = gsi_stmt (gsi);
1470 	  tree t;
1471 	  unsigned i;
1472 
1473 	  if (final_bbs && stmt_can_throw_external (stmt))
1474 	    bitmap_set_bit (final_bbs, bb->index);
1475 	  switch (gimple_code (stmt))
1476 	    {
1477 	    case GIMPLE_RETURN:
1478 	      t = gimple_return_retval (as_a <greturn *> (stmt));
1479 	      if (t != NULL_TREE)
1480 		ret |= build_access_from_expr (t, stmt, false);
1481 	      if (final_bbs)
1482 		bitmap_set_bit (final_bbs, bb->index);
1483 	      break;
1484 
1485 	    case GIMPLE_ASSIGN:
1486 	      ret |= build_accesses_from_assign (stmt);
1487 	      break;
1488 
1489 	    case GIMPLE_CALL:
1490 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
1491 		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1492 					       stmt, false);
1493 
1494 	      if (sra_mode == SRA_MODE_EARLY_IPA)
1495 		{
1496 		  tree dest = gimple_call_fndecl (stmt);
1497 		  int flags = gimple_call_flags (stmt);
1498 
1499 		  if (dest)
1500 		    {
1501 		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1502 			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1503 			encountered_apply_args = true;
1504 		      if (recursive_call_p (current_function_decl, dest))
1505 			{
1506 			  encountered_recursive_call = true;
1507 			  if (!callsite_arguments_match_p (stmt))
1508 			    encountered_unchangable_recursive_call = true;
1509 			}
1510 		    }
1511 
1512 		  if (final_bbs
1513 		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
1514 		    bitmap_set_bit (final_bbs, bb->index);
1515 		}
1516 
1517 	      t = gimple_call_lhs (stmt);
1518 	      if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
1519 		ret |= build_access_from_expr (t, stmt, true);
1520 	      break;
1521 
1522 	    case GIMPLE_ASM:
1523 	      {
1524 		gasm *asm_stmt = as_a <gasm *> (stmt);
1525 		walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
1526 					       asm_visit_addr);
1527 		if (final_bbs)
1528 		  bitmap_set_bit (final_bbs, bb->index);
1529 
1530 		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1531 		  {
1532 		    t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1533 		    ret |= build_access_from_expr (t, asm_stmt, false);
1534 		  }
1535 		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1536 		  {
1537 		    t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1538 		    ret |= build_access_from_expr (t, asm_stmt, true);
1539 		  }
1540 	      }
1541 	      break;
1542 
1543 	    default:
1544 	      break;
1545 	    }
1546 	}
1547     }
1548 
1549   return ret;
1550 }
1551 
1552 /* Helper of QSORT.  The array contains pointers to accesses.  An access is
1553    considered smaller than another if it has a smaller offset or if the
1554    offsets are the same but its size is bigger. */
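/* For example, accesses at (offset, size) pairs (0, 32), (32, 32) and (0, 64)
   sort as (0, 64), (0, 32), (32, 32): smaller offsets come first and, for
   equal offsets, bigger sizes come first, so an enclosing access precedes
   the accesses nested inside it.  */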
1555 
1556 static int
1557 compare_access_positions (const void *a, const void *b)
1558 {
1559   const access_p *fp1 = (const access_p *) a;
1560   const access_p *fp2 = (const access_p *) b;
1561   const access_p f1 = *fp1;
1562   const access_p f2 = *fp2;
1563 
1564   if (f1->offset != f2->offset)
1565     return f1->offset < f2->offset ? -1 : 1;
1566 
1567   if (f1->size == f2->size)
1568     {
1569       if (f1->type == f2->type)
1570 	return 0;
1571       /* Put any non-aggregate type before any aggregate type.  */
1572       else if (!is_gimple_reg_type (f1->type)
1573 	  && is_gimple_reg_type (f2->type))
1574 	return 1;
1575       else if (is_gimple_reg_type (f1->type)
1576 	       && !is_gimple_reg_type (f2->type))
1577 	return -1;
1578       /* Put any complex or vector type before any other scalar type.  */
1579       else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1580 	       && TREE_CODE (f1->type) != VECTOR_TYPE
1581 	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
1582 		   || TREE_CODE (f2->type) == VECTOR_TYPE))
1583 	return 1;
1584       else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1585 		|| TREE_CODE (f1->type) == VECTOR_TYPE)
1586 	       && TREE_CODE (f2->type) != COMPLEX_TYPE
1587 	       && TREE_CODE (f2->type) != VECTOR_TYPE)
1588 	return -1;
1589       /* Put any integral type before any non-integral type.  When splicing, we
1590 	 make sure that those with insufficient precision and occupying the
1591 	 same space are not scalarized.  */
1592       else if (INTEGRAL_TYPE_P (f1->type)
1593 	       && !INTEGRAL_TYPE_P (f2->type))
1594 	return -1;
1595       else if (!INTEGRAL_TYPE_P (f1->type)
1596 	       && INTEGRAL_TYPE_P (f2->type))
1597 	return 1;
1598       /* Put the integral type with the bigger precision first.  */
1599       else if (INTEGRAL_TYPE_P (f1->type)
1600 	       && INTEGRAL_TYPE_P (f2->type)
1601 	       && (TYPE_PRECISION (f2->type) != TYPE_PRECISION (f1->type)))
1602 	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1603       /* Stabilize the sort.  */
1604       return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1605     }
1606 
1607   /* We want the bigger accesses first, thus the opposite operator in the next
1608      line: */
1609   return f1->size > f2->size ? -1 : 1;
1610 }
1611 
1612 
1613 /* Append a name of the declaration to the name obstack.  A helper function for
1614    make_fancy_name.  */
1615 
1616 static void
1617 make_fancy_decl_name (tree decl)
1618 {
1619   char buffer[32];
1620 
1621   tree name = DECL_NAME (decl);
1622   if (name)
1623     obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1624 		  IDENTIFIER_LENGTH (name));
1625   else
1626     {
1627       sprintf (buffer, "D%u", DECL_UID (decl));
1628       obstack_grow (&name_obstack, buffer, strlen (buffer));
1629     }
1630 }
1631 
1632 /* Helper for make_fancy_name.  */
1633 
1634 static void
1635 make_fancy_name_1 (tree expr)
1636 {
1637   char buffer[32];
1638   tree index;
1639 
1640   if (DECL_P (expr))
1641     {
1642       make_fancy_decl_name (expr);
1643       return;
1644     }
1645 
1646   switch (TREE_CODE (expr))
1647     {
1648     case COMPONENT_REF:
1649       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1650       obstack_1grow (&name_obstack, '$');
1651       make_fancy_decl_name (TREE_OPERAND (expr, 1));
1652       break;
1653 
1654     case ARRAY_REF:
1655       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1656       obstack_1grow (&name_obstack, '$');
1657       /* Arrays with only one element may not have a constant as their
1658 	 index. */
1659       index = TREE_OPERAND (expr, 1);
1660       if (TREE_CODE (index) != INTEGER_CST)
1661 	break;
1662       sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1663       obstack_grow (&name_obstack, buffer, strlen (buffer));
1664       break;
1665 
1666     case ADDR_EXPR:
1667       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1668       break;
1669 
1670     case MEM_REF:
1671       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1672       if (!integer_zerop (TREE_OPERAND (expr, 1)))
1673 	{
1674 	  obstack_1grow (&name_obstack, '$');
1675 	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1676 		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1677 	  obstack_grow (&name_obstack, buffer, strlen (buffer));
1678 	}
1679       break;
1680 
1681     case BIT_FIELD_REF:
1682     case REALPART_EXPR:
1683     case IMAGPART_EXPR:
1684       gcc_unreachable (); 	/* we treat these as scalars.  */
1685       break;
1686     default:
1687       break;
1688     }
1689 }
1690 
1691 /* Create a human readable name for the replacement of access expression EXPR.  */
1692 
1693 static char *
1694 make_fancy_name (tree expr)
1695 {
1696   make_fancy_name_1 (expr);
1697   obstack_1grow (&name_obstack, '\0');
1698   return XOBFINISH (&name_obstack, char *);
1699 }
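
/* For instance, an access expression like rec.inner.vals[3] (identifiers here
   are purely illustrative) yields the name "rec$inner$vals$3", and a MEM_REF
   with a non-zero offset contributes a "$<byte-offset>" component.  */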
1700 
1701 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1702    EXP_TYPE at the given OFFSET and with storage order REVERSE.  If BASE is
1703    something for which get_addr_base_and_unit_offset returns NULL, GSI must
1704    be non-NULL and is used to insert new statements either before or after
1705    the current one as specified by INSERT_AFTER.  This function is not capable
1706    of handling bitfields.  */
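/* For example, when BASE is a simple VAR_DECL s and OFFSET is 32, the result
   is conceptually MEM[(EXP_TYPE *)&s + 4B]; only when the base address cannot
   be determined statically is a new SSA name holding the address inserted at
   GSI first.  ("s" is just an illustrative name.)  */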
1707 
1708 tree
1709 build_ref_for_offset (location_t loc, tree base, poly_int64 offset,
1710 		      bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
1711 		      bool insert_after)
1712 {
1713   tree prev_base = base;
1714   tree off;
1715   tree mem_ref;
1716   poly_int64 base_offset;
1717   unsigned HOST_WIDE_INT misalign;
1718   unsigned int align;
1719 
1720   /* Preserve address-space information.  */
1721   addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1722   if (as != TYPE_ADDR_SPACE (exp_type))
1723     exp_type = build_qualified_type (exp_type,
1724 				     TYPE_QUALS (exp_type)
1725 				     | ENCODE_QUAL_ADDR_SPACE (as));
1726 
1727   poly_int64 byte_offset = exact_div (offset, BITS_PER_UNIT);
1728   get_object_alignment_1 (base, &align, &misalign);
1729   base = get_addr_base_and_unit_offset (base, &base_offset);
1730 
1731   /* get_addr_base_and_unit_offset returns NULL for references with a variable
1732      offset such as array[var_index].  */
1733   if (!base)
1734     {
1735       gassign *stmt;
1736       tree tmp, addr;
1737 
1738       gcc_checking_assert (gsi);
1739       tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
1740       addr = build_fold_addr_expr (unshare_expr (prev_base));
1741       STRIP_USELESS_TYPE_CONVERSION (addr);
1742       stmt = gimple_build_assign (tmp, addr);
1743       gimple_set_location (stmt, loc);
1744       if (insert_after)
1745 	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1746       else
1747 	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1748 
1749       off = build_int_cst (reference_alias_ptr_type (prev_base), byte_offset);
1750       base = tmp;
1751     }
1752   else if (TREE_CODE (base) == MEM_REF)
1753     {
1754       off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1755 			   base_offset + byte_offset);
1756       off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1757       base = unshare_expr (TREE_OPERAND (base, 0));
1758     }
1759   else
1760     {
1761       off = build_int_cst (reference_alias_ptr_type (prev_base),
1762 			   base_offset + byte_offset);
1763       base = build_fold_addr_expr (unshare_expr (base));
1764     }
1765 
1766   unsigned int align_bound = known_alignment (misalign + offset);
1767   if (align_bound != 0)
1768     align = MIN (align, align_bound);
1769   if (align != TYPE_ALIGN (exp_type))
1770     exp_type = build_aligned_type (exp_type, align);
1771 
1772   mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1773   REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
1774   if (TREE_THIS_VOLATILE (prev_base))
1775     TREE_THIS_VOLATILE (mem_ref) = 1;
1776   if (TREE_SIDE_EFFECTS (prev_base))
1777     TREE_SIDE_EFFECTS (mem_ref) = 1;
1778   return mem_ref;
1779 }
1780 
1781 /* Construct a memory reference to a part of an aggregate BASE at the given
1782    OFFSET and of the same type as MODEL.  In case this is a reference to a
1783    bit-field, the function will replicate the last component_ref of model's
1784    expr to access it.  GSI and INSERT_AFTER have the same meaning as in
1785    build_ref_for_offset.  */
1786 
1787 static tree
1788 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1789 		     struct access *model, gimple_stmt_iterator *gsi,
1790 		     bool insert_after)
1791 {
1792   if (TREE_CODE (model->expr) == COMPONENT_REF
1793       && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1794     {
1795       /* This access represents a bit-field.  */
1796       tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1797 
1798       offset -= int_bit_position (fld);
1799       exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1800       t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
1801 				gsi, insert_after);
1802       /* The flag will be set on the record type.  */
1803       REF_REVERSE_STORAGE_ORDER (t) = 0;
1804       return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1805 			      NULL_TREE);
1806     }
1807   else
1808     return
1809       build_ref_for_offset (loc, base, offset, model->reverse, model->type,
1810 			    gsi, insert_after);
1811 }
1812 
1813 /* Attempt to build a memory reference that we could put into a gimple
1814    debug_bind statement.  Similar to build_ref_for_model but punts if it has to
1815    create statements and returns NULL instead.  This function also ignores
1816    alignment issues and so its results should never end up in non-debug
1817    statements.  */
1818 
1819 static tree
1820 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1821 			   struct access *model)
1822 {
1823   poly_int64 base_offset;
1824   tree off;
1825 
1826   if (TREE_CODE (model->expr) == COMPONENT_REF
1827       && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1828     return NULL_TREE;
1829 
1830   base = get_addr_base_and_unit_offset (base, &base_offset);
1831   if (!base)
1832     return NULL_TREE;
1833   if (TREE_CODE (base) == MEM_REF)
1834     {
1835       off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1836 			   base_offset + offset / BITS_PER_UNIT);
1837       off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1838       base = unshare_expr (TREE_OPERAND (base, 0));
1839     }
1840   else
1841     {
1842       off = build_int_cst (reference_alias_ptr_type (base),
1843 			   base_offset + offset / BITS_PER_UNIT);
1844       base = build_fold_addr_expr (unshare_expr (base));
1845     }
1846 
1847   return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1848 }
1849 
1850 /* Construct a memory reference consisting of component_refs and array_refs to
1851    a part of an aggregate *RES (which is of type TYPE).  The requested part
1852    should have type EXP_TYPE and be at the given OFFSET.  This function might not
1853    succeed; it returns true when it does, and only then does *RES point to something
1854    meaningful.  This function should be used only to build expressions that we
1855    might need to present to the user (e.g. in warnings).  In all other situations,
1856    build_ref_for_model or build_ref_for_offset should be used instead.  */
1857 
1858 static bool
1859 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1860 				    tree exp_type)
1861 {
1862   while (1)
1863     {
1864       tree fld;
1865       tree tr_size, index, minidx;
1866       HOST_WIDE_INT el_size;
1867 
1868       if (offset == 0 && exp_type
1869 	  && types_compatible_p (exp_type, type))
1870 	return true;
1871 
1872       switch (TREE_CODE (type))
1873 	{
1874 	case UNION_TYPE:
1875 	case QUAL_UNION_TYPE:
1876 	case RECORD_TYPE:
1877 	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1878 	    {
1879 	      HOST_WIDE_INT pos, size;
1880 	      tree tr_pos, expr, *expr_ptr;
1881 
1882 	      if (TREE_CODE (fld) != FIELD_DECL)
1883 		continue;
1884 
1885 	      tr_pos = bit_position (fld);
1886 	      if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1887 		continue;
1888 	      pos = tree_to_uhwi (tr_pos);
1889 	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1890 	      tr_size = DECL_SIZE (fld);
1891 	      if (!tr_size || !tree_fits_uhwi_p (tr_size))
1892 		continue;
1893 	      size = tree_to_uhwi (tr_size);
1894 	      if (size == 0)
1895 		{
1896 		  if (pos != offset)
1897 		    continue;
1898 		}
1899 	      else if (pos > offset || (pos + size) <= offset)
1900 		continue;
1901 
1902 	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1903 			     NULL_TREE);
1904 	      expr_ptr = &expr;
1905 	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1906 						      offset - pos, exp_type))
1907 		{
1908 		  *res = expr;
1909 		  return true;
1910 		}
1911 	    }
1912 	  return false;
1913 
1914 	case ARRAY_TYPE:
1915 	  tr_size = TYPE_SIZE (TREE_TYPE (type));
1916 	  if (!tr_size || !tree_fits_uhwi_p (tr_size))
1917 	    return false;
1918 	  el_size = tree_to_uhwi (tr_size);
1919 
1920 	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1921 	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1922 	    return false;
1923 	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1924 	  if (!integer_zerop (minidx))
1925 	    index = int_const_binop (PLUS_EXPR, index, minidx);
1926 	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1927 			 NULL_TREE, NULL_TREE);
1928 	  offset = offset % el_size;
1929 	  type = TREE_TYPE (type);
1930 	  break;
1931 
1932 	default:
1933 	  if (offset != 0)
1934 	    return false;
1935 
1936 	  if (exp_type)
1937 	    return false;
1938 	  else
1939 	    return true;
1940 	}
1941     }
1942 }
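
/* A small worked example (the type is hypothetical): for TYPE
   struct { int a; struct { char b[4]; } c; }, OFFSET 40 (in bits) and
   EXP_TYPE char, the loop above descends into field c at bit position 32 and
   then into array b whose elements are 8 bits wide, extending *RES with the
   components .c.b[1] and returning true.  */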
1943 
1944 /* Return true iff TYPE is the stdarg va_list type.  */
1945 
1946 static inline bool
1947 is_va_list_type (tree type)
1948 {
1949   return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1950 }
1951 
1952 /* Print a message to the dump file saying why variable VAR was rejected (MSG).  */
1953 
1954 static void
1955 reject (tree var, const char *msg)
1956 {
1957   if (dump_file && (dump_flags & TDF_DETAILS))
1958     {
1959       fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1960       print_generic_expr (dump_file, var);
1961       fprintf (dump_file, "\n");
1962     }
1963 }
1964 
1965 /* Return true if VAR is a candidate for SRA.  */
1966 
1967 static bool
1968 maybe_add_sra_candidate (tree var)
1969 {
1970   tree type = TREE_TYPE (var);
1971   const char *msg;
1972   tree_node **slot;
1973 
1974   if (!AGGREGATE_TYPE_P (type))
1975     {
1976       reject (var, "not aggregate");
1977       return false;
1978     }
1979   /* Allow constant-pool entries (that "need to live in memory")
1980      unless we are doing IPA SRA.  */
1981   if (needs_to_live_in_memory (var)
1982       && (sra_mode == SRA_MODE_EARLY_IPA || !constant_decl_p (var)))
1983     {
1984       reject (var, "needs to live in memory");
1985       return false;
1986     }
1987   if (TREE_THIS_VOLATILE (var))
1988     {
1989       reject (var, "is volatile");
1990       return false;
1991     }
1992   if (!COMPLETE_TYPE_P (type))
1993     {
1994       reject (var, "has incomplete type");
1995       return false;
1996     }
1997   if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1998     {
1999       reject (var, "type size not fixed");
2000       return false;
2001     }
2002   if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
2003     {
2004       reject (var, "type size is zero");
2005       return false;
2006     }
2007   if (type_internals_preclude_sra_p (type, &msg))
2008     {
2009       reject (var, msg);
2010       return false;
2011     }
2012   if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
2013 	 we also want to schedule it rather late.  Thus we ignore it in
2014 	 the early pass. */
2015       (sra_mode == SRA_MODE_EARLY_INTRA
2016        && is_va_list_type (type)))
2017     {
2018       reject (var, "is va_list");
2019       return false;
2020     }
2021 
2022   bitmap_set_bit (candidate_bitmap, DECL_UID (var));
2023   slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
2024   *slot = var;
2025 
2026   if (dump_file && (dump_flags & TDF_DETAILS))
2027     {
2028       fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
2029       print_generic_expr (dump_file, var);
2030       fprintf (dump_file, "\n");
2031     }
2032 
2033   return true;
2034 }
2035 
2036 /* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
2037    those variables and parameters whose type is suitable for scalarization.  */
2038 
2039 static bool
2040 find_var_candidates (void)
2041 {
2042   tree var, parm;
2043   unsigned int i;
2044   bool ret = false;
2045 
2046   for (parm = DECL_ARGUMENTS (current_function_decl);
2047        parm;
2048        parm = DECL_CHAIN (parm))
2049     ret |= maybe_add_sra_candidate (parm);
2050 
2051   FOR_EACH_LOCAL_DECL (cfun, i, var)
2052     {
2053       if (!VAR_P (var))
2054         continue;
2055 
2056       ret |= maybe_add_sra_candidate (var);
2057     }
2058 
2059   return ret;
2060 }
2061 
2062 /* Sort all accesses for the given variable, check for partial overlaps and
2063    return NULL if there are any.  If there are none, pick a representative for
2064    each combination of offset and size and create a linked list out of them.
2065    Return the pointer to the first representative and make sure it is the first
2066    one in the vector of accesses.  */
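/* For example, accesses covering bits [0, 32) and [32, 64), or [0, 64) and
   [0, 32), do not conflict, whereas [0, 32) and [16, 48) overlap only
   partially, so the function gives up on the variable and returns NULL.  */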
2067 
2068 static struct access *
2069 sort_and_splice_var_accesses (tree var)
2070 {
2071   int i, j, access_count;
2072   struct access *res, **prev_acc_ptr = &res;
2073   vec<access_p> *access_vec;
2074   bool first = true;
2075   HOST_WIDE_INT low = -1, high = 0;
2076 
2077   access_vec = get_base_access_vector (var);
2078   if (!access_vec)
2079     return NULL;
2080   access_count = access_vec->length ();
2081 
2082   /* Sort by <OFFSET, SIZE>.  */
2083   access_vec->qsort (compare_access_positions);
2084 
2085   i = 0;
2086   while (i < access_count)
2087     {
2088       struct access *access = (*access_vec)[i];
2089       bool grp_write = access->write;
2090       bool grp_read = !access->write;
2091       bool grp_scalar_write = access->write
2092 	&& is_gimple_reg_type (access->type);
2093       bool grp_scalar_read = !access->write
2094 	&& is_gimple_reg_type (access->type);
2095       bool grp_assignment_read = access->grp_assignment_read;
2096       bool grp_assignment_write = access->grp_assignment_write;
2097       bool multiple_scalar_reads = false;
2098       bool total_scalarization = access->grp_total_scalarization;
2099       bool grp_partial_lhs = access->grp_partial_lhs;
2100       bool first_scalar = is_gimple_reg_type (access->type);
2101       bool unscalarizable_region = access->grp_unscalarizable_region;
2102       bool bf_non_full_precision
2103 	= (INTEGRAL_TYPE_P (access->type)
2104 	   && TYPE_PRECISION (access->type) != access->size
2105 	   && TREE_CODE (access->expr) == COMPONENT_REF
2106 	   && DECL_BIT_FIELD (TREE_OPERAND (access->expr, 1)));
2107 
2108       if (first || access->offset >= high)
2109 	{
2110 	  first = false;
2111 	  low = access->offset;
2112 	  high = access->offset + access->size;
2113 	}
2114       else if (access->offset > low && access->offset + access->size > high)
2115 	return NULL;
2116       else
2117 	gcc_assert (access->offset >= low
2118 		    && access->offset + access->size <= high);
2119 
2120       j = i + 1;
2121       while (j < access_count)
2122 	{
2123 	  struct access *ac2 = (*access_vec)[j];
2124 	  if (ac2->offset != access->offset || ac2->size != access->size)
2125 	    break;
2126 	  if (ac2->write)
2127 	    {
2128 	      grp_write = true;
2129 	      grp_scalar_write = (grp_scalar_write
2130 				  || is_gimple_reg_type (ac2->type));
2131 	    }
2132 	  else
2133 	    {
2134 	      grp_read = true;
2135 	      if (is_gimple_reg_type (ac2->type))
2136 		{
2137 		  if (grp_scalar_read)
2138 		    multiple_scalar_reads = true;
2139 		  else
2140 		    grp_scalar_read = true;
2141 		}
2142 	    }
2143 	  grp_assignment_read |= ac2->grp_assignment_read;
2144 	  grp_assignment_write |= ac2->grp_assignment_write;
2145 	  grp_partial_lhs |= ac2->grp_partial_lhs;
2146 	  unscalarizable_region |= ac2->grp_unscalarizable_region;
2147 	  total_scalarization |= ac2->grp_total_scalarization;
2148 	  relink_to_new_repr (access, ac2);
2149 
2150 	  /* If there are both aggregate-type and scalar-type accesses with
2151 	     this combination of size and offset, the comparison function
2152 	     should have put the scalars first.  */
2153 	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2154 	  /* It also prefers integral types to non-integral.  However, when the
2155 	     precision of the selected type does not span the entire area and
2156 	     should also be used for a non-integer (e.g. float), we must not
2157 	     let that happen.  Normally analyze_access_subtree expands the type
2158 	     to cover the entire area but for bit-fields it doesn't.  */
2159 	  if (bf_non_full_precision && !INTEGRAL_TYPE_P (ac2->type))
2160 	    {
2161 	      if (dump_file && (dump_flags & TDF_DETAILS))
2162 		{
2163 		  fprintf (dump_file, "Cannot scalarize the following access "
2164 			   "because insufficient precision integer type was "
2165 			   "selected.\n  ");
2166 		  dump_access (dump_file, access, false);
2167 		}
2168 	      unscalarizable_region = true;
2169 	    }
2170 	  ac2->group_representative = access;
2171 	  j++;
2172 	}
2173 
2174       i = j;
2175 
2176       access->group_representative = access;
2177       access->grp_write = grp_write;
2178       access->grp_read = grp_read;
2179       access->grp_scalar_read = grp_scalar_read;
2180       access->grp_scalar_write = grp_scalar_write;
2181       access->grp_assignment_read = grp_assignment_read;
2182       access->grp_assignment_write = grp_assignment_write;
2183       access->grp_hint = total_scalarization
2184 	|| (multiple_scalar_reads && !constant_decl_p (var));
2185       access->grp_total_scalarization = total_scalarization;
2186       access->grp_partial_lhs = grp_partial_lhs;
2187       access->grp_unscalarizable_region = unscalarizable_region;
2188 
2189       *prev_acc_ptr = access;
2190       prev_acc_ptr = &access->next_grp;
2191     }
2192 
2193   gcc_assert (res == (*access_vec)[0]);
2194   return res;
2195 }
2196 
2197 /* Create a variable for the given ACCESS which determines the type, name and a
2198    few other properties.  Return the variable declaration; the caller is
2199    expected to store it in ACCESS->replacement_decl.  */
2200 
2201 static tree
2202 create_access_replacement (struct access *access)
2203 {
2204   tree repl;
2205 
2206   if (access->grp_to_be_debug_replaced)
2207     {
2208       repl = create_tmp_var_raw (access->type);
2209       DECL_CONTEXT (repl) = current_function_decl;
2210     }
2211   else
2212     /* Drop any special alignment on the type if it's not on the main
2213        variant.  This avoids issues with weirdo ABIs like AAPCS.  */
2214     repl = create_tmp_var (build_qualified_type
2215 			     (TYPE_MAIN_VARIANT (access->type),
2216 			      TYPE_QUALS (access->type)), "SR");
2217   if (TREE_CODE (access->type) == COMPLEX_TYPE
2218       || TREE_CODE (access->type) == VECTOR_TYPE)
2219     {
2220       if (!access->grp_partial_lhs)
2221 	DECL_GIMPLE_REG_P (repl) = 1;
2222     }
2223   else if (access->grp_partial_lhs
2224 	   && is_gimple_reg_type (access->type))
2225     TREE_ADDRESSABLE (repl) = 1;
2226 
2227   DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2228   DECL_ARTIFICIAL (repl) = 1;
2229   DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2230 
2231   if (DECL_NAME (access->base)
2232       && !DECL_IGNORED_P (access->base)
2233       && !DECL_ARTIFICIAL (access->base))
2234     {
2235       char *pretty_name = make_fancy_name (access->expr);
2236       tree debug_expr = unshare_expr_without_location (access->expr), d;
2237       bool fail = false;
2238 
2239       DECL_NAME (repl) = get_identifier (pretty_name);
2240       DECL_NAMELESS (repl) = 1;
2241       obstack_free (&name_obstack, pretty_name);
2242 
2243       /* Get rid of any SSA_NAMEs embedded in debug_expr,
2244 	 as DECL_DEBUG_EXPR isn't considered when looking for still
2245 	 used SSA_NAMEs and thus they could be freed.  All debug info
2246 	 generation cares about is whether something is constant or variable
2247 	 and that get_ref_base_and_extent works properly on the
2248 	 expression.  It cannot handle accesses at a non-constant offset
2249 	 though, so just give up in those cases.  */
2250       for (d = debug_expr;
2251 	   !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2252 	   d = TREE_OPERAND (d, 0))
2253 	switch (TREE_CODE (d))
2254 	  {
2255 	  case ARRAY_REF:
2256 	  case ARRAY_RANGE_REF:
2257 	    if (TREE_OPERAND (d, 1)
2258 		&& TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2259 	      fail = true;
2260 	    if (TREE_OPERAND (d, 3)
2261 		&& TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2262 	      fail = true;
2263 	    /* FALLTHRU */
2264 	  case COMPONENT_REF:
2265 	    if (TREE_OPERAND (d, 2)
2266 		&& TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2267 	      fail = true;
2268 	    break;
2269 	  case MEM_REF:
2270 	    if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2271 	      fail = true;
2272 	    else
2273 	      d = TREE_OPERAND (d, 0);
2274 	    break;
2275 	  default:
2276 	    break;
2277 	  }
2278       if (!fail)
2279 	{
2280 	  SET_DECL_DEBUG_EXPR (repl, debug_expr);
2281 	  DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2282 	}
2283       if (access->grp_no_warning)
2284 	TREE_NO_WARNING (repl) = 1;
2285       else
2286 	TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2287     }
2288   else
2289     TREE_NO_WARNING (repl) = 1;
2290 
2291   if (dump_file)
2292     {
2293       if (access->grp_to_be_debug_replaced)
2294 	{
2295 	  fprintf (dump_file, "Created a debug-only replacement for ");
2296 	  print_generic_expr (dump_file, access->base);
2297 	  fprintf (dump_file, " offset: %u, size: %u\n",
2298 		   (unsigned) access->offset, (unsigned) access->size);
2299 	}
2300       else
2301 	{
2302 	  fprintf (dump_file, "Created a replacement for ");
2303 	  print_generic_expr (dump_file, access->base);
2304 	  fprintf (dump_file, " offset: %u, size: %u: ",
2305 		   (unsigned) access->offset, (unsigned) access->size);
2306 	  print_generic_expr (dump_file, repl);
2307 	  fprintf (dump_file, "\n");
2308 	}
2309     }
2310   sra_stats.replacements++;
2311 
2312   return repl;
2313 }
2314 
2315 /* Return ACCESS's scalar replacement, which must exist.  */
2316 
2317 static inline tree
2318 get_access_replacement (struct access *access)
2319 {
2320   gcc_checking_assert (access->replacement_decl);
2321   return access->replacement_decl;
2322 }
2323 
2324 
2325 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2326    linked list along the way.  Stop when *ACCESS is NULL or the access pointed
2327    to by it is not "within" the root.  Return false iff some accesses partially
2328    overlap.  */
2329 
2330 static bool
2331 build_access_subtree (struct access **access)
2332 {
2333   struct access *root = *access, *last_child = NULL;
2334   HOST_WIDE_INT limit = root->offset + root->size;
2335 
2336   *access = (*access)->next_grp;
2337   while  (*access && (*access)->offset + (*access)->size <= limit)
2338     {
2339       if (!last_child)
2340 	root->first_child = *access;
2341       else
2342 	last_child->next_sibling = *access;
2343       last_child = *access;
2344       (*access)->parent = root;
2345       (*access)->grp_write |= root->grp_write;
2346 
2347       if (!build_access_subtree (access))
2348 	return false;
2349     }
2350 
2351   if (*access && (*access)->offset < limit)
2352     return false;
2353 
2354   return true;
2355 }
2356 
2357 /* Build a tree of access representatives.  ACCESS is a pointer to the first
2358    one; the others are linked in a list by the next_grp field.  Return false iff
2359    some accesses partially overlap.  */
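/* For instance, representatives for bit ranges [0, 64), [0, 32) and [32, 64)
   of the same variable form a single tree in which [0, 64) is the root and
   the two smaller accesses are its children.  */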
2360 
2361 static bool
2362 build_access_trees (struct access *access)
2363 {
2364   while (access)
2365     {
2366       struct access *root = access;
2367 
2368       if (!build_access_subtree (&access))
2369 	return false;
2370       root->next_grp = access;
2371     }
2372   return true;
2373 }
2374 
2375 /* Return true if EXPR contains some ARRAY_REFs into a variably-bounded
2376    array.  */
2377 
2378 static bool
2379 expr_with_var_bounded_array_refs_p (tree expr)
2380 {
2381   while (handled_component_p (expr))
2382     {
2383       if (TREE_CODE (expr) == ARRAY_REF
2384 	  && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2385 	return true;
2386       expr = TREE_OPERAND (expr, 0);
2387     }
2388   return false;
2389 }
2390 
2391 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2392    doing so seems beneficial and ALLOW_REPLACEMENTS allows it.  Also set all
2393    sorts of access flags appropriately along the way, notably propagating the
2394    grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2395    grp_total_scalarization flags from PARENT down to ROOT.
2396 
2397    Creating a replacement for a scalar access is considered beneficial if its
2398    grp_hint is set (this means we are either attempting total scalarization or
2399    there is more than one direct read access) or according to the following
2400    table:
2401 
2402    Access written to through a scalar type (once or more times)
2403    |
2404    |	Written to in an assignment statement
2405    |	|
2406    |	|	Access read as scalar _once_
2407    |	|	|
2408    |   	|	|	Read in an assignment statement
2409    |	|	|	|
2410    |   	|	|	|	Scalarize	Comment
2411 -----------------------------------------------------------------------------
2412    0	0	0	0			No access for the scalar
2413    0	0	0	1			No access for the scalar
2414    0	0	1	0	No		Single read - won't help
2415    0	0	1	1	No		The same case
2416    0	1	0	0			No access for the scalar
2417    0	1	0	1			No access for the scalar
2418    0	1	1	0	Yes		s = *g; return s.i;
2419    0	1	1	1       Yes		The same case as above
2420    1	0	0	0	No		Won't help
2421    1	0	0	1	Yes		s.i = 1; *g = s;
2422    1	0	1	0	Yes		s.i = 5; g = s.i;
2423    1	0	1	1	Yes		The same case as above
2424    1	1	0	0	No		Won't help.
2425    1	1	0	1	Yes		s.i = 1; *g = s;
2426    1	1	1	0	Yes		s = *g; return s.i;
2427    1	1	1	1	Yes		Any of the above yeses  */
2428 
2429 static bool
2430 analyze_access_subtree (struct access *root, struct access *parent,
2431 			bool allow_replacements)
2432 {
2433   struct access *child;
2434   HOST_WIDE_INT limit = root->offset + root->size;
2435   HOST_WIDE_INT covered_to = root->offset;
2436   bool scalar = is_gimple_reg_type (root->type);
2437   bool hole = false, sth_created = false;
2438 
2439   if (parent)
2440     {
2441       if (parent->grp_read)
2442 	root->grp_read = 1;
2443       if (parent->grp_assignment_read)
2444 	root->grp_assignment_read = 1;
2445       if (parent->grp_write)
2446 	root->grp_write = 1;
2447       if (parent->grp_assignment_write)
2448 	root->grp_assignment_write = 1;
2449       if (parent->grp_total_scalarization)
2450 	root->grp_total_scalarization = 1;
2451     }
2452 
2453   if (root->grp_unscalarizable_region)
2454     allow_replacements = false;
2455 
2456   if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2457     allow_replacements = false;
2458 
2459   for (child = root->first_child; child; child = child->next_sibling)
2460     {
2461       hole |= covered_to < child->offset;
2462       sth_created |= analyze_access_subtree (child, root,
2463 					     allow_replacements && !scalar);
2464 
2465       root->grp_unscalarized_data |= child->grp_unscalarized_data;
2466       root->grp_total_scalarization &= child->grp_total_scalarization;
2467       if (child->grp_covered)
2468 	covered_to += child->size;
2469       else
2470 	hole = true;
2471     }
2472 
2473   if (allow_replacements && scalar && !root->first_child
2474       && (root->grp_hint
2475 	  || ((root->grp_scalar_read || root->grp_assignment_read)
2476 	      && (root->grp_scalar_write || root->grp_assignment_write))))
2477     {
2478       /* Always create access replacements that cover the whole access.
2479          For integral types this means the precision has to match.
2480 	 Avoid assumptions based on the integral type kind, too.  */
2481       if (INTEGRAL_TYPE_P (root->type)
2482 	  && (TREE_CODE (root->type) != INTEGER_TYPE
2483 	      || TYPE_PRECISION (root->type) != root->size)
2484 	  /* But leave bitfield accesses alone.  */
2485 	  && (TREE_CODE (root->expr) != COMPONENT_REF
2486 	      || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2487 	{
2488 	  tree rt = root->type;
2489 	  gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2490 		      && (root->size % BITS_PER_UNIT) == 0);
2491 	  root->type = build_nonstandard_integer_type (root->size,
2492 						       TYPE_UNSIGNED (rt));
2493 	  root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2494 					     root->offset, root->reverse,
2495 					     root->type, NULL, false);
2496 
2497 	  if (dump_file && (dump_flags & TDF_DETAILS))
2498 	    {
2499 	      fprintf (dump_file, "Changing the type of a replacement for ");
2500 	      print_generic_expr (dump_file, root->base);
2501 	      fprintf (dump_file, " offset: %u, size: %u ",
2502 		       (unsigned) root->offset, (unsigned) root->size);
2503 	      fprintf (dump_file, " to an integer.\n");
2504 	    }
2505 	}
2506 
2507       root->grp_to_be_replaced = 1;
2508       root->replacement_decl = create_access_replacement (root);
2509       sth_created = true;
2510       hole = false;
2511     }
2512   else
2513     {
2514       if (allow_replacements
2515 	  && scalar && !root->first_child
2516 	  && (root->grp_scalar_write || root->grp_assignment_write)
2517 	  && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2518 			    DECL_UID (root->base)))
2519 	{
2520 	  gcc_checking_assert (!root->grp_scalar_read
2521 			       && !root->grp_assignment_read);
2522 	  sth_created = true;
2523 	  if (MAY_HAVE_DEBUG_BIND_STMTS)
2524 	    {
2525 	      root->grp_to_be_debug_replaced = 1;
2526 	      root->replacement_decl = create_access_replacement (root);
2527 	    }
2528 	}
2529 
2530       if (covered_to < limit)
2531 	hole = true;
2532       if (scalar || !allow_replacements)
2533 	root->grp_total_scalarization = 0;
2534     }
2535 
2536   if (!hole || root->grp_total_scalarization)
2537     root->grp_covered = 1;
2538   else if (root->grp_write || comes_initialized_p (root->base))
2539     root->grp_unscalarized_data = 1; /* not covered and written to */
2540   return sth_created;
2541 }
2542 
2543 /* Analyze all access trees linked by next_grp by means of
2544    analyze_access_subtree.  */
2545 static bool
2546 analyze_access_trees (struct access *access)
2547 {
2548   bool ret = false;
2549 
2550   while (access)
2551     {
2552       if (analyze_access_subtree (access, NULL, true))
2553 	ret = true;
2554       access = access->next_grp;
2555     }
2556 
2557   return ret;
2558 }
2559 
2560 /* Return true iff a potential new child of LACC at offset OFFSET and with size
2561    SIZE would conflict with an already existing one.  If exactly such a child
2562    already exists in LACC, store a pointer to it in EXACT_MATCH.  */
2563 
2564 static bool
2565 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2566 			      HOST_WIDE_INT size, struct access **exact_match)
2567 {
2568   struct access *child;
2569 
2570   for (child = lacc->first_child; child; child = child->next_sibling)
2571     {
2572       if (child->offset == norm_offset && child->size == size)
2573 	{
2574 	  *exact_match = child;
2575 	  return true;
2576 	}
2577 
2578       if (child->offset < norm_offset + size
2579 	  && child->offset + child->size > norm_offset)
2580 	return true;
2581     }
2582 
2583   return false;
2584 }
2585 
2586 /* Create a new child access of PARENT, with all properties just like MODEL
2587    except for its offset and with its grp_write false and grp_read true.
2588    Return the new access or NULL if it cannot be created.  Note that this
2589    access is created long after all splicing and sorting; it's not located in
2590    any access vector and is automatically a representative of its group.  Set
2591    the grp_write flag of the new access if SET_GRP_WRITE is true.  */
2592 
2593 static struct access *
2594 create_artificial_child_access (struct access *parent, struct access *model,
2595 				HOST_WIDE_INT new_offset,
2596 				bool set_grp_write)
2597 {
2598   struct access **child;
2599   tree expr = parent->base;
2600 
2601   gcc_assert (!model->grp_unscalarizable_region);
2602 
2603   struct access *access = access_pool.allocate ();
2604   memset (access, 0, sizeof (struct access));
2605   if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2606 					   model->type))
2607     {
2608       access->grp_no_warning = true;
2609       expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2610 				  new_offset, model, NULL, false);
2611     }
2612 
2613   access->base = parent->base;
2614   access->expr = expr;
2615   access->offset = new_offset;
2616   access->size = model->size;
2617   access->type = model->type;
2618   access->grp_write = set_grp_write;
2619   access->grp_read = false;
2620   access->reverse = model->reverse;
2621 
2622   child = &parent->first_child;
2623   while (*child && (*child)->offset < new_offset)
2624     child = &(*child)->next_sibling;
2625 
2626   access->next_sibling = *child;
2627   *child = access;
2628 
2629   return access;
2630 }
2631 
2632 
2633 /* Beginning with ACCESS, traverse its whole access subtree and mark all
2634    sub-trees as written to.  If any of them has not been marked so previously
2635    and has assignment links leading from it, re-enqueue it.  */
2636 
2637 static void
2638 subtree_mark_written_and_enqueue (struct access *access)
2639 {
2640   if (access->grp_write)
2641     return;
2642   access->grp_write = true;
2643   add_access_to_work_queue (access);
2644 
2645   struct access *child;
2646   for (child = access->first_child; child; child = child->next_sibling)
2647     subtree_mark_written_and_enqueue (child);
2648 }
2649 
2650 /* Propagate subaccesses and grp_write flags of RACC across an assignment link
2651    to LACC.  Enqueue sub-accesses as necessary so that the write flag is
2652    propagated transitively.  Return true if anything changed.  Additionally, if
2653    RACC is a scalar access but LACC is not, change the type of the latter, if
2654    possible.  */
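/* For example, when propagating across a link created for an aggregate
   assignment l = r and RACC has a child describing accesses to r.f, a
   matching artificial child for l.f is added under LACC (unless it would
   partially overlap an existing one), so that the field can be scalarized on
   both sides.  ("l", "r" and "f" are illustrative names only.)  */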
2655 
2656 static bool
2657 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2658 {
2659   struct access *rchild;
2660   HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2661   bool ret = false;
2662 
2663   /* If the LHS is still not marked as being written to, we only need to do so
2664      if the RHS at this level actually was.  */
2665   if (!lacc->grp_write)
2666     {
2667       gcc_checking_assert (!comes_initialized_p (racc->base));
2668       if (racc->grp_write)
2669 	{
2670 	  subtree_mark_written_and_enqueue (lacc);
2671 	  ret = true;
2672 	}
2673     }
2674 
2675   if (is_gimple_reg_type (lacc->type)
2676       || lacc->grp_unscalarizable_region
2677       || racc->grp_unscalarizable_region)
2678     {
2679       if (!lacc->grp_write)
2680 	{
2681 	  ret = true;
2682 	  subtree_mark_written_and_enqueue (lacc);
2683 	}
2684       return ret;
2685     }
2686 
2687   if (is_gimple_reg_type (racc->type))
2688     {
2689       if (!lacc->grp_write)
2690 	{
2691 	  ret = true;
2692 	  subtree_mark_written_and_enqueue (lacc);
2693 	}
2694       if (!lacc->first_child && !racc->first_child)
2695 	{
2696 	  tree t = lacc->base;
2697 
2698 	  lacc->type = racc->type;
2699 	  if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2700 						  lacc->offset, racc->type))
2701 	    lacc->expr = t;
2702 	  else
2703 	    {
2704 	      lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2705 						lacc->base, lacc->offset,
2706 						racc, NULL, false);
2707 	      lacc->grp_no_warning = true;
2708 	    }
2709 	}
2710       return ret;
2711     }
2712 
2713   for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2714     {
2715       struct access *new_acc = NULL;
2716       HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2717 
2718       if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2719 					&new_acc))
2720 	{
2721 	  if (new_acc)
2722 	    {
2723 	      if (!new_acc->grp_write && rchild->grp_write)
2724 		{
2725 		  gcc_assert (!lacc->grp_write);
2726 		  subtree_mark_written_and_enqueue (new_acc);
2727 		  ret = true;
2728 		}
2729 
2730 	      rchild->grp_hint = 1;
2731 	      new_acc->grp_hint |= new_acc->grp_read;
2732 	      if (rchild->first_child
2733 		  && propagate_subaccesses_across_link (new_acc, rchild))
2734 		{
2735 		  ret = true;
2736 		  add_access_to_work_queue (new_acc);
2737 		}
2738 	    }
2739 	  else
2740 	    {
2741 	      if (!lacc->grp_write)
2742 		{
2743 		  ret = true;
2744 		  subtree_mark_written_and_enqueue (lacc);
2745 		}
2746 	    }
2747 	  continue;
2748 	}
2749 
2750       if (rchild->grp_unscalarizable_region)
2751 	{
2752 	  if (rchild->grp_write && !lacc->grp_write)
2753 	    {
2754 	      ret = true;
2755 	      subtree_mark_written_and_enqueue (lacc);
2756 	    }
2757 	  continue;
2758 	}
2759 
2760       rchild->grp_hint = 1;
2761       new_acc = create_artificial_child_access (lacc, rchild, norm_offset,
2762 						lacc->grp_write
2763 						|| rchild->grp_write);
2764       gcc_checking_assert (new_acc);
2765       if (racc->first_child)
2766 	propagate_subaccesses_across_link (new_acc, rchild);
2767 
2768       add_access_to_work_queue (lacc);
2769       ret = true;
2770     }
2771 
2772   return ret;
2773 }
2774 
2775 /* Propagate all subaccesses across assignment links.  */
2776 
2777 static void
2778 propagate_all_subaccesses (void)
2779 {
2780   while (work_queue_head)
2781     {
2782       struct access *racc = pop_access_from_work_queue ();
2783       struct assign_link *link;
2784 
2785       if (racc->group_representative)
2786 	racc = racc->group_representative;
2787       gcc_assert (racc->first_link);
2788 
2789       for (link = racc->first_link; link; link = link->next)
2790 	{
2791 	  struct access *lacc = link->lacc;
2792 
2793 	  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2794 	    continue;
2795 	  lacc = lacc->group_representative;
2796 
2797 	  bool reque_parents = false;
2798 	  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
2799 	    {
2800 	      if (!lacc->grp_write)
2801 		{
2802 		  subtree_mark_written_and_enqueue (lacc);
2803 		  reque_parents = true;
2804 		}
2805 	    }
2806 	  else if (propagate_subaccesses_across_link (lacc, racc))
2807 	    reque_parents = true;
2808 
2809 	  if (reque_parents)
2810 	    do
2811 	      {
2812 		add_access_to_work_queue (lacc);
2813 		lacc = lacc->parent;
2814 	      }
2815 	    while (lacc);
2816 	}
2817     }
2818 }
2819 
2820 /* Go through all accesses collected throughout the (intraprocedural) analysis
2821    stage, exclude overlapping ones, identify representatives and build trees
2822    out of them, making decisions about scalarization on the way.  Return true
2823    iff there are any to-be-scalarized variables after this stage. */
2824 
2825 static bool
2826 analyze_all_variable_accesses (void)
2827 {
2828   int res = 0;
2829   bitmap tmp = BITMAP_ALLOC (NULL);
2830   bitmap_iterator bi;
2831   unsigned i;
2832   bool optimize_speed_p = !optimize_function_for_size_p (cfun);
2833 
2834   enum compiler_param param = optimize_speed_p
2835 			? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
2836 			: PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;
2837 
2838   /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
2839      fall back to a target default.  */
2840   unsigned HOST_WIDE_INT max_scalarization_size
2841     = global_options_set.x_param_values[param]
2842       ? PARAM_VALUE (param)
2843       : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
2844 
2845   max_scalarization_size *= BITS_PER_UNIT;
2846 
2847   EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2848     if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2849 	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2850       {
2851 	tree var = candidate (i);
2852 
2853 	if (VAR_P (var) && scalarizable_type_p (TREE_TYPE (var),
2854 						constant_decl_p (var)))
2855 	  {
2856 	    if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2857 		<= max_scalarization_size)
2858 	      {
2859 		create_total_scalarization_access (var);
2860 		completely_scalarize (var, TREE_TYPE (var), 0, var);
2861 		statistics_counter_event (cfun,
2862 					  "Totally-scalarized aggregates", 1);
2863 		if (dump_file && (dump_flags & TDF_DETAILS))
2864 		  {
2865 		    fprintf (dump_file, "Will attempt to totally scalarize ");
2866 		    print_generic_expr (dump_file, var);
2867 		    fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2868 		  }
2869 	      }
2870 	    else if (dump_file && (dump_flags & TDF_DETAILS))
2871 	      {
2872 		fprintf (dump_file, "Too big to totally scalarize: ");
2873 		print_generic_expr (dump_file, var);
2874 		fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2875 	      }
2876 	  }
2877       }
2878 
2879   bitmap_copy (tmp, candidate_bitmap);
2880   EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2881     {
2882       tree var = candidate (i);
2883       struct access *access;
2884 
2885       access = sort_and_splice_var_accesses (var);
2886       if (!access || !build_access_trees (access))
2887 	disqualify_candidate (var,
2888 			      "No or inhibitingly overlapping accesses.");
2889     }
2890 
2891   propagate_all_subaccesses ();
2892 
2893   bitmap_copy (tmp, candidate_bitmap);
2894   EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2895     {
2896       tree var = candidate (i);
2897       struct access *access = get_first_repr_for_decl (var);
2898 
2899       if (analyze_access_trees (access))
2900 	{
2901 	  res++;
2902 	  if (dump_file && (dump_flags & TDF_DETAILS))
2903 	    {
2904 	      fprintf (dump_file, "\nAccess trees for ");
2905 	      print_generic_expr (dump_file, var);
2906 	      fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2907 	      dump_access_tree (dump_file, access);
2908 	      fprintf (dump_file, "\n");
2909 	    }
2910 	}
2911       else
2912 	disqualify_candidate (var, "No scalar replacements to be created.");
2913     }
2914 
2915   BITMAP_FREE (tmp);
2916 
2917   if (res)
2918     {
2919       statistics_counter_event (cfun, "Scalarized aggregates", res);
2920       return true;
2921     }
2922   else
2923     return false;
2924 }
2925 
2926 /* Generate statements copying scalar replacements of accesses within a subtree
2927    into or out of AGG.  ACCESS, all its children, siblings and their children
2928    are to be processed.  AGG is an aggregate type expression (can be a
2929    declaration but does not have to be, it can for example also be a mem_ref or
2930    a series of handled components).  TOP_OFFSET is the offset of the processed
2931    subtree which has to be subtracted from offsets of individual accesses to
2932    get corresponding offsets for AGG.  If CHUNK_SIZE is non-zero, copy only
2933    replacements in the interval <start_offset, start_offset + chunk_size>,
2934    otherwise copy all.  GSI is a statement iterator used to place the new
2935    statements.  WRITE should be true when the statements should write from AGG
2936    to the replacement and false if vice versa.  If INSERT_AFTER is true, new
2937    statements will be added after the current statement in GSI, they will be
2938    added before the statement otherwise.  */
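/* For instance, with WRITE set and an access subtree describing two
   scalarized fields of AGG, this emits two assignments of the form
   <repl_1> = AGG.f1; <repl_2> = AGG.f2; at GSI (field names are purely
   illustrative), and with WRITE unset the assignments go the other way.  */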
2939 
2940 static void
2941 generate_subtree_copies (struct access *access, tree agg,
2942 			 HOST_WIDE_INT top_offset,
2943 			 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2944 			 gimple_stmt_iterator *gsi, bool write,
2945 			 bool insert_after, location_t loc)
2946 {
2947   /* Never write anything into constant pool decls.  See PR70602.  */
2948   if (!write && constant_decl_p (agg))
2949     return;
2950   do
2951     {
2952       if (chunk_size && access->offset >= start_offset + chunk_size)
2953 	return;
2954 
2955       if (access->grp_to_be_replaced
2956 	  && (chunk_size == 0
2957 	      || access->offset + access->size > start_offset))
2958 	{
2959 	  tree expr, repl = get_access_replacement (access);
2960 	  gassign *stmt;
2961 
2962 	  expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2963 				      access, gsi, insert_after);
2964 
2965 	  if (write)
2966 	    {
2967 	      if (access->grp_partial_lhs)
2968 		expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2969 						 !insert_after,
2970 						 insert_after ? GSI_NEW_STMT
2971 						 : GSI_SAME_STMT);
2972 	      stmt = gimple_build_assign (repl, expr);
2973 	    }
2974 	  else
2975 	    {
2976 	      TREE_NO_WARNING (repl) = 1;
2977 	      if (access->grp_partial_lhs)
2978 		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2979 						 !insert_after,
2980 						 insert_after ? GSI_NEW_STMT
2981 						 : GSI_SAME_STMT);
2982 	      stmt = gimple_build_assign (expr, repl);
2983 	    }
2984 	  gimple_set_location (stmt, loc);
2985 
2986 	  if (insert_after)
2987 	    gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2988 	  else
2989 	    gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2990 	  update_stmt (stmt);
2991 	  sra_stats.subtree_copies++;
2992 	}
2993       else if (write
2994 	       && access->grp_to_be_debug_replaced
2995 	       && (chunk_size == 0
2996 		   || access->offset + access->size > start_offset))
2997 	{
2998 	  gdebug *ds;
2999 	  tree drhs = build_debug_ref_for_model (loc, agg,
3000 						 access->offset - top_offset,
3001 						 access);
3002 	  ds = gimple_build_debug_bind (get_access_replacement (access),
3003 					drhs, gsi_stmt (*gsi));
3004 	  if (insert_after)
3005 	    gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3006 	  else
3007 	    gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3008 	}
3009 
3010       if (access->first_child)
3011 	generate_subtree_copies (access->first_child, agg, top_offset,
3012 				 start_offset, chunk_size, gsi,
3013 				 write, insert_after, loc);
3014 
3015       access = access->next_sibling;
3016     }
3017   while (access);
3018 }
3019 
3020 /* Assign zero to all scalar replacements in an access subtree.  ACCESS is the
3021    root of the subtree to be processed.  GSI is the statement iterator used
3022    for inserting statements which are added after the current statement if
3023    INSERT_AFTER is true or before it otherwise.  */
3024 
3025 static void
3026 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
3027 			bool insert_after, location_t loc)
3028 
3029 {
3030   struct access *child;
3031 
3032   if (access->grp_to_be_replaced)
3033     {
3034       gassign *stmt;
3035 
3036       stmt = gimple_build_assign (get_access_replacement (access),
3037 				  build_zero_cst (access->type));
3038       if (insert_after)
3039 	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3040       else
3041 	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3042       update_stmt (stmt);
3043       gimple_set_location (stmt, loc);
3044     }
3045   else if (access->grp_to_be_debug_replaced)
3046     {
3047       gdebug *ds
3048 	= gimple_build_debug_bind (get_access_replacement (access),
3049 				   build_zero_cst (access->type),
3050 				   gsi_stmt (*gsi));
3051       if (insert_after)
3052 	gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3053       else
3054 	gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3055     }
3056 
3057   for (child = access->first_child; child; child = child->next_sibling)
3058     init_subtree_with_zero (child, gsi, insert_after, loc);
3059 }
3060 
3061 /* Clobber all scalar replacements in an access subtree.  ACCESS is the
3062    root of the subtree to be processed.  GSI is the statement iterator used
3063    for inserting statements which are added after the current statement if
3064    INSERT_AFTER is true or before it otherwise.  */
3065 
3066 static void
3067 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
3068 		bool insert_after, location_t loc)
3069 
3070 {
3071   struct access *child;
3072 
3073   if (access->grp_to_be_replaced)
3074     {
3075       tree rep = get_access_replacement (access);
3076       tree clobber = build_constructor (access->type, NULL);
3077       TREE_THIS_VOLATILE (clobber) = 1;
3078       gimple *stmt = gimple_build_assign (rep, clobber);
3079 
3080       if (insert_after)
3081 	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3082       else
3083 	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3084       update_stmt (stmt);
3085       gimple_set_location (stmt, loc);
3086     }
3087 
3088   for (child = access->first_child; child; child = child->next_sibling)
3089     clobber_subtree (child, gsi, insert_after, loc);
3090 }
3091 
3092 /* Search for an access representative for the given expression EXPR and
3093    return it or NULL if it cannot be found.  */
3094 
3095 static struct access *
3096 get_access_for_expr (tree expr)
3097 {
3098   poly_int64 poffset, psize, pmax_size;
3099   HOST_WIDE_INT offset, max_size;
3100   tree base;
3101   bool reverse;
3102 
3103   /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
3104      a different size than the size of its argument and we need the latter
3105      one.  */
3106   if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3107     expr = TREE_OPERAND (expr, 0);
3108 
3109   base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
3110 				  &reverse);
3111   if (!known_size_p (pmax_size)
3112       || !pmax_size.is_constant (&max_size)
3113       || !poffset.is_constant (&offset)
3114       || !DECL_P (base))
3115     return NULL;
3116 
3117   if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
3118     return NULL;
3119 
3120   return get_var_base_offset_size_access (base, offset, max_size);
3121 }
3122 
3123 /* Replace the expression EXPR with a scalar replacement if there is one and
3124    generate other statements to do type conversion or subtree copying if
3125    necessary.  GSI is used to place newly created statements, WRITE is true if
3126    the expression is being written to (it is on a LHS of a statement or output
3127    in an assembly statement).  */
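/* For example, a use of s.f that has a scalar replacement is simply rewritten
   to use the replacement decl; if the expression's type differs from the
   replacement's type (e.g. a vector accessed as a different type, PR42196),
   the value is instead transferred through a reference built by
   build_ref_for_model around the original expression.  */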
3128 
3129 static bool
3130 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
3131 {
3132   location_t loc;
3133   struct access *access;
3134   tree type, bfr, orig_expr;
3135 
3136   if (TREE_CODE (*expr) == BIT_FIELD_REF)
3137     {
3138       bfr = *expr;
3139       expr = &TREE_OPERAND (*expr, 0);
3140     }
3141   else
3142     bfr = NULL_TREE;
3143 
3144   if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
3145     expr = &TREE_OPERAND (*expr, 0);
3146   access = get_access_for_expr (*expr);
3147   if (!access)
3148     return false;
3149   type = TREE_TYPE (*expr);
3150   orig_expr = *expr;
3151 
3152   loc = gimple_location (gsi_stmt (*gsi));
3153   gimple_stmt_iterator alt_gsi = gsi_none ();
3154   if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
3155     {
3156       alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3157       gsi = &alt_gsi;
3158     }
3159 
3160   if (access->grp_to_be_replaced)
3161     {
3162       tree repl = get_access_replacement (access);
3163       /* If we replace a non-register typed access, simply use the original
3164          access expression to extract the scalar component afterwards.
3165 	 This happens if scalarizing a function return value or parameter
3166 	 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
3167 	 gcc.c-torture/compile/20011217-1.c.
3168 
3169          We also want to use this when accessing a complex or vector which can
3170          be accessed as a different type too, potentially creating a need for
3171          type conversion (see PR42196) and when scalarized unions are involved
3172          in assembler statements (see PR42398).  */
3173       if (!useless_type_conversion_p (type, access->type))
3174 	{
3175 	  tree ref;
3176 
3177 	  ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
3178 
3179 	  if (write)
3180 	    {
3181 	      gassign *stmt;
3182 
3183 	      if (access->grp_partial_lhs)
3184 		ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
3185 						 false, GSI_NEW_STMT);
3186 	      stmt = gimple_build_assign (repl, ref);
3187 	      gimple_set_location (stmt, loc);
3188 	      gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3189 	    }
3190 	  else
3191 	    {
3192 	      gassign *stmt;
3193 
3194 	      if (access->grp_partial_lhs)
3195 		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3196 						 true, GSI_SAME_STMT);
3197 	      stmt = gimple_build_assign (ref, repl);
3198 	      gimple_set_location (stmt, loc);
3199 	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3200 	    }
3201 	}
3202       else
3203 	*expr = repl;
3204       sra_stats.exprs++;
3205     }
3206   else if (write && access->grp_to_be_debug_replaced)
3207     {
3208       gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
3209 					    NULL_TREE,
3210 					    gsi_stmt (*gsi));
3211       gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3212     }
3213 
3214   if (access->first_child)
3215     {
3216       HOST_WIDE_INT start_offset, chunk_size;
3217       if (bfr
3218 	  && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
3219 	  && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
3220 	{
3221 	  chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
3222 	  start_offset = access->offset
3223 	    + tree_to_uhwi (TREE_OPERAND (bfr, 2));
3224 	}
3225       else
3226 	start_offset = chunk_size = 0;
3227 
3228       generate_subtree_copies (access->first_child, orig_expr, access->offset,
3229 			       start_offset, chunk_size, gsi, write, write,
3230 			       loc);
3231     }
3232   return true;
3233 }
3234 
3235 /* Where scalar replacements of the RHS have been written to when a replacement
3236    of the LHS of an assignment cannot be directly loaded from a replacement of
3237    the RHS.  */
3238 enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far. */
3239 				  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
3240 				  SRA_UDH_LEFT }; /* Data flushed to the LHS. */
3241 
3242 struct subreplacement_assignment_data
3243 {
3244   /* Offset of the access representing the lhs of the assignment.  */
3245   HOST_WIDE_INT left_offset;
3246 
3247   /* LHS and RHS of the original assignment.  */
3248   tree assignment_lhs, assignment_rhs;
3249 
3250   /* Access representing the rhs of the whole assignment.  */
3251   struct access *top_racc;
3252 
3253   /* Stmt iterator used for statement insertions after the original assignment.
3254    It points to the main GSI used to traverse a BB during function body
3255    modification.  */
3256   gimple_stmt_iterator *new_gsi;
3257 
3258   /* Stmt iterator used for statement insertions before the original
3259    assignment.  Keeps on pointing to the original statement.  */
3260   gimple_stmt_iterator old_gsi;
3261 
3262   /* Location of the assignment.   */
3263   location_t loc;
3264 
3265   /* Keeps the information whether we have needed to refresh replacements of
3266    the LHS and from which side of the assignments this takes place.  */
3267   enum unscalarized_data_handling refreshed;
3268 };
3269 
3270 /* Store all replacements in the access tree rooted in SAD->top_racc either to
3271    their base aggregate if there are unscalarized data or directly to the LHS
3272    of the original assignment statement otherwise.  */
3273 
3274 static void
3275 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
3276 {
3277   tree src;
3278   if (sad->top_racc->grp_unscalarized_data)
3279     {
3280       src = sad->assignment_rhs;
3281       sad->refreshed = SRA_UDH_RIGHT;
3282     }
3283   else
3284     {
3285       src = sad->assignment_lhs;
3286       sad->refreshed = SRA_UDH_LEFT;
3287     }
3288   generate_subtree_copies (sad->top_racc->first_child, src,
3289 			   sad->top_racc->offset, 0, 0,
3290 			   &sad->old_gsi, false, false, sad->loc);
3291 }
3292 
3293 /* Try to generate statements to load all sub-replacements in an access subtree
3294    formed by children of LACC from scalar replacements in the SAD->top_racc
3295    subtree.  If that is not possible, refresh the SAD->top_racc base aggregate
3296    and load the accesses from it.  */
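/* For illustration (hypothetical replacement names): given an aggregate copy
   "dst = src" where both sides are scalarized structures with a field "i",
   the traversal below emits "SR.dst$i = SR.src$i;" after the original
   statement.  When no matching replacement exists on the right hand side,
   the aggregate is refreshed first (see handle_unscalarized_data_in_subtree)
   and the left hand side replacement is loaded from memory instead.  */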
3297 
3298 static void
3299 load_assign_lhs_subreplacements (struct access *lacc,
3300 				 struct subreplacement_assignment_data *sad)
3301 {
3302   for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
3303     {
3304       HOST_WIDE_INT offset;
3305       offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
3306 
3307       if (lacc->grp_to_be_replaced)
3308 	{
3309 	  struct access *racc;
3310 	  gassign *stmt;
3311 	  tree rhs;
3312 
3313 	  racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
3314 	  if (racc && racc->grp_to_be_replaced)
3315 	    {
3316 	      rhs = get_access_replacement (racc);
3317 	      if (!useless_type_conversion_p (lacc->type, racc->type))
3318 		rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3319 				       lacc->type, rhs);
3320 
3321 	      if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3322 		rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3323 						NULL_TREE, true, GSI_SAME_STMT);
3324 	    }
3325 	  else
3326 	    {
3327 	      /* No suitable access on the right hand side, need to load from
3328 		 the aggregate.  See if we have to update it first... */
3329 	      if (sad->refreshed == SRA_UDH_NONE)
3330 		handle_unscalarized_data_in_subtree (sad);
3331 
3332 	      if (sad->refreshed == SRA_UDH_LEFT)
3333 		rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3334 					   lacc->offset - sad->left_offset,
3335 					   lacc, sad->new_gsi, true);
3336 	      else
3337 		rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3338 					   lacc->offset - sad->left_offset,
3339 					   lacc, sad->new_gsi, true);
3340 	      if (lacc->grp_partial_lhs)
3341 		rhs = force_gimple_operand_gsi (sad->new_gsi,
3342 						rhs, true, NULL_TREE,
3343 						false, GSI_NEW_STMT);
3344 	    }
3345 
3346 	  stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3347 	  gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3348 	  gimple_set_location (stmt, sad->loc);
3349 	  update_stmt (stmt);
3350 	  sra_stats.subreplacements++;
3351 	}
3352       else
3353 	{
3354 	  if (sad->refreshed == SRA_UDH_NONE
3355 	      && lacc->grp_read && !lacc->grp_covered)
3356 	    handle_unscalarized_data_in_subtree (sad);
3357 
3358 	  if (lacc && lacc->grp_to_be_debug_replaced)
3359 	    {
3360 	      gdebug *ds;
3361 	      tree drhs;
3362 	      struct access *racc = find_access_in_subtree (sad->top_racc,
3363 							    offset,
3364 							    lacc->size);
3365 
3366 	      if (racc && racc->grp_to_be_replaced)
3367 		{
3368 		  if (racc->grp_write || constant_decl_p (racc->base))
3369 		    drhs = get_access_replacement (racc);
3370 		  else
3371 		    drhs = NULL;
3372 		}
3373 	      else if (sad->refreshed == SRA_UDH_LEFT)
3374 		drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3375 						  lacc->offset, lacc);
3376 	      else if (sad->refreshed == SRA_UDH_RIGHT)
3377 		drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3378 						  offset, lacc);
3379 	      else
3380 		drhs = NULL_TREE;
3381 	      if (drhs
3382 		  && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3383 		drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3384 					lacc->type, drhs);
3385 	      ds = gimple_build_debug_bind (get_access_replacement (lacc),
3386 					    drhs, gsi_stmt (sad->old_gsi));
3387 	      gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3388 	    }
3389 	}
3390 
3391       if (lacc->first_child)
3392 	load_assign_lhs_subreplacements (lacc, sad);
3393     }
3394 }
3395 
3396 /* Result code for SRA assignment modification.  */
3397 enum assignment_mod_result { SRA_AM_NONE,       /* nothing done for the stmt */
3398 			     SRA_AM_MODIFIED,  /* stmt changed but not
3399 						  removed */
3400 			     SRA_AM_REMOVED };  /* stmt eliminated */
3401 
3402 /* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
3403    to the assignment and GSI is the statement iterator pointing at it.  Returns
3404    the same values as sra_modify_assign.  */
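/* For illustration (hypothetical GIMPLE): an empty-CONSTRUCTOR assignment
   such as "s = {}" where "s" is fully scalarized is turned into zero
   initialization of every scalar replacement, e.g. "SR.s$i = 0;", and the
   original aggregate store is then removed as dead.  */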
3405 
3406 static enum assignment_mod_result
3407 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3408 {
3409   tree lhs = gimple_assign_lhs (stmt);
3410   struct access *acc = get_access_for_expr (lhs);
3411   if (!acc)
3412     return SRA_AM_NONE;
3413   location_t loc = gimple_location (stmt);
3414 
3415   if (gimple_clobber_p (stmt))
3416     {
3417       /* Clobber the replacement variable.  */
3418       clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3419       /* Remove clobbers of fully scalarized variables, they are dead.  */
3420       if (acc->grp_covered)
3421 	{
3422 	  unlink_stmt_vdef (stmt);
3423 	  gsi_remove (gsi, true);
3424 	  release_defs (stmt);
3425 	  return SRA_AM_REMOVED;
3426 	}
3427       else
3428 	return SRA_AM_MODIFIED;
3429     }
3430 
3431   if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
3432     {
3433       /* I have never seen this code path trigger but if it can happen the
3434 	 following should handle it gracefully.  */
3435       if (access_has_children_p (acc))
3436 	generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3437 				 true, true, loc);
3438       return SRA_AM_MODIFIED;
3439     }
3440 
3441   if (acc->grp_covered)
3442     {
3443       init_subtree_with_zero (acc, gsi, false, loc);
3444       unlink_stmt_vdef (stmt);
3445       gsi_remove (gsi, true);
3446       release_defs (stmt);
3447       return SRA_AM_REMOVED;
3448     }
3449   else
3450     {
3451       init_subtree_with_zero (acc, gsi, true, loc);
3452       return SRA_AM_MODIFIED;
3453     }
3454 }
3455 
3456 /* Create and return a new suitable default definition SSA_NAME for RACC which
3457    is an access describing an uninitialized part of an aggregate that is being
3458    loaded.  */
3459 
3460 static tree
3461 get_repl_default_def_ssa_name (struct access *racc)
3462 {
3463   gcc_checking_assert (!racc->grp_to_be_replaced
3464 		       && !racc->grp_to_be_debug_replaced);
3465   if (!racc->replacement_decl)
3466     racc->replacement_decl = create_access_replacement (racc);
3467   return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3468 }
3469 
3470 /* Examine both sides of the assignment statement pointed to by STMT, replace
3471    them with a scalar replacement if there is one and generate copying of
3472    replacements if scalarized aggregates have been used in the assignment.  GSI
3473    is used to hold generated statements for type conversions and subtree
3474    copying.  */
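/* For illustration (hypothetical GIMPLE): a load "x_1 = s.f" where the
   access "s.f" has a scalar replacement may simply become "x_1 = SR.s$f",
   whereas an aggregate copy "d = s" may additionally require per-component
   copies generated around the original statement.  */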
3475 
3476 static enum assignment_mod_result
3477 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3478 {
3479   struct access *lacc, *racc;
3480   tree lhs, rhs;
3481   bool modify_this_stmt = false;
3482   bool force_gimple_rhs = false;
3483   location_t loc;
3484   gimple_stmt_iterator orig_gsi = *gsi;
3485 
3486   if (!gimple_assign_single_p (stmt))
3487     return SRA_AM_NONE;
3488   lhs = gimple_assign_lhs (stmt);
3489   rhs = gimple_assign_rhs1 (stmt);
3490 
3491   if (TREE_CODE (rhs) == CONSTRUCTOR)
3492     return sra_modify_constructor_assign (stmt, gsi);
3493 
3494   if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3495       || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3496       || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3497     {
3498       modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3499 					  gsi, false);
3500       modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3501 					   gsi, true);
3502       return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3503     }
3504 
3505   lacc = get_access_for_expr (lhs);
3506   racc = get_access_for_expr (rhs);
3507   if (!lacc && !racc)
3508     return SRA_AM_NONE;
3509   /* Avoid modifying initializations of constant-pool replacements.  */
3510   if (racc && (racc->replacement_decl == lhs))
3511     return SRA_AM_NONE;
3512 
3513   loc = gimple_location (stmt);
3514   if (lacc && lacc->grp_to_be_replaced)
3515     {
3516       lhs = get_access_replacement (lacc);
3517       gimple_assign_set_lhs (stmt, lhs);
3518       modify_this_stmt = true;
3519       if (lacc->grp_partial_lhs)
3520 	force_gimple_rhs = true;
3521       sra_stats.exprs++;
3522     }
3523 
3524   if (racc && racc->grp_to_be_replaced)
3525     {
3526       rhs = get_access_replacement (racc);
3527       modify_this_stmt = true;
3528       if (racc->grp_partial_lhs)
3529 	force_gimple_rhs = true;
3530       sra_stats.exprs++;
3531     }
3532   else if (racc
3533 	   && !racc->grp_unscalarized_data
3534 	   && !racc->grp_unscalarizable_region
3535 	   && TREE_CODE (lhs) == SSA_NAME
3536 	   && !access_has_replacements_p (racc))
3537     {
3538       rhs = get_repl_default_def_ssa_name (racc);
3539       modify_this_stmt = true;
3540       sra_stats.exprs++;
3541     }
3542 
3543   if (modify_this_stmt)
3544     {
3545       if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3546 	{
3547 	  /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3548 	     ???  This should move to fold_stmt which we simply should
3549 	     call after building a VIEW_CONVERT_EXPR here.  */
3550 	  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3551 	      && !contains_bitfld_component_ref_p (lhs))
3552 	    {
3553 	      lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3554 	      gimple_assign_set_lhs (stmt, lhs);
3555 	    }
3556 	  else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3557 		   && !contains_vce_or_bfcref_p (rhs))
3558 	    rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3559 
3560 	  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3561 	    {
3562 	      rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3563 				     rhs);
3564 	      if (is_gimple_reg_type (TREE_TYPE (lhs))
3565 		  && TREE_CODE (lhs) != SSA_NAME)
3566 		force_gimple_rhs = true;
3567 	    }
3568 	}
3569     }
3570 
3571   if (lacc && lacc->grp_to_be_debug_replaced)
3572     {
3573       tree dlhs = get_access_replacement (lacc);
3574       tree drhs = unshare_expr (rhs);
3575       if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3576 	{
3577 	  if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3578 	      && !contains_vce_or_bfcref_p (drhs))
3579 	    drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3580 	  if (drhs
3581 	      && !useless_type_conversion_p (TREE_TYPE (dlhs),
3582 					     TREE_TYPE (drhs)))
3583 	    drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3584 				    TREE_TYPE (dlhs), drhs);
3585 	}
3586       gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3587       gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3588     }
3589 
3590   /* From this point on, the function deals with assignments in between
3591      aggregates when at least one has scalar reductions of some of its
3592      components.  There are three possible scenarios: 1) both the LHS and RHS have
3593      to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3594 
3595      In the first case, we would like to load the LHS components from RHS
3596      components whenever possible.  If that is not possible, we would like to
3597      read it directly from the RHS (after updating it by storing in it its own
3598      components).  If there are some necessary unscalarized data in the LHS,
3599      those will be loaded by the original assignment too.  If neither of these
3600      cases happen, the original statement can be removed.  Most of this is done
3601      by load_assign_lhs_subreplacements.
3602 
3603      In the second case, we would like to store all RHS scalarized components
3604      directly into LHS and if they cover the aggregate completely, remove the
3605      statement too.  In the third case, we want the LHS components to be loaded
3606      directly from the RHS (DSE will remove the original statement if it
3607      becomes redundant).
3608 
3609      This is a bit complex but manageable when types match and when unions do
3610      not cause confusion in a way that we cannot really load a component of LHS
3611      from the RHS or vice versa (the access representing this level can have
3612      subaccesses that are accessible only through a different union field at a
3613      higher level - different from the one used in the examined expression).
3614      Unions are fun.
3615 
3616      Therefore, I specially handle a fourth case, happening when there is a
3617      specific type cast or it is impossible to locate a scalarized subaccess on
3618      the other side of the expression.  If that happens, I simply "refresh" the
3619      RHS by storing its scalarized components in it, leave the original statement
3620      there to do the copying and then load the scalar replacements of the LHS.
3621      This is what the first branch does.  */
3622 
3623   if (modify_this_stmt
3624       || gimple_has_volatile_ops (stmt)
3625       || contains_vce_or_bfcref_p (rhs)
3626       || contains_vce_or_bfcref_p (lhs)
3627       || stmt_ends_bb_p (stmt))
3628     {
3629       /* No need to copy into a constant-pool, it comes pre-initialized.  */
3630       if (access_has_children_p (racc) && !constant_decl_p (racc->base))
3631 	generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3632 				 gsi, false, false, loc);
3633       if (access_has_children_p (lacc))
3634 	{
3635 	  gimple_stmt_iterator alt_gsi = gsi_none ();
3636 	  if (stmt_ends_bb_p (stmt))
3637 	    {
3638 	      alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3639 	      gsi = &alt_gsi;
3640 	    }
3641 	  generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3642 				   gsi, true, true, loc);
3643 	}
3644       sra_stats.separate_lhs_rhs_handling++;
3645 
3646       /* This gimplification must be done after generate_subtree_copies,
3647 	 lest we insert the subtree copies in the middle of the gimplified
3648 	 sequence.  */
3649       if (force_gimple_rhs)
3650 	rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3651 					true, GSI_SAME_STMT);
3652       if (gimple_assign_rhs1 (stmt) != rhs)
3653 	{
3654 	  modify_this_stmt = true;
3655 	  gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3656 	  gcc_assert (stmt == gsi_stmt (orig_gsi));
3657 	}
3658 
3659       return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3660     }
3661   else
3662     {
3663       if (access_has_children_p (lacc)
3664 	  && access_has_children_p (racc)
3665 	  /* When an access represents an unscalarizable region, it usually
3666 	     represents accesses with variable offset and thus must not be used
3667 	     to generate new memory accesses.  */
3668 	  && !lacc->grp_unscalarizable_region
3669 	  && !racc->grp_unscalarizable_region)
3670 	{
3671 	  struct subreplacement_assignment_data sad;
3672 
3673 	  sad.left_offset = lacc->offset;
3674 	  sad.assignment_lhs = lhs;
3675 	  sad.assignment_rhs = rhs;
3676 	  sad.top_racc = racc;
3677 	  sad.old_gsi = *gsi;
3678 	  sad.new_gsi = gsi;
3679 	  sad.loc = gimple_location (stmt);
3680 	  sad.refreshed = SRA_UDH_NONE;
3681 
3682 	  if (lacc->grp_read && !lacc->grp_covered)
3683 	    handle_unscalarized_data_in_subtree (&sad);
3684 
3685 	  load_assign_lhs_subreplacements (lacc, &sad);
3686 	  if (sad.refreshed != SRA_UDH_RIGHT)
3687 	    {
3688 	      gsi_next (gsi);
3689 	      unlink_stmt_vdef (stmt);
3690 	      gsi_remove (&sad.old_gsi, true);
3691 	      release_defs (stmt);
3692 	      sra_stats.deleted++;
3693 	      return SRA_AM_REMOVED;
3694 	    }
3695 	}
3696       else
3697 	{
3698 	  if (access_has_children_p (racc)
3699 	      && !racc->grp_unscalarized_data
3700 	      && TREE_CODE (lhs) != SSA_NAME)
3701 	    {
3702 	      if (dump_file)
3703 		{
3704 		  fprintf (dump_file, "Removing load: ");
3705 		  print_gimple_stmt (dump_file, stmt, 0);
3706 		}
3707 	      generate_subtree_copies (racc->first_child, lhs,
3708 				       racc->offset, 0, 0, gsi,
3709 				       false, false, loc);
3710 	      gcc_assert (stmt == gsi_stmt (*gsi));
3711 	      unlink_stmt_vdef (stmt);
3712 	      gsi_remove (gsi, true);
3713 	      release_defs (stmt);
3714 	      sra_stats.deleted++;
3715 	      return SRA_AM_REMOVED;
3716 	    }
3717 	  /* Restore the aggregate RHS from its components so the
3718 	     prevailing aggregate copy does the right thing.  */
3719 	  if (access_has_children_p (racc))
3720 	    generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3721 				     gsi, false, false, loc);
3722 	  /* Re-load the components of the aggregate copy destination.
3723 	     But use the RHS aggregate to load from to expose more
3724 	     optimization opportunities.  */
3725 	  if (access_has_children_p (lacc))
3726 	    generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3727 				     0, 0, gsi, true, true, loc);
3728 	}
3729 
3730       return SRA_AM_NONE;
3731     }
3732 }
3733 
3734 /* Set any scalar replacements of values in the constant pool to the initial
3735    value of the constant.  (Constant-pool decls like *.LC0 have effectively
3736    been initialized before the program starts, we must do the same for their
3737    replacements.)  Thus, we output statements like 'SR.1 = *.LC0[0];' into
3738    the function's entry block.  */
3739 
3740 static void
3741 initialize_constant_pool_replacements (void)
3742 {
3743   gimple_seq seq = NULL;
3744   gimple_stmt_iterator gsi = gsi_start (seq);
3745   bitmap_iterator bi;
3746   unsigned i;
3747 
3748   EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
3749     {
3750       tree var = candidate (i);
3751       if (!constant_decl_p (var))
3752 	continue;
3753       vec<access_p> *access_vec = get_base_access_vector (var);
3754       if (!access_vec)
3755 	continue;
3756       for (unsigned i = 0; i < access_vec->length (); i++)
3757 	{
3758 	  struct access *access = (*access_vec)[i];
3759 	  if (!access->replacement_decl)
3760 	    continue;
3761 	  gassign *stmt
3762 	    = gimple_build_assign (get_access_replacement (access),
3763 				   unshare_expr (access->expr));
3764 	  if (dump_file && (dump_flags & TDF_DETAILS))
3765 	    {
3766 	      fprintf (dump_file, "Generating constant initializer: ");
3767 	      print_gimple_stmt (dump_file, stmt, 0);
3768 	      fprintf (dump_file, "\n");
3769 	    }
3770 	  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3771 	  update_stmt (stmt);
3772 	}
3773     }
3774 
3775   seq = gsi_seq (gsi);
3776   if (seq)
3777     gsi_insert_seq_on_edge_immediate (
3778       single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3779 }
3780 
3781 /* Traverse the function body and perform all modifications as decided in
3782    analyze_all_variable_accesses.  Return true iff the CFG has been
3783    changed.  */
3784 
3785 static bool
3786 sra_modify_function_body (void)
3787 {
3788   bool cfg_changed = false;
3789   basic_block bb;
3790 
3791   initialize_constant_pool_replacements ();
3792 
3793   FOR_EACH_BB_FN (bb, cfun)
3794     {
3795       gimple_stmt_iterator gsi = gsi_start_bb (bb);
3796       while (!gsi_end_p (gsi))
3797 	{
3798 	  gimple *stmt = gsi_stmt (gsi);
3799 	  enum assignment_mod_result assign_result;
3800 	  bool modified = false, deleted = false;
3801 	  tree *t;
3802 	  unsigned i;
3803 
3804 	  switch (gimple_code (stmt))
3805 	    {
3806 	    case GIMPLE_RETURN:
3807 	      t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3808 	      if (*t != NULL_TREE)
3809 		modified |= sra_modify_expr (t, &gsi, false);
3810 	      break;
3811 
3812 	    case GIMPLE_ASSIGN:
3813 	      assign_result = sra_modify_assign (stmt, &gsi);
3814 	      modified |= assign_result == SRA_AM_MODIFIED;
3815 	      deleted = assign_result == SRA_AM_REMOVED;
3816 	      break;
3817 
3818 	    case GIMPLE_CALL:
3819 	      /* Operands must be processed before the lhs.  */
3820 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
3821 		{
3822 		  t = gimple_call_arg_ptr (stmt, i);
3823 		  modified |= sra_modify_expr (t, &gsi, false);
3824 		}
3825 
3826 	      if (gimple_call_lhs (stmt))
3827 		{
3828 		  t = gimple_call_lhs_ptr (stmt);
3829 		  modified |= sra_modify_expr (t, &gsi, true);
3830 		}
3831 	      break;
3832 
3833 	    case GIMPLE_ASM:
3834 	      {
3835 		gasm *asm_stmt = as_a <gasm *> (stmt);
3836 		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3837 		  {
3838 		    t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3839 		    modified |= sra_modify_expr (t, &gsi, false);
3840 		  }
3841 		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3842 		  {
3843 		    t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3844 		    modified |= sra_modify_expr (t, &gsi, true);
3845 		  }
3846 	      }
3847 	      break;
3848 
3849 	    default:
3850 	      break;
3851 	    }
3852 
3853 	  if (modified)
3854 	    {
3855 	      update_stmt (stmt);
3856 	      if (maybe_clean_eh_stmt (stmt)
3857 		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3858 		cfg_changed = true;
3859 	    }
3860 	  if (!deleted)
3861 	    gsi_next (&gsi);
3862 	}
3863     }
3864 
3865   gsi_commit_edge_inserts ();
3866   return cfg_changed;
3867 }
3868 
3869 /* Generate statements initializing scalar replacements of parts of function
3870    parameters.  */
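/* For illustration (hypothetical names): for a parameter "struct S p" whose
   field "f" received a scalar replacement, a statement along the lines of
   "SR.p$f = p.f;" is placed on the single edge leaving the entry block, so
   the replacement starts out with the value supplied by the caller.  */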
3871 
3872 static void
3873 initialize_parameter_reductions (void)
3874 {
3875   gimple_stmt_iterator gsi;
3876   gimple_seq seq = NULL;
3877   tree parm;
3878 
3879   gsi = gsi_start (seq);
3880   for (parm = DECL_ARGUMENTS (current_function_decl);
3881        parm;
3882        parm = DECL_CHAIN (parm))
3883     {
3884       vec<access_p> *access_vec;
3885       struct access *access;
3886 
3887       if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3888 	continue;
3889       access_vec = get_base_access_vector (parm);
3890       if (!access_vec)
3891 	continue;
3892 
3893       for (access = (*access_vec)[0];
3894 	   access;
3895 	   access = access->next_grp)
3896 	generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3897 				 EXPR_LOCATION (parm));
3898     }
3899 
3900   seq = gsi_seq (gsi);
3901   if (seq)
3902     gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3903 }
3904 
3905 /* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
3906    it reveals there are components of some aggregates to be scalarized, it runs
3907    the required transformations.  */
3908 static unsigned int
3909 perform_intra_sra (void)
3910 {
3911   int ret = 0;
3912   sra_initialize ();
3913 
3914   if (!find_var_candidates ())
3915     goto out;
3916 
3917   if (!scan_function ())
3918     goto out;
3919 
3920   if (!analyze_all_variable_accesses ())
3921     goto out;
3922 
3923   if (sra_modify_function_body ())
3924     ret = TODO_update_ssa | TODO_cleanup_cfg;
3925   else
3926     ret = TODO_update_ssa;
3927   initialize_parameter_reductions ();
3928 
3929   statistics_counter_event (cfun, "Scalar replacements created",
3930 			    sra_stats.replacements);
3931   statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3932   statistics_counter_event (cfun, "Subtree copy stmts",
3933 			    sra_stats.subtree_copies);
3934   statistics_counter_event (cfun, "Subreplacement stmts",
3935 			    sra_stats.subreplacements);
3936   statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3937   statistics_counter_event (cfun, "Separate LHS and RHS handling",
3938 			    sra_stats.separate_lhs_rhs_handling);
3939 
3940  out:
3941   sra_deinitialize ();
3942   return ret;
3943 }
3944 
3945 /* Perform early intraprocedural SRA.  */
3946 static unsigned int
3947 early_intra_sra (void)
3948 {
3949   sra_mode = SRA_MODE_EARLY_INTRA;
3950   return perform_intra_sra ();
3951 }
3952 
3953 /* Perform "late" intraprocedural SRA.  */
3954 static unsigned int
3955 late_intra_sra (void)
3956 {
3957   sra_mode = SRA_MODE_INTRA;
3958   return perform_intra_sra ();
3959 }
3960 
3961 
3962 static bool
3963 gate_intra_sra (void)
3964 {
3965   return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3966 }
3967 
3968 
3969 namespace {
3970 
3971 const pass_data pass_data_sra_early =
3972 {
3973   GIMPLE_PASS, /* type */
3974   "esra", /* name */
3975   OPTGROUP_NONE, /* optinfo_flags */
3976   TV_TREE_SRA, /* tv_id */
3977   ( PROP_cfg | PROP_ssa ), /* properties_required */
3978   0, /* properties_provided */
3979   0, /* properties_destroyed */
3980   0, /* todo_flags_start */
3981   TODO_update_ssa, /* todo_flags_finish */
3982 };
3983 
3984 class pass_sra_early : public gimple_opt_pass
3985 {
3986 public:
3987   pass_sra_early (gcc::context *ctxt)
3988     : gimple_opt_pass (pass_data_sra_early, ctxt)
3989   {}
3990 
3991   /* opt_pass methods: */
3992   virtual bool gate (function *) { return gate_intra_sra (); }
3993   virtual unsigned int execute (function *) { return early_intra_sra (); }
3994 
3995 }; // class pass_sra_early
3996 
3997 } // anon namespace
3998 
3999 gimple_opt_pass *
4000 make_pass_sra_early (gcc::context *ctxt)
4001 {
4002   return new pass_sra_early (ctxt);
4003 }
4004 
4005 namespace {
4006 
4007 const pass_data pass_data_sra =
4008 {
4009   GIMPLE_PASS, /* type */
4010   "sra", /* name */
4011   OPTGROUP_NONE, /* optinfo_flags */
4012   TV_TREE_SRA, /* tv_id */
4013   ( PROP_cfg | PROP_ssa ), /* properties_required */
4014   0, /* properties_provided */
4015   0, /* properties_destroyed */
4016   TODO_update_address_taken, /* todo_flags_start */
4017   TODO_update_ssa, /* todo_flags_finish */
4018 };
4019 
4020 class pass_sra : public gimple_opt_pass
4021 {
4022 public:
4023   pass_sra (gcc::context *ctxt)
4024     : gimple_opt_pass (pass_data_sra, ctxt)
4025   {}
4026 
4027   /* opt_pass methods: */
4028   virtual bool gate (function *) { return gate_intra_sra (); }
4029   virtual unsigned int execute (function *) { return late_intra_sra (); }
4030 
4031 }; // class pass_sra
4032 
4033 } // anon namespace
4034 
4035 gimple_opt_pass *
4036 make_pass_sra (gcc::context *ctxt)
4037 {
4038   return new pass_sra (ctxt);
4039 }
4040 
4041 
4042 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
4043    parameter.  */
4044 
4045 static bool
4046 is_unused_scalar_param (tree parm)
4047 {
4048   tree name;
4049   return (is_gimple_reg (parm)
4050 	  && (!(name = ssa_default_def (cfun, parm))
4051 	      || has_zero_uses (name)));
4052 }
4053 
4054 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
4055    examine whether there are any direct or otherwise infeasible ones.  If so,
4056    return true, otherwise return false.  PARM must be a gimple register with a
4057    non-NULL default definition.  */
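/* For illustration: dereferences such as "... = *p" or "*p = ..." (including
   "*p" used as a call argument) are the only uses accepted below; anything
   that uses the pointer value itself, e.g. "q_1 = p_2(D)" or "foo (p_2(D))",
   counts as a direct use and makes the parameter ineligible.  */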
4058 
4059 static bool
4060 ptr_parm_has_direct_uses (tree parm)
4061 {
4062   imm_use_iterator ui;
4063   gimple *stmt;
4064   tree name = ssa_default_def (cfun, parm);
4065   bool ret = false;
4066 
4067   FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4068     {
4069       int uses_ok = 0;
4070       use_operand_p use_p;
4071 
4072       if (is_gimple_debug (stmt))
4073 	continue;
4074 
4075       /* Valid uses include dereferences on the lhs and the rhs.  */
4076       if (gimple_has_lhs (stmt))
4077 	{
4078 	  tree lhs = gimple_get_lhs (stmt);
4079 	  while (handled_component_p (lhs))
4080 	    lhs = TREE_OPERAND (lhs, 0);
4081 	  if (TREE_CODE (lhs) == MEM_REF
4082 	      && TREE_OPERAND (lhs, 0) == name
4083 	      && integer_zerop (TREE_OPERAND (lhs, 1))
4084 	      && types_compatible_p (TREE_TYPE (lhs),
4085 				     TREE_TYPE (TREE_TYPE (name)))
4086 	      && !TREE_THIS_VOLATILE (lhs))
4087 	    uses_ok++;
4088 	}
4089       if (gimple_assign_single_p (stmt))
4090 	{
4091 	  tree rhs = gimple_assign_rhs1 (stmt);
4092 	  while (handled_component_p (rhs))
4093 	    rhs = TREE_OPERAND (rhs, 0);
4094 	  if (TREE_CODE (rhs) == MEM_REF
4095 	      && TREE_OPERAND (rhs, 0) == name
4096 	      && integer_zerop (TREE_OPERAND (rhs, 1))
4097 	      && types_compatible_p (TREE_TYPE (rhs),
4098 				     TREE_TYPE (TREE_TYPE (name)))
4099 	      && !TREE_THIS_VOLATILE (rhs))
4100 	    uses_ok++;
4101 	}
4102       else if (is_gimple_call (stmt))
4103 	{
4104 	  unsigned i;
4105 	  for (i = 0; i < gimple_call_num_args (stmt); ++i)
4106 	    {
4107 	      tree arg = gimple_call_arg (stmt, i);
4108 	      while (handled_component_p (arg))
4109 		arg = TREE_OPERAND (arg, 0);
4110 	      if (TREE_CODE (arg) == MEM_REF
4111 		  && TREE_OPERAND (arg, 0) == name
4112 		  && integer_zerop (TREE_OPERAND (arg, 1))
4113 		  && types_compatible_p (TREE_TYPE (arg),
4114 					 TREE_TYPE (TREE_TYPE (name)))
4115 		  && !TREE_THIS_VOLATILE (arg))
4116 		uses_ok++;
4117 	    }
4118 	}
4119 
4120       /* If the number of valid uses does not match the number of
4121          uses in this stmt there is an unhandled use.  */
4122       FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4123 	--uses_ok;
4124 
4125       if (uses_ok != 0)
4126 	ret = true;
4127 
4128       if (ret)
4129 	BREAK_FROM_IMM_USE_STMT (ui);
4130     }
4131 
4132   return ret;
4133 }
4134 
4135 /* Identify candidates for reduction for IPA-SRA based on their type and mark
4136    them in candidate_bitmap.  Note that these do not necessarily include
4137    parameters which are unused and thus can be removed.  Return true iff any
4138    such candidate has been found.  */
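/* For illustration: an aggregate passed by value, or a pointer parameter that
   is only ever dereferenced, may become a candidate here; volatile,
   address-taken, va_list or incomplete-type parameters are rejected by the
   checks below.  */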
4139 
4140 static bool
4141 find_param_candidates (void)
4142 {
4143   tree parm;
4144   int count = 0;
4145   bool ret = false;
4146   const char *msg;
4147 
4148   for (parm = DECL_ARGUMENTS (current_function_decl);
4149        parm;
4150        parm = DECL_CHAIN (parm))
4151     {
4152       tree type = TREE_TYPE (parm);
4153       tree_node **slot;
4154 
4155       count++;
4156 
4157       if (TREE_THIS_VOLATILE (parm)
4158 	  || TREE_ADDRESSABLE (parm)
4159 	  || (!is_gimple_reg_type (type) && is_va_list_type (type)))
4160 	continue;
4161 
4162       if (is_unused_scalar_param (parm))
4163 	{
4164 	  ret = true;
4165 	  continue;
4166 	}
4167 
4168       if (POINTER_TYPE_P (type))
4169 	{
4170 	  type = TREE_TYPE (type);
4171 
4172 	  if (TREE_CODE (type) == FUNCTION_TYPE
4173 	      || TYPE_VOLATILE (type)
4174 	      || (TREE_CODE (type) == ARRAY_TYPE
4175 		  && TYPE_NONALIASED_COMPONENT (type))
4176 	      || !is_gimple_reg (parm)
4177 	      || is_va_list_type (type)
4178 	      || ptr_parm_has_direct_uses (parm))
4179 	    continue;
4180 	}
4181       else if (!AGGREGATE_TYPE_P (type))
4182 	continue;
4183 
4184       if (!COMPLETE_TYPE_P (type)
4185 	  || !tree_fits_uhwi_p (TYPE_SIZE (type))
4186           || tree_to_uhwi (TYPE_SIZE (type)) == 0
4187 	  || (AGGREGATE_TYPE_P (type)
4188 	      && type_internals_preclude_sra_p (type, &msg)))
4189 	continue;
4190 
4191       bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
4192       slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
4193       *slot = parm;
4194 
4195       ret = true;
4196       if (dump_file && (dump_flags & TDF_DETAILS))
4197 	{
4198 	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
4199 	  print_generic_expr (dump_file, parm);
4200 	  fprintf (dump_file, "\n");
4201 	}
4202     }
4203 
4204   func_param_count = count;
4205   return ret;
4206 }
4207 
4208 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
4209    maybe_modified. */
4210 
4211 static bool
4212 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
4213 		     void *data)
4214 {
4215   struct access *repr = (struct access *) data;
4216 
4217   repr->grp_maybe_modified = 1;
4218   return true;
4219 }
4220 
4221 /* Analyze what representatives (in linked lists accessible from
4222    REPRESENTATIVES) can be modified by side effects of statements in the
4223    current function.  */
4224 
4225 static void
4226 analyze_modified_params (vec<access_p> representatives)
4227 {
4228   int i;
4229 
4230   for (i = 0; i < func_param_count; i++)
4231     {
4232       struct access *repr;
4233 
4234       for (repr = representatives[i];
4235 	   repr;
4236 	   repr = repr->next_grp)
4237 	{
4238 	  struct access *access;
4239 	  bitmap visited;
4240 	  ao_ref ar;
4241 
4242 	  if (no_accesses_p (repr))
4243 	    continue;
4244 	  if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
4245 	      || repr->grp_maybe_modified)
4246 	    continue;
4247 
4248 	  ao_ref_init (&ar, repr->expr);
4249 	  visited = BITMAP_ALLOC (NULL);
4250 	  for (access = repr; access; access = access->next_sibling)
4251 	    {
4252 	      /* All accesses are read ones, otherwise grp_maybe_modified would
4253 		 be trivially set.  */
4254 	      walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
4255 				  mark_maybe_modified, repr, &visited);
4256 	      if (repr->grp_maybe_modified)
4257 		break;
4258 	    }
4259 	  BITMAP_FREE (visited);
4260 	}
4261     }
4262 }
4263 
4264 /* Propagate distances in bb_dereferences in the opposite direction than the
4265    control flow edges, in each step storing the maximum of the current value
4266    and the minimum of all successors.  These steps are repeated until the table
4267    stabilizes.  Note that BBs which might terminate the function (according to
4268    the final_bbs bitmap) are never updated in this way.  */
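/* For illustration (hypothetical numbers): if every successor of a block BB
   certainly dereferences parameter I up to distance 8, BB inherits the value
   8 as well, because the dereference happens on every path leaving BB; if
   one successor only guarantees distance 4, the minimum, 4, is inherited
   instead.  */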
4269 
4270 static void
4271 propagate_dereference_distances (void)
4272 {
4273   basic_block bb;
4274 
4275   auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
4276   queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4277   FOR_EACH_BB_FN (bb, cfun)
4278     {
4279       queue.quick_push (bb);
4280       bb->aux = bb;
4281     }
4282 
4283   while (!queue.is_empty ())
4284     {
4285       edge_iterator ei;
4286       edge e;
4287       bool change = false;
4288       int i;
4289 
4290       bb = queue.pop ();
4291       bb->aux = NULL;
4292 
4293       if (bitmap_bit_p (final_bbs, bb->index))
4294 	continue;
4295 
4296       for (i = 0; i < func_param_count; i++)
4297 	{
4298 	  int idx = bb->index * func_param_count + i;
4299 	  bool first = true;
4300 	  HOST_WIDE_INT inh = 0;
4301 
4302 	  FOR_EACH_EDGE (e, ei, bb->succs)
4303 	  {
4304 	    int succ_idx = e->dest->index * func_param_count + i;
4305 
4306 	    if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
4307 	      continue;
4308 
4309 	    if (first)
4310 	      {
4311 		first = false;
4312 		inh = bb_dereferences [succ_idx];
4313 	      }
4314 	    else if (bb_dereferences [succ_idx] < inh)
4315 	      inh = bb_dereferences [succ_idx];
4316 	  }
4317 
4318 	  if (!first && bb_dereferences[idx] < inh)
4319 	    {
4320 	      bb_dereferences[idx] = inh;
4321 	      change = true;
4322 	    }
4323 	}
4324 
4325       if (change && !bitmap_bit_p (final_bbs, bb->index))
4326 	FOR_EACH_EDGE (e, ei, bb->preds)
4327 	  {
4328 	    if (e->src->aux)
4329 	      continue;
4330 
4331 	    e->src->aux = e->src;
4332 	    queue.quick_push (e->src);
4333 	  }
4334     }
4335 }
4336 
4337 /* Dump a dereferences TABLE with heading STR to file F.  */
4338 
4339 static void
4340 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
4341 {
4342   basic_block bb;
4343 
4344   fprintf (dump_file, "%s", str);
4345   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
4346 		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
4347     {
4348       fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
4349       if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4350 	{
4351 	  int i;
4352 	  for (i = 0; i < func_param_count; i++)
4353 	    {
4354 	      int idx = bb->index * func_param_count + i;
4355 	      fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
4356 	    }
4357 	}
4358       fprintf (f, "\n");
4359     }
4360   fprintf (dump_file, "\n");
4361 }
4362 
4363 /* Determine which (parts of) parameters passed by reference and not
4364    assigned to are not certainly dereferenced in this function and thus the
4365    dereferencing cannot be safely moved to the caller without potentially
4366    introducing a segfault.  Mark such REPRESENTATIVES as
4367    grp_not_necessarilly_dereferenced.
4368 
4369    The maximum dereferenced "distance," i.e. the offset + size of the accessed
4370    part, is calculated for each pointer parameter rather than a simple boolean,
4371    in order to handle cases when only a fraction of the whole
4372    aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
4373    an example).
4374 
4375    The maximum dereference distances for each pointer parameter and BB are
4376    already stored in bb_dereferences.  This routine simply propagates these
4377    values upwards by propagate_dereference_distances and then compares the
4378    distances of individual parameters in the ENTRY BB to the equivalent
4379    distances of each representative of a (fraction of a) parameter.  */
4380 
4381 static void
4382 analyze_caller_dereference_legality (vec<access_p> representatives)
4383 {
4384   int i;
4385 
4386   if (dump_file && (dump_flags & TDF_DETAILS))
4387     dump_dereferences_table (dump_file,
4388 			     "Dereference table before propagation:\n",
4389 			     bb_dereferences);
4390 
4391   propagate_dereference_distances ();
4392 
4393   if (dump_file && (dump_flags & TDF_DETAILS))
4394     dump_dereferences_table (dump_file,
4395 			     "Dereference table after propagation:\n",
4396 			     bb_dereferences);
4397 
4398   for (i = 0; i < func_param_count; i++)
4399     {
4400       struct access *repr = representatives[i];
4401       int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
4402 
4403       if (!repr || no_accesses_p (repr))
4404 	continue;
4405 
4406       do
4407 	{
4408 	  if ((repr->offset + repr->size) > bb_dereferences[idx])
4409 	    repr->grp_not_necessarilly_dereferenced = 1;
4410 	  repr = repr->next_grp;
4411 	}
4412       while (repr);
4413     }
4414 }
4415 
4416 /* Return the representative access for the parameter declaration PARM if it is
4417    a scalar passed by reference which is not written to and the pointer value
4418    is not used directly.  Thus, if it is legal to dereference it in the caller
4419    and we can rule out modifications through aliases, such parameter should be
4420    turned into one passed by value.  Return NULL otherwise.  */
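/* For illustration (hypothetical source): for "int f (int *p)" which only
   ever reads "*p", the single access group built below lets IPA-SRA later
   turn the parameter into a scalar passed by value, provided the dereference
   is legal in all callers and no modification through aliases is possible.  */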
4421 
4422 static struct access *
4423 unmodified_by_ref_scalar_representative (tree parm)
4424 {
4425   int i, access_count;
4426   struct access *repr;
4427   vec<access_p> *access_vec;
4428 
4429   access_vec = get_base_access_vector (parm);
4430   gcc_assert (access_vec);
4431   repr = (*access_vec)[0];
4432   if (repr->write)
4433     return NULL;
4434   repr->group_representative = repr;
4435 
4436   access_count = access_vec->length ();
4437   for (i = 1; i < access_count; i++)
4438     {
4439       struct access *access = (*access_vec)[i];
4440       if (access->write)
4441 	return NULL;
4442       access->group_representative = repr;
4443       access->next_sibling = repr->next_sibling;
4444       repr->next_sibling = access;
4445     }
4446 
4447   repr->grp_read = 1;
4448   repr->grp_scalar_ptr = 1;
4449   return repr;
4450 }
4451 
4452 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4453    associated with.  REQ_ALIGN is the minimum required alignment.  */
4454 
4455 static bool
4456 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4457 {
4458   unsigned int exp_align;
4459   /* Avoid issues such as the second simple testcase in PR 42025.  The problem
4460      is an incompatible assignment in a call statement (and possibly even in asm
4461      statements).  This can be relaxed by using a new temporary but only for
4462      non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4463      intraprocedural SRA we deal with this by keeping the old aggregate around,
4464      something we cannot do in IPA-SRA.)  */
4465   if (access->write
4466       && (is_gimple_call (access->stmt)
4467 	  || gimple_code (access->stmt) == GIMPLE_ASM))
4468     return true;
4469 
4470   exp_align = get_object_alignment (access->expr);
4471   if (exp_align < req_align)
4472     return true;
4473 
4474   return false;
4475 }
4476 
4477 
4478 /* Sort collected accesses for parameter PARM, identify representatives for
4479    each accessed region and link them together.  Return NULL if there are
4480    different but overlapping accesses, return the special pointer value if
4481    there are no accesses for this parameter at all, and return the first
4482    representative otherwise.  Set *RO_GRP if there is a group of accesses with
4483    only read (i.e. no write) accesses.  */
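/* For illustration (hypothetical offsets): accesses covering bits 0-31 and
   32-63 of the same parameter form two separate groups and splice fine,
   whereas accesses covering bits 0-31 and 16-47 overlap only partially, so
   the function gives up and returns NULL for that parameter.  */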
4484 
4485 static struct access *
4486 splice_param_accesses (tree parm, bool *ro_grp)
4487 {
4488   int i, j, access_count, group_count;
4489   int total_size = 0;
4490   struct access *access, *res, **prev_acc_ptr = &res;
4491   vec<access_p> *access_vec;
4492 
4493   access_vec = get_base_access_vector (parm);
4494   if (!access_vec)
4495     return &no_accesses_representant;
4496   access_count = access_vec->length ();
4497 
4498   access_vec->qsort (compare_access_positions);
4499 
4500   i = 0;
4501   total_size = 0;
4502   group_count = 0;
4503   while (i < access_count)
4504     {
4505       bool modification;
4506       tree a1_alias_type;
4507       access = (*access_vec)[i];
4508       modification = access->write;
4509       if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4510 	return NULL;
4511       a1_alias_type = reference_alias_ptr_type (access->expr);
4512 
4513       /* Access is about to become group representative unless we find some
4514 	 nasty overlap which would preclude us from breaking this parameter
4515 	 apart. */
4516 
4517       j = i + 1;
4518       while (j < access_count)
4519 	{
4520 	  struct access *ac2 = (*access_vec)[j];
4521 	  if (ac2->offset != access->offset)
4522 	    {
4523 	      /* All or nothing law for parameters. */
4524 	      if (access->offset + access->size > ac2->offset)
4525 		return NULL;
4526 	      else
4527 		break;
4528 	    }
4529 	  else if (ac2->size != access->size)
4530 	    return NULL;
4531 
4532 	  if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4533 	      || (ac2->type != access->type
4534 		  && (TREE_ADDRESSABLE (ac2->type)
4535 		      || TREE_ADDRESSABLE (access->type)))
4536 	      || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4537 	    return NULL;
4538 
4539 	  modification |= ac2->write;
4540 	  ac2->group_representative = access;
4541 	  ac2->next_sibling = access->next_sibling;
4542 	  access->next_sibling = ac2;
4543 	  j++;
4544 	}
4545 
4546       group_count++;
4547       access->grp_maybe_modified = modification;
4548       if (!modification)
4549 	*ro_grp = true;
4550       *prev_acc_ptr = access;
4551       prev_acc_ptr = &access->next_grp;
4552       total_size += access->size;
4553       i = j;
4554     }
4555 
4556   gcc_assert (group_count > 0);
4557   return res;
4558 }
4559 
4560 /* Decide whether parameters with representative accesses given by REPR should
4561    be reduced into components.  */
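/* For illustration: a by-value aggregate is only worth splitting when the
   accessed parts together are strictly smaller than the whole parameter
   (total_size < cur_parm_size); a by-reference parameter is additionally
   limited by PARAM_IPA_SRA_PTR_GROWTH_FACTOR (or a single component when
   optimizing for size), both in the number of new parameters and in their
   combined size.  */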
4562 
4563 static int
4564 decide_one_param_reduction (struct access *repr)
4565 {
4566   HOST_WIDE_INT total_size, cur_parm_size;
4567   bool by_ref;
4568   tree parm;
4569 
4570   parm = repr->base;
4571   cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4572   gcc_assert (cur_parm_size > 0);
4573 
4574   if (POINTER_TYPE_P (TREE_TYPE (parm)))
4575     by_ref = true;
4576   else
4577     by_ref = false;
4578 
4579   if (dump_file)
4580     {
4581       struct access *acc;
4582       fprintf (dump_file, "Evaluating PARAM group sizes for ");
4583       print_generic_expr (dump_file, parm);
4584       fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4585       for (acc = repr; acc; acc = acc->next_grp)
4586 	dump_access (dump_file, acc, true);
4587     }
4588 
4589   total_size = 0;
4590   int new_param_count = 0;
4591 
4592   for (; repr; repr = repr->next_grp)
4593     {
4594       gcc_assert (parm == repr->base);
4595 
4596       /* Taking the address of a non-addressable field is verboten.  */
4597       if (by_ref && repr->non_addressable)
4598 	return 0;
4599 
4600       /* Do not decompose a non-BLKmode param in a way that would
4601          create BLKmode params.  Especially for by-reference passing
4602 	 (thus, pointer-type param) this is hardly worthwhile.  */
4603       if (DECL_MODE (parm) != BLKmode
4604 	  && TYPE_MODE (repr->type) == BLKmode)
4605 	return 0;
4606 
4607       if (!by_ref || (!repr->grp_maybe_modified
4608 		      && !repr->grp_not_necessarilly_dereferenced))
4609 	total_size += repr->size;
4610       else
4611 	total_size += cur_parm_size;
4612 
4613       new_param_count++;
4614     }
4615 
4616   gcc_assert (new_param_count > 0);
4617 
4618   if (!by_ref)
4619     {
4620       if (total_size >= cur_parm_size)
4621 	return 0;
4622     }
4623   else
4624     {
4625       int parm_num_limit;
4626       if (optimize_function_for_size_p (cfun))
4627 	parm_num_limit = 1;
4628       else
4629 	parm_num_limit = PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR);
4630 
4631       if (new_param_count > parm_num_limit
4632 	  || total_size > (parm_num_limit * cur_parm_size))
4633 	return 0;
4634     }
4635 
4636   if (dump_file)
4637     fprintf (dump_file, "    ....will be split into %i components\n",
4638 	     new_param_count);
4639   return new_param_count;
4640 }
4641 
4642 /* The order of the following enums is important, we need to do extra work for
4643    UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
4644 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4645 			  MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4646 
4647 /* Identify representatives of all accesses to all candidate parameters for
4648    IPA-SRA.  Return result based on what representatives have been found. */
4649 
4650 static enum ipa_splicing_result
4651 splice_all_param_accesses (vec<access_p> &representatives)
4652 {
4653   enum ipa_splicing_result result = NO_GOOD_ACCESS;
4654   tree parm;
4655   struct access *repr;
4656 
4657   representatives.create (func_param_count);
4658 
4659   for (parm = DECL_ARGUMENTS (current_function_decl);
4660        parm;
4661        parm = DECL_CHAIN (parm))
4662     {
4663       if (is_unused_scalar_param (parm))
4664 	{
4665 	  representatives.quick_push (&no_accesses_representant);
4666 	  if (result == NO_GOOD_ACCESS)
4667 	    result = UNUSED_PARAMS;
4668 	}
4669       else if (POINTER_TYPE_P (TREE_TYPE (parm))
4670 	       && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4671 	       && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4672 	{
4673 	  repr = unmodified_by_ref_scalar_representative (parm);
4674 	  representatives.quick_push (repr);
4675 	  if (repr)
4676 	    result = UNMODIF_BY_REF_ACCESSES;
4677 	}
4678       else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4679 	{
4680 	  bool ro_grp = false;
4681 	  repr = splice_param_accesses (parm, &ro_grp);
4682 	  representatives.quick_push (repr);
4683 
4684 	  if (repr && !no_accesses_p (repr))
4685 	    {
4686 	      if (POINTER_TYPE_P (TREE_TYPE (parm)))
4687 		{
4688 		  if (ro_grp)
4689 		    result = UNMODIF_BY_REF_ACCESSES;
4690 		  else if (result < MODIF_BY_REF_ACCESSES)
4691 		    result = MODIF_BY_REF_ACCESSES;
4692 		}
4693 	      else if (result < BY_VAL_ACCESSES)
4694 		result = BY_VAL_ACCESSES;
4695 	    }
4696 	  else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4697 	    result = UNUSED_PARAMS;
4698 	}
4699       else
4700 	representatives.quick_push (NULL);
4701     }
4702 
4703   if (result == NO_GOOD_ACCESS)
4704     {
4705       representatives.release ();
4706       return NO_GOOD_ACCESS;
4707     }
4708 
4709   return result;
4710 }
4711 
4712 /* Return the index of BASE in PARMS.  Abort if it is not found.  */
4713 
4714 static inline int
4715 get_param_index (tree base, vec<tree> parms)
4716 {
4717   int i, len;
4718 
4719   len = parms.length ();
4720   for (i = 0; i < len; i++)
4721     if (parms[i] == base)
4722       return i;
4723   gcc_unreachable ();
4724 }
4725 
4726 /* Convert the decisions made at the representative level into compact
4727    parameter adjustments.  REPRESENTATIVES are pointers to first
4728    representatives of the accesses of each parameter, ADJUSTMENTS_COUNT is the
4729    final number of adjustments.  */
4730 
4731 static ipa_parm_adjustment_vec
4732 turn_representatives_into_adjustments (vec<access_p> representatives,
4733 				       int adjustments_count)
4734 {
4735   vec<tree> parms;
4736   ipa_parm_adjustment_vec adjustments;
4737   tree parm;
4738   int i;
4739 
4740   gcc_assert (adjustments_count > 0);
4741   parms = ipa_get_vector_of_formal_parms (current_function_decl);
4742   adjustments.create (adjustments_count);
4743   parm = DECL_ARGUMENTS (current_function_decl);
4744   for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4745     {
4746       struct access *repr = representatives[i];
4747 
4748       if (!repr || no_accesses_p (repr))
4749 	{
4750 	  struct ipa_parm_adjustment adj;
4751 
4752 	  memset (&adj, 0, sizeof (adj));
4753 	  adj.base_index = get_param_index (parm, parms);
4754 	  adj.base = parm;
4755 	  if (!repr)
4756 	    adj.op = IPA_PARM_OP_COPY;
4757 	  else
4758 	    adj.op = IPA_PARM_OP_REMOVE;
4759 	  adj.arg_prefix = "ISRA";
4760 	  adjustments.quick_push (adj);
4761 	}
4762       else
4763 	{
4764 	  struct ipa_parm_adjustment adj;
4765 	  int index = get_param_index (parm, parms);
4766 
4767 	  for (; repr; repr = repr->next_grp)
4768 	    {
4769 	      memset (&adj, 0, sizeof (adj));
4770 	      gcc_assert (repr->base == parm);
4771 	      adj.base_index = index;
4772 	      adj.base = repr->base;
4773 	      adj.type = repr->type;
4774 	      adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4775 	      adj.offset = repr->offset;
4776 	      adj.reverse = repr->reverse;
4777 	      adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4778 			    && (repr->grp_maybe_modified
4779 				|| repr->grp_not_necessarilly_dereferenced));
4780 	      adj.arg_prefix = "ISRA";
4781 	      adjustments.quick_push (adj);
4782 	    }
4783 	}
4784     }
4785   parms.release ();
4786   return adjustments;
4787 }
4788 
4789 /* Analyze the collected accesses and produce a plan what to do with the
4790    parameters in the form of adjustments, NULL meaning nothing.  */
4791 
4792 static ipa_parm_adjustment_vec
4793 analyze_all_param_acesses (void)
4794 {
4795   enum ipa_splicing_result repr_state;
4796   bool proceed = false;
4797   int i, adjustments_count = 0;
4798   vec<access_p> representatives;
4799   ipa_parm_adjustment_vec adjustments;
4800 
4801   repr_state = splice_all_param_accesses (representatives);
4802   if (repr_state == NO_GOOD_ACCESS)
4803     return ipa_parm_adjustment_vec ();
4804 
4805   /* If there are any parameters passed by reference which are not modified
4806      directly, we need to check whether they can be modified indirectly.  */
4807   if (repr_state == UNMODIF_BY_REF_ACCESSES)
4808     {
4809       analyze_caller_dereference_legality (representatives);
4810       analyze_modified_params (representatives);
4811     }
4812 
4813   for (i = 0; i < func_param_count; i++)
4814     {
4815       struct access *repr = representatives[i];
4816 
4817       if (repr && !no_accesses_p (repr))
4818 	{
4819 	  if (repr->grp_scalar_ptr)
4820 	    {
4821 	      adjustments_count++;
4822 	      if (repr->grp_not_necessarilly_dereferenced
4823 		  || repr->grp_maybe_modified)
4824 		representatives[i] = NULL;
4825 	      else
4826 		{
4827 		  proceed = true;
4828 		  sra_stats.scalar_by_ref_to_by_val++;
4829 		}
4830 	    }
4831 	  else
4832 	    {
4833 	      int new_components = decide_one_param_reduction (repr);
4834 
4835 	      if (new_components == 0)
4836 		{
4837 		  representatives[i] = NULL;
4838 		  adjustments_count++;
4839 		}
4840 	      else
4841 		{
4842 		  adjustments_count += new_components;
4843 		  sra_stats.aggregate_params_reduced++;
4844 		  sra_stats.param_reductions_created += new_components;
4845 		  proceed = true;
4846 		}
4847 	    }
4848 	}
4849       else
4850 	{
4851 	  if (no_accesses_p (repr))
4852 	    {
4853 	      proceed = true;
4854 	      sra_stats.deleted_unused_parameters++;
4855 	    }
4856 	  adjustments_count++;
4857 	}
4858     }
4859 
4860   if (!proceed && dump_file)
4861     fprintf (dump_file, "NOT proceeding to change params.\n");
4862 
4863   if (proceed)
4864     adjustments = turn_representatives_into_adjustments (representatives,
4865 							 adjustments_count);
4866   else
4867     adjustments = ipa_parm_adjustment_vec ();
4868 
4869   representatives.release ();
4870   return adjustments;
4871 }
4872 
4873 /* If a parameter replacement identified by ADJ does not yet exist in the form
4874    of declaration, create it and record it, otherwise return the previously
4875    created one.  */
4876 
4877 static tree
4878 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4879 {
4880   tree repl;
4881   if (!adj->new_ssa_base)
4882     {
4883       char *pretty_name = make_fancy_name (adj->base);
4884 
4885       repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4886       DECL_NAME (repl) = get_identifier (pretty_name);
4887       DECL_NAMELESS (repl) = 1;
4888       obstack_free (&name_obstack, pretty_name);
4889 
4890       adj->new_ssa_base = repl;
4891     }
4892   else
4893     repl = adj->new_ssa_base;
4894   return repl;
4895 }
4896 
4897 /* Find the first adjustment for a particular parameter BASE in a vector of
4898    ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
4899    adjustment. */
4900 
4901 static struct ipa_parm_adjustment *
4902 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4903 {
4904   int i, len;
4905 
4906   len = adjustments.length ();
4907   for (i = 0; i < len; i++)
4908     {
4909       struct ipa_parm_adjustment *adj;
4910 
4911       adj = &adjustments[i];
4912       if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4913 	return adj;
4914     }
4915 
4916   return NULL;
4917 }
4918 
4919 /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
4920    parameter which is to be removed because its value is not used, create a new
4921    SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
4922    original with it and return it.  If there is no need to re-map, return NULL.
4923    ADJUSTMENTS is a pointer to a vector of IPA-SRA adjustments.  */
4924 
4925 static tree
4926 replace_removed_params_ssa_names (tree old_name, gimple *stmt,
4927 				  ipa_parm_adjustment_vec adjustments)
4928 {
4929   struct ipa_parm_adjustment *adj;
4930   tree decl, repl, new_name;
4931 
4932   if (TREE_CODE (old_name) != SSA_NAME)
4933     return NULL;
4934 
4935   decl = SSA_NAME_VAR (old_name);
4936   if (decl == NULL_TREE
4937       || TREE_CODE (decl) != PARM_DECL)
4938     return NULL;
4939 
4940   adj = get_adjustment_for_base (adjustments, decl);
4941   if (!adj)
4942     return NULL;
4943 
4944   repl = get_replaced_param_substitute (adj);
4945   new_name = make_ssa_name (repl, stmt);
4946   SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
4947     = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);
4948 
4949   if (dump_file)
4950     {
4951       fprintf (dump_file, "replacing an SSA name of a removed param ");
4952       print_generic_expr (dump_file, old_name);
4953       fprintf (dump_file, " with ");
4954       print_generic_expr (dump_file, new_name);
4955       fprintf (dump_file, "\n");
4956     }
4957 
4958   replace_uses_by (old_name, new_name);
4959   return new_name;
4960 }
4961 
4962 /* If the statement STMT contains any expressions that need to be replaced
4963    with a different one as noted by ADJUSTMENTS, do so.  Handle any potential
4964    type incompatibilities (GSI is used to accommodate conversion statements and
4965    must point to the statement).  Return true iff the statement was modified.  */
4966 
4967 static bool
4968 sra_ipa_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
4969 		       ipa_parm_adjustment_vec adjustments)
4970 {
4971   tree *lhs_p, *rhs_p;
4972   bool any;
4973 
4974   if (!gimple_assign_single_p (stmt))
4975     return false;
4976 
4977   rhs_p = gimple_assign_rhs1_ptr (stmt);
4978   lhs_p = gimple_assign_lhs_ptr (stmt);
4979 
4980   any = ipa_modify_expr (rhs_p, false, adjustments);
4981   any |= ipa_modify_expr (lhs_p, false, adjustments);
4982   if (any)
4983     {
4984       tree new_rhs = NULL_TREE;
4985 
4986       if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4987 	{
4988 	  if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4989 	    {
4990 	      /* V_C_Es of constructors can cause trouble (PR 42714).  */
4991 	      if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4992 		*rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4993 	      else
4994 		*rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4995 					    NULL);
4996 	    }
4997 	  else
4998 	    new_rhs = fold_build1_loc (gimple_location (stmt),
4999 				       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
5000 				       *rhs_p);
5001 	}
5002       else if (REFERENCE_CLASS_P (*rhs_p)
5003 	       && is_gimple_reg_type (TREE_TYPE (*lhs_p))
5004 	       && !is_gimple_reg (*lhs_p))
5005 	/* This can happen when an assignment between two single-field
5006 	   structures is turned into an assignment between two pointers to
5007 	   scalars (PR 42237).  */
5008 	new_rhs = *rhs_p;
5009 
5010       if (new_rhs)
5011 	{
5012 	  tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
5013 					       true, GSI_SAME_STMT);
5014 
5015 	  gimple_assign_set_rhs_from_tree (gsi, tmp);
5016 	}
5017 
5018       return true;
5019     }
5020 
5021   return false;
5022 }
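
/* A hedged illustration of the fix-ups above: when rewriting the operands of
   a single assignment leaves the two sides with incompatible types, the RHS is
   wrapped in a VIEW_CONVERT_EXPR (or, for a CONSTRUCTOR RHS, replaced by a
   zero or an empty constructor of the LHS type) so the statement stays
   type-correct, conceptually

     dst_repl = VIEW_CONVERT_EXPR<float>(src_repl);

   where dst_repl and src_repl stand for hypothetical scalar replacements of
   the original aggregate operands.  */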
5023 
5024 /* Traverse the function body and perform all modifications as described in
5025    ADJUSTMENTS.  Return true iff the CFG has been changed.  */
5026 
5027 bool
5028 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
5029 {
5030   bool cfg_changed = false;
5031   basic_block bb;
5032 
5033   FOR_EACH_BB_FN (bb, cfun)
5034     {
5035       gimple_stmt_iterator gsi;
5036 
5037       for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5038 	{
5039 	  gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
5040 	  tree new_lhs, old_lhs = gimple_phi_result (phi);
5041 	  new_lhs = replace_removed_params_ssa_names (old_lhs, phi, adjustments);
5042 	  if (new_lhs)
5043 	    {
5044 	      gimple_phi_set_result (phi, new_lhs);
5045 	      release_ssa_name (old_lhs);
5046 	    }
5047 	}
5048 
5049       gsi = gsi_start_bb (bb);
5050       while (!gsi_end_p (gsi))
5051 	{
5052 	  gimple *stmt = gsi_stmt (gsi);
5053 	  bool modified = false;
5054 	  tree *t;
5055 	  unsigned i;
5056 
5057 	  switch (gimple_code (stmt))
5058 	    {
5059 	    case GIMPLE_RETURN:
5060 	      t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
5061 	      if (*t != NULL_TREE)
5062 		modified |= ipa_modify_expr (t, true, adjustments);
5063 	      break;
5064 
5065 	    case GIMPLE_ASSIGN:
5066 	      modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
5067 	      break;
5068 
5069 	    case GIMPLE_CALL:
5070 	      /* Operands must be processed before the lhs.  */
5071 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
5072 		{
5073 		  t = gimple_call_arg_ptr (stmt, i);
5074 		  modified |= ipa_modify_expr (t, true, adjustments);
5075 		}
5076 
5077 	      if (gimple_call_lhs (stmt))
5078 		{
5079 		  t = gimple_call_lhs_ptr (stmt);
5080 		  modified |= ipa_modify_expr (t, false, adjustments);
5081 		}
5082 	      break;
5083 
5084 	    case GIMPLE_ASM:
5085 	      {
5086 		gasm *asm_stmt = as_a <gasm *> (stmt);
5087 		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
5088 		  {
5089 		    t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
5090 		    modified |= ipa_modify_expr (t, true, adjustments);
5091 		  }
5092 		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
5093 		  {
5094 		    t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
5095 		    modified |= ipa_modify_expr (t, false, adjustments);
5096 		  }
5097 	      }
5098 	      break;
5099 
5100 	    default:
5101 	      break;
5102 	    }
5103 
5104 	  def_operand_p defp;
5105 	  ssa_op_iter iter;
5106 	  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5107 	    {
5108 	      tree old_def = DEF_FROM_PTR (defp);
5109 	      if (tree new_def = replace_removed_params_ssa_names (old_def, stmt,
5110 								   adjustments))
5111 		{
5112 		  SET_DEF (defp, new_def);
5113 		  release_ssa_name (old_def);
5114 		  modified = true;
5115 		}
5116 	    }
5117 
5118 	  if (modified)
5119 	    {
5120 	      update_stmt (stmt);
5121 	      if (maybe_clean_eh_stmt (stmt)
5122 		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5123 		cfg_changed = true;
5124 	    }
5125 	  gsi_next (&gsi);
5126 	}
5127     }
5128 
5129   return cfg_changed;
5130 }
5131 
5132 /* Call gimple_debug_bind_reset_value on all debug statements describing
5133    gimple register parameters that are being removed or replaced.  */
5134 
5135 static void
5136 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
5137 {
5138   int i, len;
5139   gimple_stmt_iterator *gsip = NULL, gsi;
5140 
5141   if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
5142     {
5143       gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5144       gsip = &gsi;
5145     }
5146   len = adjustments.length ();
5147   for (i = 0; i < len; i++)
5148     {
5149       struct ipa_parm_adjustment *adj;
5150       imm_use_iterator ui;
5151       gimple *stmt;
5152       gdebug *def_temp;
5153       tree name, vexpr, copy = NULL_TREE;
5154       use_operand_p use_p;
5155 
5156       adj = &adjustments[i];
5157       if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
5158 	continue;
5159       name = ssa_default_def (cfun, adj->base);
5160       vexpr = NULL;
5161       if (name)
5162 	FOR_EACH_IMM_USE_STMT (stmt, ui, name)
5163 	  {
5164 	    if (gimple_clobber_p (stmt))
5165 	      {
5166 		gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
5167 		unlink_stmt_vdef (stmt);
5168 		gsi_remove (&cgsi, true);
5169 		release_defs (stmt);
5170 		continue;
5171 	      }
5172 	    /* All other users must have been removed by
5173 	       ipa_sra_modify_function_body.  */
5174 	    gcc_assert (is_gimple_debug (stmt));
5175 	    if (vexpr == NULL && gsip != NULL)
5176 	      {
5177 		gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
5178 		vexpr = make_node (DEBUG_EXPR_DECL);
5179 		def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
5180 							   NULL);
5181 		DECL_ARTIFICIAL (vexpr) = 1;
5182 		TREE_TYPE (vexpr) = TREE_TYPE (name);
5183 		SET_DECL_MODE (vexpr, DECL_MODE (adj->base));
5184 		gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5185 	      }
5186 	    if (vexpr)
5187 	      {
5188 		FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
5189 		  SET_USE (use_p, vexpr);
5190 	      }
5191 	    else
5192 	      gimple_debug_bind_reset_value (stmt);
5193 	    update_stmt (stmt);
5194 	  }
5195       /* Create a VAR_DECL for debug info purposes.  */
5196       if (!DECL_IGNORED_P (adj->base))
5197 	{
5198 	  copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
5199 			     VAR_DECL, DECL_NAME (adj->base),
5200 			     TREE_TYPE (adj->base));
5201 	  if (DECL_PT_UID_SET_P (adj->base))
5202 	    SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
5203 	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
5204 	  TREE_READONLY (copy) = TREE_READONLY (adj->base);
5205 	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
5206 	  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
5207 	  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
5208 	  DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
5209 	  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
5210 	  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
5211 	  SET_DECL_RTL (copy, 0);
5212 	  TREE_USED (copy) = 1;
5213 	  DECL_CONTEXT (copy) = current_function_decl;
5214 	  add_local_decl (cfun, copy);
5215 	  DECL_CHAIN (copy) =
5216 	    BLOCK_VARS (DECL_INITIAL (current_function_decl));
5217 	  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
5218 	}
5219       if (gsip != NULL && copy && target_for_debug_bind (adj->base))
5220 	{
5221 	  gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
5222 	  if (vexpr)
5223 	    def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
5224 	  else
5225 	    def_temp = gimple_build_debug_source_bind (copy, adj->base,
5226 						       NULL);
5227 	  gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5228 	}
5229     }
5230 }
5231 
5232 /* Return false if all callers have at least as many actual arguments as there
5233    are formal parameters in the current function and their types match.
5234    Return true otherwise.  */
5235 
5236 static bool
5237 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
5238 					  void *data ATTRIBUTE_UNUSED)
5239 {
5240   struct cgraph_edge *cs;
5241   for (cs = node->callers; cs; cs = cs->next_caller)
5242     if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
5243       return true;
5244 
5245   return false;
5246 }
5247 
5248 /* Return false if all callers have a VUSE on their call statements.  */
5249 
5250 static bool
5251 some_callers_have_no_vuse_p (struct cgraph_node *node,
5252 			     void *data ATTRIBUTE_UNUSED)
5253 {
5254   struct cgraph_edge *cs;
5255   for (cs = node->callers; cs; cs = cs->next_caller)
5256     if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
5257       return true;
5258 
5259   return false;
5260 }
5261 
5262 /* Convert all callers of NODE.  */
5263 
5264 static bool
5265 convert_callers_for_node (struct cgraph_node *node,
5266 		          void *data)
5267 {
5268   ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
5269   bitmap recomputed_callers = BITMAP_ALLOC (NULL);
5270   struct cgraph_edge *cs;
5271 
5272   for (cs = node->callers; cs; cs = cs->next_caller)
5273     {
5274       push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
5275 
5276       if (dump_file)
5277 	fprintf (dump_file, "Adjusting call %s -> %s\n",
5278 		 cs->caller->dump_name (), cs->callee->dump_name ());
5279 
5280       ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
5281 
5282       pop_cfun ();
5283     }
5284 
5285   for (cs = node->callers; cs; cs = cs->next_caller)
5286     if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
5287 	&& gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
5288       compute_fn_summary (cs->caller, true);
5289   BITMAP_FREE (recomputed_callers);
5290 
5291   return true;
5292 }
5293 
5294 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */
5295 
5296 static void
5297 convert_callers (struct cgraph_node *node, tree old_decl,
5298 		 ipa_parm_adjustment_vec adjustments)
5299 {
5300   basic_block this_block;
5301 
5302   node->call_for_symbol_and_aliases (convert_callers_for_node,
5303 				     &adjustments, false);
5304 
5305   if (!encountered_recursive_call)
5306     return;
5307 
5308   FOR_EACH_BB_FN (this_block, cfun)
5309     {
5310       gimple_stmt_iterator gsi;
5311 
5312       for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
5313         {
5314 	  gcall *stmt;
5315 	  tree call_fndecl;
5316 	  stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
5317 	  if (!stmt)
5318 	    continue;
5319 	  call_fndecl = gimple_call_fndecl (stmt);
5320 	  if (call_fndecl == old_decl)
5321 	    {
5322 	      if (dump_file)
5323 		fprintf (dump_file, "Adjusting recursive call");
5324 	      gimple_call_set_fndecl (stmt, node->decl);
5325 	      ipa_modify_call_arguments (NULL, stmt, adjustments);
5326 	    }
5327 	}
5328     }
5329 
5330   return;
5331 }
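
/* Hypothetical sketch of the recursive-call handling above: calls to the old
   decl that now live inside the new clone's body are redirected by hand and
   their argument lists adjusted, conceptually turning

     int fact (int n, void *unused) { return n ? n * fact (n - 1, unused) : 1; }

   into a clone whose self-call reads "fact_isra (n - 1)".  The name
   "fact_isra" is invented for the example.  */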
5332 
5333 /* Perform all the modifications IPA-SRA requires for NODE to have parameters
5334    as given in ADJUSTMENTS.  Return true iff the CFG has been changed.  */
5335 
5336 static bool
5337 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
5338 {
5339   struct cgraph_node *new_node;
5340   bool cfg_changed;
5341 
5342   cgraph_edge::rebuild_edges ();
5343   free_dominance_info (CDI_DOMINATORS);
5344   pop_cfun ();
5345 
5346   /* This must be done after rebuilding cgraph edges for node above.
5347      Otherwise any recursive calls to node that are recorded in
5348      redirect_callers will be corrupted.  */
5349   vec<cgraph_edge *> redirect_callers = node->collect_callers ();
5350   new_node = node->create_version_clone_with_body (redirect_callers, NULL,
5351 						   NULL, false, NULL, NULL,
5352 						   "isra");
5353   redirect_callers.release ();
5354 
5355   push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
5356   ipa_modify_formal_parameters (current_function_decl, adjustments);
5357   cfg_changed = ipa_sra_modify_function_body (adjustments);
5358   sra_ipa_reset_debug_stmts (adjustments);
5359   convert_callers (new_node, node->decl, adjustments);
5360   new_node->make_local ();
5361   return cfg_changed;
5362 }
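
/* To make the overall effect concrete, a hypothetical source-level view of the
   transformation performed via the clone created above (actual clone naming
   and parameter layout are decided by the adjustment machinery, so treat this
   purely as an illustration):

     struct big { int used; int pad[64]; };

     static int get_used (struct big *p) { return p->used; }
     // ... becomes, roughly, ...
     static int get_used_isra (int used) { return used; }
     // and a call "get_used (&b)" is rewritten to "get_used_isra (b.used)".
   */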
5363 
5364 /* Means of communication between ipa_sra_check_caller and
5365    ipa_sra_preliminary_function_checks.  */
5366 
5367 struct ipa_sra_check_caller_data
5368 {
5369   bool has_callers;
5370   bool bad_arg_alignment;
5371   bool has_thunk;
5372 };
5373 
5374 /* If NODE has a caller, mark that fact in DATA, which is a pointer to
5375    ipa_sra_check_caller_data.  Also check whether all aggregate arguments in
5376    all known calls are unit aligned and, if they are not, set the appropriate
5377    flag in DATA too.  */
5378 
5379 static bool
5380 ipa_sra_check_caller (struct cgraph_node *node, void *data)
5381 {
5382   if (!node->callers)
5383     return false;
5384 
5385   struct ipa_sra_check_caller_data *iscc;
5386   iscc = (struct ipa_sra_check_caller_data *) data;
5387   iscc->has_callers = true;
5388 
5389   for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
5390     {
5391       if (cs->caller->thunk.thunk_p)
5392 	{
5393 	  iscc->has_thunk = true;
5394 	  return true;
5395 	}
5396       gimple *call_stmt = cs->call_stmt;
5397       unsigned count = gimple_call_num_args (call_stmt);
5398       for (unsigned i = 0; i < count; i++)
5399 	{
5400 	  tree arg = gimple_call_arg (call_stmt, i);
5401 	  if (is_gimple_reg (arg))
5402 	      continue;
5403 
5404 	  tree offset;
5405 	  poly_int64 bitsize, bitpos;
5406 	  machine_mode mode;
5407 	  int unsignedp, reversep, volatilep = 0;
5408 	  get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
5409 			       &unsignedp, &reversep, &volatilep);
5410 	  if (!multiple_p (bitpos, BITS_PER_UNIT))
5411 	    {
5412 	      iscc->bad_arg_alignment = true;
5413 	      return true;
5414 	    }
5415 	}
5416     }
5417 
5418   return false;
5419 }
5420 
5421 /* Return false if the function is apparently unsuitable for IPA-SRA based on
5422    its attributes; return true otherwise.  NODE is the cgraph node of the
5423    current function.  */
5424 
5425 static bool
5426 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
5427 {
5428   if (!node->can_be_local_p ())
5429     {
5430       if (dump_file)
5431 	fprintf (dump_file, "Function not local to this compilation unit.\n");
5432       return false;
5433     }
5434 
5435   if (!node->local.can_change_signature)
5436     {
5437       if (dump_file)
5438 	fprintf (dump_file, "Function can not change signature.\n");
5439       return false;
5440     }
5441 
5442   if (!tree_versionable_function_p (node->decl))
5443     {
5444       if (dump_file)
5445 	fprintf (dump_file, "Function is not versionable.\n");
5446       return false;
5447     }
5448 
5449   if (!opt_for_fn (node->decl, optimize)
5450       || !opt_for_fn (node->decl, flag_ipa_sra))
5451     {
5452       if (dump_file)
5453 	fprintf (dump_file, "Function not optimized.\n");
5454       return false;
5455     }
5456 
5457   if (DECL_VIRTUAL_P (current_function_decl))
5458     {
5459       if (dump_file)
5460 	fprintf (dump_file, "Function is a virtual method.\n");
5461       return false;
5462     }
5463 
5464   if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
5465       && ipa_fn_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
5466     {
5467       if (dump_file)
5468 	fprintf (dump_file, "Function too big to be made truly local.\n");
5469       return false;
5470     }
5471 
5472   if (cfun->stdarg)
5473     {
5474       if (dump_file)
5475 	fprintf (dump_file, "Function uses stdarg.\n");
5476       return false;
5477     }
5478 
5479   if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5480     return false;
5481 
5482   if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5483     {
5484       if (dump_file)
5485 	fprintf (dump_file, "Always inline function will be inlined "
5486 		 "anyway.\n");
5487       return false;
5488     }
5489 
5490   struct ipa_sra_check_caller_data iscc;
5491   memset (&iscc, 0, sizeof(iscc));
5492   node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
5493   if (!iscc.has_callers)
5494     {
5495       if (dump_file)
5496 	fprintf (dump_file,
5497 		 "Function has no callers in this compilation unit.\n");
5498       return false;
5499     }
5500 
5501   if (iscc.bad_arg_alignment)
5502     {
5503       if (dump_file)
5504 	fprintf (dump_file,
5505 		 "A function call has an argument with non-unit alignment.\n");
5506       return false;
5507     }
5508 
5509   if (iscc.has_thunk)
5510     {
5511       if (dump_file)
5512 	fprintf (dump_file,
5513 		 "A caller is a thunk.\n");
5514       return false;
5515     }
5516 
5517   return true;
5518 }
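
/* A few hypothetical examples of functions the preliminary checks above would
   reject (illustrative only):

     int sum (int n, ...);                        // uses stdarg
     struct C { virtual int m (); };              // C::m is a virtual method
     static inline int ai (int x)
       __attribute__ ((always_inline));           // will be inlined anyway
   */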
5519 
5520 /* Perform early interprocedural SRA.  */
5521 
5522 static unsigned int
5523 ipa_early_sra (void)
5524 {
5525   struct cgraph_node *node = cgraph_node::get (current_function_decl);
5526   ipa_parm_adjustment_vec adjustments;
5527   int ret = 0;
5528 
5529   if (!ipa_sra_preliminary_function_checks (node))
5530     return 0;
5531 
5532   sra_initialize ();
5533   sra_mode = SRA_MODE_EARLY_IPA;
5534 
5535   if (!find_param_candidates ())
5536     {
5537       if (dump_file)
5538 	fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5539       goto simple_out;
5540     }
5541 
5542   if (node->call_for_symbol_and_aliases
5543        (some_callers_have_mismatched_arguments_p, NULL, true))
5544     {
5545       if (dump_file)
5546 	fprintf (dump_file, "There are callers with insufficient number of "
5547 		 "arguments or arguments with type mismatches.\n");
5548       goto simple_out;
5549     }
5550 
5551   if (node->call_for_symbol_and_aliases
5552        (some_callers_have_no_vuse_p, NULL, true))
5553     {
5554       if (dump_file)
5555 	fprintf (dump_file, "There are callers with no VUSE attached "
5556 		 "to a call stmt.\n");
5557       goto simple_out;
5558     }
5559 
5560   bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5561 				 func_param_count
5562 				 * last_basic_block_for_fn (cfun));
5563   final_bbs = BITMAP_ALLOC (NULL);
5564 
5565   scan_function ();
5566   if (encountered_apply_args)
5567     {
5568       if (dump_file)
5569 	fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5570       goto out;
5571     }
5572 
5573   if (encountered_unchangable_recursive_call)
5574     {
5575       if (dump_file)
5576 	fprintf (dump_file, "Function calls itself with insufficient "
5577 		 "number of arguments.\n");
5578       goto out;
5579     }
5580 
5581   adjustments = analyze_all_param_acesses ();
5582   if (!adjustments.exists ())
5583     goto out;
5584   if (dump_file)
5585     ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5586 
5587   if (modify_function (node, adjustments))
5588     ret = TODO_update_ssa | TODO_cleanup_cfg;
5589   else
5590     ret = TODO_update_ssa;
5591   adjustments.release ();
5592 
5593   statistics_counter_event (cfun, "Unused parameters deleted",
5594 			    sra_stats.deleted_unused_parameters);
5595   statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5596 			    sra_stats.scalar_by_ref_to_by_val);
5597   statistics_counter_event (cfun, "Aggregate parameters broken up",
5598 			    sra_stats.aggregate_params_reduced);
5599   statistics_counter_event (cfun, "Aggregate parameter components created",
5600 			    sra_stats.param_reductions_created);
5601 
5602  out:
5603   BITMAP_FREE (final_bbs);
5604   free (bb_dereferences);
5605  simple_out:
5606   sra_deinitialize ();
5607   return ret;
5608 }
5609 
5610 namespace {
5611 
5612 const pass_data pass_data_early_ipa_sra =
5613 {
5614   GIMPLE_PASS, /* type */
5615   "eipa_sra", /* name */
5616   OPTGROUP_NONE, /* optinfo_flags */
5617   TV_IPA_SRA, /* tv_id */
5618   0, /* properties_required */
5619   0, /* properties_provided */
5620   0, /* properties_destroyed */
5621   0, /* todo_flags_start */
5622   TODO_dump_symtab, /* todo_flags_finish */
5623 };
5624 
5625 class pass_early_ipa_sra : public gimple_opt_pass
5626 {
5627 public:
5628   pass_early_ipa_sra (gcc::context *ctxt)
5629     : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5630   {}
5631 
5632   /* opt_pass methods: */
5633   virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5634   virtual unsigned int execute (function *) { return ipa_early_sra (); }
5635 
5636 }; // class pass_early_ipa_sra
5637 
5638 } // anon namespace
5639 
5640 gimple_opt_pass *
5641 make_pass_early_ipa_sra (gcc::context *ctxt)
5642 {
5643   return new pass_early_ipa_sra (ctxt);
5644 }
5645