xref: /dragonfly/contrib/gcc-4.7/gcc/tree-sra.c (revision cf89a63b)
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2    references into scalar references, exposing them to the scalar
3    optimizers.
4    Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
5    Contributed by Martin Jambor <mjambor@suse.cz>
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 /* This file implements Scalar Reduction of Aggregates (SRA).  SRA is run
24    twice, once in the early stages of compilation (early SRA) and once in the
25    late stages (late SRA).  The aim of both is to turn references to scalar
26    parts of aggregates into uses of independent scalar variables.
27 
28    The two passes are nearly identical; the only difference is that early SRA
29    does not scalarize unions which are used as the result in a GIMPLE_RETURN
30    statement because, together with inlining, this can lead to weird type
31    conversions.
32 
33    Both passes operate in four stages:
34 
35    1. The declarations that have properties which make them candidates for
36       scalarization are identified in function find_var_candidates().  The
37       candidates are stored in candidate_bitmap.
38 
39    2. The function body is scanned.  In the process, declarations which are
40       used in a manner that prevents their scalarization are removed from the
41       candidate bitmap.  More importantly, for every access into an aggregate,
42       an access structure (struct access) is created by create_access() and
43       stored in a vector associated with the aggregate.  Among other
44       information, the aggregate declaration, the offset and size of the access
45       and its type are stored in the structure.
46 
47       On a related note, assign_link structures are created for every assign
48       statement between candidate aggregates and attached to the related
49       accesses.
50 
51    3. The vectors of accesses are analyzed.  They are first sorted according to
52       their offset and size and then scanned for partially overlapping accesses
53       (i.e. those which overlap but one is not entirely within another).  Such
54       an access disqualifies the whole aggregate from being scalarized.
55 
56       If there is no such inhibiting overlap, a representative access structure
57       is chosen for every unique combination of offset and size.  Afterwards,
58       the pass builds a set of trees from these structures, in which children
59       of an access are within their parent (in terms of offset and size).
60 
61       Then accesses are propagated whenever possible (i.e. in cases when it
62       does not create a partially overlapping access) across assign_links from
63       the right hand side to the left hand side.
64 
65       Then the set of trees for each declaration is traversed again and those
66       accesses which should be replaced by a scalar are identified.
67 
68    4. The function is traversed again, and for every reference into an
69       aggregate that has some component which is about to be scalarized,
70       statements are amended and new statements are created as necessary.
71       Finally, if a parameter got scalarized, the scalar replacements are
72       initialized with values from respective parameter aggregates.  */
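
/* For illustration only (the declarations and names below are invented):
   given

     struct S { int i; float f; } s;
     s.i = 123;
     use (s.i);

   SRA may replace the accesses to s.i with a new scalar variable, e.g.
   "SR.1 = 123; use (SR.1);", provided no access forces the whole of s to
   stay in memory.  */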
73 
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "alloc-pool.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "gimple.h"
81 #include "cgraph.h"
82 #include "tree-flow.h"
83 #include "ipa-prop.h"
84 #include "tree-pretty-print.h"
85 #include "statistics.h"
86 #include "tree-dump.h"
87 #include "timevar.h"
88 #include "params.h"
89 #include "target.h"
90 #include "flags.h"
91 #include "dbgcnt.h"
92 #include "tree-inline.h"
93 #include "gimple-pretty-print.h"
94 #include "ipa-inline.h"
95 
96 /* Enumeration of all aggregate reductions we can do.  */
97 enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
98 		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
99 		SRA_MODE_INTRA };     /* late intraprocedural SRA */
100 
101 /* Global variable describing which aggregate reduction we are performing at
102    the moment.  */
103 static enum sra_mode sra_mode;
104 
105 struct assign_link;
106 
107 /* ACCESS represents each access to an aggregate variable (as a whole or a
108    part).  It can also represent a group of accesses that refer to exactly the
109    same fragment of an aggregate (i.e. those that have exactly the same offset
110    and size).  Such representatives for a single aggregate, once determined,
111    are linked in a linked list and have the group fields set.
112 
113    Moreover, when doing intraprocedural SRA, a tree is built from those
114    representatives (by the means of first_child and next_sibling pointers), in
115    which all items in a subtree are "within" the root, i.e. their offset is
116    greater or equal to offset of the root and offset+size is smaller or equal
117    to offset+size of the root.  Children of an access are sorted by offset.
118 
119    Note that accesses to parts of vector and complex number types are always
120    represented by an access to the whole complex number or vector.  It is the
121    duty of the modifying functions to replace them appropriately.  */
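
/* For illustration only (the type and field names are invented): for

     struct P { struct Q { int a; int b; } q; int c; };

   the access tree for a variable of type P could have a root access covering
   the whole of P, children for q and c, and grandchildren for q.a and q.b,
   each lying entirely within the offset and size range of its parent.  */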
122 
123 struct access
124 {
125   /* Values returned by `get_ref_base_and_extent' for each component reference.
126      If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
127      `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
128   HOST_WIDE_INT offset;
129   HOST_WIDE_INT size;
130   tree base;
131 
132   /* Expression.  It is context dependent so do not use it to create new
133      expressions to access the original aggregate.  See PR 42154 for a
134      testcase.  */
135   tree expr;
136   /* Type.  */
137   tree type;
138 
139   /* The statement this access belongs to.  */
140   gimple stmt;
141 
142   /* Next group representative for this aggregate. */
143   struct access *next_grp;
144 
145   /* Pointer to the group representative.  Pointer to itself if the struct is
146      the representative.  */
147   struct access *group_representative;
148 
149   /* If this access has any children (in terms of the definition above), this
150      points to the first one.  */
151   struct access *first_child;
152 
153   /* In intraprocedural SRA, pointer to the next sibling in the access tree as
154      described above.  In IPA-SRA this is a pointer to the next access
155      belonging to the same group (having the same representative).  */
156   struct access *next_sibling;
157 
158   /* Pointers to the first and last element in the linked list of assign
159      links.  */
160   struct assign_link *first_link, *last_link;
161 
162   /* Pointer to the next access in the work queue.  */
163   struct access *next_queued;
164 
165   /* Replacement variable for this access "region."  Never to be accessed
166      directly, always only by the means of get_access_replacement() and only
167      when grp_to_be_replaced flag is set.  */
168   tree replacement_decl;
169 
170   /* Is this particular access a write access? */
171   unsigned write : 1;
172 
173   /* Is this access an access to a non-addressable field? */
174   unsigned non_addressable : 1;
175 
176   /* Is this access currently in the work queue?  */
177   unsigned grp_queued : 1;
178 
179   /* Does this group contain a write access?  This flag is propagated down the
180      access tree.  */
181   unsigned grp_write : 1;
182 
183   /* Does this group contain a read access?  This flag is propagated down the
184      access tree.  */
185   unsigned grp_read : 1;
186 
187   /* Does this group contain a read access that comes from an assignment
188      statement?  This flag is propagated down the access tree.  */
189   unsigned grp_assignment_read : 1;
190 
191   /* Does this group contain a write access that comes from an assignment
192      statement?  This flag is propagated down the access tree.  */
193   unsigned grp_assignment_write : 1;
194 
195   /* Does this group contain a read access through a scalar type?  This flag is
196      not propagated in the access tree in any direction.  */
197   unsigned grp_scalar_read : 1;
198 
199   /* Does this group contain a write access through a scalar type?  This flag
200      is not propagated in the access tree in any direction.  */
201   unsigned grp_scalar_write : 1;
202 
203   /* Is this access an artificial one created to scalarize some record
204      entirely? */
205   unsigned grp_total_scalarization : 1;
206 
207   /* Other passes of the analysis use this bit to make function
208      analyze_access_subtree create scalar replacements for this group if
209      possible.  */
210   unsigned grp_hint : 1;
211 
212   /* Is the subtree rooted in this access fully covered by scalar
213      replacements?  */
214   unsigned grp_covered : 1;
215 
216   /* If set to true, this access and all below it in an access tree must not be
217      scalarized.  */
218   unsigned grp_unscalarizable_region : 1;
219 
220   /* Whether data have been written to parts of the aggregate covered by this
221      access which is not to be scalarized.  This flag is propagated up in the
222      access tree.  */
223   unsigned grp_unscalarized_data : 1;
224 
225   /* Does this access and/or group contain a write access through a
226      BIT_FIELD_REF?  */
227   unsigned grp_partial_lhs : 1;
228 
229   /* Set when a scalar replacement should be created for this variable.  We
230      make the decision and create the replacement at different places because
231      create_tmp_var cannot be called from within FOR_EACH_REFERENCED_VAR. */
232   unsigned grp_to_be_replaced : 1;
233 
234   /* Should TREE_NO_WARNING of a replacement be set?  */
235   unsigned grp_no_warning : 1;
236 
237   /* Is it possible that the group refers to data which might be (directly or
238      otherwise) modified?  */
239   unsigned grp_maybe_modified : 1;
240 
241   /* Set when this is a representative of a pointer to scalar (i.e. by
242      reference) parameter which we consider for turning into a plain scalar
243      (i.e. a by value parameter).  */
244   unsigned grp_scalar_ptr : 1;
245 
246   /* Set when we discover that this pointer is not safe to dereference in the
247      caller.  */
248   unsigned grp_not_necessarilly_dereferenced : 1;
249 };
250 
251 typedef struct access *access_p;
252 
253 DEF_VEC_P (access_p);
254 DEF_VEC_ALLOC_P (access_p, heap);
255 
256 /* Alloc pool for allocating access structures.  */
257 static alloc_pool access_pool;
258 
259 /* A structure linking lhs and rhs accesses from an aggregate assignment.  They
260    are used to propagate subaccesses from rhs to lhs as long as they don't
261    conflict with what is already there.  */
262 struct assign_link
263 {
264   struct access *lacc, *racc;
265   struct assign_link *next;
266 };
267 
268 /* Alloc pool for allocating assign link structures.  */
269 static alloc_pool link_pool;
270 
271 /* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
272 static struct pointer_map_t *base_access_vec;
273 
274 /* Bitmap of candidates.  */
275 static bitmap candidate_bitmap;
276 
277 /* Bitmap of candidates which we should try to entirely scalarize away and
278    those which cannot be (because they are, and need to be, used as a whole).  */
279 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
280 
281 /* Obstack for creation of fancy names.  */
282 static struct obstack name_obstack;
283 
284 /* Head of a linked list of accesses that need to have their subaccesses
285    propagated to their assignment counterparts. */
286 static struct access *work_queue_head;
287 
288 /* Number of parameters of the analyzed function when doing early ipa SRA.  */
289 static int func_param_count;
290 
291 /* scan_function sets the following to true if it encounters a call to
292    __builtin_apply_args.  */
293 static bool encountered_apply_args;
294 
295 /* Set by scan_function when it finds a recursive call.  */
296 static bool encountered_recursive_call;
297 
298 /* Set by scan_function when it finds a recursive call with fewer actual
299    arguments than formal parameters.  */
300 static bool encountered_unchangable_recursive_call;
301 
302 /* This is a table in which for each basic block and parameter there is a
303    distance (offset + size) in that parameter which is dereferenced and
304    accessed in that BB.  */
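/* In other words, the entry for parameter number I in basic block BB is
   bb_dereferences[BB->index * func_param_count + I].  */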
305 static HOST_WIDE_INT *bb_dereferences;
306 /* Bitmap of BBs that can cause the function to "stop" progressing by
307    returning, throwing externally, looping infinitely or calling a function
308    which might abort, etc.  */
309 static bitmap final_bbs;
310 
311 /* Representative of no accesses at all. */
312 static struct access  no_accesses_representant;
313 
314 /* Predicate to test the special value.  */
315 
316 static inline bool
317 no_accesses_p (struct access *access)
318 {
319   return access == &no_accesses_representant;
320 }
321 
326 static struct
327 {
328   /* Number of processed aggregates is readily available in
329      analyze_all_variable_accesses and so is not stored here.  */
330 
331   /* Number of created scalar replacements.  */
332   int replacements;
333 
334   /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
335      expression.  */
336   int exprs;
337 
338   /* Number of statements created by generate_subtree_copies.  */
339   int subtree_copies;
340 
341   /* Number of statements created by load_assign_lhs_subreplacements.  */
342   int subreplacements;
343 
344   /* Number of times sra_modify_assign has deleted a statement.  */
345   int deleted;
346 
347   /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
348      RHS separately due to type conversions or nonexistent matching
349      references.  */
350   int separate_lhs_rhs_handling;
351 
352   /* Number of parameters that were removed because they were unused.  */
353   int deleted_unused_parameters;
354 
355   /* Number of scalars passed as parameters by reference that have been
356      converted to be passed by value.  */
357   int scalar_by_ref_to_by_val;
358 
359   /* Number of aggregate parameters that were replaced by one or more of their
360      components.  */
361   int aggregate_params_reduced;
362 
363   /* Number of components created when splitting aggregate parameters.  */
364   int param_reductions_created;
365 } sra_stats;
366 
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

367 static void
368 dump_access (FILE *f, struct access *access, bool grp)
369 {
370   fprintf (f, "access { ");
371   fprintf (f, "base = (%d)'", DECL_UID (access->base));
372   print_generic_expr (f, access->base, 0);
373   fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
374   fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
375   fprintf (f, ", expr = ");
376   print_generic_expr (f, access->expr, 0);
377   fprintf (f, ", type = ");
378   print_generic_expr (f, access->type, 0);
379   if (grp)
380     fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
381 	     "grp_assignment_write = %d, grp_scalar_read = %d, "
382 	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
383 	     "grp_hint = %d, grp_covered = %d, "
384 	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
385 	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
386 	     "grp_maybe_modified = %d, "
387 	     "grp_not_necessarilly_dereferenced = %d\n",
388 	     access->grp_read, access->grp_write, access->grp_assignment_read,
389 	     access->grp_assignment_write, access->grp_scalar_read,
390 	     access->grp_scalar_write, access->grp_total_scalarization,
391 	     access->grp_hint, access->grp_covered,
392 	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
393 	     access->grp_partial_lhs, access->grp_to_be_replaced,
394 	     access->grp_maybe_modified,
395 	     access->grp_not_necessarilly_dereferenced);
396   else
397     fprintf (f, ", write = %d, grp_total_scalarization = %d, "
398 	     "grp_partial_lhs = %d\n",
399 	     access->write, access->grp_total_scalarization,
400 	     access->grp_partial_lhs);
401 }
402 
403 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */
404 
405 static void
406 dump_access_tree_1 (FILE *f, struct access *access, int level)
407 {
408   do
409     {
410       int i;
411 
412       for (i = 0; i < level; i++)
413 	fputs ("* ", f);
414 
415       dump_access (f, access, true);
416 
417       if (access->first_child)
418 	dump_access_tree_1 (f, access->first_child, level + 1);
419 
420       access = access->next_sibling;
421     }
422   while (access);
423 }
424 
425 /* Dump all access trees for a variable, given the pointer to the first root in
426    ACCESS.  */
427 
428 static void
429 dump_access_tree (FILE *f, struct access *access)
430 {
431   for (; access; access = access->next_grp)
432     dump_access_tree_1 (f, access, 0);
433 }
434 
435 /* Return true iff ACC is non-NULL and has subaccesses.  */
436 
437 static inline bool
438 access_has_children_p (struct access *acc)
439 {
440   return acc && acc->first_child;
441 }
442 
443 /* Return true iff ACC is (partly) covered by at least one replacement.  */
444 
445 static bool
446 access_has_replacements_p (struct access *acc)
447 {
448   struct access *child;
449   if (acc->grp_to_be_replaced)
450     return true;
451   for (child = acc->first_child; child; child = child->next_sibling)
452     if (access_has_replacements_p (child))
453       return true;
454   return false;
455 }
456 
457 /* Return a vector of pointers to accesses for the variable given in BASE or
458    NULL if there is none.  */
459 
460 static VEC (access_p, heap) *
461 get_base_access_vector (tree base)
462 {
463   void **slot;
464 
465   slot = pointer_map_contains (base_access_vec, base);
466   if (!slot)
467     return NULL;
468   else
469     return *(VEC (access_p, heap) **) slot;
470 }
471 
472 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
473    in ACCESS.  Return NULL if it cannot be found.  */
474 
475 static struct access *
476 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
477 			HOST_WIDE_INT size)
478 {
479   while (access && (access->offset != offset || access->size != size))
480     {
481       struct access *child = access->first_child;
482 
483       while (child && (child->offset + child->size <= offset))
484 	child = child->next_sibling;
485       access = child;
486     }
487 
488   return access;
489 }
490 
491 /* Return the first group representative for BASE or NULL if none exists.  */
492 
493 static struct access *
494 get_first_repr_for_decl (tree base)
495 {
496   VEC (access_p, heap) *access_vec;
497 
498   access_vec = get_base_access_vector (base);
499   if (!access_vec)
500     return NULL;
501 
502   return VEC_index (access_p, access_vec, 0);
503 }
504 
505 /* Find an access representative for the variable BASE and given OFFSET and
506    SIZE.  Requires that access trees have already been built.  Return NULL if
507    it cannot be found.  */
508 
509 static struct access *
510 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
511 				 HOST_WIDE_INT size)
512 {
513   struct access *access;
514 
515   access = get_first_repr_for_decl (base);
516   while (access && (access->offset + access->size <= offset))
517     access = access->next_grp;
518   if (!access)
519     return NULL;
520 
521   return find_access_in_subtree (access, offset, size);
522 }
523 
524 /* Add LINK to the linked list of assign links of RACC.  */
525 static void
526 add_link_to_rhs (struct access *racc, struct assign_link *link)
527 {
528   gcc_assert (link->racc == racc);
529 
530   if (!racc->first_link)
531     {
532       gcc_assert (!racc->last_link);
533       racc->first_link = link;
534     }
535   else
536     racc->last_link->next = link;
537 
538   racc->last_link = link;
539   link->next = NULL;
540 }
541 
542 /* Move all link structures from the linked list of OLD_RACC to the linked
543    list of NEW_RACC.  */
544 static void
545 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
546 {
547   if (!old_racc->first_link)
548     {
549       gcc_assert (!old_racc->last_link);
550       return;
551     }
552 
553   if (new_racc->first_link)
554     {
555       gcc_assert (!new_racc->last_link->next);
556       gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
557 
558       new_racc->last_link->next = old_racc->first_link;
559       new_racc->last_link = old_racc->last_link;
560     }
561   else
562     {
563       gcc_assert (!new_racc->last_link);
564 
565       new_racc->first_link = old_racc->first_link;
566       new_racc->last_link = old_racc->last_link;
567     }
568   old_racc->first_link = old_racc->last_link = NULL;
569 }
570 
571 /* Add ACCESS to the work queue (which is actually a stack).  */
572 
573 static void
574 add_access_to_work_queue (struct access *access)
575 {
576   if (!access->grp_queued)
577     {
578       gcc_assert (!access->next_queued);
579       access->next_queued = work_queue_head;
580       access->grp_queued = 1;
581       work_queue_head = access;
582     }
583 }
584 
585 /* Pop an access from the work queue, and return it, assuming there is one.  */
586 
587 static struct access *
588 pop_access_from_work_queue (void)
589 {
590   struct access *access = work_queue_head;
591 
592   work_queue_head = access->next_queued;
593   access->next_queued = NULL;
594   access->grp_queued = 0;
595   return access;
596 }
597 
598 
599 /* Allocate necessary structures.  */
600 
601 static void
602 sra_initialize (void)
603 {
604   candidate_bitmap = BITMAP_ALLOC (NULL);
605   should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
606   cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
607   gcc_obstack_init (&name_obstack);
608   access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
609   link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
610   base_access_vec = pointer_map_create ();
611   memset (&sra_stats, 0, sizeof (sra_stats));
612   encountered_apply_args = false;
613   encountered_recursive_call = false;
614   encountered_unchangable_recursive_call = false;
615 }
616 
617 /* Hook fed to pointer_map_traverse, deallocate stored vectors.  */
618 
619 static bool
620 delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
621 		     void *data ATTRIBUTE_UNUSED)
622 {
623   VEC (access_p, heap) *access_vec;
624   access_vec = (VEC (access_p, heap) *) *value;
625   VEC_free (access_p, heap, access_vec);
626 
627   return true;
628 }
629 
630 /* Deallocate all general structures.  */
631 
632 static void
633 sra_deinitialize (void)
634 {
635   BITMAP_FREE (candidate_bitmap);
636   BITMAP_FREE (should_scalarize_away_bitmap);
637   BITMAP_FREE (cannot_scalarize_away_bitmap);
638   free_alloc_pool (access_pool);
639   free_alloc_pool (link_pool);
640   obstack_free (&name_obstack, NULL);
641 
642   pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
643   pointer_map_destroy (base_access_vec);
644 }
645 
646 /* Remove DECL from candidates for SRA and write REASON to the dump file if
647    there is one.  */
648 static void
649 disqualify_candidate (tree decl, const char *reason)
650 {
651   bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));
652 
653   if (dump_file && (dump_flags & TDF_DETAILS))
654     {
655       fprintf (dump_file, "! Disqualifying ");
656       print_generic_expr (dump_file, decl, 0);
657       fprintf (dump_file, " - %s\n", reason);
658     }
659 }
660 
661 /* Return true iff the type contains a field or an element which does not allow
662    scalarization.  */
663 
664 static bool
665 type_internals_preclude_sra_p (tree type, const char **msg)
666 {
667   tree fld;
668   tree et;
669 
670   switch (TREE_CODE (type))
671     {
672     case RECORD_TYPE:
673     case UNION_TYPE:
674     case QUAL_UNION_TYPE:
675       for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
676 	if (TREE_CODE (fld) == FIELD_DECL)
677 	  {
678 	    tree ft = TREE_TYPE (fld);
679 
680 	    if (TREE_THIS_VOLATILE (fld))
681 	      {
682 		*msg = "volatile structure field";
683 		return true;
684 	      }
685 	    if (!DECL_FIELD_OFFSET (fld))
686 	      {
687 		*msg = "no structure field offset";
688 		return true;
689 	      }
690 	    if (!DECL_SIZE (fld))
691 	      {
692 		*msg = "zero structure field size";
693 	        return true;
694 	      }
695 	    if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
696 	      {
697 		*msg = "structure field offset not fixed";
698 		return true;
699 	      }
700 	    if (!host_integerp (DECL_SIZE (fld), 1))
701 	      {
702 	        *msg = "structure field size not fixed";
703 		return true;
704 	      }
705 	    if (!host_integerp (bit_position (fld), 0))
706 	      {
707 	        *msg = "structure field size too big";
708 		return true;
709 	      }
710 	    if (AGGREGATE_TYPE_P (ft)
711 		    && int_bit_position (fld) % BITS_PER_UNIT != 0)
712 	      {
713 		*msg = "structure field is bit field";
714 	        return true;
715 	      }
716 
717 	    if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
718 	      return true;
719 	  }
720 
721       return false;
722 
723     case ARRAY_TYPE:
724       et = TREE_TYPE (type);
725 
726       if (TYPE_VOLATILE (et))
727 	{
728 	  *msg = "element type is volatile";
729 	  return true;
730 	}
731 
732       if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
733 	return true;
734 
735       return false;
736 
737     default:
738       return false;
739     }
740 }
741 
742 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
743    base variable if it is.  Return T if it is not an SSA_NAME.  */
744 
745 static tree
746 get_ssa_base_param (tree t)
747 {
748   if (TREE_CODE (t) == SSA_NAME)
749     {
750       if (SSA_NAME_IS_DEFAULT_DEF (t))
751 	return SSA_NAME_VAR (t);
752       else
753 	return NULL_TREE;
754     }
755   return t;
756 }
757 
758 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
759    belongs to, unless the BB has already been marked as potentially
760    final.  */
761 
762 static void
763 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
764 {
765   basic_block bb = gimple_bb (stmt);
766   int idx, parm_index = 0;
767   tree parm;
768 
769   if (bitmap_bit_p (final_bbs, bb->index))
770     return;
771 
772   for (parm = DECL_ARGUMENTS (current_function_decl);
773        parm && parm != base;
774        parm = DECL_CHAIN (parm))
775     parm_index++;
776 
777   gcc_assert (parm_index < func_param_count);
778 
779   idx = bb->index * func_param_count + parm_index;
780   if (bb_dereferences[idx] < dist)
781     bb_dereferences[idx] = dist;
782 }
783 
784 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
785    the three fields.  Also add it to the vector of accesses corresponding to
786    the base.  Finally, return the new access.  */
787 
788 static struct access *
789 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
790 {
791   VEC (access_p, heap) *vec;
792   struct access *access;
793   void **slot;
794 
795   access = (struct access *) pool_alloc (access_pool);
796   memset (access, 0, sizeof (struct access));
797   access->base = base;
798   access->offset = offset;
799   access->size = size;
800 
801   slot = pointer_map_contains (base_access_vec, base);
802   if (slot)
803     vec = (VEC (access_p, heap) *) *slot;
804   else
805     vec = VEC_alloc (access_p, heap, 32);
806 
807   VEC_safe_push (access_p, heap, vec, access);
808 
809   *((struct VEC (access_p,heap) **)
810 	pointer_map_insert (base_access_vec, base)) = vec;
811 
812   return access;
813 }
814 
815 /* Create and insert an access for EXPR.  Return the created access, or NULL
816    if it is not possible.  */
817 
818 static struct access *
819 create_access (tree expr, gimple stmt, bool write)
820 {
821   struct access *access;
822   HOST_WIDE_INT offset, size, max_size;
823   tree base = expr;
824   bool ptr, unscalarizable_region = false;
825 
826   base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
827 
828   if (sra_mode == SRA_MODE_EARLY_IPA
829       && TREE_CODE (base) == MEM_REF)
830     {
831       base = get_ssa_base_param (TREE_OPERAND (base, 0));
832       if (!base)
833 	return NULL;
834       ptr = true;
835     }
836   else
837     ptr = false;
838 
839   if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
840     return NULL;
841 
842   if (sra_mode == SRA_MODE_EARLY_IPA)
843     {
844       if (size < 0 || size != max_size)
845 	{
846 	  disqualify_candidate (base, "Encountered a variable sized access.");
847 	  return NULL;
848 	}
849       if (TREE_CODE (expr) == COMPONENT_REF
850 	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
851 	{
852 	  disqualify_candidate (base, "Encountered a bit-field access.");
853 	  return NULL;
854 	}
855       gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
856 
857       if (ptr)
858 	mark_parm_dereference (base, offset + size, stmt);
859     }
860   else
861     {
862       if (size != max_size)
863 	{
864 	  size = max_size;
865 	  unscalarizable_region = true;
866 	}
867       if (size < 0)
868 	{
869 	  disqualify_candidate (base, "Encountered an unconstrained access.");
870 	  return NULL;
871 	}
872     }
873 
874   access = create_access_1 (base, offset, size);
875   access->expr = expr;
876   access->type = TREE_TYPE (expr);
877   access->write = write;
878   access->grp_unscalarizable_region = unscalarizable_region;
879   access->stmt = stmt;
880 
881   if (TREE_CODE (expr) == COMPONENT_REF
882       && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
883     access->non_addressable = 1;
884 
885   return access;
886 }
887 
888 
889 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
890    register types or (recursively) records with only these two kinds of fields.
891    It also returns false if any of these records contains a bit-field.  */
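
/* Illustrative examples (types invented): struct { int x; struct { float y; } z; }
   satisfies this predicate, whereas struct { int x : 3; } (a bit-field) and
   struct { int a[4]; } (an array member, neither a register type nor a
   record) do not.  */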
892 
893 static bool
894 type_consists_of_records_p (tree type)
895 {
896   tree fld;
897 
898   if (TREE_CODE (type) != RECORD_TYPE)
899     return false;
900 
901   for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
902     if (TREE_CODE (fld) == FIELD_DECL)
903       {
904 	tree ft = TREE_TYPE (fld);
905 
906 	if (DECL_BIT_FIELD (fld))
907 	  return false;
908 
909 	if (!is_gimple_reg_type (ft)
910 	    && !type_consists_of_records_p (ft))
911 	  return false;
912       }
913 
914   return true;
915 }
916 
917 /* Create total_scalarization accesses for all scalar type fields in DECL, which
918    must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
919    must be the top-most VAR_DECL representing the variable, OFFSET must be the
920    offset of DECL within BASE.  REF must be the memory reference expression for
921    the given decl.  */
922 
923 static void
924 completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
925 			     tree ref)
926 {
927   tree fld, decl_type = TREE_TYPE (decl);
928 
929   for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
930     if (TREE_CODE (fld) == FIELD_DECL)
931       {
932 	HOST_WIDE_INT pos = offset + int_bit_position (fld);
933 	tree ft = TREE_TYPE (fld);
934 	tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
935 			    NULL_TREE);
936 
937 	if (is_gimple_reg_type (ft))
938 	  {
939 	    struct access *access;
940 	    HOST_WIDE_INT size;
941 
942 	    size = tree_low_cst (DECL_SIZE (fld), 1);
943 	    access = create_access_1 (base, pos, size);
944 	    access->expr = nref;
945 	    access->type = ft;
946 	    access->grp_total_scalarization = 1;
947 	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
948 	  }
949 	else
950 	  completely_scalarize_record (base, fld, pos, nref);
951       }
952 }
953 
954 /* Create total_scalarization accesses for all scalar type fields in VAR and
955    for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
956    type_consists_of_records_p.   */
957 
958 static void
959 completely_scalarize_var (tree var)
960 {
961   HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
962   struct access *access;
963 
964   access = create_access_1 (var, 0, size);
965   access->expr = var;
966   access->type = TREE_TYPE (var);
967   access->grp_total_scalarization = 1;
968 
969   completely_scalarize_record (var, var, 0, var);
970 }
971 
972 /* Search the given tree for a declaration by skipping handled components and
973    exclude it from the candidates.  */
974 
975 static void
976 disqualify_base_of_expr (tree t, const char *reason)
977 {
978   t = get_base_address (t);
979   if (t
980       && sra_mode == SRA_MODE_EARLY_IPA
981       && TREE_CODE (t) == MEM_REF)
982     t = get_ssa_base_param (TREE_OPERAND (t, 0));
983 
984   if (t && DECL_P (t))
985     disqualify_candidate (t, reason);
986 }
987 
988 /* Scan expression EXPR and create access structures for all accesses to
989    candidates for scalarization.  Return the created access or NULL if none is
990    created.  */
991 
992 static struct access *
993 build_access_from_expr_1 (tree expr, gimple stmt, bool write)
994 {
995   struct access *ret = NULL;
996   bool partial_ref;
997 
998   if (TREE_CODE (expr) == BIT_FIELD_REF
999       || TREE_CODE (expr) == IMAGPART_EXPR
1000       || TREE_CODE (expr) == REALPART_EXPR)
1001     {
1002       expr = TREE_OPERAND (expr, 0);
1003       partial_ref = true;
1004     }
1005   else
1006     partial_ref = false;
1007 
1008   /* We need to dive through V_C_Es in order to get the size of their parameter
1009      and not of the result type.  Ada produces such statements.  We are also
1010      capable of handling the topmost V_C_E but not any of those buried in other
1011      handled components.  */
1012   if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1013     expr = TREE_OPERAND (expr, 0);
1014 
1015   if (contains_view_convert_expr_p (expr))
1016     {
1017       disqualify_base_of_expr (expr, "V_C_E under a different handled "
1018 			       "component.");
1019       return NULL;
1020     }
1021 
1022   switch (TREE_CODE (expr))
1023     {
1024     case MEM_REF:
1025       if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1026 	  && sra_mode != SRA_MODE_EARLY_IPA)
1027 	return NULL;
1028       /* fall through */
1029     case VAR_DECL:
1030     case PARM_DECL:
1031     case RESULT_DECL:
1032     case COMPONENT_REF:
1033     case ARRAY_REF:
1034     case ARRAY_RANGE_REF:
1035       ret = create_access (expr, stmt, write);
1036       break;
1037 
1038     default:
1039       break;
1040     }
1041 
1042   if (write && partial_ref && ret)
1043     ret->grp_partial_lhs = 1;
1044 
1045   return ret;
1046 }
1047 
1048 /* Scan expression EXPR and create access structures for all accesses to
1049    candidates for scalarization.  Return true if any access has been inserted.
1050    STMT must be the statement from which the expression is taken, WRITE must be
1051    true if the expression is a store and false otherwise. */
1052 
1053 static bool
1054 build_access_from_expr (tree expr, gimple stmt, bool write)
1055 {
1056   struct access *access;
1057 
1058   access = build_access_from_expr_1 (expr, stmt, write);
1059   if (access)
1060     {
1061       /* This means the aggregate is accessed as a whole in a way other than an
1062 	 assign statement and thus cannot be removed even if we had a scalar
1063 	 replacement for everything.  */
1064       if (cannot_scalarize_away_bitmap)
1065 	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1066       return true;
1067     }
1068   return false;
1069 }
1070 
1071 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
1072    modes in which it matters, return true iff they have been disqualified.  RHS
1073    may be NULL, in that case ignore it.  If we scalarize an aggregate in
1074    intra-SRA we may need to add statements after each statement.  This is not
1075    possible if a statement unconditionally has to end the basic block.  */
1076 static bool
1077 disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
1078 {
1079   if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1080       && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
1081     {
1082       disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1083       if (rhs)
1084 	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1085       return true;
1086     }
1087   return false;
1088 }
1089 
1090 /* Return true if EXP is a memory reference less aligned than ALIGN.  This is
1091    invoked only on strict-alignment targets.  */
1092 
1093 static bool
1094 tree_non_aligned_mem_p (tree exp, unsigned int align)
1095 {
1096   unsigned int exp_align;
1097 
1098   if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
1099     exp = TREE_OPERAND (exp, 0);
1100 
1101   if (TREE_CODE (exp) == SSA_NAME || is_gimple_min_invariant (exp))
1102     return false;
1103 
1104   /* get_object_alignment will fall back to BITS_PER_UNIT if it cannot
1105      compute an explicit alignment.  Pretend that dereferenced pointers
1106      are always aligned on strict-alignment targets.  */
1107   if (TREE_CODE (exp) == MEM_REF || TREE_CODE (exp) == TARGET_MEM_REF)
1108     exp_align = get_object_or_type_alignment (exp);
1109   else
1110     exp_align = get_object_alignment (exp);
1111 
1112   if (exp_align < align)
1113     return true;
1114 
1115   return false;
1116 }
1117 
1118 /* Return true if EXP is a memory reference less aligned than what the access
1119    ACC would require.  This is invoked only on strict-alignment targets.  */
1120 
1121 static bool
1122 tree_non_aligned_mem_for_access_p (tree exp, struct access *acc)
1123 {
1124   unsigned int acc_align;
1125 
1126   /* The alignment of the access is that of its expression.  However, it may
1127      have been artificially increased, e.g. by a local alignment promotion,
1128      so we cap it to the alignment of the type of the base, on the grounds
1129      that valid sub-accesses cannot be more aligned than that.  */
1130   acc_align = get_object_alignment (acc->expr);
1131   if (acc->base && acc_align > TYPE_ALIGN (TREE_TYPE (acc->base)))
1132     acc_align = TYPE_ALIGN (TREE_TYPE (acc->base));
1133 
1134   return tree_non_aligned_mem_p (exp, acc_align);
1135 }
1136 
1137 /* Scan expressions occurring in STMT, create access structures for all accesses
1138    to candidates for scalarization and remove those candidates which occur in
1139    statements or expressions that prevent them from being split apart.  Return
1140    true if any access has been inserted.  */
1141 
1142 static bool
1143 build_accesses_from_assign (gimple stmt)
1144 {
1145   tree lhs, rhs;
1146   struct access *lacc, *racc;
1147 
1148   if (!gimple_assign_single_p (stmt)
1149       /* Scope clobbers don't influence scalarization.  */
1150       || gimple_clobber_p (stmt))
1151     return false;
1152 
1153   lhs = gimple_assign_lhs (stmt);
1154   rhs = gimple_assign_rhs1 (stmt);
1155 
1156   if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
1157     return false;
1158 
1159   racc = build_access_from_expr_1 (rhs, stmt, false);
1160   lacc = build_access_from_expr_1 (lhs, stmt, true);
1161 
1162   if (lacc)
1163     {
1164       lacc->grp_assignment_write = 1;
1165       if (STRICT_ALIGNMENT && tree_non_aligned_mem_for_access_p (rhs, lacc))
1166         lacc->grp_unscalarizable_region = 1;
1167     }
1168 
1169   if (racc)
1170     {
1171       racc->grp_assignment_read = 1;
1172       if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1173 	  && !is_gimple_reg_type (racc->type))
1174 	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1175       if (STRICT_ALIGNMENT && tree_non_aligned_mem_for_access_p (lhs, racc))
1176         racc->grp_unscalarizable_region = 1;
1177     }
1178 
1179   if (lacc && racc
1180       && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1181       && !lacc->grp_unscalarizable_region
1182       && !racc->grp_unscalarizable_region
1183       && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1184       /* FIXME: Turn the following line into an assert after PR 40058 is
1185 	 fixed.  */
1186       && lacc->size == racc->size
1187       && useless_type_conversion_p (lacc->type, racc->type))
1188     {
1189       struct assign_link *link;
1190 
1191       link = (struct assign_link *) pool_alloc (link_pool);
1192       memset (link, 0, sizeof (struct assign_link));
1193 
1194       link->lacc = lacc;
1195       link->racc = racc;
1196 
1197       add_link_to_rhs (racc, link);
1198     }
1199 
1200   return lacc || racc;
1201 }
1202 
1203 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1204    GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */
1205 
1206 static bool
1207 asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
1208 		void *data ATTRIBUTE_UNUSED)
1209 {
1210   op = get_base_address (op);
1211   if (op
1212       && DECL_P (op))
1213     disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1214 
1215   return false;
1216 }
1217 
1218 /* Return true iff callsite CALL has at least as many actual arguments as there
1219    are formal parameters of the function currently processed by IPA-SRA.  */
1220 
1221 static inline bool
1222 callsite_has_enough_arguments_p (gimple call)
1223 {
1224   return gimple_call_num_args (call) >= (unsigned) func_param_count;
1225 }
1226 
1227 /* Scan function and look for interesting expressions and create access
1228    structures for them.  Return true iff any access is created.  */
1229 
1230 static bool
1231 scan_function (void)
1232 {
1233   basic_block bb;
1234   bool ret = false;
1235 
1236   FOR_EACH_BB (bb)
1237     {
1238       gimple_stmt_iterator gsi;
1239       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1240 	{
1241 	  gimple stmt = gsi_stmt (gsi);
1242 	  tree t;
1243 	  unsigned i;
1244 
1245 	  if (final_bbs && stmt_can_throw_external (stmt))
1246 	    bitmap_set_bit (final_bbs, bb->index);
1247 	  switch (gimple_code (stmt))
1248 	    {
1249 	    case GIMPLE_RETURN:
1250 	      t = gimple_return_retval (stmt);
1251 	      if (t != NULL_TREE)
1252 		ret |= build_access_from_expr (t, stmt, false);
1253 	      if (final_bbs)
1254 		bitmap_set_bit (final_bbs, bb->index);
1255 	      break;
1256 
1257 	    case GIMPLE_ASSIGN:
1258 	      ret |= build_accesses_from_assign (stmt);
1259 	      break;
1260 
1261 	    case GIMPLE_CALL:
1262 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
1263 		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1264 					       stmt, false);
1265 
1266 	      if (sra_mode == SRA_MODE_EARLY_IPA)
1267 		{
1268 		  tree dest = gimple_call_fndecl (stmt);
1269 		  int flags = gimple_call_flags (stmt);
1270 
1271 		  if (dest)
1272 		    {
1273 		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1274 			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1275 			encountered_apply_args = true;
1276 		      if (cgraph_get_node (dest)
1277 			  == cgraph_get_node (current_function_decl))
1278 			{
1279 			  encountered_recursive_call = true;
1280 			  if (!callsite_has_enough_arguments_p (stmt))
1281 			    encountered_unchangable_recursive_call = true;
1282 			}
1283 		    }
1284 
1285 		  if (final_bbs
1286 		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
1287 		    bitmap_set_bit (final_bbs, bb->index);
1288 		}
1289 
1290 	      t = gimple_call_lhs (stmt);
1291 	      if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
1292 		ret |= build_access_from_expr (t, stmt, true);
1293 	      break;
1294 
1295 	    case GIMPLE_ASM:
1296 	      walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1297 					     asm_visit_addr);
1298 	      if (final_bbs)
1299 		bitmap_set_bit (final_bbs, bb->index);
1300 
1301 	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1302 		{
1303 		  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
1304 		  ret |= build_access_from_expr (t, stmt, false);
1305 		}
1306 	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1307 		{
1308 		  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
1309 		  ret |= build_access_from_expr (t, stmt, true);
1310 		}
1311 	      break;
1312 
1313 	    default:
1314 	      break;
1315 	    }
1316 	}
1317     }
1318 
1319   return ret;
1320 }
1321 
1322 /* Helper of QSORT function. There are pointers to accesses in the array.  An
1323    access is considered smaller than another if it has smaller offset or if the
1324    offsets are the same but its size is bigger.  */
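
/* As an illustration of that ordering (offsets and sizes made up): accesses
   with (offset, size) of (0, 64), (0, 32) and (32, 32) end up sorted as
   (0, 64), (0, 32), (32, 32), i.e. among accesses with the same offset the
   bigger one comes first.  */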
1325 
1326 static int
1327 compare_access_positions (const void *a, const void *b)
1328 {
1329   const access_p *fp1 = (const access_p *) a;
1330   const access_p *fp2 = (const access_p *) b;
1331   const access_p f1 = *fp1;
1332   const access_p f2 = *fp2;
1333 
1334   if (f1->offset != f2->offset)
1335     return f1->offset < f2->offset ? -1 : 1;
1336 
1337   if (f1->size == f2->size)
1338     {
1339       if (f1->type == f2->type)
1340 	return 0;
1341       /* Put any non-aggregate type before any aggregate type.  */
1342       else if (!is_gimple_reg_type (f1->type)
1343 	  && is_gimple_reg_type (f2->type))
1344 	return 1;
1345       else if (is_gimple_reg_type (f1->type)
1346 	       && !is_gimple_reg_type (f2->type))
1347 	return -1;
1348       /* Put any complex or vector type before any other scalar type.  */
1349       else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1350 	       && TREE_CODE (f1->type) != VECTOR_TYPE
1351 	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
1352 		   || TREE_CODE (f2->type) == VECTOR_TYPE))
1353 	return 1;
1354       else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1355 		|| TREE_CODE (f1->type) == VECTOR_TYPE)
1356 	       && TREE_CODE (f2->type) != COMPLEX_TYPE
1357 	       && TREE_CODE (f2->type) != VECTOR_TYPE)
1358 	return -1;
1359       /* Put the integral type with the bigger precision first.  */
1360       else if (INTEGRAL_TYPE_P (f1->type)
1361 	       && INTEGRAL_TYPE_P (f2->type))
1362 	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1363       /* Put any integral type with non-full precision last.  */
1364       else if (INTEGRAL_TYPE_P (f1->type)
1365 	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1366 		   != TYPE_PRECISION (f1->type)))
1367 	return 1;
1368       else if (INTEGRAL_TYPE_P (f2->type)
1369 	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1370 		   != TYPE_PRECISION (f2->type)))
1371 	return -1;
1372       /* Stabilize the sort.  */
1373       return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1374     }
1375 
1376   /* We want the bigger accesses first, thus the opposite operator in the next
1377      line: */
1378   return f1->size > f2->size ? -1 : 1;
1379 }
1380 
1381 
1382 /* Append a name of the declaration to the name obstack.  A helper function for
1383    make_fancy_name.  */
1384 
1385 static void
1386 make_fancy_decl_name (tree decl)
1387 {
1388   char buffer[32];
1389 
1390   tree name = DECL_NAME (decl);
1391   if (name)
1392     obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1393 		  IDENTIFIER_LENGTH (name));
1394   else
1395     {
1396       sprintf (buffer, "D%u", DECL_UID (decl));
1397       obstack_grow (&name_obstack, buffer, strlen (buffer));
1398     }
1399 }
1400 
1401 /* Helper for make_fancy_name.  */
1402 
1403 static void
1404 make_fancy_name_1 (tree expr)
1405 {
1406   char buffer[32];
1407   tree index;
1408 
1409   if (DECL_P (expr))
1410     {
1411       make_fancy_decl_name (expr);
1412       return;
1413     }
1414 
1415   switch (TREE_CODE (expr))
1416     {
1417     case COMPONENT_REF:
1418       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1419       obstack_1grow (&name_obstack, '$');
1420       make_fancy_decl_name (TREE_OPERAND (expr, 1));
1421       break;
1422 
1423     case ARRAY_REF:
1424       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1425       obstack_1grow (&name_obstack, '$');
1426       /* Arrays with only one element may not have a constant as their
1427 	 index. */
1428       index = TREE_OPERAND (expr, 1);
1429       if (TREE_CODE (index) != INTEGER_CST)
1430 	break;
1431       sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1432       obstack_grow (&name_obstack, buffer, strlen (buffer));
1433       break;
1434 
1435     case ADDR_EXPR:
1436       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1437       break;
1438 
1439     case MEM_REF:
1440       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1441       if (!integer_zerop (TREE_OPERAND (expr, 1)))
1442 	{
1443 	  obstack_1grow (&name_obstack, '$');
1444 	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1445 		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1446 	  obstack_grow (&name_obstack, buffer, strlen (buffer));
1447 	}
1448       break;
1449 
1450     case BIT_FIELD_REF:
1451     case REALPART_EXPR:
1452     case IMAGPART_EXPR:
1453       gcc_unreachable (); 	/* we treat these as scalars.  */
1454       break;
1455     default:
1456       break;
1457     }
1458 }
1459 
1460 /* Create a human readable name for a replacement variable based on EXPR.  */
1461 
1462 static char *
1463 make_fancy_name (tree expr)
1464 {
1465   make_fancy_name_1 (expr);
1466   obstack_1grow (&name_obstack, '\0');
1467   return XOBFINISH (&name_obstack, char *);
1468 }
1469 
1470 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1471    EXP_TYPE at the given OFFSET.  If BASE is something for which
1472    get_addr_base_and_unit_offset returns NULL, GSI must be non-NULL and is used
1473    to insert new statements either before or below the current one as specified
1474    by INSERT_AFTER.  This function is not capable of handling bitfields.  */
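
/* A rough illustration (names invented): for BASE `a' of an aggregate type,
   OFFSET 32 and EXP_TYPE `int', the result is essentially
   MEM_REF <int> [&a + 4 bytes], with EXP_TYPE possibly replaced by a less
   aligned variant if the known alignment of the address is smaller.  */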
1475 
1476 tree
1477 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1478 		      tree exp_type, gimple_stmt_iterator *gsi,
1479 		      bool insert_after)
1480 {
1481   tree prev_base = base;
1482   tree off;
1483   HOST_WIDE_INT base_offset;
1484   unsigned HOST_WIDE_INT misalign;
1485   unsigned int align;
1486 
1487   gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1488 
1489   base = get_addr_base_and_unit_offset (base, &base_offset);
1490 
1491   /* get_addr_base_and_unit_offset returns NULL for references with a variable
1492      offset such as array[var_index].  */
1493   if (!base)
1494     {
1495       gimple stmt;
1496       tree tmp, addr;
1497 
1498       gcc_checking_assert (gsi);
1499       tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
1500       add_referenced_var (tmp);
1501       tmp = make_ssa_name (tmp, NULL);
1502       addr = build_fold_addr_expr (unshare_expr (prev_base));
1503       STRIP_USELESS_TYPE_CONVERSION (addr);
1504       stmt = gimple_build_assign (tmp, addr);
1505       gimple_set_location (stmt, loc);
1506       SSA_NAME_DEF_STMT (tmp) = stmt;
1507       if (insert_after)
1508 	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1509       else
1510 	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1511       update_stmt (stmt);
1512 
1513       off = build_int_cst (reference_alias_ptr_type (prev_base),
1514 			   offset / BITS_PER_UNIT);
1515       base = tmp;
1516     }
1517   else if (TREE_CODE (base) == MEM_REF)
1518     {
1519       off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1520 			   base_offset + offset / BITS_PER_UNIT);
1521       off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1522       base = unshare_expr (TREE_OPERAND (base, 0));
1523     }
1524   else
1525     {
1526       off = build_int_cst (reference_alias_ptr_type (base),
1527 			   base_offset + offset / BITS_PER_UNIT);
1528       base = build_fold_addr_expr (unshare_expr (base));
1529     }
1530 
1531   /* If prev_base were always an originally performed access
1532      we could extract more optimistic alignment information
1533      by looking at the access mode.  That would constrain the
1534      alignment of base + base_offset which we would need to
1535      adjust according to offset.  */
1536   align = get_pointer_alignment_1 (base, &misalign);
1537   if (misalign == 0
1538       && (TREE_CODE (prev_base) == MEM_REF
1539 	  || TREE_CODE (prev_base) == TARGET_MEM_REF))
1540     align = MAX (align, TYPE_ALIGN (TREE_TYPE (prev_base)));
1541   misalign += (double_int_sext (tree_to_double_int (off),
1542 				TYPE_PRECISION (TREE_TYPE (off))).low
1543 	       * BITS_PER_UNIT);
1544   misalign = misalign & (align - 1);
1545   if (misalign != 0)
1546     align = (misalign & -misalign);
1547   if (align < TYPE_ALIGN (exp_type))
1548     exp_type = build_aligned_type (exp_type, align);
1549 
1550   return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1551 }
1552 
1553 DEF_VEC_ALLOC_P_STACK (tree);
1554 #define VEC_tree_stack_alloc(alloc) VEC_stack_alloc (tree, alloc)
1555 
1556 /* Construct a memory reference to a part of an aggregate BASE at the given
1557    OFFSET and of the type of MODEL.  In case this is a chain of references
1558    to components, the function will replicate the chain of COMPONENT_REFs of
1559    the expression of MODEL to access it.  GSI and INSERT_AFTER have the same
1560    meaning as in build_ref_for_offset.  */
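
/* For illustration (names invented): if MODEL's expression is x.q.a, the
   function builds a reference with the type of x at the corresponding
   position within BASE and then re-applies the .q and .a COMPONENT_REFs on
   top of it.  */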
1561 
1562 static tree
1563 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1564 		     struct access *model, gimple_stmt_iterator *gsi,
1565 		     bool insert_after)
1566 {
1567   tree type = model->type, t;
1568   VEC(tree,stack) *cr_stack = NULL;
1569 
1570   if (TREE_CODE (model->expr) == COMPONENT_REF)
1571     {
1572       tree expr = model->expr;
1573 
1574       /* Create a stack of the COMPONENT_REFs so later we can walk them in
1575 	 order from inner to outer.  */
1576       cr_stack = VEC_alloc (tree, stack, 6);
1577 
1578       do {
1579 	tree field = TREE_OPERAND (expr, 1);
1580 	tree cr_offset = component_ref_field_offset (expr);
1581 	HOST_WIDE_INT bit_pos
1582 	  = tree_low_cst (cr_offset, 1) * BITS_PER_UNIT
1583 	      + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1584 
1585 	/* We can be called with a model different from the one associated
1586 	   with BASE so we need to avoid going up the chain too far.  */
1587 	if (offset - bit_pos < 0)
1588 	  break;
1589 
1590 	offset -= bit_pos;
1591 	VEC_safe_push (tree, stack, cr_stack, expr);
1592 
1593 	expr = TREE_OPERAND (expr, 0);
1594 	type = TREE_TYPE (expr);
1595       } while (TREE_CODE (expr) == COMPONENT_REF);
1596     }
1597 
1598   t = build_ref_for_offset (loc, base, offset, type, gsi, insert_after);
1599 
1600   if (TREE_CODE (model->expr) == COMPONENT_REF)
1601     {
1602       unsigned i;
1603       tree expr;
1604 
1605       /* Now replicate the chain of COMPONENT_REFs from inner to outer.  */
1606       FOR_EACH_VEC_ELT_REVERSE (tree, cr_stack, i, expr)
1607 	{
1608 	  tree field = TREE_OPERAND (expr, 1);
1609 	  t = fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (field), t, field,
1610 			       TREE_OPERAND (expr, 2));
1611 	}
1612 
1613       VEC_free (tree, stack, cr_stack);
1614     }
1615 
1616   return t;
1617 }
1618 
1619 /* Construct a memory reference consisting of component_refs and array_refs to
1620    a part of an aggregate *RES (which is of type TYPE).  The requested part
1621    should have type EXP_TYPE and be at the given OFFSET.  This function might not
1622    succeed; it returns true when it does, and only then does *RES point to something
1623    meaningful.  This function should be used only to build expressions that we
1624    might need to present to user (e.g. in warnings).  In all other situations,
1625    build_ref_for_model or build_ref_for_offset should be used instead.  */
1626 
1627 static bool
1628 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1629 				    tree exp_type)
1630 {
1631   while (1)
1632     {
1633       tree fld;
1634       tree tr_size, index, minidx;
1635       HOST_WIDE_INT el_size;
1636 
1637       if (offset == 0 && exp_type
1638 	  && types_compatible_p (exp_type, type))
1639 	return true;
1640 
1641       switch (TREE_CODE (type))
1642 	{
1643 	case UNION_TYPE:
1644 	case QUAL_UNION_TYPE:
1645 	case RECORD_TYPE:
1646 	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1647 	    {
1648 	      HOST_WIDE_INT pos, size;
1649 	      tree expr, *expr_ptr;
1650 
1651 	      if (TREE_CODE (fld) != FIELD_DECL)
1652 		continue;
1653 
1654 	      pos = int_bit_position (fld);
1655 	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1656 	      tr_size = DECL_SIZE (fld);
1657 	      if (!tr_size || !host_integerp (tr_size, 1))
1658 		continue;
1659 	      size = tree_low_cst (tr_size, 1);
1660 	      if (size == 0)
1661 		{
1662 		  if (pos != offset)
1663 		    continue;
1664 		}
1665 	      else if (pos > offset || (pos + size) <= offset)
1666 		continue;
1667 
1668 	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1669 			     NULL_TREE);
1670 	      expr_ptr = &expr;
1671 	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1672 						      offset - pos, exp_type))
1673 		{
1674 		  *res = expr;
1675 		  return true;
1676 		}
1677 	    }
1678 	  return false;
1679 
1680 	case ARRAY_TYPE:
1681 	  tr_size = TYPE_SIZE (TREE_TYPE (type));
1682 	  if (!tr_size || !host_integerp (tr_size, 1))
1683 	    return false;
1684 	  el_size = tree_low_cst (tr_size, 1);
1685 
1686 	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1687 	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1688 	    return false;
1689 	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1690 	  if (!integer_zerop (minidx))
1691 	    index = int_const_binop (PLUS_EXPR, index, minidx);
1692 	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1693 			 NULL_TREE, NULL_TREE);
1694 	  offset = offset % el_size;
1695 	  type = TREE_TYPE (type);
1696 	  break;
1697 
1698 	default:
1699 	  if (offset != 0)
1700 	    return false;
1701 
1702 	  if (exp_type)
1703 	    return false;
1704 	  else
1705 	    return true;
1706 	}
1707     }
1708 }
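
/* For example, with the invented layout below and assuming 32-bit ints and
   no padding,

     struct inner { int a; int b; };
     struct outer { struct inner in; int c; } s;

   a request for offset 32 with EXP_TYPE int yields s.in.b: the RECORD_TYPE
   case picks the field IN (bits 0..63 cover bit 32), recurses with offset
   32 - 0, and the nested call picks B at position 32 with a compatible
   type.  An offset that does not land on such a field (say 8) makes the
   function return false and the callers fall back to build_ref_for_model
   instead.  */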
1709 
1710 /* Return true iff TYPE is the stdarg va_list type.  */
1711 
1712 static inline bool
1713 is_va_list_type (tree type)
1714 {
1715   return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1716 }
1717 
1718 /* Print a message to the dump file explaining why a variable was rejected.  */
1719 
1720 static void
1721 reject (tree var, const char *msg)
1722 {
1723   if (dump_file && (dump_flags & TDF_DETAILS))
1724     {
1725       fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1726       print_generic_expr (dump_file, var, 0);
1727       fprintf (dump_file, "\n");
1728     }
1729 }
1730 
1731 /* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
1732    those variables and parameters whose type is suitable for scalarization.  */
1733 
1734 static bool
1735 find_var_candidates (void)
1736 {
1737   tree var, type;
1738   referenced_var_iterator rvi;
1739   bool ret = false;
1740   const char *msg;
1741 
1742   FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
1743     {
1744       if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
1745         continue;
1746       type = TREE_TYPE (var);
1747 
1748       if (!AGGREGATE_TYPE_P (type))
1749         {
1750           reject (var, "not aggregate");
1751           continue;
1752 	}
1753       if (needs_to_live_in_memory (var))
1754         {
1755           reject (var, "needs to live in memory");
1756           continue;
1757         }
1758       if (TREE_THIS_VOLATILE (var))
1759         {
1760           reject (var, "is volatile");
1761 	  continue;
1762         }
1763       if (!COMPLETE_TYPE_P (type))
1764         {
1765           reject (var, "has incomplete type");
1766 	  continue;
1767         }
1768       if (!host_integerp (TYPE_SIZE (type), 1))
1769         {
1770           reject (var, "type size not fixed");
1771 	  continue;
1772         }
1773       if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
1774         {
1775           reject (var, "type size is zero");
1776           continue;
1777         }
1778       if (type_internals_preclude_sra_p (type, &msg))
1779 	{
1780 	  reject (var, msg);
1781 	  continue;
1782 	}
1783       if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
1784 	      we also want to schedule it rather late.  Thus we ignore it in
1785 	      the early pass. */
1786 	  (sra_mode == SRA_MODE_EARLY_INTRA
1787 	      && is_va_list_type (type)))
1788         {
1789 	  reject (var, "is va_list");
1790 	  continue;
1791 	}
1792 
1793       bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1794 
1795       if (dump_file && (dump_flags & TDF_DETAILS))
1796 	{
1797 	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1798 	  print_generic_expr (dump_file, var, 0);
1799 	  fprintf (dump_file, "\n");
1800 	}
1801       ret = true;
1802     }
1803 
1804   return ret;
1805 }
1806 
1807 /* Sort all accesses for the given variable, check for partial overlaps and
1808    return NULL if there are any.  If there are none, pick a representative for
1809    each combination of offset and size and create a linked list out of them.
1810    Return the pointer to the first representative and make sure it is the first
1811    one in the vector of accesses.  */
1812 
1813 static struct access *
1814 sort_and_splice_var_accesses (tree var)
1815 {
1816   int i, j, access_count;
1817   struct access *res, **prev_acc_ptr = &res;
1818   VEC (access_p, heap) *access_vec;
1819   bool first = true;
1820   HOST_WIDE_INT low = -1, high = 0;
1821 
1822   access_vec = get_base_access_vector (var);
1823   if (!access_vec)
1824     return NULL;
1825   access_count = VEC_length (access_p, access_vec);
1826 
1827   /* Sort by <OFFSET, SIZE>.  */
1828   VEC_qsort (access_p, access_vec, compare_access_positions);
1829 
1830   i = 0;
1831   while (i < access_count)
1832     {
1833       struct access *access = VEC_index (access_p, access_vec, i);
1834       bool grp_write = access->write;
1835       bool grp_read = !access->write;
1836       bool grp_scalar_write = access->write
1837 	&& is_gimple_reg_type (access->type);
1838       bool grp_scalar_read = !access->write
1839 	&& is_gimple_reg_type (access->type);
1840       bool grp_assignment_read = access->grp_assignment_read;
1841       bool grp_assignment_write = access->grp_assignment_write;
1842       bool multiple_scalar_reads = false;
1843       bool total_scalarization = access->grp_total_scalarization;
1844       bool grp_partial_lhs = access->grp_partial_lhs;
1845       bool first_scalar = is_gimple_reg_type (access->type);
1846       bool unscalarizable_region = access->grp_unscalarizable_region;
1847 
1848       if (first || access->offset >= high)
1849 	{
1850 	  first = false;
1851 	  low = access->offset;
1852 	  high = access->offset + access->size;
1853 	}
1854       else if (access->offset > low && access->offset + access->size > high)
1855 	return NULL;
1856       else
1857 	gcc_assert (access->offset >= low
1858 		    && access->offset + access->size <= high);
1859 
1860       j = i + 1;
1861       while (j < access_count)
1862 	{
1863 	  struct access *ac2 = VEC_index (access_p, access_vec, j);
1864 	  if (ac2->offset != access->offset || ac2->size != access->size)
1865 	    break;
1866 	  if (ac2->write)
1867 	    {
1868 	      grp_write = true;
1869 	      grp_scalar_write = (grp_scalar_write
1870 				  || is_gimple_reg_type (ac2->type));
1871 	    }
1872 	  else
1873 	    {
1874 	      grp_read = true;
1875 	      if (is_gimple_reg_type (ac2->type))
1876 		{
1877 		  if (grp_scalar_read)
1878 		    multiple_scalar_reads = true;
1879 		  else
1880 		    grp_scalar_read = true;
1881 		}
1882 	    }
1883 	  grp_assignment_read |= ac2->grp_assignment_read;
1884 	  grp_assignment_write |= ac2->grp_assignment_write;
1885 	  grp_partial_lhs |= ac2->grp_partial_lhs;
1886 	  unscalarizable_region |= ac2->grp_unscalarizable_region;
1887 	  total_scalarization |= ac2->grp_total_scalarization;
1888 	  relink_to_new_repr (access, ac2);
1889 
1890 	  /* If there are both aggregate-type and scalar-type accesses with
1891 	     this combination of size and offset, the comparison function
1892 	     should have put the scalars first.  */
1893 	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1894 	  ac2->group_representative = access;
1895 	  j++;
1896 	}
1897 
1898       i = j;
1899 
1900       access->group_representative = access;
1901       access->grp_write = grp_write;
1902       access->grp_read = grp_read;
1903       access->grp_scalar_read = grp_scalar_read;
1904       access->grp_scalar_write = grp_scalar_write;
1905       access->grp_assignment_read = grp_assignment_read;
1906       access->grp_assignment_write = grp_assignment_write;
1907       access->grp_hint = multiple_scalar_reads || total_scalarization;
1908       access->grp_total_scalarization = total_scalarization;
1909       access->grp_partial_lhs = grp_partial_lhs;
1910       access->grp_unscalarizable_region = unscalarizable_region;
1911       if (access->first_link)
1912 	add_access_to_work_queue (access);
1913 
1914       *prev_acc_ptr = access;
1915       prev_acc_ptr = &access->next_grp;
1916     }
1917 
1918   gcc_assert (res == VEC_index (access_p, access_vec, 0));
1919   return res;
1920 }
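
/* An illustrative example, assuming 32-bit ints:  for

     struct S { int i; int j; } s;
     ... = s.i;  ... = s.i;  s.j = ...;  ... = s;

   the accesses sort as <0,64>, <0,32>, <0,32>, <32,32> (offset and size in
   bits, bigger accesses first on equal offsets).  The two reads of s.i are
   spliced into a single representative whose multiple scalar reads set
   grp_hint, while the whole-structure read and the write to s.j keep their
   own representatives.  An extra access such as <16,32> would overlap
   <0,32> only partially and would disqualify S, either here or later when
   the access tree is built.  */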
1921 
1922 /* Create a variable for the given ACCESS which determines the type, name and a
1923    few other properties.  Return the new declaration; callers store it in
1924    ACCESS->replacement_decl.  */
1925 
1926 static tree
1927 create_access_replacement (struct access *access, bool rename)
1928 {
1929   tree repl;
1930 
1931   repl = create_tmp_var (access->type, "SR");
1932   add_referenced_var (repl);
1933   if (rename)
1934     mark_sym_for_renaming (repl);
1935 
1936   if (!access->grp_partial_lhs
1937       && (TREE_CODE (access->type) == COMPLEX_TYPE
1938 	  || TREE_CODE (access->type) == VECTOR_TYPE))
1939     DECL_GIMPLE_REG_P (repl) = 1;
1940 
1941   DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1942   DECL_ARTIFICIAL (repl) = 1;
1943   DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1944 
1945   if (DECL_NAME (access->base)
1946       && !DECL_IGNORED_P (access->base)
1947       && !DECL_ARTIFICIAL (access->base))
1948     {
1949       char *pretty_name = make_fancy_name (access->expr);
1950       tree debug_expr = unshare_expr (access->expr), d;
1951 
1952       DECL_NAME (repl) = get_identifier (pretty_name);
1953       obstack_free (&name_obstack, pretty_name);
1954 
1955       /* Get rid of any SSA_NAMEs embedded in debug_expr,
1956 	 as DECL_DEBUG_EXPR isn't considered when looking for still
1957 	 used SSA_NAMEs and thus they could be freed.  All debug info
1958 	 generation cares is whether something is constant or variable
1959 	 generation cares about is whether something is constant or variable
1960 	 expression.  */
1961       for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
1962 	switch (TREE_CODE (d))
1963 	  {
1964 	  case ARRAY_REF:
1965 	  case ARRAY_RANGE_REF:
1966 	    if (TREE_OPERAND (d, 1)
1967 		&& TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
1968 	      TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
1969 	    if (TREE_OPERAND (d, 3)
1970 		&& TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
1971 	      TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
1972 	    /* FALLTHRU */
1973 	  case COMPONENT_REF:
1974 	    if (TREE_OPERAND (d, 2)
1975 		&& TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
1976 	      TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
1977 	    break;
1978 	  default:
1979 	    break;
1980 	  }
1981       SET_DECL_DEBUG_EXPR (repl, debug_expr);
1982       DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1983       if (access->grp_no_warning)
1984 	TREE_NO_WARNING (repl) = 1;
1985       else
1986 	TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1987     }
1988   else
1989     TREE_NO_WARNING (repl) = 1;
1990 
1991   if (dump_file)
1992     {
1993       fprintf (dump_file, "Created a replacement for ");
1994       print_generic_expr (dump_file, access->base, 0);
1995       fprintf (dump_file, " offset: %u, size: %u: ",
1996 	       (unsigned) access->offset, (unsigned) access->size);
1997       print_generic_expr (dump_file, repl, 0);
1998       fprintf (dump_file, "\n");
1999     }
2000   sra_stats.replacements++;
2001 
2002   return repl;
2003 }
2004 
2005 /* Return ACCESS scalar replacement, create it if it does not exist yet.  */
2006 
2007 static inline tree
2008 get_access_replacement (struct access *access)
2009 {
2010   gcc_assert (access->grp_to_be_replaced);
2011 
2012   if (!access->replacement_decl)
2013     access->replacement_decl = create_access_replacement (access, true);
2014   return access->replacement_decl;
2015 }
2016 
2017 /* Return ACCESS scalar replacement, create it if it does not exist yet but do
2018    not mark it for renaming.  */
2019 
2020 static inline tree
2021 get_unrenamed_access_replacement (struct access *access)
2022 {
2023   gcc_assert (!access->grp_to_be_replaced);
2024 
2025   if (!access->replacement_decl)
2026     access->replacement_decl = create_access_replacement (access, false);
2027   return access->replacement_decl;
2028 }
2029 
2030 
2031 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2032    linked list along the way.  Stop when *ACCESS is NULL or the access pointed
2033    to by it is not "within" the root.  Return false iff some accesses partially
2034    overlap.  */
2035 
2036 static bool
2037 build_access_subtree (struct access **access)
2038 {
2039   struct access *root = *access, *last_child = NULL;
2040   HOST_WIDE_INT limit = root->offset + root->size;
2041 
2042   *access = (*access)->next_grp;
2043   while  (*access && (*access)->offset + (*access)->size <= limit)
2044     {
2045       if (!last_child)
2046 	root->first_child = *access;
2047       else
2048 	last_child->next_sibling = *access;
2049       last_child = *access;
2050 
2051       if (!build_access_subtree (access))
2052 	return false;
2053     }
2054 
2055   if (*access && (*access)->offset < limit)
2056     return false;
2057 
2058   return true;
2059 }
2060 
2061 /* Build a tree of access representatives, ACCESS is the pointer to the first
2062    one, others are linked in a list by the next_grp field.  Return false iff
2063    some accesses partially overlap.  */
2064 
2065 static bool
2066 build_access_trees (struct access *access)
2067 {
2068   while (access)
2069     {
2070       struct access *root = access;
2071 
2072       if (!build_access_subtree (&access))
2073 	return false;
2074       root->next_grp = access;
2075     }
2076   return true;
2077 }
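
/* For instance, representatives <0,64>, <0,32> and <32,32> (offset and
   size in bits) become a single tree: <0,64> is the root and <0,32> and
   <32,32> are its children because both lie within the root's extent.
   Had the list started with <0,32> instead, <32,32> would not fit inside
   it and would start a second root in the next_grp chain.  */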
2078 
2079 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
2080    array.  */
2081 
2082 static bool
2083 expr_with_var_bounded_array_refs_p (tree expr)
2084 {
2085   while (handled_component_p (expr))
2086     {
2087       if (TREE_CODE (expr) == ARRAY_REF
2088 	  && !host_integerp (array_ref_low_bound (expr), 0))
2089 	return true;
2090       expr = TREE_OPERAND (expr, 0);
2091     }
2092   return false;
2093 }
2094 
2095 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2096    doing so both seems beneficial and ALLOW_REPLACEMENTS allows it.  Also set
2097    all sorts of access flags appropriately along the way, notably propagating
2098    grp_read, grp_write, grp_assignment_read, grp_assignment_write and
2099    grp_total_scalarization from PARENT down to ROOT.
2100 
2101    Creating a replacement for a scalar access is considered beneficial if its
2102    grp_hint is set (this means we are either attempting total scalarization or
2103    there is more than one direct read access) or according to the following
2104    table:
2105 
2106    Access written to through a scalar type (once or more times)
2107    |
2108    |	Written to in an assignment statement
2109    |	|
2110    |	|	Access read as scalar _once_
2111    |	|	|
2112    |   	|	|	Read in an assignment statement
2113    |	|	|	|
2114    |   	|	|	|	Scalarize	Comment
2115 -----------------------------------------------------------------------------
2116    0	0	0	0			No access for the scalar
2117    0	0	0	1			No access for the scalar
2118    0	0	1	0	No		Single read - won't help
2119    0	0	1	1	No		The same case
2120    0	1	0	0			No access for the scalar
2121    0	1	0	1			No access for the scalar
2122    0	1	1	0	Yes		s = *g; return s.i;
2123    0	1	1	1       Yes		The same case as above
2124    1	0	0	0	No		Won't help
2125    1	0	0	1	Yes		s.i = 1; *g = s;
2126    1	0	1	0	Yes		s.i = 5; g = s.i;
2127    1	0	1	1	Yes		The same case as above
2128    1	1	0	0	No		Won't help.
2129    1	1	0	1	Yes		s.i = 1; *g = s;
2130    1	1	1	0	Yes		s = *g; return s.i;
2131    1	1	1	1	Yes		Any of the above yeses  */
2132 
2133 static bool
2134 analyze_access_subtree (struct access *root, struct access *parent,
2135 			bool allow_replacements)
2136 {
2137   struct access *child;
2138   HOST_WIDE_INT limit = root->offset + root->size;
2139   HOST_WIDE_INT covered_to = root->offset;
2140   bool scalar = is_gimple_reg_type (root->type);
2141   bool hole = false, sth_created = false;
2142 
2143   if (parent)
2144     {
2145       if (parent->grp_read)
2146 	root->grp_read = 1;
2147       if (parent->grp_assignment_read)
2148 	root->grp_assignment_read = 1;
2149       if (parent->grp_write)
2150 	root->grp_write = 1;
2151       if (parent->grp_assignment_write)
2152 	root->grp_assignment_write = 1;
2153       if (parent->grp_total_scalarization)
2154 	root->grp_total_scalarization = 1;
2155     }
2156 
2157   if (root->grp_unscalarizable_region)
2158     allow_replacements = false;
2159 
2160   if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2161     allow_replacements = false;
2162 
2163   for (child = root->first_child; child; child = child->next_sibling)
2164     {
2165       hole |= covered_to < child->offset;
2166       sth_created |= analyze_access_subtree (child, root,
2167 					     allow_replacements && !scalar);
2168 
2169       root->grp_unscalarized_data |= child->grp_unscalarized_data;
2170       root->grp_total_scalarization &= child->grp_total_scalarization;
2171       if (child->grp_covered)
2172 	covered_to += child->size;
2173       else
2174 	hole = true;
2175     }
2176 
2177   if (allow_replacements && scalar && !root->first_child
2178       && (root->grp_hint
2179 	  || ((root->grp_scalar_read || root->grp_assignment_read)
2180 	      && (root->grp_scalar_write || root->grp_assignment_write))))
2181     {
2182       bool new_integer_type;
2183       /* Always create access replacements that cover the whole access.
2184          For integral types this means the precision has to match.
2185 	 Avoid assumptions based on the integral type kind, too.  */
2186       if (INTEGRAL_TYPE_P (root->type)
2187 	  && (TREE_CODE (root->type) != INTEGER_TYPE
2188 	      || TYPE_PRECISION (root->type) != root->size)
2189 	  /* But leave bitfield accesses alone.  */
2190 	  && (TREE_CODE (root->expr) != COMPONENT_REF
2191 	      || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2192 	{
2193 	  tree rt = root->type;
2194 	  gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2195 		      && (root->size % BITS_PER_UNIT) == 0);
2196 	  root->type = build_nonstandard_integer_type (root->size,
2197 						       TYPE_UNSIGNED (rt));
2198 	  root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2199 					     root->base, root->offset,
2200 					     root->type, NULL, false);
2201 	  new_integer_type = true;
2202 	}
2203       else
2204 	new_integer_type = false;
2205 
2206       if (dump_file && (dump_flags & TDF_DETAILS))
2207 	{
2208 	  fprintf (dump_file, "Marking ");
2209 	  print_generic_expr (dump_file, root->base, 0);
2210 	  fprintf (dump_file, " offset: %u, size: %u ",
2211 		   (unsigned) root->offset, (unsigned) root->size);
2212 	  fprintf (dump_file, " to be replaced%s.\n",
2213 		   new_integer_type ? " with an integer": "");
2214 	}
2215 
2216       root->grp_to_be_replaced = 1;
2217       sth_created = true;
2218       hole = false;
2219     }
2220   else
2221     {
2222       if (covered_to < limit)
2223 	hole = true;
2224       if (scalar)
2225 	root->grp_total_scalarization = 0;
2226     }
2227 
2228   if (sth_created
2229       && (!hole || root->grp_total_scalarization))
2230     {
2231       root->grp_covered = 1;
2232       return true;
2233     }
2234   if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2235     root->grp_unscalarized_data = 1; /* not covered and written to */
2236   if (sth_created)
2237     return true;
2238   return false;
2239 }
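
/* A small illustration of the build_nonstandard_integer_type path above,
   assuming an 8-bit _Bool:  in

     struct S { _Bool b; int i; } s;

   the access for s.b has size 8 but a BOOLEAN_TYPE of precision 1, so its
   replacement is given an unsigned 8-bit integer type instead and the
   expression is rebuilt with build_ref_for_offset, keeping the replacement
   covering every bit of the access.  Bit-field components are deliberately
   left alone, as the DECL_BIT_FIELD check shows.  */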
2240 
2241 /* Analyze all access trees linked by next_grp by the means of
2242    analyze_access_subtree.  */
2243 static bool
2244 analyze_access_trees (struct access *access)
2245 {
2246   bool ret = false;
2247 
2248   while (access)
2249     {
2250       if (analyze_access_subtree (access, NULL, true))
2251 	ret = true;
2252       access = access->next_grp;
2253     }
2254 
2255   return ret;
2256 }
2257 
2258 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
2259    size SIZE would conflict with an already existing one.  If exactly such a
2260    child already exists in LACC, store a pointer to it in EXACT_MATCH.  */
2261 
2262 static bool
2263 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2264 			      HOST_WIDE_INT size, struct access **exact_match)
2265 {
2266   struct access *child;
2267 
2268   for (child = lacc->first_child; child; child = child->next_sibling)
2269     {
2270       if (child->offset == norm_offset && child->size == size)
2271 	{
2272 	  *exact_match = child;
2273 	  return true;
2274 	}
2275 
2276       if (child->offset < norm_offset + size
2277 	  && child->offset + child->size > norm_offset)
2278 	return true;
2279     }
2280 
2281   return false;
2282 }
2283 
2284 /* Create a new child access of PARENT, with all properties just like MODEL
2285    except for its offset and with its grp_write true and grp_read false.
2286    Return the new access or NULL if it cannot be created.  Note that this access
2287    is created long after all splicing and sorting, it's not located in any
2288    access vector and is automatically a representative of its group.  */
2289 
2290 static struct access *
2291 create_artificial_child_access (struct access *parent, struct access *model,
2292 				HOST_WIDE_INT new_offset)
2293 {
2294   struct access *access;
2295   struct access **child;
2296   tree expr = parent->base;
2297 
2298   gcc_assert (!model->grp_unscalarizable_region);
2299 
2300   access = (struct access *) pool_alloc (access_pool);
2301   memset (access, 0, sizeof (struct access));
2302   if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2303 					   model->type))
2304     {
2305       access->grp_no_warning = true;
2306       expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2307 				  new_offset, model, NULL, false);
2308     }
2309 
2310   access->base = parent->base;
2311   access->expr = expr;
2312   access->offset = new_offset;
2313   access->size = model->size;
2314   access->type = model->type;
2315   access->grp_write = true;
2316   access->grp_read = false;
2317 
2318   child = &parent->first_child;
2319   while (*child && (*child)->offset < new_offset)
2320     child = &(*child)->next_sibling;
2321 
2322   access->next_sibling = *child;
2323   *child = access;
2324 
2325   return access;
2326 }
2327 
2328 
2329 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2330    true if any new subaccess was created.  Additionally, if RACC is a scalar
2331    access but LACC is not, change the type of the latter, if possible.  */
2332 
2333 static bool
2334 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2335 {
2336   struct access *rchild;
2337   HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2338   bool ret = false;
2339 
2340   if (is_gimple_reg_type (lacc->type)
2341       || lacc->grp_unscalarizable_region
2342       || racc->grp_unscalarizable_region)
2343     return false;
2344 
2345   if (is_gimple_reg_type (racc->type))
2346     {
2347       if (!lacc->first_child && !racc->first_child)
2348 	{
2349 	  tree t = lacc->base;
2350 
2351 	  lacc->type = racc->type;
2352 	  if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2353 						  lacc->offset, racc->type))
2354 	    lacc->expr = t;
2355 	  else
2356 	    {
2357 	      lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2358 						lacc->base, lacc->offset,
2359 						racc, NULL, false);
2360 	      lacc->grp_no_warning = true;
2361 	    }
2362 	}
2363       return false;
2364     }
2365 
2366   for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2367     {
2368       struct access *new_acc = NULL;
2369       HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2370 
2371       if (rchild->grp_unscalarizable_region)
2372 	continue;
2373 
2374       if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2375 					&new_acc))
2376 	{
2377 	  if (new_acc)
2378 	    {
2379 	      rchild->grp_hint = 1;
2380 	      new_acc->grp_hint |= new_acc->grp_read;
2381 	      if (rchild->first_child)
2382 		ret |= propagate_subaccesses_across_link (new_acc, rchild);
2383 	    }
2384 	  continue;
2385 	}
2386 
2387       rchild->grp_hint = 1;
2388       new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2389       if (new_acc)
2390 	{
2391 	  ret = true;
2392 	  if (racc->first_child)
2393 	    propagate_subaccesses_across_link (new_acc, rchild);
2394 	}
2395     }
2396 
2397   return ret;
2398 }
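
/* An invented example of the propagation, assuming 32-bit ints:  given two
   candidate aggregates

     struct S { int i; int j; } a, b;
     ...
     a = b;
     ... = b.i;

   the assignment links the access of A (left) with the access of B
   (right).  B has a child access <0,32> from the read of b.i; A, written
   only as a whole, has none.  Propagation creates a matching artificial
   child on A, so that the aggregate copy can later be carried out
   piecewise between scalar replacements instead of going through memory.  */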
2399 
2400 /* Propagate all subaccesses across assignment links.  */
2401 
2402 static void
2403 propagate_all_subaccesses (void)
2404 {
2405   while (work_queue_head)
2406     {
2407       struct access *racc = pop_access_from_work_queue ();
2408       struct assign_link *link;
2409 
2410       gcc_assert (racc->first_link);
2411 
2412       for (link = racc->first_link; link; link = link->next)
2413 	{
2414 	  struct access *lacc = link->lacc;
2415 
2416 	  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2417 	    continue;
2418 	  lacc = lacc->group_representative;
2419 	  if (propagate_subaccesses_across_link (lacc, racc)
2420 	      && lacc->first_link)
2421 	    add_access_to_work_queue (lacc);
2422 	}
2423     }
2424 }
2425 
2426 /* Go through all accesses collected throughout the (intraprocedural) analysis
2427    stage, exclude overlapping ones, identify representatives and build trees
2428    out of them, making decisions about scalarization on the way.  Return true
2429    iff there are any to-be-scalarized variables after this stage. */
2430 
2431 static bool
2432 analyze_all_variable_accesses (void)
2433 {
2434   int res = 0;
2435   bitmap tmp = BITMAP_ALLOC (NULL);
2436   bitmap_iterator bi;
2437   unsigned i, max_total_scalarization_size;
2438 
2439   max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2440     * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2441 
2442   EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2443     if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2444 	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2445       {
2446 	tree var = referenced_var (i);
2447 
2448 	if (TREE_CODE (var) == VAR_DECL
2449 	    && type_consists_of_records_p (TREE_TYPE (var)))
2450 	  {
2451 	    if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2452 		<= max_total_scalarization_size)
2453 	      {
2454 		completely_scalarize_var (var);
2455 		if (dump_file && (dump_flags & TDF_DETAILS))
2456 		  {
2457 		    fprintf (dump_file, "Will attempt to totally scalarize ");
2458 		    print_generic_expr (dump_file, var, 0);
2459 		    fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2460 		  }
2461 	      }
2462 	    else if (dump_file && (dump_flags & TDF_DETAILS))
2463 	      {
2464 		fprintf (dump_file, "Too big to totally scalarize: ");
2465 		print_generic_expr (dump_file, var, 0);
2466 		fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2467 	      }
2468 	  }
2469       }
2470 
2471   bitmap_copy (tmp, candidate_bitmap);
2472   EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2473     {
2474       tree var = referenced_var (i);
2475       struct access *access;
2476 
2477       access = sort_and_splice_var_accesses (var);
2478       if (!access || !build_access_trees (access))
2479 	disqualify_candidate (var,
2480 			      "No or inhibitingly overlapping accesses.");
2481     }
2482 
2483   propagate_all_subaccesses ();
2484 
2485   bitmap_copy (tmp, candidate_bitmap);
2486   EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2487     {
2488       tree var = referenced_var (i);
2489       struct access *access = get_first_repr_for_decl (var);
2490 
2491       if (analyze_access_trees (access))
2492 	{
2493 	  res++;
2494 	  if (dump_file && (dump_flags & TDF_DETAILS))
2495 	    {
2496 	      fprintf (dump_file, "\nAccess trees for ");
2497 	      print_generic_expr (dump_file, var, 0);
2498 	      fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2499 	      dump_access_tree (dump_file, access);
2500 	      fprintf (dump_file, "\n");
2501 	    }
2502 	}
2503       else
2504 	disqualify_candidate (var, "No scalar replacements to be created.");
2505     }
2506 
2507   BITMAP_FREE (tmp);
2508 
2509   if (res)
2510     {
2511       statistics_counter_event (cfun, "Scalarized aggregates", res);
2512       return true;
2513     }
2514   else
2515     return false;
2516 }
2517 
2518 /* Generate statements copying scalar replacements of accesses within a subtree
2519    into or out of AGG.  ACCESS, all its children, siblings and their children
2520    are to be processed.  AGG is an aggregate type expression (can be a
2521    declaration but does not have to be, it can for example also be a mem_ref or
2522    a series of handled components).  TOP_OFFSET is the offset of the processed
2523    subtree which has to be subtracted from offsets of individual accesses to
2524    get corresponding offsets for AGG.  If CHUNK_SIZE is non-zero, copy only
2525    replacements in the interval <start_offset, start_offset + chunk_size>,
2526    otherwise copy all.  GSI is a statement iterator used to place the new
2527    statements.  WRITE should be true when the statements should write from AGG
2528    to the replacement and false if vice versa.  If INSERT_AFTER is true, new
2529    statements will be added after the current statement in GSI, they will be
2530    added before the statement otherwise.  */
2531 
2532 static void
2533 generate_subtree_copies (struct access *access, tree agg,
2534 			 HOST_WIDE_INT top_offset,
2535 			 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2536 			 gimple_stmt_iterator *gsi, bool write,
2537 			 bool insert_after, location_t loc)
2538 {
2539   do
2540     {
2541       if (chunk_size && access->offset >= start_offset + chunk_size)
2542 	return;
2543 
2544       if (access->grp_to_be_replaced
2545 	  && (chunk_size == 0
2546 	      || access->offset + access->size > start_offset))
2547 	{
2548 	  tree expr, repl = get_access_replacement (access);
2549 	  gimple stmt;
2550 
2551 	  expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2552 				      access, gsi, insert_after);
2553 
2554 	  if (write)
2555 	    {
2556 	      if (access->grp_partial_lhs)
2557 		expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2558 						 !insert_after,
2559 						 insert_after ? GSI_NEW_STMT
2560 						 : GSI_SAME_STMT);
2561 	      stmt = gimple_build_assign (repl, expr);
2562 	    }
2563 	  else
2564 	    {
2565 	      TREE_NO_WARNING (repl) = 1;
2566 	      if (access->grp_partial_lhs)
2567 		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2568 						 !insert_after,
2569 						 insert_after ? GSI_NEW_STMT
2570 						 : GSI_SAME_STMT);
2571 	      stmt = gimple_build_assign (expr, repl);
2572 	    }
2573 	  gimple_set_location (stmt, loc);
2574 
2575 	  if (insert_after)
2576 	    gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2577 	  else
2578 	    gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2579 	  update_stmt (stmt);
2580 	  sra_stats.subtree_copies++;
2581 	}
2582 
2583       if (access->first_child)
2584 	generate_subtree_copies (access->first_child, agg, top_offset,
2585 				 start_offset, chunk_size, gsi,
2586 				 write, insert_after, loc);
2587 
2588       access = access->next_sibling;
2589     }
2590   while (access);
2591 }
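
/* An invented sketch of a typical use; the struct, the call and the SR_*
   names are made up.  If s has replacements for s.i and s.f but a
   statement still needs the aggregate as a whole,

     struct S { int i; float f; } s;
     ...
     foo (s);

   sra_modify_expr calls this function with WRITE false and the aggregate
   is refreshed right before the call, roughly

     s.i = SR_i;
     s.f = SR_f;
     foo (s);

   With WRITE true the copies run in the opposite direction, reloading the
   replacements from AGG, as is done e.g. for call results and asm
   outputs.  */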
2592 
2593 /* Assign zero to all scalar replacements in an access subtree.  ACCESS is the
2594    root of the subtree to be processed.  GSI is the statement iterator used
2595    for inserting statements which are added after the current statement if
2596    INSERT_AFTER is true or before it otherwise.  */
2597 
2598 static void
2599 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2600 			bool insert_after, location_t loc)
2601 
2602 {
2603   struct access *child;
2604 
2605   if (access->grp_to_be_replaced)
2606     {
2607       gimple stmt;
2608 
2609       stmt = gimple_build_assign (get_access_replacement (access),
2610 				  build_zero_cst (access->type));
2611       if (insert_after)
2612 	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2613       else
2614 	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2615       update_stmt (stmt);
2616       gimple_set_location (stmt, loc);
2617     }
2618 
2619   for (child = access->first_child; child; child = child->next_sibling)
2620     init_subtree_with_zero (child, gsi, insert_after, loc);
2621 }
2622 
2623 /* Search for an access representative for the given expression EXPR and
2624    return it or NULL if it cannot be found.  */
2625 
2626 static struct access *
2627 get_access_for_expr (tree expr)
2628 {
2629   HOST_WIDE_INT offset, size, max_size;
2630   tree base;
2631 
2632   /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2633      a different size than the size of its argument and we need the latter
2634      one.  */
2635   if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2636     expr = TREE_OPERAND (expr, 0);
2637 
2638   base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2639   if (max_size == -1 || !DECL_P (base))
2640     return NULL;
2641 
2642   if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2643     return NULL;
2644 
2645   return get_var_base_offset_size_access (base, offset, max_size);
2646 }
2647 
2648 /* Replace the expression EXPR with a scalar replacement if there is one and
2649    generate other statements to do type conversion or subtree copying if
2650    necessary.  GSI is used to place newly created statements, WRITE is true if
2651    the expression is being written to (it is on a LHS of a statement or output
2652    in an assembly statement).  */
2653 
2654 static bool
2655 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2656 {
2657   location_t loc;
2658   struct access *access;
2659   tree type, bfr;
2660 
2661   if (TREE_CODE (*expr) == BIT_FIELD_REF)
2662     {
2663       bfr = *expr;
2664       expr = &TREE_OPERAND (*expr, 0);
2665     }
2666   else
2667     bfr = NULL_TREE;
2668 
2669   if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2670     expr = &TREE_OPERAND (*expr, 0);
2671   access = get_access_for_expr (*expr);
2672   if (!access)
2673     return false;
2674   type = TREE_TYPE (*expr);
2675 
2676   loc = gimple_location (gsi_stmt (*gsi));
2677   if (access->grp_to_be_replaced)
2678     {
2679       tree repl = get_access_replacement (access);
2680       /* If we replace a non-register typed access simply use the original
2681          access expression to extract the scalar component afterwards.
2682 	 This happens if scalarizing a function return value or parameter
2683 	 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2684 	 gcc.c-torture/compile/20011217-1.c.
2685 
2686          We also want to use this when accessing a complex or vector which can
2687          be accessed as a different type too, potentially creating a need for
2688          type conversion (see PR42196) and when scalarized unions are involved
2689          in assembler statements (see PR42398).  */
2690       if (!useless_type_conversion_p (type, access->type))
2691 	{
2692 	  tree ref;
2693 
2694 	  ref = build_ref_for_model (loc, access->base, access->offset, access,
2695 				     NULL, false);
2696 
2697 	  if (write)
2698 	    {
2699 	      gimple stmt;
2700 
2701 	      if (access->grp_partial_lhs)
2702 		ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2703 						 false, GSI_NEW_STMT);
2704 	      stmt = gimple_build_assign (repl, ref);
2705 	      gimple_set_location (stmt, loc);
2706 	      gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2707 	    }
2708 	  else
2709 	    {
2710 	      gimple stmt;
2711 
2712 	      if (access->grp_partial_lhs)
2713 		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2714 						 true, GSI_SAME_STMT);
2715 	      stmt = gimple_build_assign (ref, repl);
2716 	      gimple_set_location (stmt, loc);
2717 	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2718 	    }
2719 	}
2720       else
2721 	*expr = repl;
2722       sra_stats.exprs++;
2723     }
2724 
2725   if (access->first_child)
2726     {
2727       HOST_WIDE_INT start_offset, chunk_size;
2728       if (bfr
2729 	  && host_integerp (TREE_OPERAND (bfr, 1), 1)
2730 	  && host_integerp (TREE_OPERAND (bfr, 2), 1))
2731 	{
2732 	  chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2733 	  start_offset = access->offset
2734 	    + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2735 	}
2736       else
2737 	start_offset = chunk_size = 0;
2738 
2739       generate_subtree_copies (access->first_child, access->base, 0,
2740 			       start_offset, chunk_size, gsi, write, write,
2741 			       loc);
2742     }
2743   return true;
2744 }
2745 
2746 /* Where scalar replacements of the RHS have been written to when a replacement
2747    of the LHS of an assignment cannot be directly loaded from a replacement of
2748    the RHS. */
2749 enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far. */
2750 				  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2751 				  SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2752 
2753 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2754    base aggregate if there are unscalarized data or directly to the LHS of the
2755    statement that is pointed to by GSI otherwise.  */
2756 
2757 static enum unscalarized_data_handling
2758 handle_unscalarized_data_in_subtree (struct access *top_racc,
2759 				     gimple_stmt_iterator *gsi)
2760 {
2761   if (top_racc->grp_unscalarized_data)
2762     {
2763       generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2764 			       gsi, false, false,
2765 			       gimple_location (gsi_stmt (*gsi)));
2766       return SRA_UDH_RIGHT;
2767     }
2768   else
2769     {
2770       tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2771       generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2772 			       0, 0, gsi, false, false,
2773 			       gimple_location (gsi_stmt (*gsi)));
2774       return SRA_UDH_LEFT;
2775     }
2776 }
2777 
2778 
2779 /* Try to generate statements to load all sub-replacements in an access subtree
2780    formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2781    If that is not possible, refresh the TOP_RACC base aggregate and load the
2782    accesses from it.  LEFT_OFFSET is the offset of the left whole subtree being
2783    copied. NEW_GSI is stmt iterator used for statement insertions after the
2784    original assignment, OLD_GSI is used to insert statements before the
2785    assignment.  *REFRESHED keeps the information whether we have needed to
2786    refresh replacements of the LHS and from which side of the assignments this
2787    takes place.  */
2788 
2789 static void
2790 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2791 				 HOST_WIDE_INT left_offset,
2792 				 gimple_stmt_iterator *old_gsi,
2793 				 gimple_stmt_iterator *new_gsi,
2794 				 enum unscalarized_data_handling *refreshed)
2795 {
2796   location_t loc = gimple_location (gsi_stmt (*old_gsi));
2797   for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2798     {
2799       if (lacc->grp_to_be_replaced)
2800 	{
2801 	  struct access *racc;
2802 	  HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2803 	  gimple stmt;
2804 	  tree rhs;
2805 
2806 	  racc = find_access_in_subtree (top_racc, offset, lacc->size);
2807 	  if (racc && racc->grp_to_be_replaced)
2808 	    {
2809 	      rhs = get_access_replacement (racc);
2810 	      if (!useless_type_conversion_p (lacc->type, racc->type))
2811 		rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2812 
2813 	      if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2814 		rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
2815 						true, GSI_SAME_STMT);
2816 	    }
2817 	  else
2818 	    {
2819 	      /* No suitable access on the right hand side, need to load from
2820 		 the aggregate.  See if we have to update it first... */
2821 	      if (*refreshed == SRA_UDH_NONE)
2822 		*refreshed = handle_unscalarized_data_in_subtree (top_racc,
2823 								  old_gsi);
2824 
2825 	      if (*refreshed == SRA_UDH_LEFT)
2826 		rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2827 					    new_gsi, true);
2828 	      else
2829 		rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2830 					    new_gsi, true);
2831 	      if (lacc->grp_partial_lhs)
2832 		rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
2833 						false, GSI_NEW_STMT);
2834 	    }
2835 
2836 	  stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2837 	  gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2838 	  gimple_set_location (stmt, loc);
2839 	  update_stmt (stmt);
2840 	  sra_stats.subreplacements++;
2841 	}
2842       else if (*refreshed == SRA_UDH_NONE
2843 	       && lacc->grp_read && !lacc->grp_covered)
2844 	*refreshed = handle_unscalarized_data_in_subtree (top_racc,
2845 							  old_gsi);
2846 
2847       if (lacc->first_child)
2848 	load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2849 					 old_gsi, new_gsi, refreshed);
2850     }
2851 }
2852 
2853 /* Result code for SRA assignment modification.  */
2854 enum assignment_mod_result { SRA_AM_NONE,       /* nothing done for the stmt */
2855 			     SRA_AM_MODIFIED,  /* stmt changed but not
2856 						  removed */
2857 			     SRA_AM_REMOVED };  /* stmt eliminated */
2858 
2859 /* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
2860    to the assignment and GSI is the statement iterator pointing at it.  Returns
2861    the same values as sra_modify_assign.  */
2862 
2863 static enum assignment_mod_result
2864 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2865 {
2866   tree lhs = gimple_assign_lhs (*stmt);
2867   struct access *acc;
2868   location_t loc;
2869 
2870   acc = get_access_for_expr (lhs);
2871   if (!acc)
2872     return SRA_AM_NONE;
2873 
2874   if (gimple_clobber_p (*stmt))
2875     {
2876       /* Remove clobbers of fully scalarized variables, otherwise
2877 	 do nothing.  */
2878       if (acc->grp_covered)
2879 	{
2880 	  unlink_stmt_vdef (*stmt);
2881 	  gsi_remove (gsi, true);
2882 	  return SRA_AM_REMOVED;
2883 	}
2884       else
2885 	return SRA_AM_NONE;
2886     }
2887 
2888   loc = gimple_location (*stmt);
2889   if (VEC_length (constructor_elt,
2890 		  CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2891     {
2892       /* I have never seen this code path trigger but if it does happen the
2893 	 following should handle it gracefully.  */
2894       if (access_has_children_p (acc))
2895 	generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2896 				 true, true, loc);
2897       return SRA_AM_MODIFIED;
2898     }
2899 
2900   if (acc->grp_covered)
2901     {
2902       init_subtree_with_zero (acc, gsi, false, loc);
2903       unlink_stmt_vdef (*stmt);
2904       gsi_remove (gsi, true);
2905       return SRA_AM_REMOVED;
2906     }
2907   else
2908     {
2909       init_subtree_with_zero (acc, gsi, true, loc);
2910       return SRA_AM_MODIFIED;
2911     }
2912 }
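
/* Sketch of the common case handled above (types invented, SR_* standing
   for whatever replacement decls were created):  for

     struct S { int i; float f; } s;
     s = {};

   with S fully covered by replacements, the empty-CONSTRUCTOR assignment
   becomes

     SR_i = 0;
     SR_f = 0.0;

   and the original statement is removed (SRA_AM_REMOVED).  If parts of S
   are not scalarized, the zero-initializations are emitted after the
   statement and the statement itself is kept (SRA_AM_MODIFIED).  */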
2913 
2914 /* Create and return a new suitable default definition SSA_NAME for RACC which
2915    is an access describing an uninitialized part of an aggregate that is being
2916    loaded.  */
2917 
2918 static tree
2919 get_repl_default_def_ssa_name (struct access *racc)
2920 {
2921   tree repl, decl;
2922 
2923   decl = get_unrenamed_access_replacement (racc);
2924 
2925   repl = gimple_default_def (cfun, decl);
2926   if (!repl)
2927     {
2928       repl = make_ssa_name (decl, gimple_build_nop ());
2929       set_default_def (decl, repl);
2930     }
2931 
2932   return repl;
2933 }
2934 
2935 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
2936    somewhere in it.  */
2937 
2938 static inline bool
2939 contains_bitfld_comp_ref_p (const_tree ref)
2940 {
2941   while (handled_component_p (ref))
2942     {
2943       if (TREE_CODE (ref) == COMPONENT_REF
2944           && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
2945         return true;
2946       ref = TREE_OPERAND (ref, 0);
2947     }
2948 
2949   return false;
2950 }
2951 
2952 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2953    bit-field field declaration somewhere in it.  */
2954 
2955 static inline bool
2956 contains_vce_or_bfcref_p (const_tree ref)
2957 {
2958   while (handled_component_p (ref))
2959     {
2960       if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
2961 	  || (TREE_CODE (ref) == COMPONENT_REF
2962 	      && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
2963 	return true;
2964       ref = TREE_OPERAND (ref, 0);
2965     }
2966 
2967   return false;
2968 }
2969 
2970 /* Examine both sides of the assignment statement pointed to by STMT, replace
2971    them with a scalar replacement if there is one and generate copying of
2972    replacements if scalarized aggregates have been used in the assignment.  GSI
2973    is used to hold generated statements for type conversions and subtree
2974    copying.  */
2975 
2976 static enum assignment_mod_result
2977 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2978 {
2979   struct access *lacc, *racc;
2980   tree lhs, rhs;
2981   bool modify_this_stmt = false;
2982   bool force_gimple_rhs = false;
2983   location_t loc;
2984   gimple_stmt_iterator orig_gsi = *gsi;
2985 
2986   if (!gimple_assign_single_p (*stmt))
2987     return SRA_AM_NONE;
2988   lhs = gimple_assign_lhs (*stmt);
2989   rhs = gimple_assign_rhs1 (*stmt);
2990 
2991   if (TREE_CODE (rhs) == CONSTRUCTOR)
2992     return sra_modify_constructor_assign (stmt, gsi);
2993 
2994   if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2995       || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2996       || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2997     {
2998       modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2999 					  gsi, false);
3000       modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
3001 					   gsi, true);
3002       return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3003     }
3004 
3005   lacc = get_access_for_expr (lhs);
3006   racc = get_access_for_expr (rhs);
3007   if (!lacc && !racc)
3008     return SRA_AM_NONE;
3009 
3010   loc = gimple_location (*stmt);
3011   if (lacc && lacc->grp_to_be_replaced)
3012     {
3013       lhs = get_access_replacement (lacc);
3014       gimple_assign_set_lhs (*stmt, lhs);
3015       modify_this_stmt = true;
3016       if (lacc->grp_partial_lhs)
3017 	force_gimple_rhs = true;
3018       sra_stats.exprs++;
3019     }
3020 
3021   if (racc && racc->grp_to_be_replaced)
3022     {
3023       rhs = get_access_replacement (racc);
3024       modify_this_stmt = true;
3025       if (racc->grp_partial_lhs)
3026 	force_gimple_rhs = true;
3027       sra_stats.exprs++;
3028     }
3029   else if (racc
3030 	   && !racc->grp_unscalarized_data
3031 	   && TREE_CODE (lhs) == SSA_NAME
3032 	   && !access_has_replacements_p (racc))
3033     {
3034       rhs = get_repl_default_def_ssa_name (racc);
3035       modify_this_stmt = true;
3036       sra_stats.exprs++;
3037     }
3038 
3039   if (modify_this_stmt)
3040     {
3041       if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3042 	{
3043 	  /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3044 	     ???  This should move to fold_stmt which we simply should
3045 	     call after building a VIEW_CONVERT_EXPR here.  */
3046 	  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3047 	      && !contains_bitfld_comp_ref_p (lhs))
3048 	    {
3049 	      lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3050 	      gimple_assign_set_lhs (*stmt, lhs);
3051 	    }
3052 	  else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3053 		   && !contains_vce_or_bfcref_p (rhs))
3054 	    rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3055 
3056 	  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3057 	    {
3058 	      rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3059 				     rhs);
3060 	      if (is_gimple_reg_type (TREE_TYPE (lhs))
3061 		  && TREE_CODE (lhs) != SSA_NAME)
3062 		force_gimple_rhs = true;
3063 	    }
3064 	}
3065     }
3066 
3067   /* From this point on, the function deals with assignments in between
3068      aggregates when at least one has scalar reductions of some of its
3069      components.  There are three possible scenarios: 1) both the LHS and RHS
3070      have to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3071 
3072      In the first case, we would like to load the LHS components from RHS
3073      components whenever possible.  If that is not possible, we would like to
3074      read it directly from the RHS (after updating it by storing in it its own
3075      components).  If there are some necessary unscalarized data in the LHS,
3076      those will be loaded by the original assignment too.  If neither of these
3077      cases happen, the original statement can be removed.  Most of this is done
3078      by load_assign_lhs_subreplacements.
3079 
3080      In the second case, we would like to store all RHS scalarized components
3081      directly into LHS and if they cover the aggregate completely, remove the
3082      statement too.  In the third case, we want the LHS components to be loaded
3083      directly from the RHS (DSE will remove the original statement if it
3084      becomes redundant).
3085 
3086      This is a bit complex but manageable when types match and when unions do
3087      not cause confusion in a way that we cannot really load a component of LHS
3088      from the RHS or vice versa (the access representing this level can have
3089      subaccesses that are accessible only through a different union field at a
3090      higher level - different from the one used in the examined expression).
3091      Unions are fun.
3092 
3093      Therefore, I specially handle a fourth case, happening when there is a
3094      specific type cast or it is impossible to locate a scalarized subaccess on
3095      the other side of the expression.  If that happens, I simply "refresh" the
3096      RHS by storing in it its scalarized components, leave the original statement
3097      there to do the copying and then load the scalar replacements of the LHS.
3098      This is what the first branch does.  */
3099 
3100   if (modify_this_stmt
3101       || gimple_has_volatile_ops (*stmt)
3102       || contains_vce_or_bfcref_p (rhs)
3103       || contains_vce_or_bfcref_p (lhs))
3104     {
3105       if (access_has_children_p (racc))
3106 	generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3107 				 gsi, false, false, loc);
3108       if (access_has_children_p (lacc))
3109 	generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
3110 				 gsi, true, true, loc);
3111       sra_stats.separate_lhs_rhs_handling++;
3112 
3113       /* This gimplification must be done after generate_subtree_copies,
3114 	 lest we insert the subtree copies in the middle of the gimplified
3115 	 sequence.  */
3116       if (force_gimple_rhs)
3117 	rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3118 					true, GSI_SAME_STMT);
3119       if (gimple_assign_rhs1 (*stmt) != rhs)
3120 	{
3121 	  modify_this_stmt = true;
3122 	  gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3123 	  gcc_assert (*stmt == gsi_stmt (orig_gsi));
3124 	}
3125 
3126       return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3127     }
3128   else
3129     {
3130       if (access_has_children_p (lacc)
3131 	  && access_has_children_p (racc)
3132 	  /* When an access represents an unscalarizable region, it usually
3133 	     represents accesses with variable offset and thus must not be used
3134 	     to generate new memory accesses.  */
3135 	  && !lacc->grp_unscalarizable_region
3136 	  && !racc->grp_unscalarizable_region)
3137 	{
3138 	  gimple_stmt_iterator orig_gsi = *gsi;
3139 	  enum unscalarized_data_handling refreshed;
3140 
3141 	  if (lacc->grp_read && !lacc->grp_covered)
3142 	    refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
3143 	  else
3144 	    refreshed = SRA_UDH_NONE;
3145 
3146 	  load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
3147 					   &orig_gsi, gsi, &refreshed);
3148 	  if (refreshed != SRA_UDH_RIGHT)
3149 	    {
3150 	      gsi_next (gsi);
3151 	      unlink_stmt_vdef (*stmt);
3152 	      gsi_remove (&orig_gsi, true);
3153 	      sra_stats.deleted++;
3154 	      return SRA_AM_REMOVED;
3155 	    }
3156 	}
3157       else
3158 	{
3159 	  if (access_has_children_p (racc)
3160 	      && !racc->grp_unscalarized_data)
3161 	    {
3162 	      if (dump_file)
3163 		{
3164 		  fprintf (dump_file, "Removing load: ");
3165 		  print_gimple_stmt (dump_file, *stmt, 0, 0);
3166 		}
3167 	      generate_subtree_copies (racc->first_child, lhs,
3168 				       racc->offset, 0, 0, gsi,
3169 				       false, false, loc);
3170 	      gcc_assert (*stmt == gsi_stmt (*gsi));
3171 	      unlink_stmt_vdef (*stmt);
3172 	      gsi_remove (gsi, true);
3173 	      sra_stats.deleted++;
3174 	      return SRA_AM_REMOVED;
3175 	    }
3176 	  /* Restore the aggregate RHS from its components so the
3177 	     prevailing aggregate copy does the right thing.  */
3178 	  if (access_has_children_p (racc))
3179 	    generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3180 				     gsi, false, false, loc);
3181 	  /* Re-load the components of the aggregate copy destination.
3182 	     But use the RHS aggregate to load from to expose more
3183 	     optimization opportunities.  */
3184 	  if (access_has_children_p (lacc))
3185 	    generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3186 				     0, 0, gsi, true, true, loc);
3187 	}
3188 
3189       return SRA_AM_NONE;
3190     }
3191 }
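
/* A sketch of the "Removing load: " path above; the types and SR_* names
   are invented.  Assume

     struct S { int i; int j; };
     struct S s;                   scalarized candidate
     struct S t;                   not a candidate (e.g. its address is taken)
     ...
     s.i = ...;  s.j = ...;
     t = s;

   Since the writes cover S completely it carries no unscalarized data, so
   the aggregate copy is deleted and replaced by stores of s's replacements
   into the corresponding parts of t, roughly

     t.i = SR_s_i;
     t.j = SR_s_j;

   If s did carry unscalarized data, the copy would instead be kept and
   merely preceded by statements flushing the replacements back into s.  */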
3192 
3193 /* Traverse the function body and perform all modifications as decided in
3194    analyze_all_variable_accesses.  Return true iff the CFG has been
3195    changed.  */
3196 
3197 static bool
3198 sra_modify_function_body (void)
3199 {
3200   bool cfg_changed = false;
3201   basic_block bb;
3202 
3203   FOR_EACH_BB (bb)
3204     {
3205       gimple_stmt_iterator gsi = gsi_start_bb (bb);
3206       while (!gsi_end_p (gsi))
3207 	{
3208 	  gimple stmt = gsi_stmt (gsi);
3209 	  enum assignment_mod_result assign_result;
3210 	  bool modified = false, deleted = false;
3211 	  tree *t;
3212 	  unsigned i;
3213 
3214 	  switch (gimple_code (stmt))
3215 	    {
3216 	    case GIMPLE_RETURN:
3217 	      t = gimple_return_retval_ptr (stmt);
3218 	      if (*t != NULL_TREE)
3219 		modified |= sra_modify_expr (t, &gsi, false);
3220 	      break;
3221 
3222 	    case GIMPLE_ASSIGN:
3223 	      assign_result = sra_modify_assign (&stmt, &gsi);
3224 	      modified |= assign_result == SRA_AM_MODIFIED;
3225 	      deleted = assign_result == SRA_AM_REMOVED;
3226 	      break;
3227 
3228 	    case GIMPLE_CALL:
3229 	      /* Operands must be processed before the lhs.  */
3230 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
3231 		{
3232 		  t = gimple_call_arg_ptr (stmt, i);
3233 		  modified |= sra_modify_expr (t, &gsi, false);
3234 		}
3235 
3236 	      if (gimple_call_lhs (stmt))
3237 		{
3238 		  t = gimple_call_lhs_ptr (stmt);
3239 		  modified |= sra_modify_expr (t, &gsi, true);
3240 		}
3241 	      break;
3242 
3243 	    case GIMPLE_ASM:
3244 	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3245 		{
3246 		  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3247 		  modified |= sra_modify_expr (t, &gsi, false);
3248 		}
3249 	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3250 		{
3251 		  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3252 		  modified |= sra_modify_expr (t, &gsi, true);
3253 		}
3254 	      break;
3255 
3256 	    default:
3257 	      break;
3258 	    }
3259 
3260 	  if (modified)
3261 	    {
3262 	      update_stmt (stmt);
3263 	      if (maybe_clean_eh_stmt (stmt)
3264 		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3265 		cfg_changed = true;
3266 	    }
3267 	  if (!deleted)
3268 	    gsi_next (&gsi);
3269 	}
3270     }
3271 
3272   return cfg_changed;
3273 }
3274 
3275 /* Generate statements initializing scalar replacements of parts of function
3276    parameters.  */
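
/* For illustration (a hypothetical example, not taken from this pass's
   sources or testsuite): for a parameter

     struct S { int a; int b; } s;

   whose fields both get scalar replacements, the statements emitted on the
   single successor edge of the entry block would look roughly like

     s$a_1 = s.a;
     s$b_2 = s.b;

   so that the replacements start out holding the values passed by the
   caller.  */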
3277 
3278 static void
3279 initialize_parameter_reductions (void)
3280 {
3281   gimple_stmt_iterator gsi;
3282   gimple_seq seq = NULL;
3283   tree parm;
3284 
3285   for (parm = DECL_ARGUMENTS (current_function_decl);
3286        parm;
3287        parm = DECL_CHAIN (parm))
3288     {
3289       VEC (access_p, heap) *access_vec;
3290       struct access *access;
3291 
3292       if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3293 	continue;
3294       access_vec = get_base_access_vector (parm);
3295       if (!access_vec)
3296 	continue;
3297 
3298       if (!seq)
3299 	{
3300 	  seq = gimple_seq_alloc ();
3301 	  gsi = gsi_start (seq);
3302 	}
3303 
3304       for (access = VEC_index (access_p, access_vec, 0);
3305 	   access;
3306 	   access = access->next_grp)
3307 	generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3308 				 EXPR_LOCATION (parm));
3309     }
3310 
3311   if (seq)
3312     gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
3313 }
3314 
3315 /* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
3316    it reveals there are components of some aggregates to be scalarized, it runs
3317    the required transformations.  */
3318 static unsigned int
3319 perform_intra_sra (void)
3320 {
3321   int ret = 0;
3322   sra_initialize ();
3323 
3324   if (!find_var_candidates ())
3325     goto out;
3326 
3327   if (!scan_function ())
3328     goto out;
3329 
3330   if (!analyze_all_variable_accesses ())
3331     goto out;
3332 
3333   if (sra_modify_function_body ())
3334     ret = TODO_update_ssa | TODO_cleanup_cfg;
3335   else
3336     ret = TODO_update_ssa;
3337   initialize_parameter_reductions ();
3338 
3339   statistics_counter_event (cfun, "Scalar replacements created",
3340 			    sra_stats.replacements);
3341   statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3342   statistics_counter_event (cfun, "Subtree copy stmts",
3343 			    sra_stats.subtree_copies);
3344   statistics_counter_event (cfun, "Subreplacement stmts",
3345 			    sra_stats.subreplacements);
3346   statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3347   statistics_counter_event (cfun, "Separate LHS and RHS handling",
3348 			    sra_stats.separate_lhs_rhs_handling);
3349 
3350  out:
3351   sra_deinitialize ();
3352   return ret;
3353 }
3354 
3355 /* Perform early intraprocedural SRA.  */
3356 static unsigned int
3357 early_intra_sra (void)
3358 {
3359   sra_mode = SRA_MODE_EARLY_INTRA;
3360   return perform_intra_sra ();
3361 }
3362 
3363 /* Perform "late" intraprocedural SRA.  */
3364 static unsigned int
3365 late_intra_sra (void)
3366 {
3367   sra_mode = SRA_MODE_INTRA;
3368   return perform_intra_sra ();
3369 }
3370 
3371 
3372 static bool
3373 gate_intra_sra (void)
3374 {
3375   return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3376 }
3377 
3378 
3379 struct gimple_opt_pass pass_sra_early =
3380 {
3381  {
3382   GIMPLE_PASS,
3383   "esra",	 			/* name */
3384   gate_intra_sra,			/* gate */
3385   early_intra_sra,			/* execute */
3386   NULL,					/* sub */
3387   NULL,					/* next */
3388   0,					/* static_pass_number */
3389   TV_TREE_SRA,				/* tv_id */
3390   PROP_cfg | PROP_ssa,                  /* properties_required */
3391   0,					/* properties_provided */
3392   0,					/* properties_destroyed */
3393   0,					/* todo_flags_start */
3394   TODO_update_ssa
3395   | TODO_ggc_collect
3396   | TODO_verify_ssa			/* todo_flags_finish */
3397  }
3398 };
3399 
3400 struct gimple_opt_pass pass_sra =
3401 {
3402  {
3403   GIMPLE_PASS,
3404   "sra",	 			/* name */
3405   gate_intra_sra,			/* gate */
3406   late_intra_sra,			/* execute */
3407   NULL,					/* sub */
3408   NULL,					/* next */
3409   0,					/* static_pass_number */
3410   TV_TREE_SRA,				/* tv_id */
3411   PROP_cfg | PROP_ssa,                  /* properties_required */
3412   0,					/* properties_provided */
3413   0,					/* properties_destroyed */
3414   TODO_update_address_taken,		/* todo_flags_start */
3415   TODO_update_ssa
3416   | TODO_ggc_collect
3417   | TODO_verify_ssa			/* todo_flags_finish */
3418  }
3419 };
3420 
3421 
3422 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3423    parameter.  */
3424 
3425 static bool
3426 is_unused_scalar_param (tree parm)
3427 {
3428   tree name;
3429   return (is_gimple_reg (parm)
3430 	  && (!(name = gimple_default_def (cfun, parm))
3431 	      || has_zero_uses (name)));
3432 }
3433 
3434 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3435    examine whether there are any direct or otherwise infeasible ones.  If so,
3436    return true, otherwise return false.  PARM must be a gimple register with a
3437    non-NULL default definition.  */
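
/* An illustrative sketch (hypothetical source, not from the testsuite): in

     int f (int *p) { return *p; }

   the only use of the default definition of P is a dereference of the
   expected type, so no direct uses are found and P can stay a candidate.
   In contrast, in

     int g (int *p) { record_ptr (p); return *p; }

   the call argument P is itself a use which is not a dereference, so this
   function reports a direct use (record_ptr is a made-up callee).  */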
3438 
3439 static bool
3440 ptr_parm_has_direct_uses (tree parm)
3441 {
3442   imm_use_iterator ui;
3443   gimple stmt;
3444   tree name = gimple_default_def (cfun, parm);
3445   bool ret = false;
3446 
3447   FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3448     {
3449       int uses_ok = 0;
3450       use_operand_p use_p;
3451 
3452       if (is_gimple_debug (stmt))
3453 	continue;
3454 
3455       /* Valid uses include dereferences on the lhs and the rhs.  */
3456       if (gimple_has_lhs (stmt))
3457 	{
3458 	  tree lhs = gimple_get_lhs (stmt);
3459 	  while (handled_component_p (lhs))
3460 	    lhs = TREE_OPERAND (lhs, 0);
3461 	  if (TREE_CODE (lhs) == MEM_REF
3462 	      && TREE_OPERAND (lhs, 0) == name
3463 	      && integer_zerop (TREE_OPERAND (lhs, 1))
3464 	      && types_compatible_p (TREE_TYPE (lhs),
3465 				     TREE_TYPE (TREE_TYPE (name)))
3466 	      && !TREE_THIS_VOLATILE (lhs))
3467 	    uses_ok++;
3468 	}
3469       if (gimple_assign_single_p (stmt))
3470 	{
3471 	  tree rhs = gimple_assign_rhs1 (stmt);
3472 	  while (handled_component_p (rhs))
3473 	    rhs = TREE_OPERAND (rhs, 0);
3474 	  if (TREE_CODE (rhs) == MEM_REF
3475 	      && TREE_OPERAND (rhs, 0) == name
3476 	      && integer_zerop (TREE_OPERAND (rhs, 1))
3477 	      && types_compatible_p (TREE_TYPE (rhs),
3478 				     TREE_TYPE (TREE_TYPE (name)))
3479 	      && !TREE_THIS_VOLATILE (rhs))
3480 	    uses_ok++;
3481 	}
3482       else if (is_gimple_call (stmt))
3483 	{
3484 	  unsigned i;
3485 	  for (i = 0; i < gimple_call_num_args (stmt); ++i)
3486 	    {
3487 	      tree arg = gimple_call_arg (stmt, i);
3488 	      while (handled_component_p (arg))
3489 		arg = TREE_OPERAND (arg, 0);
3490 	      if (TREE_CODE (arg) == MEM_REF
3491 		  && TREE_OPERAND (arg, 0) == name
3492 		  && integer_zerop (TREE_OPERAND (arg, 1))
3493 		  && types_compatible_p (TREE_TYPE (arg),
3494 					 TREE_TYPE (TREE_TYPE (name)))
3495 		  && !TREE_THIS_VOLATILE (arg))
3496 		uses_ok++;
3497 	    }
3498 	}
3499 
3500       /* If the number of valid uses does not match the number of
3501          uses in this stmt there is an unhandled use.  */
3502       FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3503 	--uses_ok;
3504 
3505       if (uses_ok != 0)
3506 	ret = true;
3507 
3508       if (ret)
3509 	BREAK_FROM_IMM_USE_STMT (ui);
3510     }
3511 
3512   return ret;
3513 }
3514 
3515 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3516    them in candidate_bitmap.  Note that these do not necessarily include
3517    parameters which are unused and thus can be removed.  Return true iff any
3518    such candidate has been found.  */
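
/* Roughly speaking (an informal summary of the checks below, not an exact
   specification): a parameter survives as a candidate if it is a
   non-volatile, non-addressable aggregate of known, non-zero and constant
   size, or a pointer to such data (or to a scalar) which is only ever
   dereferenced.  For example, a hypothetical "struct S s" parameter can
   pass, while "volatile struct S s" or a va_list parameter cannot.  */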
3519 
3520 static bool
3521 find_param_candidates (void)
3522 {
3523   tree parm;
3524   int count = 0;
3525   bool ret = false;
3526   const char *msg;
3527 
3528   for (parm = DECL_ARGUMENTS (current_function_decl);
3529        parm;
3530        parm = DECL_CHAIN (parm))
3531     {
3532       tree type = TREE_TYPE (parm);
3533 
3534       count++;
3535 
3536       if (TREE_THIS_VOLATILE (parm)
3537 	  || TREE_ADDRESSABLE (parm)
3538 	  || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3539 	continue;
3540 
3541       if (is_unused_scalar_param (parm))
3542 	{
3543 	  ret = true;
3544 	  continue;
3545 	}
3546 
3547       if (POINTER_TYPE_P (type))
3548 	{
3549 	  type = TREE_TYPE (type);
3550 
3551 	  if (TREE_CODE (type) == FUNCTION_TYPE
3552 	      || TYPE_VOLATILE (type)
3553 	      || (TREE_CODE (type) == ARRAY_TYPE
3554 		  && TYPE_NONALIASED_COMPONENT (type))
3555 	      || !is_gimple_reg (parm)
3556 	      || is_va_list_type (type)
3557 	      || ptr_parm_has_direct_uses (parm))
3558 	    continue;
3559 	}
3560       else if (!AGGREGATE_TYPE_P (type))
3561 	continue;
3562 
3563       if (!COMPLETE_TYPE_P (type)
3564 	  || !host_integerp (TYPE_SIZE (type), 1)
3565           || tree_low_cst (TYPE_SIZE (type), 1) == 0
3566 	  || (AGGREGATE_TYPE_P (type)
3567 	      && type_internals_preclude_sra_p (type, &msg)))
3568 	continue;
3569 
3570       bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3571       ret = true;
3572       if (dump_file && (dump_flags & TDF_DETAILS))
3573 	{
3574 	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3575 	  print_generic_expr (dump_file, parm, 0);
3576 	  fprintf (dump_file, "\n");
3577 	}
3578     }
3579 
3580   func_param_count = count;
3581   return ret;
3582 }
3583 
3584 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3585    maybe_modified. */
3586 
3587 static bool
3588 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3589 		     void *data)
3590 {
3591   struct access *repr = (struct access *) data;
3592 
3593   repr->grp_maybe_modified = 1;
3594   return true;
3595 }
3596 
3597 /* Analyze what representatives (in linked lists accessible from
3598    REPRESENTATIVES) can be modified by side effects of statements in the
3599    current function.  */
3600 
3601 static void
3602 analyze_modified_params (VEC (access_p, heap) *representatives)
3603 {
3604   int i;
3605 
3606   for (i = 0; i < func_param_count; i++)
3607     {
3608       struct access *repr;
3609 
3610       for (repr = VEC_index (access_p, representatives, i);
3611 	   repr;
3612 	   repr = repr->next_grp)
3613 	{
3614 	  struct access *access;
3615 	  bitmap visited;
3616 	  ao_ref ar;
3617 
3618 	  if (no_accesses_p (repr))
3619 	    continue;
3620 	  if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3621 	      || repr->grp_maybe_modified)
3622 	    continue;
3623 
3624 	  ao_ref_init (&ar, repr->expr);
3625 	  visited = BITMAP_ALLOC (NULL);
3626 	  for (access = repr; access; access = access->next_sibling)
3627 	    {
3628 	      /* All accesses are read ones, otherwise grp_maybe_modified would
3629 		 be trivially set.  */
3630 	      walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3631 				  mark_maybe_modified, repr, &visited);
3632 	      if (repr->grp_maybe_modified)
3633 		break;
3634 	    }
3635 	  BITMAP_FREE (visited);
3636 	}
3637     }
3638 }
3639 
3640 /* Propagate distances in bb_dereferences in the direction opposite to that
3641    of the control flow edges, in each step storing the maximum of the current
3642    value and the minimum over all successors.  These steps are repeated until
3643    the table stabilizes.  Note that BBs which might terminate the function
3644    (according to the final_bbs bitmap) are never updated in this way.  */
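
/* A small worked example (numbers are purely illustrative): suppose a
   pointer parameter is dereferenced up to offset + size 32 in both
   successors of a block B and only up to 0 in B itself.  The step for B
   takes the minimum over the successors, min (32, 32) = 32, and stores
   max (0, 32) = 32, crediting B with a dereference that certainly happens
   on every path leaving it.  Had one successor only reached 8, the minimum
   and thus the propagated value would have been 8.  */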
3645 
3646 static void
3647 propagate_dereference_distances (void)
3648 {
3649   VEC (basic_block, heap) *queue;
3650   basic_block bb;
3651 
3652   queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3653   VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3654   FOR_EACH_BB (bb)
3655     {
3656       VEC_quick_push (basic_block, queue, bb);
3657       bb->aux = bb;
3658     }
3659 
3660   while (!VEC_empty (basic_block, queue))
3661     {
3662       edge_iterator ei;
3663       edge e;
3664       bool change = false;
3665       int i;
3666 
3667       bb = VEC_pop (basic_block, queue);
3668       bb->aux = NULL;
3669 
3670       if (bitmap_bit_p (final_bbs, bb->index))
3671 	continue;
3672 
3673       for (i = 0; i < func_param_count; i++)
3674 	{
3675 	  int idx = bb->index * func_param_count + i;
3676 	  bool first = true;
3677 	  HOST_WIDE_INT inh = 0;
3678 
3679 	  FOR_EACH_EDGE (e, ei, bb->succs)
3680 	  {
3681 	    int succ_idx = e->dest->index * func_param_count + i;
3682 
3683 	    if (e->src == EXIT_BLOCK_PTR)
3684 	      continue;
3685 
3686 	    if (first)
3687 	      {
3688 		first = false;
3689 		inh = bb_dereferences [succ_idx];
3690 	      }
3691 	    else if (bb_dereferences [succ_idx] < inh)
3692 	      inh = bb_dereferences [succ_idx];
3693 	  }
3694 
3695 	  if (!first && bb_dereferences[idx] < inh)
3696 	    {
3697 	      bb_dereferences[idx] = inh;
3698 	      change = true;
3699 	    }
3700 	}
3701 
3702       if (change && !bitmap_bit_p (final_bbs, bb->index))
3703 	FOR_EACH_EDGE (e, ei, bb->preds)
3704 	  {
3705 	    if (e->src->aux)
3706 	      continue;
3707 
3708 	    e->src->aux = e->src;
3709 	    VEC_quick_push (basic_block, queue, e->src);
3710 	  }
3711     }
3712 
3713   VEC_free (basic_block, heap, queue);
3714 }
3715 
3716 /* Dump a dereferences TABLE with heading STR to file F.  */
3717 
3718 static void
3719 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3720 {
3721   basic_block bb;
3722 
3723   fprintf (f, "%s", str);
3724   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3725     {
3726       fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3727       if (bb != EXIT_BLOCK_PTR)
3728 	{
3729 	  int i;
3730 	  for (i = 0; i < func_param_count; i++)
3731 	    {
3732 	      int idx = bb->index * func_param_count + i;
3733 	      fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3734 	    }
3735 	}
3736       fprintf (f, "\n");
3737     }
3738   fprintf (f, "\n");
3739 }
3740 
3741 /* Determine which (parts of) parameters passed by reference and not assigned
3742    to are not certainly dereferenced in this function, which means that the
3743    dereference cannot be safely moved to the caller without potentially
3744    introducing a segfault.  Mark such REPRESENTATIVES as
3745    grp_not_necessarilly_dereferenced.
3746 
3747    The maximum dereferenced "distance," i.e. the offset + size of the accessed
3748    part, is calculated for each pointer parameter rather than a simple
3749    boolean, in order to handle cases when only a fraction of the whole
3750    aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3751    an example).
3752 
3753    The maximum dereference distances for each pointer parameter and BB are
3754    already stored in bb_dereferences.  This routine simply propagates these
3755    values upwards by propagate_dereference_distances and then compares the
3756    distances of individual parameters in the ENTRY BB to the equivalent
3757    distances of each representative of a (fraction of a) parameter.  */
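
/* Continuing the illustration above (values are hypothetical): if the
   distance recorded for a pointer parameter in the ENTRY BB is 32 bits,
   a representative covering bits [0, 32) is certainly dereferenced and its
   load may be moved to the callers, whereas one covering bits [32, 64)
   exceeds that distance and is marked grp_not_necessarilly_dereferenced,
   which later causes it to be passed by reference if it is kept at all.  */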
3758 
3759 static void
3760 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3761 {
3762   int i;
3763 
3764   if (dump_file && (dump_flags & TDF_DETAILS))
3765     dump_dereferences_table (dump_file,
3766 			     "Dereference table before propagation:\n",
3767 			     bb_dereferences);
3768 
3769   propagate_dereference_distances ();
3770 
3771   if (dump_file && (dump_flags & TDF_DETAILS))
3772     dump_dereferences_table (dump_file,
3773 			     "Dereference table after propagation:\n",
3774 			     bb_dereferences);
3775 
3776   for (i = 0; i < func_param_count; i++)
3777     {
3778       struct access *repr = VEC_index (access_p, representatives, i);
3779       int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3780 
3781       if (!repr || no_accesses_p (repr))
3782 	continue;
3783 
3784       do
3785 	{
3786 	  if ((repr->offset + repr->size) > bb_dereferences[idx])
3787 	    repr->grp_not_necessarilly_dereferenced = 1;
3788 	  repr = repr->next_grp;
3789 	}
3790       while (repr);
3791     }
3792 }
3793 
3794 /* Return the representative access for the parameter declaration PARM if it is
3795    a scalar passed by reference which is not written to and the pointer value
3796    is not used directly.  Thus, if it is legal to dereference it in the caller
3797    and we can rule out modifications through aliases, such a parameter should
3798    be turned into one passed by value.  Return NULL otherwise.  */
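
/* A sketch of the situation this handles (hypothetical code):

     int deref (const int *p) { return *p; }

   If nothing is ever written through P, P itself does not escape and the
   dereference is known to be safe in every caller, the pointer parameter
   can later be turned into a plain int passed by value.  */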
3799 
3800 static struct access *
3801 unmodified_by_ref_scalar_representative (tree parm)
3802 {
3803   int i, access_count;
3804   struct access *repr;
3805   VEC (access_p, heap) *access_vec;
3806 
3807   access_vec = get_base_access_vector (parm);
3808   gcc_assert (access_vec);
3809   repr = VEC_index (access_p, access_vec, 0);
3810   if (repr->write)
3811     return NULL;
3812   repr->group_representative = repr;
3813 
3814   access_count = VEC_length (access_p, access_vec);
3815   for (i = 1; i < access_count; i++)
3816     {
3817       struct access *access = VEC_index (access_p, access_vec, i);
3818       if (access->write)
3819 	return NULL;
3820       access->group_representative = repr;
3821       access->next_sibling = repr->next_sibling;
3822       repr->next_sibling = access;
3823     }
3824 
3825   repr->grp_read = 1;
3826   repr->grp_scalar_ptr = 1;
3827   return repr;
3828 }
3829 
3830 /* Return true iff this access precludes IPA-SRA of the parameter it is
3831    associated with. */
3832 
3833 static bool
3834 access_precludes_ipa_sra_p (struct access *access)
3835 {
3836   /* Avoid issues such as the second simple testcase in PR 42025.  The problem
3837      is an incompatible assignment in a call statement (and possibly even in
3838      asm statements).  This can be relaxed by using a new temporary but only
3839      for non-TREE_ADDRESSABLE types and is probably not worth the complexity.
3840      (In intraprocedural SRA we deal with this by keeping the old aggregate
3841      around, something we cannot do in IPA-SRA.)  */
3842   if (access->write
3843       && (is_gimple_call (access->stmt)
3844 	  || gimple_code (access->stmt) == GIMPLE_ASM))
3845     return true;
3846 
3847   if (STRICT_ALIGNMENT
3848       && tree_non_aligned_mem_p (access->expr, TYPE_ALIGN (access->type)))
3849     return true;
3850 
3851   return false;
3852 }
3853 
3854 
3855 /* Sort the collected accesses for parameter PARM, identify a representative
3856    for each accessed region and link the representatives together.  Return
3857    NULL if there are different but overlapping accesses, return the special
3858    pointer value meaning there are no accesses for this parameter if that is
3859    the case, and return the first representative otherwise.  Set *RO_GRP if
3860    there is a group consisting of only read (i.e. no write) accesses.  */
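
/* For example (offsets and sizes in bits, purely illustrative): two accesses
   at [0, 32) of the same type are merged into a single group; accesses at
   [0, 32) and [32, 64) form two separate groups; but accesses at [0, 64) and
   [32, 64) neither coincide nor are disjoint, which violates the
   all-or-nothing rule below, so the whole parameter is given up on and NULL
   is returned.  */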
3861 
3862 static struct access *
3863 splice_param_accesses (tree parm, bool *ro_grp)
3864 {
3865   int i, j, access_count, group_count;
3866   int agg_size, total_size = 0;
3867   struct access *access, *res, **prev_acc_ptr = &res;
3868   VEC (access_p, heap) *access_vec;
3869 
3870   access_vec = get_base_access_vector (parm);
3871   if (!access_vec)
3872     return &no_accesses_representant;
3873   access_count = VEC_length (access_p, access_vec);
3874 
3875   VEC_qsort (access_p, access_vec, compare_access_positions);
3876 
3877   i = 0;
3878   total_size = 0;
3879   group_count = 0;
3880   while (i < access_count)
3881     {
3882       bool modification;
3883       tree a1_alias_type;
3884       access = VEC_index (access_p, access_vec, i);
3885       modification = access->write;
3886       if (access_precludes_ipa_sra_p (access))
3887 	return NULL;
3888       a1_alias_type = reference_alias_ptr_type (access->expr);
3889 
3890       /* Access is about to become group representative unless we find some
3891 	 nasty overlap which would preclude us from breaking this parameter
3892 	 apart. */
3893 
3894       j = i + 1;
3895       while (j < access_count)
3896 	{
3897 	  struct access *ac2 = VEC_index (access_p, access_vec, j);
3898 	  if (ac2->offset != access->offset)
3899 	    {
3900 	      /* All or nothing law for parameters. */
3901 	      if (access->offset + access->size > ac2->offset)
3902 		return NULL;
3903 	      else
3904 		break;
3905 	    }
3906 	  else if (ac2->size != access->size)
3907 	    return NULL;
3908 
3909 	  if (access_precludes_ipa_sra_p (ac2)
3910 	      || (ac2->type != access->type
3911 		  && (TREE_ADDRESSABLE (ac2->type)
3912 		      || TREE_ADDRESSABLE (access->type)))
3913 	      || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
3914 	    return NULL;
3915 
3916 	  modification |= ac2->write;
3917 	  ac2->group_representative = access;
3918 	  ac2->next_sibling = access->next_sibling;
3919 	  access->next_sibling = ac2;
3920 	  j++;
3921 	}
3922 
3923       group_count++;
3924       access->grp_maybe_modified = modification;
3925       if (!modification)
3926 	*ro_grp = true;
3927       *prev_acc_ptr = access;
3928       prev_acc_ptr = &access->next_grp;
3929       total_size += access->size;
3930       i = j;
3931     }
3932 
3933   if (POINTER_TYPE_P (TREE_TYPE (parm)))
3934     agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3935   else
3936     agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3937   if (total_size >= agg_size)
3938     return NULL;
3939 
3940   gcc_assert (group_count > 0);
3941   return res;
3942 }
3943 
3944 /* Decide whether parameters with representative accesses given by REPR should
3945    be reduced into components.  */
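
/* A sketch of the size test below (assuming 64-bit pointers and the default
   ipa-sra-ptr-growth-factor of 2, both only for illustration): for a
   by-reference parameter pointing to a 128-bit structure of which two 32-bit
   fields are read, total_size is 64, agg_size is 128 and parm_size_limit is
   2 * 64 = 128, so 64 < 128 on both counts and the parameter is split into
   two components.  When optimizing for size, the limit drops to the size of
   the parameter itself.  */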
3946 
3947 static int
3948 decide_one_param_reduction (struct access *repr)
3949 {
3950   int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3951   bool by_ref;
3952   tree parm;
3953 
3954   parm = repr->base;
3955   cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3956   gcc_assert (cur_parm_size > 0);
3957 
3958   if (POINTER_TYPE_P (TREE_TYPE (parm)))
3959     {
3960       by_ref = true;
3961       agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3962     }
3963   else
3964     {
3965       by_ref = false;
3966       agg_size = cur_parm_size;
3967     }
3968 
3969   if (dump_file)
3970     {
3971       struct access *acc;
3972       fprintf (dump_file, "Evaluating PARAM group sizes for ");
3973       print_generic_expr (dump_file, parm, 0);
3974       fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3975       for (acc = repr; acc; acc = acc->next_grp)
3976 	dump_access (dump_file, acc, true);
3977     }
3978 
3979   total_size = 0;
3980   new_param_count = 0;
3981 
3982   for (; repr; repr = repr->next_grp)
3983     {
3984       gcc_assert (parm == repr->base);
3985 
3986       /* Taking the address of a non-addressable field is verboten.  */
3987       if (by_ref && repr->non_addressable)
3988 	return 0;
3989 
3990       /* Do not decompose a non-BLKmode param in a way that would
3991          create BLKmode params.  Especially for by-reference passing
3992 	 (thus, pointer-type param) this is hardly worthwhile.  */
3993       if (DECL_MODE (parm) != BLKmode
3994 	  && TYPE_MODE (repr->type) == BLKmode)
3995 	return 0;
3996 
3997       if (!by_ref || (!repr->grp_maybe_modified
3998 		      && !repr->grp_not_necessarilly_dereferenced))
3999 	total_size += repr->size;
4000       else
4001 	total_size += cur_parm_size;
4002 
4003       new_param_count++;
4004     }
4005 
4006   gcc_assert (new_param_count > 0);
4007 
4008   if (optimize_function_for_size_p (cfun))
4009     parm_size_limit = cur_parm_size;
4010   else
4011     parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4012                        * cur_parm_size);
4013 
4014   if (total_size < agg_size
4015       && total_size <= parm_size_limit)
4016     {
4017       if (dump_file)
4018 	fprintf (dump_file, "    ....will be split into %i components\n",
4019 		 new_param_count);
4020       return new_param_count;
4021     }
4022   else
4023     return 0;
4024 }
4025 
4026 /* The order of the following enums is important; we need to do extra work for
4027    UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
4028 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4029 			  MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4030 
4031 /* Identify representatives of all accesses to all candidate parameters for
4032    IPA-SRA.  Return result based on what representatives have been found. */
4033 
4034 static enum ipa_splicing_result
4035 splice_all_param_accesses (VEC (access_p, heap) **representatives)
4036 {
4037   enum ipa_splicing_result result = NO_GOOD_ACCESS;
4038   tree parm;
4039   struct access *repr;
4040 
4041   *representatives = VEC_alloc (access_p, heap, func_param_count);
4042 
4043   for (parm = DECL_ARGUMENTS (current_function_decl);
4044        parm;
4045        parm = DECL_CHAIN (parm))
4046     {
4047       if (is_unused_scalar_param (parm))
4048 	{
4049 	  VEC_quick_push (access_p, *representatives,
4050 			  &no_accesses_representant);
4051 	  if (result == NO_GOOD_ACCESS)
4052 	    result = UNUSED_PARAMS;
4053 	}
4054       else if (POINTER_TYPE_P (TREE_TYPE (parm))
4055 	       && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4056 	       && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4057 	{
4058 	  repr = unmodified_by_ref_scalar_representative (parm);
4059 	  VEC_quick_push (access_p, *representatives, repr);
4060 	  if (repr)
4061 	    result = UNMODIF_BY_REF_ACCESSES;
4062 	}
4063       else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4064 	{
4065 	  bool ro_grp = false;
4066 	  repr = splice_param_accesses (parm, &ro_grp);
4067 	  VEC_quick_push (access_p, *representatives, repr);
4068 
4069 	  if (repr && !no_accesses_p (repr))
4070 	    {
4071 	      if (POINTER_TYPE_P (TREE_TYPE (parm)))
4072 		{
4073 		  if (ro_grp)
4074 		    result = UNMODIF_BY_REF_ACCESSES;
4075 		  else if (result < MODIF_BY_REF_ACCESSES)
4076 		    result = MODIF_BY_REF_ACCESSES;
4077 		}
4078 	      else if (result < BY_VAL_ACCESSES)
4079 		result = BY_VAL_ACCESSES;
4080 	    }
4081 	  else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4082 	    result = UNUSED_PARAMS;
4083 	}
4084       else
4085 	VEC_quick_push (access_p, *representatives, NULL);
4086     }
4087 
4088   if (result == NO_GOOD_ACCESS)
4089     {
4090       VEC_free (access_p, heap, *representatives);
4091       *representatives = NULL;
4092       return NO_GOOD_ACCESS;
4093     }
4094 
4095   return result;
4096 }
4097 
4098 /* Return the index of BASE in PARMS.  Abort if it is not found.  */
4099 
4100 static inline int
4101 get_param_index (tree base, VEC(tree, heap) *parms)
4102 {
4103   int i, len;
4104 
4105   len = VEC_length (tree, parms);
4106   for (i = 0; i < len; i++)
4107     if (VEC_index (tree, parms, i) == base)
4108       return i;
4109   gcc_unreachable ();
4110 }
4111 
4112 /* Convert the decisions made at the representative level into compact
4113    parameter adjustments.  REPRESENTATIVES are pointers to the first
4114    representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the
4115    expected final number of adjustments.  */
4116 
4117 static ipa_parm_adjustment_vec
4118 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
4119 				       int adjustments_count)
4120 {
4121   VEC (tree, heap) *parms;
4122   ipa_parm_adjustment_vec adjustments;
4123   tree parm;
4124   int i;
4125 
4126   gcc_assert (adjustments_count > 0);
4127   parms = ipa_get_vector_of_formal_parms (current_function_decl);
4128   adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
4129   parm = DECL_ARGUMENTS (current_function_decl);
4130   for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4131     {
4132       struct access *repr = VEC_index (access_p, representatives, i);
4133 
4134       if (!repr || no_accesses_p (repr))
4135 	{
4136 	  struct ipa_parm_adjustment *adj;
4137 
4138 	  adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
4139 	  memset (adj, 0, sizeof (*adj));
4140 	  adj->base_index = get_param_index (parm, parms);
4141 	  adj->base = parm;
4142 	  if (!repr)
4143 	    adj->copy_param = 1;
4144 	  else
4145 	    adj->remove_param = 1;
4146 	}
4147       else
4148 	{
4149 	  struct ipa_parm_adjustment *adj;
4150 	  int index = get_param_index (parm, parms);
4151 
4152 	  for (; repr; repr = repr->next_grp)
4153 	    {
4154 	      adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
4155 	      memset (adj, 0, sizeof (*adj));
4156 	      gcc_assert (repr->base == parm);
4157 	      adj->base_index = index;
4158 	      adj->base = repr->base;
4159 	      adj->type = repr->type;
4160 	      adj->alias_ptr_type = reference_alias_ptr_type (repr->expr);
4161 	      adj->offset = repr->offset;
4162 	      adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4163 			     && (repr->grp_maybe_modified
4164 				 || repr->grp_not_necessarilly_dereferenced));
4165 
4166 	    }
4167 	}
4168     }
4169   VEC_free (tree, heap, parms);
4170   return adjustments;
4171 }
4172 
4173 /* Analyze the collected accesses and produce a plan of what to do with the
4174    parameters in the form of adjustments, NULL meaning nothing is to be done.  */
4175 
4176 static ipa_parm_adjustment_vec
4177 analyze_all_param_acesses (void)
4178 {
4179   enum ipa_splicing_result repr_state;
4180   bool proceed = false;
4181   int i, adjustments_count = 0;
4182   VEC (access_p, heap) *representatives;
4183   ipa_parm_adjustment_vec adjustments;
4184 
4185   repr_state = splice_all_param_accesses (&representatives);
4186   if (repr_state == NO_GOOD_ACCESS)
4187     return NULL;
4188 
4189   /* If there are any parameters passed by reference which are not modified
4190      directly, we need to check whether they can be modified indirectly.  */
4191   if (repr_state == UNMODIF_BY_REF_ACCESSES)
4192     {
4193       analyze_caller_dereference_legality (representatives);
4194       analyze_modified_params (representatives);
4195     }
4196 
4197   for (i = 0; i < func_param_count; i++)
4198     {
4199       struct access *repr = VEC_index (access_p, representatives, i);
4200 
4201       if (repr && !no_accesses_p (repr))
4202 	{
4203 	  if (repr->grp_scalar_ptr)
4204 	    {
4205 	      adjustments_count++;
4206 	      if (repr->grp_not_necessarilly_dereferenced
4207 		  || repr->grp_maybe_modified)
4208 		VEC_replace (access_p, representatives, i, NULL);
4209 	      else
4210 		{
4211 		  proceed = true;
4212 		  sra_stats.scalar_by_ref_to_by_val++;
4213 		}
4214 	    }
4215 	  else
4216 	    {
4217 	      int new_components = decide_one_param_reduction (repr);
4218 
4219 	      if (new_components == 0)
4220 		{
4221 		  VEC_replace (access_p, representatives, i, NULL);
4222 		  adjustments_count++;
4223 		}
4224 	      else
4225 		{
4226 		  adjustments_count += new_components;
4227 		  sra_stats.aggregate_params_reduced++;
4228 		  sra_stats.param_reductions_created += new_components;
4229 		  proceed = true;
4230 		}
4231 	    }
4232 	}
4233       else
4234 	{
4235 	  if (no_accesses_p (repr))
4236 	    {
4237 	      proceed = true;
4238 	      sra_stats.deleted_unused_parameters++;
4239 	    }
4240 	  adjustments_count++;
4241 	}
4242     }
4243 
4244   if (!proceed && dump_file)
4245     fprintf (dump_file, "NOT proceeding to change params.\n");
4246 
4247   if (proceed)
4248     adjustments = turn_representatives_into_adjustments (representatives,
4249 							 adjustments_count);
4250   else
4251     adjustments = NULL;
4252 
4253   VEC_free (access_p, heap, representatives);
4254   return adjustments;
4255 }
4256 
4257 /* If a parameter replacement identified by ADJ does not yet exist in the form
4258    of a declaration, create it and record it, otherwise return the previously
4259    created one.  */
4260 
4261 static tree
4262 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4263 {
4264   tree repl;
4265   if (!adj->new_ssa_base)
4266     {
4267       char *pretty_name = make_fancy_name (adj->base);
4268 
4269       repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4270       DECL_NAME (repl) = get_identifier (pretty_name);
4271       obstack_free (&name_obstack, pretty_name);
4272 
4273       add_referenced_var (repl);
4274       adj->new_ssa_base = repl;
4275     }
4276   else
4277     repl = adj->new_ssa_base;
4278   return repl;
4279 }
4280 
4281 /* Find the first adjustment for a particular parameter BASE in a vector of
4282    ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
4283    adjustment. */
4284 
4285 static struct ipa_parm_adjustment *
4286 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4287 {
4288   int i, len;
4289 
4290   len = VEC_length (ipa_parm_adjustment_t, adjustments);
4291   for (i = 0; i < len; i++)
4292     {
4293       struct ipa_parm_adjustment *adj;
4294 
4295       adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4296       if (!adj->copy_param && adj->base == base)
4297 	return adj;
4298     }
4299 
4300   return NULL;
4301 }
4302 
4303 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4304    removed because its value is not used, replace the SSA_NAME with one
4305    relating to a created VAR_DECL, replace all of its uses as well and return
4306    true.  ADJUSTMENTS is a pointer to an adjustments vector.  */
4307 
4308 static bool
4309 replace_removed_params_ssa_names (gimple stmt,
4310 				  ipa_parm_adjustment_vec adjustments)
4311 {
4312   struct ipa_parm_adjustment *adj;
4313   tree lhs, decl, repl, name;
4314 
4315   if (gimple_code (stmt) == GIMPLE_PHI)
4316     lhs = gimple_phi_result (stmt);
4317   else if (is_gimple_assign (stmt))
4318     lhs = gimple_assign_lhs (stmt);
4319   else if (is_gimple_call (stmt))
4320     lhs = gimple_call_lhs (stmt);
4321   else
4322     gcc_unreachable ();
4323 
4324   if (TREE_CODE (lhs) != SSA_NAME)
4325     return false;
4326   decl = SSA_NAME_VAR (lhs);
4327   if (TREE_CODE (decl) != PARM_DECL)
4328     return false;
4329 
4330   adj = get_adjustment_for_base (adjustments, decl);
4331   if (!adj)
4332     return false;
4333 
4334   repl = get_replaced_param_substitute (adj);
4335   name = make_ssa_name (repl, stmt);
4336 
4337   if (dump_file)
4338     {
4339       fprintf (dump_file, "replacing an SSA name of a removed param ");
4340       print_generic_expr (dump_file, lhs, 0);
4341       fprintf (dump_file, " with ");
4342       print_generic_expr (dump_file, name, 0);
4343       fprintf (dump_file, "\n");
4344     }
4345 
4346   if (is_gimple_assign (stmt))
4347     gimple_assign_set_lhs (stmt, name);
4348   else if (is_gimple_call (stmt))
4349     gimple_call_set_lhs (stmt, name);
4350   else
4351     gimple_phi_set_result (stmt, name);
4352 
4353   replace_uses_by (lhs, name);
4354   release_ssa_name (lhs);
4355   return true;
4356 }
4357 
4358 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4359    so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
4360    specifies whether the function should care about type incompatibility
4361    between the current and new expressions.  If it is false, the function will
4362    leave incompatibility issues to the caller.  Return true iff the expression
4363    was modified.  */
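
/* For instance (hypothetical names): if the adjustment selected for parameter
   P covers the field at offset 32 and its reduction is the new parameter
   ISRA.3, an expression like p->b is rewritten to ISRA.3 for a by-value
   reduction, or to *ISRA.3 when the component is still passed by reference,
   with a VIEW_CONVERT_EXPR wrapped around it if CONVERT requires the types
   to match.  */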
4364 
4365 static bool
4366 sra_ipa_modify_expr (tree *expr, bool convert,
4367 		     ipa_parm_adjustment_vec adjustments)
4368 {
4369   int i, len;
4370   struct ipa_parm_adjustment *adj, *cand = NULL;
4371   HOST_WIDE_INT offset, size, max_size;
4372   tree base, src;
4373 
4374   len = VEC_length (ipa_parm_adjustment_t, adjustments);
4375 
4376   if (TREE_CODE (*expr) == BIT_FIELD_REF
4377       || TREE_CODE (*expr) == IMAGPART_EXPR
4378       || TREE_CODE (*expr) == REALPART_EXPR)
4379     {
4380       expr = &TREE_OPERAND (*expr, 0);
4381       convert = true;
4382     }
4383 
4384   base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
4385   if (!base || size == -1 || max_size == -1)
4386     return false;
4387 
4388   if (TREE_CODE (base) == MEM_REF)
4389     {
4390       offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4391       base = TREE_OPERAND (base, 0);
4392     }
4393 
4394   base = get_ssa_base_param (base);
4395   if (!base || TREE_CODE (base) != PARM_DECL)
4396     return false;
4397 
4398   for (i = 0; i < len; i++)
4399     {
4400       adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4401 
4402       if (adj->base == base &&
4403 	  (adj->offset == offset || adj->remove_param))
4404 	{
4405 	  cand = adj;
4406 	  break;
4407 	}
4408     }
4409   if (!cand || cand->copy_param || cand->remove_param)
4410     return false;
4411 
4412   if (cand->by_ref)
4413     src = build_simple_mem_ref (cand->reduction);
4414   else
4415     src = cand->reduction;
4416 
4417   if (dump_file && (dump_flags & TDF_DETAILS))
4418     {
4419       fprintf (dump_file, "About to replace expr ");
4420       print_generic_expr (dump_file, *expr, 0);
4421       fprintf (dump_file, " with ");
4422       print_generic_expr (dump_file, src, 0);
4423       fprintf (dump_file, "\n");
4424     }
4425 
4426   if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4427     {
4428       tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4429       *expr = vce;
4430     }
4431   else
4432     *expr = src;
4433   return true;
4434 }
4435 
4436 /* If the statement pointed to by STMT_PTR contains any expressions that need
4437    to be replaced with different ones as noted by ADJUSTMENTS, do so.  Handle
4438    any potential type incompatibilities (GSI is used to accommodate conversion
4439    statements and must point to the statement).  Return true iff the statement
4440    was modified.  */
4441 
4442 static bool
4443 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4444 		       ipa_parm_adjustment_vec adjustments)
4445 {
4446   gimple stmt = *stmt_ptr;
4447   tree *lhs_p, *rhs_p;
4448   bool any;
4449 
4450   if (!gimple_assign_single_p (stmt))
4451     return false;
4452 
4453   rhs_p = gimple_assign_rhs1_ptr (stmt);
4454   lhs_p = gimple_assign_lhs_ptr (stmt);
4455 
4456   any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4457   any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4458   if (any)
4459     {
4460       tree new_rhs = NULL_TREE;
4461 
4462       if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4463 	{
4464 	  if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4465 	    {
4466 	      /* V_C_Es of constructors can cause trouble (PR 42714).  */
4467 	      if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4468 		*rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4469 	      else
4470 		*rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
4471 	    }
4472 	  else
4473 	    new_rhs = fold_build1_loc (gimple_location (stmt),
4474 				       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4475 				       *rhs_p);
4476 	}
4477       else if (REFERENCE_CLASS_P (*rhs_p)
4478 	       && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4479 	       && !is_gimple_reg (*lhs_p))
4480 	/* This can happen when an assignment between two single field
4481 	   structures is turned into an assignment between two pointers to
4482 	   scalars (PR 42237).  */
4483 	new_rhs = *rhs_p;
4484 
4485       if (new_rhs)
4486 	{
4487 	  tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4488 					       true, GSI_SAME_STMT);
4489 
4490 	  gimple_assign_set_rhs_from_tree (gsi, tmp);
4491 	}
4492 
4493       return true;
4494     }
4495 
4496   return false;
4497 }
4498 
4499 /* Traverse the function body and perform all modifications as described in
4500    ADJUSTMENTS.  Return true iff the CFG has been changed.  */
4501 
4502 static bool
4503 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4504 {
4505   bool cfg_changed = false;
4506   basic_block bb;
4507 
4508   FOR_EACH_BB (bb)
4509     {
4510       gimple_stmt_iterator gsi;
4511 
4512       for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4513 	replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4514 
4515       gsi = gsi_start_bb (bb);
4516       while (!gsi_end_p (gsi))
4517 	{
4518 	  gimple stmt = gsi_stmt (gsi);
4519 	  bool modified = false;
4520 	  tree *t;
4521 	  unsigned i;
4522 
4523 	  switch (gimple_code (stmt))
4524 	    {
4525 	    case GIMPLE_RETURN:
4526 	      t = gimple_return_retval_ptr (stmt);
4527 	      if (*t != NULL_TREE)
4528 		modified |= sra_ipa_modify_expr (t, true, adjustments);
4529 	      break;
4530 
4531 	    case GIMPLE_ASSIGN:
4532 	      modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4533 	      modified |= replace_removed_params_ssa_names (stmt, adjustments);
4534 	      break;
4535 
4536 	    case GIMPLE_CALL:
4537 	      /* Operands must be processed before the lhs.  */
4538 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
4539 		{
4540 		  t = gimple_call_arg_ptr (stmt, i);
4541 		  modified |= sra_ipa_modify_expr (t, true, adjustments);
4542 		}
4543 
4544 	      if (gimple_call_lhs (stmt))
4545 		{
4546 		  t = gimple_call_lhs_ptr (stmt);
4547 		  modified |= sra_ipa_modify_expr (t, false, adjustments);
4548 		  modified |= replace_removed_params_ssa_names (stmt,
4549 								adjustments);
4550 		}
4551 	      break;
4552 
4553 	    case GIMPLE_ASM:
4554 	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4555 		{
4556 		  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4557 		  modified |= sra_ipa_modify_expr (t, true, adjustments);
4558 		}
4559 	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4560 		{
4561 		  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4562 		  modified |= sra_ipa_modify_expr (t, false, adjustments);
4563 		}
4564 	      break;
4565 
4566 	    default:
4567 	      break;
4568 	    }
4569 
4570 	  if (modified)
4571 	    {
4572 	      update_stmt (stmt);
4573 	      if (maybe_clean_eh_stmt (stmt)
4574 		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4575 		cfg_changed = true;
4576 	    }
4577 	  gsi_next (&gsi);
4578 	}
4579     }
4580 
4581   return cfg_changed;
4582 }
4583 
4584 /* Call gimple_debug_bind_reset_value on all debug statements describing
4585    gimple register parameters that are being removed or replaced.  */
4586 
4587 static void
4588 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4589 {
4590   int i, len;
4591   gimple_stmt_iterator *gsip = NULL, gsi;
4592 
4593   if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
4594     {
4595       gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
4596       gsip = &gsi;
4597     }
4598   len = VEC_length (ipa_parm_adjustment_t, adjustments);
4599   for (i = 0; i < len; i++)
4600     {
4601       struct ipa_parm_adjustment *adj;
4602       imm_use_iterator ui;
4603       gimple stmt, def_temp;
4604       tree name, vexpr, copy = NULL_TREE;
4605       use_operand_p use_p;
4606 
4607       adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4608       if (adj->copy_param || !is_gimple_reg (adj->base))
4609 	continue;
4610       name = gimple_default_def (cfun, adj->base);
4611       vexpr = NULL;
4612       if (name)
4613 	FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4614 	  {
4615 	    /* All other users must have been removed by
4616 	       ipa_sra_modify_function_body.  */
4617 	    gcc_assert (is_gimple_debug (stmt));
4618 	    if (vexpr == NULL && gsip != NULL)
4619 	      {
4620 		gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4621 		vexpr = make_node (DEBUG_EXPR_DECL);
4622 		def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4623 							   NULL);
4624 		DECL_ARTIFICIAL (vexpr) = 1;
4625 		TREE_TYPE (vexpr) = TREE_TYPE (name);
4626 		DECL_MODE (vexpr) = DECL_MODE (adj->base);
4627 		gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4628 	      }
4629 	    if (vexpr)
4630 	      {
4631 		FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4632 		  SET_USE (use_p, vexpr);
4633 	      }
4634 	    else
4635 	      gimple_debug_bind_reset_value (stmt);
4636 	    update_stmt (stmt);
4637 	  }
4638       /* Create a VAR_DECL for debug info purposes.  */
4639       if (!DECL_IGNORED_P (adj->base))
4640 	{
4641 	  copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4642 			     VAR_DECL, DECL_NAME (adj->base),
4643 			     TREE_TYPE (adj->base));
4644 	  if (DECL_PT_UID_SET_P (adj->base))
4645 	    SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4646 	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4647 	  TREE_READONLY (copy) = TREE_READONLY (adj->base);
4648 	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4649 	  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4650 	  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4651 	  DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4652 	  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4653 	  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4654 	  SET_DECL_RTL (copy, 0);
4655 	  TREE_USED (copy) = 1;
4656 	  DECL_CONTEXT (copy) = current_function_decl;
4657 	  add_referenced_var (copy);
4658 	  add_local_decl (cfun, copy);
4659 	  DECL_CHAIN (copy) =
4660 	    BLOCK_VARS (DECL_INITIAL (current_function_decl));
4661 	  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4662 	}
4663       if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4664 	{
4665 	  gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4666 	  if (vexpr)
4667 	    def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4668 	  else
4669 	    def_temp = gimple_build_debug_source_bind (copy, adj->base,
4670 						       NULL);
4671 	  gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4672 	}
4673     }
4674 }
4675 
4676 /* Return true iff some caller of NODE does not pass at least as many actual
4677    arguments as there are formal parameters in the current function.  */
4678 
4679 static bool
4680 not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
4681 					 void *data ATTRIBUTE_UNUSED)
4682 {
4683   struct cgraph_edge *cs;
4684   for (cs = node->callers; cs; cs = cs->next_caller)
4685     if (!callsite_has_enough_arguments_p (cs->call_stmt))
4686       return true;
4687 
4688   return false;
4689 }
4690 
4691 /* Convert all callers of NODE.  */
4692 
4693 static bool
4694 convert_callers_for_node (struct cgraph_node *node,
4695 		          void *data)
4696 {
4697   ipa_parm_adjustment_vec adjustments = (ipa_parm_adjustment_vec)data;
4698   bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4699   struct cgraph_edge *cs;
4700 
4701   for (cs = node->callers; cs; cs = cs->next_caller)
4702     {
4703       current_function_decl = cs->caller->decl;
4704       push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4705 
4706       if (dump_file)
4707 	fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4708 		 cs->caller->uid, cs->callee->uid,
4709 		 xstrdup (cgraph_node_name (cs->caller)),
4710 		 xstrdup (cgraph_node_name (cs->callee)));
4711 
4712       ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
4713 
4714       pop_cfun ();
4715     }
4716 
4717   for (cs = node->callers; cs; cs = cs->next_caller)
4718     if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4719 	&& gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
4720       compute_inline_parameters (cs->caller, true);
4721   BITMAP_FREE (recomputed_callers);
4722 
4723   return true;
4724 }
4725 
4726 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */
4727 
4728 static void
4729 convert_callers (struct cgraph_node *node, tree old_decl,
4730 		 ipa_parm_adjustment_vec adjustments)
4731 {
4732   tree old_cur_fndecl = current_function_decl;
4733   basic_block this_block;
4734 
4735   cgraph_for_node_and_aliases (node, convert_callers_for_node,
4736 			       adjustments, false);
4737 
4738   current_function_decl = old_cur_fndecl;
4739 
4740   if (!encountered_recursive_call)
4741     return;
4742 
4743   FOR_EACH_BB (this_block)
4744     {
4745       gimple_stmt_iterator gsi;
4746 
4747       for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4748         {
4749 	  gimple stmt = gsi_stmt (gsi);
4750 	  tree call_fndecl;
4751 	  if (gimple_code (stmt) != GIMPLE_CALL)
4752 	    continue;
4753 	  call_fndecl = gimple_call_fndecl (stmt);
4754 	  if (call_fndecl == old_decl)
4755 	    {
4756 	      if (dump_file)
4757 		fprintf (dump_file, "Adjusting recursive call");
4758 	      gimple_call_set_fndecl (stmt, node->decl);
4759 	      ipa_modify_call_arguments (NULL, stmt, adjustments);
4760 	    }
4761 	}
4762     }
4763 
4764   return;
4765 }
4766 
4767 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
4768    as given in ADJUSTMENTS.  Return true iff the CFG has been changed.  */
4769 
4770 static bool
4771 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4772 {
4773   struct cgraph_node *new_node;
4774   bool cfg_changed;
4775   VEC (cgraph_edge_p, heap) * redirect_callers = collect_callers_of_node (node);
4776 
4777   rebuild_cgraph_edges ();
4778   free_dominance_info (CDI_DOMINATORS);
4779   pop_cfun ();
4780   current_function_decl = NULL_TREE;
4781 
4782   new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
4783 					 false, NULL, NULL, "isra");
4784   VEC_free (cgraph_edge_p, heap, redirect_callers);
4785 
4786   current_function_decl = new_node->decl;
4787   push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
4788 
4789   ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4790   cfg_changed = ipa_sra_modify_function_body (adjustments);
4791   sra_ipa_reset_debug_stmts (adjustments);
4792   convert_callers (new_node, node->decl, adjustments);
4793   cgraph_make_node_local (new_node);
4794   return cfg_changed;
4795 }
4796 
4797 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4798    its attributes, return true otherwise.  NODE is the cgraph node of the
4799    current function.  */
4800 
4801 static bool
4802 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4803 {
4804   if (!cgraph_node_can_be_local_p (node))
4805     {
4806       if (dump_file)
4807 	fprintf (dump_file, "Function not local to this compilation unit.\n");
4808       return false;
4809     }
4810 
4811   if (!node->local.can_change_signature)
4812     {
4813       if (dump_file)
4814 	fprintf (dump_file, "Function can not change signature.\n");
4815       return false;
4816     }
4817 
4818   if (!tree_versionable_function_p (node->decl))
4819     {
4820       if (dump_file)
4821 	fprintf (dump_file, "Function is not versionable.\n");
4822       return false;
4823     }
4824 
4825   if (DECL_VIRTUAL_P (current_function_decl))
4826     {
4827       if (dump_file)
4828 	fprintf (dump_file, "Function is a virtual method.\n");
4829       return false;
4830     }
4831 
4832   if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
4833       && inline_summary(node)->size >= MAX_INLINE_INSNS_AUTO)
4834     {
4835       if (dump_file)
4836 	fprintf (dump_file, "Function too big to be made truly local.\n");
4837       return false;
4838     }
4839 
4840   if (!node->callers)
4841     {
4842       if (dump_file)
4843 	fprintf (dump_file,
4844 		 "Function has no callers in this compilation unit.\n");
4845       return false;
4846     }
4847 
4848   if (cfun->stdarg)
4849     {
4850       if (dump_file)
4851 	fprintf (dump_file, "Function uses stdarg.\n");
4852       return false;
4853     }
4854 
4855   if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
4856     return false;
4857 
4858   return true;
4859 }
4860 
4861 /* Perform early interprocedural SRA.  */
4862 
4863 static unsigned int
4864 ipa_early_sra (void)
4865 {
4866   struct cgraph_node *node = cgraph_get_node (current_function_decl);
4867   ipa_parm_adjustment_vec adjustments;
4868   int ret = 0;
4869 
4870   if (!ipa_sra_preliminary_function_checks (node))
4871     return 0;
4872 
4873   sra_initialize ();
4874   sra_mode = SRA_MODE_EARLY_IPA;
4875 
4876   if (!find_param_candidates ())
4877     {
4878       if (dump_file)
4879 	fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4880       goto simple_out;
4881     }
4882 
4883   if (cgraph_for_node_and_aliases (node, not_all_callers_have_enough_arguments_p,
4884 				   NULL, true))
4885     {
4886       if (dump_file)
4887 	fprintf (dump_file, "There are callers with insufficient number of "
4888 		 "arguments.\n");
4889       goto simple_out;
4890     }
4891 
4892   bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4893 				 func_param_count
4894 				 * last_basic_block_for_function (cfun));
4895   final_bbs = BITMAP_ALLOC (NULL);
4896 
4897   scan_function ();
4898   if (encountered_apply_args)
4899     {
4900       if (dump_file)
4901 	fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4902       goto out;
4903     }
4904 
4905   if (encountered_unchangable_recursive_call)
4906     {
4907       if (dump_file)
4908 	fprintf (dump_file, "Function calls itself with insufficient "
4909 		 "number of arguments.\n");
4910       goto out;
4911     }
4912 
4913   adjustments = analyze_all_param_acesses ();
4914   if (!adjustments)
4915     goto out;
4916   if (dump_file)
4917     ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4918 
4919   if (modify_function (node, adjustments))
4920     ret = TODO_update_ssa | TODO_cleanup_cfg;
4921   else
4922     ret = TODO_update_ssa;
4923   VEC_free (ipa_parm_adjustment_t, heap, adjustments);
4924 
4925   statistics_counter_event (cfun, "Unused parameters deleted",
4926 			    sra_stats.deleted_unused_parameters);
4927   statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4928 			    sra_stats.scalar_by_ref_to_by_val);
4929   statistics_counter_event (cfun, "Aggregate parameters broken up",
4930 			    sra_stats.aggregate_params_reduced);
4931   statistics_counter_event (cfun, "Aggregate parameter components created",
4932 			    sra_stats.param_reductions_created);
4933 
4934  out:
4935   BITMAP_FREE (final_bbs);
4936   free (bb_dereferences);
4937  simple_out:
4938   sra_deinitialize ();
4939   return ret;
4940 }
4941 
4942 /* Return true iff early IPA-SRA shall be performed.  */
4943 static bool
4944 ipa_early_sra_gate (void)
4945 {
4946   return flag_ipa_sra && dbg_cnt (eipa_sra);
4947 }
4948 
4949 struct gimple_opt_pass pass_early_ipa_sra =
4950 {
4951  {
4952   GIMPLE_PASS,
4953   "eipa_sra",	 			/* name */
4954   ipa_early_sra_gate,			/* gate */
4955   ipa_early_sra,			/* execute */
4956   NULL,					/* sub */
4957   NULL,					/* next */
4958   0,					/* static_pass_number */
4959   TV_IPA_SRA,				/* tv_id */
4960   0,	                                /* properties_required */
4961   0,					/* properties_provided */
4962   0,					/* properties_destroyed */
4963   0,					/* todo_flags_start */
4964   TODO_dump_cgraph              	/* todo_flags_finish */
4965  }
4966 };
4967