xref: /dragonfly/contrib/gcc-4.7/gcc/tree-sra.c (revision 77b0c609)
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2    references into scalar references, exposing them to the scalar
3    optimizers.
4    Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
5    Contributed by Martin Jambor <mjambor@suse.cz>
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 /* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
24    twice, once in the early stages of compilation (early SRA) and once in the
25    late stages (late SRA).  The aim of both is to turn references to scalar
26    parts of aggregates into uses of independent scalar variables.
27 
28    The two passes are nearly identical; the only difference is that early SRA
29    does not scalarize unions which are used as the result in a GIMPLE_RETURN
30    statement because together with inlining this can lead to weird type
31    conversions.
32 
33    Both passes operate in four stages:
34 
35    1. The declarations that have properties which make them candidates for
36       scalarization are identified in function find_var_candidates().  The
37       candidates are stored in candidate_bitmap.
38 
39    2. The function body is scanned.  In the process, declarations which are
40       used in a manner that prevents their scalarization are removed from the
41       candidate bitmap.  More importantly, for every access into an aggregate,
42       an access structure (struct access) is created by create_access() and
43       stored in a vector associated with the aggregate.  Among other
44       information, the aggregate declaration, the offset and size of the access
45       and its type are stored in the structure.
46 
47       On a related note, assign_link structures are created for every assign
48       statement between candidate aggregates and attached to the related
49       accesses.
50 
51    3. The vectors of accesses are analyzed.  They are first sorted according to
52       their offset and size and then scanned for partially overlapping accesses
53       (i.e. those which overlap but one is not entirely within another).  Such
54       an access disqualifies the whole aggregate from being scalarized.
55 
56       If there is no such inhibiting overlap, a representative access structure
57       is chosen for every unique combination of offset and size.  Afterwards,
58       the pass builds a set of trees from these structures, in which children
59       of an access are within their parent (in terms of offset and size).
60 
61       Then accesses are propagated whenever possible (i.e. in cases when it
62       does not create a partially overlapping access) across assign_links from
63       the right hand side to the left hand side.
64 
65       Then the set of trees for each declaration is traversed again and those
66       accesses which should be replaced by a scalar are identified.
67 
68    4. The function is traversed again, and for every reference into an
69       aggregate that has some component which is about to be scalarized,
70       statements are amended and new statements are created as necessary.
71       Finally, if a parameter got scalarized, the scalar replacements are
72       initialized with values from respective parameter aggregates.  */
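
/* As a minimal illustration (a hypothetical example, not taken from the
   sources or the testsuite), given

     struct pair { int a; int b; };

     int
     sum (void)
     {
       struct pair p;
       p.a = 1;
       p.b = 2;
       return p.a + p.b;
     }

   intraprocedural SRA creates a scalar replacement for each accessed field,
   say p$a and p$b, and rewrites the statements so that the aggregate p
   itself is no longer referenced:

     p$a = 1;
     p$b = 2;
     return p$a + p$b;  */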
73 
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "alloc-pool.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "gimple.h"
81 #include "cgraph.h"
82 #include "tree-flow.h"
83 #include "ipa-prop.h"
84 #include "tree-pretty-print.h"
85 #include "statistics.h"
86 #include "tree-dump.h"
87 #include "timevar.h"
88 #include "params.h"
89 #include "target.h"
90 #include "flags.h"
91 #include "dbgcnt.h"
92 #include "tree-inline.h"
93 #include "gimple-pretty-print.h"
94 #include "ipa-inline.h"
95 
96 /* Enumeration of all aggregate reductions we can do.  */
97 enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
98 		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
99 		SRA_MODE_INTRA };     /* late intraprocedural SRA */
100 
101 /* Global variable describing which aggregate reduction we are performing at
102    the moment.  */
103 static enum sra_mode sra_mode;
104 
105 struct assign_link;
106 
107 /* ACCESS represents each access to an aggregate variable (as a whole or a
108    part).  It can also represent a group of accesses that refer to exactly the
109    same fragment of an aggregate (i.e. those that have exactly the same offset
110    and size).  Such representatives for a single aggregate, once determined,
111    are linked in a linked list and have the group fields set.
112 
113    Moreover, when doing intraprocedural SRA, a tree is built from those
114    representatives (by the means of first_child and next_sibling pointers), in
115    which all items in a subtree are "within" the root, i.e. their offset is
116    greater or equal to offset of the root and offset+size is smaller or equal
117    to offset+size of the root.  Children of an access are sorted by offset.
118 
119    Note that accesses to parts of vector and complex number types are always
120    represented by an access to the whole vector or complex number.  It is a
121    duty of the modifying functions to replace them appropriately.  */
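
/* As a small hypothetical illustration (assuming 32-bit int and 16-bit
   short), for

     struct S { int i; struct { short a; short b; } in; } s;

   where the function accesses s.i, s.in (as a whole), s.in.a and s.in.b,
   the group representatives form two access trees rooted at s.i and s.in:

     s.i      offset 0,  size 32
     s.in     offset 32, size 32
       s.in.a   offset 32, size 16
       s.in.b   offset 48, size 16

   s.in.a and s.in.b are children of s.in because they lie entirely within
   it.  */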
122 
123 struct access
124 {
125   /* Values returned by `get_ref_base_and_extent' for each component reference.
126      If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
127      `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
128   HOST_WIDE_INT offset;
129   HOST_WIDE_INT size;
130   tree base;
131 
132   /* Expression.  It is context dependent so do not use it to create new
133      expressions to access the original aggregate.  See PR 42154 for a
134      testcase.  */
135   tree expr;
136   /* Type.  */
137   tree type;
138 
139   /* The statement this access belongs to.  */
140   gimple stmt;
141 
142   /* Next group representative for this aggregate. */
143   struct access *next_grp;
144 
145   /* Pointer to the group representative.  Pointer to itself if the struct is
146      the representative.  */
147   struct access *group_representative;
148 
149   /* If this access has any children (in terms of the definition above), this
150      points to the first one.  */
151   struct access *first_child;
152 
153   /* In intraprocedural SRA, pointer to the next sibling in the access tree as
154      described above.  In IPA-SRA this is a pointer to the next access
155      belonging to the same group (having the same representative).  */
156   struct access *next_sibling;
157 
158   /* Pointers to the first and last element in the linked list of assign
159      links.  */
160   struct assign_link *first_link, *last_link;
161 
162   /* Pointer to the next access in the work queue.  */
163   struct access *next_queued;
164 
165   /* Replacement variable for this access "region."  Never to be accessed
166      directly, always only by the means of get_access_replacement() and only
167      when grp_to_be_replaced flag is set.  */
168   tree replacement_decl;
169 
170   /* Is this particular access a write access? */
171   unsigned write : 1;
172 
173   /* Is this access an access to a non-addressable field? */
174   unsigned non_addressable : 1;
175 
176   /* Is this access currently in the work queue?  */
177   unsigned grp_queued : 1;
178 
179   /* Does this group contain a write access?  This flag is propagated down the
180      access tree.  */
181   unsigned grp_write : 1;
182 
183   /* Does this group contain a read access?  This flag is propagated down the
184      access tree.  */
185   unsigned grp_read : 1;
186 
187   /* Does this group contain a read access that comes from an assignment
188      statement?  This flag is propagated down the access tree.  */
189   unsigned grp_assignment_read : 1;
190 
191   /* Does this group contain a write access that comes from an assignment
192      statement?  This flag is propagated down the access tree.  */
193   unsigned grp_assignment_write : 1;
194 
195   /* Does this group contain a read access through a scalar type?  This flag is
196      not propagated in the access tree in any direction.  */
197   unsigned grp_scalar_read : 1;
198 
199   /* Does this group contain a write access through a scalar type?  This flag
200      is not propagated in the access tree in any direction.  */
201   unsigned grp_scalar_write : 1;
202 
203   /* Is this access an artificial one created to scalarize some record
204      entirely? */
205   unsigned grp_total_scalarization : 1;
206 
207   /* Other passes of the analysis use this bit to make function
208      analyze_access_subtree create scalar replacements for this group if
209      possible.  */
210   unsigned grp_hint : 1;
211 
212   /* Is the subtree rooted in this access fully covered by scalar
213      replacements?  */
214   unsigned grp_covered : 1;
215 
216   /* If set to true, this access and all below it in an access tree must not be
217      scalarized.  */
218   unsigned grp_unscalarizable_region : 1;
219 
220   /* Whether data have been written to parts of the aggregate covered by this
221      access which is not to be scalarized.  This flag is propagated up in the
222      access tree.  */
223   unsigned grp_unscalarized_data : 1;
224 
225   /* Does this access and/or group contain a write access through a
226      BIT_FIELD_REF?  */
227   unsigned grp_partial_lhs : 1;
228 
229   /* Set when a scalar replacement should be created for this variable.  The
230      decision and the creation are made at different places because create_tmp_var
231      cannot be called from within FOR_EACH_REFERENCED_VAR. */
232   unsigned grp_to_be_replaced : 1;
233 
234   /* Should TREE_NO_WARNING of a replacement be set?  */
235   unsigned grp_no_warning : 1;
236 
237   /* Is it possible that the group refers to data which might be (directly or
238      otherwise) modified?  */
239   unsigned grp_maybe_modified : 1;
240 
241   /* Set when this is a representative of a pointer to scalar (i.e. by
242      reference) parameter which we consider for turning into a plain scalar
243      (i.e. a by value parameter).  */
244   unsigned grp_scalar_ptr : 1;
245 
246   /* Set when we discover that this pointer is not safe to dereference in the
247      caller.  */
248   unsigned grp_not_necessarilly_dereferenced : 1;
249 };
250 
251 typedef struct access *access_p;
252 
253 DEF_VEC_P (access_p);
254 DEF_VEC_ALLOC_P (access_p, heap);
255 
256 /* Alloc pool for allocating access structures.  */
257 static alloc_pool access_pool;
258 
259 /* A structure linking lhs and rhs accesses from an aggregate assignment.  They
260    are used to propagate subaccesses from rhs to lhs as long as they don't
261    conflict with what is already there.  */
262 struct assign_link
263 {
264   struct access *lacc, *racc;
265   struct assign_link *next;
266 };
267 
268 /* Alloc pool for allocating assign link structures.  */
269 static alloc_pool link_pool;
270 
271 /* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
272 static struct pointer_map_t *base_access_vec;
273 
274 /* Bitmap of candidates.  */
275 static bitmap candidate_bitmap;
276 
277 /* Bitmap of candidates which we should try to entirely scalarize away and
278    those which cannot be (because they are, and need to be, used as a whole).  */
279 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
280 
281 /* Obstack for creation of fancy names.  */
282 static struct obstack name_obstack;
283 
284 /* Head of a linked list of accesses that need to have their subaccesses
285    propagated to their assignment counterparts.  */
286 static struct access *work_queue_head;
287 
288 /* Number of parameters of the analyzed function when doing early ipa SRA.  */
289 static int func_param_count;
290 
291 /* scan_function sets the following to true if it encounters a call to
292    __builtin_apply_args.  */
293 static bool encountered_apply_args;
294 
295 /* Set by scan_function when it finds a recursive call.  */
296 static bool encountered_recursive_call;
297 
298 /* Set by scan_function when it finds a recursive call with fewer actual
299    arguments than formal parameters.  */
300 static bool encountered_unchangable_recursive_call;
301 
302 /* This is a table in which for each basic block and parameter there is a
303    distance (offset + size) in that parameter which is dereferenced and
304    accessed in that BB.  */
305 static HOST_WIDE_INT *bb_dereferences;
306 /* Bitmap of BBs that can cause the function to "stop" progressing by
307    returning, throwing externally, looping infinitely or calling a function
308    which might abort, etc.  */
309 static bitmap final_bbs;
310 
311 /* Representative of no accesses at all. */
312 static struct access  no_accesses_representant;
313 
314 /* Predicate to test the special value.  */
315 
316 static inline bool
317 no_accesses_p (struct access *access)
318 {
319   return access == &no_accesses_representant;
320 }
321 
322 /* Statistics gathered during the SRA passes.  */
325 
326 static struct
327 {
328   /* Number of processed aggregates is readily available in
329      analyze_all_variable_accesses and so is not stored here.  */
330 
331   /* Number of created scalar replacements.  */
332   int replacements;
333 
334   /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
335      expression.  */
336   int exprs;
337 
338   /* Number of statements created by generate_subtree_copies.  */
339   int subtree_copies;
340 
341   /* Number of statements created by load_assign_lhs_subreplacements.  */
342   int subreplacements;
343 
344   /* Number of times sra_modify_assign has deleted a statement.  */
345   int deleted;
346 
347   /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
348      RHS separately due to type conversions or nonexistent matching
349      references.  */
350   int separate_lhs_rhs_handling;
351 
352   /* Number of parameters that were removed because they were unused.  */
353   int deleted_unused_parameters;
354 
355   /* Number of scalars passed as parameters by reference that have been
356      converted to be passed by value.  */
357   int scalar_by_ref_to_by_val;
358 
359   /* Number of aggregate parameters that were replaced by one or more of their
360      components.  */
361   int aggregate_params_reduced;
362 
363   /* Number of components created when splitting aggregate parameters.  */
364   int param_reductions_created;
365 } sra_stats;
366 
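/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */
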
367 static void
368 dump_access (FILE *f, struct access *access, bool grp)
369 {
370   fprintf (f, "access { ");
371   fprintf (f, "base = (%d)'", DECL_UID (access->base));
372   print_generic_expr (f, access->base, 0);
373   fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
374   fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
375   fprintf (f, ", expr = ");
376   print_generic_expr (f, access->expr, 0);
377   fprintf (f, ", type = ");
378   print_generic_expr (f, access->type, 0);
379   if (grp)
380     fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
381 	     "grp_assignment_write = %d, grp_scalar_read = %d, "
382 	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
383 	     "grp_hint = %d, grp_covered = %d, "
384 	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
385 	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
386 	     "grp_maybe_modified = %d, "
387 	     "grp_not_necessarilly_dereferenced = %d\n",
388 	     access->grp_read, access->grp_write, access->grp_assignment_read,
389 	     access->grp_assignment_write, access->grp_scalar_read,
390 	     access->grp_scalar_write, access->grp_total_scalarization,
391 	     access->grp_hint, access->grp_covered,
392 	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
393 	     access->grp_partial_lhs, access->grp_to_be_replaced,
394 	     access->grp_maybe_modified,
395 	     access->grp_not_necessarilly_dereferenced);
396   else
397     fprintf (f, ", write = %d, grp_total_scalarization = %d, "
398 	     "grp_partial_lhs = %d\n",
399 	     access->write, access->grp_total_scalarization,
400 	     access->grp_partial_lhs);
401 }
402 
403 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */
404 
405 static void
406 dump_access_tree_1 (FILE *f, struct access *access, int level)
407 {
408   do
409     {
410       int i;
411 
412       for (i = 0; i < level; i++)
413 	fputs ("* ", dump_file);
414 
415       dump_access (f, access, true);
416 
417       if (access->first_child)
418 	dump_access_tree_1 (f, access->first_child, level + 1);
419 
420       access = access->next_sibling;
421     }
422   while (access);
423 }
424 
425 /* Dump all access trees for a variable, given the pointer to the first root in
426    ACCESS.  */
427 
428 static void
429 dump_access_tree (FILE *f, struct access *access)
430 {
431   for (; access; access = access->next_grp)
432     dump_access_tree_1 (f, access, 0);
433 }
434 
435 /* Return true iff ACC is non-NULL and has subaccesses.  */
436 
437 static inline bool
438 access_has_children_p (struct access *acc)
439 {
440   return acc && acc->first_child;
441 }
442 
443 /* Return true iff ACC is (partly) covered by at least one replacement.  */
444 
445 static bool
446 access_has_replacements_p (struct access *acc)
447 {
448   struct access *child;
449   if (acc->grp_to_be_replaced)
450     return true;
451   for (child = acc->first_child; child; child = child->next_sibling)
452     if (access_has_replacements_p (child))
453       return true;
454   return false;
455 }
456 
457 /* Return a vector of pointers to accesses for the variable given in BASE or
458    NULL if there is none.  */
459 
460 static VEC (access_p, heap) *
461 get_base_access_vector (tree base)
462 {
463   void **slot;
464 
465   slot = pointer_map_contains (base_access_vec, base);
466   if (!slot)
467     return NULL;
468   else
469     return *(VEC (access_p, heap) **) slot;
470 }
471 
472 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
473    in ACCESS.  Return NULL if it cannot be found.  */
474 
475 static struct access *
476 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
477 			HOST_WIDE_INT size)
478 {
479   while (access && (access->offset != offset || access->size != size))
480     {
481       struct access *child = access->first_child;
482 
483       while (child && (child->offset + child->size <= offset))
484 	child = child->next_sibling;
485       access = child;
486     }
487 
488   return access;
489 }
490 
491 /* Return the first group representative for BASE or NULL if none exists.  */
492 
493 static struct access *
494 get_first_repr_for_decl (tree base)
495 {
496   VEC (access_p, heap) *access_vec;
497 
498   access_vec = get_base_access_vector (base);
499   if (!access_vec)
500     return NULL;
501 
502   return VEC_index (access_p, access_vec, 0);
503 }
504 
505 /* Find an access representative for the variable BASE and given OFFSET and
506    SIZE.  Requires that access trees have already been built.  Return NULL if
507    it cannot be found.  */
508 
509 static struct access *
510 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
511 				 HOST_WIDE_INT size)
512 {
513   struct access *access;
514 
515   access = get_first_repr_for_decl (base);
516   while (access && (access->offset + access->size <= offset))
517     access = access->next_grp;
518   if (!access)
519     return NULL;
520 
521   return find_access_in_subtree (access, offset, size);
522 }
523 
524 /* Add LINK to the linked list of assign links of RACC.  */
525 static void
526 add_link_to_rhs (struct access *racc, struct assign_link *link)
527 {
528   gcc_assert (link->racc == racc);
529 
530   if (!racc->first_link)
531     {
532       gcc_assert (!racc->last_link);
533       racc->first_link = link;
534     }
535   else
536     racc->last_link->next = link;
537 
538   racc->last_link = link;
539   link->next = NULL;
540 }
541 
542 /* Move all link structures in their linked list in OLD_RACC to the linked list
543    in NEW_RACC.  */
544 static void
545 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
546 {
547   if (!old_racc->first_link)
548     {
549       gcc_assert (!old_racc->last_link);
550       return;
551     }
552 
553   if (new_racc->first_link)
554     {
555       gcc_assert (!new_racc->last_link->next);
556       gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
557 
558       new_racc->last_link->next = old_racc->first_link;
559       new_racc->last_link = old_racc->last_link;
560     }
561   else
562     {
563       gcc_assert (!new_racc->last_link);
564 
565       new_racc->first_link = old_racc->first_link;
566       new_racc->last_link = old_racc->last_link;
567     }
568   old_racc->first_link = old_racc->last_link = NULL;
569 }
570 
571 /* Add ACCESS to the work queue (which is actually a stack).  */
572 
573 static void
574 add_access_to_work_queue (struct access *access)
575 {
576   if (!access->grp_queued)
577     {
578       gcc_assert (!access->next_queued);
579       access->next_queued = work_queue_head;
580       access->grp_queued = 1;
581       work_queue_head = access;
582     }
583 }
584 
585 /* Pop an access from the work queue, and return it, assuming there is one.  */
586 
587 static struct access *
588 pop_access_from_work_queue (void)
589 {
590   struct access *access = work_queue_head;
591 
592   work_queue_head = access->next_queued;
593   access->next_queued = NULL;
594   access->grp_queued = 0;
595   return access;
596 }
597 
598 
599 /* Allocate necessary structures.  */
600 
601 static void
602 sra_initialize (void)
603 {
604   candidate_bitmap = BITMAP_ALLOC (NULL);
605   should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
606   cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
607   gcc_obstack_init (&name_obstack);
608   access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
609   link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
610   base_access_vec = pointer_map_create ();
611   memset (&sra_stats, 0, sizeof (sra_stats));
612   encountered_apply_args = false;
613   encountered_recursive_call = false;
614   encountered_unchangable_recursive_call = false;
615 }
616 
617 /* Hook fed to pointer_map_traverse, deallocate stored vectors.  */
618 
619 static bool
620 delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
621 		     void *data ATTRIBUTE_UNUSED)
622 {
623   VEC (access_p, heap) *access_vec;
624   access_vec = (VEC (access_p, heap) *) *value;
625   VEC_free (access_p, heap, access_vec);
626 
627   return true;
628 }
629 
630 /* Deallocate all general structures.  */
631 
632 static void
633 sra_deinitialize (void)
634 {
635   BITMAP_FREE (candidate_bitmap);
636   BITMAP_FREE (should_scalarize_away_bitmap);
637   BITMAP_FREE (cannot_scalarize_away_bitmap);
638   free_alloc_pool (access_pool);
639   free_alloc_pool (link_pool);
640   obstack_free (&name_obstack, NULL);
641 
642   pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
643   pointer_map_destroy (base_access_vec);
644 }
645 
646 /* Remove DECL from candidates for SRA and write REASON to the dump file if
647    there is one.  */
648 static void
649 disqualify_candidate (tree decl, const char *reason)
650 {
651   bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));
652 
653   if (dump_file && (dump_flags & TDF_DETAILS))
654     {
655       fprintf (dump_file, "! Disqualifying ");
656       print_generic_expr (dump_file, decl, 0);
657       fprintf (dump_file, " - %s\n", reason);
658     }
659 }
660 
661 /* Return true iff the type contains a field or an element which does not allow
662    scalarization.  */
663 
664 static bool
665 type_internals_preclude_sra_p (tree type, const char **msg)
666 {
667   tree fld;
668   tree et;
669 
670   switch (TREE_CODE (type))
671     {
672     case RECORD_TYPE:
673     case UNION_TYPE:
674     case QUAL_UNION_TYPE:
675       for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
676 	if (TREE_CODE (fld) == FIELD_DECL)
677 	  {
678 	    tree ft = TREE_TYPE (fld);
679 
680 	    if (TREE_THIS_VOLATILE (fld))
681 	      {
682 		*msg = "volatile structure field";
683 		return true;
684 	      }
685 	    if (!DECL_FIELD_OFFSET (fld))
686 	      {
687 		*msg = "no structure field offset";
688 		return true;
689 	      }
690 	    if (!DECL_SIZE (fld))
691 	      {
692 		*msg = "zero structure field size";
693 	        return true;
694 	      }
695 	    if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
696 	      {
697 		*msg = "structure field offset not fixed";
698 		return true;
699 	      }
700 	    if (!host_integerp (DECL_SIZE (fld), 1))
701 	      {
702 	        *msg = "structure field size not fixed";
703 		return true;
704 	      }
705 	    if (AGGREGATE_TYPE_P (ft)
706 		    && int_bit_position (fld) % BITS_PER_UNIT != 0)
707 	      {
708 		*msg = "structure field is bit field";
709 	        return true;
710 	      }
711 
712 	    if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
713 	      return true;
714 	  }
715 
716       return false;
717 
718     case ARRAY_TYPE:
719       et = TREE_TYPE (type);
720 
721       if (TYPE_VOLATILE (et))
722 	{
723 	  *msg = "element type is volatile";
724 	  return true;
725 	}
726 
727       if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
728 	return true;
729 
730       return false;
731 
732     default:
733       return false;
734     }
735 }
736 
737 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
738    base variable if it is.  Return T if it is not an SSA_NAME.  */
739 
740 static tree
741 get_ssa_base_param (tree t)
742 {
743   if (TREE_CODE (t) == SSA_NAME)
744     {
745       if (SSA_NAME_IS_DEFAULT_DEF (t))
746 	return SSA_NAME_VAR (t);
747       else
748 	return NULL_TREE;
749     }
750   return t;
751 }
752 
753 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
754    belongs to, unless the BB has already been marked as potentially
755    final.  */
756 
757 static void
758 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
759 {
760   basic_block bb = gimple_bb (stmt);
761   int idx, parm_index = 0;
762   tree parm;
763 
764   if (bitmap_bit_p (final_bbs, bb->index))
765     return;
766 
767   for (parm = DECL_ARGUMENTS (current_function_decl);
768        parm && parm != base;
769        parm = DECL_CHAIN (parm))
770     parm_index++;
771 
772   gcc_assert (parm_index < func_param_count);
773 
774   idx = bb->index * func_param_count + parm_index;
775   if (bb_dereferences[idx] < dist)
776     bb_dereferences[idx] = dist;
777 }
778 
779 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
780    the three fields.  Also add it to the vector of accesses corresponding to
781    the base.  Finally, return the new access.  */
782 
783 static struct access *
784 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
785 {
786   VEC (access_p, heap) *vec;
787   struct access *access;
788   void **slot;
789 
790   access = (struct access *) pool_alloc (access_pool);
791   memset (access, 0, sizeof (struct access));
792   access->base = base;
793   access->offset = offset;
794   access->size = size;
795 
796   slot = pointer_map_contains (base_access_vec, base);
797   if (slot)
798     vec = (VEC (access_p, heap) *) *slot;
799   else
800     vec = VEC_alloc (access_p, heap, 32);
801 
802   VEC_safe_push (access_p, heap, vec, access);
803 
804   *((struct VEC (access_p,heap) **)
805 	pointer_map_insert (base_access_vec, base)) = vec;
806 
807   return access;
808 }
809 
810 /* Create and insert access for EXPR. Return created access, or NULL if it is
811    not possible.  */
812 
813 static struct access *
814 create_access (tree expr, gimple stmt, bool write)
815 {
816   struct access *access;
817   HOST_WIDE_INT offset, size, max_size;
818   tree base = expr;
819   bool ptr, unscalarizable_region = false;
820 
821   base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
822 
823   if (sra_mode == SRA_MODE_EARLY_IPA
824       && TREE_CODE (base) == MEM_REF)
825     {
826       base = get_ssa_base_param (TREE_OPERAND (base, 0));
827       if (!base)
828 	return NULL;
829       ptr = true;
830     }
831   else
832     ptr = false;
833 
834   if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
835     return NULL;
836 
837   if (sra_mode == SRA_MODE_EARLY_IPA)
838     {
839       if (size < 0 || size != max_size)
840 	{
841 	  disqualify_candidate (base, "Encountered a variable sized access.");
842 	  return NULL;
843 	}
844       if (TREE_CODE (expr) == COMPONENT_REF
845 	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
846 	{
847 	  disqualify_candidate (base, "Encountered a bit-field access.");
848 	  return NULL;
849 	}
850       gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
851 
852       if (ptr)
853 	mark_parm_dereference (base, offset + size, stmt);
854     }
855   else
856     {
857       if (size != max_size)
858 	{
859 	  size = max_size;
860 	  unscalarizable_region = true;
861 	}
862       if (size < 0)
863 	{
864 	  disqualify_candidate (base, "Encountered an unconstrained access.");
865 	  return NULL;
866 	}
867     }
868 
869   access = create_access_1 (base, offset, size);
870   access->expr = expr;
871   access->type = TREE_TYPE (expr);
872   access->write = write;
873   access->grp_unscalarizable_region = unscalarizable_region;
874   access->stmt = stmt;
875 
876   if (TREE_CODE (expr) == COMPONENT_REF
877       && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
878     access->non_addressable = 1;
879 
880   return access;
881 }
882 
883 
884 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
885    register types or (recursively) records with only these two kinds of fields.
886    It also returns false if any of these records contains a bit-field.  */
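
/* For example (a hypothetical illustration),

     struct A { int i; struct { float f; } inner; };

   satisfies this predicate, whereas a record that contains a bit-field or
   an array member does not.  */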
887 
888 static bool
889 type_consists_of_records_p (tree type)
890 {
891   tree fld;
892 
893   if (TREE_CODE (type) != RECORD_TYPE)
894     return false;
895 
896   for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
897     if (TREE_CODE (fld) == FIELD_DECL)
898       {
899 	tree ft = TREE_TYPE (fld);
900 
901 	if (DECL_BIT_FIELD (fld))
902 	  return false;
903 
904 	if (!is_gimple_reg_type (ft)
905 	    && !type_consists_of_records_p (ft))
906 	  return false;
907       }
908 
909   return true;
910 }
911 
912 /* Create total_scalarization accesses for all scalar type fields in DECL,
913    which must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
914    must be the top-most VAR_DECL representing the variable, OFFSET must be the
915    offset of DECL within BASE.  REF must be the memory reference expression for
916    the given decl.  */
917 
918 static void
919 completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
920 			     tree ref)
921 {
922   tree fld, decl_type = TREE_TYPE (decl);
923 
924   for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
925     if (TREE_CODE (fld) == FIELD_DECL)
926       {
927 	HOST_WIDE_INT pos = offset + int_bit_position (fld);
928 	tree ft = TREE_TYPE (fld);
929 	tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
930 			    NULL_TREE);
931 
932 	if (is_gimple_reg_type (ft))
933 	  {
934 	    struct access *access;
935 	    HOST_WIDE_INT size;
936 
937 	    size = tree_low_cst (DECL_SIZE (fld), 1);
938 	    access = create_access_1 (base, pos, size);
939 	    access->expr = nref;
940 	    access->type = ft;
941 	    access->grp_total_scalarization = 1;
942 	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
943 	  }
944 	else
945 	  completely_scalarize_record (base, fld, pos, nref);
946       }
947 }
948 
949 /* Create total_scalarization accesses for all scalar type fields in VAR and
950    for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
951    type_consists_of_records_p.  */
952 
953 static void
954 completely_scalarize_var (tree var)
955 {
956   HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
957   struct access *access;
958 
959   access = create_access_1 (var, 0, size);
960   access->expr = var;
961   access->type = TREE_TYPE (var);
962   access->grp_total_scalarization = 1;
963 
964   completely_scalarize_record (var, var, 0, var);
965 }
966 
967 /* Search the given tree for a declaration by skipping handled components and
968    exclude it from the candidates.  */
969 
970 static void
971 disqualify_base_of_expr (tree t, const char *reason)
972 {
973   t = get_base_address (t);
974   if (t
975       && sra_mode == SRA_MODE_EARLY_IPA
976       && TREE_CODE (t) == MEM_REF)
977     t = get_ssa_base_param (TREE_OPERAND (t, 0));
978 
979   if (t && DECL_P (t))
980     disqualify_candidate (t, reason);
981 }
982 
983 /* Scan expression EXPR and create access structures for all accesses to
984    candidates for scalarization.  Return the created access or NULL if none is
985    created.  */
986 
987 static struct access *
988 build_access_from_expr_1 (tree expr, gimple stmt, bool write)
989 {
990   struct access *ret = NULL;
991   bool partial_ref;
992 
993   if (TREE_CODE (expr) == BIT_FIELD_REF
994       || TREE_CODE (expr) == IMAGPART_EXPR
995       || TREE_CODE (expr) == REALPART_EXPR)
996     {
997       expr = TREE_OPERAND (expr, 0);
998       partial_ref = true;
999     }
1000   else
1001     partial_ref = false;
1002 
1003   /* We need to dive through V_C_Es in order to get the size of the operand
1004      and not of the result type.  Ada produces such statements.  We are also
1005      capable of handling the topmost V_C_E but not any of those buried in other
1006      handled components.  */
1007   if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1008     expr = TREE_OPERAND (expr, 0);
1009 
1010   if (contains_view_convert_expr_p (expr))
1011     {
1012       disqualify_base_of_expr (expr, "V_C_E under a different handled "
1013 			       "component.");
1014       return NULL;
1015     }
1016 
1017   switch (TREE_CODE (expr))
1018     {
1019     case MEM_REF:
1020       if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1021 	  && sra_mode != SRA_MODE_EARLY_IPA)
1022 	return NULL;
1023       /* fall through */
1024     case VAR_DECL:
1025     case PARM_DECL:
1026     case RESULT_DECL:
1027     case COMPONENT_REF:
1028     case ARRAY_REF:
1029     case ARRAY_RANGE_REF:
1030       ret = create_access (expr, stmt, write);
1031       break;
1032 
1033     default:
1034       break;
1035     }
1036 
1037   if (write && partial_ref && ret)
1038     ret->grp_partial_lhs = 1;
1039 
1040   return ret;
1041 }
1042 
1043 /* Scan expression EXPR and create access structures for all accesses to
1044    candidates for scalarization.  Return true if any access has been inserted.
1045    STMT must be the statement from which the expression is taken, WRITE must be
1046    true if the expression is a store and false otherwise. */
1047 
1048 static bool
1049 build_access_from_expr (tree expr, gimple stmt, bool write)
1050 {
1051   struct access *access;
1052 
1053   access = build_access_from_expr_1 (expr, stmt, write);
1054   if (access)
1055     {
1056       /* This means the aggregate is accessed as a whole in a way other than an
1057 	 assign statement and thus cannot be removed even if we had a scalar
1058 	 replacement for everything.  */
1059       if (cannot_scalarize_away_bitmap)
1060 	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1061       return true;
1062     }
1063   return false;
1064 }
1065 
1066 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
1067    modes in which it matters; return true iff they have been disqualified.  RHS
1068    may be NULL, in that case ignore it.  If we scalarize an aggregate in
1069    intra-SRA we may need to add statements after each statement.  This is not
1070    possible if a statement unconditionally has to end the basic block.  */
1071 static bool
1072 disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
1073 {
1074   if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1075       && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
1076     {
1077       disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1078       if (rhs)
1079 	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1080       return true;
1081     }
1082   return false;
1083 }
1084 
1085 /* Return true if EXP is a memory reference less aligned than ALIGN.  This is
1086    invoked only on strict-alignment targets.  */
1087 
1088 static bool
1089 tree_non_aligned_mem_p (tree exp, unsigned int align)
1090 {
1091   unsigned int exp_align;
1092 
1093   if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
1094     exp = TREE_OPERAND (exp, 0);
1095 
1096   if (TREE_CODE (exp) == SSA_NAME || is_gimple_min_invariant (exp))
1097     return false;
1098 
1099   /* get_object_alignment will fall back to BITS_PER_UNIT if it cannot
1100      compute an explicit alignment.  Pretend that dereferenced pointers
1101      are always aligned on strict-alignment targets.  */
1102   if (TREE_CODE (exp) == MEM_REF || TREE_CODE (exp) == TARGET_MEM_REF)
1103     exp_align = get_object_or_type_alignment (exp);
1104   else
1105     exp_align = get_object_alignment (exp);
1106 
1107   if (exp_align < align)
1108     return true;
1109 
1110   return false;
1111 }
1112 
1113 /* Return true if EXP is a memory reference less aligned than what the access
1114    ACC would require.  This is invoked only on strict-alignment targets.  */
1115 
1116 static bool
1117 tree_non_aligned_mem_for_access_p (tree exp, struct access *acc)
1118 {
1119   unsigned int acc_align;
1120 
1121   /* The alignment of the access is that of its expression.  However, it may
1122      have been artificially increased, e.g. by a local alignment promotion,
1123      so we cap it to the alignment of the type of the base, on the grounds
1124      that valid sub-accesses cannot be more aligned than that.  */
1125   acc_align = get_object_alignment (acc->expr);
1126   if (acc->base && acc_align > TYPE_ALIGN (TREE_TYPE (acc->base)))
1127     acc_align = TYPE_ALIGN (TREE_TYPE (acc->base));
1128 
1129   return tree_non_aligned_mem_p (exp, acc_align);
1130 }
1131 
1132 /* Scan expressions occurring in STMT, create access structures for all accesses
1133    to candidates for scalarization and remove those candidates which occur in
1134    statements or expressions that prevent them from being split apart.  Return
1135    true if any access has been inserted.  */
1136 
1137 static bool
1138 build_accesses_from_assign (gimple stmt)
1139 {
1140   tree lhs, rhs;
1141   struct access *lacc, *racc;
1142 
1143   if (!gimple_assign_single_p (stmt)
1144       /* Scope clobbers don't influence scalarization.  */
1145       || gimple_clobber_p (stmt))
1146     return false;
1147 
1148   lhs = gimple_assign_lhs (stmt);
1149   rhs = gimple_assign_rhs1 (stmt);
1150 
1151   if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
1152     return false;
1153 
1154   racc = build_access_from_expr_1 (rhs, stmt, false);
1155   lacc = build_access_from_expr_1 (lhs, stmt, true);
1156 
1157   if (lacc)
1158     {
1159       lacc->grp_assignment_write = 1;
1160       if (STRICT_ALIGNMENT && tree_non_aligned_mem_for_access_p (rhs, lacc))
1161         lacc->grp_unscalarizable_region = 1;
1162     }
1163 
1164   if (racc)
1165     {
1166       racc->grp_assignment_read = 1;
1167       if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1168 	  && !is_gimple_reg_type (racc->type))
1169 	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1170       if (STRICT_ALIGNMENT && tree_non_aligned_mem_for_access_p (lhs, racc))
1171         racc->grp_unscalarizable_region = 1;
1172     }
1173 
1174   if (lacc && racc
1175       && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1176       && !lacc->grp_unscalarizable_region
1177       && !racc->grp_unscalarizable_region
1178       && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1179       /* FIXME: Turn the following line into an assert after PR 40058 is
1180 	 fixed.  */
1181       && lacc->size == racc->size
1182       && useless_type_conversion_p (lacc->type, racc->type))
1183     {
1184       struct assign_link *link;
1185 
1186       link = (struct assign_link *) pool_alloc (link_pool);
1187       memset (link, 0, sizeof (struct assign_link));
1188 
1189       link->lacc = lacc;
1190       link->racc = racc;
1191 
1192       add_link_to_rhs (racc, link);
1193     }
1194 
1195   return lacc || racc;
1196 }
1197 
1198 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1199    GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */
1200 
1201 static bool
1202 asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
1203 		void *data ATTRIBUTE_UNUSED)
1204 {
1205   op = get_base_address (op);
1206   if (op
1207       && DECL_P (op))
1208     disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1209 
1210   return false;
1211 }
1212 
1213 /* Return true iff callsite CALL has at least as many actual arguments as there
1214    are formal parameters of the function currently processed by IPA-SRA.  */
1215 
1216 static inline bool
1217 callsite_has_enough_arguments_p (gimple call)
1218 {
1219   return gimple_call_num_args (call) >= (unsigned) func_param_count;
1220 }
1221 
1222 /* Scan function and look for interesting expressions and create access
1223    structures for them.  Return true iff any access is created.  */
1224 
1225 static bool
1226 scan_function (void)
1227 {
1228   basic_block bb;
1229   bool ret = false;
1230 
1231   FOR_EACH_BB (bb)
1232     {
1233       gimple_stmt_iterator gsi;
1234       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1235 	{
1236 	  gimple stmt = gsi_stmt (gsi);
1237 	  tree t;
1238 	  unsigned i;
1239 
1240 	  if (final_bbs && stmt_can_throw_external (stmt))
1241 	    bitmap_set_bit (final_bbs, bb->index);
1242 	  switch (gimple_code (stmt))
1243 	    {
1244 	    case GIMPLE_RETURN:
1245 	      t = gimple_return_retval (stmt);
1246 	      if (t != NULL_TREE)
1247 		ret |= build_access_from_expr (t, stmt, false);
1248 	      if (final_bbs)
1249 		bitmap_set_bit (final_bbs, bb->index);
1250 	      break;
1251 
1252 	    case GIMPLE_ASSIGN:
1253 	      ret |= build_accesses_from_assign (stmt);
1254 	      break;
1255 
1256 	    case GIMPLE_CALL:
1257 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
1258 		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1259 					       stmt, false);
1260 
1261 	      if (sra_mode == SRA_MODE_EARLY_IPA)
1262 		{
1263 		  tree dest = gimple_call_fndecl (stmt);
1264 		  int flags = gimple_call_flags (stmt);
1265 
1266 		  if (dest)
1267 		    {
1268 		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1269 			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1270 			encountered_apply_args = true;
1271 		      if (cgraph_get_node (dest)
1272 			  == cgraph_get_node (current_function_decl))
1273 			{
1274 			  encountered_recursive_call = true;
1275 			  if (!callsite_has_enough_arguments_p (stmt))
1276 			    encountered_unchangable_recursive_call = true;
1277 			}
1278 		    }
1279 
1280 		  if (final_bbs
1281 		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
1282 		    bitmap_set_bit (final_bbs, bb->index);
1283 		}
1284 
1285 	      t = gimple_call_lhs (stmt);
1286 	      if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
1287 		ret |= build_access_from_expr (t, stmt, true);
1288 	      break;
1289 
1290 	    case GIMPLE_ASM:
1291 	      walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1292 					     asm_visit_addr);
1293 	      if (final_bbs)
1294 		bitmap_set_bit (final_bbs, bb->index);
1295 
1296 	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1297 		{
1298 		  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
1299 		  ret |= build_access_from_expr (t, stmt, false);
1300 		}
1301 	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1302 		{
1303 		  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
1304 		  ret |= build_access_from_expr (t, stmt, true);
1305 		}
1306 	      break;
1307 
1308 	    default:
1309 	      break;
1310 	    }
1311 	}
1312     }
1313 
1314   return ret;
1315 }
1316 
1317 /* Helper of QSORT function. There are pointers to accesses in the array.  An
1318    access is considered smaller than another if it has a smaller offset or if
1319    the offsets are the same but its size is bigger.  */
1320 
1321 static int
1322 compare_access_positions (const void *a, const void *b)
1323 {
1324   const access_p *fp1 = (const access_p *) a;
1325   const access_p *fp2 = (const access_p *) b;
1326   const access_p f1 = *fp1;
1327   const access_p f2 = *fp2;
1328 
1329   if (f1->offset != f2->offset)
1330     return f1->offset < f2->offset ? -1 : 1;
1331 
1332   if (f1->size == f2->size)
1333     {
1334       if (f1->type == f2->type)
1335 	return 0;
1336       /* Put any non-aggregate type before any aggregate type.  */
1337       else if (!is_gimple_reg_type (f1->type)
1338 	  && is_gimple_reg_type (f2->type))
1339 	return 1;
1340       else if (is_gimple_reg_type (f1->type)
1341 	       && !is_gimple_reg_type (f2->type))
1342 	return -1;
1343       /* Put any complex or vector type before any other scalar type.  */
1344       else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1345 	       && TREE_CODE (f1->type) != VECTOR_TYPE
1346 	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
1347 		   || TREE_CODE (f2->type) == VECTOR_TYPE))
1348 	return 1;
1349       else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1350 		|| TREE_CODE (f1->type) == VECTOR_TYPE)
1351 	       && TREE_CODE (f2->type) != COMPLEX_TYPE
1352 	       && TREE_CODE (f2->type) != VECTOR_TYPE)
1353 	return -1;
1354       /* Put the integral type with the bigger precision first.  */
1355       else if (INTEGRAL_TYPE_P (f1->type)
1356 	       && INTEGRAL_TYPE_P (f2->type))
1357 	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1358       /* Put any integral type with non-full precision last.  */
1359       else if (INTEGRAL_TYPE_P (f1->type)
1360 	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1361 		   != TYPE_PRECISION (f1->type)))
1362 	return 1;
1363       else if (INTEGRAL_TYPE_P (f2->type)
1364 	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1365 		   != TYPE_PRECISION (f2->type)))
1366 	return -1;
1367       /* Stabilize the sort.  */
1368       return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1369     }
1370 
1371   /* We want the bigger accesses first, thus the opposite operator in the next
1372      line: */
1373   return f1->size > f2->size ? -1 : 1;
1374 }
1375 
1376 
1377 /* Append a name of the declaration to the name obstack.  A helper function for
1378    make_fancy_name.  */
1379 
1380 static void
1381 make_fancy_decl_name (tree decl)
1382 {
1383   char buffer[32];
1384 
1385   tree name = DECL_NAME (decl);
1386   if (name)
1387     obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1388 		  IDENTIFIER_LENGTH (name));
1389   else
1390     {
1391       sprintf (buffer, "D%u", DECL_UID (decl));
1392       obstack_grow (&name_obstack, buffer, strlen (buffer));
1393     }
1394 }
1395 
1396 /* Helper for make_fancy_name.  */
1397 
1398 static void
1399 make_fancy_name_1 (tree expr)
1400 {
1401   char buffer[32];
1402   tree index;
1403 
1404   if (DECL_P (expr))
1405     {
1406       make_fancy_decl_name (expr);
1407       return;
1408     }
1409 
1410   switch (TREE_CODE (expr))
1411     {
1412     case COMPONENT_REF:
1413       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1414       obstack_1grow (&name_obstack, '$');
1415       make_fancy_decl_name (TREE_OPERAND (expr, 1));
1416       break;
1417 
1418     case ARRAY_REF:
1419       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1420       obstack_1grow (&name_obstack, '$');
1421       /* Arrays with only one element may not have a constant as their
1422 	 index. */
1423       index = TREE_OPERAND (expr, 1);
1424       if (TREE_CODE (index) != INTEGER_CST)
1425 	break;
1426       sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1427       obstack_grow (&name_obstack, buffer, strlen (buffer));
1428       break;
1429 
1430     case ADDR_EXPR:
1431       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1432       break;
1433 
1434     case MEM_REF:
1435       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1436       if (!integer_zerop (TREE_OPERAND (expr, 1)))
1437 	{
1438 	  obstack_1grow (&name_obstack, '$');
1439 	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1440 		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1441 	  obstack_grow (&name_obstack, buffer, strlen (buffer));
1442 	}
1443       break;
1444 
1445     case BIT_FIELD_REF:
1446     case REALPART_EXPR:
1447     case IMAGPART_EXPR:
1448       gcc_unreachable (); 	/* we treat these as scalars.  */
1449       break;
1450     default:
1451       break;
1452     }
1453 }
1454 
1455 /* Create a human readable name for the replacement variable of EXPR.  */
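/* For instance, for an access expression a.data[3].x this would produce the
   name "a$data$3$x" (a hypothetical illustration of the scheme implemented
   by make_fancy_name_1 above).  */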
1456 
1457 static char *
1458 make_fancy_name (tree expr)
1459 {
1460   make_fancy_name_1 (expr);
1461   obstack_1grow (&name_obstack, '\0');
1462   return XOBFINISH (&name_obstack, char *);
1463 }
1464 
1465 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1466    EXP_TYPE at the given OFFSET.  If BASE is something for which
1467    get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
1468    to insert new statements either before or below the current one as specified
1469    by INSERT_AFTER.  This function is not capable of handling bitfields.  */
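/* As a hypothetical illustration, for a VAR_DECL base S and OFFSET 32 the
   result is conceptually equivalent to what the GIMPLE dumps print as
   MEM[(EXP_TYPE *)&S + 4B].  */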
1470 
1471 tree
1472 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1473 		      tree exp_type, gimple_stmt_iterator *gsi,
1474 		      bool insert_after)
1475 {
1476   tree prev_base = base;
1477   tree off;
1478   HOST_WIDE_INT base_offset;
1479   unsigned HOST_WIDE_INT misalign;
1480   unsigned int align;
1481 
1482   gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1483 
1484   base = get_addr_base_and_unit_offset (base, &base_offset);
1485 
1486   /* get_addr_base_and_unit_offset returns NULL for references with a variable
1487      offset such as array[var_index].  */
1488   if (!base)
1489     {
1490       gimple stmt;
1491       tree tmp, addr;
1492 
1493       gcc_checking_assert (gsi);
1494       tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
1495       add_referenced_var (tmp);
1496       tmp = make_ssa_name (tmp, NULL);
1497       addr = build_fold_addr_expr (unshare_expr (prev_base));
1498       STRIP_USELESS_TYPE_CONVERSION (addr);
1499       stmt = gimple_build_assign (tmp, addr);
1500       gimple_set_location (stmt, loc);
1501       SSA_NAME_DEF_STMT (tmp) = stmt;
1502       if (insert_after)
1503 	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1504       else
1505 	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1506       update_stmt (stmt);
1507 
1508       off = build_int_cst (reference_alias_ptr_type (prev_base),
1509 			   offset / BITS_PER_UNIT);
1510       base = tmp;
1511     }
1512   else if (TREE_CODE (base) == MEM_REF)
1513     {
1514       off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1515 			   base_offset + offset / BITS_PER_UNIT);
1516       off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1517       base = unshare_expr (TREE_OPERAND (base, 0));
1518     }
1519   else
1520     {
1521       off = build_int_cst (reference_alias_ptr_type (base),
1522 			   base_offset + offset / BITS_PER_UNIT);
1523       base = build_fold_addr_expr (unshare_expr (base));
1524     }
1525 
1526   /* If prev_base were always an originally performed access
1527      we can extract more optimistic alignment information
1528      by looking at the access mode.  That would constrain the
1529      alignment of base + base_offset which we would need to
1530      adjust according to offset.  */
1531   align = get_pointer_alignment_1 (base, &misalign);
1532   if (misalign == 0
1533       && (TREE_CODE (prev_base) == MEM_REF
1534 	  || TREE_CODE (prev_base) == TARGET_MEM_REF))
1535     align = MAX (align, TYPE_ALIGN (TREE_TYPE (prev_base)));
1536   misalign += (double_int_sext (tree_to_double_int (off),
1537 				TYPE_PRECISION (TREE_TYPE (off))).low
1538 	       * BITS_PER_UNIT);
1539   misalign = misalign & (align - 1);
1540   if (misalign != 0)
1541     align = (misalign & -misalign);
1542   if (align < TYPE_ALIGN (exp_type))
1543     exp_type = build_aligned_type (exp_type, align);
1544 
1545   return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1546 }
1547 
1548 DEF_VEC_ALLOC_P_STACK (tree);
1549 #define VEC_tree_stack_alloc(alloc) VEC_stack_alloc (tree, alloc)
1550 
1551 /* Construct a memory reference to a part of an aggregate BASE at the given
1552    OFFSET and of the type of MODEL.  In case this is a chain of references
1553    to a component, the function will replicate the chain of COMPONENT_REFs of
1554    the expression of MODEL to access it.  GSI and INSERT_AFTER have the same
1555    meaning as in build_ref_for_offset.  */
1556 
1557 static tree
1558 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1559 		     struct access *model, gimple_stmt_iterator *gsi,
1560 		     bool insert_after)
1561 {
1562   tree type = model->type, t;
1563   VEC(tree,stack) *cr_stack = NULL;
1564 
1565   if (TREE_CODE (model->expr) == COMPONENT_REF)
1566     {
1567       tree expr = model->expr;
1568 
1569       /* Create a stack of the COMPONENT_REFs so later we can walk them in
1570 	 order from inner to outer.  */
1571       cr_stack = VEC_alloc (tree, stack, 6);
1572 
1573       do {
1574 	tree field = TREE_OPERAND (expr, 1);
1575 	tree cr_offset = component_ref_field_offset (expr);
1576 	HOST_WIDE_INT bit_pos
1577 	  = tree_low_cst (cr_offset, 1) * BITS_PER_UNIT
1578 	      + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1579 
1580 	/* We can be called with a model different from the one associated
1581 	   with BASE so we need to avoid going up the chain too far.  */
1582 	if (offset - bit_pos < 0)
1583 	  break;
1584 
1585 	offset -= bit_pos;
1586 	VEC_safe_push (tree, stack, cr_stack, expr);
1587 
1588 	expr = TREE_OPERAND (expr, 0);
1589 	type = TREE_TYPE (expr);
1590       } while (TREE_CODE (expr) == COMPONENT_REF);
1591     }
1592 
1593   t = build_ref_for_offset (loc, base, offset, type, gsi, insert_after);
1594 
1595   if (TREE_CODE (model->expr) == COMPONENT_REF)
1596     {
1597       unsigned i;
1598       tree expr;
1599 
1600       /* Now replicate the chain of COMPONENT_REFs from inner to outer.  */
1601       FOR_EACH_VEC_ELT_REVERSE (tree, cr_stack, i, expr)
1602 	{
1603 	  tree field = TREE_OPERAND (expr, 1);
1604 	  t = fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (field), t, field,
1605 			       TREE_OPERAND (expr, 2));
1606 	}
1607 
1608       VEC_free (tree, stack, cr_stack);
1609     }
1610 
1611   return t;
1612 }
1613 
1614 /* Construct a memory reference consisting of component_refs and array_refs to
1615    a part of an aggregate *RES (which is of type TYPE).  The requested part
1616    should have type EXP_TYPE and be at the given OFFSET.  This function might not
1617    succeed; it returns true when it does and only then *RES points to something
1618    meaningful.  This function should be used only to build expressions that we
1619    might need to present to user (e.g. in warnings).  In all other situations,
1620    build_ref_for_model or build_ref_for_offset should be used instead.  */
1621 
1622 static bool
1623 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1624 				    tree exp_type)
1625 {
1626   while (1)
1627     {
1628       tree fld;
1629       tree tr_size, index, minidx;
1630       HOST_WIDE_INT el_size;
1631 
1632       if (offset == 0 && exp_type
1633 	  && types_compatible_p (exp_type, type))
1634 	return true;
1635 
1636       switch (TREE_CODE (type))
1637 	{
1638 	case UNION_TYPE:
1639 	case QUAL_UNION_TYPE:
1640 	case RECORD_TYPE:
1641 	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1642 	    {
1643 	      HOST_WIDE_INT pos, size;
1644 	      tree expr, *expr_ptr;
1645 
1646 	      if (TREE_CODE (fld) != FIELD_DECL)
1647 		continue;
1648 
1649 	      pos = int_bit_position (fld);
1650 	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1651 	      tr_size = DECL_SIZE (fld);
1652 	      if (!tr_size || !host_integerp (tr_size, 1))
1653 		continue;
1654 	      size = tree_low_cst (tr_size, 1);
1655 	      if (size == 0)
1656 		{
1657 		  if (pos != offset)
1658 		    continue;
1659 		}
1660 	      else if (pos > offset || (pos + size) <= offset)
1661 		continue;
1662 
1663 	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1664 			     NULL_TREE);
1665 	      expr_ptr = &expr;
1666 	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1667 						      offset - pos, exp_type))
1668 		{
1669 		  *res = expr;
1670 		  return true;
1671 		}
1672 	    }
1673 	  return false;
1674 
1675 	case ARRAY_TYPE:
1676 	  tr_size = TYPE_SIZE (TREE_TYPE (type));
1677 	  if (!tr_size || !host_integerp (tr_size, 1))
1678 	    return false;
1679 	  el_size = tree_low_cst (tr_size, 1);
1680 
1681 	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1682 	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1683 	    return false;
1684 	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1685 	  if (!integer_zerop (minidx))
1686 	    index = int_const_binop (PLUS_EXPR, index, minidx);
1687 	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1688 			 NULL_TREE, NULL_TREE);
1689 	  offset = offset % el_size;
1690 	  type = TREE_TYPE (type);
1691 	  break;
1692 
1693 	default:
1694 	  if (offset != 0)
1695 	    return false;
1696 
1697 	  if (exp_type)
1698 	    return false;
1699 	  else
1700 	    return true;
1701 	}
1702     }
1703 }
1704 
1705 /* Return true iff TYPE is the stdarg va_list type.  */
1706 
1707 static inline bool
1708 is_va_list_type (tree type)
1709 {
1710   return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1711 }
1712 
1713 /* Print a message to the dump file explaining why a variable was rejected.  */
1714 
1715 static void
1716 reject (tree var, const char *msg)
1717 {
1718   if (dump_file && (dump_flags & TDF_DETAILS))
1719     {
1720       fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1721       print_generic_expr (dump_file, var, 0);
1722       fprintf (dump_file, "\n");
1723     }
1724 }
1725 
1726 /* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
1727    those variables whose types are suitable for scalarization.  */
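/* Editorial illustration (not from the original sources): a local
   "struct { int x, y; } s;" whose address is never taken is a typical
   candidate, whereas a variable-length array (size not fixed), a volatile
   aggregate or an address-taken variable (needs_to_live_in_memory) is
   rejected with the corresponding message below.  */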
1728 
1729 static bool
1730 find_var_candidates (void)
1731 {
1732   tree var, type;
1733   referenced_var_iterator rvi;
1734   bool ret = false;
1735   const char *msg;
1736 
1737   FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
1738     {
1739       if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
1740         continue;
1741       type = TREE_TYPE (var);
1742 
1743       if (!AGGREGATE_TYPE_P (type))
1744         {
1745           reject (var, "not aggregate");
1746           continue;
1747 	}
1748       if (needs_to_live_in_memory (var))
1749         {
1750           reject (var, "needs to live in memory");
1751           continue;
1752         }
1753       if (TREE_THIS_VOLATILE (var))
1754         {
1755           reject (var, "is volatile");
1756 	  continue;
1757         }
1758       if (!COMPLETE_TYPE_P (type))
1759         {
1760           reject (var, "has incomplete type");
1761 	  continue;
1762         }
1763       if (!host_integerp (TYPE_SIZE (type), 1))
1764         {
1765           reject (var, "type size not fixed");
1766 	  continue;
1767         }
1768       if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
1769         {
1770           reject (var, "type size is zero");
1771           continue;
1772         }
1773       if (type_internals_preclude_sra_p (type, &msg))
1774 	{
1775 	  reject (var, msg);
1776 	  continue;
1777 	}
1778       if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
1779 	      we also want to schedule it rather late.  Thus we ignore it in
1780 	      the early pass. */
1781 	  (sra_mode == SRA_MODE_EARLY_INTRA
1782 	      && is_va_list_type (type)))
1783         {
1784 	  reject (var, "is va_list");
1785 	  continue;
1786 	}
1787 
1788       bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1789 
1790       if (dump_file && (dump_flags & TDF_DETAILS))
1791 	{
1792 	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1793 	  print_generic_expr (dump_file, var, 0);
1794 	  fprintf (dump_file, "\n");
1795 	}
1796       ret = true;
1797     }
1798 
1799   return ret;
1800 }
1801 
1802 /* Sort all accesses for the given variable, check for partial overlaps and
1803    return NULL if there are any.  If there are none, pick a representative for
1804    each combination of offset and size and create a linked list out of them.
1805    Return the pointer to the first representative and make sure it is the first
1806    one in the vector of accesses.  */
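/* Editorial illustration (not from the original sources): after sorting,
   two accesses to a variable covering the same bit range [0, 32) are spliced
   into one group with a single representative, whereas ranges [0, 32) and
   [16, 48) overlap without one containing the other, so the function below
   returns NULL and the variable is later disqualified.  */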
1807 
1808 static struct access *
1809 sort_and_splice_var_accesses (tree var)
1810 {
1811   int i, j, access_count;
1812   struct access *res, **prev_acc_ptr = &res;
1813   VEC (access_p, heap) *access_vec;
1814   bool first = true;
1815   HOST_WIDE_INT low = -1, high = 0;
1816 
1817   access_vec = get_base_access_vector (var);
1818   if (!access_vec)
1819     return NULL;
1820   access_count = VEC_length (access_p, access_vec);
1821 
1822   /* Sort by <OFFSET, SIZE>.  */
1823   VEC_qsort (access_p, access_vec, compare_access_positions);
1824 
1825   i = 0;
1826   while (i < access_count)
1827     {
1828       struct access *access = VEC_index (access_p, access_vec, i);
1829       bool grp_write = access->write;
1830       bool grp_read = !access->write;
1831       bool grp_scalar_write = access->write
1832 	&& is_gimple_reg_type (access->type);
1833       bool grp_scalar_read = !access->write
1834 	&& is_gimple_reg_type (access->type);
1835       bool grp_assignment_read = access->grp_assignment_read;
1836       bool grp_assignment_write = access->grp_assignment_write;
1837       bool multiple_scalar_reads = false;
1838       bool total_scalarization = access->grp_total_scalarization;
1839       bool grp_partial_lhs = access->grp_partial_lhs;
1840       bool first_scalar = is_gimple_reg_type (access->type);
1841       bool unscalarizable_region = access->grp_unscalarizable_region;
1842 
1843       if (first || access->offset >= high)
1844 	{
1845 	  first = false;
1846 	  low = access->offset;
1847 	  high = access->offset + access->size;
1848 	}
1849       else if (access->offset > low && access->offset + access->size > high)
1850 	return NULL;
1851       else
1852 	gcc_assert (access->offset >= low
1853 		    && access->offset + access->size <= high);
1854 
1855       j = i + 1;
1856       while (j < access_count)
1857 	{
1858 	  struct access *ac2 = VEC_index (access_p, access_vec, j);
1859 	  if (ac2->offset != access->offset || ac2->size != access->size)
1860 	    break;
1861 	  if (ac2->write)
1862 	    {
1863 	      grp_write = true;
1864 	      grp_scalar_write = (grp_scalar_write
1865 				  || is_gimple_reg_type (ac2->type));
1866 	    }
1867 	  else
1868 	    {
1869 	      grp_read = true;
1870 	      if (is_gimple_reg_type (ac2->type))
1871 		{
1872 		  if (grp_scalar_read)
1873 		    multiple_scalar_reads = true;
1874 		  else
1875 		    grp_scalar_read = true;
1876 		}
1877 	    }
1878 	  grp_assignment_read |= ac2->grp_assignment_read;
1879 	  grp_assignment_write |= ac2->grp_assignment_write;
1880 	  grp_partial_lhs |= ac2->grp_partial_lhs;
1881 	  unscalarizable_region |= ac2->grp_unscalarizable_region;
1882 	  total_scalarization |= ac2->grp_total_scalarization;
1883 	  relink_to_new_repr (access, ac2);
1884 
1885 	  /* If there are both aggregate-type and scalar-type accesses with
1886 	     this combination of size and offset, the comparison function
1887 	     should have put the scalars first.  */
1888 	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1889 	  ac2->group_representative = access;
1890 	  j++;
1891 	}
1892 
1893       i = j;
1894 
1895       access->group_representative = access;
1896       access->grp_write = grp_write;
1897       access->grp_read = grp_read;
1898       access->grp_scalar_read = grp_scalar_read;
1899       access->grp_scalar_write = grp_scalar_write;
1900       access->grp_assignment_read = grp_assignment_read;
1901       access->grp_assignment_write = grp_assignment_write;
1902       access->grp_hint = multiple_scalar_reads || total_scalarization;
1903       access->grp_total_scalarization = total_scalarization;
1904       access->grp_partial_lhs = grp_partial_lhs;
1905       access->grp_unscalarizable_region = unscalarizable_region;
1906       if (access->first_link)
1907 	add_access_to_work_queue (access);
1908 
1909       *prev_acc_ptr = access;
1910       prev_acc_ptr = &access->next_grp;
1911     }
1912 
1913   gcc_assert (res == VEC_index (access_p, access_vec, 0));
1914   return res;
1915 }
1916 
1917 /* Create a variable for the given ACCESS, which determines its type, name and
1918    a few other properties.  Return the variable declaration; callers store it
1919    in ACCESS->replacement_decl.  */
1920 
1921 static tree
1922 create_access_replacement (struct access *access, bool rename)
1923 {
1924   tree repl;
1925 
1926   repl = create_tmp_var (access->type, "SR");
1927   add_referenced_var (repl);
1928   if (rename)
1929     mark_sym_for_renaming (repl);
1930 
1931   if (!access->grp_partial_lhs
1932       && (TREE_CODE (access->type) == COMPLEX_TYPE
1933 	  || TREE_CODE (access->type) == VECTOR_TYPE))
1934     DECL_GIMPLE_REG_P (repl) = 1;
1935 
1936   DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1937   DECL_ARTIFICIAL (repl) = 1;
1938   DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1939 
1940   if (DECL_NAME (access->base)
1941       && !DECL_IGNORED_P (access->base)
1942       && !DECL_ARTIFICIAL (access->base))
1943     {
1944       char *pretty_name = make_fancy_name (access->expr);
1945       tree debug_expr = unshare_expr (access->expr), d;
1946 
1947       DECL_NAME (repl) = get_identifier (pretty_name);
1948       obstack_free (&name_obstack, pretty_name);
1949 
1950       /* Get rid of any SSA_NAMEs embedded in debug_expr,
1951 	 as DECL_DEBUG_EXPR isn't considered when looking for still
1952 	 used SSA_NAMEs and thus they could be freed.  All debug info
1953 	 generation cares about is whether something is constant or variable
1954 	 and that get_ref_base_and_extent works properly on the
1955 	 expression.  */
1956       for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
1957 	switch (TREE_CODE (d))
1958 	  {
1959 	  case ARRAY_REF:
1960 	  case ARRAY_RANGE_REF:
1961 	    if (TREE_OPERAND (d, 1)
1962 		&& TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
1963 	      TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
1964 	    if (TREE_OPERAND (d, 3)
1965 		&& TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
1966 	      TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
1967 	    /* FALLTHRU */
1968 	  case COMPONENT_REF:
1969 	    if (TREE_OPERAND (d, 2)
1970 		&& TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
1971 	      TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
1972 	    break;
1973 	  default:
1974 	    break;
1975 	  }
1976       SET_DECL_DEBUG_EXPR (repl, debug_expr);
1977       DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1978       if (access->grp_no_warning)
1979 	TREE_NO_WARNING (repl) = 1;
1980       else
1981 	TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1982     }
1983   else
1984     TREE_NO_WARNING (repl) = 1;
1985 
1986   if (dump_file)
1987     {
1988       fprintf (dump_file, "Created a replacement for ");
1989       print_generic_expr (dump_file, access->base, 0);
1990       fprintf (dump_file, " offset: %u, size: %u: ",
1991 	       (unsigned) access->offset, (unsigned) access->size);
1992       print_generic_expr (dump_file, repl, 0);
1993       fprintf (dump_file, "\n");
1994     }
1995   sra_stats.replacements++;
1996 
1997   return repl;
1998 }
1999 
2000 /* Return the ACCESS scalar replacement, creating it if it does not exist yet.  */
2001 
2002 static inline tree
2003 get_access_replacement (struct access *access)
2004 {
2005   gcc_assert (access->grp_to_be_replaced);
2006 
2007   if (!access->replacement_decl)
2008     access->replacement_decl = create_access_replacement (access, true);
2009   return access->replacement_decl;
2010 }
2011 
2012 /* Return the ACCESS scalar replacement, creating it if it does not exist yet,
2013    but do not mark it for renaming.  */
2014 
2015 static inline tree
2016 get_unrenamed_access_replacement (struct access *access)
2017 {
2018   gcc_assert (!access->grp_to_be_replaced);
2019 
2020   if (!access->replacement_decl)
2021     access->replacement_decl = create_access_replacement (access, false);
2022   return access->replacement_decl;
2023 }
2024 
2025 
2026 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2027    linked list along the way.  Stop when *ACCESS is NULL or the access it
2028    points to is not "within" the root.  Return false iff some accesses partially
2029    overlap.  */
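/* Editorial illustration (not from the original sources): for representatives
   covering bit ranges [0, 64), [0, 32) and [32, 64), linked in this order by
   next_grp, the function makes [0, 32) and [32, 64) children of [0, 64); a
   following representative such as [48, 96) would straddle the root's limit
   and make the function return false.  */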
2030 
2031 static bool
2032 build_access_subtree (struct access **access)
2033 {
2034   struct access *root = *access, *last_child = NULL;
2035   HOST_WIDE_INT limit = root->offset + root->size;
2036 
2037   *access = (*access)->next_grp;
2038   while  (*access && (*access)->offset + (*access)->size <= limit)
2039     {
2040       if (!last_child)
2041 	root->first_child = *access;
2042       else
2043 	last_child->next_sibling = *access;
2044       last_child = *access;
2045 
2046       if (!build_access_subtree (access))
2047 	return false;
2048     }
2049 
2050   if (*access && (*access)->offset < limit)
2051     return false;
2052 
2053   return true;
2054 }
2055 
2056 /* Build a tree of access representatives.  ACCESS is the pointer to the first
2057    one; the others are linked in a list by the next_grp field.  Return false
2058    iff some accesses partially overlap.  */
2059 
2060 static bool
2061 build_access_trees (struct access *access)
2062 {
2063   while (access)
2064     {
2065       struct access *root = access;
2066 
2067       if (!build_access_subtree (&access))
2068 	return false;
2069       root->next_grp = access;
2070     }
2071   return true;
2072 }
2073 
2074 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
2075    array.  */
2076 
2077 static bool
2078 expr_with_var_bounded_array_refs_p (tree expr)
2079 {
2080   while (handled_component_p (expr))
2081     {
2082       if (TREE_CODE (expr) == ARRAY_REF
2083 	  && !host_integerp (array_ref_low_bound (expr), 0))
2084 	return true;
2085       expr = TREE_OPERAND (expr, 0);
2086     }
2087   return false;
2088 }
2089 
2090 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2091    they seem beneficial and ALLOW_REPLACEMENTS allows it.  Also set all sorts
2092    of access flags appropriately along the way, notably propagating grp_read,
2093    grp_assignment_read, grp_write, grp_assignment_write and
2094    grp_total_scalarization from PARENT to ROOT.
2095 
2096    Creating a replacement for a scalar access is considered beneficial if its
2097    grp_hint is set (this means we are either attempting total scalarization or
2098    there is more than one direct read access) or according to the following
2099    table:
2100 
2101    Access written to through a scalar type (once or more times)
2102    |
2103    |	Written to in an assignment statement
2104    |	|
2105    |	|	Access read as scalar _once_
2106    |	|	|
2107    |   	|	|	Read in an assignment statement
2108    |	|	|	|
2109    |   	|	|	|	Scalarize	Comment
2110 -----------------------------------------------------------------------------
2111    0	0	0	0			No access for the scalar
2112    0	0	0	1			No access for the scalar
2113    0	0	1	0	No		Single read - won't help
2114    0	0	1	1	No		The same case
2115    0	1	0	0			No access for the scalar
2116    0	1	0	1			No access for the scalar
2117    0	1	1	0	Yes		s = *g; return s.i;
2118    0	1	1	1       Yes		The same case as above
2119    1	0	0	0	No		Won't help
2120    1	0	0	1	Yes		s.i = 1; *g = s;
2121    1	0	1	0	Yes		s.i = 5; g = s.i;
2122    1	0	1	1	Yes		The same case as above
2123    1	1	0	0	No		Won't help.
2124    1	1	0	1	Yes		s.i = 1; *g = s;
2125    1	1	1	0	Yes		s = *g; return s.i;
2126    1	1	1	1	Yes		Any of the above yeses  */
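/* Editorial illustration (not from the original sources; the exact GIMPLE
   depends on the aliasing properties of *g): for the "0 1 1 0" row above,

     s = *g;
     return s.i;

   the field s.i gets a replacement SR.i, the aggregate copy is followed by a
   load of SR.i taken from the right-hand side aggregate, and the return
   statement then uses SR.i instead of s.i.  */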
2127 
2128 static bool
2129 analyze_access_subtree (struct access *root, struct access *parent,
2130 			bool allow_replacements)
2131 {
2132   struct access *child;
2133   HOST_WIDE_INT limit = root->offset + root->size;
2134   HOST_WIDE_INT covered_to = root->offset;
2135   bool scalar = is_gimple_reg_type (root->type);
2136   bool hole = false, sth_created = false;
2137 
2138   if (parent)
2139     {
2140       if (parent->grp_read)
2141 	root->grp_read = 1;
2142       if (parent->grp_assignment_read)
2143 	root->grp_assignment_read = 1;
2144       if (parent->grp_write)
2145 	root->grp_write = 1;
2146       if (parent->grp_assignment_write)
2147 	root->grp_assignment_write = 1;
2148       if (parent->grp_total_scalarization)
2149 	root->grp_total_scalarization = 1;
2150     }
2151 
2152   if (root->grp_unscalarizable_region)
2153     allow_replacements = false;
2154 
2155   if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2156     allow_replacements = false;
2157 
2158   for (child = root->first_child; child; child = child->next_sibling)
2159     {
2160       hole |= covered_to < child->offset;
2161       sth_created |= analyze_access_subtree (child, root,
2162 					     allow_replacements && !scalar);
2163 
2164       root->grp_unscalarized_data |= child->grp_unscalarized_data;
2165       root->grp_total_scalarization &= child->grp_total_scalarization;
2166       if (child->grp_covered)
2167 	covered_to += child->size;
2168       else
2169 	hole = true;
2170     }
2171 
2172   if (allow_replacements && scalar && !root->first_child
2173       && (root->grp_hint
2174 	  || ((root->grp_scalar_read || root->grp_assignment_read)
2175 	      && (root->grp_scalar_write || root->grp_assignment_write))))
2176     {
2177       bool new_integer_type;
2178       /* Always create access replacements that cover the whole access.
2179          For integral types this means the precision has to match.
2180 	 Avoid assumptions based on the integral type kind, too.  */
2181       if (INTEGRAL_TYPE_P (root->type)
2182 	  && (TREE_CODE (root->type) != INTEGER_TYPE
2183 	      || TYPE_PRECISION (root->type) != root->size)
2184 	  /* But leave bitfield accesses alone.  */
2185 	  && (TREE_CODE (root->expr) != COMPONENT_REF
2186 	      || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2187 	{
2188 	  tree rt = root->type;
2189 	  gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2190 		      && (root->size % BITS_PER_UNIT) == 0);
2191 	  root->type = build_nonstandard_integer_type (root->size,
2192 						       TYPE_UNSIGNED (rt));
2193 	  root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2194 					     root->base, root->offset,
2195 					     root->type, NULL, false);
2196 	  new_integer_type = true;
2197 	}
2198       else
2199 	new_integer_type = false;
2200 
2201       if (dump_file && (dump_flags & TDF_DETAILS))
2202 	{
2203 	  fprintf (dump_file, "Marking ");
2204 	  print_generic_expr (dump_file, root->base, 0);
2205 	  fprintf (dump_file, " offset: %u, size: %u ",
2206 		   (unsigned) root->offset, (unsigned) root->size);
2207 	  fprintf (dump_file, " to be replaced%s.\n",
2208 		   new_integer_type ? " with an integer": "");
2209 	}
2210 
2211       root->grp_to_be_replaced = 1;
2212       sth_created = true;
2213       hole = false;
2214     }
2215   else
2216     {
2217       if (covered_to < limit)
2218 	hole = true;
2219       if (scalar)
2220 	root->grp_total_scalarization = 0;
2221     }
2222 
2223   if (sth_created
2224       && (!hole || root->grp_total_scalarization))
2225     {
2226       root->grp_covered = 1;
2227       return true;
2228     }
2229   if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2230     root->grp_unscalarized_data = 1; /* not covered and written to */
2231   if (sth_created)
2232     return true;
2233   return false;
2234 }
2235 
2236 /* Analyze all access trees linked by next_grp by the means of
2237    analyze_access_subtree.  */
2238 static bool
2239 analyze_access_trees (struct access *access)
2240 {
2241   bool ret = false;
2242 
2243   while (access)
2244     {
2245       if (analyze_access_subtree (access, NULL, true))
2246 	ret = true;
2247       access = access->next_grp;
2248     }
2249 
2250   return ret;
2251 }
2252 
2253 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and
2254    with size SIZE would conflict with an already existing one.  If exactly such
2255    a child already exists in LACC, store a pointer to it in EXACT_MATCH.  */
2256 
2257 static bool
2258 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2259 			      HOST_WIDE_INT size, struct access **exact_match)
2260 {
2261   struct access *child;
2262 
2263   for (child = lacc->first_child; child; child = child->next_sibling)
2264     {
2265       if (child->offset == norm_offset && child->size == size)
2266 	{
2267 	  *exact_match = child;
2268 	  return true;
2269 	}
2270 
2271       if (child->offset < norm_offset + size
2272 	  && child->offset + child->size > norm_offset)
2273 	return true;
2274     }
2275 
2276   return false;
2277 }
2278 
2279 /* Create a new child access of PARENT, with all properties just like MODEL
2280    except for its offset and with its grp_write false and grp_read true.
2281    Return the new access or NULL if it cannot be created.  Note that this access
2282    is created long after all splicing and sorting; it's not located in any
2283    access vector and is automatically a representative of its group.  */
2284 
2285 static struct access *
2286 create_artificial_child_access (struct access *parent, struct access *model,
2287 				HOST_WIDE_INT new_offset)
2288 {
2289   struct access *access;
2290   struct access **child;
2291   tree expr = parent->base;
2292 
2293   gcc_assert (!model->grp_unscalarizable_region);
2294 
2295   access = (struct access *) pool_alloc (access_pool);
2296   memset (access, 0, sizeof (struct access));
2297   if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2298 					   model->type))
2299     {
2300       access->grp_no_warning = true;
2301       expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2302 				  new_offset, model, NULL, false);
2303     }
2304 
2305   access->base = parent->base;
2306   access->expr = expr;
2307   access->offset = new_offset;
2308   access->size = model->size;
2309   access->type = model->type;
2310   access->grp_write = true;
2311   access->grp_read = false;
2312 
2313   child = &parent->first_child;
2314   while (*child && (*child)->offset < new_offset)
2315     child = &(*child)->next_sibling;
2316 
2317   access->next_sibling = *child;
2318   *child = access;
2319 
2320   return access;
2321 }
2322 
2323 
2324 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2325    true if any new subaccess was created.  Additionally, if RACC is a scalar
2326    access but LACC is not, change the type of the latter, if possible.  */
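/* Editorial illustration (not from the original sources): given an assignment
   "a = b" recorded as an assign_link and a child access of b at bits [0, 32)
   (say b.x), a matching artificial child of a at the same normalized offset
   is created (unless a conflicting child already exists), so that the copy
   can later be performed replacement-by-replacement.  */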
2327 
2328 static bool
2329 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2330 {
2331   struct access *rchild;
2332   HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2333   bool ret = false;
2334 
2335   if (is_gimple_reg_type (lacc->type)
2336       || lacc->grp_unscalarizable_region
2337       || racc->grp_unscalarizable_region)
2338     return false;
2339 
2340   if (is_gimple_reg_type (racc->type))
2341     {
2342       if (!lacc->first_child && !racc->first_child)
2343 	{
2344 	  tree t = lacc->base;
2345 
2346 	  lacc->type = racc->type;
2347 	  if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2348 						  lacc->offset, racc->type))
2349 	    lacc->expr = t;
2350 	  else
2351 	    {
2352 	      lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2353 						lacc->base, lacc->offset,
2354 						racc, NULL, false);
2355 	      lacc->grp_no_warning = true;
2356 	    }
2357 	}
2358       return false;
2359     }
2360 
2361   for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2362     {
2363       struct access *new_acc = NULL;
2364       HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2365 
2366       if (rchild->grp_unscalarizable_region)
2367 	continue;
2368 
2369       if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2370 					&new_acc))
2371 	{
2372 	  if (new_acc)
2373 	    {
2374 	      rchild->grp_hint = 1;
2375 	      new_acc->grp_hint |= new_acc->grp_read;
2376 	      if (rchild->first_child)
2377 		ret |= propagate_subaccesses_across_link (new_acc, rchild);
2378 	    }
2379 	  continue;
2380 	}
2381 
2382       rchild->grp_hint = 1;
2383       new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2384       if (new_acc)
2385 	{
2386 	  ret = true;
2387 	  if (racc->first_child)
2388 	    propagate_subaccesses_across_link (new_acc, rchild);
2389 	}
2390     }
2391 
2392   return ret;
2393 }
2394 
2395 /* Propagate all subaccesses across assignment links.  */
2396 
2397 static void
2398 propagate_all_subaccesses (void)
2399 {
2400   while (work_queue_head)
2401     {
2402       struct access *racc = pop_access_from_work_queue ();
2403       struct assign_link *link;
2404 
2405       gcc_assert (racc->first_link);
2406 
2407       for (link = racc->first_link; link; link = link->next)
2408 	{
2409 	  struct access *lacc = link->lacc;
2410 
2411 	  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2412 	    continue;
2413 	  lacc = lacc->group_representative;
2414 	  if (propagate_subaccesses_across_link (lacc, racc)
2415 	      && lacc->first_link)
2416 	    add_access_to_work_queue (lacc);
2417 	}
2418     }
2419 }
2420 
2421 /* Go through all accesses collected throughout the (intraprocedural) analysis
2422    stage, exclude overlapping ones, identify representatives and build trees
2423    out of them, making decisions about scalarization on the way.  Return true
2424    iff there are any to-be-scalarized variables after this stage. */
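/* Editorial note (illustrative arithmetic, not from the original sources):
   the total-scalarization size cap computed below is
   UNITS_PER_WORD * BITS_PER_UNIT * MOVE_RATIO; e.g. with 8-byte words and a
   MOVE_RATIO of 8 it allows aggregates of up to 512 bits (64 bytes).  */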
2425 
2426 static bool
2427 analyze_all_variable_accesses (void)
2428 {
2429   int res = 0;
2430   bitmap tmp = BITMAP_ALLOC (NULL);
2431   bitmap_iterator bi;
2432   unsigned i, max_total_scalarization_size;
2433 
2434   max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2435     * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2436 
2437   EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2438     if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2439 	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2440       {
2441 	tree var = referenced_var (i);
2442 
2443 	if (TREE_CODE (var) == VAR_DECL
2444 	    && type_consists_of_records_p (TREE_TYPE (var)))
2445 	  {
2446 	    if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2447 		<= max_total_scalarization_size)
2448 	      {
2449 		completely_scalarize_var (var);
2450 		if (dump_file && (dump_flags & TDF_DETAILS))
2451 		  {
2452 		    fprintf (dump_file, "Will attempt to totally scalarize ");
2453 		    print_generic_expr (dump_file, var, 0);
2454 		    fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2455 		  }
2456 	      }
2457 	    else if (dump_file && (dump_flags & TDF_DETAILS))
2458 	      {
2459 		fprintf (dump_file, "Too big to totally scalarize: ");
2460 		print_generic_expr (dump_file, var, 0);
2461 		fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2462 	      }
2463 	  }
2464       }
2465 
2466   bitmap_copy (tmp, candidate_bitmap);
2467   EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2468     {
2469       tree var = referenced_var (i);
2470       struct access *access;
2471 
2472       access = sort_and_splice_var_accesses (var);
2473       if (!access || !build_access_trees (access))
2474 	disqualify_candidate (var,
2475 			      "No or inhibitingly overlapping accesses.");
2476     }
2477 
2478   propagate_all_subaccesses ();
2479 
2480   bitmap_copy (tmp, candidate_bitmap);
2481   EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2482     {
2483       tree var = referenced_var (i);
2484       struct access *access = get_first_repr_for_decl (var);
2485 
2486       if (analyze_access_trees (access))
2487 	{
2488 	  res++;
2489 	  if (dump_file && (dump_flags & TDF_DETAILS))
2490 	    {
2491 	      fprintf (dump_file, "\nAccess trees for ");
2492 	      print_generic_expr (dump_file, var, 0);
2493 	      fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2494 	      dump_access_tree (dump_file, access);
2495 	      fprintf (dump_file, "\n");
2496 	    }
2497 	}
2498       else
2499 	disqualify_candidate (var, "No scalar replacements to be created.");
2500     }
2501 
2502   BITMAP_FREE (tmp);
2503 
2504   if (res)
2505     {
2506       statistics_counter_event (cfun, "Scalarized aggregates", res);
2507       return true;
2508     }
2509   else
2510     return false;
2511 }
2512 
2513 /* Generate statements copying scalar replacements of accesses within a subtree
2514    into or out of AGG.  ACCESS, all its children, siblings and their children
2515    are to be processed.  AGG is an aggregate type expression (can be a
2516    declaration but does not have to be; it can for example also be a MEM_REF or
2517    a series of handled components).  TOP_OFFSET is the offset of the processed
2518    subtree which has to be subtracted from offsets of individual accesses to
2519    get corresponding offsets for AGG.  If CHUNK_SIZE is non-zero, copy only
2520    replacements in the interval <start_offset, start_offset + chunk_size>,
2521    otherwise copy all.  GSI is a statement iterator used to place the new
2522    statements.  WRITE should be true when the statements should write from AGG
2523    to the replacement and false if vice versa.  If INSERT_AFTER is true, new
2524    statements will be added after the current statement in GSI, they will be
2525    added before the statement otherwise.  */
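/* Editorial illustration (not from the original sources; SR.* stand for the
   replacement variables): for an aggregate with scalarized fields x and y,
   a call with WRITE false emits roughly

     agg.x = SR.x;
     agg.y = SR.y;

   while WRITE true emits the loads in the opposite direction.  */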
2526 
2527 static void
2528 generate_subtree_copies (struct access *access, tree agg,
2529 			 HOST_WIDE_INT top_offset,
2530 			 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2531 			 gimple_stmt_iterator *gsi, bool write,
2532 			 bool insert_after, location_t loc)
2533 {
2534   do
2535     {
2536       if (chunk_size && access->offset >= start_offset + chunk_size)
2537 	return;
2538 
2539       if (access->grp_to_be_replaced
2540 	  && (chunk_size == 0
2541 	      || access->offset + access->size > start_offset))
2542 	{
2543 	  tree expr, repl = get_access_replacement (access);
2544 	  gimple stmt;
2545 
2546 	  expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2547 				      access, gsi, insert_after);
2548 
2549 	  if (write)
2550 	    {
2551 	      if (access->grp_partial_lhs)
2552 		expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2553 						 !insert_after,
2554 						 insert_after ? GSI_NEW_STMT
2555 						 : GSI_SAME_STMT);
2556 	      stmt = gimple_build_assign (repl, expr);
2557 	    }
2558 	  else
2559 	    {
2560 	      TREE_NO_WARNING (repl) = 1;
2561 	      if (access->grp_partial_lhs)
2562 		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2563 						 !insert_after,
2564 						 insert_after ? GSI_NEW_STMT
2565 						 : GSI_SAME_STMT);
2566 	      stmt = gimple_build_assign (expr, repl);
2567 	    }
2568 	  gimple_set_location (stmt, loc);
2569 
2570 	  if (insert_after)
2571 	    gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2572 	  else
2573 	    gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2574 	  update_stmt (stmt);
2575 	  sra_stats.subtree_copies++;
2576 	}
2577 
2578       if (access->first_child)
2579 	generate_subtree_copies (access->first_child, agg, top_offset,
2580 				 start_offset, chunk_size, gsi,
2581 				 write, insert_after, loc);
2582 
2583       access = access->next_sibling;
2584     }
2585   while (access);
2586 }
2587 
2588 /* Assign zero to all scalar replacements in an access subtree.  ACCESS is the
2589    root of the subtree to be processed.  GSI is the statement iterator used
2590    for inserting statements which are added after the current statement if
2591    INSERT_AFTER is true or before it otherwise.  */
2592 
2593 static void
2594 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2595 			bool insert_after, location_t loc)
2596 
2597 {
2598   struct access *child;
2599 
2600   if (access->grp_to_be_replaced)
2601     {
2602       gimple stmt;
2603 
2604       stmt = gimple_build_assign (get_access_replacement (access),
2605 				  build_zero_cst (access->type));
2606       if (insert_after)
2607 	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2608       else
2609 	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2610       update_stmt (stmt);
2611       gimple_set_location (stmt, loc);
2612     }
2613 
2614   for (child = access->first_child; child; child = child->next_sibling)
2615     init_subtree_with_zero (child, gsi, insert_after, loc);
2616 }
2617 
2618 /* Search for an access representative for the given expression EXPR and
2619    return it or NULL if it cannot be found.  */
2620 
2621 static struct access *
2622 get_access_for_expr (tree expr)
2623 {
2624   HOST_WIDE_INT offset, size, max_size;
2625   tree base;
2626 
2627   /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2628      a different size than the size of its argument and we need the latter
2629      one.  */
2630   if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2631     expr = TREE_OPERAND (expr, 0);
2632 
2633   base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2634   if (max_size == -1 || !DECL_P (base))
2635     return NULL;
2636 
2637   if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2638     return NULL;
2639 
2640   return get_var_base_offset_size_access (base, offset, max_size);
2641 }
2642 
2643 /* Replace the expression EXPR with a scalar replacement if there is one and
2644    generate other statements to do type conversion or subtree copying if
2645    necessary.  GSI is used to place newly created statements; WRITE is true if
2646    the expression is being written to (it is on a LHS of a statement or output
2647    in an assembly statement).  */
2648 
2649 static bool
2650 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2651 {
2652   location_t loc;
2653   struct access *access;
2654   tree type, bfr;
2655 
2656   if (TREE_CODE (*expr) == BIT_FIELD_REF)
2657     {
2658       bfr = *expr;
2659       expr = &TREE_OPERAND (*expr, 0);
2660     }
2661   else
2662     bfr = NULL_TREE;
2663 
2664   if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2665     expr = &TREE_OPERAND (*expr, 0);
2666   access = get_access_for_expr (*expr);
2667   if (!access)
2668     return false;
2669   type = TREE_TYPE (*expr);
2670 
2671   loc = gimple_location (gsi_stmt (*gsi));
2672   if (access->grp_to_be_replaced)
2673     {
2674       tree repl = get_access_replacement (access);
2675       /* If we replace a non-register typed access simply use the original
2676          access expression to extract the scalar component afterwards.
2677 	 This happens if scalarizing a function return value or parameter
2678 	 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2679 	 gcc.c-torture/compile/20011217-1.c.
2680 
2681          We also want to use this when accessing a complex or vector which can
2682          be accessed as a different type too, potentially creating a need for
2683          type conversion (see PR42196) and when scalarized unions are involved
2684          in assembler statements (see PR42398).  */
2685       if (!useless_type_conversion_p (type, access->type))
2686 	{
2687 	  tree ref;
2688 
2689 	  ref = build_ref_for_model (loc, access->base, access->offset, access,
2690 				     NULL, false);
2691 
2692 	  if (write)
2693 	    {
2694 	      gimple stmt;
2695 
2696 	      if (access->grp_partial_lhs)
2697 		ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2698 						 false, GSI_NEW_STMT);
2699 	      stmt = gimple_build_assign (repl, ref);
2700 	      gimple_set_location (stmt, loc);
2701 	      gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2702 	    }
2703 	  else
2704 	    {
2705 	      gimple stmt;
2706 
2707 	      if (access->grp_partial_lhs)
2708 		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2709 						 true, GSI_SAME_STMT);
2710 	      stmt = gimple_build_assign (ref, repl);
2711 	      gimple_set_location (stmt, loc);
2712 	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2713 	    }
2714 	}
2715       else
2716 	*expr = repl;
2717       sra_stats.exprs++;
2718     }
2719 
2720   if (access->first_child)
2721     {
2722       HOST_WIDE_INT start_offset, chunk_size;
2723       if (bfr
2724 	  && host_integerp (TREE_OPERAND (bfr, 1), 1)
2725 	  && host_integerp (TREE_OPERAND (bfr, 2), 1))
2726 	{
2727 	  chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2728 	  start_offset = access->offset
2729 	    + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2730 	}
2731       else
2732 	start_offset = chunk_size = 0;
2733 
2734       generate_subtree_copies (access->first_child, access->base, 0,
2735 			       start_offset, chunk_size, gsi, write, write,
2736 			       loc);
2737     }
2738   return true;
2739 }
2740 
2741 /* Where scalar replacements of the RHS have been written to when a replacement
2742    of the LHS of an assignment cannot be directly loaded from a replacement of
2743    the RHS. */
2744 enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far. */
2745 				  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2746 				  SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2747 
2748 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2749    base aggregate if there are unscalarized data or directly to the LHS of the
2750    statement that is pointed to by GSI otherwise.  */
2751 
2752 static enum unscalarized_data_handling
2753 handle_unscalarized_data_in_subtree (struct access *top_racc,
2754 				     gimple_stmt_iterator *gsi)
2755 {
2756   if (top_racc->grp_unscalarized_data)
2757     {
2758       generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2759 			       gsi, false, false,
2760 			       gimple_location (gsi_stmt (*gsi)));
2761       return SRA_UDH_RIGHT;
2762     }
2763   else
2764     {
2765       tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2766       generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2767 			       0, 0, gsi, false, false,
2768 			       gimple_location (gsi_stmt (*gsi)));
2769       return SRA_UDH_LEFT;
2770     }
2771 }
2772 
2773 
2774 /* Try to generate statements to load all sub-replacements in an access subtree
2775    formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2776    If that is not possible, refresh the TOP_RACC base aggregate and load the
2777    accesses from it.  LEFT_OFFSET is the offset of the whole left subtree being
2778    copied.  NEW_GSI is a stmt iterator used for statement insertions after the
2779    original assignment; OLD_GSI is used to insert statements before the
2780    assignment.  *REFRESHED keeps the information whether we have needed to
2781    refresh replacements of the LHS and from which side of the assignments this
2782    takes place.  */
2783 
2784 static void
2785 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2786 				 HOST_WIDE_INT left_offset,
2787 				 gimple_stmt_iterator *old_gsi,
2788 				 gimple_stmt_iterator *new_gsi,
2789 				 enum unscalarized_data_handling *refreshed)
2790 {
2791   location_t loc = gimple_location (gsi_stmt (*old_gsi));
2792   for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2793     {
2794       if (lacc->grp_to_be_replaced)
2795 	{
2796 	  struct access *racc;
2797 	  HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2798 	  gimple stmt;
2799 	  tree rhs;
2800 
2801 	  racc = find_access_in_subtree (top_racc, offset, lacc->size);
2802 	  if (racc && racc->grp_to_be_replaced)
2803 	    {
2804 	      rhs = get_access_replacement (racc);
2805 	      if (!useless_type_conversion_p (lacc->type, racc->type))
2806 		rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2807 
2808 	      if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2809 		rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
2810 						true, GSI_SAME_STMT);
2811 	    }
2812 	  else
2813 	    {
2814 	      /* No suitable access on the right hand side, need to load from
2815 		 the aggregate.  See if we have to update it first... */
2816 	      if (*refreshed == SRA_UDH_NONE)
2817 		*refreshed = handle_unscalarized_data_in_subtree (top_racc,
2818 								  old_gsi);
2819 
2820 	      if (*refreshed == SRA_UDH_LEFT)
2821 		rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2822 					    new_gsi, true);
2823 	      else
2824 		rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2825 					    new_gsi, true);
2826 	      if (lacc->grp_partial_lhs)
2827 		rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
2828 						false, GSI_NEW_STMT);
2829 	    }
2830 
2831 	  stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2832 	  gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2833 	  gimple_set_location (stmt, loc);
2834 	  update_stmt (stmt);
2835 	  sra_stats.subreplacements++;
2836 	}
2837       else if (*refreshed == SRA_UDH_NONE
2838 	       && lacc->grp_read && !lacc->grp_covered)
2839 	*refreshed = handle_unscalarized_data_in_subtree (top_racc,
2840 							  old_gsi);
2841 
2842       if (lacc->first_child)
2843 	load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2844 					 old_gsi, new_gsi, refreshed);
2845     }
2846 }
2847 
2848 /* Result code for SRA assignment modification.  */
2849 enum assignment_mod_result { SRA_AM_NONE,       /* nothing done for the stmt */
2850 			     SRA_AM_MODIFIED,  /* stmt changed but not
2851 						  removed */
2852 			     SRA_AM_REMOVED };  /* stmt eliminated */
2853 
2854 /* Modify assignments with a CONSTRUCTOR on their RHS.  STMT is a pointer to
2855    the assignment statement and GSI is the statement iterator pointing at it.
2856    Returns the same values as sra_modify_assign.  */
2857 
2858 static enum assignment_mod_result
2859 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2860 {
2861   tree lhs = gimple_assign_lhs (*stmt);
2862   struct access *acc;
2863   location_t loc;
2864 
2865   acc = get_access_for_expr (lhs);
2866   if (!acc)
2867     return SRA_AM_NONE;
2868 
2869   if (gimple_clobber_p (*stmt))
2870     {
2871       /* Remove clobbers of fully scalarized variables, otherwise
2872 	 do nothing.  */
2873       if (acc->grp_covered)
2874 	{
2875 	  unlink_stmt_vdef (*stmt);
2876 	  gsi_remove (gsi, true);
2877 	  return SRA_AM_REMOVED;
2878 	}
2879       else
2880 	return SRA_AM_NONE;
2881     }
2882 
2883   loc = gimple_location (*stmt);
2884   if (VEC_length (constructor_elt,
2885 		  CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2886     {
2887       /* I have never seen this code path trigger but if it can happen the
2888 	 following should handle it gracefully.  */
2889       if (access_has_children_p (acc))
2890 	generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2891 				 true, true, loc);
2892       return SRA_AM_MODIFIED;
2893     }
2894 
2895   if (acc->grp_covered)
2896     {
2897       init_subtree_with_zero (acc, gsi, false, loc);
2898       unlink_stmt_vdef (*stmt);
2899       gsi_remove (gsi, true);
2900       return SRA_AM_REMOVED;
2901     }
2902   else
2903     {
2904       init_subtree_with_zero (acc, gsi, true, loc);
2905       return SRA_AM_MODIFIED;
2906     }
2907 }
2908 
2909 /* Create and return a new suitable default definition SSA_NAME for RACC which
2910    is an access describing an uninitialized part of an aggregate that is being
2911    loaded.  */
2912 
2913 static tree
2914 get_repl_default_def_ssa_name (struct access *racc)
2915 {
2916   tree repl, decl;
2917 
2918   decl = get_unrenamed_access_replacement (racc);
2919 
2920   repl = gimple_default_def (cfun, decl);
2921   if (!repl)
2922     {
2923       repl = make_ssa_name (decl, gimple_build_nop ());
2924       set_default_def (decl, repl);
2925     }
2926 
2927   return repl;
2928 }
2929 
2930 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
2931    somewhere in it.  */
2932 
2933 static inline bool
2934 contains_bitfld_comp_ref_p (const_tree ref)
2935 {
2936   while (handled_component_p (ref))
2937     {
2938       if (TREE_CODE (ref) == COMPONENT_REF
2939           && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
2940         return true;
2941       ref = TREE_OPERAND (ref, 0);
2942     }
2943 
2944   return false;
2945 }
2946 
2947 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2948    bit-field field declaration somewhere in it.  */
2949 
2950 static inline bool
2951 contains_vce_or_bfcref_p (const_tree ref)
2952 {
2953   while (handled_component_p (ref))
2954     {
2955       if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
2956 	  || (TREE_CODE (ref) == COMPONENT_REF
2957 	      && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
2958 	return true;
2959       ref = TREE_OPERAND (ref, 0);
2960     }
2961 
2962   return false;
2963 }
2964 
2965 /* Examine both sides of the assignment statement pointed to by STMT, replace
2966    them with a scalar replacement if there is one and generate copying of
2967    replacements if scalarized aggregates have been used in the assignment.  GSI
2968    is used to hold generated statements for type conversions and subtree
2969    copying.  */
2970 
2971 static enum assignment_mod_result
2972 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2973 {
2974   struct access *lacc, *racc;
2975   tree lhs, rhs;
2976   bool modify_this_stmt = false;
2977   bool force_gimple_rhs = false;
2978   location_t loc;
2979   gimple_stmt_iterator orig_gsi = *gsi;
2980 
2981   if (!gimple_assign_single_p (*stmt))
2982     return SRA_AM_NONE;
2983   lhs = gimple_assign_lhs (*stmt);
2984   rhs = gimple_assign_rhs1 (*stmt);
2985 
2986   if (TREE_CODE (rhs) == CONSTRUCTOR)
2987     return sra_modify_constructor_assign (stmt, gsi);
2988 
2989   if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2990       || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2991       || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2992     {
2993       modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2994 					  gsi, false);
2995       modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2996 					   gsi, true);
2997       return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2998     }
2999 
3000   lacc = get_access_for_expr (lhs);
3001   racc = get_access_for_expr (rhs);
3002   if (!lacc && !racc)
3003     return SRA_AM_NONE;
3004 
3005   loc = gimple_location (*stmt);
3006   if (lacc && lacc->grp_to_be_replaced)
3007     {
3008       lhs = get_access_replacement (lacc);
3009       gimple_assign_set_lhs (*stmt, lhs);
3010       modify_this_stmt = true;
3011       if (lacc->grp_partial_lhs)
3012 	force_gimple_rhs = true;
3013       sra_stats.exprs++;
3014     }
3015 
3016   if (racc && racc->grp_to_be_replaced)
3017     {
3018       rhs = get_access_replacement (racc);
3019       modify_this_stmt = true;
3020       if (racc->grp_partial_lhs)
3021 	force_gimple_rhs = true;
3022       sra_stats.exprs++;
3023     }
3024   else if (racc
3025 	   && !racc->grp_unscalarized_data
3026 	   && TREE_CODE (lhs) == SSA_NAME
3027 	   && !access_has_replacements_p (racc))
3028     {
3029       rhs = get_repl_default_def_ssa_name (racc);
3030       modify_this_stmt = true;
3031       sra_stats.exprs++;
3032     }
3033 
3034   if (modify_this_stmt)
3035     {
3036       if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3037 	{
3038 	  /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3039 	     ???  This should move to fold_stmt which we simply should
3040 	     call after building a VIEW_CONVERT_EXPR here.  */
3041 	  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3042 	      && !contains_bitfld_comp_ref_p (lhs)
3043 	      && !access_has_children_p (lacc))
3044 	    {
3045 	      lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3046 	      gimple_assign_set_lhs (*stmt, lhs);
3047 	    }
3048 	  else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3049 		   && !contains_vce_or_bfcref_p (rhs)
3050 		   && !access_has_children_p (racc))
3051 	    rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3052 
3053 	  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3054 	    {
3055 	      rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3056 				     rhs);
3057 	      if (is_gimple_reg_type (TREE_TYPE (lhs))
3058 		  && TREE_CODE (lhs) != SSA_NAME)
3059 		force_gimple_rhs = true;
3060 	    }
3061 	}
3062     }
3063 
3064   /* From this point on, the function deals with assignments in between
3065      aggregates when at least one has scalar reductions of some of its
3066    components.  There are three possible scenarios: 1) both the LHS and RHS
3067    have to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3068 
3069      In the first case, we would like to load the LHS components from RHS
3070      components whenever possible.  If that is not possible, we would like to
3071      read it directly from the RHS (after updating it by storing in it its own
3072      components).  If there are some necessary unscalarized data in the LHS,
3073      those will be loaded by the original assignment too.  If neither of these
3074      cases happen, the original statement can be removed.  Most of this is done
3075      by load_assign_lhs_subreplacements.
3076 
3077      In the second case, we would like to store all RHS scalarized components
3078      directly into LHS and if they cover the aggregate completely, remove the
3079      statement too.  In the third case, we want the LHS components to be loaded
3080      directly from the RHS (DSE will remove the original statement if it
3081      becomes redundant).
3082 
3083      This is a bit complex but manageable when types match and when unions do
3084      not cause confusion in a way that we cannot really load a component of LHS
3085      from the RHS or vice versa (the access representing this level can have
3086      subaccesses that are accessible only through a different union field at a
3087      higher level - different from the one used in the examined expression).
3088      Unions are fun.
3089 
3090      Therefore, I specially handle a fourth case, happening when there is a
3091      specific type cast or it is impossible to locate a scalarized subaccess on
3092      the other side of the expression.  If that happens, I simply "refresh" the
3093    RHS by storing in it its scalarized components, leave the original statement
3094      there to do the copying and then load the scalar replacements of the LHS.
3095      This is what the first branch does.  */
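  /* Editorial illustration (not from the original sources): in the second
     scenario, for "d = s" where only s is scalarized and has no unscalarized
     data, the else branch below replaces the statement with direct stores
     such as "d.x = SR.x; d.y = SR.y;" and deletes the original aggregate
     copy.  */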
3096 
3097   if (modify_this_stmt
3098       || gimple_has_volatile_ops (*stmt)
3099       || contains_vce_or_bfcref_p (rhs)
3100       || contains_vce_or_bfcref_p (lhs))
3101     {
3102       if (access_has_children_p (racc))
3103 	generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3104 				 gsi, false, false, loc);
3105       if (access_has_children_p (lacc))
3106 	generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
3107 				 gsi, true, true, loc);
3108       sra_stats.separate_lhs_rhs_handling++;
3109 
3110       /* This gimplification must be done after generate_subtree_copies,
3111 	 lest we insert the subtree copies in the middle of the gimplified
3112 	 sequence.  */
3113       if (force_gimple_rhs)
3114 	rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3115 					true, GSI_SAME_STMT);
3116       if (gimple_assign_rhs1 (*stmt) != rhs)
3117 	{
3118 	  modify_this_stmt = true;
3119 	  gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3120 	  gcc_assert (*stmt == gsi_stmt (orig_gsi));
3121 	}
3122 
3123       return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3124     }
3125   else
3126     {
3127       if (access_has_children_p (lacc)
3128 	  && access_has_children_p (racc)
3129 	  /* When an access represents an unscalarizable region, it usually
3130 	     represents accesses with variable offset and thus must not be used
3131 	     to generate new memory accesses.  */
3132 	  && !lacc->grp_unscalarizable_region
3133 	  && !racc->grp_unscalarizable_region)
3134 	{
3135 	  gimple_stmt_iterator orig_gsi = *gsi;
3136 	  enum unscalarized_data_handling refreshed;
3137 
3138 	  if (lacc->grp_read && !lacc->grp_covered)
3139 	    refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
3140 	  else
3141 	    refreshed = SRA_UDH_NONE;
3142 
3143 	  load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
3144 					   &orig_gsi, gsi, &refreshed);
3145 	  if (refreshed != SRA_UDH_RIGHT)
3146 	    {
3147 	      gsi_next (gsi);
3148 	      unlink_stmt_vdef (*stmt);
3149 	      gsi_remove (&orig_gsi, true);
3150 	      sra_stats.deleted++;
3151 	      return SRA_AM_REMOVED;
3152 	    }
3153 	}
3154       else
3155 	{
3156 	  if (access_has_children_p (racc)
3157 	      && !racc->grp_unscalarized_data)
3158 	    {
3159 	      if (dump_file)
3160 		{
3161 		  fprintf (dump_file, "Removing load: ");
3162 		  print_gimple_stmt (dump_file, *stmt, 0, 0);
3163 		}
3164 	      generate_subtree_copies (racc->first_child, lhs,
3165 				       racc->offset, 0, 0, gsi,
3166 				       false, false, loc);
3167 	      gcc_assert (*stmt == gsi_stmt (*gsi));
3168 	      unlink_stmt_vdef (*stmt);
3169 	      gsi_remove (gsi, true);
3170 	      sra_stats.deleted++;
3171 	      return SRA_AM_REMOVED;
3172 	    }
3173 	  /* Restore the aggregate RHS from its components so the
3174 	     prevailing aggregate copy does the right thing.  */
3175 	  if (access_has_children_p (racc))
3176 	    generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3177 				     gsi, false, false, loc);
3178 	  /* Re-load the components of the aggregate copy destination.
3179 	     But use the RHS aggregate to load from to expose more
3180 	     optimization opportunities.  */
3181 	  if (access_has_children_p (lacc))
3182 	    generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3183 				     0, 0, gsi, true, true, loc);
3184 	}
3185 
3186       return SRA_AM_NONE;
3187     }
3188 }
3189 
3190 /* Traverse the function body and perform all modifications as decided in
3191    analyze_all_variable_accesses.  Return true iff the CFG has been
3192    changed.  */
3193 
3194 static bool
3195 sra_modify_function_body (void)
3196 {
3197   bool cfg_changed = false;
3198   basic_block bb;
3199 
3200   FOR_EACH_BB (bb)
3201     {
3202       gimple_stmt_iterator gsi = gsi_start_bb (bb);
3203       while (!gsi_end_p (gsi))
3204 	{
3205 	  gimple stmt = gsi_stmt (gsi);
3206 	  enum assignment_mod_result assign_result;
3207 	  bool modified = false, deleted = false;
3208 	  tree *t;
3209 	  unsigned i;
3210 
3211 	  switch (gimple_code (stmt))
3212 	    {
3213 	    case GIMPLE_RETURN:
3214 	      t = gimple_return_retval_ptr (stmt);
3215 	      if (*t != NULL_TREE)
3216 		modified |= sra_modify_expr (t, &gsi, false);
3217 	      break;
3218 
3219 	    case GIMPLE_ASSIGN:
3220 	      assign_result = sra_modify_assign (&stmt, &gsi);
3221 	      modified |= assign_result == SRA_AM_MODIFIED;
3222 	      deleted = assign_result == SRA_AM_REMOVED;
3223 	      break;
3224 
3225 	    case GIMPLE_CALL:
3226 	      /* Operands must be processed before the lhs.  */
3227 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
3228 		{
3229 		  t = gimple_call_arg_ptr (stmt, i);
3230 		  modified |= sra_modify_expr (t, &gsi, false);
3231 		}
3232 
3233 	      if (gimple_call_lhs (stmt))
3234 		{
3235 		  t = gimple_call_lhs_ptr (stmt);
3236 		  modified |= sra_modify_expr (t, &gsi, true);
3237 		}
3238 	      break;
3239 
3240 	    case GIMPLE_ASM:
3241 	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3242 		{
3243 		  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3244 		  modified |= sra_modify_expr (t, &gsi, false);
3245 		}
3246 	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3247 		{
3248 		  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3249 		  modified |= sra_modify_expr (t, &gsi, true);
3250 		}
3251 	      break;
3252 
3253 	    default:
3254 	      break;
3255 	    }
3256 
3257 	  if (modified)
3258 	    {
3259 	      update_stmt (stmt);
3260 	      if (maybe_clean_eh_stmt (stmt)
3261 		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3262 		cfg_changed = true;
3263 	    }
3264 	  if (!deleted)
3265 	    gsi_next (&gsi);
3266 	}
3267     }
3268 
3269   return cfg_changed;
3270 }
3271 
3272 /* Generate statements initializing scalar replacements of parts of function
3273    parameters.  */
3274 
3275 static void
3276 initialize_parameter_reductions (void)
3277 {
3278   gimple_stmt_iterator gsi;
3279   gimple_seq seq = NULL;
3280   tree parm;
3281 
3282   for (parm = DECL_ARGUMENTS (current_function_decl);
3283        parm;
3284        parm = DECL_CHAIN (parm))
3285     {
3286       VEC (access_p, heap) *access_vec;
3287       struct access *access;
3288 
3289       if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3290 	continue;
3291       access_vec = get_base_access_vector (parm);
3292       if (!access_vec)
3293 	continue;
3294 
3295       if (!seq)
3296 	{
3297 	  seq = gimple_seq_alloc ();
3298 	  gsi = gsi_start (seq);
3299 	}
3300 
3301       for (access = VEC_index (access_p, access_vec, 0);
3302 	   access;
3303 	   access = access->next_grp)
3304 	generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3305 				 EXPR_LOCATION (parm));
3306     }
3307 
3308   if (seq)
3309     gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
3310 }
3311 
3312 /* The "main" function of intraprocedural SRA passes.  Runs the analysis and,
3313    if it reveals that components of some aggregates should be scalarized,
3314    performs the required transformations.  */
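
/* As a quick illustration of the overall effect (a sketch only; the actual
   GIMPLE temporaries, SSA names and replacement names will differ):

     struct point { int x; int y; };
     static int norm1 (void)
     {
       struct point p;
       p.x = 3;
       p.y = 4;
       return p.x + p.y;
     }

   After SRA, p.x and p.y are accessed through independent scalar
   replacements (named along the lines of p$x and p$y), the aggregate P
   itself becomes dead, and later scalar optimizers can typically reduce
   the function to a plain "return 7;".  */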
3315 static unsigned int
3316 perform_intra_sra (void)
3317 {
3318   int ret = 0;
3319   sra_initialize ();
3320 
3321   if (!find_var_candidates ())
3322     goto out;
3323 
3324   if (!scan_function ())
3325     goto out;
3326 
3327   if (!analyze_all_variable_accesses ())
3328     goto out;
3329 
3330   if (sra_modify_function_body ())
3331     ret = TODO_update_ssa | TODO_cleanup_cfg;
3332   else
3333     ret = TODO_update_ssa;
3334   initialize_parameter_reductions ();
3335 
3336   statistics_counter_event (cfun, "Scalar replacements created",
3337 			    sra_stats.replacements);
3338   statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3339   statistics_counter_event (cfun, "Subtree copy stmts",
3340 			    sra_stats.subtree_copies);
3341   statistics_counter_event (cfun, "Subreplacement stmts",
3342 			    sra_stats.subreplacements);
3343   statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3344   statistics_counter_event (cfun, "Separate LHS and RHS handling",
3345 			    sra_stats.separate_lhs_rhs_handling);
3346 
3347  out:
3348   sra_deinitialize ();
3349   return ret;
3350 }
3351 
3352 /* Perform early intraprocedural SRA.  */
3353 static unsigned int
3354 early_intra_sra (void)
3355 {
3356   sra_mode = SRA_MODE_EARLY_INTRA;
3357   return perform_intra_sra ();
3358 }
3359 
3360 /* Perform "late" intraprocedural SRA.  */
3361 static unsigned int
3362 late_intra_sra (void)
3363 {
3364   sra_mode = SRA_MODE_INTRA;
3365   return perform_intra_sra ();
3366 }
3367 
3368 
3369 static bool
3370 gate_intra_sra (void)
3371 {
3372   return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3373 }
3374 
3375 
3376 struct gimple_opt_pass pass_sra_early =
3377 {
3378  {
3379   GIMPLE_PASS,
3380   "esra",	 			/* name */
3381   gate_intra_sra,			/* gate */
3382   early_intra_sra,			/* execute */
3383   NULL,					/* sub */
3384   NULL,					/* next */
3385   0,					/* static_pass_number */
3386   TV_TREE_SRA,				/* tv_id */
3387   PROP_cfg | PROP_ssa,                  /* properties_required */
3388   0,					/* properties_provided */
3389   0,					/* properties_destroyed */
3390   0,					/* todo_flags_start */
3391   TODO_update_ssa
3392   | TODO_ggc_collect
3393   | TODO_verify_ssa			/* todo_flags_finish */
3394  }
3395 };
3396 
3397 struct gimple_opt_pass pass_sra =
3398 {
3399  {
3400   GIMPLE_PASS,
3401   "sra",	 			/* name */
3402   gate_intra_sra,			/* gate */
3403   late_intra_sra,			/* execute */
3404   NULL,					/* sub */
3405   NULL,					/* next */
3406   0,					/* static_pass_number */
3407   TV_TREE_SRA,				/* tv_id */
3408   PROP_cfg | PROP_ssa,                  /* properties_required */
3409   0,					/* properties_provided */
3410   0,					/* properties_destroyed */
3411   TODO_update_address_taken,		/* todo_flags_start */
3412   TODO_update_ssa
3413   | TODO_ggc_collect
3414   | TODO_verify_ssa			/* todo_flags_finish */
3415  }
3416 };
3417 
3418 
3419 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3420    parameter.  */
3421 
3422 static bool
3423 is_unused_scalar_param (tree parm)
3424 {
3425   tree name;
3426   return (is_gimple_reg (parm)
3427 	  && (!(name = gimple_default_def (cfun, parm))
3428 	      || has_zero_uses (name)));
3429 }
3430 
3431 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3432    examine whether there are any direct or otherwise infeasible ones.  If so,
3433    return true, otherwise return false.  PARM must be a gimple register with a
3434    non-NULL default definition.  */
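
/* For illustration (a source-level sketch; h and q are hypothetical and the
   exact GIMPLE shapes vary):

     int *q;
     void h (int *);

     int f (int *p)
     {
       int x = *p;    <- fine: a load through P
       *p = x + 1;    <- fine: a store through P
       h (p);         <- direct use: the pointer itself escapes
       q = p;         <- direct use: the pointer value is copied
       return x;
     }

   The first two uses only go through P, whereas the last two use the
   pointer value itself, so for such a parameter this function returns
   true and P is dropped from the candidates.  */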
3435 
3436 static bool
3437 ptr_parm_has_direct_uses (tree parm)
3438 {
3439   imm_use_iterator ui;
3440   gimple stmt;
3441   tree name = gimple_default_def (cfun, parm);
3442   bool ret = false;
3443 
3444   FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3445     {
3446       int uses_ok = 0;
3447       use_operand_p use_p;
3448 
3449       if (is_gimple_debug (stmt))
3450 	continue;
3451 
3452       /* Valid uses include dereferences on the lhs and the rhs.  */
3453       if (gimple_has_lhs (stmt))
3454 	{
3455 	  tree lhs = gimple_get_lhs (stmt);
3456 	  while (handled_component_p (lhs))
3457 	    lhs = TREE_OPERAND (lhs, 0);
3458 	  if (TREE_CODE (lhs) == MEM_REF
3459 	      && TREE_OPERAND (lhs, 0) == name
3460 	      && integer_zerop (TREE_OPERAND (lhs, 1))
3461 	      && types_compatible_p (TREE_TYPE (lhs),
3462 				     TREE_TYPE (TREE_TYPE (name)))
3463 	      && !TREE_THIS_VOLATILE (lhs))
3464 	    uses_ok++;
3465 	}
3466       if (gimple_assign_single_p (stmt))
3467 	{
3468 	  tree rhs = gimple_assign_rhs1 (stmt);
3469 	  while (handled_component_p (rhs))
3470 	    rhs = TREE_OPERAND (rhs, 0);
3471 	  if (TREE_CODE (rhs) == MEM_REF
3472 	      && TREE_OPERAND (rhs, 0) == name
3473 	      && integer_zerop (TREE_OPERAND (rhs, 1))
3474 	      && types_compatible_p (TREE_TYPE (rhs),
3475 				     TREE_TYPE (TREE_TYPE (name)))
3476 	      && !TREE_THIS_VOLATILE (rhs))
3477 	    uses_ok++;
3478 	}
3479       else if (is_gimple_call (stmt))
3480 	{
3481 	  unsigned i;
3482 	  for (i = 0; i < gimple_call_num_args (stmt); ++i)
3483 	    {
3484 	      tree arg = gimple_call_arg (stmt, i);
3485 	      while (handled_component_p (arg))
3486 		arg = TREE_OPERAND (arg, 0);
3487 	      if (TREE_CODE (arg) == MEM_REF
3488 		  && TREE_OPERAND (arg, 0) == name
3489 		  && integer_zerop (TREE_OPERAND (arg, 1))
3490 		  && types_compatible_p (TREE_TYPE (arg),
3491 					 TREE_TYPE (TREE_TYPE (name)))
3492 		  && !TREE_THIS_VOLATILE (arg))
3493 		uses_ok++;
3494 	    }
3495 	}
3496 
3497       /* If the number of valid uses does not match the number of
3498          uses in this stmt, there is an unhandled use.  */
3499       FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3500 	--uses_ok;
3501 
3502       if (uses_ok != 0)
3503 	ret = true;
3504 
3505       if (ret)
3506 	BREAK_FROM_IMM_USE_STMT (ui);
3507     }
3508 
3509   return ret;
3510 }
3511 
3512 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3513    them in candidate_bitmap.  Note that these do not necessarily include
3514    parameters which are unused and thus can be removed.  Return true iff any
3515    such candidate has been found.  */
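
/* A rough illustration of what typically ends up in the bitmap (a sketch
   only; the authoritative conditions are the checks in the loop below):

     void f (struct big s,        <- candidate: aggregate passed by value
             struct big *p,       <- candidate, provided P is only ever
                                     dereferenced and never escapes
             int n,               <- not a candidate; merely noted if it is
                                     a completely unused scalar
             int (*fn) (void));   <- not a candidate: pointer to a function

   Here 'struct big' is assumed to be a complete type whose internals do
   not preclude SRA.  */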
3516 
3517 static bool
3518 find_param_candidates (void)
3519 {
3520   tree parm;
3521   int count = 0;
3522   bool ret = false;
3523   const char *msg;
3524 
3525   for (parm = DECL_ARGUMENTS (current_function_decl);
3526        parm;
3527        parm = DECL_CHAIN (parm))
3528     {
3529       tree type = TREE_TYPE (parm);
3530 
3531       count++;
3532 
3533       if (TREE_THIS_VOLATILE (parm)
3534 	  || TREE_ADDRESSABLE (parm)
3535 	  || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3536 	continue;
3537 
3538       if (is_unused_scalar_param (parm))
3539 	{
3540 	  ret = true;
3541 	  continue;
3542 	}
3543 
3544       if (POINTER_TYPE_P (type))
3545 	{
3546 	  type = TREE_TYPE (type);
3547 
3548 	  if (TREE_CODE (type) == FUNCTION_TYPE
3549 	      || TYPE_VOLATILE (type)
3550 	      || (TREE_CODE (type) == ARRAY_TYPE
3551 		  && TYPE_NONALIASED_COMPONENT (type))
3552 	      || !is_gimple_reg (parm)
3553 	      || is_va_list_type (type)
3554 	      || ptr_parm_has_direct_uses (parm))
3555 	    continue;
3556 	}
3557       else if (!AGGREGATE_TYPE_P (type))
3558 	continue;
3559 
3560       if (!COMPLETE_TYPE_P (type)
3561 	  || !host_integerp (TYPE_SIZE (type), 1)
3562           || tree_low_cst (TYPE_SIZE (type), 1) == 0
3563 	  || (AGGREGATE_TYPE_P (type)
3564 	      && type_internals_preclude_sra_p (type, &msg)))
3565 	continue;
3566 
3567       bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3568       ret = true;
3569       if (dump_file && (dump_flags & TDF_DETAILS))
3570 	{
3571 	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3572 	  print_generic_expr (dump_file, parm, 0);
3573 	  fprintf (dump_file, "\n");
3574 	}
3575     }
3576 
3577   func_param_count = count;
3578   return ret;
3579 }
3580 
3581 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3582    maybe_modified. */
3583 
3584 static bool
3585 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3586 		     void *data)
3587 {
3588   struct access *repr = (struct access *) data;
3589 
3590   repr->grp_maybe_modified = 1;
3591   return true;
3592 }
3593 
3594 /* Analyze what representatives (in linked lists accessible from
3595    REPRESENTATIVES) can be modified by side effects of statements in the
3596    current function.  */
3597 
3598 static void
3599 analyze_modified_params (VEC (access_p, heap) *representatives)
3600 {
3601   int i;
3602 
3603   for (i = 0; i < func_param_count; i++)
3604     {
3605       struct access *repr;
3606 
3607       for (repr = VEC_index (access_p, representatives, i);
3608 	   repr;
3609 	   repr = repr->next_grp)
3610 	{
3611 	  struct access *access;
3612 	  bitmap visited;
3613 	  ao_ref ar;
3614 
3615 	  if (no_accesses_p (repr))
3616 	    continue;
3617 	  if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3618 	      || repr->grp_maybe_modified)
3619 	    continue;
3620 
3621 	  ao_ref_init (&ar, repr->expr);
3622 	  visited = BITMAP_ALLOC (NULL);
3623 	  for (access = repr; access; access = access->next_sibling)
3624 	    {
3625 	      /* All accesses are read ones, otherwise grp_maybe_modified would
3626 		 be trivially set.  */
3627 	      walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3628 				  mark_maybe_modified, repr, &visited);
3629 	      if (repr->grp_maybe_modified)
3630 		break;
3631 	    }
3632 	  BITMAP_FREE (visited);
3633 	}
3634     }
3635 }
3636 
3637 /* Propagate distances in bb_dereferences in the direction opposite to the
3638    control flow edges, in each step storing the maximum of the current value
3639    and the minimum of all successors.  These steps are repeated until the table
3640    stabilizes.  Note that BBs which might terminate the function (according to
3641    the final_bbs bitmap) are never updated in this way.  */
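
/* A small made-up example of the propagation for one parameter, on a
   diamond-shaped CFG (the numbers are dereference distances in bits):

          BB2: 32
         /       \
     BB3: 0     BB4: 64
         \       /
          BB5: 64

   Processing against the edges, BB3 and BB4 both inherit the minimum of
   their successors (64) and keep the maximum of that and their own value,
   so BB3 is raised to 64; BB2 then inherits min (64, 64) = 64 and is
   raised from 32 to 64.  In other words, a dereference reaching
   offset + size 64 is known to happen on every path from BB2 onwards.
   Blocks recorded in final_bbs are skipped because execution may stop in
   them before the dereference is reached.  */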
3642 
3643 static void
3644 propagate_dereference_distances (void)
3645 {
3646   VEC (basic_block, heap) *queue;
3647   basic_block bb;
3648 
3649   queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3650   VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3651   FOR_EACH_BB (bb)
3652     {
3653       VEC_quick_push (basic_block, queue, bb);
3654       bb->aux = bb;
3655     }
3656 
3657   while (!VEC_empty (basic_block, queue))
3658     {
3659       edge_iterator ei;
3660       edge e;
3661       bool change = false;
3662       int i;
3663 
3664       bb = VEC_pop (basic_block, queue);
3665       bb->aux = NULL;
3666 
3667       if (bitmap_bit_p (final_bbs, bb->index))
3668 	continue;
3669 
3670       for (i = 0; i < func_param_count; i++)
3671 	{
3672 	  int idx = bb->index * func_param_count + i;
3673 	  bool first = true;
3674 	  HOST_WIDE_INT inh = 0;
3675 
3676 	  FOR_EACH_EDGE (e, ei, bb->succs)
3677 	  {
3678 	    int succ_idx = e->dest->index * func_param_count + i;
3679 
3680 	    if (e->dest == EXIT_BLOCK_PTR)
3681 	      continue;
3682 
3683 	    if (first)
3684 	      {
3685 		first = false;
3686 		inh = bb_dereferences [succ_idx];
3687 	      }
3688 	    else if (bb_dereferences [succ_idx] < inh)
3689 	      inh = bb_dereferences [succ_idx];
3690 	  }
3691 
3692 	  if (!first && bb_dereferences[idx] < inh)
3693 	    {
3694 	      bb_dereferences[idx] = inh;
3695 	      change = true;
3696 	    }
3697 	}
3698 
3699       if (change && !bitmap_bit_p (final_bbs, bb->index))
3700 	FOR_EACH_EDGE (e, ei, bb->preds)
3701 	  {
3702 	    if (e->src->aux)
3703 	      continue;
3704 
3705 	    e->src->aux = e->src;
3706 	    VEC_quick_push (basic_block, queue, e->src);
3707 	  }
3708     }
3709 
3710   VEC_free (basic_block, heap, queue);
3711 }
3712 
3713 /* Dump a dereferences TABLE with heading STR to file F.  */
3714 
3715 static void
3716 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3717 {
3718   basic_block bb;
3719 
3720   fprintf (f, "%s", str);
3721   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3722     {
3723       fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3724       if (bb != EXIT_BLOCK_PTR)
3725 	{
3726 	  int i;
3727 	  for (i = 0; i < func_param_count; i++)
3728 	    {
3729 	      int idx = bb->index * func_param_count + i;
3730 	      fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3731 	    }
3732 	}
3733       fprintf (f, "\n");
3734     }
3735   fprintf (f, "\n");
3736 }
3737 
3738 /* Determine which (parts of) parameters passed by reference and not
3739    assigned to are not certainly dereferenced in this function and thus the
3740    dereferencing cannot be safely moved to the caller without potentially
3741    introducing a segfault.  Mark such REPRESENTATIVES as
3742    grp_not_necessarilly_dereferenced.
3743 
3744    The maximum dereferenced "distance," i.e. the offset + size of the accessed
3745    part, is calculated for each pointer parameter rather than a simple boolean,
3746    in order to handle cases when only a fraction of the whole
3747    aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3748    an example).
3749 
3750    The maximum dereference distances for each pointer parameter and BB are
3751    already stored in bb_dereferences.  This routine simply propagates these
3752    values upwards by propagate_dereference_distances and then compares the
3753    distances of individual parameters in the ENTRY BB to the equivalent
3754    distances of each representative of a (fraction of a) parameter.  */
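
/* A minimal sketch of the situation this guards against (identifiers are
   made up, loosely in the spirit of the ipa-sra-2.c test):

     struct pair { int first; int second; };

     static int
     get_second (struct pair *p, int have_second)
     {
       if (have_second)
         return p->second;
       return 0;
     }

   A caller may pass a pointer to storage that only holds 'first', together
   with have_second == 0.  Hoisting the load of p->second into such a
   caller would read past the allocation, and because the dereference
   distance of p->second exceeds the distance known to be reached on every
   path from the ENTRY block, its representative is marked
   grp_not_necessarilly_dereferenced, which prevents turning that access
   into a by-value component.  */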
3755 
3756 static void
3757 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3758 {
3759   int i;
3760 
3761   if (dump_file && (dump_flags & TDF_DETAILS))
3762     dump_dereferences_table (dump_file,
3763 			     "Dereference table before propagation:\n",
3764 			     bb_dereferences);
3765 
3766   propagate_dereference_distances ();
3767 
3768   if (dump_file && (dump_flags & TDF_DETAILS))
3769     dump_dereferences_table (dump_file,
3770 			     "Dereference table after propagation:\n",
3771 			     bb_dereferences);
3772 
3773   for (i = 0; i < func_param_count; i++)
3774     {
3775       struct access *repr = VEC_index (access_p, representatives, i);
3776       int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3777 
3778       if (!repr || no_accesses_p (repr))
3779 	continue;
3780 
3781       do
3782 	{
3783 	  if ((repr->offset + repr->size) > bb_dereferences[idx])
3784 	    repr->grp_not_necessarilly_dereferenced = 1;
3785 	  repr = repr->next_grp;
3786 	}
3787       while (repr);
3788     }
3789 }
3790 
3791 /* Return the representative access for the parameter declaration PARM if it is
3792    a scalar passed by reference which is not written to and the pointer value
3793    is not used directly.  Thus, if it is legal to dereference it in the caller
3794    and we can rule out modifications through aliases, such a parameter should
3795    be turned into one passed by value.  Return NULL otherwise.  */
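
/* For example (a sketch of the intended effect only; the clone name and
   the exact signature rewriting are up to the rest of the pass):

     static int add_one (const int *p) { return *p + 1; }

   called everywhere as add_one (&x), may be turned into

     static int add_one.isra (int p_val) { return p_val + 1; }

   with the call sites changed to add_one.isra (x), i.e. the dereference
   is moved from the callee to its callers.  */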
3796 
3797 static struct access *
3798 unmodified_by_ref_scalar_representative (tree parm)
3799 {
3800   int i, access_count;
3801   struct access *repr;
3802   VEC (access_p, heap) *access_vec;
3803 
3804   access_vec = get_base_access_vector (parm);
3805   gcc_assert (access_vec);
3806   repr = VEC_index (access_p, access_vec, 0);
3807   if (repr->write)
3808     return NULL;
3809   repr->group_representative = repr;
3810 
3811   access_count = VEC_length (access_p, access_vec);
3812   for (i = 1; i < access_count; i++)
3813     {
3814       struct access *access = VEC_index (access_p, access_vec, i);
3815       if (access->write)
3816 	return NULL;
3817       access->group_representative = repr;
3818       access->next_sibling = repr->next_sibling;
3819       repr->next_sibling = access;
3820     }
3821 
3822   repr->grp_read = 1;
3823   repr->grp_scalar_ptr = 1;
3824   return repr;
3825 }
3826 
3827 /* Return true iff this access precludes IPA-SRA of the parameter it is
3828    associated with. */
3829 
3830 static bool
3831 access_precludes_ipa_sra_p (struct access *access)
3832 {
3833   /* Avoid issues such as the second simple testcase in PR 42025.  The problem
3834      is an incompatible assignment in a call statement (and possibly in asm
3835      statements).  This can be relaxed by using a new temporary but only for
3836      non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3837      intraprocedural SRA we deal with this by keeping the old aggregate around,
3838      something we cannot do in IPA-SRA.)  */
3839   if (access->write
3840       && (is_gimple_call (access->stmt)
3841 	  || gimple_code (access->stmt) == GIMPLE_ASM))
3842     return true;
3843 
3844   if (STRICT_ALIGNMENT
3845       && tree_non_aligned_mem_p (access->expr, TYPE_ALIGN (access->type)))
3846     return true;
3847 
3848   return false;
3849 }
3850 
3851 
3852 /* Sort collected accesses for parameter PARM, identify representatives for
3853    each accessed region and link them together.  Return NULL if there are
3854    different but overlapping accesses, return the special pointer value
3855    &no_accesses_representant if there are no accesses for this parameter, and
3856    return the first representative otherwise.  Set *RO_GRP if there is a group
3857    of accesses with only read (i.e. no write) accesses.  */
3858 
3859 static struct access *
3860 splice_param_accesses (tree parm, bool *ro_grp)
3861 {
3862   int i, j, access_count, group_count;
3863   int agg_size, total_size = 0;
3864   struct access *access, *res, **prev_acc_ptr = &res;
3865   VEC (access_p, heap) *access_vec;
3866 
3867   access_vec = get_base_access_vector (parm);
3868   if (!access_vec)
3869     return &no_accesses_representant;
3870   access_count = VEC_length (access_p, access_vec);
3871 
3872   VEC_qsort (access_p, access_vec, compare_access_positions);
3873 
3874   i = 0;
3875   total_size = 0;
3876   group_count = 0;
3877   while (i < access_count)
3878     {
3879       bool modification;
3880       tree a1_alias_type;
3881       access = VEC_index (access_p, access_vec, i);
3882       modification = access->write;
3883       if (access_precludes_ipa_sra_p (access))
3884 	return NULL;
3885       a1_alias_type = reference_alias_ptr_type (access->expr);
3886 
3887       /* Access is about to become group representative unless we find some
3888 	 nasty overlap which would preclude us from breaking this parameter
3889 	 apart. */
3890 
3891       j = i + 1;
3892       while (j < access_count)
3893 	{
3894 	  struct access *ac2 = VEC_index (access_p, access_vec, j);
3895 	  if (ac2->offset != access->offset)
3896 	    {
3897 	      /* All or nothing law for parameters. */
3898 	      if (access->offset + access->size > ac2->offset)
3899 		return NULL;
3900 	      else
3901 		break;
3902 	    }
3903 	  else if (ac2->size != access->size)
3904 	    return NULL;
3905 
3906 	  if (access_precludes_ipa_sra_p (ac2)
3907 	      || (ac2->type != access->type
3908 		  && (TREE_ADDRESSABLE (ac2->type)
3909 		      || TREE_ADDRESSABLE (access->type)))
3910 	      || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
3911 	    return NULL;
3912 
3913 	  modification |= ac2->write;
3914 	  ac2->group_representative = access;
3915 	  ac2->next_sibling = access->next_sibling;
3916 	  access->next_sibling = ac2;
3917 	  j++;
3918 	}
3919 
3920       group_count++;
3921       access->grp_maybe_modified = modification;
3922       if (!modification)
3923 	*ro_grp = true;
3924       *prev_acc_ptr = access;
3925       prev_acc_ptr = &access->next_grp;
3926       total_size += access->size;
3927       i = j;
3928     }
3929 
3930   if (POINTER_TYPE_P (TREE_TYPE (parm)))
3931     agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3932   else
3933     agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3934   if (total_size >= agg_size)
3935     return NULL;
3936 
3937   gcc_assert (group_count > 0);
3938   return res;
3939 }
3940 
3941 /* Decide whether the parameter with representative accesses given by REPR
3942    should be reduced into components; return their number, or 0 if not.  */
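
/* A made-up numeric sketch of the heuristic below, assuming a 64-bit
   target and an ipa-sra-ptr-growth-factor of 2 (its usual default): for a
   pointer parameter (cur_parm_size == 64) to a 128-bit struct
   (agg_size == 128) of which two 32-bit fields are actually accessed,
   total_size is 64.  Since 64 < 128 (agg_size) and 64 <= 128
   (parm_size_limit), the parameter is split into two components.  Had
   (almost) the whole aggregate been accessed, or had splitting exceeded
   the size limit, zero would be returned instead.  */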
3943 
3944 static int
3945 decide_one_param_reduction (struct access *repr)
3946 {
3947   int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3948   bool by_ref;
3949   tree parm;
3950 
3951   parm = repr->base;
3952   cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3953   gcc_assert (cur_parm_size > 0);
3954 
3955   if (POINTER_TYPE_P (TREE_TYPE (parm)))
3956     {
3957       by_ref = true;
3958       agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3959     }
3960   else
3961     {
3962       by_ref = false;
3963       agg_size = cur_parm_size;
3964     }
3965 
3966   if (dump_file)
3967     {
3968       struct access *acc;
3969       fprintf (dump_file, "Evaluating PARAM group sizes for ");
3970       print_generic_expr (dump_file, parm, 0);
3971       fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3972       for (acc = repr; acc; acc = acc->next_grp)
3973 	dump_access (dump_file, acc, true);
3974     }
3975 
3976   total_size = 0;
3977   new_param_count = 0;
3978 
3979   for (; repr; repr = repr->next_grp)
3980     {
3981       gcc_assert (parm == repr->base);
3982 
3983       /* Taking the address of a non-addressable field is verboten.  */
3984       if (by_ref && repr->non_addressable)
3985 	return 0;
3986 
3987       /* Do not decompose a non-BLKmode param in a way that would
3988          create BLKmode params.  Especially for by-reference passing
3989 	 (thus, pointer-type param) this is hardly worthwhile.  */
3990       if (DECL_MODE (parm) != BLKmode
3991 	  && TYPE_MODE (repr->type) == BLKmode)
3992 	return 0;
3993 
3994       if (!by_ref || (!repr->grp_maybe_modified
3995 		      && !repr->grp_not_necessarilly_dereferenced))
3996 	total_size += repr->size;
3997       else
3998 	total_size += cur_parm_size;
3999 
4000       new_param_count++;
4001     }
4002 
4003   gcc_assert (new_param_count > 0);
4004 
4005   if (optimize_function_for_size_p (cfun))
4006     parm_size_limit = cur_parm_size;
4007   else
4008     parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4009                        * cur_parm_size);
4010 
4011   if (total_size < agg_size
4012       && total_size <= parm_size_limit)
4013     {
4014       if (dump_file)
4015 	fprintf (dump_file, "    ....will be split into %i components\n",
4016 		 new_param_count);
4017       return new_param_count;
4018     }
4019   else
4020     return 0;
4021 }
4022 
4023 /* The order of the following enum values is important; we need to do extra
4024    work for UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
4025 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4026 			  MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4027 
4028 /* Identify representatives of all accesses to all candidate parameters for
4029    IPA-SRA.  Return result based on what representatives have been found. */
4030 
4031 static enum ipa_splicing_result
4032 splice_all_param_accesses (VEC (access_p, heap) **representatives)
4033 {
4034   enum ipa_splicing_result result = NO_GOOD_ACCESS;
4035   tree parm;
4036   struct access *repr;
4037 
4038   *representatives = VEC_alloc (access_p, heap, func_param_count);
4039 
4040   for (parm = DECL_ARGUMENTS (current_function_decl);
4041        parm;
4042        parm = DECL_CHAIN (parm))
4043     {
4044       if (is_unused_scalar_param (parm))
4045 	{
4046 	  VEC_quick_push (access_p, *representatives,
4047 			  &no_accesses_representant);
4048 	  if (result == NO_GOOD_ACCESS)
4049 	    result = UNUSED_PARAMS;
4050 	}
4051       else if (POINTER_TYPE_P (TREE_TYPE (parm))
4052 	       && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4053 	       && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4054 	{
4055 	  repr = unmodified_by_ref_scalar_representative (parm);
4056 	  VEC_quick_push (access_p, *representatives, repr);
4057 	  if (repr)
4058 	    result = UNMODIF_BY_REF_ACCESSES;
4059 	}
4060       else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4061 	{
4062 	  bool ro_grp = false;
4063 	  repr = splice_param_accesses (parm, &ro_grp);
4064 	  VEC_quick_push (access_p, *representatives, repr);
4065 
4066 	  if (repr && !no_accesses_p (repr))
4067 	    {
4068 	      if (POINTER_TYPE_P (TREE_TYPE (parm)))
4069 		{
4070 		  if (ro_grp)
4071 		    result = UNMODIF_BY_REF_ACCESSES;
4072 		  else if (result < MODIF_BY_REF_ACCESSES)
4073 		    result = MODIF_BY_REF_ACCESSES;
4074 		}
4075 	      else if (result < BY_VAL_ACCESSES)
4076 		result = BY_VAL_ACCESSES;
4077 	    }
4078 	  else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4079 	    result = UNUSED_PARAMS;
4080 	}
4081       else
4082 	VEC_quick_push (access_p, *representatives, NULL);
4083     }
4084 
4085   if (result == NO_GOOD_ACCESS)
4086     {
4087       VEC_free (access_p, heap, *representatives);
4088       *representatives = NULL;
4089       return NO_GOOD_ACCESS;
4090     }
4091 
4092   return result;
4093 }
4094 
4095 /* Return the index of BASE in PARMS.  Abort if it is not found.  */
4096 
4097 static inline int
4098 get_param_index (tree base, VEC(tree, heap) *parms)
4099 {
4100   int i, len;
4101 
4102   len = VEC_length (tree, parms);
4103   for (i = 0; i < len; i++)
4104     if (VEC_index (tree, parms, i) == base)
4105       return i;
4106   gcc_unreachable ();
4107 }
4108 
4109 /* Convert the decisions made at the representative level into compact
4110    parameter adjustments.  REPRESENTATIVES are pointers to the first
4111    representatives of each parameter's accesses; ADJUSTMENTS_COUNT is the
4112    expected final number of adjustments.  */
4113 
4114 static ipa_parm_adjustment_vec
4115 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
4116 				       int adjustments_count)
4117 {
4118   VEC (tree, heap) *parms;
4119   ipa_parm_adjustment_vec adjustments;
4120   tree parm;
4121   int i;
4122 
4123   gcc_assert (adjustments_count > 0);
4124   parms = ipa_get_vector_of_formal_parms (current_function_decl);
4125   adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
4126   parm = DECL_ARGUMENTS (current_function_decl);
4127   for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4128     {
4129       struct access *repr = VEC_index (access_p, representatives, i);
4130 
4131       if (!repr || no_accesses_p (repr))
4132 	{
4133 	  struct ipa_parm_adjustment *adj;
4134 
4135 	  adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
4136 	  memset (adj, 0, sizeof (*adj));
4137 	  adj->base_index = get_param_index (parm, parms);
4138 	  adj->base = parm;
4139 	  if (!repr)
4140 	    adj->copy_param = 1;
4141 	  else
4142 	    adj->remove_param = 1;
4143 	}
4144       else
4145 	{
4146 	  struct ipa_parm_adjustment *adj;
4147 	  int index = get_param_index (parm, parms);
4148 
4149 	  for (; repr; repr = repr->next_grp)
4150 	    {
4151 	      adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
4152 	      memset (adj, 0, sizeof (*adj));
4153 	      gcc_assert (repr->base == parm);
4154 	      adj->base_index = index;
4155 	      adj->base = repr->base;
4156 	      adj->type = repr->type;
4157 	      adj->alias_ptr_type = reference_alias_ptr_type (repr->expr);
4158 	      adj->offset = repr->offset;
4159 	      adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4160 			     && (repr->grp_maybe_modified
4161 				 || repr->grp_not_necessarilly_dereferenced));
4162 
4163 	    }
4164 	}
4165     }
4166   VEC_free (tree, heap, parms);
4167   return adjustments;
4168 }
4169 
4170 /* Analyze the collected accesses and produce a plan for what to do with the
4171    parameters in the form of adjustments, or NULL if nothing is to be done.  */
4172 
4173 static ipa_parm_adjustment_vec
4174 analyze_all_param_acesses (void)
4175 {
4176   enum ipa_splicing_result repr_state;
4177   bool proceed = false;
4178   int i, adjustments_count = 0;
4179   VEC (access_p, heap) *representatives;
4180   ipa_parm_adjustment_vec adjustments;
4181 
4182   repr_state = splice_all_param_accesses (&representatives);
4183   if (repr_state == NO_GOOD_ACCESS)
4184     return NULL;
4185 
4186   /* If there are any parameters passed by reference which are not modified
4187      directly, we need to check whether they can be modified indirectly.  */
4188   if (repr_state == UNMODIF_BY_REF_ACCESSES)
4189     {
4190       analyze_caller_dereference_legality (representatives);
4191       analyze_modified_params (representatives);
4192     }
4193 
4194   for (i = 0; i < func_param_count; i++)
4195     {
4196       struct access *repr = VEC_index (access_p, representatives, i);
4197 
4198       if (repr && !no_accesses_p (repr))
4199 	{
4200 	  if (repr->grp_scalar_ptr)
4201 	    {
4202 	      adjustments_count++;
4203 	      if (repr->grp_not_necessarilly_dereferenced
4204 		  || repr->grp_maybe_modified)
4205 		VEC_replace (access_p, representatives, i, NULL);
4206 	      else
4207 		{
4208 		  proceed = true;
4209 		  sra_stats.scalar_by_ref_to_by_val++;
4210 		}
4211 	    }
4212 	  else
4213 	    {
4214 	      int new_components = decide_one_param_reduction (repr);
4215 
4216 	      if (new_components == 0)
4217 		{
4218 		  VEC_replace (access_p, representatives, i, NULL);
4219 		  adjustments_count++;
4220 		}
4221 	      else
4222 		{
4223 		  adjustments_count += new_components;
4224 		  sra_stats.aggregate_params_reduced++;
4225 		  sra_stats.param_reductions_created += new_components;
4226 		  proceed = true;
4227 		}
4228 	    }
4229 	}
4230       else
4231 	{
4232 	  if (no_accesses_p (repr))
4233 	    {
4234 	      proceed = true;
4235 	      sra_stats.deleted_unused_parameters++;
4236 	    }
4237 	  adjustments_count++;
4238 	}
4239     }
4240 
4241   if (!proceed && dump_file)
4242     fprintf (dump_file, "NOT proceeding to change params.\n");
4243 
4244   if (proceed)
4245     adjustments = turn_representatives_into_adjustments (representatives,
4246 							 adjustments_count);
4247   else
4248     adjustments = NULL;
4249 
4250   VEC_free (access_p, heap, representatives);
4251   return adjustments;
4252 }
4253 
4254 /* If a parameter replacement identified by ADJ does not yet exist in the form
4255    of declaration, create it and record it, otherwise return the previously
4256    created one.  */
4257 
4258 static tree
4259 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4260 {
4261   tree repl;
4262   if (!adj->new_ssa_base)
4263     {
4264       char *pretty_name = make_fancy_name (adj->base);
4265 
4266       repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4267       DECL_NAME (repl) = get_identifier (pretty_name);
4268       obstack_free (&name_obstack, pretty_name);
4269 
4270       add_referenced_var (repl);
4271       adj->new_ssa_base = repl;
4272     }
4273   else
4274     repl = adj->new_ssa_base;
4275   return repl;
4276 }
4277 
4278 /* Find the first adjustment for a particular parameter BASE in a vector of
4279    ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
4280    adjustment. */
4281 
4282 static struct ipa_parm_adjustment *
4283 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4284 {
4285   int i, len;
4286 
4287   len = VEC_length (ipa_parm_adjustment_t, adjustments);
4288   for (i = 0; i < len; i++)
4289     {
4290       struct ipa_parm_adjustment *adj;
4291 
4292       adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4293       if (!adj->copy_param && adj->base == base)
4294 	return adj;
4295     }
4296 
4297   return NULL;
4298 }
4299 
4300 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4301    removed because its value is not used, replace the SSA_NAME (together with
4302    all of its uses) with one based on a new VAR_DECL and return true.
4303    ADJUSTMENTS is a pointer to an adjustments vector.  */
4304 
4305 static bool
4306 replace_removed_params_ssa_names (gimple stmt,
4307 				  ipa_parm_adjustment_vec adjustments)
4308 {
4309   struct ipa_parm_adjustment *adj;
4310   tree lhs, decl, repl, name;
4311 
4312   if (gimple_code (stmt) == GIMPLE_PHI)
4313     lhs = gimple_phi_result (stmt);
4314   else if (is_gimple_assign (stmt))
4315     lhs = gimple_assign_lhs (stmt);
4316   else if (is_gimple_call (stmt))
4317     lhs = gimple_call_lhs (stmt);
4318   else
4319     gcc_unreachable ();
4320 
4321   if (TREE_CODE (lhs) != SSA_NAME)
4322     return false;
4323   decl = SSA_NAME_VAR (lhs);
4324   if (TREE_CODE (decl) != PARM_DECL)
4325     return false;
4326 
4327   adj = get_adjustment_for_base (adjustments, decl);
4328   if (!adj)
4329     return false;
4330 
4331   repl = get_replaced_param_substitute (adj);
4332   name = make_ssa_name (repl, stmt);
4333 
4334   if (dump_file)
4335     {
4336       fprintf (dump_file, "replacing an SSA name of a removed param ");
4337       print_generic_expr (dump_file, lhs, 0);
4338       fprintf (dump_file, " with ");
4339       print_generic_expr (dump_file, name, 0);
4340       fprintf (dump_file, "\n");
4341     }
4342 
4343   if (is_gimple_assign (stmt))
4344     gimple_assign_set_lhs (stmt, name);
4345   else if (is_gimple_call (stmt))
4346     gimple_call_set_lhs (stmt, name);
4347   else
4348     gimple_phi_set_result (stmt, name);
4349 
4350   replace_uses_by (lhs, name);
4351   release_ssa_name (lhs);
4352   return true;
4353 }
4354 
4355 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4356    so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
4357    specifies whether the function should care about type incompatibility between
4358    the current and new expressions.  If it is false, the function will leave
4359    incompatibility issues to the caller.  Return true iff the expression
4360    was modified. */
4361 
4362 static bool
4363 sra_ipa_modify_expr (tree *expr, bool convert,
4364 		     ipa_parm_adjustment_vec adjustments)
4365 {
4366   int i, len;
4367   struct ipa_parm_adjustment *adj, *cand = NULL;
4368   HOST_WIDE_INT offset, size, max_size;
4369   tree base, src;
4370 
4371   len = VEC_length (ipa_parm_adjustment_t, adjustments);
4372 
4373   if (TREE_CODE (*expr) == BIT_FIELD_REF
4374       || TREE_CODE (*expr) == IMAGPART_EXPR
4375       || TREE_CODE (*expr) == REALPART_EXPR)
4376     {
4377       expr = &TREE_OPERAND (*expr, 0);
4378       convert = true;
4379     }
4380 
4381   base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
4382   if (!base || size == -1 || max_size == -1)
4383     return false;
4384 
4385   if (TREE_CODE (base) == MEM_REF)
4386     {
4387       offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4388       base = TREE_OPERAND (base, 0);
4389     }
4390 
4391   base = get_ssa_base_param (base);
4392   if (!base || TREE_CODE (base) != PARM_DECL)
4393     return false;
4394 
4395   for (i = 0; i < len; i++)
4396     {
4397       adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4398 
4399       if (adj->base == base &&
4400 	  (adj->offset == offset || adj->remove_param))
4401 	{
4402 	  cand = adj;
4403 	  break;
4404 	}
4405     }
4406   if (!cand || cand->copy_param || cand->remove_param)
4407     return false;
4408 
4409   if (cand->by_ref)
4410     src = build_simple_mem_ref (cand->reduction);
4411   else
4412     src = cand->reduction;
4413 
4414   if (dump_file && (dump_flags & TDF_DETAILS))
4415     {
4416       fprintf (dump_file, "About to replace expr ");
4417       print_generic_expr (dump_file, *expr, 0);
4418       fprintf (dump_file, " with ");
4419       print_generic_expr (dump_file, src, 0);
4420       fprintf (dump_file, "\n");
4421     }
4422 
4423   if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4424     {
4425       tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4426       *expr = vce;
4427     }
4428   else
4429     *expr = src;
4430   return true;
4431 }
4432 
4433 /* If the statement pointed to by STMT_PTR contains any expressions that need
4434    to be replaced as noted by ADJUSTMENTS, do so.  Handle any
4435    potential type incompatibilities (GSI is used to accommodate conversion
4436    statements and must point to the statement).  Return true iff the statement
4437    was modified.  */
4438 
4439 static bool
4440 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4441 		       ipa_parm_adjustment_vec adjustments)
4442 {
4443   gimple stmt = *stmt_ptr;
4444   tree *lhs_p, *rhs_p;
4445   bool any;
4446 
4447   if (!gimple_assign_single_p (stmt))
4448     return false;
4449 
4450   rhs_p = gimple_assign_rhs1_ptr (stmt);
4451   lhs_p = gimple_assign_lhs_ptr (stmt);
4452 
4453   any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4454   any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4455   if (any)
4456     {
4457       tree new_rhs = NULL_TREE;
4458 
4459       if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4460 	{
4461 	  if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4462 	    {
4463 	      /* V_C_Es of constructors can cause trouble (PR 42714).  */
4464 	      if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4465 		*rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4466 	      else
4467 		*rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
4468 	    }
4469 	  else
4470 	    new_rhs = fold_build1_loc (gimple_location (stmt),
4471 				       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4472 				       *rhs_p);
4473 	}
4474       else if (REFERENCE_CLASS_P (*rhs_p)
4475 	       && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4476 	       && !is_gimple_reg (*lhs_p))
4477 	/* This can happen when an assignment between two single-field
4478 	   structures is turned into an assignment between two pointers to
4479 	   scalars (PR 42237).  */
4480 	new_rhs = *rhs_p;
4481 
4482       if (new_rhs)
4483 	{
4484 	  tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4485 					       true, GSI_SAME_STMT);
4486 
4487 	  gimple_assign_set_rhs_from_tree (gsi, tmp);
4488 	}
4489 
4490       return true;
4491     }
4492 
4493   return false;
4494 }
4495 
4496 /* Traverse the function body and perform all modifications as described in
4497    ADJUSTMENTS.  Return true iff the CFG has been changed.  */
4498 
4499 static bool
4500 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4501 {
4502   bool cfg_changed = false;
4503   basic_block bb;
4504 
4505   FOR_EACH_BB (bb)
4506     {
4507       gimple_stmt_iterator gsi;
4508 
4509       for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4510 	replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4511 
4512       gsi = gsi_start_bb (bb);
4513       while (!gsi_end_p (gsi))
4514 	{
4515 	  gimple stmt = gsi_stmt (gsi);
4516 	  bool modified = false;
4517 	  tree *t;
4518 	  unsigned i;
4519 
4520 	  switch (gimple_code (stmt))
4521 	    {
4522 	    case GIMPLE_RETURN:
4523 	      t = gimple_return_retval_ptr (stmt);
4524 	      if (*t != NULL_TREE)
4525 		modified |= sra_ipa_modify_expr (t, true, adjustments);
4526 	      break;
4527 
4528 	    case GIMPLE_ASSIGN:
4529 	      modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4530 	      modified |= replace_removed_params_ssa_names (stmt, adjustments);
4531 	      break;
4532 
4533 	    case GIMPLE_CALL:
4534 	      /* Operands must be processed before the lhs.  */
4535 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
4536 		{
4537 		  t = gimple_call_arg_ptr (stmt, i);
4538 		  modified |= sra_ipa_modify_expr (t, true, adjustments);
4539 		}
4540 
4541 	      if (gimple_call_lhs (stmt))
4542 		{
4543 		  t = gimple_call_lhs_ptr (stmt);
4544 		  modified |= sra_ipa_modify_expr (t, false, adjustments);
4545 		  modified |= replace_removed_params_ssa_names (stmt,
4546 								adjustments);
4547 		}
4548 	      break;
4549 
4550 	    case GIMPLE_ASM:
4551 	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4552 		{
4553 		  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4554 		  modified |= sra_ipa_modify_expr (t, true, adjustments);
4555 		}
4556 	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4557 		{
4558 		  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4559 		  modified |= sra_ipa_modify_expr (t, false, adjustments);
4560 		}
4561 	      break;
4562 
4563 	    default:
4564 	      break;
4565 	    }
4566 
4567 	  if (modified)
4568 	    {
4569 	      update_stmt (stmt);
4570 	      if (maybe_clean_eh_stmt (stmt)
4571 		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4572 		cfg_changed = true;
4573 	    }
4574 	  gsi_next (&gsi);
4575 	}
4576     }
4577 
4578   return cfg_changed;
4579 }
4580 
4581 /* Call gimple_debug_bind_reset_value on all debug statements describing
4582    gimple register parameters that are being removed or replaced.  */
4583 
4584 static void
4585 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4586 {
4587   int i, len;
4588   gimple_stmt_iterator *gsip = NULL, gsi;
4589 
4590   if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
4591     {
4592       gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
4593       gsip = &gsi;
4594     }
4595   len = VEC_length (ipa_parm_adjustment_t, adjustments);
4596   for (i = 0; i < len; i++)
4597     {
4598       struct ipa_parm_adjustment *adj;
4599       imm_use_iterator ui;
4600       gimple stmt, def_temp;
4601       tree name, vexpr, copy = NULL_TREE;
4602       use_operand_p use_p;
4603 
4604       adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4605       if (adj->copy_param || !is_gimple_reg (adj->base))
4606 	continue;
4607       name = gimple_default_def (cfun, adj->base);
4608       vexpr = NULL;
4609       if (name)
4610 	FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4611 	  {
4612 	    /* All other users must have been removed by
4613 	       ipa_sra_modify_function_body.  */
4614 	    gcc_assert (is_gimple_debug (stmt));
4615 	    if (vexpr == NULL && gsip != NULL)
4616 	      {
4617 		gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4618 		vexpr = make_node (DEBUG_EXPR_DECL);
4619 		def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4620 							   NULL);
4621 		DECL_ARTIFICIAL (vexpr) = 1;
4622 		TREE_TYPE (vexpr) = TREE_TYPE (name);
4623 		DECL_MODE (vexpr) = DECL_MODE (adj->base);
4624 		gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4625 	      }
4626 	    if (vexpr)
4627 	      {
4628 		FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4629 		  SET_USE (use_p, vexpr);
4630 	      }
4631 	    else
4632 	      gimple_debug_bind_reset_value (stmt);
4633 	    update_stmt (stmt);
4634 	  }
4635       /* Create a VAR_DECL for debug info purposes.  */
4636       if (!DECL_IGNORED_P (adj->base))
4637 	{
4638 	  copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4639 			     VAR_DECL, DECL_NAME (adj->base),
4640 			     TREE_TYPE (adj->base));
4641 	  if (DECL_PT_UID_SET_P (adj->base))
4642 	    SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4643 	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4644 	  TREE_READONLY (copy) = TREE_READONLY (adj->base);
4645 	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4646 	  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4647 	  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4648 	  DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4649 	  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4650 	  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4651 	  SET_DECL_RTL (copy, 0);
4652 	  TREE_USED (copy) = 1;
4653 	  DECL_CONTEXT (copy) = current_function_decl;
4654 	  add_referenced_var (copy);
4655 	  add_local_decl (cfun, copy);
4656 	  DECL_CHAIN (copy) =
4657 	    BLOCK_VARS (DECL_INITIAL (current_function_decl));
4658 	  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4659 	}
4660       if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4661 	{
4662 	  gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4663 	  if (vexpr)
4664 	    def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4665 	  else
4666 	    def_temp = gimple_build_debug_source_bind (copy, adj->base,
4667 						       NULL);
4668 	  gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4669 	}
4670     }
4671 }
4672 
4673 /* Return false iff all callers have at least as many actual arguments as there
4674    are formal parameters in the current function.  */
4675 
4676 static bool
4677 not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
4678 					 void *data ATTRIBUTE_UNUSED)
4679 {
4680   struct cgraph_edge *cs;
4681   for (cs = node->callers; cs; cs = cs->next_caller)
4682     if (!callsite_has_enough_arguments_p (cs->call_stmt))
4683       return true;
4684 
4685   return false;
4686 }
4687 
4688 /* Convert all callers of NODE.  */
4689 
4690 static bool
4691 convert_callers_for_node (struct cgraph_node *node,
4692 		          void *data)
4693 {
4694   ipa_parm_adjustment_vec adjustments = (ipa_parm_adjustment_vec)data;
4695   bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4696   struct cgraph_edge *cs;
4697 
4698   for (cs = node->callers; cs; cs = cs->next_caller)
4699     {
4700       current_function_decl = cs->caller->decl;
4701       push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4702 
4703       if (dump_file)
4704 	fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4705 		 cs->caller->uid, cs->callee->uid,
4706 		 xstrdup (cgraph_node_name (cs->caller)),
4707 		 xstrdup (cgraph_node_name (cs->callee)));
4708 
4709       ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
4710 
4711       pop_cfun ();
4712     }
4713 
4714   for (cs = node->callers; cs; cs = cs->next_caller)
4715     if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4716 	&& gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
4717       compute_inline_parameters (cs->caller, true);
4718   BITMAP_FREE (recomputed_callers);
4719 
4720   return true;
4721 }
4722 
4723 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */
4724 
4725 static void
4726 convert_callers (struct cgraph_node *node, tree old_decl,
4727 		 ipa_parm_adjustment_vec adjustments)
4728 {
4729   tree old_cur_fndecl = current_function_decl;
4730   basic_block this_block;
4731 
4732   cgraph_for_node_and_aliases (node, convert_callers_for_node,
4733 			       adjustments, false);
4734 
4735   current_function_decl = old_cur_fndecl;
4736 
4737   if (!encountered_recursive_call)
4738     return;
4739 
4740   FOR_EACH_BB (this_block)
4741     {
4742       gimple_stmt_iterator gsi;
4743 
4744       for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4745         {
4746 	  gimple stmt = gsi_stmt (gsi);
4747 	  tree call_fndecl;
4748 	  if (gimple_code (stmt) != GIMPLE_CALL)
4749 	    continue;
4750 	  call_fndecl = gimple_call_fndecl (stmt);
4751 	  if (call_fndecl == old_decl)
4752 	    {
4753 	      if (dump_file)
4754 		fprintf (dump_file, "Adjusting recursive call");
4755 	      gimple_call_set_fndecl (stmt, node->decl);
4756 	      ipa_modify_call_arguments (NULL, stmt, adjustments);
4757 	    }
4758 	}
4759     }
4760 
4761   return;
4762 }
4763 
4764 /* Perform all modifications required in IPA-SRA for NODE to have parameters
4765    as given in ADJUSTMENTS.  Return true iff the CFG has been changed.  */
4766 
4767 static bool
4768 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4769 {
4770   struct cgraph_node *new_node;
4771   bool cfg_changed;
4772   VEC (cgraph_edge_p, heap) * redirect_callers = collect_callers_of_node (node);
4773 
4774   rebuild_cgraph_edges ();
4775   free_dominance_info (CDI_DOMINATORS);
4776   pop_cfun ();
4777   current_function_decl = NULL_TREE;
4778 
4779   new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
4780 					 false, NULL, NULL, "isra");
4781   VEC_free (cgraph_edge_p, heap, redirect_callers);
4782 
4783   current_function_decl = new_node->decl;
4784   push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
4785 
4786   ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4787   cfg_changed = ipa_sra_modify_function_body (adjustments);
4788   sra_ipa_reset_debug_stmts (adjustments);
4789   convert_callers (new_node, node->decl, adjustments);
4790   cgraph_make_node_local (new_node);
4791   return cfg_changed;
4792 }
4793 
4794 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4795    its attributes; return true otherwise.  NODE is the cgraph node of the
4796    current function.  */
4797 
4798 static bool
4799 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4800 {
4801   if (!cgraph_node_can_be_local_p (node))
4802     {
4803       if (dump_file)
4804 	fprintf (dump_file, "Function not local to this compilation unit.\n");
4805       return false;
4806     }
4807 
4808   if (!node->local.can_change_signature)
4809     {
4810       if (dump_file)
4811 	fprintf (dump_file, "Function can not change signature.\n");
4812       return false;
4813     }
4814 
4815   if (!tree_versionable_function_p (node->decl))
4816     {
4817       if (dump_file)
4818 	fprintf (dump_file, "Function is not versionable.\n");
4819       return false;
4820     }
4821 
4822   if (DECL_VIRTUAL_P (current_function_decl))
4823     {
4824       if (dump_file)
4825 	fprintf (dump_file, "Function is a virtual method.\n");
4826       return false;
4827     }
4828 
4829   if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
4830       && inline_summary (node)->size >= MAX_INLINE_INSNS_AUTO)
4831     {
4832       if (dump_file)
4833 	fprintf (dump_file, "Function too big to be made truly local.\n");
4834       return false;
4835     }
4836 
4837   if (!node->callers)
4838     {
4839       if (dump_file)
4840 	fprintf (dump_file,
4841 		 "Function has no callers in this compilation unit.\n");
4842       return false;
4843     }
4844 
4845   if (cfun->stdarg)
4846     {
4847       if (dump_file)
4848 	fprintf (dump_file, "Function uses stdarg.\n");
4849       return false;
4850     }
4851 
4852   if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
4853     return false;
4854 
4855   return true;
4856 }
4857 
4858 /* Perform early interprocedural SRA.  */
4859 
4860 static unsigned int
4861 ipa_early_sra (void)
4862 {
4863   struct cgraph_node *node = cgraph_get_node (current_function_decl);
4864   ipa_parm_adjustment_vec adjustments;
4865   int ret = 0;
4866 
4867   if (!ipa_sra_preliminary_function_checks (node))
4868     return 0;
4869 
4870   sra_initialize ();
4871   sra_mode = SRA_MODE_EARLY_IPA;
4872 
4873   if (!find_param_candidates ())
4874     {
4875       if (dump_file)
4876 	fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4877       goto simple_out;
4878     }
4879 
4880   if (cgraph_for_node_and_aliases (node, not_all_callers_have_enough_arguments_p,
4881 				   NULL, true))
4882     {
4883       if (dump_file)
4884 	fprintf (dump_file, "There are callers with insufficient number of "
4885 		 "arguments.\n");
4886       goto simple_out;
4887     }
4888 
4889   bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4890 				 func_param_count
4891 				 * last_basic_block_for_function (cfun));
4892   final_bbs = BITMAP_ALLOC (NULL);
4893 
4894   scan_function ();
4895   if (encountered_apply_args)
4896     {
4897       if (dump_file)
4898 	fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4899       goto out;
4900     }
4901 
4902   if (encountered_unchangable_recursive_call)
4903     {
4904       if (dump_file)
4905 	fprintf (dump_file, "Function calls itself with insufficient "
4906 		 "number of arguments.\n");
4907       goto out;
4908     }
4909 
4910   adjustments = analyze_all_param_acesses ();
4911   if (!adjustments)
4912     goto out;
4913   if (dump_file)
4914     ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4915 
4916   if (modify_function (node, adjustments))
4917     ret = TODO_update_ssa | TODO_cleanup_cfg;
4918   else
4919     ret = TODO_update_ssa;
4920   VEC_free (ipa_parm_adjustment_t, heap, adjustments);
4921 
4922   statistics_counter_event (cfun, "Unused parameters deleted",
4923 			    sra_stats.deleted_unused_parameters);
4924   statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4925 			    sra_stats.scalar_by_ref_to_by_val);
4926   statistics_counter_event (cfun, "Aggregate parameters broken up",
4927 			    sra_stats.aggregate_params_reduced);
4928   statistics_counter_event (cfun, "Aggregate parameter components created",
4929 			    sra_stats.param_reductions_created);
4930 
4931  out:
4932   BITMAP_FREE (final_bbs);
4933   free (bb_dereferences);
4934  simple_out:
4935   sra_deinitialize ();
4936   return ret;
4937 }
4938 
4939 /* Return true iff early IPA-SRA shall be performed.  */
4940 static bool
4941 ipa_early_sra_gate (void)
4942 {
4943   return flag_ipa_sra && dbg_cnt (eipa_sra);
4944 }
4945 
4946 struct gimple_opt_pass pass_early_ipa_sra =
4947 {
4948  {
4949   GIMPLE_PASS,
4950   "eipa_sra",	 			/* name */
4951   ipa_early_sra_gate,			/* gate */
4952   ipa_early_sra,			/* execute */
4953   NULL,					/* sub */
4954   NULL,					/* next */
4955   0,					/* static_pass_number */
4956   TV_IPA_SRA,				/* tv_id */
4957   0,	                                /* properties_required */
4958   0,					/* properties_provided */
4959   0,					/* properties_destroyed */
4960   0,					/* todo_flags_start */
4961   TODO_dump_cgraph              	/* todo_flags_finish */
4962  }
4963 };
4964