xref: /dragonfly/contrib/gcc-4.7/gcc/tree-stdarg.c (revision 74ad0aa1)
1 /* Pass computing data for optimizing stdarg functions.
2    Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011
3    Free Software Foundation, Inc.
4    Contributed by Jakub Jelinek <jakub@redhat.com>
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12 
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16 GNU General Public License for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "function.h"
28 #include "langhooks.h"
29 #include "gimple-pretty-print.h"
30 #include "target.h"
31 #include "tree-flow.h"
32 #include "tree-pass.h"
33 #include "tree-stdarg.h"
34 
35 /* A simple pass that attempts to optimize stdarg functions on architectures
36    that need to save register arguments to stack on entry to stdarg functions.
37    If the function doesn't use any va_start macros, no registers need to
38    be saved.  If va_start macros are used, the va_list variables don't escape
39    the function, it is only necessary to save registers that will be used
40    in va_arg macros.  E.g. if va_arg is only used with integral types
41    in the function, floating point registers don't need to be saved, etc.  */
42 
43 
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  VEC (edge, heap) *stack = NULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  /* Same block: trivially executed equally often.  */
  if (va_arg_bb == va_start_bb)
    return true;

  /* If VA_ARG_BB can be reached without going through VA_START_BB,
     the property clearly doesn't hold.  */
  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block);
  sbitmap_zero (visited);
  ret = true;

  /* Walk the CFG backwards from VA_ARG_BB; every path must stop at
     VA_START_BB without re-entering VA_ARG_BB (which would indicate
     a loop executing VA_ARG_BB more often).  */
  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    VEC_safe_push (edge, heap, stack, e);

  while (! VEC_empty (edge, stack))
    {
      basic_block src;

      e = VEC_pop (edge, stack);
      src = e->src;

      /* Abnormal/EH edges make the execution count unpredictable.  */
      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      /* Because VA_START_BB dominates VA_ARG_BB, the backward walk
	 cannot reach the entry block without meeting VA_START_BB.  */
      gcc_assert (src != ENTRY_BLOCK_PTR);

      if (! TEST_BIT (visited, src->index))
	{
	  SET_BIT (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    VEC_safe_push (edge, heap, stack, e);
	}
    }

  VEC_free (edge, heap, stack);
  sbitmap_free (visited);
  return ret;
}
106 
107 
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
   GPR_P is true if this is GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the SSA-version -> counter-offset cache; -1 marks
     entries not yet computed.  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  /* First pass: walk the SSA def chain backwards from RHS, summing
     constant increments into RET, until we either hit a name with a
     cached offset or a load of COUNTER itself.  */
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  /* Cached value: adjust RET by the difference between the
	     current counter value and the cached offset.  */
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return (unsigned HOST_WIDE_INT) -1;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      /* Look through plain copies and casts.  */
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      /* lhs = rhs1 + CST: accumulate the constant bump.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && host_integerp (gimple_assign_rhs2 (stmt), 1))
	{
	  ret += tree_low_cst (gimple_assign_rhs2 (stmt), 1);
	  lhs = rhs1;
	  continue;
	}

      /* lhs = &MEM_REF[name_N + CST]: equivalent constant bump.  */
      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && host_integerp (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1))
	{
	  ret += tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1);
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return (unsigned HOST_WIDE_INT) -1;

      /* The chain must terminate in a load of COUNTER itself.  */
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return (unsigned HOST_WIDE_INT) -1;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  /* Must be the same field of the same va_list object.  */
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return (unsigned HOST_WIDE_INT) -1;
	}
      else if (counter != rhs)
	return (unsigned HOST_WIDE_INT) -1;

      lhs = NULL;
    }

  /* Second pass: walk the same chain again and record in SI->offsets
     the counter value each intermediate SSA name corresponds to, so
     later queries (and check_all_va_list_escapes) can reuse it.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      /* Saturate cached offsets at MAX_SIZE.  */
      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      /* Same pattern matching as the first pass; VAL is decremented by
	 each constant to yield the counter value at earlier names.  */
      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && host_integerp (gimple_assign_rhs2 (stmt), 1))
	{
	  val -= tree_low_cst (gimple_assign_rhs2 (stmt), 1);
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && host_integerp (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1))
	{
	  val -= tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1);
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
257 
258 
259 /* Called by walk_tree to look for references to va_list variables.  */
260 
261 static tree
262 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
263 			void *data)
264 {
265   bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
266   tree var = *tp;
267 
268   if (TREE_CODE (var) == SSA_NAME)
269     var = SSA_NAME_VAR (var);
270 
271   if (TREE_CODE (var) == VAR_DECL
272       && bitmap_bit_p (va_list_vars, DECL_UID (var)))
273     return var;
274 
275   return NULL_TREE;
276 }
277 
278 
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  /* Lazily decide whether SI->bb is executed at most once for each
     va_start; only then can counter bumps be summed meaningfully.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* The "+ 1 > 1" comparison rejects both a zero bump and the
     (unsigned HOST_WIDE_INT) -1 failure value in one test.  */
  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  /* Otherwise fall back to the conservative maximum: any write we
     couldn't analyze, and any access when sizes can't be computed,
     forces saving all registers of this kind.  */
  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
330 
331 
332 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
333    If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
334    is false, AP has been seen in VAR = AP assignment.
335    Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
336    va_arg operation that doesn't cause the va_list variable to escape
337    current function.  */
338 
339 static bool
340 va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
341 			   bool write_p)
342 {
343   tree base;
344 
345   if (TREE_CODE (ap) != COMPONENT_REF
346       || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
347     return false;
348 
349   if (TREE_CODE (var) != SSA_NAME
350       || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (var))))
351     return false;
352 
353   base = get_base_address (ap);
354   if (TREE_CODE (base) != VAR_DECL
355       || !bitmap_bit_p (si->va_list_vars, DECL_UID (base)))
356     return false;
357 
358   if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
359     va_list_counter_op (si, ap, var, true, write_p);
360   else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
361     va_list_counter_op (si, ap, var, false, write_p);
362 
363   return true;
364 }
365 
366 
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  /* AP must be one of the tracked va_list variables.  */
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
    return false;

  /* TEM must be a function-local SSA temporary that is not itself
     a va_list variable.  */
  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars,
		       DECL_UID (SSA_NAME_VAR (tem)))
      || is_global_var (SSA_NAME_VAR (tem)))
    return false;

  /* Lazily decide whether SI->bb is executed at most once for each
     va_start (same computation as in va_list_counter_op).  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  /* A return of -1 means the counter value at TEM isn't computable.  */
  if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars,
		  DECL_UID (SSA_NAME_VAR (tem)));
  return true;
}
412 
413 
414 /* Check for:
415      tem1 = AP;
416      TEM2 = tem1 + CST;
417      AP = TEM2;
418    sequence and update cfun->va_list_gpr_size.  Return true if found.  */
419 
420 static bool
421 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
422 {
423   unsigned HOST_WIDE_INT increment;
424 
425   if (TREE_CODE (ap) != VAR_DECL
426       || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
427     return false;
428 
429   if (TREE_CODE (tem2) != SSA_NAME
430       || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (tem2))))
431     return false;
432 
433   if (si->compute_sizes <= 0)
434     return false;
435 
436   increment = va_list_counter_bump (si, ap, tem2, true);
437   if (increment + 1 <= 1)
438     return false;
439 
440   if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
441     cfun->va_list_gpr_size += increment;
442   else
443     cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
444 
445   return true;
446 }
447 
448 
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  /* Only pointer-valued RHS can carry a va_list value.  */
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  /* RHS must involve one of the already-tracked escape temporaries,
     either directly or through &MEM_REF[temp ...].  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars,
			  DECL_UID (SSA_NAME_VAR (rhs))))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars,
			  DECL_UID (SSA_NAME_VAR (TREE_OPERAND
						  (TREE_OPERAND (rhs, 0), 0)))))
	return;
    }
  else
    return;

  /* Copying the value somewhere non-local means the va_list escapes.  */
  if (TREE_CODE (lhs) != SSA_NAME || is_global_var (SSA_NAME_VAR (lhs)))
    {
      si->va_list_escapes = true;
      return;
    }

  /* Lazily decide whether SI->bb is executed at most once for each
     va_start (same computation as in va_list_counter_op).  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  /* If the counter value at LHS can't be computed, give up as well.  */
  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == (unsigned HOST_WIDE_INT) -1)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS is a trackable local temporary carrying a va_list value.  */
  bitmap_set_bit (si->va_list_escape_vars,
		  DECL_UID (SSA_NAME_VAR (lhs)));
}
517 
518 
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  /* Debug statements never cause a real escape.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      /* Only uses of tracked escape temporaries matter.  */
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  DECL_UID (SSA_NAME_VAR (use))))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  A load through the tracked pointer:
		     bump va_list_gpr_size by the end offset of the
		     access, using the offset cached for USE by
		     va_list_counter_bump.  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
			  	 + tree_low_cst (TREE_OPERAND (rhs, 1), 0)
				 + tree_low_cst (access_size, 1);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      /* Copy into another tracked temporary: OK.  */
		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   DECL_UID (SSA_NAME_VAR (lhs))))
			continue;

		      /* Copy back into a tracked va_list variable: OK.  */
		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs)))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      /* NOTE(review): LHS is assumed to be an SSA_NAME
			 here (no TREE_CODE check before SSA_NAME_VAR,
			 unlike the branch above) — confirm this holds
			 for all &MEM_REF[use] assignments.  */
		      if (bitmap_bit_p (si->va_list_escape_vars,
					DECL_UID (SSA_NAME_VAR (lhs))))
			continue;
		    }
		}

	      /* Any other use of the temporary counts as an escape.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
622 
623 
624 /* Return true if this optimization pass should be done.
625    It makes only sense for stdarg functions.  */
626 
627 static bool
628 gate_optimize_stdarg (void)
629 {
630   /* This optimization is only for stdarg functions.  */
631   return cfun->stdarg != 0;
632 }
633 
634 
/* Entry point to the stdarg optimization pass.  Computes
   cfun->va_list_gpr_size and cfun->va_list_fpr_size, falling back to
   the conservative maxima whenever a va_list may escape the current
   function or its uses can't be analyzed.  */

static unsigned int
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  /* Start optimistic (nothing needs saving) and accumulate upwards.  */
  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* Determine whether the ABI's va_list is a plain void */char *
     pointer (single counter) or a structure with counter fields.  */
  cfun_va_list = targetm.fn_abi_va_list (cfun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  /* Phase 1: find every va_start call and record the local va_list
     variables it initializes in si.va_list_vars.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree callee, ap;

	  if (!is_gimple_call (stmt))
	    continue;

	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = gimple_call_arg (stmt, 0);

	  /* The argument of va_start must be &ap (possibly &ap[0]).  */
	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
	        {
	          va_list_escapes = true;
	          break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  /* AP must be a local variable of the ABI va_list type.  */
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (cfun->decl))
	      || TREE_CODE (ap) != VAR_DECL)
	    {
	      va_list_escapes = true;
	      break;
	    }

	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap));

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter fields
     are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  /* Phase 2: walk every statement, classifying each va_list use as a
     recognized va_arg pattern or an escape.  */
  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      /* -1 = not yet computed for this bb (see va_list_counter_op).  */
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  */
      if (va_list_simple_ptr)
	{
	  tree lhs, rhs;
	  use_operand_p uop;
	  ssa_op_iter soi;

	  for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	    {
	      gimple phi = gsi_stmt (i);
	      lhs = PHI_RESULT (phi);

	      if (!is_gimple_reg (lhs))
		continue;

	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_gimple_stmt (dump_file, phi, 0, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }
	}

      for (i = gsi_start_bb (bb);
	   !gsi_end_p (i) && !va_list_escapes;
	   gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);

	      if (callee
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
		continue;
	    }

	  if (is_gimple_assign (stmt))
	    {
	      tree lhs = gimple_assign_lhs (stmt);
	      tree rhs = gimple_assign_rhs1 (stmt);

	      if (va_list_simple_ptr)
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for tem = ap.  */
		      else if (va_list_ptr_read (&si, rhs, lhs))
			continue;

		      /* Check for the last insn in:
			 tem1 = ap;
			 tem2 = tem1 + CST;
			 ap = tem2;
			 sequence.  */
		      else if (va_list_ptr_write (&si, lhs, rhs))
			continue;
		    }

		  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
		      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
			  == GIMPLE_SINGLE_RHS))
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for ap[0].field = temp.  */
		      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
			continue;

		      /* Check for temp = ap[0].field.  */
		      else if (va_list_counter_struct_op (&si, rhs, lhs,
							  false))
			continue;
		    }

		  /* Do any architecture specific checking.  */
		  if (targetm.stdarg_optimize_hook
		      && targetm.stdarg_optimize_hook (&si, stmt))
		    continue;
		}
	    }
	  else if (is_gimple_debug (stmt))
	    continue;

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_gimple_op (stmt, find_va_list_reference, &wi))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  /* Phase 3: verify none of the tracked pointer temporaries escapes.  */
  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  /* On any escape, fall back to saving all argument registers.  */
  if (va_list_escapes)
    {
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}
957 
958 
/* Pass descriptor for the stdarg optimization.  Gated on cfun->stdarg;
   requires a CFG in SSA form and provides/destroys no IL properties —
   it only computes cfun->va_list_{g,f}pr_size.  */

struct gimple_opt_pass pass_stdarg =
{
 {
  GIMPLE_PASS,
  "stdarg",				/* name */
  gate_optimize_stdarg,			/* gate */
  execute_optimize_stdarg,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0             			/* todo_flags_finish */
 }
};
977