/* Gimple walk support.

   Copyright (C) 2007-2018 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "stmt.h"

/* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
   value is stored in WI->CALLBACK_RESULT.  Also, the statement that
   produced the value is returned if this statement has not been
   removed by a callback (wi->removed_stmt).  If the statement has
   been removed, NULL is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple *
walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
		     walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*pseq); !gsi_end_p (gsi); )
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
	{
	  /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
	     to hold it.  */
	  gcc_assert (wi);
	  wi->callback_result = ret;

	  return wi->removed_stmt ? NULL : gsi_stmt (gsi);
	}

      if (!wi->removed_stmt)
	gsi_next (&gsi);
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}


/* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
   changed by the callbacks.  */

gimple *
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
		 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_seq seq2 = seq;
  gimple *ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
  gcc_assert (seq2 == seq);
  return ret;
}
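
/* Usage sketch (illustrative only, not part of the original file): a pass
   can scan a statement sequence by pairing walk_gimple_seq with a statement
   callback.  The callback name below is hypothetical; the walk_stmt_fn
   signature and the walk_stmt_info fields are the ones declared in
   gimple-walk.h, and gimple_body only yields a flat sequence before the
   function has been lowered into a CFG.

     static tree
     note_first_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		      struct walk_stmt_info *wi ATTRIBUTE_UNUSED)
     {
       gimple *stmt = gsi_stmt (*gsi);
       if (is_gimple_call (stmt))
	 {
	   *handled_ops_p = true;	// do not walk the call's operands
	   return error_mark_node;	// any non-NULL tree stops the walk
	 }
       return NULL_TREE;
     }

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     gimple *first_call
       = walk_gimple_seq (gimple_body (current_function_decl),
			  note_first_call, NULL, &wi);

   On return, WI.CALLBACK_RESULT holds error_mark_node and FIRST_CALL is the
   GIMPLE_CALL that stopped the walk, or NULL if no call was found.  */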


/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gasm *stmt, walk_tree_fn callback_op,
		 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      if (wi)
	{
	  if (parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				       &allows_reg, &is_inout))
	    wi->val_only = (allows_reg || !allows_mem);
	}
      if (wi)
	wi->is_lhs = true;
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));

      if (wi)
	{
	  if (parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				      oconstraints, &allows_mem, &allows_reg))
	    {
	      wi->val_only = (allows_reg || !allows_mem);
	      /* Although an input constrained with "m" is not really an LHS,
		 we still need an lvalue.  */
	      wi->is_lhs = !wi->val_only;
	    }
	}
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple *stmt, walk_tree_fn callback_op,
		struct walk_stmt_info *wi)
{
  hash_set<tree> *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  if (wi)
    wi->stmt = stmt;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	}

      for (i = 1; i < gimple_num_ops (stmt); i++)
	{
	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
	 may use a COMPONENT_REF on the LHS.  */
      if (wi)
	{
	  /* If the RHS is of a non-renamable type or is a register variable,
	     we may use a COMPONENT_REF on the LHS.  */
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	  wi->is_lhs = true;
	}

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
	return ret;

      if (wi)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case GIMPLE_CALL:
      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}

      ret = walk_tree (gimple_call_chain_ptr (as_a <gcall *> (stmt)),
		       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  if (wi)
	    wi->val_only
	      = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      if (gimple_call_lhs (stmt))
	{
	  if (wi)
	    {
	      wi->is_lhs = true;
	      wi->val_only
		= is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
	    }

	  ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
	  if (ret)
	    return ret;
	}

      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (as_a <gcatch *> (stmt)),
		       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (as_a <gasm *> (stmt), callback_op, wi);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      {
	gomp_continue *cont_stmt = as_a <gomp_continue *> (stmt);
	ret = walk_tree (gimple_omp_continue_control_def_ptr (cont_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;

	ret = walk_tree (gimple_omp_continue_control_use_ptr (cont_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_CRITICAL:
      {
	gomp_critical *omp_stmt = as_a <gomp_critical *> (stmt);
	ret = walk_tree (gimple_omp_critical_name_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_critical_clauses_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_ORDERED:
      {
	gomp_ordered *omp_stmt = as_a <gomp_ordered *> (stmt);
	ret = walk_tree (gimple_omp_ordered_clauses_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	{
	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
      {
	gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
	ret = walk_tree (gimple_omp_parallel_clauses_ptr (omp_par_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_parallel_child_fn_ptr (omp_par_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_parallel_data_arg_ptr (omp_par_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TARGET:
      {
	gomp_target *omp_stmt = as_a <gomp_target *> (stmt);
	ret = walk_tree (gimple_omp_target_clauses_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_target_child_fn_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_target_data_arg_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_TEAMS:
      ret = walk_tree (gimple_omp_teams_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      {
	gomp_atomic_load *omp_stmt = as_a <gomp_atomic_load *> (stmt);
	ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      {
	gomp_atomic_store *omp_stmt = as_a <gomp_atomic_store *> (stmt);
	ret = walk_tree (gimple_omp_atomic_store_val_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (stmt);

	ret = walk_tree (gimple_transaction_label_norm_ptr (txn),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_transaction_label_uninst_ptr (txn),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_transaction_label_over_ptr (txn),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_RETURN:
      ret = walk_tree (gimple_omp_return_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_PREDICT:
      break;

    default:
      {
	enum gimple_statement_structure_enum gss;
	gss = gimple_statement_structure (stmt);
	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
	      if (ret)
		return ret;
	    }
      }
      break;
    }

  return NULL_TREE;
}
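
/* Usage sketch (illustrative only, not part of the original file): an
   operand callback has the walk_tree_fn signature, and the DATA argument it
   receives is the walk_stmt_info pointer passed to walk_gimple_op.  The
   callback name below is hypothetical.

     static tree
     find_addressable_decl (tree *tp, int *walk_subtrees, void *data)
     {
       struct walk_stmt_info *wi = (struct walk_stmt_info *) data;

       // Only look at operands walked as left-hand sides.
       if (wi->is_lhs && DECL_P (*tp) && TREE_ADDRESSABLE (*tp))
	 return *tp;		// a non-NULL return stops the walk
       if (TYPE_P (*tp))
	 *walk_subtrees = 0;	// do not recurse into types
       return NULL_TREE;
     }

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     tree found = walk_gimple_op (stmt, find_addressable_decl, &wi);

   Within the callback, WI->IS_LHS and WI->VAL_ONLY reflect the position of
   the operand as set up by the cases above, and WI->STMT is the statement
   being walked.  */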


/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple *ret;
  tree tree_ret;
  gimple *stmt = gsi_stmt (*gsi);

  if (wi)
    {
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
	input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      if (wi && wi->removed_stmt)
	return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq_mod (gimple_bind_body_ptr (as_a <gbind *> (stmt)),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (
				   as_a <gcatch *> (stmt)),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (eh_else_stmt),
				   callback_stmt, callback_op, wi);
	if (ret)
	  return wi->callback_result;
	ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (eh_else_stmt),
				   callback_stmt, callback_op, wi);
	if (ret)
	  return wi->callback_result;
      }
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_GRID_BODY:
      ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (
				   as_a <gtransaction *> (stmt)),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}
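
/* Usage sketch (illustrative only, not part of the original file): a caller
   can apply walk_gimple_stmt directly to each statement it iterates over;
   the switch above then descends into any substatements (GIMPLE_BIND,
   GIMPLE_TRY, the OMP region statements, ...) if the statement has them.
   Reusing the hypothetical find_addressable_decl callback sketched above,
   with BB and PASS_PRIVATE_DATA as placeholders:

     gimple_stmt_iterator gsi;
     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = pass_private_data;	// per-pass state, available in callbacks

     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
       if (walk_gimple_stmt (&gsi, NULL, find_addressable_decl, &wi))
	 break;

   With CALLBACK_STMT omitted, every operand of every statement (including
   operands of substatements) is handed to the operand callback through
   walk_gimple_op, and a non-NULL callback result ends the scan.  */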

/* From a tree operand OP return the base of a load or store operation
   or NULL_TREE if OP is not a load or a store.  */

static tree
get_base_loadstore (tree op)
{
  while (handled_component_p (op))
    op = TREE_OPERAND (op, 0);
  if (DECL_P (op)
      || INDIRECT_REF_P (op)
      || TREE_CODE (op) == MEM_REF
      || TREE_CODE (op) == TARGET_MEM_REF)
    return op;
  return NULL_TREE;
}
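
/* For example (illustrative only, not part of the original file): given the
   reference a.b[i].c, the loop above strips the COMPONENT_REF and ARRAY_REF
   wrappers and returns the VAR_DECL for a; for a COMPONENT_REF around a
   MEM_REF based on some pointer, it returns the MEM_REF itself; for a bare
   SSA name or a constant it returns NULL_TREE, since those are neither
   loads nor stores.  */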


/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, stores and address-taken operands,
   passing the STMT, the base of the operand, the operand itself containing
   the base and DATA to it.  The base will be either a decl, an indirect
   reference (including TARGET_MEM_REF) or the argument of an address
   expression.
   Returns the results of these callbacks or'ed.  */

bool
walk_stmt_load_store_addr_ops (gimple *stmt, void *data,
			       walk_stmt_load_store_addr_fn visit_load,
			       walk_stmt_load_store_addr_fn visit_store,
			       walk_stmt_load_store_addr_fn visit_addr)
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      tree lhs, rhs, arg;
      if (visit_store)
	{
	  arg = gimple_assign_lhs (stmt);
	  lhs = get_base_loadstore (arg);
	  if (lhs)
	    ret |= visit_store (stmt, lhs, arg, data);
	}
      arg = gimple_assign_rhs1 (stmt);
      rhs = arg;
      while (handled_component_p (rhs))
	rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
	{
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), arg, data);
	  else if (TREE_CODE (rhs) == TARGET_MEM_REF
		   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), arg,
			       data);
	  else if (TREE_CODE (rhs) == OBJ_TYPE_REF
		   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
						   0), arg, data);
	  else if (TREE_CODE (rhs) == CONSTRUCTOR)
	    {
	      unsigned int ix;
	      tree val;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
		if (TREE_CODE (val) == ADDR_EXPR)
		  ret |= visit_addr (stmt, TREE_OPERAND (val, 0), arg, data);
		else if (TREE_CODE (val) == OBJ_TYPE_REF
			 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
		  ret |= visit_addr (stmt,
				     TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
						   0), arg, data);
	    }
	  lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == TARGET_MEM_REF
	      && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), lhs, data);
	}
      if (visit_load)
	{
	  rhs = get_base_loadstore (rhs);
	  if (rhs)
	    ret |= visit_load (stmt, rhs, arg, data);
	}
    }
  else if (visit_addr
	   && (is_gimple_assign (stmt)
	       || gimple_code (stmt) == GIMPLE_COND))
    {
      for (i = 0; i < gimple_num_ops (stmt); ++i)
	{
	  tree op = gimple_op (stmt, i);
	  if (op == NULL_TREE)
	    ;
	  else if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
	  /* COND_EXPR and VCOND_EXPR rhs1 argument is a comparison
	     tree with two operands.  */
	  else if (i == 1 && COMPARISON_CLASS_P (op))
	    {
	      if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
						       0), op, data);
	      if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
						       0), op, data);
	    }
	}
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      if (visit_store)
	{
	  tree arg = gimple_call_lhs (call_stmt);
	  if (arg)
	    {
	      tree lhs = get_base_loadstore (arg);
	      if (lhs)
		ret |= visit_store (stmt, lhs, arg, data);
	    }
	}
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_call_num_args (call_stmt); ++i)
	  {
	    tree arg = gimple_call_arg (call_stmt, i);
	    if (visit_addr
		&& TREE_CODE (arg) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (arg, 0), arg, data);
	    else if (visit_load)
	      {
		tree rhs = get_base_loadstore (arg);
		if (rhs)
		  ret |= visit_load (stmt, rhs, arg, data);
	      }
	  }
      if (visit_addr
	  && gimple_call_chain (call_stmt)
	  && TREE_CODE (gimple_call_chain (call_stmt)) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (call_stmt), 0),
			   gimple_call_chain (call_stmt), data);
      if (visit_addr
	  && gimple_call_return_slot_opt_p (call_stmt)
	  && gimple_call_lhs (call_stmt) != NULL_TREE
	  && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call_stmt))))
	ret |= visit_addr (stmt, gimple_call_lhs (call_stmt),
			   gimple_call_lhs (call_stmt), data);
    }
  else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
    {
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (asm_stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = get_base_loadstore (TREE_VALUE (link));
	    if (op && visit_store)
	      ret |= visit_store (stmt, op, TREE_VALUE (link), data);
	    if (visit_addr)
	      {
		constraint = TREE_STRING_POINTER
		    (TREE_VALUE (TREE_PURPOSE (link)));
		oconstraints[i] = constraint;
		parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
					 &allows_reg, &is_inout);
		if (op && !allows_reg && allows_mem)
		  ret |= visit_addr (stmt, op, TREE_VALUE (link), data);
	      }
	  }
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (visit_addr
		&& TREE_CODE (op) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
	    else if (visit_load || visit_addr)
	      {
		op = get_base_loadstore (op);
		if (op)
		  {
		    if (visit_load)
		      ret |= visit_load (stmt, op, TREE_VALUE (link), data);
		    if (visit_addr)
		      {
			constraint = TREE_STRING_POINTER
			    (TREE_VALUE (TREE_PURPOSE (link)));
			parse_input_constraint (&constraint, 0, 0, noutputs,
						0, oconstraints,
						&allows_mem, &allows_reg);
			if (!allows_reg && allows_mem)
			  ret |= visit_addr (stmt, op, TREE_VALUE (link),
					     data);
		      }
		  }
	      }
	  }
    }
  else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    {
      tree op = gimple_return_retval (return_stmt);
      if (op)
	{
	  if (visit_addr
	      && TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
	  else if (visit_load)
	    {
	      tree base = get_base_loadstore (op);
	      if (base)
		ret |= visit_load (stmt, base, op, data);
	    }
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_PHI)
    {
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
	{
	  tree op = gimple_phi_arg_def (stmt, i);
	  if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_GOTO)
    {
      tree op = gimple_goto_dest (stmt);
      if (TREE_CODE (op) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
    }

  return ret;
}
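
/* Usage sketch (illustrative only, not part of the original file): a
   walk_stmt_load_store_addr_fn receives the statement, the base of the
   reference, the full reference and the DATA cookie; its boolean result is
   only or'ed into the overall return value and does not cut the walk short.
   The callback name below is hypothetical.

     static bool
     mark_decl_read (gimple *stmt ATTRIBUTE_UNUSED, tree base,
		     tree ref ATTRIBUTE_UNUSED, void *data)
     {
       hash_set<tree> *read_decls = (hash_set<tree> *) data;
       if (DECL_P (base))
	 read_decls->add (base);
       return false;
     }

     hash_set<tree> read_decls;
     walk_stmt_load_store_addr_ops (stmt, &read_decls, mark_decl_read,
				    NULL, NULL);

   collects every decl whose value the statement may read, ignoring stores
   and taken addresses (walk_stmt_load_store_ops below is the convenience
   wrapper for exactly this no-VISIT_ADDR case).  */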

/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
   should make a faster clone for this case.  */

bool
walk_stmt_load_store_ops (gimple *stmt, void *data,
			  walk_stmt_load_store_addr_fn visit_load,
			  walk_stmt_load_store_addr_fn visit_store)
{
  return walk_stmt_load_store_addr_ops (stmt, data,
					visit_load, visit_store, NULL);
}
941