1 /* Inline functions for tree-flow.h
2 Copyright (C) 2001, 2003, 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
21
22 #ifndef _TREE_FLOW_INLINE_H
23 #define _TREE_FLOW_INLINE_H 1
24
25 /* Inline functions for manipulating various data structures defined in
26 tree-flow.h. See tree-flow.h for documentation. */
27
28 /* Initialize the hashtable iterator HTI to point to hashtable TABLE and return the first live element, or NULL if the table has none.  */
29
30 static inline void *
31 first_htab_element (htab_iterator *hti, htab_t table)
32 {
33 hti->htab = table;
34 hti->slot = table->entries;
35 hti->limit = hti->slot + htab_size (table);
36 do
37 {
38 PTR x = *(hti->slot);
39 if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
40 break;
41 } while (++(hti->slot) < hti->limit);
42
43 if (hti->slot < hti->limit)
44 return *(hti->slot);
45 return NULL;
46 }
47
48 /* Return true if the hashtable iterator HTI has reached the end of the
49 hashtable (there are no more live slots to visit).  */
50
51 static inline bool
52 end_htab_p (htab_iterator *hti)
53 {
54 if (hti->slot >= hti->limit)
55 return true;
56 return false;
57 }
58
59 /* Advance the hashtable iterator pointed to by HTI to the next element of the
60 hashtable. */
61
62 static inline void *
63 next_htab_element (htab_iterator *hti)
64 {
65 while (++(hti->slot) < hti->limit)
66 {
67 PTR x = *(hti->slot);
68 if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
69 return x;
70 };
71 return NULL;
72 }
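/* Example (illustrative sketch; the helper name is hypothetical and not part
   of the original API): the three hashtable iteration primitives above are
   meant to be used together as a loop.  A caller counting the live entries
   of TABLE might look like this.  */

static inline unsigned int
example_count_htab_entries (htab_t table)
{
  htab_iterator hti;
  unsigned int n = 0;
  PTR entry;

  for (entry = first_htab_element (&hti, table);
       !end_htab_p (&hti);
       entry = next_htab_element (&hti))
    if (entry)
      n++;

  return n;
}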
73
74 /* Initialize ITER to point to the first referenced variable in the
75 referenced_vars hashtable, and return that variable. */
76
77 static inline tree
78 first_referenced_var (referenced_var_iterator *iter)
79 {
80 struct int_tree_map *itm;
81 itm = (struct int_tree_map *) first_htab_element (&iter->hti,
82 referenced_vars);
83 if (!itm)
84 return NULL;
85 return itm->to;
86 }
87
88 /* Return true if we have hit the end of the referenced variables ITER is
89 iterating through. */
90
91 static inline bool
92 end_referenced_vars_p (referenced_var_iterator *iter)
93 {
94 return end_htab_p (&iter->hti);
95 }
96
97 /* Make ITER point to the next referenced_var in the referenced_var hashtable,
98 and return that variable. */
99
100 static inline tree
101 next_referenced_var (referenced_var_iterator *iter)
102 {
103 struct int_tree_map *itm;
104 itm = (struct int_tree_map *) next_htab_element (&iter->hti);
105 if (!itm)
106 return NULL;
107 return itm->to;
108 }
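/* Example (illustrative sketch; the helper name is hypothetical): walking
   every referenced variable with the primitives above.  The
   FOR_EACH_REFERENCED_VAR macro used below is built on these same
   routines.  */

static inline unsigned int
example_count_referenced_vars (void)
{
  referenced_var_iterator rvi;
  tree var;
  unsigned int n = 0;

  for (var = first_referenced_var (&rvi);
       !end_referenced_vars_p (&rvi);
       var = next_referenced_var (&rvi))
    if (var)
      n++;

  return n;
}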
109
110 /* Fill up VEC with the variables in the referenced vars hashtable. */
111
112 static inline void
113 fill_referenced_var_vec (VEC (tree, heap) **vec)
114 {
115 referenced_var_iterator rvi;
116 tree var;
117 *vec = NULL;
118 FOR_EACH_REFERENCED_VAR (var, rvi)
119 VEC_safe_push (tree, heap, *vec, var);
120 }
121
122 /* Return the variable annotation for T, which must be a _DECL node.
123 Return NULL if the variable annotation doesn't already exist. */
124 static inline var_ann_t
125 var_ann (tree t)
126 {
127 gcc_assert (t);
128 gcc_assert (DECL_P (t));
129 gcc_assert (TREE_CODE (t) != FUNCTION_DECL);
130 gcc_assert (!t->common.ann || t->common.ann->common.type == VAR_ANN);
131
132 return (var_ann_t) t->common.ann;
133 }
134
135 /* Return the variable annotation for T, which must be a _DECL node.
136 Create the variable annotation if it doesn't exist. */
137 static inline var_ann_t
138 get_var_ann (tree var)
139 {
140 var_ann_t ann = var_ann (var);
141 return (ann) ? ann : create_var_ann (var);
142 }
143
144 /* Return the function annotation for T, which must be a FUNCTION_DECL node.
145 Return NULL if the function annotation doesn't already exist. */
146 static inline function_ann_t
147 function_ann (tree t)
148 {
149 gcc_assert (t);
150 gcc_assert (TREE_CODE (t) == FUNCTION_DECL);
151 gcc_assert (!t->common.ann || t->common.ann->common.type == FUNCTION_ANN);
152
153 return (function_ann_t) t->common.ann;
154 }
155
156 /* Return the function annotation for T, which must be a FUNCTION_DECL node.
157 Create the function annotation if it doesn't exist. */
158 static inline function_ann_t
159 get_function_ann (tree var)
160 {
161 function_ann_t ann = function_ann (var);
162 gcc_assert (!var->common.ann || var->common.ann->common.type == FUNCTION_ANN);
163 return (ann) ? ann : create_function_ann (var);
164 }
165
166 /* Return the statement annotation for T, which must be a statement
167 node. Return NULL if the statement annotation doesn't exist. */
168 static inline stmt_ann_t
169 stmt_ann (tree t)
170 {
171 #ifdef ENABLE_CHECKING
172 gcc_assert (is_gimple_stmt (t));
173 #endif
174 gcc_assert (!t->common.ann || t->common.ann->common.type == STMT_ANN);
175 return (stmt_ann_t) t->common.ann;
176 }
177
178 /* Return the statement annotation for T, which must be a statement
179 node. Create the statement annotation if it doesn't exist. */
180 static inline stmt_ann_t
181 get_stmt_ann (tree stmt)
182 {
183 stmt_ann_t ann = stmt_ann (stmt);
184 return (ann) ? ann : create_stmt_ann (stmt);
185 }
186
187 /* Return the annotation type for annotation ANN. */
188 static inline enum tree_ann_type
189 ann_type (tree_ann_t ann)
190 {
191 return ann->common.type;
192 }
193
194 /* Return the basic block for statement T. */
195 static inline basic_block
196 bb_for_stmt (tree t)
197 {
198 stmt_ann_t ann;
199
200 if (TREE_CODE (t) == PHI_NODE)
201 return PHI_BB (t);
202
203 ann = stmt_ann (t);
204 return ann ? ann->bb : NULL;
205 }
206
207 /* Return the may_aliases vector for variable VAR, or NULL if it has
208 no may aliases.  */
209 static inline VEC(tree, gc) *
210 may_aliases (tree var)
211 {
212 var_ann_t ann = var_ann (var);
213 return ann ? ann->may_aliases : NULL;
214 }
215
216 /* Return the line number for EXPR, or return -1 if we have no line
217 number information for it. */
218 static inline int
219 get_lineno (tree expr)
220 {
221 if (expr == NULL_TREE)
222 return -1;
223
224 if (TREE_CODE (expr) == COMPOUND_EXPR)
225 expr = TREE_OPERAND (expr, 0);
226
227 if (! EXPR_HAS_LOCATION (expr))
228 return -1;
229
230 return EXPR_LINENO (expr);
231 }
232
233 /* Return the file name for EXPR, or return "???" if we have no
234 filename information. */
235 static inline const char *
236 get_filename (tree expr)
237 {
238 const char *filename;
239 if (expr == NULL_TREE)
240 return "???";
241
242 if (TREE_CODE (expr) == COMPOUND_EXPR)
243 expr = TREE_OPERAND (expr, 0);
244
245 if (EXPR_HAS_LOCATION (expr) && (filename = EXPR_FILENAME (expr)))
246 return filename;
247 else
248 return "???";
249 }
250
251 /* Return true if T is a noreturn call. */
252 static inline bool
253 noreturn_call_p (tree t)
254 {
255 tree call = get_call_expr_in (t);
256 return call != 0 && (call_expr_flags (call) & ECF_NORETURN) != 0;
257 }
258
259 /* Mark statement T as modified. */
260 static inline void
261 mark_stmt_modified (tree t)
262 {
263 stmt_ann_t ann;
264 if (TREE_CODE (t) == PHI_NODE)
265 return;
266
267 ann = stmt_ann (t);
268 if (ann == NULL)
269 ann = create_stmt_ann (t);
270 else if (noreturn_call_p (t))
271 VEC_safe_push (tree, gc, modified_noreturn_calls, t);
272 ann->modified = 1;
273 }
274
275 /* Mark statement T as modified, and update it. */
276 static inline void
277 update_stmt (tree t)
278 {
279 if (TREE_CODE (t) == PHI_NODE)
280 return;
281 mark_stmt_modified (t);
282 update_stmt_operands (t);
283 }
284
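/* If statement T has been marked modified, rescan and update its operands.  */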
285 static inline void
286 update_stmt_if_modified (tree t)
287 {
288 if (stmt_modified_p (t))
289 update_stmt_operands (t);
290 }
291
292 /* Return true if T is marked as modified, false otherwise. */
293 static inline bool
294 stmt_modified_p (tree t)
295 {
296 stmt_ann_t ann = stmt_ann (t);
297
298 /* Note that if the statement doesn't yet have an annotation, we consider it
299 modified. This will force the next call to update_stmt_operands to scan
300 the statement. */
301 return ann ? ann->modified : true;
302 }
303
304 /* Delink an immediate_uses node from its chain. */
305 static inline void
306 delink_imm_use (ssa_use_operand_t *linknode)
307 {
308 /* Return if this node is not in a list. */
309 if (linknode->prev == NULL)
310 return;
311
312 linknode->prev->next = linknode->next;
313 linknode->next->prev = linknode->prev;
314 linknode->prev = NULL;
315 linknode->next = NULL;
316 }
317
318 /* Link ssa_imm_use node LINKNODE into the chain for LIST. */
319 static inline void
320 link_imm_use_to_list (ssa_use_operand_t *linknode, ssa_use_operand_t *list)
321 {
322 /* Link the new node at the head of the list. If we are in the process of
323 traversing the list, we won't visit any new nodes added to it. */
324 linknode->prev = list;
325 linknode->next = list->next;
326 list->next->prev = linknode;
327 list->next = linknode;
328 }
329
330 /* Link ssa_imm_use node LINKNODE into the chain for DEF. */
331 static inline void
332 link_imm_use (ssa_use_operand_t *linknode, tree def)
333 {
334 ssa_use_operand_t *root;
335
336 if (!def || TREE_CODE (def) != SSA_NAME)
337 linknode->prev = NULL;
338 else
339 {
340 root = &(SSA_NAME_IMM_USE_NODE (def));
341 #ifdef ENABLE_CHECKING
342 if (linknode->use)
343 gcc_assert (*(linknode->use) == def);
344 #endif
345 link_imm_use_to_list (linknode, root);
346 }
347 }
348
349 /* Set the value of a use pointed to by USE to VAL. */
350 static inline void
351 set_ssa_use_from_ptr (use_operand_p use, tree val)
352 {
353 delink_imm_use (use);
354 *(use->use) = val;
355 link_imm_use (use, val);
356 }
357
358 /* Link ssa_imm_use node LINKNODE into the chain for DEF, with use occurring
359 in STMT. */
360 static inline void
361 link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, tree stmt)
362 {
363 if (stmt)
364 link_imm_use (linknode, def);
365 else
366 link_imm_use (linknode, NULL);
367 linknode->stmt = stmt;
368 }
369
370 /* Relink a new node in place of an old node in the list. */
371 static inline void
372 relink_imm_use (ssa_use_operand_t *node, ssa_use_operand_t *old)
373 {
374 /* The new node had better be in the same list as the old one; their uses must refer to the same value.  */
375 gcc_assert (*(old->use) == *(node->use));
376 node->prev = old->prev;
377 node->next = old->next;
378 if (old->prev)
379 {
380 old->prev->next = node;
381 old->next->prev = node;
382 /* Remove the old node from the list. */
383 old->prev = NULL;
384 }
385 }
386
387 /* Relink ssa_imm_use node LINKNODE into the chain for OLD, with use occurring
388 in STMT. */
389 static inline void
390 relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old, tree stmt)
391 {
392 if (stmt)
393 relink_imm_use (linknode, old);
394 else
395 link_imm_use (linknode, NULL);
396 linknode->stmt = stmt;
397 }
398
399
400 /* Return true if IMM has reached the end of the immediate use list.  */
401 static inline bool
402 end_readonly_imm_use_p (imm_use_iterator *imm)
403 {
404 return (imm->imm_use == imm->end_p);
405 }
406
407 /* Initialize iterator IMM to process the list for VAR. */
408 static inline use_operand_p
409 first_readonly_imm_use (imm_use_iterator *imm, tree var)
410 {
411 gcc_assert (TREE_CODE (var) == SSA_NAME);
412
413 imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
414 imm->imm_use = imm->end_p->next;
415 #ifdef ENABLE_CHECKING
416 imm->iter_node.next = imm->imm_use->next;
417 #endif
418 if (end_readonly_imm_use_p (imm))
419 return NULL_USE_OPERAND_P;
420 return imm->imm_use;
421 }
422
423 /* Bump IMM to the next use in the list. */
424 static inline use_operand_p
425 next_readonly_imm_use (imm_use_iterator *imm)
426 {
427 use_operand_p old = imm->imm_use;
428
429 #ifdef ENABLE_CHECKING
430 /* If this assertion fails, it indicates the 'next' pointer has changed
431 since the last bump.  This indicates that the list is being modified
432 via stmt changes, or SET_USE, or somesuch thing, and you need to be
433 using the SAFE version of the iterator.  */
434 gcc_assert (imm->iter_node.next == old->next);
435 imm->iter_node.next = old->next->next;
436 #endif
437
438 imm->imm_use = old->next;
439 if (end_readonly_imm_use_p (imm))
440 return old;
441 return imm->imm_use;
442 }
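/* Example (illustrative sketch; the helper name is hypothetical): a
   read-only walk over the immediate uses of NAME with the two routines
   above.  The use list must not be modified during the walk; the
   ENABLE_CHECKING code above asserts this.  */

static inline unsigned int
example_count_imm_uses_readonly (tree name)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  unsigned int n = 0;

  for (use_p = first_readonly_imm_use (&iter, name);
       !end_readonly_imm_use_p (&iter);
       use_p = next_readonly_imm_use (&iter))
    if (use_p != NULL_USE_OPERAND_P)
      n++;

  return n;
}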
443
444 /* Return true if VAR has no uses. */
445 static inline bool
446 has_zero_uses (tree var)
447 {
448 ssa_use_operand_t *ptr;
449 ptr = &(SSA_NAME_IMM_USE_NODE (var));
450 /* An empty list (the node points back to itself) means there are no uses.  */
451 return (ptr == ptr->next);
452 }
453
454 /* Return true if VAR has a single use. */
455 static inline bool
456 has_single_use (tree var)
457 {
458 ssa_use_operand_t *ptr;
459 ptr = &(SSA_NAME_IMM_USE_NODE (var));
460 /* A single use means there is one item in the list. */
461 return (ptr != ptr->next && ptr == ptr->next->next);
462 }
463
464 /* If VAR has only a single immediate use, return true, and set USE_P and STMT
465 to the use pointer and stmt of occurrence. */
466 static inline bool
467 single_imm_use (tree var, use_operand_p *use_p, tree *stmt)
468 {
469 ssa_use_operand_t *ptr;
470
471 ptr = &(SSA_NAME_IMM_USE_NODE (var));
472 if (ptr != ptr->next && ptr == ptr->next->next)
473 {
474 *use_p = ptr->next;
475 *stmt = ptr->next->stmt;
476 return true;
477 }
478 *use_p = NULL_USE_OPERAND_P;
479 *stmt = NULL_TREE;
480 return false;
481 }
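/* Example (illustrative sketch; the helper name is hypothetical): return the
   lone statement using NAME, or NULL_TREE if NAME has zero or multiple
   uses, using single_imm_use above.  */

static inline tree
example_single_use_stmt (tree name)
{
  use_operand_p use_p;
  tree use_stmt;

  if (single_imm_use (name, &use_p, &use_stmt))
    return use_stmt;

  return NULL_TREE;
}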
482
483 /* Return the number of immediate uses of VAR. */
484 static inline unsigned int
485 num_imm_uses (tree var)
486 {
487 ssa_use_operand_t *ptr, *start;
488 unsigned int num;
489
490 start = &(SSA_NAME_IMM_USE_NODE (var));
491 num = 0;
492 for (ptr = start->next; ptr != start; ptr = ptr->next)
493 num++;
494
495 return num;
496 }
497
498
499 /* Return the tree pointed-to by USE.  */
500 static inline tree
501 get_use_from_ptr (use_operand_p use)
502 {
503 return *(use->use);
504 }
505
506 /* Return the tree pointed-to by DEF.  */
507 static inline tree
508 get_def_from_ptr (def_operand_p def)
509 {
510 return *def;
511 }
512
513 /* Return a def_operand_p pointer for the result of PHI. */
514 static inline def_operand_p
515 get_phi_result_ptr (tree phi)
516 {
517 return &(PHI_RESULT_TREE (phi));
518 }
519
520 /* Return a use_operand_p pointer for argument I of phinode PHI. */
521 static inline use_operand_p
522 get_phi_arg_def_ptr (tree phi, int i)
523 {
524 return &(PHI_ARG_IMM_USE_NODE (phi,i));
525 }
526
527
528 /* Return the bitmap of addresses taken by STMT, or NULL if it takes
529 no addresses. */
530 static inline bitmap
531 addresses_taken (tree stmt)
532 {
533 stmt_ann_t ann = stmt_ann (stmt);
534 return ann ? ann->addresses_taken : NULL;
535 }
536
537 /* Return the PHI nodes for basic block BB, or NULL if there are no
538 PHI nodes. */
539 static inline tree
540 phi_nodes (basic_block bb)
541 {
542 return bb->phi_nodes;
543 }
544
545 /* Set list of phi nodes of a basic block BB to L. */
546
547 static inline void
548 set_phi_nodes (basic_block bb, tree l)
549 {
550 tree phi;
551
552 bb->phi_nodes = l;
553 for (phi = l; phi; phi = PHI_CHAIN (phi))
554 set_bb_for_stmt (phi, bb);
555 }
556
557 /* Return the index of the PHI argument containing the use USE.  */
558
559 static inline int
560 phi_arg_index_from_use (use_operand_p use)
561 {
562 struct phi_arg_d *element, *root;
563 int index;
564 tree phi;
565
566 /* Since the use is the first thing in a PHI argument element, we can
567 calculate its index based on casting it to an argument, and performing
568 pointer arithmetic. */
569
570 phi = USE_STMT (use);
571 gcc_assert (TREE_CODE (phi) == PHI_NODE);
572
573 element = (struct phi_arg_d *)use;
574 root = &(PHI_ARG_ELT (phi, 0));
575 index = element - root;
576
577 #ifdef ENABLE_CHECKING
578 /* Make sure the calculation doesn't have any leftover bytes. If it does,
579 then imm_use is likely not the first element in phi_arg_d. */
580 gcc_assert (
581 (((char *)element - (char *)root) % sizeof (struct phi_arg_d)) == 0);
582 gcc_assert (index >= 0 && index < PHI_ARG_CAPACITY (phi));
583 #endif
584
585 return index;
586 }
587
588 /* Mark VAR as used, so that it'll be preserved during rtl expansion. */
589
590 static inline void
591 set_is_used (tree var)
592 {
593 var_ann_t ann = get_var_ann (var);
594 ann->used = 1;
595 }
596
597
598 /* ----------------------------------------------------------------------- */
599
600 /* Return true if T is an executable statement. */
601 static inline bool
602 is_exec_stmt (tree t)
603 {
604 return (t && !IS_EMPTY_STMT (t) && t != error_mark_node);
605 }
606
607
608 /* Return true if this stmt can be the target of a control transfer stmt such
609 as a goto. */
610 static inline bool
611 is_label_stmt (tree t)
612 {
613 if (t)
614 switch (TREE_CODE (t))
615 {
616 case LABEL_DECL:
617 case LABEL_EXPR:
618 case CASE_LABEL_EXPR:
619 return true;
620 default:
621 return false;
622 }
623 return false;
624 }
625
626 /* PHI nodes should contain only ssa_names and invariants. A test
627 for ssa_name is definitely simpler; don't let invalid contents
628 slip in in the meantime. */
629
630 static inline bool
631 phi_ssa_name_p (tree t)
632 {
633 if (TREE_CODE (t) == SSA_NAME)
634 return true;
635 #ifdef ENABLE_CHECKING
636 gcc_assert (is_gimple_min_invariant (t));
637 #endif
638 return false;
639 }
640
641 /* ----------------------------------------------------------------------- */
642
643 /* Return a block_stmt_iterator that points to beginning of basic
644 block BB. */
645 static inline block_stmt_iterator
646 bsi_start (basic_block bb)
647 {
648 block_stmt_iterator bsi;
649 if (bb->stmt_list)
650 bsi.tsi = tsi_start (bb->stmt_list);
651 else
652 {
653 gcc_assert (bb->index < NUM_FIXED_BLOCKS);
654 bsi.tsi.ptr = NULL;
655 bsi.tsi.container = NULL;
656 }
657 bsi.bb = bb;
658 return bsi;
659 }
660
661 /* Return a block statement iterator that points to the first non-label
662 statement in block BB. */
663
664 static inline block_stmt_iterator
665 bsi_after_labels (basic_block bb)
666 {
667 block_stmt_iterator bsi = bsi_start (bb);
668
669 while (!bsi_end_p (bsi) && TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
670 bsi_next (&bsi);
671
672 return bsi;
673 }
674
675 /* Return a block statement iterator that points to the end of basic
676 block BB. */
677 static inline block_stmt_iterator
678 bsi_last (basic_block bb)
679 {
680 block_stmt_iterator bsi;
681 if (bb->stmt_list)
682 bsi.tsi = tsi_last (bb->stmt_list);
683 else
684 {
685 gcc_assert (bb->index < NUM_FIXED_BLOCKS);
686 bsi.tsi.ptr = NULL;
687 bsi.tsi.container = NULL;
688 }
689 bsi.bb = bb;
690 return bsi;
691 }
692
693 /* Return true if block statement iterator I has reached the end of
694 the basic block. */
695 static inline bool
696 bsi_end_p (block_stmt_iterator i)
697 {
698 return tsi_end_p (i.tsi);
699 }
700
701 /* Modify block statement iterator I so that it is at the next
702 statement in the basic block. */
703 static inline void
704 bsi_next (block_stmt_iterator *i)
705 {
706 tsi_next (&i->tsi);
707 }
708
709 /* Modify block statement iterator I so that it is at the previous
710 statement in the basic block. */
711 static inline void
712 bsi_prev (block_stmt_iterator *i)
713 {
714 tsi_prev (&i->tsi);
715 }
716
717 /* Return the statement that block statement iterator I is currently
718 at. */
719 static inline tree
720 bsi_stmt (block_stmt_iterator i)
721 {
722 return tsi_stmt (i.tsi);
723 }
724
725 /* Return a pointer to the statement that block statement iterator I
726 is currently at. */
727 static inline tree *
728 bsi_stmt_ptr (block_stmt_iterator i)
729 {
730 return tsi_stmt_ptr (i.tsi);
731 }
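/* Example (illustrative sketch; the helper name is hypothetical): iterating
   the statements of basic block BB with the block statement iterator
   defined above.  */

static inline unsigned int
example_count_stmts_in_bb (basic_block bb)
{
  block_stmt_iterator bsi;
  unsigned int n = 0;

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    if (bsi_stmt (bsi))
      n++;

  return n;
}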
732
733 /* Returns the loop of the statement STMT. */
734
735 static inline struct loop *
736 loop_containing_stmt (tree stmt)
737 {
738 basic_block bb = bb_for_stmt (stmt);
739 if (!bb)
740 return NULL;
741
742 return bb->loop_father;
743 }
744
745 /* Return true if VAR is clobbered by function calls.  */
746 static inline bool
747 is_call_clobbered (tree var)
748 {
749 if (!MTAG_P (var))
750 return DECL_CALL_CLOBBERED (var);
751 else
752 return bitmap_bit_p (call_clobbered_vars, DECL_UID (var));
753 }
754
755 /* Mark variable VAR as being clobbered by function calls. */
756 static inline void
757 mark_call_clobbered (tree var, unsigned int escape_type)
758 {
759 var_ann (var)->escape_mask |= escape_type;
760 if (!MTAG_P (var))
761 DECL_CALL_CLOBBERED (var) = true;
762 bitmap_set_bit (call_clobbered_vars, DECL_UID (var));
763 }
764
765 /* Clear the call-clobbered attribute from variable VAR. */
766 static inline void
767 clear_call_clobbered (tree var)
768 {
769 var_ann_t ann = var_ann (var);
770 ann->escape_mask = 0;
771 if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
772 MTAG_GLOBAL (var) = 0;
773 if (!MTAG_P (var))
774 DECL_CALL_CLOBBERED (var) = false;
775 bitmap_clear_bit (call_clobbered_vars, DECL_UID (var));
776 }
777
778 /* Mark variable VAR as being non-addressable. */
779 static inline void
780 mark_non_addressable (tree var)
781 {
782 if (!MTAG_P (var))
783 DECL_CALL_CLOBBERED (var) = false;
784 bitmap_clear_bit (call_clobbered_vars, DECL_UID (var));
785 TREE_ADDRESSABLE (var) = 0;
786 }
787
788 /* Return the common annotation for T. Return NULL if the annotation
789 doesn't already exist. */
790 static inline tree_ann_common_t
791 tree_common_ann (tree t)
792 {
793 return &t->common.ann->common;
794 }
795
796 /* Return a common annotation for T.  Create the common annotation if it
797 doesn't exist.  */
798 static inline tree_ann_common_t
799 get_tree_common_ann (tree t)
800 {
801 tree_ann_common_t ann = tree_common_ann (t);
802 return (ann) ? ann : create_tree_common_ann (t);
803 }
804
805 /* ----------------------------------------------------------------------- */
806
807 /* The following set of routines is used to iterate over the various types
808 of SSA operands.  */
809
810 /* Return true if PTR is finished iterating. */
811 static inline bool
812 op_iter_done (ssa_op_iter *ptr)
813 {
814 return ptr->done;
815 }
816
817 /* Get the next iterator use value for PTR. */
818 static inline use_operand_p
819 op_iter_next_use (ssa_op_iter *ptr)
820 {
821 use_operand_p use_p;
822 #ifdef ENABLE_CHECKING
823 gcc_assert (ptr->iter_type == ssa_op_iter_use);
824 #endif
825 if (ptr->uses)
826 {
827 use_p = USE_OP_PTR (ptr->uses);
828 ptr->uses = ptr->uses->next;
829 return use_p;
830 }
831 if (ptr->vuses)
832 {
833 use_p = VUSE_OP_PTR (ptr->vuses);
834 ptr->vuses = ptr->vuses->next;
835 return use_p;
836 }
837 if (ptr->mayuses)
838 {
839 use_p = MAYDEF_OP_PTR (ptr->mayuses);
840 ptr->mayuses = ptr->mayuses->next;
841 return use_p;
842 }
843 if (ptr->mustkills)
844 {
845 use_p = MUSTDEF_KILL_PTR (ptr->mustkills);
846 ptr->mustkills = ptr->mustkills->next;
847 return use_p;
848 }
849 if (ptr->phi_i < ptr->num_phi)
850 {
851 return PHI_ARG_DEF_PTR (ptr->phi_stmt, (ptr->phi_i)++);
852 }
853 ptr->done = true;
854 return NULL_USE_OPERAND_P;
855 }
856
857 /* Get the next iterator def value for PTR. */
858 static inline def_operand_p
859 op_iter_next_def (ssa_op_iter *ptr)
860 {
861 def_operand_p def_p;
862 #ifdef ENABLE_CHECKING
863 gcc_assert (ptr->iter_type == ssa_op_iter_def);
864 #endif
865 if (ptr->defs)
866 {
867 def_p = DEF_OP_PTR (ptr->defs);
868 ptr->defs = ptr->defs->next;
869 return def_p;
870 }
871 if (ptr->mustdefs)
872 {
873 def_p = MUSTDEF_RESULT_PTR (ptr->mustdefs);
874 ptr->mustdefs = ptr->mustdefs->next;
875 return def_p;
876 }
877 if (ptr->maydefs)
878 {
879 def_p = MAYDEF_RESULT_PTR (ptr->maydefs);
880 ptr->maydefs = ptr->maydefs->next;
881 return def_p;
882 }
883 ptr->done = true;
884 return NULL_DEF_OPERAND_P;
885 }
886
887 /* Get the next iterator tree value for PTR. */
888 static inline tree
889 op_iter_next_tree (ssa_op_iter *ptr)
890 {
891 tree val;
892 #ifdef ENABLE_CHECKING
893 gcc_assert (ptr->iter_type == ssa_op_iter_tree);
894 #endif
895 if (ptr->uses)
896 {
897 val = USE_OP (ptr->uses);
898 ptr->uses = ptr->uses->next;
899 return val;
900 }
901 if (ptr->vuses)
902 {
903 val = VUSE_OP (ptr->vuses);
904 ptr->vuses = ptr->vuses->next;
905 return val;
906 }
907 if (ptr->mayuses)
908 {
909 val = MAYDEF_OP (ptr->mayuses);
910 ptr->mayuses = ptr->mayuses->next;
911 return val;
912 }
913 if (ptr->mustkills)
914 {
915 val = MUSTDEF_KILL (ptr->mustkills);
916 ptr->mustkills = ptr->mustkills->next;
917 return val;
918 }
919 if (ptr->defs)
920 {
921 val = DEF_OP (ptr->defs);
922 ptr->defs = ptr->defs->next;
923 return val;
924 }
925 if (ptr->mustdefs)
926 {
927 val = MUSTDEF_RESULT (ptr->mustdefs);
928 ptr->mustdefs = ptr->mustdefs->next;
929 return val;
930 }
931 if (ptr->maydefs)
932 {
933 val = MAYDEF_RESULT (ptr->maydefs);
934 ptr->maydefs = ptr->maydefs->next;
935 return val;
936 }
937
938 ptr->done = true;
939 return NULL_TREE;
940
941 }
942
943
944 /* This function clears the iterator PTR, and marks it done.  This is normally
945 used to avoid compiler warnings about possibly uninitialized
946 components.  */
947
948 static inline void
949 clear_and_done_ssa_iter (ssa_op_iter *ptr)
950 {
951 ptr->defs = NULL;
952 ptr->uses = NULL;
953 ptr->vuses = NULL;
954 ptr->maydefs = NULL;
955 ptr->mayuses = NULL;
956 ptr->mustdefs = NULL;
957 ptr->mustkills = NULL;
958 ptr->iter_type = ssa_op_iter_none;
959 ptr->phi_i = 0;
960 ptr->num_phi = 0;
961 ptr->phi_stmt = NULL_TREE;
962 ptr->done = true;
963 }
964
965 /* Initialize the iterator PTR for the operands in STMT requested by FLAGS.  */
966 static inline void
967 op_iter_init (ssa_op_iter *ptr, tree stmt, int flags)
968 {
969 #ifdef ENABLE_CHECKING
970 gcc_assert (stmt_ann (stmt));
971 #endif
972
973 ptr->defs = (flags & SSA_OP_DEF) ? DEF_OPS (stmt) : NULL;
974 ptr->uses = (flags & SSA_OP_USE) ? USE_OPS (stmt) : NULL;
975 ptr->vuses = (flags & SSA_OP_VUSE) ? VUSE_OPS (stmt) : NULL;
976 ptr->maydefs = (flags & SSA_OP_VMAYDEF) ? MAYDEF_OPS (stmt) : NULL;
977 ptr->mayuses = (flags & SSA_OP_VMAYUSE) ? MAYDEF_OPS (stmt) : NULL;
978 ptr->mustdefs = (flags & SSA_OP_VMUSTDEF) ? MUSTDEF_OPS (stmt) : NULL;
979 ptr->mustkills = (flags & SSA_OP_VMUSTKILL) ? MUSTDEF_OPS (stmt) : NULL;
980 ptr->done = false;
981
982 ptr->phi_i = 0;
983 ptr->num_phi = 0;
984 ptr->phi_stmt = NULL_TREE;
985 }
986
987 /* Initialize iterator PTR to the use operands in STMT based on FLAGS. Return
988 the first use. */
989 static inline use_operand_p
990 op_iter_init_use (ssa_op_iter *ptr, tree stmt, int flags)
991 {
992 gcc_assert ((flags & SSA_OP_ALL_DEFS) == 0);
993 op_iter_init (ptr, stmt, flags);
994 ptr->iter_type = ssa_op_iter_use;
995 return op_iter_next_use (ptr);
996 }
997
998 /* Initialize iterator PTR to the def operands in STMT based on FLAGS. Return
999 the first def. */
1000 static inline def_operand_p
1001 op_iter_init_def (ssa_op_iter *ptr, tree stmt, int flags)
1002 {
1003 gcc_assert ((flags & (SSA_OP_ALL_USES | SSA_OP_VIRTUAL_KILLS)) == 0);
1004 op_iter_init (ptr, stmt, flags);
1005 ptr->iter_type = ssa_op_iter_def;
1006 return op_iter_next_def (ptr);
1007 }
1008
1009 /* Initialize iterator PTR to the operands in STMT based on FLAGS. Return
1010 the first operand as a tree. */
1011 static inline tree
1012 op_iter_init_tree (ssa_op_iter *ptr, tree stmt, int flags)
1013 {
1014 op_iter_init (ptr, stmt, flags);
1015 ptr->iter_type = ssa_op_iter_tree;
1016 return op_iter_next_tree (ptr);
1017 }
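/* Example (illustrative sketch; the helper name is hypothetical): counting
   the operands of STMT matching SSA_OP_ALL_USES by iterating them as trees.
   The FOR_EACH_SSA_TREE_OPERAND macro used by num_ssa_operands below wraps
   essentially this pattern.  */

static inline unsigned int
example_count_use_operands (tree stmt)
{
  ssa_op_iter iter;
  tree op;
  unsigned int n = 0;

  for (op = op_iter_init_tree (&iter, stmt, SSA_OP_ALL_USES);
       !op_iter_done (&iter);
       op = op_iter_next_tree (&iter))
    if (op)
      n++;

  return n;
}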
1018
1019 /* Get the next iterator maydef/mustdef value for PTR, returning the use in
1020 USE and the def in DEF.  */
1021 static inline void
1022 op_iter_next_maymustdef (use_operand_p *use, def_operand_p *def,
1023 ssa_op_iter *ptr)
1024 {
1025 #ifdef ENABLE_CHECKING
1026 gcc_assert (ptr->iter_type == ssa_op_iter_maymustdef);
1027 #endif
1028 if (ptr->mayuses)
1029 {
1030 *def = MAYDEF_RESULT_PTR (ptr->mayuses);
1031 *use = MAYDEF_OP_PTR (ptr->mayuses);
1032 ptr->mayuses = ptr->mayuses->next;
1033 return;
1034 }
1035
1036 if (ptr->mustkills)
1037 {
1038 *def = MUSTDEF_RESULT_PTR (ptr->mustkills);
1039 *use = MUSTDEF_KILL_PTR (ptr->mustkills);
1040 ptr->mustkills = ptr->mustkills->next;
1041 return;
1042 }
1043
1044 *def = NULL_DEF_OPERAND_P;
1045 *use = NULL_USE_OPERAND_P;
1046 ptr->done = true;
1047 return;
1048 }
1049
1050
1051 /* Initialize iterator PTR to the operands in STMT. Return the first operands
1052 in USE and DEF. */
1053 static inline void
1054 op_iter_init_maydef (ssa_op_iter *ptr, tree stmt, use_operand_p *use,
1055 def_operand_p *def)
1056 {
1057 gcc_assert (TREE_CODE (stmt) != PHI_NODE);
1058
1059 op_iter_init (ptr, stmt, SSA_OP_VMAYUSE);
1060 ptr->iter_type = ssa_op_iter_maymustdef;
1061 op_iter_next_maymustdef (use, def, ptr);
1062 }
1063
1064
1065 /* Initialize iterator PTR to the operands in STMT. Return the first operands
1066 in KILL and DEF. */
1067 static inline void
1068 op_iter_init_mustdef (ssa_op_iter *ptr, tree stmt, use_operand_p *kill,
1069 def_operand_p *def)
1070 {
1071 gcc_assert (TREE_CODE (stmt) != PHI_NODE);
1072
1073 op_iter_init (ptr, stmt, SSA_OP_VMUSTKILL);
1074 ptr->iter_type = ssa_op_iter_maymustdef;
1075 op_iter_next_maymustdef (kill, def, ptr);
1076 }
1077
1078 /* Initialize iterator PTR to the operands in STMT. Return the first operands
1079 in KILL and DEF. */
1080 static inline void
1081 op_iter_init_must_and_may_def (ssa_op_iter *ptr, tree stmt,
1082 use_operand_p *kill, def_operand_p *def)
1083 {
1084 gcc_assert (TREE_CODE (stmt) != PHI_NODE);
1085
1086 op_iter_init (ptr, stmt, SSA_OP_VMUSTKILL|SSA_OP_VMAYUSE);
1087 ptr->iter_type = ssa_op_iter_maymustdef;
1088 op_iter_next_maymustdef (kill, def, ptr);
1089 }
1090
1091
1092 /* If there is a single operand in STMT matching FLAGS, return it. Otherwise
1093 return NULL. */
1094 static inline tree
1095 single_ssa_tree_operand (tree stmt, int flags)
1096 {
1097 tree var;
1098 ssa_op_iter iter;
1099
1100 var = op_iter_init_tree (&iter, stmt, flags);
1101 if (op_iter_done (&iter))
1102 return NULL_TREE;
1103 op_iter_next_tree (&iter);
1104 if (op_iter_done (&iter))
1105 return var;
1106 return NULL_TREE;
1107 }
1108
1109
1110 /* If there is a single operand in STMT matching FLAGS, return it. Otherwise
1111 return NULL. */
1112 static inline use_operand_p
1113 single_ssa_use_operand (tree stmt, int flags)
1114 {
1115 use_operand_p var;
1116 ssa_op_iter iter;
1117
1118 var = op_iter_init_use (&iter, stmt, flags);
1119 if (op_iter_done (&iter))
1120 return NULL_USE_OPERAND_P;
1121 op_iter_next_use (&iter);
1122 if (op_iter_done (&iter))
1123 return var;
1124 return NULL_USE_OPERAND_P;
1125 }
1126
1127
1128
1129 /* If there is a single operand in STMT matching FLAGS, return it. Otherwise
1130 return NULL. */
1131 static inline def_operand_p
1132 single_ssa_def_operand (tree stmt, int flags)
1133 {
1134 def_operand_p var;
1135 ssa_op_iter iter;
1136
1137 var = op_iter_init_def (&iter, stmt, flags);
1138 if (op_iter_done (&iter))
1139 return NULL_DEF_OPERAND_P;
1140 op_iter_next_def (&iter);
1141 if (op_iter_done (&iter))
1142 return var;
1143 return NULL_DEF_OPERAND_P;
1144 }
1145
1146
1147 /* Return true if there are zero operands in STMT matching the type
1148 given in FLAGS. */
1149 static inline bool
1150 zero_ssa_operands (tree stmt, int flags)
1151 {
1152 ssa_op_iter iter;
1153
1154 op_iter_init_tree (&iter, stmt, flags);
1155 return op_iter_done (&iter);
1156 }
1157
1158
1159 /* Return the number of operands matching FLAGS in STMT. */
1160 static inline int
1161 num_ssa_operands (tree stmt, int flags)
1162 {
1163 ssa_op_iter iter;
1164 tree t;
1165 int num = 0;
1166
1167 FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, flags)
1168 num++;
1169 return num;
1170 }
1171
1172
1173 /* Delink all immediate_use information for STMT. */
1174 static inline void
1175 delink_stmt_imm_use (tree stmt)
1176 {
1177 ssa_op_iter iter;
1178 use_operand_p use_p;
1179
1180 if (ssa_operands_active ())
1181 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
1182 (SSA_OP_ALL_USES | SSA_OP_ALL_KILLS))
1183 delink_imm_use (use_p);
1184 }
1185
1186
1187 /* This routine will compare all the operands matching FLAGS in STMT1 to those
1188 in STMT2. TRUE is returned if they are the same. STMTs can be NULL. */
1189 static inline bool
1190 compare_ssa_operands_equal (tree stmt1, tree stmt2, int flags)
1191 {
1192 ssa_op_iter iter1, iter2;
1193 tree op1 = NULL_TREE;
1194 tree op2 = NULL_TREE;
1195 bool look1, look2;
1196
1197 if (stmt1 == stmt2)
1198 return true;
1199
1200 look1 = stmt1 && stmt_ann (stmt1);
1201 look2 = stmt2 && stmt_ann (stmt2);
1202
1203 if (look1)
1204 {
1205 op1 = op_iter_init_tree (&iter1, stmt1, flags);
1206 if (!look2)
1207 return op_iter_done (&iter1);
1208 }
1209 else
1210 clear_and_done_ssa_iter (&iter1);
1211
1212 if (look2)
1213 {
1214 op2 = op_iter_init_tree (&iter2, stmt2, flags);
1215 if (!look1)
1216 return op_iter_done (&iter2);
1217 }
1218 else
1219 clear_and_done_ssa_iter (&iter2);
1220
1221 while (!op_iter_done (&iter1) && !op_iter_done (&iter2))
1222 {
1223 if (op1 != op2)
1224 return false;
1225 op1 = op_iter_next_tree (&iter1);
1226 op2 = op_iter_next_tree (&iter2);
1227 }
1228
1229 return (op_iter_done (&iter1) && op_iter_done (&iter2));
1230 }
1231
1232
1233 /* If there is a single DEF in the PHI node which matches FLAGS, return it.
1234 Otherwise return NULL_TREE.  */
1235 static inline tree
1236 single_phi_def (tree stmt, int flags)
1237 {
1238 tree def = PHI_RESULT (stmt);
1239 if ((flags & SSA_OP_DEF) && is_gimple_reg (def))
1240 return def;
1241 if ((flags & SSA_OP_VIRTUAL_DEFS) && !is_gimple_reg (def))
1242 return def;
1243 return NULL_TREE;
1244 }
1245
1246 /* Initialize the iterator PTR for uses matching FLAGS in PHI. FLAGS should
1247 be either SSA_OP_USES or SSA_OP_VIRTUAL_USES. */
1248 static inline use_operand_p
1249 op_iter_init_phiuse (ssa_op_iter *ptr, tree phi, int flags)
1250 {
1251 tree phi_def = PHI_RESULT (phi);
1252 int comp;
1253
1254 clear_and_done_ssa_iter (ptr);
1255 ptr->done = false;
1256
1257 gcc_assert ((flags & (SSA_OP_USE | SSA_OP_VIRTUAL_USES)) != 0);
1258
1259 comp = (is_gimple_reg (phi_def) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);
1260
1261 /* If the PHI node doesn't have the operand type we care about, we're done.  */
1262 if ((flags & comp) == 0)
1263 {
1264 ptr->done = true;
1265 return NULL_USE_OPERAND_P;
1266 }
1267
1268 ptr->phi_stmt = phi;
1269 ptr->num_phi = PHI_NUM_ARGS (phi);
1270 ptr->iter_type = ssa_op_iter_use;
1271 return op_iter_next_use (ptr);
1272 }
1273
1274
1275 /* Start an iterator for a PHI definition. */
1276
1277 static inline def_operand_p
1278 op_iter_init_phidef (ssa_op_iter *ptr, tree phi, int flags)
1279 {
1280 tree phi_def = PHI_RESULT (phi);
1281 int comp;
1282
1283 clear_and_done_ssa_iter (ptr);
1284 ptr->done = false;
1285
1286 gcc_assert ((flags & (SSA_OP_DEF | SSA_OP_VIRTUAL_DEFS)) != 0);
1287
1288 comp = (is_gimple_reg (phi_def) ? SSA_OP_DEF : SSA_OP_VIRTUAL_DEFS);
1289
1290 /* If the PHI node doesn't have the operand type we care about, we're done.  */
1291 if ((flags & comp) == 0)
1292 {
1293 ptr->done = true;
1294 return NULL_DEF_OPERAND_P;
1295 }
1296
1297 ptr->iter_type = ssa_op_iter_def;
1298 /* The first call to op_iter_next_def will terminate the iterator since
1299 all the fields are NULL. Simply return the result here as the first and
1300 therefore only result. */
1301 return PHI_RESULT_PTR (phi);
1302 }
1303
1304 /* Return true if IMM has reached the end of the immediate use stmt list.  */
1305
1306 static inline bool
1307 end_imm_use_stmt_p (imm_use_iterator *imm)
1308 {
1309 return (imm->imm_use == imm->end_p);
1310 }
1311
1312 /* Finish the traversal of an immediate use stmt list IMM by removing the
1313 placeholder node from the list.  */
1314
1315 static inline void
1316 end_imm_use_stmt_traverse (imm_use_iterator *imm)
1317 {
1318 delink_imm_use (&(imm->iter_node));
1319 }
1320
1321 /* Immediate use traversal of uses within a stmt requires that all the
1322 uses on a stmt be sequentially listed.  This routine is used to build up
1323 this sequential list by adding USE_P to the end of the list currently
1324 delimited by HEAD and LAST_P.  The new LAST_P value is
1325 returned.  */
1326
1327 static inline use_operand_p
1328 move_use_after_head (use_operand_p use_p, use_operand_p head,
1329 use_operand_p last_p)
1330 {
1331 gcc_assert (USE_FROM_PTR (use_p) == USE_FROM_PTR (head));
1332 /* Skip head when we find it. */
1333 if (use_p != head)
1334 {
1335 /* If use_p is already linked in after last_p, continue. */
1336 if (last_p->next == use_p)
1337 last_p = use_p;
1338 else
1339 {
1340 /* Delink from current location, and link in at last_p. */
1341 delink_imm_use (use_p);
1342 link_imm_use_to_list (use_p, last_p);
1343 last_p = use_p;
1344 }
1345 }
1346 return last_p;
1347 }
1348
1349
1350 /* This routine will relink all uses with the same stmt as HEAD into the list
1351 immediately following HEAD for iterator IMM. */
1352
1353 static inline void
1354 link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
1355 {
1356 use_operand_p use_p;
1357 use_operand_p last_p = head;
1358 tree head_stmt = USE_STMT (head);
1359 tree use = USE_FROM_PTR (head);
1360 ssa_op_iter op_iter;
1361 int flag;
1362
1363 /* Only look at virtual or real uses, depending on the type of HEAD. */
1364 flag = (is_gimple_reg (use) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);
1365
1366 if (TREE_CODE (head_stmt) == PHI_NODE)
1367 {
1368 FOR_EACH_PHI_ARG (use_p, head_stmt, op_iter, flag)
1369 if (USE_FROM_PTR (use_p) == use)
1370 last_p = move_use_after_head (use_p, head, last_p);
1371 }
1372 else
1373 {
1374 FOR_EACH_SSA_USE_OPERAND (use_p, head_stmt, op_iter, flag)
1375 if (USE_FROM_PTR (use_p) == use)
1376 last_p = move_use_after_head (use_p, head, last_p);
1377 }
1378 /* Link the iterator node in after last_p.  */
1379 if (imm->iter_node.prev != NULL)
1380 delink_imm_use (&imm->iter_node);
1381 link_imm_use_to_list (&(imm->iter_node), last_p);
1382 }
1383
1384 /* Initialize IMM to traverse over uses of VAR. Return the first statement. */
1385 static inline tree
1386 first_imm_use_stmt (imm_use_iterator *imm, tree var)
1387 {
1388 gcc_assert (TREE_CODE (var) == SSA_NAME);
1389
1390 imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
1391 imm->imm_use = imm->end_p->next;
1392 imm->next_imm_name = NULL_USE_OPERAND_P;
1393
1394 /* iter_node is used as a marker within the immediate use list to indicate
1395 where the end of the current stmt's uses is.  Initialize it to a NULL
1396 stmt and use, which indicates a marker node.  */
1397 imm->iter_node.prev = NULL_USE_OPERAND_P;
1398 imm->iter_node.next = NULL_USE_OPERAND_P;
1399 imm->iter_node.stmt = NULL_TREE;
1400 imm->iter_node.use = NULL_USE_OPERAND_P;
1401
1402 if (end_imm_use_stmt_p (imm))
1403 return NULL_TREE;
1404
1405 link_use_stmts_after (imm->imm_use, imm);
1406
1407 return USE_STMT (imm->imm_use);
1408 }
1409
1410 /* Bump IMM to the next stmt which has a use of var. */
1411
1412 static inline tree
1413 next_imm_use_stmt (imm_use_iterator *imm)
1414 {
1415 imm->imm_use = imm->iter_node.next;
1416 if (end_imm_use_stmt_p (imm))
1417 {
1418 if (imm->iter_node.prev != NULL)
1419 delink_imm_use (&imm->iter_node);
1420 return NULL_TREE;
1421 }
1422
1423 link_use_stmts_after (imm->imm_use, imm);
1424 return USE_STMT (imm->imm_use);
1425
1426 }
1427
1428 /* This routine will return the first use on the stmt IMM currently refers
1429 to. */
1430
1431 static inline use_operand_p
1432 first_imm_use_on_stmt (imm_use_iterator *imm)
1433 {
1434 imm->next_imm_name = imm->imm_use->next;
1435 return imm->imm_use;
1436 }
1437
1438 /* Return TRUE if all the uses on the stmt IMM currently refers to have been visited.  */
1439
1440 static inline bool
1441 end_imm_use_on_stmt_p (imm_use_iterator *imm)
1442 {
1443 return (imm->imm_use == &(imm->iter_node));
1444 }
1445
1446 /* Bump to the next use on the stmt IMM refers to, return NULL if done. */
1447
1448 static inline use_operand_p
1449 next_imm_use_on_stmt (imm_use_iterator *imm)
1450 {
1451 imm->imm_use = imm->next_imm_name;
1452 if (end_imm_use_on_stmt_p (imm))
1453 return NULL_USE_OPERAND_P;
1454 else
1455 {
1456 imm->next_imm_name = imm->imm_use->next;
1457 return imm->imm_use;
1458 }
1459 }
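/* Example (illustrative sketch; the helper name is hypothetical): visiting
   each statement that uses NAME via the stmt-based immediate use iterator
   above.  Running the loop to completion unlinks the iterator's placeholder
   node; leaving it early would require end_imm_use_stmt_traverse.  The uses
   within each statement could in turn be walked with first_imm_use_on_stmt,
   end_imm_use_on_stmt_p and next_imm_use_on_stmt.  */

static inline unsigned int
example_count_using_stmts (tree name)
{
  imm_use_iterator iter;
  tree stmt;
  unsigned int n = 0;

  for (stmt = first_imm_use_stmt (&iter, name);
       !end_imm_use_stmt_p (&iter);
       stmt = next_imm_use_stmt (&iter))
    if (stmt)
      n++;

  return n;
}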
1460
1461 /* Return true if VAR cannot be modified by the program. */
1462
1463 static inline bool
1464 unmodifiable_var_p (tree var)
1465 {
1466 if (TREE_CODE (var) == SSA_NAME)
1467 var = SSA_NAME_VAR (var);
1468
1469 if (MTAG_P (var))
1470 return TREE_READONLY (var) && (TREE_STATIC (var) || MTAG_GLOBAL (var));
1471
1472 return TREE_READONLY (var) && (TREE_STATIC (var) || DECL_EXTERNAL (var));
1473 }
1474
1475 /* Return true if REF, an ARRAY_REF, has an INDIRECT_REF somewhere in it. */
1476
1477 static inline bool
1478 array_ref_contains_indirect_ref (tree ref)
1479 {
1480 gcc_assert (TREE_CODE (ref) == ARRAY_REF);
1481
1482 do {
1483 ref = TREE_OPERAND (ref, 0);
1484 } while (handled_component_p (ref));
1485
1486 return TREE_CODE (ref) == INDIRECT_REF;
1487 }
1488
1489 /* Return true if REF, a handled component reference, has an ARRAY_REF
1490 somewhere in it. */
1491
1492 static inline bool
1493 ref_contains_array_ref (tree ref)
1494 {
1495 gcc_assert (handled_component_p (ref));
1496
1497 do {
1498 if (TREE_CODE (ref) == ARRAY_REF)
1499 return true;
1500 ref = TREE_OPERAND (ref, 0);
1501 } while (handled_component_p (ref));
1502
1503 return false;
1504 }
1505
1506 /* Given a variable VAR, lookup and return a pointer to the list of
1507 subvariables for it. */
1508
1509 static inline subvar_t *
1510 lookup_subvars_for_var (tree var)
1511 {
1512 var_ann_t ann = var_ann (var);
1513 gcc_assert (ann);
1514 return &ann->subvars;
1515 }
1516
1517 /* Given a variable VAR, return a linked list of subvariables for VAR, or
1518 NULL, if there are no subvariables. */
1519
1520 static inline subvar_t
1521 get_subvars_for_var (tree var)
1522 {
1523 subvar_t subvars;
1524
1525 gcc_assert (SSA_VAR_P (var));
1526
1527 if (TREE_CODE (var) == SSA_NAME)
1528 subvars = *(lookup_subvars_for_var (SSA_NAME_VAR (var)));
1529 else
1530 subvars = *(lookup_subvars_for_var (var));
1531 return subvars;
1532 }
1533
1534 /* Return the subvariable of VAR at offset OFFSET. */
1535
1536 static inline tree
1537 get_subvar_at (tree var, unsigned HOST_WIDE_INT offset)
1538 {
1539 subvar_t sv;
1540
1541 for (sv = get_subvars_for_var (var); sv; sv = sv->next)
1542 if (SFT_OFFSET (sv->var) == offset)
1543 return sv->var;
1544
1545 return NULL_TREE;
1546 }
1547
1548 /* Return true if V is a tree that we can have subvars for.
1549 Normally, this is any aggregate type. Also complex
1550 types which are not gimple registers can have subvars. */
1551
1552 static inline bool
1553 var_can_have_subvars (tree v)
1554 {
1555 /* Volatile variables should never have subvars. */
1556 if (TREE_THIS_VOLATILE (v))
1557 return false;
1558
1559 /* Non decls or memory tags can never have subvars. */
1560 if (!DECL_P (v) || MTAG_P (v))
1561 return false;
1562
1563 /* Aggregates can have subvars. */
1564 if (AGGREGATE_TYPE_P (TREE_TYPE (v)))
1565 return true;
1566
1567 /* Variables of complex type which are not also gimple registers can
1568 have subvars.  */
1569 if (TREE_CODE (TREE_TYPE (v)) == COMPLEX_TYPE
1570 && !DECL_COMPLEX_GIMPLE_REG_P (v))
1571 return true;
1572
1573 return false;
1574 }
1575
1576
1577 /* Return true if OFFSET and SIZE define a range that overlaps with some
1578 portion of the range of SV, a subvar. If there was an exact overlap,
1579 *EXACT will be set to true upon return. */
1580
1581 static inline bool
1582 overlap_subvar (unsigned HOST_WIDE_INT offset, unsigned HOST_WIDE_INT size,
1583 tree sv, bool *exact)
1584 {
1585 /* There are three possible cases of overlap.
1586 1. We can have an exact overlap, like so:
1587 |offset, offset + size |
1588 |sv->offset, sv->offset + sv->size |
1589
1590 2. We can have offset starting after sv->offset, like so:
1591
1592 |offset, offset + size |
1593 |sv->offset, sv->offset + sv->size |
1594
1595 3. We can have offset starting before sv->offset, like so:
1596
1597 |offset, offset + size |
1598 |sv->offset, sv->offset + sv->size|
1599 */
1600
1601 if (exact)
1602 *exact = false;
1603 if (offset == SFT_OFFSET (sv) && size == SFT_SIZE (sv))
1604 {
1605 if (exact)
1606 *exact = true;
1607 return true;
1608 }
1609 else if (offset >= SFT_OFFSET (sv)
1610 && offset < (SFT_OFFSET (sv) + SFT_SIZE (sv)))
1611 {
1612 return true;
1613 }
1614 else if (offset < SFT_OFFSET (sv)
1615 && (size > SFT_OFFSET (sv) - offset))
1616 {
1617 return true;
1618 }
1619 return false;
1620
1621 }
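/* Example (illustrative sketch; the helper name is hypothetical): find a
   subvariable of VAR that overlaps the range [OFFSET, OFFSET + SIZE), using
   get_subvars_for_var and overlap_subvar above.  *EXACT is set by
   overlap_subvar for the returned match.  */

static inline tree
example_find_overlapping_subvar (tree var, unsigned HOST_WIDE_INT offset,
                                 unsigned HOST_WIDE_INT size, bool *exact)
{
  subvar_t sv;

  for (sv = get_subvars_for_var (var); sv; sv = sv->next)
    if (overlap_subvar (offset, size, sv->var, exact))
      return sv->var;

  return NULL_TREE;
}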
1622
1623 #endif /* _TREE_FLOW_INLINE_H */
1624