1 /* Language-independent node constructors for parse phase of GNU compiler.
2    Copyright (C) 1987-2021 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This file contains the low level primitives for operating on tree nodes,
21    including allocation, list operations, interning of identifiers,
22    construction of data type nodes and statement nodes,
23    and construction of type conversion nodes.  It also contains
24    tables indexed by tree code that describe how to take apart
25    nodes of that code.
26 
27    It is intended to be language-independent but can occasionally
28    call language-dependent routines.  */
29 
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 
72 /* Tree code classes.  */
73 
74 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
75 #define END_OF_BASE_TREE_CODES tcc_exceptional,
76 
77 const enum tree_code_class tree_code_type[] = {
78 #include "all-tree.def"
79 };
80 
81 #undef DEFTREECODE
82 #undef END_OF_BASE_TREE_CODES
83 
84 /* Table indexed by tree code giving number of expression
85    operands beyond the fixed part of the node structure.
86    Not used for types or decls.  */
87 
88 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
89 #define END_OF_BASE_TREE_CODES 0,
90 
91 const unsigned char tree_code_length[] = {
92 #include "all-tree.def"
93 };
94 
95 #undef DEFTREECODE
96 #undef END_OF_BASE_TREE_CODES
97 
98 /* Names of tree components.
99    Used for printing out the tree and error messages.  */
100 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
101 #define END_OF_BASE_TREE_CODES "@dummy",
102 
103 static const char *const tree_code_name[] = {
104 #include "all-tree.def"
105 };
106 
107 #undef DEFTREECODE
108 #undef END_OF_BASE_TREE_CODES
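
/* The three tables above are generated with the "X macro" technique:
   every entry in all-tree.def is a DEFTREECODE (SYM, NAME, TYPE, LENGTH)
   invocation, and re-including that file under different definitions of
   DEFTREECODE yields parallel arrays indexed by tree code.  A minimal
   sketch of the same idea, using a made-up codes.def purely for
   illustration:

     // codes.def contains:
     //   DEFCODE (FOO, "foo", 2)
     //   DEFCODE (BAR, "bar", 0)

     #define DEFCODE(SYM, NAME, NOPS) NAME,
     static const char *const code_name[] = {
     #include "codes.def"
     };
     #undef DEFCODE

   Here codes.def and DEFCODE are hypothetical names; the real entries
   come from tree.def and the language def files via all-tree.def.  */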
109 
110 /* Each tree code class has an associated string representation.
111    These must correspond to the tree_code_class entries.  */
112 
113 const char *const tree_code_class_strings[] =
114 {
115   "exceptional",
116   "constant",
117   "type",
118   "declaration",
119   "reference",
120   "comparison",
121   "unary",
122   "binary",
123   "statement",
124   "vl_exp",
125   "expression"
126 };
127 
128 /* obstack.[ch] explicitly declined to prototype this.  */
129 extern int _obstack_allocated_p (struct obstack *h, void *obj);
130 
131 /* Statistics-gathering stuff.  */
132 
133 static uint64_t tree_code_counts[MAX_TREE_CODES];
134 uint64_t tree_node_counts[(int) all_kinds];
135 uint64_t tree_node_sizes[(int) all_kinds];
136 
137 /* Keep in sync with tree.h:enum tree_node_kind.  */
138 static const char * const tree_node_kind_names[] = {
139   "decls",
140   "types",
141   "blocks",
142   "stmts",
143   "refs",
144   "exprs",
145   "constants",
146   "identifiers",
147   "vecs",
148   "binfos",
149   "ssa names",
150   "constructors",
151   "random kinds",
152   "lang_decl kinds",
153   "lang_type kinds",
154   "omp clauses",
155 };
156 
157 /* Unique id for next decl created.  */
158 static GTY(()) int next_decl_uid;
159 /* Unique id for next type created.  */
160 static GTY(()) unsigned next_type_uid = 1;
161 /* Unique id for next debug decl created.  Use negative numbers,
162    to catch erroneous uses.  */
163 static GTY(()) int next_debug_decl_uid;
164 
165 /* Since we cannot rehash a type after it is in the table, we have to
166    keep the hash code.  */
167 
168 struct GTY((for_user)) type_hash {
169   unsigned long hash;
170   tree type;
171 };
172 
173 /* Initial size of the hash table (rounded to next prime).  */
174 #define TYPE_HASH_INITIAL_SIZE 1000
175 
176 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
177 {
178   static hashval_t hash (type_hash *t) { return t->hash; }
179   static bool equal (type_hash *a, type_hash *b);
180 
181   static int
182   keep_cache_entry (type_hash *&t)
183   {
184     return ggc_marked_p (t->type);
185   }
186 };
187 
188 /* Now here is the hash table.  When recording a type, it is added to
189    the slot whose index is the hash code.  Note that the hash table is
190    used for several kinds of types (function types, array types and
191    array index range types, for now).  While all these live in the
192    same table, they are completely independent, and the hash code is
193    computed differently for each of these.  */
194 
195 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
196 
197 /* Hash table and temporary node for larger integer const values.  */
198 static GTY (()) tree int_cst_node;
199 
200 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
201 {
202   static hashval_t hash (tree t);
203   static bool equal (tree x, tree y);
204 };
205 
206 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
207 
208 /* Class and variable for making sure that there is a single POLY_INT_CST
209    for a given value.  */
210 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
211 {
212   typedef std::pair<tree, const poly_wide_int *> compare_type;
213   static hashval_t hash (tree t);
214   static bool equal (tree x, const compare_type &y);
215 };
216 
217 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
218 
219 /* Hash table for optimization flags and target option flags.  Use the same
220    hash table for both sets of options.  Nodes for building the current
221    optimization and target option nodes.  The assumption is most of the time
222    the options created will already be in the hash table, so we avoid
223    allocating and freeing up a node repeatedly.  */
224 static GTY (()) tree cl_optimization_node;
225 static GTY (()) tree cl_target_option_node;
226 
227 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
228 {
229   static hashval_t hash (tree t);
230   static bool equal (tree x, tree y);
231 };
232 
233 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
234 
235 /* General tree->tree mapping  structure for use in hash tables.  */
236 
237 
238 static GTY ((cache))
239      hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
240 
241 static GTY ((cache))
242      hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
243 
244 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
245 {
246   static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
247 
248   static bool
249   equal (tree_vec_map *a, tree_vec_map *b)
250   {
251     return a->base.from == b->base.from;
252   }
253 
254   static int
255   keep_cache_entry (tree_vec_map *&m)
256   {
257     return ggc_marked_p (m->base.from);
258   }
259 };
260 
261 static GTY ((cache))
262      hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
263 
264 static void set_type_quals (tree, int);
265 static void print_type_hash_statistics (void);
266 static void print_debug_expr_statistics (void);
267 static void print_value_expr_statistics (void);
268 
269 static tree build_array_type_1 (tree, tree, bool, bool, bool);
270 
271 tree global_trees[TI_MAX];
272 tree integer_types[itk_none];
273 
274 bool int_n_enabled_p[NUM_INT_N_ENTS];
275 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
276 
277 bool tree_contains_struct[MAX_TREE_CODES][64];
278 
279 /* Number of operands for each OpenMP clause.  */
280 unsigned const char omp_clause_num_ops[] =
281 {
282   0, /* OMP_CLAUSE_ERROR  */
283   1, /* OMP_CLAUSE_PRIVATE  */
284   1, /* OMP_CLAUSE_SHARED  */
285   1, /* OMP_CLAUSE_FIRSTPRIVATE  */
286   2, /* OMP_CLAUSE_LASTPRIVATE  */
287   5, /* OMP_CLAUSE_REDUCTION  */
288   5, /* OMP_CLAUSE_TASK_REDUCTION  */
289   5, /* OMP_CLAUSE_IN_REDUCTION  */
290   1, /* OMP_CLAUSE_COPYIN  */
291   1, /* OMP_CLAUSE_COPYPRIVATE  */
292   3, /* OMP_CLAUSE_LINEAR  */
293   2, /* OMP_CLAUSE_ALIGNED  */
294   2, /* OMP_CLAUSE_ALLOCATE  */
295   1, /* OMP_CLAUSE_DEPEND  */
296   1, /* OMP_CLAUSE_NONTEMPORAL  */
297   1, /* OMP_CLAUSE_UNIFORM  */
298   1, /* OMP_CLAUSE_TO_DECLARE  */
299   1, /* OMP_CLAUSE_LINK  */
300   1, /* OMP_CLAUSE_DETACH  */
301   1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
302   1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
303   1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
304   1, /* OMP_CLAUSE_INCLUSIVE  */
305   1, /* OMP_CLAUSE_EXCLUSIVE  */
306   2, /* OMP_CLAUSE_FROM  */
307   2, /* OMP_CLAUSE_TO  */
308   2, /* OMP_CLAUSE_MAP  */
309   2, /* OMP_CLAUSE__CACHE_  */
310   2, /* OMP_CLAUSE_GANG  */
311   1, /* OMP_CLAUSE_ASYNC  */
312   1, /* OMP_CLAUSE_WAIT  */
313   0, /* OMP_CLAUSE_AUTO  */
314   0, /* OMP_CLAUSE_SEQ  */
315   1, /* OMP_CLAUSE__LOOPTEMP_  */
316   1, /* OMP_CLAUSE__REDUCTEMP_  */
317   1, /* OMP_CLAUSE__CONDTEMP_  */
318   1, /* OMP_CLAUSE__SCANTEMP_  */
319   1, /* OMP_CLAUSE_IF  */
320   1, /* OMP_CLAUSE_NUM_THREADS  */
321   1, /* OMP_CLAUSE_SCHEDULE  */
322   0, /* OMP_CLAUSE_NOWAIT  */
323   1, /* OMP_CLAUSE_ORDERED  */
324   0, /* OMP_CLAUSE_DEFAULT  */
325   3, /* OMP_CLAUSE_COLLAPSE  */
326   0, /* OMP_CLAUSE_UNTIED   */
327   1, /* OMP_CLAUSE_FINAL  */
328   0, /* OMP_CLAUSE_MERGEABLE  */
329   1, /* OMP_CLAUSE_DEVICE  */
330   1, /* OMP_CLAUSE_DIST_SCHEDULE  */
331   0, /* OMP_CLAUSE_INBRANCH  */
332   0, /* OMP_CLAUSE_NOTINBRANCH  */
333   1, /* OMP_CLAUSE_NUM_TEAMS  */
334   1, /* OMP_CLAUSE_THREAD_LIMIT  */
335   0, /* OMP_CLAUSE_PROC_BIND  */
336   1, /* OMP_CLAUSE_SAFELEN  */
337   1, /* OMP_CLAUSE_SIMDLEN  */
338   0, /* OMP_CLAUSE_DEVICE_TYPE  */
339   0, /* OMP_CLAUSE_FOR  */
340   0, /* OMP_CLAUSE_PARALLEL  */
341   0, /* OMP_CLAUSE_SECTIONS  */
342   0, /* OMP_CLAUSE_TASKGROUP  */
343   1, /* OMP_CLAUSE_PRIORITY  */
344   1, /* OMP_CLAUSE_GRAINSIZE  */
345   1, /* OMP_CLAUSE_NUM_TASKS  */
346   0, /* OMP_CLAUSE_NOGROUP  */
347   0, /* OMP_CLAUSE_THREADS  */
348   0, /* OMP_CLAUSE_SIMD  */
349   1, /* OMP_CLAUSE_HINT  */
350   0, /* OMP_CLAUSE_DEFAULTMAP  */
351   0, /* OMP_CLAUSE_ORDER  */
352   0, /* OMP_CLAUSE_BIND  */
353   1, /* OMP_CLAUSE__SIMDUID_  */
354   0, /* OMP_CLAUSE__SIMT_  */
355   0, /* OMP_CLAUSE_INDEPENDENT  */
356   1, /* OMP_CLAUSE_WORKER  */
357   1, /* OMP_CLAUSE_VECTOR  */
358   1, /* OMP_CLAUSE_NUM_GANGS  */
359   1, /* OMP_CLAUSE_NUM_WORKERS  */
360   1, /* OMP_CLAUSE_VECTOR_LENGTH  */
361   3, /* OMP_CLAUSE_TILE  */
362   0, /* OMP_CLAUSE_IF_PRESENT */
363   0, /* OMP_CLAUSE_FINALIZE */
364 };
365 
366 const char * const omp_clause_code_name[] =
367 {
368   "error_clause",
369   "private",
370   "shared",
371   "firstprivate",
372   "lastprivate",
373   "reduction",
374   "task_reduction",
375   "in_reduction",
376   "copyin",
377   "copyprivate",
378   "linear",
379   "aligned",
380   "allocate",
381   "depend",
382   "nontemporal",
383   "uniform",
384   "to",
385   "link",
386   "detach",
387   "use_device_ptr",
388   "use_device_addr",
389   "is_device_ptr",
390   "inclusive",
391   "exclusive",
392   "from",
393   "to",
394   "map",
395   "_cache_",
396   "gang",
397   "async",
398   "wait",
399   "auto",
400   "seq",
401   "_looptemp_",
402   "_reductemp_",
403   "_condtemp_",
404   "_scantemp_",
405   "if",
406   "num_threads",
407   "schedule",
408   "nowait",
409   "ordered",
410   "default",
411   "collapse",
412   "untied",
413   "final",
414   "mergeable",
415   "device",
416   "dist_schedule",
417   "inbranch",
418   "notinbranch",
419   "num_teams",
420   "thread_limit",
421   "proc_bind",
422   "safelen",
423   "simdlen",
424   "device_type",
425   "for",
426   "parallel",
427   "sections",
428   "taskgroup",
429   "priority",
430   "grainsize",
431   "num_tasks",
432   "nogroup",
433   "threads",
434   "simd",
435   "hint",
436   "defaultmap",
437   "order",
438   "bind",
439   "_simduid_",
440   "_simt_",
441   "independent",
442   "worker",
443   "vector",
444   "num_gangs",
445   "num_workers",
446   "vector_length",
447   "tile",
448   "if_present",
449   "finalize",
450 };
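
/* Both tables above are indexed by the clause's code, so given an
   OMP_CLAUSE node its printable name and operand count can be looked up
   directly.  An illustrative sketch (C is assumed to be a valid
   OMP_CLAUSE tree):

     enum omp_clause_code kind = OMP_CLAUSE_CODE (c);
     const char *name = omp_clause_code_name[kind];
     unsigned int nops = omp_clause_num_ops[kind];

   tree_size below consults omp_clause_num_ops in exactly this way to
   compute the size of a variable-length OMP_CLAUSE node.  */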
451 
452 
453 /* Return the tree node structure used by tree code CODE.  */
454 
455 static inline enum tree_node_structure_enum
456 tree_node_structure_for_code (enum tree_code code)
457 {
458   switch (TREE_CODE_CLASS (code))
459     {
460     case tcc_declaration:
461       switch (code)
462 	{
463 	case CONST_DECL:	return TS_CONST_DECL;
464 	case DEBUG_EXPR_DECL:	return TS_DECL_WRTL;
465 	case FIELD_DECL:	return TS_FIELD_DECL;
466 	case FUNCTION_DECL:	return TS_FUNCTION_DECL;
467 	case LABEL_DECL:	return TS_LABEL_DECL;
468 	case PARM_DECL:		return TS_PARM_DECL;
469 	case RESULT_DECL:	return TS_RESULT_DECL;
470 	case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
471 	case TYPE_DECL:		return TS_TYPE_DECL;
472 	case VAR_DECL:		return TS_VAR_DECL;
473 	default: 		return TS_DECL_NON_COMMON;
474 	}
475 
476     case tcc_type:		return TS_TYPE_NON_COMMON;
477 
478     case tcc_binary:
479     case tcc_comparison:
480     case tcc_expression:
481     case tcc_reference:
482     case tcc_statement:
483     case tcc_unary:
484     case tcc_vl_exp:		return TS_EXP;
485 
486     default:  /* tcc_constant and tcc_exceptional */
487       break;
488     }
489 
490   switch (code)
491     {
492       /* tcc_constant cases.  */
493     case COMPLEX_CST:		return TS_COMPLEX;
494     case FIXED_CST:		return TS_FIXED_CST;
495     case INTEGER_CST:		return TS_INT_CST;
496     case POLY_INT_CST:		return TS_POLY_INT_CST;
497     case REAL_CST:		return TS_REAL_CST;
498     case STRING_CST:		return TS_STRING;
499     case VECTOR_CST:		return TS_VECTOR;
500     case VOID_CST:		return TS_TYPED;
501 
502       /* tcc_exceptional cases.  */
503     case BLOCK:			return TS_BLOCK;
504     case CONSTRUCTOR:		return TS_CONSTRUCTOR;
505     case ERROR_MARK:		return TS_COMMON;
506     case IDENTIFIER_NODE:	return TS_IDENTIFIER;
507     case OMP_CLAUSE:		return TS_OMP_CLAUSE;
508     case OPTIMIZATION_NODE:	return TS_OPTIMIZATION;
509     case PLACEHOLDER_EXPR:	return TS_COMMON;
510     case SSA_NAME:		return TS_SSA_NAME;
511     case STATEMENT_LIST:	return TS_STATEMENT_LIST;
512     case TARGET_OPTION_NODE:	return TS_TARGET_OPTION;
513     case TREE_BINFO:		return TS_BINFO;
514     case TREE_LIST:		return TS_LIST;
515     case TREE_VEC:		return TS_VEC;
516 
517     default:
518       gcc_unreachable ();
519     }
520 }
521 
522 
523 /* Initialize tree_contains_struct to describe the hierarchy of tree
524    nodes.  */
525 
526 static void
527 initialize_tree_contains_struct (void)
528 {
529   unsigned i;
530 
531   for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
532     {
533       enum tree_code code;
534       enum tree_node_structure_enum ts_code;
535 
536       code = (enum tree_code) i;
537       ts_code = tree_node_structure_for_code (code);
538 
539       /* Mark the TS structure itself.  */
540       tree_contains_struct[code][ts_code] = 1;
541 
542       /* Mark all the structures that TS is derived from.  */
543       switch (ts_code)
544 	{
545 	case TS_TYPED:
546 	case TS_BLOCK:
547 	case TS_OPTIMIZATION:
548 	case TS_TARGET_OPTION:
549 	  MARK_TS_BASE (code);
550 	  break;
551 
552 	case TS_COMMON:
553 	case TS_INT_CST:
554 	case TS_POLY_INT_CST:
555 	case TS_REAL_CST:
556 	case TS_FIXED_CST:
557 	case TS_VECTOR:
558 	case TS_STRING:
559 	case TS_COMPLEX:
560 	case TS_SSA_NAME:
561 	case TS_CONSTRUCTOR:
562 	case TS_EXP:
563 	case TS_STATEMENT_LIST:
564 	  MARK_TS_TYPED (code);
565 	  break;
566 
567 	case TS_IDENTIFIER:
568 	case TS_DECL_MINIMAL:
569 	case TS_TYPE_COMMON:
570 	case TS_LIST:
571 	case TS_VEC:
572 	case TS_BINFO:
573 	case TS_OMP_CLAUSE:
574 	  MARK_TS_COMMON (code);
575 	  break;
576 
577 	case TS_TYPE_WITH_LANG_SPECIFIC:
578 	  MARK_TS_TYPE_COMMON (code);
579 	  break;
580 
581 	case TS_TYPE_NON_COMMON:
582 	  MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
583 	  break;
584 
585 	case TS_DECL_COMMON:
586 	  MARK_TS_DECL_MINIMAL (code);
587 	  break;
588 
589 	case TS_DECL_WRTL:
590 	case TS_CONST_DECL:
591 	  MARK_TS_DECL_COMMON (code);
592 	  break;
593 
594 	case TS_DECL_NON_COMMON:
595 	  MARK_TS_DECL_WITH_VIS (code);
596 	  break;
597 
598 	case TS_DECL_WITH_VIS:
599 	case TS_PARM_DECL:
600 	case TS_LABEL_DECL:
601 	case TS_RESULT_DECL:
602 	  MARK_TS_DECL_WRTL (code);
603 	  break;
604 
605 	case TS_FIELD_DECL:
606 	  MARK_TS_DECL_COMMON (code);
607 	  break;
608 
609 	case TS_VAR_DECL:
610 	  MARK_TS_DECL_WITH_VIS (code);
611 	  break;
612 
613 	case TS_TYPE_DECL:
614 	case TS_FUNCTION_DECL:
615 	  MARK_TS_DECL_NON_COMMON (code);
616 	  break;
617 
618 	case TS_TRANSLATION_UNIT_DECL:
619 	  MARK_TS_DECL_COMMON (code);
620 	  break;
621 
622 	default:
623 	  gcc_unreachable ();
624 	}
625     }
626 
627   /* Basic consistency checks for attributes used in fold.  */
628   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
629   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
630   gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
631   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
632   gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
633   gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
634   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
635   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
636   gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
637   gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
638   gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
639   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
640   gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
641   gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
642   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
643   gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
644   gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
645   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
646   gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
647   gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
648   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
649   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
650   gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
651   gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
652   gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
653   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
654   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
655   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
656   gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
657   gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
658   gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
659   gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
660   gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
661   gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
662   gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
663   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
664   gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
665   gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
666   gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
667   gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
668 }
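
/* Once initialized, tree_contains_struct is normally consulted through
   the CODE_CONTAINS_STRUCT macro rather than read directly.  A small
   sketch of a typical query (mirroring the checks asserted above):

     if (CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_WITH_VIS))
       {
	 // decl carries the tree_decl_with_vis fields, so accessors
	 // such as DECL_VISIBILITY are valid on it.
       }

   This is how field accessors guard accesses that only exist for part
   of the decl hierarchy.  */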
669 
670 
671 /* Init tree.c.  */
672 
673 void
674 init_ttree (void)
675 {
676   /* Initialize the hash table of types.  */
677   type_hash_table
678     = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
679 
680   debug_expr_for_decl
681     = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
682 
683   value_expr_for_decl
684     = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
685 
686   int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
687 
688   poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
689 
690   int_cst_node = make_int_cst (1, 1);
691 
692   cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
693 
694   cl_optimization_node = make_node (OPTIMIZATION_NODE);
695   cl_target_option_node = make_node (TARGET_OPTION_NODE);
696 
697   /* Initialize the tree_contains_struct array.  */
698   initialize_tree_contains_struct ();
699   lang_hooks.init_ts ();
700 }
701 
702 
703 /* The name of the object as the assembler will see it (but before any
704    translations made by ASM_OUTPUT_LABELREF).  Often this is the same
705    as DECL_NAME.  It is an IDENTIFIER_NODE.  */
706 tree
707 decl_assembler_name (tree decl)
708 {
709   if (!DECL_ASSEMBLER_NAME_SET_P (decl))
710     lang_hooks.set_decl_assembler_name (decl);
711   return DECL_ASSEMBLER_NAME_RAW (decl);
712 }
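
/* Since the assembler name is computed lazily, callers that only want
   to know whether one already exists should test
   DECL_ASSEMBLER_NAME_SET_P instead of forcing it.  A sketch of both
   patterns (DECL stands for some declaration node):

     tree name;
     if (DECL_ASSEMBLER_NAME_SET_P (decl))
       name = DECL_ASSEMBLER_NAME_RAW (decl);	// no side effects
     else
       name = decl_assembler_name (decl);	// computes and caches it

   Either way the result is an IDENTIFIER_NODE.  */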
713 
714 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
715    (either of which may be NULL).  Inform the FE, if this changes the
716    name.  */
717 
718 void
719 overwrite_decl_assembler_name (tree decl, tree name)
720 {
721   if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
722     lang_hooks.overwrite_decl_assembler_name (decl, name);
723 }
724 
725 /* When the target supports COMDAT groups, this indicates which group the
726    DECL is associated with.  This can be either an IDENTIFIER_NODE or a
727    decl, in which case its DECL_ASSEMBLER_NAME identifies the group.  */
728 tree
729 decl_comdat_group (const_tree node)
730 {
731   struct symtab_node *snode = symtab_node::get (node);
732   if (!snode)
733     return NULL;
734   return snode->get_comdat_group ();
735 }
736 
737 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE.  */
738 tree
739 decl_comdat_group_id (const_tree node)
740 {
741   struct symtab_node *snode = symtab_node::get (node);
742   if (!snode)
743     return NULL;
744   return snode->get_comdat_group_id ();
745 }
746 
747 /* When the target supports named sections, return the name of the section
748    NODE is placed in, or NULL if it is in no section.  */
749 const char *
750 decl_section_name (const_tree node)
751 {
752   struct symtab_node *snode = symtab_node::get (node);
753   if (!snode)
754     return NULL;
755   return snode->get_section ();
756 }
757 
758 /* Set the section name of NODE to the string VALUE, or clear the
759    section when VALUE is NULL.  */
760 void
761 set_decl_section_name (tree node, const char *value)
762 {
763   struct symtab_node *snode;
764 
765   if (value == NULL)
766     {
767       snode = symtab_node::get (node);
768       if (!snode)
769 	return;
770     }
771   else if (VAR_P (node))
772     snode = varpool_node::get_create (node);
773   else
774     snode = cgraph_node::get_create (node);
775   snode->set_section (value);
776 }
777 
778 /* Set section name of NODE to match the section name of OTHER.
779 
780    set_decl_section_name (decl, other) is equivalent to
781    set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
782    efficient.  */
783 void
784 set_decl_section_name (tree decl, const_tree other)
785 {
786   struct symtab_node *other_node = symtab_node::get (other);
787   if (other_node)
788     {
789       struct symtab_node *decl_node;
790       if (VAR_P (decl))
791     decl_node = varpool_node::get_create (decl);
792       else
793     decl_node = cgraph_node::get_create (decl);
794       decl_node->set_section (*other_node);
795     }
796   else
797     {
798       struct symtab_node *decl_node = symtab_node::get (decl);
799       if (!decl_node)
800     return;
801       decl_node->set_section (NULL);
802     }
803 }
804 
805 /* Return TLS model of a variable NODE.  */
806 enum tls_model
807 decl_tls_model (const_tree node)
808 {
809   struct varpool_node *snode = varpool_node::get (node);
810   if (!snode)
811     return TLS_MODEL_NONE;
812   return snode->tls_model;
813 }
814 
815 /* Set TLS model of variable NODE to MODEL.  */
816 void
817 set_decl_tls_model (tree node, enum tls_model model)
818 {
819   struct varpool_node *vnode;
820 
821   if (model == TLS_MODEL_NONE)
822     {
823       vnode = varpool_node::get (node);
824       if (!vnode)
825 	return;
826     }
827   else
828     vnode = varpool_node::get_create (node);
829   vnode->tls_model = model;
830 }
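
/* A short sketch of how the two TLS accessors above fit together (DECL
   is assumed to be a thread-local VAR_DECL):

     set_decl_tls_model (decl, TLS_MODEL_INITIAL_EXEC);
     gcc_checking_assert (decl_tls_model (decl) == TLS_MODEL_INITIAL_EXEC);

   Setting any model other than TLS_MODEL_NONE creates the varpool node
   on demand; querying a decl without a varpool node yields
   TLS_MODEL_NONE.  */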
831 
832 /* Compute the number of bytes occupied by a tree with code CODE.
833    This function cannot be used for nodes that have variable sizes,
834    including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR.  */
835 size_t
836 tree_code_size (enum tree_code code)
837 {
838   switch (TREE_CODE_CLASS (code))
839     {
840     case tcc_declaration:  /* A decl node */
841       switch (code)
842 	{
843 	case FIELD_DECL:	return sizeof (tree_field_decl);
844 	case PARM_DECL:		return sizeof (tree_parm_decl);
845 	case VAR_DECL:		return sizeof (tree_var_decl);
846 	case LABEL_DECL:	return sizeof (tree_label_decl);
847 	case RESULT_DECL:	return sizeof (tree_result_decl);
848 	case CONST_DECL:	return sizeof (tree_const_decl);
849 	case TYPE_DECL:		return sizeof (tree_type_decl);
850 	case FUNCTION_DECL:	return sizeof (tree_function_decl);
851 	case DEBUG_EXPR_DECL:	return sizeof (tree_decl_with_rtl);
852 	case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
853 	case NAMESPACE_DECL:
854 	case IMPORTED_DECL:
855 	case NAMELIST_DECL:	return sizeof (tree_decl_non_common);
856 	default:
857 	  gcc_checking_assert (code >= NUM_TREE_CODES);
858 	  return lang_hooks.tree_size (code);
859 	}
860 
861     case tcc_type:  /* a type node */
862       switch (code)
863 	{
864 	case OFFSET_TYPE:
865 	case ENUMERAL_TYPE:
866 	case BOOLEAN_TYPE:
867 	case INTEGER_TYPE:
868 	case REAL_TYPE:
869 	case OPAQUE_TYPE:
870 	case POINTER_TYPE:
871 	case REFERENCE_TYPE:
872 	case NULLPTR_TYPE:
873 	case FIXED_POINT_TYPE:
874 	case COMPLEX_TYPE:
875 	case VECTOR_TYPE:
876 	case ARRAY_TYPE:
877 	case RECORD_TYPE:
878 	case UNION_TYPE:
879 	case QUAL_UNION_TYPE:
880 	case VOID_TYPE:
881 	case FUNCTION_TYPE:
882 	case METHOD_TYPE:
883 	case LANG_TYPE:		return sizeof (tree_type_non_common);
884 	default:
885 	  gcc_checking_assert (code >= NUM_TREE_CODES);
886 	  return lang_hooks.tree_size (code);
887 	}
888 
889     case tcc_reference:   /* a reference */
890     case tcc_expression:  /* an expression */
891     case tcc_statement:   /* an expression with side effects */
892     case tcc_comparison:  /* a comparison expression */
893     case tcc_unary:       /* a unary arithmetic expression */
894     case tcc_binary:      /* a binary arithmetic expression */
895       return (sizeof (struct tree_exp)
896 	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
897 
898     case tcc_constant:  /* a constant */
899       switch (code)
900 	{
901 	case VOID_CST:		return sizeof (tree_typed);
902 	case INTEGER_CST:	gcc_unreachable ();
903 	case POLY_INT_CST:	return sizeof (tree_poly_int_cst);
904 	case REAL_CST:		return sizeof (tree_real_cst);
905 	case FIXED_CST:		return sizeof (tree_fixed_cst);
906 	case COMPLEX_CST:	return sizeof (tree_complex);
907 	case VECTOR_CST:	gcc_unreachable ();
908 	case STRING_CST:	gcc_unreachable ();
909 	default:
910 	  gcc_checking_assert (code >= NUM_TREE_CODES);
911 	  return lang_hooks.tree_size (code);
912 	}
913 
914     case tcc_exceptional:  /* something random, like an identifier.  */
915       switch (code)
916 	{
917 	case IDENTIFIER_NODE:	return lang_hooks.identifier_size;
918 	case TREE_LIST:		return sizeof (tree_list);
919 
920 	case ERROR_MARK:
921 	case PLACEHOLDER_EXPR:	return sizeof (tree_common);
922 
923 	case TREE_VEC:		gcc_unreachable ();
924 	case OMP_CLAUSE:	gcc_unreachable ();
925 
926 	case SSA_NAME:		return sizeof (tree_ssa_name);
927 
928 	case STATEMENT_LIST:	return sizeof (tree_statement_list);
929 	case BLOCK:		return sizeof (struct tree_block);
930 	case CONSTRUCTOR:	return sizeof (tree_constructor);
931 	case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
932 	case TARGET_OPTION_NODE: return sizeof (tree_target_option);
933 
934 	default:
935 	  gcc_checking_assert (code >= NUM_TREE_CODES);
936 	  return lang_hooks.tree_size (code);
937 	}
938 
939     default:
940       gcc_unreachable ();
941     }
942 }
943 
944 /* Compute the number of bytes occupied by NODE.  This routine only
945    looks at TREE_CODE, except for those nodes that have variable sizes.  */
946 size_t
947 tree_size (const_tree node)
948 {
949   const enum tree_code code = TREE_CODE (node);
950   switch (code)
951     {
952     case INTEGER_CST:
953       return (sizeof (struct tree_int_cst)
954 	      + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
955 
956     case TREE_BINFO:
957       return (offsetof (struct tree_binfo, base_binfos)
958 	      + vec<tree, va_gc>
959 		  ::embedded_size (BINFO_N_BASE_BINFOS (node)));
960 
961     case TREE_VEC:
962       return (sizeof (struct tree_vec)
963 	      + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
964 
965     case VECTOR_CST:
966       return (sizeof (struct tree_vector)
967 	      + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
968 
969     case STRING_CST:
970       return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
971 
972     case OMP_CLAUSE:
973       return (sizeof (struct tree_omp_clause)
974 	      + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
975 	        * sizeof (tree));
976 
977     default:
978       if (TREE_CODE_CLASS (code) == tcc_vl_exp)
979 	return (sizeof (struct tree_exp)
980 		+ (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
981       else
982 	return tree_code_size (code);
983     }
984 }
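
/* For fixed-size codes tree_code_size and tree_size agree; for
   variable-size codes only tree_size, which inspects the node, is
   meaningful.  As a worked example, an INTEGER_CST whose
   TREE_INT_CST_EXT_NUNITS is 2 occupies

     sizeof (struct tree_int_cst) + (2 - 1) * sizeof (HOST_WIDE_INT)

   bytes, whereas tree_code_size (INTEGER_CST) deliberately aborts.  */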
985 
986 /* Return tree node kind based on tree CODE.  */
987 
988 static tree_node_kind
989 get_stats_node_kind (enum tree_code code)
990 {
991   enum tree_code_class type = TREE_CODE_CLASS (code);
992 
993   switch (type)
994     {
995     case tcc_declaration:  /* A decl node */
996       return d_kind;
997     case tcc_type:  /* a type node */
998       return t_kind;
999     case tcc_statement:  /* an expression with side effects */
1000       return s_kind;
1001     case tcc_reference:  /* a reference */
1002       return r_kind;
1003     case tcc_expression:  /* an expression */
1004     case tcc_comparison:  /* a comparison expression */
1005     case tcc_unary:  /* a unary arithmetic expression */
1006     case tcc_binary:  /* a binary arithmetic expression */
1007       return e_kind;
1008     case tcc_constant:  /* a constant */
1009       return c_kind;
1010     case tcc_exceptional:  /* something random, like an identifier.  */
1011       switch (code)
1012 	{
1013 	case IDENTIFIER_NODE:
1014 	  return id_kind;
1015 	case TREE_VEC:
1016 	  return vec_kind;
1017 	case TREE_BINFO:
1018 	  return binfo_kind;
1019 	case SSA_NAME:
1020 	  return ssa_name_kind;
1021 	case BLOCK:
1022 	  return b_kind;
1023 	case CONSTRUCTOR:
1024 	  return constr_kind;
1025 	case OMP_CLAUSE:
1026 	  return omp_clause_kind;
1027 	default:
1028 	  return x_kind;
1029 	}
1030       break;
1031     case tcc_vl_exp:
1032       return e_kind;
1033     default:
1034       gcc_unreachable ();
1035     }
1036 }
1037 
1038 /* Record interesting allocation statistics for a tree node with CODE
1039    and LENGTH.  */
1040 
1041 static void
1042 record_node_allocation_statistics (enum tree_code code, size_t length)
1043 {
1044   if (!GATHER_STATISTICS)
1045     return;
1046 
1047   tree_node_kind kind = get_stats_node_kind (code);
1048 
1049   tree_code_counts[(int) code]++;
1050   tree_node_counts[(int) kind]++;
1051   tree_node_sizes[(int) kind] += length;
1052 }
1053 
1054 /* Allocate and return a new UID from the DECL_UID namespace.  */
1055 
1056 int
1057 allocate_decl_uid (void)
1058 {
1059   return next_decl_uid++;
1060 }
1061 
1062 /* Return a newly allocated node of code CODE.  For decl and type
1063    nodes, some other fields are initialized.  The rest of the node is
1064    initialized to zero.  This function cannot be used for TREE_VEC,
1065    INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1066    tree_code_size.
1067 
1068    Achoo!  I got a code in the node.  */
1069 
1070 tree
1071 make_node (enum tree_code code MEM_STAT_DECL)
1072 {
1073   tree t;
1074   enum tree_code_class type = TREE_CODE_CLASS (code);
1075   size_t length = tree_code_size (code);
1076 
1077   record_node_allocation_statistics (code, length);
1078 
1079   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1080   TREE_SET_CODE (t, code);
1081 
1082   switch (type)
1083     {
1084     case tcc_statement:
1085       if (code != DEBUG_BEGIN_STMT)
1086 	TREE_SIDE_EFFECTS (t) = 1;
1087       break;
1088 
1089     case tcc_declaration:
1090       if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1091 	{
1092 	  if (code == FUNCTION_DECL)
1093 	    {
1094 	      SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1095 	      SET_DECL_MODE (t, FUNCTION_MODE);
1096 	    }
1097 	  else
1098 	    SET_DECL_ALIGN (t, 1);
1099 	}
1100       DECL_SOURCE_LOCATION (t) = input_location;
1101       if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1102 	DECL_UID (t) = --next_debug_decl_uid;
1103       else
1104 	{
1105 	  DECL_UID (t) = allocate_decl_uid ();
1106 	  SET_DECL_PT_UID (t, -1);
1107 	}
1108       if (TREE_CODE (t) == LABEL_DECL)
1109 	LABEL_DECL_UID (t) = -1;
1110 
1111       break;
1112 
1113     case tcc_type:
1114       TYPE_UID (t) = next_type_uid++;
1115       SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1116       TYPE_USER_ALIGN (t) = 0;
1117       TYPE_MAIN_VARIANT (t) = t;
1118       TYPE_CANONICAL (t) = t;
1119 
1120       /* Default to no attributes for type, but let target change that.  */
1121       TYPE_ATTRIBUTES (t) = NULL_TREE;
1122       targetm.set_default_type_attributes (t);
1123 
1124       /* We have not yet computed the alias set for this type.  */
1125       TYPE_ALIAS_SET (t) = -1;
1126       break;
1127 
1128     case tcc_constant:
1129       TREE_CONSTANT (t) = 1;
1130       break;
1131 
1132     case tcc_expression:
1133       switch (code)
1134 	{
1135 	case INIT_EXPR:
1136 	case MODIFY_EXPR:
1137 	case VA_ARG_EXPR:
1138 	case PREDECREMENT_EXPR:
1139 	case PREINCREMENT_EXPR:
1140 	case POSTDECREMENT_EXPR:
1141 	case POSTINCREMENT_EXPR:
1142 	  /* All of these have side-effects, no matter what their
1143 	     operands are.  */
1144 	  TREE_SIDE_EFFECTS (t) = 1;
1145 	  break;
1146 
1147 	default:
1148 	  break;
1149 	}
1150       break;
1151 
1152     case tcc_exceptional:
1153       switch (code)
1154         {
1155 	case TARGET_OPTION_NODE:
1156 	  TREE_TARGET_OPTION(t)
1157 			    = ggc_cleared_alloc<struct cl_target_option> ();
1158 	  break;
1159 
1160 	case OPTIMIZATION_NODE:
1161 	  TREE_OPTIMIZATION (t)
1162 			    = ggc_cleared_alloc<struct cl_optimization> ();
1163 	  break;
1164 
1165 	default:
1166 	  break;
1167 	}
1168       break;
1169 
1170     default:
1171       /* Other classes need no special treatment.  */
1172       break;
1173     }
1174 
1175   return t;
1176 }
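
/* An illustrative use of make_node (a sketch only; most callers go
   through higher-level builders such as build_decl or build_int_cst):

     tree cons = make_node (TREE_LIST);
     // A TREE_LIST node with all fields zeroed.

     tree var = make_node (VAR_DECL);
     // DECL_UID, DECL_ALIGN, DECL_SOURCE_LOCATION and DECL_PT_UID are
     // already initialized as described above; TREE_TYPE, DECL_NAME and
     // DECL_CONTEXT still have to be filled in by the caller.  */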
1177 
1178 /* Free tree node.  */
1179 
1180 void
1181 free_node (tree node)
1182 {
1183   enum tree_code code = TREE_CODE (node);
1184   if (GATHER_STATISTICS)
1185     {
1186       enum tree_node_kind kind = get_stats_node_kind (code);
1187 
1188       gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1189       gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1190       gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1191 
1192       tree_code_counts[(int) TREE_CODE (node)]--;
1193       tree_node_counts[(int) kind]--;
1194       tree_node_sizes[(int) kind] -= tree_size (node);
1195     }
1196   if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1197     vec_free (CONSTRUCTOR_ELTS (node));
1198   else if (code == BLOCK)
1199     vec_free (BLOCK_NONLOCALIZED_VARS (node));
1200   else if (code == TREE_BINFO)
1201     vec_free (BINFO_BASE_ACCESSES (node));
1202   else if (code == OPTIMIZATION_NODE)
1203     cl_optimization_option_free (TREE_OPTIMIZATION (node));
1204   else if (code == TARGET_OPTION_NODE)
1205     cl_target_option_free (TREE_TARGET_OPTION (node));
1206   ggc_free (node);
1207 }
1208 
1209 /* Return a new node with the same contents as NODE except that its
1210    TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */
1211 
1212 tree
1213 copy_node (tree node MEM_STAT_DECL)
1214 {
1215   tree t;
1216   enum tree_code code = TREE_CODE (node);
1217   size_t length;
1218 
1219   gcc_assert (code != STATEMENT_LIST);
1220 
1221   length = tree_size (node);
1222   record_node_allocation_statistics (code, length);
1223   t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1224   memcpy (t, node, length);
1225 
1226   if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1227     TREE_CHAIN (t) = 0;
1228   TREE_ASM_WRITTEN (t) = 0;
1229   TREE_VISITED (t) = 0;
1230 
1231   if (TREE_CODE_CLASS (code) == tcc_declaration)
1232     {
1233       if (code == DEBUG_EXPR_DECL)
1234 	DECL_UID (t) = --next_debug_decl_uid;
1235       else
1236 	{
1237 	  DECL_UID (t) = allocate_decl_uid ();
1238 	  if (DECL_PT_UID_SET_P (node))
1239 	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
1240 	}
1241       if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1242 	  && DECL_HAS_VALUE_EXPR_P (node))
1243 	{
1244 	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1245 	  DECL_HAS_VALUE_EXPR_P (t) = 1;
1246 	}
1247       /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
1248       if (VAR_P (node))
1249 	{
1250 	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
1251 	  t->decl_with_vis.symtab_node = NULL;
1252 	}
1253       if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1254 	{
1255 	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1256 	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
1257 	}
1258       if (TREE_CODE (node) == FUNCTION_DECL)
1259 	{
1260 	  DECL_STRUCT_FUNCTION (t) = NULL;
1261 	  t->decl_with_vis.symtab_node = NULL;
1262 	}
1263     }
1264   else if (TREE_CODE_CLASS (code) == tcc_type)
1265     {
1266       TYPE_UID (t) = next_type_uid++;
1267       /* The following is so that the debug code for
1268 	 the copy is different from the original type.
1269 	 The two statements usually duplicate each other
1270 	 (because they clear fields of the same union),
1271 	 but the optimizer should catch that.  */
1272       TYPE_SYMTAB_ADDRESS (t) = 0;
1273       TYPE_SYMTAB_DIE (t) = 0;
1274 
1275       /* Do not copy the values cache.  */
1276       if (TYPE_CACHED_VALUES_P (t))
1277 	{
1278 	  TYPE_CACHED_VALUES_P (t) = 0;
1279 	  TYPE_CACHED_VALUES (t) = NULL_TREE;
1280 	}
1281     }
1282     else if (code == TARGET_OPTION_NODE)
1283       {
1284 	TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1285 	memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1286 		sizeof (struct cl_target_option));
1287       }
1288     else if (code == OPTIMIZATION_NODE)
1289       {
1290 	TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1291 	memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1292 		sizeof (struct cl_optimization));
1293       }
1294 
1295   return t;
1296 }
1297 
1298 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1299    For example, this can copy a list made of TREE_LIST nodes.  */
1300 
1301 tree
1302 copy_list (tree list)
1303 {
1304   tree head;
1305   tree prev, next;
1306 
1307   if (list == 0)
1308     return 0;
1309 
1310   head = prev = copy_node (list);
1311   next = TREE_CHAIN (list);
1312   while (next)
1313     {
1314       TREE_CHAIN (prev) = copy_node (next);
1315       prev = TREE_CHAIN (prev);
1316       next = TREE_CHAIN (next);
1317     }
1318   return head;
1319 }
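
/* Sketch of copy_list on a short TREE_LIST chain (tree_cons builds one
   node of such a chain; the NULL_TREE purposes are arbitrary here):

     tree orig = tree_cons (NULL_TREE, integer_one_node,
			    tree_cons (NULL_TREE, integer_zero_node,
				       NULL_TREE));
     tree dup = copy_list (orig);

   DUP is a fresh two-node chain: the TREE_LIST nodes are copied, the
   values they point to are shared.  */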
1320 
1321 
1322 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1323    INTEGER_CST with value CST and type TYPE.   */
1324 
1325 static unsigned int
1326 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1327 {
1328   gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1329   /* We need extra HWIs if CST is an unsigned integer with its
1330      upper bit set.  */
1331   if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1332     return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1333   return cst.get_len ();
1334 }
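
/* Worked example: for a 64-bit unsigned TYPE and CST equal to
   0xffffffffffffffff, the canonical wide_int has length 1, but read as
   a signed HOST_WIDE_INT that element is negative, so one extra zero
   element is needed for the extended (unsigned-correct) representation:
   64 / HOST_BITS_PER_WIDE_INT + 1 == 2 on a host with 64-bit
   HOST_WIDE_INT.  */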
1335 
1336 /* Return a new INTEGER_CST with value CST and type TYPE.  */
1337 
1338 static tree
1339 build_new_int_cst (tree type, const wide_int &cst)
1340 {
1341   unsigned int len = cst.get_len ();
1342   unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1343   tree nt = make_int_cst (len, ext_len);
1344 
1345   if (len < ext_len)
1346     {
1347       --ext_len;
1348       TREE_INT_CST_ELT (nt, ext_len)
1349 	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1350       for (unsigned int i = len; i < ext_len; ++i)
1351 	TREE_INT_CST_ELT (nt, i) = -1;
1352     }
1353   else if (TYPE_UNSIGNED (type)
1354 	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1355     {
1356       len--;
1357       TREE_INT_CST_ELT (nt, len)
1358 	= zext_hwi (cst.elt (len),
1359 		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1360     }
1361 
1362   for (unsigned int i = 0; i < len; i++)
1363     TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1364   TREE_TYPE (nt) = type;
1365   return nt;
1366 }
1367 
1368 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE.  */
1369 
1370 static tree
1371 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1372 			CXX_MEM_STAT_INFO)
1373 {
1374   size_t length = sizeof (struct tree_poly_int_cst);
1375   record_node_allocation_statistics (POLY_INT_CST, length);
1376 
1377   tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1378 
1379   TREE_SET_CODE (t, POLY_INT_CST);
1380   TREE_CONSTANT (t) = 1;
1381   TREE_TYPE (t) = type;
1382   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1383     POLY_INT_CST_COEFF (t, i) = coeffs[i];
1384   return t;
1385 }
1386 
1387 /* Create a constant tree that contains CST sign-extended to TYPE.  */
1388 
1389 tree
1390 build_int_cst (tree type, poly_int64 cst)
1391 {
1392   /* Support legacy code.  */
1393   if (!type)
1394     type = integer_type_node;
1395 
1396   return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1397 }
1398 
1399 /* Create a constant tree that contains CST zero-extended to TYPE.  */
1400 
1401 tree
1402 build_int_cstu (tree type, poly_uint64 cst)
1403 {
1404   return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1405 }
1406 
1407 /* Create a constant tree that contains CST sign-extended to TYPE.  */
1408 
1409 tree
1410 build_int_cst_type (tree type, poly_int64 cst)
1411 {
1412   gcc_assert (type);
1413   return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1414 }
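
/* Illustrative sketch of the difference between the sign- and
   zero-extending entry points (using the standard
   unsigned_char_type_node):

     tree a = build_int_cst (unsigned_char_type_node, -1);
     tree b = build_int_cstu (unsigned_char_type_node, 255);

   Both calls yield the same shared INTEGER_CST with value 255 of type
   unsigned char: the incoming value is first extended, then handed to
   wide_int_to_tree, which truncates it to the type's precision.  */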
1415 
1416 /* Construct a tree of type TYPE with the value given by CST.  The
1417    signedness of CST is assumed to be the same as that of TYPE.  */
1418 
1419 tree
1420 double_int_to_tree (tree type, double_int cst)
1421 {
1422   return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1423 }
1424 
1425 /* We force the wide_int CST to the range of the type TYPE by sign or
1426    zero extending it.  OVERFLOWABLE indicates if we are interested in
1427    overflow of the value: when >0 we are only interested in signed
1428    overflow, for <0 we are interested in any overflow.  OVERFLOWED
1429    indicates whether overflow has already occurred.  We force the
1430    result's value to be within the range of TYPE (by setting to 0 or 1
1431    all the bits outside the type's range).  We set TREE_OVERFLOW if
1432         OVERFLOWED is nonzero,
1433         or OVERFLOWABLE is >0 and signed overflow occurs,
1434         or OVERFLOWABLE is <0 and any overflow occurs.
1435    We return a new tree node for the extended wide_int.  The node
1436    is shared through wide_int_to_tree if no overflow flags need to
1437    be set.  */
1438 
1439 
1440 tree
1441 force_fit_type (tree type, const poly_wide_int_ref &cst,
1442 		int overflowable, bool overflowed)
1443 {
1444   signop sign = TYPE_SIGN (type);
1445 
1446   /* If we need to set overflow flags, return a new unshared node.  */
1447   if (overflowed || !wi::fits_to_tree_p (cst, type))
1448     {
1449       if (overflowed
1450 	  || overflowable < 0
1451 	  || (overflowable > 0 && sign == SIGNED))
1452 	{
1453 	  poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1454 						   sign);
1455 	  tree t;
1456 	  if (tmp.is_constant ())
1457 	    t = build_new_int_cst (type, tmp.coeffs[0]);
1458 	  else
1459 	    {
1460 	      tree coeffs[NUM_POLY_INT_COEFFS];
1461 	      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1462 		{
1463 		  coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1464 		  TREE_OVERFLOW (coeffs[i]) = 1;
1465 		}
1466 	      t = build_new_poly_int_cst (type, coeffs);
1467 	    }
1468 	  TREE_OVERFLOW (t) = 1;
1469 	  return t;
1470 	}
1471     }
1472 
1473   /* Else build a shared node.  */
1474   return wide_int_to_tree (type, cst);
1475 }
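
/* A worked example of the overflow handling above (not a real call
   site): forcing the value 300 into a type of precision 8 (unsigned
   char) wraps it to 44.  With OVERFLOWABLE < 0 or OVERFLOWED true the
   result is a fresh, unshared INTEGER_CST with TREE_OVERFLOW set; with
   OVERFLOWABLE == 0 and OVERFLOWED false the wrapped value is simply
   returned from the shared cache via wide_int_to_tree.  */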
1476 
1477 /* These are the hash table functions for the hash table of INTEGER_CST
1478    nodes of a sizetype.  */
1479 
1480 /* Return the hash code for X, an INTEGER_CST.  */
1481 
1482 hashval_t
1483 int_cst_hasher::hash (tree x)
1484 {
1485   const_tree const t = x;
1486   hashval_t code = TYPE_UID (TREE_TYPE (t));
1487   int i;
1488 
1489   for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1490     code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1491 
1492   return code;
1493 }
1494 
1495 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1496    is the same as that represented by Y, also an INTEGER_CST tree node.  */
1497 
1498 bool
1499 int_cst_hasher::equal (tree x, tree y)
1500 {
1501   const_tree const xt = x;
1502   const_tree const yt = y;
1503 
1504   if (TREE_TYPE (xt) != TREE_TYPE (yt)
1505       || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1506       || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1507     return false;
1508 
1509   for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1510     if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1511       return false;
1512 
1513   return true;
1514 }
1515 
1516 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1517    SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1518    number of slots that can be cached for the type.  */
1519 
1520 static inline tree
1521 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1522 			      int slot, int max_slots)
1523 {
1524   gcc_checking_assert (slot >= 0);
1525   /* Initialize cache.  */
1526   if (!TYPE_CACHED_VALUES_P (type))
1527     {
1528       TYPE_CACHED_VALUES_P (type) = 1;
1529       TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1530     }
1531   tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1532   if (!t)
1533     {
1534       /* Create a new shared int.  */
1535       t = build_new_int_cst (type, cst);
1536       TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1537     }
1538   return t;
1539 }
1540 
1541 /* Create an INT_CST node of TYPE and value CST.
1542    The returned node is always shared.  For small integers we use a
1543    per-type vector cache, for larger ones we use a single hash table.
1544    The value is extended from its precision according to the sign of
1545    the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
1546    the upper bits and ensures that hashing and value equality based
1547    upon the underlying HOST_WIDE_INTs works without masking.  */
1548 
1549 static tree
1550 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1551 {
1552   tree t;
1553   int ix = -1;
1554   int limit = 0;
1555 
1556   gcc_assert (type);
1557   unsigned int prec = TYPE_PRECISION (type);
1558   signop sgn = TYPE_SIGN (type);
1559 
1560   /* Verify that everything is canonical.  */
1561   int l = pcst.get_len ();
1562   if (l > 1)
1563     {
1564       if (pcst.elt (l - 1) == 0)
1565 	gcc_checking_assert (pcst.elt (l - 2) < 0);
1566       if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1567 	gcc_checking_assert (pcst.elt (l - 2) >= 0);
1568     }
1569 
1570   wide_int cst = wide_int::from (pcst, prec, sgn);
1571   unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1572 
1573   enum tree_code code = TREE_CODE (type);
1574   if (code == POINTER_TYPE || code == REFERENCE_TYPE)
1575     {
1576       /* Cache NULL pointer and zero bounds.  */
1577       if (cst == 0)
1578 	ix = 0;
1579       /* Cache upper bounds of pointers.  */
1580       else if (cst == wi::max_value (prec, sgn))
1581 	ix = 1;
1582       /* Cache 1 which is used for a non-zero range.  */
1583       else if (cst == 1)
1584 	ix = 2;
1585 
1586       if (ix >= 0)
1587 	{
1588 	  t = cache_wide_int_in_type_cache (type, cst, ix, 3);
1589 	  /* Make sure no one is clobbering the shared constant.  */
1590 	  gcc_checking_assert (TREE_TYPE (t) == type
1591 			       && cst == wi::to_wide (t));
1592 	  return t;
1593 	}
1594     }
1595   if (ext_len == 1)
1596     {
1597       /* We just need to store a single HOST_WIDE_INT.  */
1598       HOST_WIDE_INT hwi;
1599       if (TYPE_UNSIGNED (type))
1600 	hwi = cst.to_uhwi ();
1601       else
1602 	hwi = cst.to_shwi ();
1603 
1604       switch (code)
1605 	{
1606 	case NULLPTR_TYPE:
1607 	  gcc_assert (hwi == 0);
1608 	  /* Fallthru.  */
1609 
1610 	case POINTER_TYPE:
1611 	case REFERENCE_TYPE:
1612 	  /* Ignore pointers, as they were already handled above.  */
1613 	  break;
1614 
1615 	case BOOLEAN_TYPE:
1616 	  /* Cache false or true.  */
1617 	  limit = 2;
1618 	  if (IN_RANGE (hwi, 0, 1))
1619 	    ix = hwi;
1620 	  break;
1621 
1622 	case INTEGER_TYPE:
1623 	case OFFSET_TYPE:
1624 	  if (TYPE_SIGN (type) == UNSIGNED)
1625 	    {
1626 	      /* Cache [0, N).  */
1627 	      limit = param_integer_share_limit;
1628 	      if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1629 		ix = hwi;
1630 	    }
1631 	  else
1632 	    {
1633 	      /* Cache [-1, N).  */
1634 	      limit = param_integer_share_limit + 1;
1635 	      if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1636 		ix = hwi + 1;
1637 	    }
1638 	  break;
1639 
1640 	case ENUMERAL_TYPE:
1641 	  break;
1642 
1643 	default:
1644 	  gcc_unreachable ();
1645 	}
1646 
1647       if (ix >= 0)
1648 	{
1649 	  t = cache_wide_int_in_type_cache (type, cst, ix, limit);
1650 	  /* Make sure no one is clobbering the shared constant.  */
1651 	  gcc_checking_assert (TREE_TYPE (t) == type
1652 			       && TREE_INT_CST_NUNITS (t) == 1
1653 			       && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1654 			       && TREE_INT_CST_EXT_NUNITS (t) == 1
1655 			       && TREE_INT_CST_ELT (t, 0) == hwi);
1656 	  return t;
1657 	}
1658       else
1659 	{
1660 	  /* Use the cache of larger shared ints, using int_cst_node as
1661 	     a temporary.  */
1662 
1663 	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1664 	  TREE_TYPE (int_cst_node) = type;
1665 
1666 	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1667 	  t = *slot;
1668 	  if (!t)
1669 	    {
1670 	      /* Insert this one into the hash table.  */
1671 	      t = int_cst_node;
1672 	      *slot = t;
1673 	      /* Make a new node for next time round.  */
1674 	      int_cst_node = make_int_cst (1, 1);
1675 	    }
1676 	}
1677     }
1678   else
1679     {
1680       /* The value either hashes properly or we drop it on the floor
1681 	 for the gc to take care of.  There will not be enough of them
1682 	 to worry about.  */
1683 
1684       tree nt = build_new_int_cst (type, cst);
1685       tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1686       t = *slot;
1687       if (!t)
1688 	{
1689 	  /* Insert this one into the hash table.  */
1690 	  t = nt;
1691 	  *slot = t;
1692 	}
1693       else
1694 	ggc_free (nt);
1695     }
1696 
1697   return t;
1698 }
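
/* The practical effect of the caching above is that small shared
   constants compare equal by pointer.  An illustrative sketch:

     tree a = build_int_cst (integer_type_node, 7);
     tree b = build_int_cst (integer_type_node, 7);
     gcc_checking_assert (a == b);

   Both come out of integer_type_node's TYPE_CACHED_VALUES vector, since
   7 is below the default param_integer_share_limit.  Larger values are
   still shared, but through int_cst_hash_table instead of the per-type
   vector.  */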
1699 
1700 hashval_t
1701 poly_int_cst_hasher::hash (tree t)
1702 {
1703   inchash::hash hstate;
1704 
1705   hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1706   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1707     hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1708 
1709   return hstate.end ();
1710 }
1711 
1712 bool
1713 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1714 {
1715   if (TREE_TYPE (x) != y.first)
1716     return false;
1717   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1718     if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1719       return false;
1720   return true;
1721 }
1722 
1723 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1724    The elements must also have type TYPE.  */
1725 
1726 tree
1727 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1728 {
1729   unsigned int prec = TYPE_PRECISION (type);
1730   gcc_assert (prec <= values.coeffs[0].get_precision ());
1731   poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1732 
1733   inchash::hash h;
1734   h.add_int (TYPE_UID (type));
1735   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1736     h.add_wide_int (c.coeffs[i]);
1737   poly_int_cst_hasher::compare_type comp (type, &c);
1738   tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1739 							     INSERT);
1740   if (*slot == NULL_TREE)
1741     {
1742       tree coeffs[NUM_POLY_INT_COEFFS];
1743       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1744 	coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1745       *slot = build_new_poly_int_cst (type, coeffs);
1746     }
1747   return *slot;
1748 }
1749 
1750 /* Create a constant tree with value VALUE in type TYPE.  */
1751 
1752 tree
1753 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1754 {
1755   if (value.is_constant ())
1756     return wide_int_to_tree_1 (type, value.coeffs[0]);
1757   return build_poly_int_cst (type, value);
1758 }
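
/* Illustrative sketch (not part of GCC itself): creating the INTEGER_CST
   42 of type 'int' from a wide_int goes through wide_int_to_tree_1 and
   will normally return the shared cached node on repeated calls:

     wide_int w = wi::shwi (42, TYPE_PRECISION (integer_type_node));
     tree forty_two = wide_int_to_tree (integer_type_node, w);  */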
1759 
1760 /* Insert INTEGER_CST T into a cache of integer constants.  And return
1761    the cached constant (which may or may not be T).  If MIGHT_DUPLICATE
1762    is false, and T falls into the type's 'smaller values' range, there
1763    cannot be an existing entry.  Otherwise, if MIGHT_DUPLICATE is true,
1764    or the value is large, should an existing entry exist, it is
1765    returned (rather than inserting T).  */
1766 
1767 tree
1768 cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
1769 {
1770   tree type = TREE_TYPE (t);
1771   int ix = -1;
1772   int limit = 0;
1773   int prec = TYPE_PRECISION (type);
1774 
1775   gcc_assert (!TREE_OVERFLOW (t));
1776 
1777   /* The caching indices here must match those in
1778      wide_int_to_tree_1.  */
1779   switch (TREE_CODE (type))
1780     {
1781     case NULLPTR_TYPE:
1782       gcc_checking_assert (integer_zerop (t));
1783       /* Fallthru.  */
1784 
1785     case POINTER_TYPE:
1786     case REFERENCE_TYPE:
1787       {
1788 	if (integer_zerop (t))
1789 	  ix = 0;
1790 	else if (integer_onep (t))
1791 	  ix = 2;
1792 
1793 	if (ix >= 0)
1794 	  limit = 3;
1795       }
1796       break;
1797 
1798     case BOOLEAN_TYPE:
1799       /* Cache false or true.  */
1800       limit = 2;
1801       if (wi::ltu_p (wi::to_wide (t), 2))
1802 	ix = TREE_INT_CST_ELT (t, 0);
1803       break;
1804 
1805     case INTEGER_TYPE:
1806     case OFFSET_TYPE:
1807       if (TYPE_UNSIGNED (type))
1808 	{
1809 	  /* Cache 0..N */
1810 	  limit = param_integer_share_limit;
1811 
1812 	  /* This is a little hokey, but if the prec is smaller than
1813 	     what is necessary to hold param_integer_share_limit, then the
1814 	     obvious test will not get the correct answer.  */
1815 	  if (prec < HOST_BITS_PER_WIDE_INT)
1816 	    {
1817 	      if (tree_to_uhwi (t)
1818 		  < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1819 		ix = tree_to_uhwi (t);
1820 	    }
1821 	  else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1822 	    ix = tree_to_uhwi (t);
1823 	}
1824       else
1825 	{
1826 	  /* Cache -1..N */
1827 	  limit = param_integer_share_limit + 1;
1828 
1829 	  if (integer_minus_onep (t))
1830 	    ix = 0;
1831 	  else if (!wi::neg_p (wi::to_wide (t)))
1832 	    {
1833 	      if (prec < HOST_BITS_PER_WIDE_INT)
1834 		{
1835 		  if (tree_to_shwi (t) < param_integer_share_limit)
1836 		    ix = tree_to_shwi (t) + 1;
1837 		}
1838 	      else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1839 		ix = tree_to_shwi (t) + 1;
1840 	    }
1841 	}
1842       break;
1843 
1844     case ENUMERAL_TYPE:
1845       /* The slot used by TYPE_CACHED_VALUES is used for the enum
1846 	 members.  */
1847       break;
1848 
1849     default:
1850       gcc_unreachable ();
1851     }
1852 
1853   if (ix >= 0)
1854     {
1855       /* Look for it in the type's vector of small shared ints.  */
1856       if (!TYPE_CACHED_VALUES_P (type))
1857 	{
1858 	  TYPE_CACHED_VALUES_P (type) = 1;
1859 	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1860 	}
1861 
1862       if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
1863 	{
1864 	  gcc_checking_assert (might_duplicate);
1865 	  t = r;
1866 	}
1867       else
1868 	TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1869     }
1870   else
1871     {
1872       /* Use the cache of larger shared ints.  */
1873       tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1874       if (tree r = *slot)
1875 	{
1876 	  /* If there is already an entry for the number verify it's the
1877 	     same value.  */
1878 	  gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
1879 	  /* And return the cached value.  */
1880 	  t = r;
1881 	}
1882       else
1883 	/* Otherwise insert this one into the hash table.  */
1884 	*slot = t;
1885     }
1886 
1887   return t;
1888 }
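
/* Illustrative sketch (not part of GCC itself): for an INTEGER_CST T whose
   value falls in its type's small-value range, the call

     t = cache_integer_cst (t, true);

   either records T in TYPE_CACHED_VALUES or, if an equal constant was
   already cached there, returns that shared node instead of T.  */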
1889 
1890 
1891 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1892    and the rest are zeros.  */
1893 
1894 tree
1895 build_low_bits_mask (tree type, unsigned bits)
1896 {
1897   gcc_assert (bits <= TYPE_PRECISION (type));
1898 
1899   return wide_int_to_tree (type, wi::mask (bits, false,
1900 					   TYPE_PRECISION (type)));
1901 }
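
/* For example (illustrative only), build_low_bits_mask (unsigned_type_node, 8)
   yields the constant 0xff in 'unsigned int'.  */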
1902 
1903 /* Checks that X is an integer constant that can be expressed in (unsigned)
1904    HOST_WIDE_INT without loss of precision.  */
1905 
1906 bool
1907 cst_and_fits_in_hwi (const_tree x)
1908 {
1909   return (TREE_CODE (x) == INTEGER_CST
1910 	  && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1911 }
1912 
1913 /* Build a newly constructed VECTOR_CST with the given values of
1914    (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN.  */
1915 
1916 tree
1917 make_vector (unsigned log2_npatterns,
1918 	     unsigned int nelts_per_pattern MEM_STAT_DECL)
1919 {
1920   gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
1921   tree t;
1922   unsigned npatterns = 1 << log2_npatterns;
1923   unsigned encoded_nelts = npatterns * nelts_per_pattern;
1924   unsigned length = (sizeof (struct tree_vector)
1925 		     + (encoded_nelts - 1) * sizeof (tree));
1926 
1927   record_node_allocation_statistics (VECTOR_CST, length);
1928 
1929   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1930 
1931   TREE_SET_CODE (t, VECTOR_CST);
1932   TREE_CONSTANT (t) = 1;
1933   VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
1934   VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
1935 
1936   return t;
1937 }
1938 
1939 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1940    are extracted from V, a vector of CONSTRUCTOR_ELT.  */
1941 
1942 tree
1943 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1944 {
1945   if (vec_safe_length (v) == 0)
1946     return build_zero_cst (type);
1947 
1948   unsigned HOST_WIDE_INT idx, nelts;
1949   tree value;
1950 
1951   /* We can't construct a VECTOR_CST for a variable number of elements.  */
1952   nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
1953   tree_vector_builder vec (type, nelts, 1);
1954   FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1955     {
1956       if (TREE_CODE (value) == VECTOR_CST)
1957 	{
1958 	  /* If NELTS is constant then this must be too.  */
1959 	  unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
1960 	  for (unsigned i = 0; i < sub_nelts; ++i)
1961 	    vec.quick_push (VECTOR_CST_ELT (value, i));
1962 	}
1963       else
1964 	vec.quick_push (value);
1965     }
1966   while (vec.length () < nelts)
1967     vec.quick_push (build_zero_cst (TREE_TYPE (type)));
1968 
1969   return vec.build ();
1970 }
1971 
1972 /* Build a vector of type VECTYPE where all the elements are SCs.  */
1973 tree
1974 build_vector_from_val (tree vectype, tree sc)
1975 {
1976   unsigned HOST_WIDE_INT i, nunits;
1977 
1978   if (sc == error_mark_node)
1979     return sc;
1980 
1981   /* Verify that the vector type is suitable for SC.  Note that there
1982      is some inconsistency in the type-system with respect to restrict
1983      qualifications of pointers.  Vector types always have a main-variant
1984      element type and the qualification is applied to the vector-type.
1985      So TREE_TYPE (vector-type) does not return a properly qualified
1986      vector element-type.  */
1987   gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1988 					   TREE_TYPE (vectype)));
1989 
1990   if (CONSTANT_CLASS_P (sc))
1991     {
1992       tree_vector_builder v (vectype, 1, 1);
1993       v.quick_push (sc);
1994       return v.build ();
1995     }
1996   else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
1997     return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
1998   else
1999     {
2000       vec<constructor_elt, va_gc> *v;
2001       vec_alloc (v, nunits);
2002       for (i = 0; i < nunits; ++i)
2003 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
2004       return build_constructor (vectype, v);
2005     }
2006 }
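
/* Illustrative sketch (not part of GCC itself): splatting 1.0f across a
   four-element float vector type yields a VECTOR_CST, since the scalar
   is a constant:

     tree v4sf = build_vector_type (float_type_node, 4);
     tree sc = build_real (float_type_node, dconst1);
     tree ones = build_vector_from_val (v4sf, sc);  */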
2007 
2008 /* If TYPE is not a vector type, just return SC, otherwise return
2009    build_vector_from_val (TYPE, SC).  */
2010 
2011 tree
2012 build_uniform_cst (tree type, tree sc)
2013 {
2014   if (!VECTOR_TYPE_P (type))
2015     return sc;
2016 
2017   return build_vector_from_val (type, sc);
2018 }
2019 
2020 /* Build a vector series of type TYPE in which element I has the value
2021    BASE + I * STEP.  The result is a constant if BASE and STEP are constant
2022    and a VEC_SERIES_EXPR otherwise.  */
2023 
2024 tree
2025 build_vec_series (tree type, tree base, tree step)
2026 {
2027   if (integer_zerop (step))
2028     return build_vector_from_val (type, base);
2029   if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
2030     {
2031       tree_vector_builder builder (type, 1, 3);
2032       tree elt1 = wide_int_to_tree (TREE_TYPE (base),
2033 				    wi::to_wide (base) + wi::to_wide (step));
2034       tree elt2 = wide_int_to_tree (TREE_TYPE (base),
2035 				    wi::to_wide (elt1) + wi::to_wide (step));
2036       builder.quick_push (base);
2037       builder.quick_push (elt1);
2038       builder.quick_push (elt2);
2039       return builder.build ();
2040     }
2041   return build2 (VEC_SERIES_EXPR, type, base, step);
2042 }
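
/* Illustrative sketch (not part of GCC itself): with a constant base and
   step,

     tree v4si = build_vector_type (integer_type_node, 4);
     tree base = build_int_cst (integer_type_node, 0);
     tree step = build_int_cst (integer_type_node, 1);
     tree iota = build_vec_series (v4si, base, step);

   produces a VECTOR_CST encoding { 0, 1, 2, 3 }.  */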
2043 
2044 /* Return a vector with the same number of units and number of bits
2045    as VEC_TYPE, but in which the elements are a linear series of unsigned
2046    integers { BASE, BASE + STEP, BASE + STEP * 2, ... }.  */
2047 
2048 tree
2049 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
2050 {
2051   tree index_vec_type = vec_type;
2052   tree index_elt_type = TREE_TYPE (vec_type);
2053   poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
2054   if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
2055     {
2056       index_elt_type = build_nonstandard_integer_type
2057 	(GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
2058       index_vec_type = build_vector_type (index_elt_type, nunits);
2059     }
2060 
2061   tree_vector_builder v (index_vec_type, 1, 3);
2062   for (unsigned int i = 0; i < 3; ++i)
2063     v.quick_push (build_int_cstu (index_elt_type, base + i * step));
2064   return v.build ();
2065 }
2066 
2067 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2068    elements are A and the rest are B.  */
2069 
2070 tree
2071 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
2072 {
2073   gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
2074   unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
2075   /* Optimize the constant case.  */
2076   if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
2077     count /= 2;
2078   tree_vector_builder builder (vec_type, count, 2);
2079   for (unsigned int i = 0; i < count * 2; ++i)
2080     builder.quick_push (i < num_a ? a : b);
2081   return builder.build ();
2082 }
2083 
2084 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2085    calculate TREE_CONSTANT and TREE_SIDE_EFFECTS.  */
2086 
2087 void
2088 recompute_constructor_flags (tree c)
2089 {
2090   unsigned int i;
2091   tree val;
2092   bool constant_p = true;
2093   bool side_effects_p = false;
2094   vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2095 
2096   FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2097     {
2098       /* Mostly ctors will have elts that don't have side-effects, so
2099 	 the usual case is to scan all the elements.  Hence a single
2100 	 loop for both const and side effects, rather than one loop
2101 	 each (with early outs).  */
2102       if (!TREE_CONSTANT (val))
2103 	constant_p = false;
2104       if (TREE_SIDE_EFFECTS (val))
2105 	side_effects_p = true;
2106     }
2107 
2108   TREE_SIDE_EFFECTS (c) = side_effects_p;
2109   TREE_CONSTANT (c) = constant_p;
2110 }
2111 
2112 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2113    CONSTRUCTOR C.  */
2114 
2115 void
2116 verify_constructor_flags (tree c)
2117 {
2118   unsigned int i;
2119   tree val;
2120   bool constant_p = TREE_CONSTANT (c);
2121   bool side_effects_p = TREE_SIDE_EFFECTS (c);
2122   vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2123 
2124   FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2125     {
2126       if (constant_p && !TREE_CONSTANT (val))
2127 	internal_error ("non-constant element in constant CONSTRUCTOR");
2128       if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2129 	internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2130     }
2131 }
2132 
2133 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2134    are in the vec pointed to by VALS.  */
2135 tree
2136 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2137 {
2138   tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2139 
2140   TREE_TYPE (c) = type;
2141   CONSTRUCTOR_ELTS (c) = vals;
2142 
2143   recompute_constructor_flags (c);
2144 
2145   return c;
2146 }
2147 
2148 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2149    INDEX and VALUE.  */
2150 tree
2151 build_constructor_single (tree type, tree index, tree value)
2152 {
2153   vec<constructor_elt, va_gc> *v;
2154   constructor_elt elt = {index, value};
2155 
2156   vec_alloc (v, 1);
2157   v->quick_push (elt);
2158 
2159   return build_constructor (type, v);
2160 }
2161 
2162 
2163 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2164    are in a list pointed to by VALS.  */
2165 tree
2166 build_constructor_from_list (tree type, tree vals)
2167 {
2168   tree t;
2169   vec<constructor_elt, va_gc> *v = NULL;
2170 
2171   if (vals)
2172     {
2173       vec_alloc (v, list_length (vals));
2174       for (t = vals; t; t = TREE_CHAIN (t))
2175 	CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2176     }
2177 
2178   return build_constructor (type, v);
2179 }
2180 
2181 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2182    are in a vector pointed to by VALS.  Note that the TREE_PURPOSE
2183    fields in the constructor remain null.  */
2184 
2185 tree
2186 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2187 {
2188   vec<constructor_elt, va_gc> *v = NULL;
2189 
2190   for (tree t : vals)
2191     CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2192 
2193   return build_constructor (type, v);
2194 }
2195 
2196 /* Return a new CONSTRUCTOR node whose type is TYPE.  NELTS is the number
2197    of elements, provided as index/value pairs.  */
2198 
2199 tree
2200 build_constructor_va (tree type, int nelts, ...)
2201 {
2202   vec<constructor_elt, va_gc> *v = NULL;
2203   va_list p;
2204 
2205   va_start (p, nelts);
2206   vec_alloc (v, nelts);
2207   while (nelts--)
2208     {
2209       tree index = va_arg (p, tree);
2210       tree value = va_arg (p, tree);
2211       CONSTRUCTOR_APPEND_ELT (v, index, value);
2212     }
2213   va_end (p);
2214   return build_constructor (type, v);
2215 }
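
/* Illustrative sketch (not part of GCC itself): building an initializer
   for a two-element array of 'int' as index/value pairs:

     tree atype = build_array_type_nelts (integer_type_node, 2);
     tree ctor = build_constructor_va (atype, 2,
				       size_int (0), integer_zero_node,
				       size_int (1), integer_one_node);  */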
2216 
2217 /* Return a node of type TYPE for which TREE_CLOBBER_P is true.  */
2218 
2219 tree
2220 build_clobber (tree type)
2221 {
2222   tree clobber = build_constructor (type, NULL);
2223   TREE_THIS_VOLATILE (clobber) = true;
2224   return clobber;
2225 }
2226 
2227 /* Return a new FIXED_CST node whose type is TYPE and value is F.  */
2228 
2229 tree
2230 build_fixed (tree type, FIXED_VALUE_TYPE f)
2231 {
2232   tree v;
2233   FIXED_VALUE_TYPE *fp;
2234 
2235   v = make_node (FIXED_CST);
2236   fp = ggc_alloc<fixed_value> ();
2237   memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2238 
2239   TREE_TYPE (v) = type;
2240   TREE_FIXED_CST_PTR (v) = fp;
2241   return v;
2242 }
2243 
2244 /* Return a new REAL_CST node whose type is TYPE and value is D.  */
2245 
2246 tree
2247 build_real (tree type, REAL_VALUE_TYPE d)
2248 {
2249   tree v;
2250   REAL_VALUE_TYPE *dp;
2251   int overflow = 0;
2252 
2253   /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2254      Consider doing it via real_convert now.  */
2255 
2256   v = make_node (REAL_CST);
2257   dp = ggc_alloc<real_value> ();
2258   memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2259 
2260   TREE_TYPE (v) = type;
2261   TREE_REAL_CST_PTR (v) = dp;
2262   TREE_OVERFLOW (v) = overflow;
2263   return v;
2264 }
2265 
2266 /* Like build_real, but first truncate D to the type.  */
2267 
2268 tree
2269 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2270 {
2271   return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2272 }
2273 
2274 /* Return a new REAL_CST node whose type is TYPE
2275    and whose value is the integer value of the INTEGER_CST node I.  */
2276 
2277 REAL_VALUE_TYPE
2278 real_value_from_int_cst (const_tree type, const_tree i)
2279 {
2280   REAL_VALUE_TYPE d;
2281 
2282   /* Clear all bits of the real value type so that we can later do
2283      bitwise comparisons to see if two values are the same.  */
2284   memset (&d, 0, sizeof d);
2285 
2286   real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2287 		     TYPE_SIGN (TREE_TYPE (i)));
2288   return d;
2289 }
2290 
2291 /* Given a tree representing an integer constant I, return a tree
2292    representing the same value as a floating-point constant of type TYPE.  */
2293 
2294 tree
2295 build_real_from_int_cst (tree type, const_tree i)
2296 {
2297   tree v;
2298   int overflow = TREE_OVERFLOW (i);
2299 
2300   v = build_real (type, real_value_from_int_cst (type, i));
2301 
2302   TREE_OVERFLOW (v) |= overflow;
2303   return v;
2304 }
2305 
2306 /* Return a new REAL_CST node whose type is TYPE
2307    and whose value is the integer value I which has sign SGN.  */
2308 
2309 tree
2310 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2311 {
2312   REAL_VALUE_TYPE d;
2313 
2314   /* Clear all bits of the real value type so that we can later do
2315      bitwise comparisons to see if two values are the same.  */
2316   memset (&d, 0, sizeof d);
2317 
2318   real_from_integer (&d, TYPE_MODE (type), i, sgn);
2319   return build_real (type, d);
2320 }
2321 
2322 /* Return a newly constructed STRING_CST node whose value is the LEN
2323    characters at STR when STR is nonnull, or all zeros otherwise.
2324    Note that for a C string literal, LEN should include the trailing NUL.
2325    The TREE_TYPE is not initialized.  */
2326 
2327 tree
2328 build_string (unsigned len, const char *str /*= NULL */)
2329 {
2330   /* Do not waste bytes provided by padding of struct tree_string.  */
2331   unsigned size = len + offsetof (struct tree_string, str) + 1;
2332 
2333   record_node_allocation_statistics (STRING_CST, size);
2334 
2335   tree s = (tree) ggc_internal_alloc (size);
2336 
2337   memset (s, 0, sizeof (struct tree_typed));
2338   TREE_SET_CODE (s, STRING_CST);
2339   TREE_CONSTANT (s) = 1;
2340   TREE_STRING_LENGTH (s) = len;
2341   if (str)
2342     memcpy (s->string.str, str, len);
2343   else
2344     memset (s->string.str, 0, len);
2345   s->string.str[len] = '\0';
2346 
2347   return s;
2348 }
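
/* Illustrative sketch (not part of GCC itself): the C string literal "hi"
   can be represented as

     tree s = build_string (3, "hi");

   where the length includes the trailing NUL; the caller still has to set
   TREE_TYPE (s) to a suitable array type.  */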
2349 
2350 /* Return a newly constructed COMPLEX_CST node whose value is
2351    specified by the real and imaginary parts REAL and IMAG.
2352    Both REAL and IMAG should be constant nodes.  TYPE, if specified,
2353    will be the type of the COMPLEX_CST; otherwise a new type will be made.  */
2354 
2355 tree
2356 build_complex (tree type, tree real, tree imag)
2357 {
2358   gcc_assert (CONSTANT_CLASS_P (real));
2359   gcc_assert (CONSTANT_CLASS_P (imag));
2360 
2361   tree t = make_node (COMPLEX_CST);
2362 
2363   TREE_REALPART (t) = real;
2364   TREE_IMAGPART (t) = imag;
2365   TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2366   TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2367   return t;
2368 }
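
/* For instance (illustrative only), the constant 1.0 + 0.0i of type
   'complex double' can be built as

     tree c = build_complex (NULL_TREE,
			     build_real (double_type_node, dconst1),
			     build_real (double_type_node, dconst0));

   where passing NULL_TREE lets build_complex derive the complex type from
   the real part.  */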
2369 
2370 /* Build a complex (inf +- 0i), such as for the result of cproj.
2371    TYPE is the complex tree type of the result.  If NEG is true, the
2372    imaginary zero is negative.  */
2373 
2374 tree
2375 build_complex_inf (tree type, bool neg)
2376 {
2377   REAL_VALUE_TYPE rinf, rzero = dconst0;
2378 
2379   real_inf (&rinf);
2380   rzero.sign = neg;
2381   return build_complex (type, build_real (TREE_TYPE (type), rinf),
2382 			build_real (TREE_TYPE (type), rzero));
2383 }
2384 
2385 /* Return the constant 1 in type TYPE.  If TYPE has several elements, each
2386    element is set to 1.  In particular, this is 1 + i for complex types.  */
2387 
2388 tree
2389 build_each_one_cst (tree type)
2390 {
2391   if (TREE_CODE (type) == COMPLEX_TYPE)
2392     {
2393       tree scalar = build_one_cst (TREE_TYPE (type));
2394       return build_complex (type, scalar, scalar);
2395     }
2396   else
2397     return build_one_cst (type);
2398 }
2399 
2400 /* Return a constant of arithmetic type TYPE which is the
2401    multiplicative identity of the set TYPE.  */
2402 
2403 tree
2404 build_one_cst (tree type)
2405 {
2406   switch (TREE_CODE (type))
2407     {
2408     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2409     case POINTER_TYPE: case REFERENCE_TYPE:
2410     case OFFSET_TYPE:
2411       return build_int_cst (type, 1);
2412 
2413     case REAL_TYPE:
2414       return build_real (type, dconst1);
2415 
2416     case FIXED_POINT_TYPE:
2417       /* We can only generate 1 for accum types.  */
2418       gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2419       return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2420 
2421     case VECTOR_TYPE:
2422       {
2423 	tree scalar = build_one_cst (TREE_TYPE (type));
2424 
2425 	return build_vector_from_val (type, scalar);
2426       }
2427 
2428     case COMPLEX_TYPE:
2429       return build_complex (type,
2430 			    build_one_cst (TREE_TYPE (type)),
2431 			    build_zero_cst (TREE_TYPE (type)));
2432 
2433     default:
2434       gcc_unreachable ();
2435     }
2436 }
2437 
2438 /* Return an integer of type TYPE containing all 1's in as much precision as
2439    it contains, or a complex or vector whose subparts are such integers.  */
2440 
2441 tree
2442 build_all_ones_cst (tree type)
2443 {
2444   if (TREE_CODE (type) == COMPLEX_TYPE)
2445     {
2446       tree scalar = build_all_ones_cst (TREE_TYPE (type));
2447       return build_complex (type, scalar, scalar);
2448     }
2449   else
2450     return build_minus_one_cst (type);
2451 }
2452 
2453 /* Return a constant of arithmetic type TYPE which is the
2454    opposite of the multiplicative identity of the set TYPE.  */
2455 
2456 tree
2457 build_minus_one_cst (tree type)
2458 {
2459   switch (TREE_CODE (type))
2460     {
2461     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2462     case POINTER_TYPE: case REFERENCE_TYPE:
2463     case OFFSET_TYPE:
2464       return build_int_cst (type, -1);
2465 
2466     case REAL_TYPE:
2467       return build_real (type, dconstm1);
2468 
2469     case FIXED_POINT_TYPE:
2470       /* We can only generate -1 for accum types.  */
2471       gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2472       return build_fixed (type,
2473 			  fixed_from_double_int (double_int_minus_one,
2474 						 SCALAR_TYPE_MODE (type)));
2475 
2476     case VECTOR_TYPE:
2477       {
2478 	tree scalar = build_minus_one_cst (TREE_TYPE (type));
2479 
2480 	return build_vector_from_val (type, scalar);
2481       }
2482 
2483     case COMPLEX_TYPE:
2484       return build_complex (type,
2485 			    build_minus_one_cst (TREE_TYPE (type)),
2486 			    build_zero_cst (TREE_TYPE (type)));
2487 
2488     default:
2489       gcc_unreachable ();
2490     }
2491 }
2492 
2493 /* Build 0 constant of type TYPE.  This is used by constructor folding
2494    and thus the constant should be represented in memory by
2495    zero(es).  */
2496 
2497 tree
2498 build_zero_cst (tree type)
2499 {
2500   switch (TREE_CODE (type))
2501     {
2502     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2503     case POINTER_TYPE: case REFERENCE_TYPE:
2504     case OFFSET_TYPE: case NULLPTR_TYPE:
2505       return build_int_cst (type, 0);
2506 
2507     case REAL_TYPE:
2508       return build_real (type, dconst0);
2509 
2510     case FIXED_POINT_TYPE:
2511       return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2512 
2513     case VECTOR_TYPE:
2514       {
2515 	tree scalar = build_zero_cst (TREE_TYPE (type));
2516 
2517 	return build_vector_from_val (type, scalar);
2518       }
2519 
2520     case COMPLEX_TYPE:
2521       {
2522 	tree zero = build_zero_cst (TREE_TYPE (type));
2523 
2524 	return build_complex (type, zero, zero);
2525       }
2526 
2527     default:
2528       if (!AGGREGATE_TYPE_P (type))
2529 	return fold_convert (type, integer_zero_node);
2530       return build_constructor (type, NULL);
2531     }
2532 }
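
/* For instance (illustrative only), build_zero_cst (double_type_node)
   yields the REAL_CST 0.0, while for a RECORD_TYPE it returns an empty
   CONSTRUCTOR, which is how an all-zero aggregate is represented.  */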
2533 
2534 
2535 /* Build a BINFO with BASE_BINFOS base binfo slots.  */
2536 
2537 tree
2538 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2539 {
2540   tree t;
2541   size_t length = (offsetof (struct tree_binfo, base_binfos)
2542 		   + vec<tree, va_gc>::embedded_size (base_binfos));
2543 
2544   record_node_allocation_statistics (TREE_BINFO, length);
2545 
2546   t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2547 
2548   memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2549 
2550   TREE_SET_CODE (t, TREE_BINFO);
2551 
2552   BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2553 
2554   return t;
2555 }
2556 
2557 /* Create a CASE_LABEL_EXPR tree node and return it.  */
2558 
2559 tree
2560 build_case_label (tree low_value, tree high_value, tree label_decl)
2561 {
2562   tree t = make_node (CASE_LABEL_EXPR);
2563 
2564   TREE_TYPE (t) = void_type_node;
2565   SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2566 
2567   CASE_LOW (t) = low_value;
2568   CASE_HIGH (t) = high_value;
2569   CASE_LABEL (t) = label_decl;
2570   CASE_CHAIN (t) = NULL_TREE;
2571 
2572   return t;
2573 }
2574 
2575 /* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
2576    values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2577    The latter determines the length of the HOST_WIDE_INT vector.  */
2578 
2579 tree
2580 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2581 {
2582   tree t;
2583   int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2584 		+ sizeof (struct tree_int_cst));
2585 
2586   gcc_assert (len);
2587   record_node_allocation_statistics (INTEGER_CST, length);
2588 
2589   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2590 
2591   TREE_SET_CODE (t, INTEGER_CST);
2592   TREE_INT_CST_NUNITS (t) = len;
2593   TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2594   /* to_offset can only be applied to trees that are offset_int-sized
2595      or smaller.  EXT_LEN is correct if it fits, otherwise the constant
2596      must be exactly the precision of offset_int and so LEN is correct.  */
2597   if (ext_len <= OFFSET_INT_ELTS)
2598     TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2599   else
2600     TREE_INT_CST_OFFSET_NUNITS (t) = len;
2601 
2602   TREE_CONSTANT (t) = 1;
2603 
2604   return t;
2605 }
2606 
2607 /* Build a newly constructed TREE_VEC node of length LEN.  */
2608 
2609 tree
2610 make_tree_vec (int len MEM_STAT_DECL)
2611 {
2612   tree t;
2613   size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2614 
2615   record_node_allocation_statistics (TREE_VEC, length);
2616 
2617   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2618 
2619   TREE_SET_CODE (t, TREE_VEC);
2620   TREE_VEC_LENGTH (t) = len;
2621 
2622   return t;
2623 }
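
/* Illustrative sketch (not part of GCC itself): a fixed-length vector of
   three trees, initially all NULL:

     tree v = make_tree_vec (3);
     TREE_VEC_ELT (v, 0) = integer_zero_node;  */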
2624 
2625 /* Grow a TREE_VEC node to new length LEN.  */
2626 
2627 tree
2628 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2629 {
2630   gcc_assert (TREE_CODE (v) == TREE_VEC);
2631 
2632   int oldlen = TREE_VEC_LENGTH (v);
2633   gcc_assert (len > oldlen);
2634 
2635   size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2636   size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2637 
2638   record_node_allocation_statistics (TREE_VEC, length - oldlength);
2639 
2640   v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2641 
2642   TREE_VEC_LENGTH (v) = len;
2643 
2644   return v;
2645 }
2646 
2647 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2648    fixed, and scalar, complex or vector.  */
2649 
2650 bool
2651 zerop (const_tree expr)
2652 {
2653   return (integer_zerop (expr)
2654 	  || real_zerop (expr)
2655 	  || fixed_zerop (expr));
2656 }
2657 
2658 /* Return 1 if EXPR is the integer constant zero or a complex constant
2659    of zero, or a location wrapper for such a constant.  */
2660 
2661 bool
2662 integer_zerop (const_tree expr)
2663 {
2664   STRIP_ANY_LOCATION_WRAPPER (expr);
2665 
2666   switch (TREE_CODE (expr))
2667     {
2668     case INTEGER_CST:
2669       return wi::to_wide (expr) == 0;
2670     case COMPLEX_CST:
2671       return (integer_zerop (TREE_REALPART (expr))
2672 	      && integer_zerop (TREE_IMAGPART (expr)));
2673     case VECTOR_CST:
2674       return (VECTOR_CST_NPATTERNS (expr) == 1
2675 	      && VECTOR_CST_DUPLICATE_P (expr)
2676 	      && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2677     default:
2678       return false;
2679     }
2680 }
2681 
2682 /* Return 1 if EXPR is the integer constant one or the corresponding
2683    complex constant, or a location wrapper for such a constant.  */
2684 
2685 bool
2686 integer_onep (const_tree expr)
2687 {
2688   STRIP_ANY_LOCATION_WRAPPER (expr);
2689 
2690   switch (TREE_CODE (expr))
2691     {
2692     case INTEGER_CST:
2693       return wi::eq_p (wi::to_widest (expr), 1);
2694     case COMPLEX_CST:
2695       return (integer_onep (TREE_REALPART (expr))
2696 	      && integer_zerop (TREE_IMAGPART (expr)));
2697     case VECTOR_CST:
2698       return (VECTOR_CST_NPATTERNS (expr) == 1
2699 	      && VECTOR_CST_DUPLICATE_P (expr)
2700 	      && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2701     default:
2702       return false;
2703     }
2704 }
2705 
2706 /* Return 1 if EXPR is the integer constant one.  For complex and vector,
2707    return 1 if every piece is the integer constant one.
2708    Also return 1 for location wrappers for such a constant.  */
2709 
2710 bool
2711 integer_each_onep (const_tree expr)
2712 {
2713   STRIP_ANY_LOCATION_WRAPPER (expr);
2714 
2715   if (TREE_CODE (expr) == COMPLEX_CST)
2716     return (integer_onep (TREE_REALPART (expr))
2717 	    && integer_onep (TREE_IMAGPART (expr)));
2718   else
2719     return integer_onep (expr);
2720 }
2721 
2722 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2723    it contains, or a complex or vector whose subparts are such integers,
2724    or a location wrapper for such a constant.  */
2725 
2726 bool
2727 integer_all_onesp (const_tree expr)
2728 {
2729   STRIP_ANY_LOCATION_WRAPPER (expr);
2730 
2731   if (TREE_CODE (expr) == COMPLEX_CST
2732       && integer_all_onesp (TREE_REALPART (expr))
2733       && integer_all_onesp (TREE_IMAGPART (expr)))
2734     return true;
2735 
2736   else if (TREE_CODE (expr) == VECTOR_CST)
2737     return (VECTOR_CST_NPATTERNS (expr) == 1
2738 	    && VECTOR_CST_DUPLICATE_P (expr)
2739 	    && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2740 
2741   else if (TREE_CODE (expr) != INTEGER_CST)
2742     return false;
2743 
2744   return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2745 	  == wi::to_wide (expr));
2746 }
2747 
2748 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2749    for such a constant.  */
2750 
2751 bool
2752 integer_minus_onep (const_tree expr)
2753 {
2754   STRIP_ANY_LOCATION_WRAPPER (expr);
2755 
2756   if (TREE_CODE (expr) == COMPLEX_CST)
2757     return (integer_all_onesp (TREE_REALPART (expr))
2758 	    && integer_zerop (TREE_IMAGPART (expr)));
2759   else
2760     return integer_all_onesp (expr);
2761 }
2762 
2763 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2764    one bit on), or a location wrapper for such a constant.  */
2765 
2766 bool
2767 integer_pow2p (const_tree expr)
2768 {
2769   STRIP_ANY_LOCATION_WRAPPER (expr);
2770 
2771   if (TREE_CODE (expr) == COMPLEX_CST
2772       && integer_pow2p (TREE_REALPART (expr))
2773       && integer_zerop (TREE_IMAGPART (expr)))
2774     return true;
2775 
2776   if (TREE_CODE (expr) != INTEGER_CST)
2777     return false;
2778 
2779   return wi::popcount (wi::to_wide (expr)) == 1;
2780 }
2781 
2782 /* Return 1 if EXPR is an integer constant other than zero or a
2783    complex constant other than zero, or a location wrapper for such a
2784    constant.  */
2785 
2786 bool
2787 integer_nonzerop (const_tree expr)
2788 {
2789   STRIP_ANY_LOCATION_WRAPPER (expr);
2790 
2791   return ((TREE_CODE (expr) == INTEGER_CST
2792 	   && wi::to_wide (expr) != 0)
2793 	  || (TREE_CODE (expr) == COMPLEX_CST
2794 	      && (integer_nonzerop (TREE_REALPART (expr))
2795 		  || integer_nonzerop (TREE_IMAGPART (expr)))));
2796 }
2797 
2798 /* Return 1 if EXPR is the integer constant one.  For vector,
2799    return 1 if every piece is the integer constant minus one
2800    (representing the value TRUE).
2801    Also return 1 for location wrappers for such a constant.  */
2802 
2803 bool
2804 integer_truep (const_tree expr)
2805 {
2806   STRIP_ANY_LOCATION_WRAPPER (expr);
2807 
2808   if (TREE_CODE (expr) == VECTOR_CST)
2809     return integer_all_onesp (expr);
2810   return integer_onep (expr);
2811 }
2812 
2813 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2814    for such a constant.  */
2815 
2816 bool
2817 fixed_zerop (const_tree expr)
2818 {
2819   STRIP_ANY_LOCATION_WRAPPER (expr);
2820 
2821   return (TREE_CODE (expr) == FIXED_CST
2822 	  && TREE_FIXED_CST (expr).data.is_zero ());
2823 }
2824 
2825 /* Return the power of two represented by a tree node known to be a
2826    power of two.  */
2827 
2828 int
2829 tree_log2 (const_tree expr)
2830 {
2831   if (TREE_CODE (expr) == COMPLEX_CST)
2832     return tree_log2 (TREE_REALPART (expr));
2833 
2834   return wi::exact_log2 (wi::to_wide (expr));
2835 }
2836 
2837 /* Similar, but return the largest integer Y such that 2 ** Y is less
2838    than or equal to EXPR.  */
2839 
2840 int
2841 tree_floor_log2 (const_tree expr)
2842 {
2843   if (TREE_CODE (expr) == COMPLEX_CST)
2844     return tree_log2 (TREE_REALPART (expr));
2845 
2846   return wi::floor_log2 (wi::to_wide (expr));
2847 }
2848 
2849 /* Return number of known trailing zero bits in EXPR, or, if the value of
2850    EXPR is known to be zero, the precision of its type.  */
2851 
2852 unsigned int
2853 tree_ctz (const_tree expr)
2854 {
2855   if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2856       && !POINTER_TYPE_P (TREE_TYPE (expr)))
2857     return 0;
2858 
2859   unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2860   switch (TREE_CODE (expr))
2861     {
2862     case INTEGER_CST:
2863       ret1 = wi::ctz (wi::to_wide (expr));
2864       return MIN (ret1, prec);
2865     case SSA_NAME:
2866       ret1 = wi::ctz (get_nonzero_bits (expr));
2867       return MIN (ret1, prec);
2868     case PLUS_EXPR:
2869     case MINUS_EXPR:
2870     case BIT_IOR_EXPR:
2871     case BIT_XOR_EXPR:
2872     case MIN_EXPR:
2873     case MAX_EXPR:
2874       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2875       if (ret1 == 0)
2876 	return ret1;
2877       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2878       return MIN (ret1, ret2);
2879     case POINTER_PLUS_EXPR:
2880       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2881       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2882       /* Second operand is sizetype, which could be in theory
2883 	 wider than pointer's precision.  Make sure we never
2884 	 return more than prec.  */
2885       ret2 = MIN (ret2, prec);
2886       return MIN (ret1, ret2);
2887     case BIT_AND_EXPR:
2888       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2889       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2890       return MAX (ret1, ret2);
2891     case MULT_EXPR:
2892       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2893       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2894       return MIN (ret1 + ret2, prec);
2895     case LSHIFT_EXPR:
2896       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2897       if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2898 	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2899 	{
2900 	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2901 	  return MIN (ret1 + ret2, prec);
2902 	}
2903       return ret1;
2904     case RSHIFT_EXPR:
2905       if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2906 	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2907 	{
2908 	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2909 	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2910 	  if (ret1 > ret2)
2911 	    return ret1 - ret2;
2912 	}
2913       return 0;
2914     case TRUNC_DIV_EXPR:
2915     case CEIL_DIV_EXPR:
2916     case FLOOR_DIV_EXPR:
2917     case ROUND_DIV_EXPR:
2918     case EXACT_DIV_EXPR:
2919       if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2920 	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2921 	{
2922 	  int l = tree_log2 (TREE_OPERAND (expr, 1));
2923 	  if (l >= 0)
2924 	    {
2925 	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2926 	      ret2 = l;
2927 	      if (ret1 > ret2)
2928 		return ret1 - ret2;
2929 	    }
2930 	}
2931       return 0;
2932     CASE_CONVERT:
2933       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2934       if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2935 	ret1 = prec;
2936       return MIN (ret1, prec);
2937     case SAVE_EXPR:
2938       return tree_ctz (TREE_OPERAND (expr, 0));
2939     case COND_EXPR:
2940       ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2941       if (ret1 == 0)
2942 	return 0;
2943       ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2944       return MIN (ret1, ret2);
2945     case COMPOUND_EXPR:
2946       return tree_ctz (TREE_OPERAND (expr, 1));
2947     case ADDR_EXPR:
2948       ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2949       if (ret1 > BITS_PER_UNIT)
2950 	{
2951 	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2952 	  return MIN (ret1, prec);
2953 	}
2954       return 0;
2955     default:
2956       return 0;
2957     }
2958 }
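
/* Worked example (illustrative only): if X is an SSA_NAME whose nonzero
   bits imply two known trailing zeros, then for the tree (X << 3) this
   returns MIN (2 + 3, prec) = 5, while for (X & Y) it returns the maximum
   of the operands' counts, since BIT_AND_EXPR can only clear bits.  */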
2959 
2960 /* Return 1 if EXPR is the real constant zero.  Trailing zeroes matter for
2961    decimal float constants, so don't return 1 for them.
2962    Also return 1 for location wrappers around such a constant.  */
2963 
2964 bool
2965 real_zerop (const_tree expr)
2966 {
2967   STRIP_ANY_LOCATION_WRAPPER (expr);
2968 
2969   switch (TREE_CODE (expr))
2970     {
2971     case REAL_CST:
2972       return real_equal (&TREE_REAL_CST (expr), &dconst0)
2973 	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2974     case COMPLEX_CST:
2975       return real_zerop (TREE_REALPART (expr))
2976 	     && real_zerop (TREE_IMAGPART (expr));
2977     case VECTOR_CST:
2978       {
2979 	/* Don't simply check for a duplicate because the predicate
2980 	   accepts both +0.0 and -0.0.  */
2981 	unsigned count = vector_cst_encoded_nelts (expr);
2982 	for (unsigned int i = 0; i < count; ++i)
2983 	  if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
2984 	    return false;
2985 	return true;
2986       }
2987     default:
2988       return false;
2989     }
2990 }
2991 
2992 /* Return 1 if EXPR is the real constant one in real or complex form.
2993    Trailing zeroes matter for decimal float constants, so don't return
2994    1 for them.
2995    Also return 1 for location wrappers around such a constant.  */
2996 
2997 bool
2998 real_onep (const_tree expr)
2999 {
3000   STRIP_ANY_LOCATION_WRAPPER (expr);
3001 
3002   switch (TREE_CODE (expr))
3003     {
3004     case REAL_CST:
3005       return real_equal (&TREE_REAL_CST (expr), &dconst1)
3006 	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3007     case COMPLEX_CST:
3008       return real_onep (TREE_REALPART (expr))
3009 	     && real_zerop (TREE_IMAGPART (expr));
3010     case VECTOR_CST:
3011       return (VECTOR_CST_NPATTERNS (expr) == 1
3012 	      && VECTOR_CST_DUPLICATE_P (expr)
3013 	      && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3014     default:
3015       return false;
3016     }
3017 }
3018 
3019 /* Return 1 if EXPR is the real constant minus one.  Trailing zeroes
3020    matter for decimal float constants, so don't return 1 for them.
3021    Also return 1 for location wrappers around such a constant.  */
3022 
3023 bool
3024 real_minus_onep (const_tree expr)
3025 {
3026   STRIP_ANY_LOCATION_WRAPPER (expr);
3027 
3028   switch (TREE_CODE (expr))
3029     {
3030     case REAL_CST:
3031       return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3032 	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3033     case COMPLEX_CST:
3034       return real_minus_onep (TREE_REALPART (expr))
3035 	     && real_zerop (TREE_IMAGPART (expr));
3036     case VECTOR_CST:
3037       return (VECTOR_CST_NPATTERNS (expr) == 1
3038 	      && VECTOR_CST_DUPLICATE_P (expr)
3039 	      && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3040     default:
3041       return false;
3042     }
3043 }
3044 
3045 /* Nonzero if EXP is a constant or a cast of a constant.  */
3046 
3047 bool
3048 really_constant_p (const_tree exp)
3049 {
3050   /* This is not quite the same as STRIP_NOPS.  It does more.  */
3051   while (CONVERT_EXPR_P (exp)
3052 	 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3053     exp = TREE_OPERAND (exp, 0);
3054   return TREE_CONSTANT (exp);
3055 }
3056 
3057 /* Return true if T holds a polynomial pointer difference, storing it in
3058    *VALUE if so.  A true return means that T's precision is no greater
3059    than 64 bits, which is the largest address space we support, so *VALUE
3060    never loses precision.  However, the signedness of the result does
3061    not necessarily match the signedness of T: sometimes an unsigned type
3062    like sizetype is used to encode a value that is actually negative.  */
3063 
3064 bool
3065 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
3066 {
3067   if (!t)
3068     return false;
3069   if (TREE_CODE (t) == INTEGER_CST)
3070     {
3071       if (!cst_and_fits_in_hwi (t))
3072 	return false;
3073       *value = int_cst_value (t);
3074       return true;
3075     }
3076   if (POLY_INT_CST_P (t))
3077     {
3078       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3079 	if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
3080 	  return false;
3081       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3082 	value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
3083       return true;
3084     }
3085   return false;
3086 }
3087 
3088 poly_int64
3089 tree_to_poly_int64 (const_tree t)
3090 {
3091   gcc_assert (tree_fits_poly_int64_p (t));
3092   if (POLY_INT_CST_P (t))
3093     return poly_int_cst_value (t).force_shwi ();
3094   return TREE_INT_CST_LOW (t);
3095 }
3096 
3097 poly_uint64
3098 tree_to_poly_uint64 (const_tree t)
3099 {
3100   gcc_assert (tree_fits_poly_uint64_p (t));
3101   if (POLY_INT_CST_P (t))
3102     return poly_int_cst_value (t).force_uhwi ();
3103   return TREE_INT_CST_LOW (t);
3104 }
3105 
3106 /* Return first list element whose TREE_VALUE is ELEM.
3107    Return 0 if ELEM is not in LIST.  */
3108 
3109 tree
3110 value_member (tree elem, tree list)
3111 {
3112   while (list)
3113     {
3114       if (elem == TREE_VALUE (list))
3115 	return list;
3116       list = TREE_CHAIN (list);
3117     }
3118   return NULL_TREE;
3119 }
3120 
3121 /* Return first list element whose TREE_PURPOSE is ELEM.
3122    Return 0 if ELEM is not in LIST.  */
3123 
3124 tree
3125 purpose_member (const_tree elem, tree list)
3126 {
3127   while (list)
3128     {
3129       if (elem == TREE_PURPOSE (list))
3130 	return list;
3131       list = TREE_CHAIN (list);
3132     }
3133   return NULL_TREE;
3134 }
3135 
3136 /* Return true if ELEM is in V.  */
3137 
3138 bool
3139 vec_member (const_tree elem, vec<tree, va_gc> *v)
3140 {
3141   unsigned ix;
3142   tree t;
3143   FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3144     if (elem == t)
3145       return true;
3146   return false;
3147 }
3148 
3149 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3150    NULL_TREE.  */
3151 
3152 tree
3153 chain_index (int idx, tree chain)
3154 {
3155   for (; chain && idx > 0; --idx)
3156     chain = TREE_CHAIN (chain);
3157   return chain;
3158 }
3159 
3160 /* Return nonzero if ELEM is part of the chain CHAIN.  */
3161 
3162 bool
3163 chain_member (const_tree elem, const_tree chain)
3164 {
3165   while (chain)
3166     {
3167       if (elem == chain)
3168 	return true;
3169       chain = DECL_CHAIN (chain);
3170     }
3171 
3172   return false;
3173 }
3174 
3175 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3176    We expect a null pointer to mark the end of the chain.
3177    This is the Lisp primitive `length'.  */
3178 
3179 int
3180 list_length (const_tree t)
3181 {
3182   const_tree p = t;
3183 #ifdef ENABLE_TREE_CHECKING
3184   const_tree q = t;
3185 #endif
3186   int len = 0;
3187 
3188   while (p)
3189     {
3190       p = TREE_CHAIN (p);
3191 #ifdef ENABLE_TREE_CHECKING
3192       if (len % 2)
3193 	q = TREE_CHAIN (q);
3194       gcc_assert (p != q);
3195 #endif
3196       len++;
3197     }
3198 
3199   return len;
3200 }
3201 
3202 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3203    UNION_TYPE TYPE, or NULL_TREE if none.  */
3204 
3205 tree
3206 first_field (const_tree type)
3207 {
3208   tree t = TYPE_FIELDS (type);
3209   while (t && TREE_CODE (t) != FIELD_DECL)
3210     t = TREE_CHAIN (t);
3211   return t;
3212 }
3213 
3214 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3215    UNION_TYPE TYPE, or NULL_TREE if none.  */
3216 
3217 tree
3218 last_field (const_tree type)
3219 {
3220   tree last = NULL_TREE;
3221 
3222   for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3223     {
3224       if (TREE_CODE (fld) != FIELD_DECL)
3225 	continue;
3226 
3227       last = fld;
3228     }
3229 
3230   return last;
3231 }
3232 
3233 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3234    by modifying the last node in chain 1 to point to chain 2.
3235    This is the Lisp primitive `nconc'.  */
3236 
3237 tree
3238 chainon (tree op1, tree op2)
3239 {
3240   tree t1;
3241 
3242   if (!op1)
3243     return op2;
3244   if (!op2)
3245     return op1;
3246 
3247   for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3248     continue;
3249   TREE_CHAIN (t1) = op2;
3250 
3251 #ifdef ENABLE_TREE_CHECKING
3252   {
3253     tree t2;
3254     for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3255       gcc_assert (t2 != t1);
3256   }
3257 #endif
3258 
3259   return op1;
3260 }
3261 
3262 /* Return the last node in a chain of nodes (chained through TREE_CHAIN).  */
3263 
3264 tree
3265 tree_last (tree chain)
3266 {
3267   tree next;
3268   if (chain)
3269     while ((next = TREE_CHAIN (chain)))
3270       chain = next;
3271   return chain;
3272 }
3273 
3274 /* Reverse the order of elements in the chain T,
3275    and return the new head of the chain (old last element).  */
3276 
3277 tree
3278 nreverse (tree t)
3279 {
3280   tree prev = 0, decl, next;
3281   for (decl = t; decl; decl = next)
3282     {
3283       /* We shouldn't be using this function to reverse BLOCK chains; we
3284 	 have blocks_nreverse for that.  */
3285       gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3286       next = TREE_CHAIN (decl);
3287       TREE_CHAIN (decl) = prev;
3288       prev = decl;
3289     }
3290   return prev;
3291 }
3292 
3293 /* Return a newly created TREE_LIST node whose
3294    purpose and value fields are PARM and VALUE.  */
3295 
3296 tree
3297 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3298 {
3299   tree t = make_node (TREE_LIST PASS_MEM_STAT);
3300   TREE_PURPOSE (t) = parm;
3301   TREE_VALUE (t) = value;
3302   return t;
3303 }
3304 
3305 /* Build a chain of TREE_LIST nodes from a vector.  */
3306 
3307 tree
3308 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3309 {
3310   tree ret = NULL_TREE;
3311   tree *pp = &ret;
3312   unsigned int i;
3313   tree t;
3314   FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3315     {
3316       *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3317       pp = &TREE_CHAIN (*pp);
3318     }
3319   return ret;
3320 }
3321 
3322 /* Return a newly created TREE_LIST node whose
3323    purpose and value fields are PURPOSE and VALUE
3324    and whose TREE_CHAIN is CHAIN.  */
3325 
3326 tree
3327 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3328 {
3329   tree node;
3330 
3331   node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3332   memset (node, 0, sizeof (struct tree_common));
3333 
3334   record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3335 
3336   TREE_SET_CODE (node, TREE_LIST);
3337   TREE_CHAIN (node) = chain;
3338   TREE_PURPOSE (node) = purpose;
3339   TREE_VALUE (node) = value;
3340   return node;
3341 }
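
/* Illustrative sketch (not part of GCC itself): TREE_LIST chains are often
   built by consing onto the front, so the node consed last ends up at the
   head.  FIRST_VALUE and SECOND_VALUE below are hypothetical trees:

     tree list = NULL_TREE;
     list = tree_cons (NULL_TREE, second_value, list);
     list = tree_cons (NULL_TREE, first_value, list);
     gcc_assert (list_length (list) == 2);  */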
3342 
3343 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3344    trees.  */
3345 
3346 vec<tree, va_gc> *
3347 ctor_to_vec (tree ctor)
3348 {
3349   vec<tree, va_gc> *vec;
3350   vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3351   unsigned int ix;
3352   tree val;
3353 
3354   FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3355     vec->quick_push (val);
3356 
3357   return vec;
3358 }
3359 
3360 /* Return the size nominally occupied by an object of type TYPE
3361    when it resides in memory.  The value is measured in units of bytes,
3362    and its data type is that normally used for type sizes
3363    (which is the first type created by make_signed_type or
3364    make_unsigned_type).  */
3365 
3366 tree
3367 size_in_bytes_loc (location_t loc, const_tree type)
3368 {
3369   tree t;
3370 
3371   if (type == error_mark_node)
3372     return integer_zero_node;
3373 
3374   type = TYPE_MAIN_VARIANT (type);
3375   t = TYPE_SIZE_UNIT (type);
3376 
3377   if (t == 0)
3378     {
3379       lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3380       return size_zero_node;
3381     }
3382 
3383   return t;
3384 }
3385 
3386 /* Return the size of TYPE (in bytes) as a wide integer
3387    or return -1 if the size can vary or is larger than an integer.  */
3388 
3389 HOST_WIDE_INT
3390 int_size_in_bytes (const_tree type)
3391 {
3392   tree t;
3393 
3394   if (type == error_mark_node)
3395     return 0;
3396 
3397   type = TYPE_MAIN_VARIANT (type);
3398   t = TYPE_SIZE_UNIT (type);
3399 
3400   if (t && tree_fits_uhwi_p (t))
3401     return TREE_INT_CST_LOW (t);
3402   else
3403     return -1;
3404 }
3405 
3406 /* Return the maximum size of TYPE (in bytes) as a wide integer
3407    or return -1 if the size can vary or is larger than an integer.  */
3408 
3409 HOST_WIDE_INT
3410 max_int_size_in_bytes (const_tree type)
3411 {
3412   HOST_WIDE_INT size = -1;
3413   tree size_tree;
3414 
3415   /* If this is an array type, check for a possible MAX_SIZE attached.  */
3416 
3417   if (TREE_CODE (type) == ARRAY_TYPE)
3418     {
3419       size_tree = TYPE_ARRAY_MAX_SIZE (type);
3420 
3421       if (size_tree && tree_fits_uhwi_p (size_tree))
3422 	size = tree_to_uhwi (size_tree);
3423     }
3424 
3425   /* If we still haven't been able to get a size, see if the language
3426      can compute a maximum size.  */
3427 
3428   if (size == -1)
3429     {
3430       size_tree = lang_hooks.types.max_size (type);
3431 
3432       if (size_tree && tree_fits_uhwi_p (size_tree))
3433 	size = tree_to_uhwi (size_tree);
3434     }
3435 
3436   return size;
3437 }
3438 
3439 /* Return the bit position of FIELD, in bits from the start of the record.
3440    This is a tree of type bitsizetype.  */
3441 
3442 tree
3443 bit_position (const_tree field)
3444 {
3445   return bit_from_pos (DECL_FIELD_OFFSET (field),
3446 		       DECL_FIELD_BIT_OFFSET (field));
3447 }
3448 
3449 /* Return the byte position of FIELD, in bytes from the start of the record.
3450    This is a tree of type sizetype.  */
3451 
3452 tree
3453 byte_position (const_tree field)
3454 {
3455   return byte_from_pos (DECL_FIELD_OFFSET (field),
3456 			DECL_FIELD_BIT_OFFSET (field));
3457 }
3458 
3459 /* Likewise, but return as an integer.  It must be representable in
3460    that way (since it could be a signed value, we don't have the
3461    option of returning -1 like int_size_in_bytes can).  */
3462 
3463 HOST_WIDE_INT
3464 int_byte_position (const_tree field)
3465 {
3466   return tree_to_shwi (byte_position (field));
3467 }
3468 
3469 /* Return, as a tree node, the number of elements for TYPE (which is an
3470    ARRAY_TYPE) minus one. This counts only elements of the top array.  */
3471 
3472 tree
3473 array_type_nelts (const_tree type)
3474 {
3475   tree index_type, min, max;
3476 
3477   /* If they did it with unspecified bounds, then we should have already
3478      given an error about it before we got here.  */
3479   if (! TYPE_DOMAIN (type))
3480     return error_mark_node;
3481 
3482   index_type = TYPE_DOMAIN (type);
3483   min = TYPE_MIN_VALUE (index_type);
3484   max = TYPE_MAX_VALUE (index_type);
3485 
3486   /* TYPE_MAX_VALUE may not be set if the array has unknown length.  */
3487   if (!max)
3488     {
3489       /* Zero-sized arrays are represented by the C FE as complete types with
3490 	 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while the C++ FE represents
3491 	 them as min 0, max -1.  */
3492       if (COMPLETE_TYPE_P (type)
3493 	  && integer_zerop (TYPE_SIZE (type))
3494 	  && integer_zerop (min))
3495 	return build_int_cst (TREE_TYPE (min), -1);
3496 
3497       return error_mark_node;
3498     }
3499 
3500   return (integer_zerop (min)
3501 	  ? max
3502 	  : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3503 }
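
/* Editor's illustrative sketch (not part of the original source; ATYPE is a
   hypothetical ARRAY_TYPE node): since the value above is the last index,
   callers that want the element count typically add one:

     tree last = array_type_nelts (atype);
     tree count = fold_build2 (PLUS_EXPR, TREE_TYPE (last), last,
			       build_int_cst (TREE_TYPE (last), 1));  */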
3504 
3505 /* If arg is static -- a reference to an object in static storage -- then
3506    return the object.  This is not the same as the C meaning of `static'.
3507    If arg isn't static, return NULL.  */
3508 
3509 tree
3510 staticp (tree arg)
3511 {
3512   switch (TREE_CODE (arg))
3513     {
3514     case FUNCTION_DECL:
3515       /* Nested functions are static, even though taking their address will
3516 	 involve a trampoline as we unnest the nested function and create
3517 	 the trampoline on the tree level.  */
3518       return arg;
3519 
3520     case VAR_DECL:
3521       return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3522 	      && ! DECL_THREAD_LOCAL_P (arg)
3523 	      && ! DECL_DLLIMPORT_P (arg)
3524 	      ? arg : NULL);
3525 
3526     case CONST_DECL:
3527       return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3528 	      ? arg : NULL);
3529 
3530     case CONSTRUCTOR:
3531       return TREE_STATIC (arg) ? arg : NULL;
3532 
3533     case LABEL_DECL:
3534     case STRING_CST:
3535       return arg;
3536 
3537     case COMPONENT_REF:
3538       /* If the thing being referenced is not a field, then it is
3539 	 something language specific.  */
3540       gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3541 
3542       /* If we are referencing a bitfield, we can't evaluate an
3543 	 ADDR_EXPR at compile time and so it isn't a constant.  */
3544       if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3545 	return NULL;
3546 
3547       return staticp (TREE_OPERAND (arg, 0));
3548 
3549     case BIT_FIELD_REF:
3550       return NULL;
3551 
3552     case INDIRECT_REF:
3553       return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3554 
3555     case ARRAY_REF:
3556     case ARRAY_RANGE_REF:
3557       if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3558 	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3559 	return staticp (TREE_OPERAND (arg, 0));
3560       else
3561 	return NULL;
3562 
3563     case COMPOUND_LITERAL_EXPR:
3564       return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3565 
3566     default:
3567       return NULL;
3568     }
3569 }
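
/* Editor's illustrative example (not part of the original source): for a
   file-scope "static int a[10];", staticp applied to the tree for "a[3]"
   recurses through the ARRAY_REF (constant index, constant element size)
   down to the VAR_DECL and returns that decl; for "a[i]" with a
   non-constant index it returns NULL.  */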
3570 
3571 
3572 
3573 
3574 /* Return whether OP is a DECL whose address is function-invariant.  */
3575 
3576 bool
3577 decl_address_invariant_p (const_tree op)
3578 {
3579   /* The conditions below are slightly less strict than the one in
3580      staticp.  */
3581 
3582   switch (TREE_CODE (op))
3583     {
3584     case PARM_DECL:
3585     case RESULT_DECL:
3586     case LABEL_DECL:
3587     case FUNCTION_DECL:
3588       return true;
3589 
3590     case VAR_DECL:
3591       if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3592           || DECL_THREAD_LOCAL_P (op)
3593           || DECL_CONTEXT (op) == current_function_decl
3594           || decl_function_context (op) == current_function_decl)
3595         return true;
3596       break;
3597 
3598     case CONST_DECL:
3599       if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3600           || decl_function_context (op) == current_function_decl)
3601         return true;
3602       break;
3603 
3604     default:
3605       break;
3606     }
3607 
3608   return false;
3609 }
3610 
3611 /* Return whether OP is a DECL whose address is interprocedural-invariant.  */
3612 
3613 bool
3614 decl_address_ip_invariant_p (const_tree op)
3615 {
3616   /* The conditions below are slightly less strict than the one in
3617      staticp.  */
3618 
3619   switch (TREE_CODE (op))
3620     {
3621     case LABEL_DECL:
3622     case FUNCTION_DECL:
3623     case STRING_CST:
3624       return true;
3625 
3626     case VAR_DECL:
3627       if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3628            && !DECL_DLLIMPORT_P (op))
3629           || DECL_THREAD_LOCAL_P (op))
3630         return true;
3631       break;
3632 
3633     case CONST_DECL:
3634       if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3635         return true;
3636       break;
3637 
3638     default:
3639       break;
3640     }
3641 
3642   return false;
3643 }
3644 
3645 
3646 /* Return true if T is function-invariant (internal function, does
3647    not handle arithmetic; that's handled in skip_simple_arithmetic and
3648    tree_invariant_p).  */
3649 
3650 static bool
3651 tree_invariant_p_1 (tree t)
3652 {
3653   tree op;
3654 
3655   if (TREE_CONSTANT (t)
3656       || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3657     return true;
3658 
3659   switch (TREE_CODE (t))
3660     {
3661     case SAVE_EXPR:
3662       return true;
3663 
3664     case ADDR_EXPR:
3665       op = TREE_OPERAND (t, 0);
3666       while (handled_component_p (op))
3667 	{
3668 	  switch (TREE_CODE (op))
3669 	    {
3670 	    case ARRAY_REF:
3671 	    case ARRAY_RANGE_REF:
3672 	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
3673 		  || TREE_OPERAND (op, 2) != NULL_TREE
3674 		  || TREE_OPERAND (op, 3) != NULL_TREE)
3675 		return false;
3676 	      break;
3677 
3678 	    case COMPONENT_REF:
3679 	      if (TREE_OPERAND (op, 2) != NULL_TREE)
3680 		return false;
3681 	      break;
3682 
3683 	    default:;
3684 	    }
3685 	  op = TREE_OPERAND (op, 0);
3686 	}
3687 
3688       return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3689 
3690     default:
3691       break;
3692     }
3693 
3694   return false;
3695 }
3696 
3697 /* Return true if T is function-invariant.  */
3698 
3699 bool
3700 tree_invariant_p (tree t)
3701 {
3702   tree inner = skip_simple_arithmetic (t);
3703   return tree_invariant_p_1 (inner);
3704 }
3705 
3706 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3707    Do this to any expression which may be used in more than one place,
3708    but must be evaluated only once.
3709 
3710    Normally, expand_expr would reevaluate the expression each time.
3711    Calling save_expr produces something that is evaluated and recorded
3712    the first time expand_expr is called on it.  Subsequent calls to
3713    expand_expr just reuse the recorded value.
3714 
3715    The call to expand_expr that generates code that actually computes
3716    the value is the first call *at compile time*.  Subsequent calls
3717    *at compile time* generate code to use the saved value.
3718    This produces correct result provided that *at run time* control
3719    always flows through the insns made by the first expand_expr
3720    before reaching the other places where the save_expr was evaluated.
3721    You, the caller of save_expr, must make sure this is so.
3722 
3723    Constants, and certain read-only nodes, are returned with no
3724    SAVE_EXPR because that is safe.  Expressions containing placeholders
3725    are not touched; see tree.def for an explanation of what these
3726    are used for.  */
3727 
3728 tree
3729 save_expr (tree expr)
3730 {
3731   tree inner;
3732 
3733   /* If the tree evaluates to a constant, then we don't want to hide that
3734      fact (i.e. this allows further folding, and direct checks for constants).
3735      However, a read-only object that has side effects cannot be bypassed.
3736      Since it is no problem to reevaluate literals, we just return the
3737      literal node.  */
3738   inner = skip_simple_arithmetic (expr);
3739   if (TREE_CODE (inner) == ERROR_MARK)
3740     return inner;
3741 
3742   if (tree_invariant_p_1 (inner))
3743     return expr;
3744 
3745   /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3746      it means that the size or offset of some field of an object depends on
3747      the value within another field.
3748 
3749      Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3750      and some variable since it would then need to be both evaluated once and
3751      evaluated more than once.  Front-ends must assure this case cannot
3752      happen by surrounding any such subexpressions in their own SAVE_EXPR
3753      and forcing evaluation at the proper time.  */
3754   if (contains_placeholder_p (inner))
3755     return expr;
3756 
3757   expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3758 
3759   /* This expression might be placed ahead of a jump to ensure that the
3760      value was computed on both sides of the jump.  So make sure it isn't
3761      eliminated as dead.  */
3762   TREE_SIDE_EFFECTS (expr) = 1;
3763   return expr;
3764 }
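
/* Editor's illustrative sketch (not part of the original source; X is a
   hypothetical expression tree): to build X * X while evaluating X only
   once, a front end can write

     tree t = save_expr (x);
     tree square = build2 (MULT_EXPR, TREE_TYPE (t), t, t);

   The first expansion of the SAVE_EXPR computes X; every later use of T
   reuses the saved value.  */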
3765 
3766 /* Look inside EXPR into any simple arithmetic operations.  Return the
3767    outermost non-arithmetic or non-invariant node.  */
3768 
3769 tree
3770 skip_simple_arithmetic (tree expr)
3771 {
3772   /* We don't care about whether this can be used as an lvalue in this
3773      context.  */
3774   while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3775     expr = TREE_OPERAND (expr, 0);
3776 
3777   /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3778      a constant, it will be more efficient to not make another SAVE_EXPR since
3779      it will allow better simplification and GCSE will be able to merge the
3780      computations if they actually occur.  */
3781   while (true)
3782     {
3783       if (UNARY_CLASS_P (expr))
3784 	expr = TREE_OPERAND (expr, 0);
3785       else if (BINARY_CLASS_P (expr))
3786 	{
3787 	  if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3788 	    expr = TREE_OPERAND (expr, 0);
3789 	  else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3790 	    expr = TREE_OPERAND (expr, 1);
3791 	  else
3792 	    break;
3793 	}
3794       else
3795 	break;
3796     }
3797 
3798   return expr;
3799 }
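
/* Editor's illustrative example (not part of the original source): given
   the tree (SAVE_EXPR <n> + 4) * 2, the loop above strips the invariant
   constant operands and returns the inner SAVE_EXPR, so a subsequent
   save_expr on the whole expression does not wrap it a second time.  */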
3800 
3801 /* Look inside EXPR into simple arithmetic operations involving constants.
3802    Return the outermost non-arithmetic or non-constant node.  */
3803 
3804 tree
3805 skip_simple_constant_arithmetic (tree expr)
3806 {
3807   while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3808     expr = TREE_OPERAND (expr, 0);
3809 
3810   while (true)
3811     {
3812       if (UNARY_CLASS_P (expr))
3813 	expr = TREE_OPERAND (expr, 0);
3814       else if (BINARY_CLASS_P (expr))
3815 	{
3816 	  if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3817 	    expr = TREE_OPERAND (expr, 0);
3818 	  else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3819 	    expr = TREE_OPERAND (expr, 1);
3820 	  else
3821 	    break;
3822 	}
3823       else
3824 	break;
3825     }
3826 
3827   return expr;
3828 }
3829 
3830 /* Return which tree structure is used by T.  */
3831 
3832 enum tree_node_structure_enum
3833 tree_node_structure (const_tree t)
3834 {
3835   const enum tree_code code = TREE_CODE (t);
3836   return tree_node_structure_for_code (code);
3837 }
3838 
3839 /* Set various status flags when building a CALL_EXPR object T.  */
3840 
3841 static void
3842 process_call_operands (tree t)
3843 {
3844   bool side_effects = TREE_SIDE_EFFECTS (t);
3845   bool read_only = false;
3846   int i = call_expr_flags (t);
3847 
3848   /* Calls have side-effects, except those to const or pure functions.  */
3849   if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3850     side_effects = true;
3851   /* Propagate TREE_READONLY of arguments for const functions.  */
3852   if (i & ECF_CONST)
3853     read_only = true;
3854 
3855   if (!side_effects || read_only)
3856     for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3857       {
3858 	tree op = TREE_OPERAND (t, i);
3859 	if (op && TREE_SIDE_EFFECTS (op))
3860 	  side_effects = true;
3861 	if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3862 	  read_only = false;
3863       }
3864 
3865   TREE_SIDE_EFFECTS (t) = side_effects;
3866   TREE_READONLY (t) = read_only;
3867 }
3868 
3869 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3870    size or offset that depends on a field within a record.  */
3871 
3872 bool
3873 contains_placeholder_p (const_tree exp)
3874 {
3875   enum tree_code code;
3876 
3877   if (!exp)
3878     return 0;
3879 
3880   code = TREE_CODE (exp);
3881   if (code == PLACEHOLDER_EXPR)
3882     return 1;
3883 
3884   switch (TREE_CODE_CLASS (code))
3885     {
3886     case tcc_reference:
3887       /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3888 	 position computations since they will be converted into a
3889 	 WITH_RECORD_EXPR involving the reference, which we assume
3890 	 here will be valid.  */
3891       return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3892 
3893     case tcc_exceptional:
3894       if (code == TREE_LIST)
3895 	return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3896 		|| CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3897       break;
3898 
3899     case tcc_unary:
3900     case tcc_binary:
3901     case tcc_comparison:
3902     case tcc_expression:
3903       switch (code)
3904 	{
3905 	case COMPOUND_EXPR:
3906 	  /* Ignoring the first operand isn't quite right, but works best.  */
3907 	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3908 
3909 	case COND_EXPR:
3910 	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3911 		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3912 		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3913 
3914 	case SAVE_EXPR:
3915 	  /* The save_expr function never wraps anything containing
3916 	     a PLACEHOLDER_EXPR. */
3917 	  return 0;
3918 
3919 	default:
3920 	  break;
3921 	}
3922 
3923       switch (TREE_CODE_LENGTH (code))
3924 	{
3925 	case 1:
3926 	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3927 	case 2:
3928 	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3929 		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3930 	default:
3931 	  return 0;
3932 	}
3933 
3934     case tcc_vl_exp:
3935       switch (code)
3936 	{
3937 	case CALL_EXPR:
3938 	  {
3939 	    const_tree arg;
3940 	    const_call_expr_arg_iterator iter;
3941 	    FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3942 	      if (CONTAINS_PLACEHOLDER_P (arg))
3943 		return 1;
3944 	    return 0;
3945 	  }
3946 	default:
3947 	  return 0;
3948 	}
3949 
3950     default:
3951       return 0;
3952     }
3953   return 0;
3954 }
3955 
3956 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3957    directly.  This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3958    field positions.  */
3959 
3960 static bool
3961 type_contains_placeholder_1 (const_tree type)
3962 {
3963   /* If the size contains a placeholder, or the parent type (the component
3964      type in the case of arrays) involves a placeholder, this type does.  */
3965   if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3966       || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3967       || (!POINTER_TYPE_P (type)
3968 	  && TREE_TYPE (type)
3969 	  && type_contains_placeholder_p (TREE_TYPE (type))))
3970     return true;
3971 
3972   /* Now do type-specific checks.  Note that the last part of the check above
3973      greatly limits what we have to do below.  */
3974   switch (TREE_CODE (type))
3975     {
3976     case VOID_TYPE:
3977     case OPAQUE_TYPE:
3978     case COMPLEX_TYPE:
3979     case ENUMERAL_TYPE:
3980     case BOOLEAN_TYPE:
3981     case POINTER_TYPE:
3982     case OFFSET_TYPE:
3983     case REFERENCE_TYPE:
3984     case METHOD_TYPE:
3985     case FUNCTION_TYPE:
3986     case VECTOR_TYPE:
3987     case NULLPTR_TYPE:
3988       return false;
3989 
3990     case INTEGER_TYPE:
3991     case REAL_TYPE:
3992     case FIXED_POINT_TYPE:
3993       /* Here we just check the bounds.  */
3994       return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3995 	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3996 
3997     case ARRAY_TYPE:
3998       /* We have already checked the component type above, so just check
3999 	 the domain type.  Flexible array members have a null domain.  */
4000       return TYPE_DOMAIN (type) ?
4001 	type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
4002 
4003     case RECORD_TYPE:
4004     case UNION_TYPE:
4005     case QUAL_UNION_TYPE:
4006       {
4007 	tree field;
4008 
4009 	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4010 	  if (TREE_CODE (field) == FIELD_DECL
4011 	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
4012 		  || (TREE_CODE (type) == QUAL_UNION_TYPE
4013 		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
4014 		  || type_contains_placeholder_p (TREE_TYPE (field))))
4015 	    return true;
4016 
4017 	return false;
4018       }
4019 
4020     default:
4021       gcc_unreachable ();
4022     }
4023 }
4024 
4025 /* Wrapper around above function used to cache its result.  */
4026 
4027 bool
4028 type_contains_placeholder_p (tree type)
4029 {
4030   bool result;
4031 
4032   /* If the contains_placeholder_bits field has been initialized,
4033      then we know the answer.  */
4034   if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
4035     return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
4036 
4037   /* Indicate that we've seen this type node, and the answer is false.
4038      This is what we want to return if we run into recursion via fields.  */
4039   TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
4040 
4041   /* Compute the real value.  */
4042   result = type_contains_placeholder_1 (type);
4043 
4044   /* Store the real value.  */
4045   TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
4046 
4047   return result;
4048 }
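
/* Editor's note (not part of the original source): the cache encodes a
   tristate, where 0 means "not yet computed", 1 means "computed and false"
   and 2 means "computed and true"; that is why the function above returns
   the stored value minus one and stores RESULT + 1.  */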
4049 
4050 /* Push tree EXP onto vector QUEUE if it is not already present.  */
4051 
4052 static void
4053 push_without_duplicates (tree exp, vec<tree> *queue)
4054 {
4055   unsigned int i;
4056   tree iter;
4057 
4058   FOR_EACH_VEC_ELT (*queue, i, iter)
4059     if (simple_cst_equal (iter, exp) == 1)
4060       break;
4061 
4062   if (!iter)
4063     queue->safe_push (exp);
4064 }
4065 
4066 /* Given a tree EXP, find all occurrences of references to fields
4067    in a PLACEHOLDER_EXPR and place them in vector REFS without
4068    duplicates.  Also record VAR_DECLs and CONST_DECLs.  Note that
4069    we assume here that EXP contains only arithmetic expressions
4070    or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4071    argument list.  */
4072 
4073 void
4074 find_placeholder_in_expr (tree exp, vec<tree> *refs)
4075 {
4076   enum tree_code code = TREE_CODE (exp);
4077   tree inner;
4078   int i;
4079 
4080   /* We handle TREE_LIST and COMPONENT_REF separately.  */
4081   if (code == TREE_LIST)
4082     {
4083       FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4084       FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4085     }
4086   else if (code == COMPONENT_REF)
4087     {
4088       for (inner = TREE_OPERAND (exp, 0);
4089 	   REFERENCE_CLASS_P (inner);
4090 	   inner = TREE_OPERAND (inner, 0))
4091 	;
4092 
4093       if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4094 	push_without_duplicates (exp, refs);
4095       else
4096 	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4097    }
4098   else
4099     switch (TREE_CODE_CLASS (code))
4100       {
4101       case tcc_constant:
4102 	break;
4103 
4104       case tcc_declaration:
4105 	/* Variables allocated to static storage can stay.  */
4106         if (!TREE_STATIC (exp))
4107 	  push_without_duplicates (exp, refs);
4108 	break;
4109 
4110       case tcc_expression:
4111 	/* This is the pattern built in ada/make_aligning_type.  */
4112 	if (code == ADDR_EXPR
4113 	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4114 	  {
4115 	    push_without_duplicates (exp, refs);
4116 	    break;
4117 	  }
4118 
4119         /* Fall through.  */
4120 
4121       case tcc_exceptional:
4122       case tcc_unary:
4123       case tcc_binary:
4124       case tcc_comparison:
4125       case tcc_reference:
4126 	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4127 	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4128 	break;
4129 
4130       case tcc_vl_exp:
4131 	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4132 	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4133 	break;
4134 
4135       default:
4136 	gcc_unreachable ();
4137       }
4138 }
4139 
4140 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4141    return a tree with all occurrences of references to F in a
4142    PLACEHOLDER_EXPR replaced by R.  Also handle VAR_DECLs and
4143    CONST_DECLs.  Note that we assume here that EXP contains only
4144    arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4145    occurring only in their argument list.  */
4146 
4147 tree
4148 substitute_in_expr (tree exp, tree f, tree r)
4149 {
4150   enum tree_code code = TREE_CODE (exp);
4151   tree op0, op1, op2, op3;
4152   tree new_tree;
4153 
4154   /* We handle TREE_LIST and COMPONENT_REF separately.  */
4155   if (code == TREE_LIST)
4156     {
4157       op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4158       op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4159       if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4160 	return exp;
4161 
4162       return tree_cons (TREE_PURPOSE (exp), op1, op0);
4163     }
4164   else if (code == COMPONENT_REF)
4165     {
4166       tree inner;
4167 
4168       /* If this expression is getting a value from a PLACEHOLDER_EXPR
4169 	 and it is the right field, replace it with R.  */
4170       for (inner = TREE_OPERAND (exp, 0);
4171 	   REFERENCE_CLASS_P (inner);
4172 	   inner = TREE_OPERAND (inner, 0))
4173 	;
4174 
4175       /* The field.  */
4176       op1 = TREE_OPERAND (exp, 1);
4177 
4178       if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4179 	return r;
4180 
4181       /* If this expression hasn't been completed yet, leave it alone.  */
4182       if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4183 	return exp;
4184 
4185       op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4186       if (op0 == TREE_OPERAND (exp, 0))
4187 	return exp;
4188 
4189       new_tree
4190 	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4191    }
4192   else
4193     switch (TREE_CODE_CLASS (code))
4194       {
4195       case tcc_constant:
4196 	return exp;
4197 
4198       case tcc_declaration:
4199 	if (exp == f)
4200 	  return r;
4201 	else
4202 	  return exp;
4203 
4204       case tcc_expression:
4205 	if (exp == f)
4206 	  return r;
4207 
4208         /* Fall through.  */
4209 
4210       case tcc_exceptional:
4211       case tcc_unary:
4212       case tcc_binary:
4213       case tcc_comparison:
4214       case tcc_reference:
4215 	switch (TREE_CODE_LENGTH (code))
4216 	  {
4217 	  case 0:
4218 	    return exp;
4219 
4220 	  case 1:
4221 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4222 	    if (op0 == TREE_OPERAND (exp, 0))
4223 	      return exp;
4224 
4225 	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4226 	    break;
4227 
4228 	  case 2:
4229 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4230 	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4231 
4232 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4233 	      return exp;
4234 
4235 	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4236 	    break;
4237 
4238 	  case 3:
4239 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4240 	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4241 	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4242 
4243 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4244 		&& op2 == TREE_OPERAND (exp, 2))
4245 	      return exp;
4246 
4247 	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4248 	    break;
4249 
4250 	  case 4:
4251 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4252 	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4253 	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4254 	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4255 
4256 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4257 		&& op2 == TREE_OPERAND (exp, 2)
4258 		&& op3 == TREE_OPERAND (exp, 3))
4259 	      return exp;
4260 
4261 	    new_tree
4262 	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4263 	    break;
4264 
4265 	  default:
4266 	    gcc_unreachable ();
4267 	  }
4268 	break;
4269 
4270       case tcc_vl_exp:
4271 	{
4272 	  int i;
4273 
4274 	  new_tree = NULL_TREE;
4275 
4276 	  /* If we are trying to replace F with a constant or with another
4277 	     instance of one of the arguments of the call, inline back
4278 	     functions which do nothing other than compute a value from
4279 	     the arguments they are passed.  This makes it possible to
4280 	     fold the replacement expression partially or entirely.  */
4281 	  if (code == CALL_EXPR)
4282 	    {
4283 	      bool maybe_inline = false;
4284 	      if (CONSTANT_CLASS_P (r))
4285 		maybe_inline = true;
4286 	      else
4287 		for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4288 		  if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4289 		    {
4290 		      maybe_inline = true;
4291 		      break;
4292 		    }
4293 	      if (maybe_inline)
4294 		{
4295 		  tree t = maybe_inline_call_in_expr (exp);
4296 		  if (t)
4297 		    return SUBSTITUTE_IN_EXPR (t, f, r);
4298 		}
4299 	    }
4300 
4301 	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4302 	    {
4303 	      tree op = TREE_OPERAND (exp, i);
4304 	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4305 	      if (new_op != op)
4306 		{
4307 		  if (!new_tree)
4308 		    new_tree = copy_node (exp);
4309 		  TREE_OPERAND (new_tree, i) = new_op;
4310 		}
4311 	    }
4312 
4313 	  if (new_tree)
4314 	    {
4315 	      new_tree = fold (new_tree);
4316 	      if (TREE_CODE (new_tree) == CALL_EXPR)
4317 		process_call_operands (new_tree);
4318 	    }
4319 	  else
4320 	    return exp;
4321 	}
4322 	break;
4323 
4324       default:
4325 	gcc_unreachable ();
4326       }
4327 
4328   TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4329 
4330   if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4331     TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4332 
4333   return new_tree;
4334 }
4335 
4336 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4337    for it within OBJ, a tree that is an object or a chain of references.  */
4338 
4339 tree
4340 substitute_placeholder_in_expr (tree exp, tree obj)
4341 {
4342   enum tree_code code = TREE_CODE (exp);
4343   tree op0, op1, op2, op3;
4344   tree new_tree;
4345 
4346   /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4347      in the chain of OBJ.  */
4348   if (code == PLACEHOLDER_EXPR)
4349     {
4350       tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4351       tree elt;
4352 
4353       for (elt = obj; elt != 0;
4354 	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4355 		   || TREE_CODE (elt) == COND_EXPR)
4356 		  ? TREE_OPERAND (elt, 1)
4357 		  : (REFERENCE_CLASS_P (elt)
4358 		     || UNARY_CLASS_P (elt)
4359 		     || BINARY_CLASS_P (elt)
4360 		     || VL_EXP_CLASS_P (elt)
4361 		     || EXPRESSION_CLASS_P (elt))
4362 		  ? TREE_OPERAND (elt, 0) : 0))
4363 	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4364 	  return elt;
4365 
4366       for (elt = obj; elt != 0;
4367 	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4368 		   || TREE_CODE (elt) == COND_EXPR)
4369 		  ? TREE_OPERAND (elt, 1)
4370 		  : (REFERENCE_CLASS_P (elt)
4371 		     || UNARY_CLASS_P (elt)
4372 		     || BINARY_CLASS_P (elt)
4373 		     || VL_EXP_CLASS_P (elt)
4374 		     || EXPRESSION_CLASS_P (elt))
4375 		  ? TREE_OPERAND (elt, 0) : 0))
4376 	if (POINTER_TYPE_P (TREE_TYPE (elt))
4377 	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4378 		== need_type))
4379 	  return fold_build1 (INDIRECT_REF, need_type, elt);
4380 
4381       /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
4382 	 survives until RTL generation, there will be an error.  */
4383       return exp;
4384     }
4385 
4386   /* TREE_LIST is special because we need to look at TREE_VALUE
4387      and TREE_CHAIN, not TREE_OPERANDS.  */
4388   else if (code == TREE_LIST)
4389     {
4390       op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4391       op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4392       if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4393 	return exp;
4394 
4395       return tree_cons (TREE_PURPOSE (exp), op1, op0);
4396     }
4397   else
4398     switch (TREE_CODE_CLASS (code))
4399       {
4400       case tcc_constant:
4401       case tcc_declaration:
4402 	return exp;
4403 
4404       case tcc_exceptional:
4405       case tcc_unary:
4406       case tcc_binary:
4407       case tcc_comparison:
4408       case tcc_expression:
4409       case tcc_reference:
4410       case tcc_statement:
4411 	switch (TREE_CODE_LENGTH (code))
4412 	  {
4413 	  case 0:
4414 	    return exp;
4415 
4416 	  case 1:
4417 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4418 	    if (op0 == TREE_OPERAND (exp, 0))
4419 	      return exp;
4420 
4421 	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4422 	    break;
4423 
4424 	  case 2:
4425 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4426 	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4427 
4428 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4429 	      return exp;
4430 
4431 	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4432 	    break;
4433 
4434 	  case 3:
4435 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4436 	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4437 	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4438 
4439 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4440 		&& op2 == TREE_OPERAND (exp, 2))
4441 	      return exp;
4442 
4443 	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4444 	    break;
4445 
4446 	  case 4:
4447 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4448 	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4449 	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4450 	    op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4451 
4452 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4453 		&& op2 == TREE_OPERAND (exp, 2)
4454 		&& op3 == TREE_OPERAND (exp, 3))
4455 	      return exp;
4456 
4457 	    new_tree
4458 	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4459 	    break;
4460 
4461 	  default:
4462 	    gcc_unreachable ();
4463 	  }
4464 	break;
4465 
4466       case tcc_vl_exp:
4467 	{
4468 	  int i;
4469 
4470 	  new_tree = NULL_TREE;
4471 
4472 	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4473 	    {
4474 	      tree op = TREE_OPERAND (exp, i);
4475 	      tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4476 	      if (new_op != op)
4477 		{
4478 		  if (!new_tree)
4479 		    new_tree = copy_node (exp);
4480 		  TREE_OPERAND (new_tree, i) = new_op;
4481 		}
4482 	    }
4483 
4484 	  if (new_tree)
4485 	    {
4486 	      new_tree = fold (new_tree);
4487 	      if (TREE_CODE (new_tree) == CALL_EXPR)
4488 		process_call_operands (new_tree);
4489 	    }
4490 	  else
4491 	    return exp;
4492 	}
4493 	break;
4494 
4495       default:
4496 	gcc_unreachable ();
4497       }
4498 
4499   TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4500 
4501   if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4502     TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4503 
4504   return new_tree;
4505 }
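
/* Editor's illustrative sketch (not part of the original source; TYPE and
   OBJ are hypothetical): for a self-referential type whose TYPE_SIZE
   contains a PLACEHOLDER_EXPR, the size of a particular object OBJ of that
   type can be obtained with

     tree size = substitute_placeholder_in_expr (TYPE_SIZE (type), obj);

   which replaces the placeholder by a reference to OBJ, or by *OBJ when
   only a pointer with the right pointed-to type is found in the chain.  */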
4506 
4507 
4508 /* Subroutine of stabilize_reference; this is called for subtrees of
4509    references.  Any expression with side-effects must be put in a SAVE_EXPR
4510    to ensure that it is only evaluated once.
4511 
4512    We don't put SAVE_EXPR nodes around everything, because assigning very
4513    simple expressions to temporaries causes us to miss good opportunities
4514    for optimizations.  Among other things, the opportunity to fold in the
4515    addition of a constant into an addressing mode often gets lost, e.g.
4516    "y[i+1] += x;".  In general, we take the approach that we should not make
4517    an assignment unless we are forced into it - i.e., that any non-side effect
4518    operator should be allowed, and that cse should take care of coalescing
4519    multiple utterances of the same expression should that prove fruitful.  */
4520 
4521 static tree
4522 stabilize_reference_1 (tree e)
4523 {
4524   tree result;
4525   enum tree_code code = TREE_CODE (e);
4526 
4527   /* We cannot ignore const expressions because it might be a reference
4528      to a const array whose index contains side-effects.  But we can
4529      ignore things that are actual constants or that have already been
4530      handled by this function.  */
4531 
4532   if (tree_invariant_p (e))
4533     return e;
4534 
4535   switch (TREE_CODE_CLASS (code))
4536     {
4537     case tcc_exceptional:
4538       /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4539 	 have side-effects.  */
4540       if (code == STATEMENT_LIST)
4541 	return save_expr (e);
4542       /* FALLTHRU */
4543     case tcc_type:
4544     case tcc_declaration:
4545     case tcc_comparison:
4546     case tcc_statement:
4547     case tcc_expression:
4548     case tcc_reference:
4549     case tcc_vl_exp:
4550       /* If the expression has side-effects, then encase it in a SAVE_EXPR
4551 	 so that it will only be evaluated once.  */
4552       /* The reference (r) and comparison (<) classes could be handled as
4553 	 below, but it is generally faster to only evaluate them once.  */
4554       if (TREE_SIDE_EFFECTS (e))
4555 	return save_expr (e);
4556       return e;
4557 
4558     case tcc_constant:
4559       /* Constants need no processing.  In fact, we should never reach
4560 	 here.  */
4561       return e;
4562 
4563     case tcc_binary:
4564       /* Division is slow and tends to be compiled with jumps,
4565 	 especially the division by powers of 2 that is often
4566 	 found inside of an array reference.  So do it just once.  */
4567       if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4568 	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4569 	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4570 	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4571 	return save_expr (e);
4572       /* Recursively stabilize each operand.  */
4573       result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4574 			 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4575       break;
4576 
4577     case tcc_unary:
4578       /* Recursively stabilize each operand.  */
4579       result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4580       break;
4581 
4582     default:
4583       gcc_unreachable ();
4584     }
4585 
4586   TREE_TYPE (result) = TREE_TYPE (e);
4587   TREE_READONLY (result) = TREE_READONLY (e);
4588   TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4589   TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4590 
4591   return result;
4592 }
4593 
4594 /* Stabilize a reference so that we can use it any number of times
4595    without causing its operands to be evaluated more than once.
4596    Returns the stabilized reference.  This works by means of save_expr,
4597    so see the caveats in the comments about save_expr.
4598 
4599    Also allows conversion expressions whose operands are references.
4600    Any other kind of expression is returned unchanged.  */
4601 
4602 tree
4603 stabilize_reference (tree ref)
4604 {
4605   tree result;
4606   enum tree_code code = TREE_CODE (ref);
4607 
4608   switch (code)
4609     {
4610     case VAR_DECL:
4611     case PARM_DECL:
4612     case RESULT_DECL:
4613       /* No action is needed in this case.  */
4614       return ref;
4615 
4616     CASE_CONVERT:
4617     case FLOAT_EXPR:
4618     case FIX_TRUNC_EXPR:
4619       result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4620       break;
4621 
4622     case INDIRECT_REF:
4623       result = build_nt (INDIRECT_REF,
4624 			 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4625       break;
4626 
4627     case COMPONENT_REF:
4628       result = build_nt (COMPONENT_REF,
4629 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4630 			 TREE_OPERAND (ref, 1), NULL_TREE);
4631       break;
4632 
4633     case BIT_FIELD_REF:
4634       result = build_nt (BIT_FIELD_REF,
4635 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4636 			 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4637       REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4638       break;
4639 
4640     case ARRAY_REF:
4641       result = build_nt (ARRAY_REF,
4642 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4643 			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4644 			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4645       break;
4646 
4647     case ARRAY_RANGE_REF:
4648       result = build_nt (ARRAY_RANGE_REF,
4649 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4650 			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4651 			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4652       break;
4653 
4654     case COMPOUND_EXPR:
4655       /* We cannot wrap the first expression in a SAVE_EXPR, as then
4656 	 it wouldn't be ignored.  This matters when dealing with
4657 	 volatiles.  */
4658       return stabilize_reference_1 (ref);
4659 
4660       /* If arg isn't a kind of lvalue we recognize, make no change.
4661 	 Caller should recognize the error for an invalid lvalue.  */
4662     default:
4663       return ref;
4664 
4665     case ERROR_MARK:
4666       return error_mark_node;
4667     }
4668 
4669   TREE_TYPE (result) = TREE_TYPE (ref);
4670   TREE_READONLY (result) = TREE_READONLY (ref);
4671   TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4672   TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4673 
4674   return result;
4675 }
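
/* Editor's illustrative sketch (not part of the original source; REF is a
   hypothetical reference tree): when expanding something like "a[i++] += 1",
   a front end stabilizes the reference once so that the load and the store
   reuse the same single-evaluation operands:

     tree lhs = stabilize_reference (ref);
     tree rhs = build2 (PLUS_EXPR, TREE_TYPE (lhs), lhs,
			build_int_cst (TREE_TYPE (lhs), 1));
     tree stmt = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);  */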
4676 
4677 /* Low-level constructors for expressions.  */
4678 
4679 /* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
4680    and TREE_SIDE_EFFECTS for an ADDR_EXPR.  */
4681 
4682 void
4683 recompute_tree_invariant_for_addr_expr (tree t)
4684 {
4685   tree node;
4686   bool tc = true, se = false;
4687 
4688   gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4689 
4690   /* We started out assuming this address is both invariant and constant, but
4691      does not have side effects.  Now go down any handled components and see if
4692      any of them involve offsets that are either non-constant or non-invariant.
4693      Also check for side-effects.
4694 
4695      ??? Note that this code makes no attempt to deal with the case where
4696      taking the address of something causes a copy due to misalignment.  */
4697 
4698 #define UPDATE_FLAGS(NODE)  \
4699 do { tree _node = (NODE); \
4700      if (_node && !TREE_CONSTANT (_node)) tc = false; \
4701      if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4702 
4703   for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4704        node = TREE_OPERAND (node, 0))
4705     {
4706       /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4707 	 array reference (probably made temporarily by the G++ front end),
4708 	 so ignore all the operands.  */
4709       if ((TREE_CODE (node) == ARRAY_REF
4710 	   || TREE_CODE (node) == ARRAY_RANGE_REF)
4711 	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4712 	{
4713 	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
4714 	  if (TREE_OPERAND (node, 2))
4715 	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
4716 	  if (TREE_OPERAND (node, 3))
4717 	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
4718 	}
4719       /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4720 	 FIELD_DECL, apparently.  The G++ front end can put something else
4721 	 there, at least temporarily.  */
4722       else if (TREE_CODE (node) == COMPONENT_REF
4723 	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4724 	{
4725 	  if (TREE_OPERAND (node, 2))
4726 	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
4727 	}
4728     }
4729 
4730   node = lang_hooks.expr_to_decl (node, &tc, &se);
4731 
4732   /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
4733      the address, since &(*a)->b is a form of addition.  If it's a constant, the
4734      address is constant too.  If it's a decl, its address is constant if the
4735      decl is static.  Everything else is not constant and, furthermore,
4736      taking the address of a volatile variable is not volatile.  */
4737   if (TREE_CODE (node) == INDIRECT_REF
4738       || TREE_CODE (node) == MEM_REF)
4739     UPDATE_FLAGS (TREE_OPERAND (node, 0));
4740   else if (CONSTANT_CLASS_P (node))
4741     ;
4742   else if (DECL_P (node))
4743     tc &= (staticp (node) != NULL_TREE);
4744   else
4745     {
4746       tc = false;
4747       se |= TREE_SIDE_EFFECTS (node);
4748     }
4749 
4750 
4751   TREE_CONSTANT (t) = tc;
4752   TREE_SIDE_EFFECTS (t) = se;
4753 #undef UPDATE_FLAGS
4754 }
4755 
4756 /* Build an expression of code CODE, data type TYPE, and operands as
4757    specified.  Expressions and reference nodes can be created this way.
4758    Constants, decls, types and misc nodes cannot be.
4759 
4760    We define 5 non-variadic functions, from 0 to 4 arguments.  This is
4761    enough for all extant tree codes.  */
4762 
4763 tree
4764 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4765 {
4766   tree t;
4767 
4768   gcc_assert (TREE_CODE_LENGTH (code) == 0);
4769 
4770   t = make_node (code PASS_MEM_STAT);
4771   TREE_TYPE (t) = tt;
4772 
4773   return t;
4774 }
4775 
4776 tree
4777 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4778 {
4779   int length = sizeof (struct tree_exp);
4780   tree t;
4781 
4782   record_node_allocation_statistics (code, length);
4783 
4784   gcc_assert (TREE_CODE_LENGTH (code) == 1);
4785 
4786   t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4787 
4788   memset (t, 0, sizeof (struct tree_common));
4789 
4790   TREE_SET_CODE (t, code);
4791 
4792   TREE_TYPE (t) = type;
4793   SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4794   TREE_OPERAND (t, 0) = node;
4795   if (node && !TYPE_P (node))
4796     {
4797       TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4798       TREE_READONLY (t) = TREE_READONLY (node);
4799     }
4800 
4801   if (TREE_CODE_CLASS (code) == tcc_statement)
4802     {
4803       if (code != DEBUG_BEGIN_STMT)
4804 	TREE_SIDE_EFFECTS (t) = 1;
4805     }
4806   else switch (code)
4807     {
4808     case VA_ARG_EXPR:
4809       /* All of these have side-effects, no matter what their
4810 	 operands are.  */
4811       TREE_SIDE_EFFECTS (t) = 1;
4812       TREE_READONLY (t) = 0;
4813       break;
4814 
4815     case INDIRECT_REF:
4816       /* Whether a dereference is readonly has nothing to do with whether
4817 	 its operand is readonly.  */
4818       TREE_READONLY (t) = 0;
4819       break;
4820 
4821     case ADDR_EXPR:
4822       if (node)
4823 	recompute_tree_invariant_for_addr_expr (t);
4824       break;
4825 
4826     default:
4827       if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4828 	  && node && !TYPE_P (node)
4829 	  && TREE_CONSTANT (node))
4830 	TREE_CONSTANT (t) = 1;
4831       if (TREE_CODE_CLASS (code) == tcc_reference
4832 	  && node && TREE_THIS_VOLATILE (node))
4833 	TREE_THIS_VOLATILE (t) = 1;
4834       break;
4835     }
4836 
4837   return t;
4838 }
4839 
4840 #define PROCESS_ARG(N)				\
4841   do {						\
4842     TREE_OPERAND (t, N) = arg##N;		\
4843     if (arg##N &&!TYPE_P (arg##N))		\
4844       {						\
4845         if (TREE_SIDE_EFFECTS (arg##N))		\
4846 	  side_effects = 1;			\
4847         if (!TREE_READONLY (arg##N)		\
4848 	    && !CONSTANT_CLASS_P (arg##N))	\
4849 	  (void) (read_only = 0);		\
4850         if (!TREE_CONSTANT (arg##N))		\
4851 	  (void) (constant = 0);		\
4852       }						\
4853   } while (0)
4854 
4855 tree
4856 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4857 {
4858   bool constant, read_only, side_effects, div_by_zero;
4859   tree t;
4860 
4861   gcc_assert (TREE_CODE_LENGTH (code) == 2);
4862 
4863   if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4864       && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4865       /* When sizetype precision doesn't match that of pointers
4866          we need to be able to build explicit extensions or truncations
4867 	 of the offset argument.  */
4868       && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4869     gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4870 		&& TREE_CODE (arg1) == INTEGER_CST);
4871 
4872   if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4873     gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4874 		&& ptrofftype_p (TREE_TYPE (arg1)));
4875 
4876   t = make_node (code PASS_MEM_STAT);
4877   TREE_TYPE (t) = tt;
4878 
4879   /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4880      result based on those same flags for the arguments.  But if the
4881      arguments aren't really even `tree' expressions, we shouldn't be trying
4882      to do this.  */
4883 
4884   /* Expressions without side effects may be constant if their
4885      arguments are as well.  */
4886   constant = (TREE_CODE_CLASS (code) == tcc_comparison
4887 	      || TREE_CODE_CLASS (code) == tcc_binary);
4888   read_only = 1;
4889   side_effects = TREE_SIDE_EFFECTS (t);
4890 
4891   switch (code)
4892     {
4893     case TRUNC_DIV_EXPR:
4894     case CEIL_DIV_EXPR:
4895     case FLOOR_DIV_EXPR:
4896     case ROUND_DIV_EXPR:
4897     case EXACT_DIV_EXPR:
4898     case CEIL_MOD_EXPR:
4899     case FLOOR_MOD_EXPR:
4900     case ROUND_MOD_EXPR:
4901     case TRUNC_MOD_EXPR:
4902       div_by_zero = integer_zerop (arg1);
4903       break;
4904     default:
4905       div_by_zero = false;
4906     }
4907 
4908   PROCESS_ARG (0);
4909   PROCESS_ARG (1);
4910 
4911   TREE_SIDE_EFFECTS (t) = side_effects;
4912   if (code == MEM_REF)
4913     {
4914       if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4915 	{
4916 	  tree o = TREE_OPERAND (arg0, 0);
4917 	  TREE_READONLY (t) = TREE_READONLY (o);
4918 	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4919 	}
4920     }
4921   else
4922     {
4923       TREE_READONLY (t) = read_only;
4924       /* Don't mark X / 0 as constant.  */
4925       TREE_CONSTANT (t) = constant && !div_by_zero;
4926       TREE_THIS_VOLATILE (t)
4927 	= (TREE_CODE_CLASS (code) == tcc_reference
4928 	   && arg0 && TREE_THIS_VOLATILE (arg0));
4929     }
4930 
4931   return t;
4932 }
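
/* Editor's illustrative sketch (not part of the original source; A and B
   are hypothetical integer-typed operands): the fixed-arity constructors
   are used like

     tree sum = build2 (PLUS_EXPR, integer_type_node, a, b);
     tree neg = build1 (NEGATE_EXPR, integer_type_node, sum);

   with TREE_CONSTANT, TREE_READONLY and TREE_SIDE_EFFECTS of the result
   derived from the operands as described above.  */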
4933 
4934 
4935 tree
4936 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
4937 	tree arg2 MEM_STAT_DECL)
4938 {
4939   bool constant, read_only, side_effects;
4940   tree t;
4941 
4942   gcc_assert (TREE_CODE_LENGTH (code) == 3);
4943   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4944 
4945   t = make_node (code PASS_MEM_STAT);
4946   TREE_TYPE (t) = tt;
4947 
4948   read_only = 1;
4949 
4950   /* As a special exception, if COND_EXPR has NULL branches, we
4951      assume that it is a gimple statement and always consider
4952      it to have side effects.  */
4953   if (code == COND_EXPR
4954       && tt == void_type_node
4955       && arg1 == NULL_TREE
4956       && arg2 == NULL_TREE)
4957     side_effects = true;
4958   else
4959     side_effects = TREE_SIDE_EFFECTS (t);
4960 
4961   PROCESS_ARG (0);
4962   PROCESS_ARG (1);
4963   PROCESS_ARG (2);
4964 
4965   if (code == COND_EXPR)
4966     TREE_READONLY (t) = read_only;
4967 
4968   TREE_SIDE_EFFECTS (t) = side_effects;
4969   TREE_THIS_VOLATILE (t)
4970     = (TREE_CODE_CLASS (code) == tcc_reference
4971        && arg0 && TREE_THIS_VOLATILE (arg0));
4972 
4973   return t;
4974 }
4975 
4976 tree
4977 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
4978 	tree arg2, tree arg3 MEM_STAT_DECL)
4979 {
4980   bool constant, read_only, side_effects;
4981   tree t;
4982 
4983   gcc_assert (TREE_CODE_LENGTH (code) == 4);
4984 
4985   t = make_node (code PASS_MEM_STAT);
4986   TREE_TYPE (t) = tt;
4987 
4988   side_effects = TREE_SIDE_EFFECTS (t);
4989 
4990   PROCESS_ARG (0);
4991   PROCESS_ARG (1);
4992   PROCESS_ARG (2);
4993   PROCESS_ARG (3);
4994 
4995   TREE_SIDE_EFFECTS (t) = side_effects;
4996   TREE_THIS_VOLATILE (t)
4997     = (TREE_CODE_CLASS (code) == tcc_reference
4998        && arg0 && TREE_THIS_VOLATILE (arg0));
4999 
5000   return t;
5001 }
5002 
5003 tree
5004 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
5005 	tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
5006 {
5007   bool constant, read_only, side_effects;
5008   tree t;
5009 
5010   gcc_assert (TREE_CODE_LENGTH (code) == 5);
5011 
5012   t = make_node (code PASS_MEM_STAT);
5013   TREE_TYPE (t) = tt;
5014 
5015   side_effects = TREE_SIDE_EFFECTS (t);
5016 
5017   PROCESS_ARG (0);
5018   PROCESS_ARG (1);
5019   PROCESS_ARG (2);
5020   PROCESS_ARG (3);
5021   PROCESS_ARG (4);
5022 
5023   TREE_SIDE_EFFECTS (t) = side_effects;
5024   if (code == TARGET_MEM_REF)
5025     {
5026       if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5027 	{
5028 	  tree o = TREE_OPERAND (arg0, 0);
5029 	  TREE_READONLY (t) = TREE_READONLY (o);
5030 	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5031 	}
5032     }
5033   else
5034     TREE_THIS_VOLATILE (t)
5035       = (TREE_CODE_CLASS (code) == tcc_reference
5036 	 && arg0 && TREE_THIS_VOLATILE (arg0));
5037 
5038   return t;
5039 }
5040 
5041 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
5042    on the pointer PTR.  */
5043 
5044 tree
5045 build_simple_mem_ref_loc (location_t loc, tree ptr)
5046 {
5047   poly_int64 offset = 0;
5048   tree ptype = TREE_TYPE (ptr);
5049   tree tem;
5050   /* For convenience allow addresses that collapse to a simple base
5051      and offset.  */
5052   if (TREE_CODE (ptr) == ADDR_EXPR
5053       && (handled_component_p (TREE_OPERAND (ptr, 0))
5054 	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
5055     {
5056       ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
5057       gcc_assert (ptr);
5058       if (TREE_CODE (ptr) == MEM_REF)
5059 	{
5060 	  offset += mem_ref_offset (ptr).force_shwi ();
5061 	  ptr = TREE_OPERAND (ptr, 0);
5062 	}
5063       else
5064 	ptr = build_fold_addr_expr (ptr);
5065       gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
5066     }
5067   tem = build2 (MEM_REF, TREE_TYPE (ptype),
5068 		ptr, build_int_cst (ptype, offset));
5069   SET_EXPR_LOCATION (tem, loc);
5070   return tem;
5071 }
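
/* Editor's illustrative sketch (not part of the original source; LOC and
   PTR are hypothetical):

     tree deref = build_simple_mem_ref_loc (loc, ptr);

   yields MEM_REF <ptr, 0> of the pointed-to type, with a constant
   base-plus-offset address folded into the offset operand when possible.  */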
5072 
5073 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T.  */
5074 
5075 poly_offset_int
5076 mem_ref_offset (const_tree t)
5077 {
5078   return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5079 				SIGNED);
5080 }
5081 
5082 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5083    offsetted by OFFSET units.  */
5084 
5085 tree
5086 build_invariant_address (tree type, tree base, poly_int64 offset)
5087 {
5088   tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5089 			  build_fold_addr_expr (base),
5090 			  build_int_cst (ptr_type_node, offset));
5091   tree addr = build1 (ADDR_EXPR, type, ref);
5092   recompute_tree_invariant_for_addr_expr (addr);
5093   return addr;
5094 }
5095 
5096 /* Similar except don't specify the TREE_TYPE
5097    and leave the TREE_SIDE_EFFECTS as 0.
5098    It is permissible for arguments to be null,
5099    or even garbage if their values do not matter.  */
5100 
5101 tree
5102 build_nt (enum tree_code code, ...)
5103 {
5104   tree t;
5105   int length;
5106   int i;
5107   va_list p;
5108 
5109   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5110 
5111   va_start (p, code);
5112 
5113   t = make_node (code);
5114   length = TREE_CODE_LENGTH (code);
5115 
5116   for (i = 0; i < length; i++)
5117     TREE_OPERAND (t, i) = va_arg (p, tree);
5118 
5119   va_end (p);
5120   return t;
5121 }
5122 
5123 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5124    tree vec.  */
5125 
5126 tree
5127 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5128 {
5129   tree ret, t;
5130   unsigned int ix;
5131 
5132   ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5133   CALL_EXPR_FN (ret) = fn;
5134   CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5135   FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5136     CALL_EXPR_ARG (ret, ix) = t;
5137   return ret;
5138 }
5139 
5140 /* Create a DECL_... node of code CODE, name NAME  (if non-null)
5141    and data type TYPE.
5142    We do NOT enter this node in any sort of symbol table.
5143 
5144    LOC is the location of the decl.
5145 
5146    layout_decl is used to set up the decl's storage layout.
5147    Other slots are initialized to 0 or null pointers.  */
5148 
5149 tree
5150 build_decl (location_t loc, enum tree_code code, tree name,
5151 	    tree type MEM_STAT_DECL)
5152 {
5153   tree t;
5154 
5155   t = make_node (code PASS_MEM_STAT);
5156   DECL_SOURCE_LOCATION (t) = loc;
5157 
5158 /*  if (type == error_mark_node)
5159     type = integer_type_node; */
5160 /* That is not done, deliberately, so that having error_mark_node
5161    as the type can suppress useless errors in the use of this variable.  */
5162 
5163   DECL_NAME (t) = name;
5164   TREE_TYPE (t) = type;
5165 
5166   if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5167     layout_decl (t, 0);
5168 
5169   return t;
5170 }
5171 
5172 /* Builds and returns a function declaration with NAME and TYPE.  */
5173 
5174 tree
5175 build_fn_decl (const char *name, tree type)
5176 {
5177   tree id = get_identifier (name);
5178   tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5179 
5180   DECL_EXTERNAL (decl) = 1;
5181   TREE_PUBLIC (decl) = 1;
5182   DECL_ARTIFICIAL (decl) = 1;
5183   TREE_NOTHROW (decl) = 1;
5184 
5185   return decl;
5186 }
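
/* Editor's illustrative sketch (not part of the original source; the names
   below are hypothetical): building an artificial function declaration and
   a local variable declaration.

     tree fntype = build_function_type_list (void_type_node, NULL_TREE);
     tree fndecl = build_fn_decl ("my_helper", fntype);
     tree var = build_decl (input_location, VAR_DECL,
			    get_identifier ("tmp"), integer_type_node);  */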
5187 
5188 vec<tree, va_gc> *all_translation_units;
5189 
5190 /* Builds a new translation-unit decl with name NAME, queues it in the
5191    global list of translation-unit decls and returns it.   */
5192 
5193 tree
5194 build_translation_unit_decl (tree name)
5195 {
5196   tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5197 			name, NULL_TREE);
5198   TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5199   vec_safe_push (all_translation_units, tu);
5200   return tu;
5201 }
5202 
5203 
5204 /* BLOCK nodes are used to represent the structure of binding contours
5205    and declarations, once those contours have been exited and their contents
5206    compiled.  This information is used for outputting debugging info.  */
5207 
5208 tree
5209 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5210 {
5211   tree block = make_node (BLOCK);
5212 
5213   BLOCK_VARS (block) = vars;
5214   BLOCK_SUBBLOCKS (block) = subblocks;
5215   BLOCK_SUPERCONTEXT (block) = supercontext;
5216   BLOCK_CHAIN (block) = chain;
5217   return block;
5218 }
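
/* Illustrative sketch, not part of the original sources: chaining two BLOCKs
   to mirror nested scopes once they have been exited.  INNER_VARS and
   OUTER_VARS stand for chains of decls local to each scope.

     tree inner = build_block (inner_vars, NULL_TREE, NULL_TREE, NULL_TREE);
     tree outer = build_block (outer_vars, inner, NULL_TREE, NULL_TREE);
     BLOCK_SUPERCONTEXT (inner) = outer;

   The supercontext is patched afterwards because the enclosing block does
   not yet exist when the inner one is built.  */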
5219 
5220 
5221 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5222 
5223    LOC is the location to use in tree T.  */
5224 
5225 void
5226 protected_set_expr_location (tree t, location_t loc)
5227 {
5228   if (CAN_HAVE_LOCATION_P (t))
5229     SET_EXPR_LOCATION (t, loc);
5230   else if (t && TREE_CODE (t) == STATEMENT_LIST)
5231     {
5232       t = expr_single (t);
5233       if (t && CAN_HAVE_LOCATION_P (t))
5234 	SET_EXPR_LOCATION (t, loc);
5235     }
5236 }
5237 
5238 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5239    UNKNOWN_LOCATION.  */
5240 
5241 void
5242 protected_set_expr_location_if_unset (tree t, location_t loc)
5243 {
5244   t = expr_single (t);
5245   if (t && !EXPR_HAS_LOCATION (t))
5246     protected_set_expr_location (t, loc);
5247 }
5248 
5249 /* Data used when collecting DECLs and TYPEs for language data removal.  */
5250 
5251 class free_lang_data_d
5252 {
5253 public:
5254   free_lang_data_d () : decls (100), types (100) {}
5255 
5256   /* Worklist to avoid excessive recursion.  */
5257   auto_vec<tree> worklist;
5258 
5259   /* Set of traversed objects.  Used to avoid duplicate visits.  */
5260   hash_set<tree> pset;
5261 
5262   /* Array of symbols to process with free_lang_data_in_decl.  */
5263   auto_vec<tree> decls;
5264 
5265   /* Array of types to process with free_lang_data_in_type.  */
5266   auto_vec<tree> types;
5267 };
5268 
5269 
5270 /* Add type or decl T to one of the list of tree nodes that need their
5271    language data removed.  The lists are held inside FLD.  */
5272 
5273 static void
5274 add_tree_to_fld_list (tree t, class free_lang_data_d *fld)
5275 {
5276   if (DECL_P (t))
5277     fld->decls.safe_push (t);
5278   else if (TYPE_P (t))
5279     fld->types.safe_push (t);
5280   else
5281     gcc_unreachable ();
5282 }
5283 
5284 /* Push tree node T into FLD->WORKLIST.  */
5285 
5286 static inline void
5287 fld_worklist_push (tree t, class free_lang_data_d *fld)
5288 {
5289   if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5290     fld->worklist.safe_push ((t));
5291 }
5292 
5293 
5294 
5295 /* Return simplified TYPE_NAME of TYPE.  */
5296 
5297 static tree
5298 fld_simplified_type_name (tree type)
5299 {
5300   if (!TYPE_NAME (type) || TREE_CODE (TYPE_NAME (type)) != TYPE_DECL)
5301     return TYPE_NAME (type);
5302   /* Drop TYPE_DECLs in TYPE_NAME in favor of the identifier in the
5303      TYPE_DECL if the type doesn't have linkage.
5304      this must match fld_  */
5305   if (type != TYPE_MAIN_VARIANT (type)
5306       || (!DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type))
5307 	  && (TREE_CODE (type) != RECORD_TYPE
5308 	      || !TYPE_BINFO (type)
5309 	      || !BINFO_VTABLE (TYPE_BINFO (type)))))
5310     return DECL_NAME (TYPE_NAME (type));
5311   return TYPE_NAME (type);
5312 }
5313 
5314 /* Do the same comparison as check_qualified_type, skipping the lang part of
5315    the type, and be more permissive about type names: we only care that the
5316    names are the same (for diagnostics) and that the ODR names are the same.
5317    If INNER_TYPE is non-NULL, be sure that TREE_TYPE matches it.  */
5318 
5319 static bool
5320 fld_type_variant_equal_p (tree t, tree v, tree inner_type)
5321 {
5322   if (TYPE_QUALS (t) != TYPE_QUALS (v)
5323       /* We want to match incomplete variants with complete types.
5324 	 In this case we need to ignore alignment.   */
5325       || ((!RECORD_OR_UNION_TYPE_P (t) || COMPLETE_TYPE_P (v))
5326 	  && (TYPE_ALIGN (t) != TYPE_ALIGN (v)
5327 	      || TYPE_USER_ALIGN (t) != TYPE_USER_ALIGN (v)))
5328       || fld_simplified_type_name (t) != fld_simplified_type_name (v)
5329       || !attribute_list_equal (TYPE_ATTRIBUTES (t),
5330 			        TYPE_ATTRIBUTES (v))
5331       || (inner_type && TREE_TYPE (v) != inner_type))
5332     return false;
5333 
5334   return true;
5335 }
5336 
5337 /* Find the variant of FIRST that matches T, creating a new one if necessary.
5338    Set TREE_TYPE to INNER_TYPE if non-NULL.  */
5339 
5340 static tree
5341 fld_type_variant (tree first, tree t, class free_lang_data_d *fld,
5342 		  tree inner_type = NULL)
5343 {
5344   if (first == TYPE_MAIN_VARIANT (t))
5345     return t;
5346   for (tree v = first; v; v = TYPE_NEXT_VARIANT (v))
5347     if (fld_type_variant_equal_p (t, v, inner_type))
5348       return v;
5349   tree v = build_variant_type_copy (first);
5350   TYPE_READONLY (v) = TYPE_READONLY (t);
5351   TYPE_VOLATILE (v) = TYPE_VOLATILE (t);
5352   TYPE_ATOMIC (v) = TYPE_ATOMIC (t);
5353   TYPE_RESTRICT (v) = TYPE_RESTRICT (t);
5354   TYPE_ADDR_SPACE (v) = TYPE_ADDR_SPACE (t);
5355   TYPE_NAME (v) = TYPE_NAME (t);
5356   TYPE_ATTRIBUTES (v) = TYPE_ATTRIBUTES (t);
5357   TYPE_CANONICAL (v) = TYPE_CANONICAL (t);
5358   /* Variants of incomplete types should have alignment
5359      set to BITS_PER_UNIT.  Do not copy the actual alignment.  */
5360   if (!RECORD_OR_UNION_TYPE_P (v) || COMPLETE_TYPE_P (v))
5361     {
5362       SET_TYPE_ALIGN (v, TYPE_ALIGN (t));
5363       TYPE_USER_ALIGN (v) = TYPE_USER_ALIGN (t);
5364     }
5365   if (inner_type)
5366     TREE_TYPE (v) = inner_type;
5367   gcc_checking_assert (fld_type_variant_equal_p (t,v, inner_type));
5368   if (!fld->pset.add (v))
5369     add_tree_to_fld_list (v, fld);
5370   return v;
5371 }
5372 
5373 /* Map complete types to incomplete types.  */
5374 
5375 static hash_map<tree, tree> *fld_incomplete_types;
5376 
5377 /* Map types to simplified types.  */
5378 
5379 static hash_map<tree, tree> *fld_simplified_types;
5380 
5381 /* Produce a variant of T whose TREE_TYPE is T2.  If T is a main variant,
5382    use MAP to prevent duplicates.  */
5383 
5384 static tree
5385 fld_process_array_type (tree t, tree t2, hash_map<tree, tree> *map,
5386 			class free_lang_data_d *fld)
5387 {
5388   if (TREE_TYPE (t) == t2)
5389     return t;
5390 
5391   if (TYPE_MAIN_VARIANT (t) != t)
5392     {
5393       return fld_type_variant
5394 	       (fld_process_array_type (TYPE_MAIN_VARIANT (t),
5395 					TYPE_MAIN_VARIANT (t2), map, fld),
5396 		t, fld, t2);
5397     }
5398 
5399   bool existed;
5400   tree &array
5401      = map->get_or_insert (t, &existed);
5402   if (!existed)
5403     {
5404       array
5405 	= build_array_type_1 (t2, TYPE_DOMAIN (t), TYPE_TYPELESS_STORAGE (t),
5406 			      false, false);
5407       TYPE_CANONICAL (array) = TYPE_CANONICAL (t);
5408       if (!fld->pset.add (array))
5409 	add_tree_to_fld_list (array, fld);
5410     }
5411   return array;
5412 }
5413 
5414 /* Return CTX after removing contexts that are not relevant.  */
5415 
5416 static tree
5417 fld_decl_context (tree ctx)
5418 {
5419   /* Variably modified types are needed for tree_is_indexable to decide
5420      whether the type needs to go to the local or the global section.
5421      This code is semi-broken, but for now it is easiest to keep contexts
5422      as expected.  */
5423   if (ctx && TYPE_P (ctx)
5424       && !variably_modified_type_p (ctx, NULL_TREE))
5425      {
5426        while (ctx && TYPE_P (ctx))
5427 	 ctx = TYPE_CONTEXT (ctx);
5428      }
5429   return ctx;
5430 }
5431 
5432 /* For T being an aggregate type, try to turn it into an incomplete variant.
5433    Return T if no simplification is possible.  */
5434 
5435 static tree
5436 fld_incomplete_type_of (tree t, class free_lang_data_d *fld)
5437 {
5438   if (!t)
5439     return NULL;
5440   if (POINTER_TYPE_P (t))
5441     {
5442       tree t2 = fld_incomplete_type_of (TREE_TYPE (t), fld);
5443       if (t2 != TREE_TYPE (t))
5444 	{
5445 	  tree first;
5446 	  if (TREE_CODE (t) == POINTER_TYPE)
5447 	    first = build_pointer_type_for_mode (t2, TYPE_MODE (t),
5448 						TYPE_REF_CAN_ALIAS_ALL (t));
5449 	  else
5450 	    first = build_reference_type_for_mode (t2, TYPE_MODE (t),
5451 						TYPE_REF_CAN_ALIAS_ALL (t));
5452 	  gcc_assert (TYPE_CANONICAL (t2) != t2
5453 		      && TYPE_CANONICAL (t2) == TYPE_CANONICAL (TREE_TYPE (t)));
5454 	  if (!fld->pset.add (first))
5455 	    add_tree_to_fld_list (first, fld);
5456 	  return fld_type_variant (first, t, fld);
5457 	}
5458       return t;
5459     }
5460   if (TREE_CODE (t) == ARRAY_TYPE)
5461     return fld_process_array_type (t,
5462 				   fld_incomplete_type_of (TREE_TYPE (t), fld),
5463 				   fld_incomplete_types, fld);
5464   if ((!RECORD_OR_UNION_TYPE_P (t) && TREE_CODE (t) != ENUMERAL_TYPE)
5465       || !COMPLETE_TYPE_P (t))
5466     return t;
5467   if (TYPE_MAIN_VARIANT (t) == t)
5468     {
5469       bool existed;
5470       tree &copy
5471 	 = fld_incomplete_types->get_or_insert (t, &existed);
5472 
5473       if (!existed)
5474 	{
5475 	  copy = build_distinct_type_copy (t);
5476 
5477 	  /* It is possible that type was not seen by free_lang_data yet.  */
5478 	  if (!fld->pset.add (copy))
5479 	    add_tree_to_fld_list (copy, fld);
5480 	  TYPE_SIZE (copy) = NULL;
5481 	  TYPE_USER_ALIGN (copy) = 0;
5482 	  TYPE_SIZE_UNIT (copy) = NULL;
5483 	  TYPE_CANONICAL (copy) = TYPE_CANONICAL (t);
5484 	  TREE_ADDRESSABLE (copy) = 0;
5485 	  if (AGGREGATE_TYPE_P (t))
5486 	    {
5487 	      SET_TYPE_MODE (copy, VOIDmode);
5488 	      SET_TYPE_ALIGN (copy, BITS_PER_UNIT);
5489 	      TYPE_TYPELESS_STORAGE (copy) = 0;
5490 	      TYPE_FIELDS (copy) = NULL;
5491 	      TYPE_BINFO (copy) = NULL;
5492 	      TYPE_FINAL_P (copy) = 0;
5493 	      TYPE_EMPTY_P (copy) = 0;
5494 	    }
5495 	  else
5496 	    {
5497 	      TYPE_VALUES (copy) = NULL;
5498 	      ENUM_IS_OPAQUE (copy) = 0;
5499 	      ENUM_IS_SCOPED (copy) = 0;
5500 	    }
5501 
5502 	  /* Build a copy of the TYPE_DECL in TYPE_NAME if necessary.
5503 	     This is needed for ODR violation warnings to come out right (we
5504 	     want duplicate TYPE_DECLs whenever the type is duplicated because
5505 	     of an ODR violation).  Because lang data in the TYPE_DECL may not
5506 	     have been freed yet, rebuild it from scratch and copy the relevant
5507 	     fields.  */
5508 	  TYPE_NAME (copy) = fld_simplified_type_name (copy);
5509 	  tree name = TYPE_NAME (copy);
5510 
5511 	  if (name && TREE_CODE (name) == TYPE_DECL)
5512 	    {
5513 	      gcc_checking_assert (TREE_TYPE (name) == t);
5514 	      tree name2 = build_decl (DECL_SOURCE_LOCATION (name), TYPE_DECL,
5515 				       DECL_NAME (name), copy);
5516 	      if (DECL_ASSEMBLER_NAME_SET_P (name))
5517 	        SET_DECL_ASSEMBLER_NAME (name2, DECL_ASSEMBLER_NAME (name));
5518 	      SET_DECL_ALIGN (name2, 0);
5519 	      DECL_CONTEXT (name2) = fld_decl_context
5520 					 (DECL_CONTEXT (name));
5521 	      TYPE_NAME (copy) = name2;
5522 	    }
5523 	}
5524       return copy;
5525    }
5526   return (fld_type_variant
5527 	    (fld_incomplete_type_of (TYPE_MAIN_VARIANT (t), fld), t, fld));
5528 }
5529 
5530 /* Simplify type T for scenarios where we do not need complete pointer
5531    types.  */
5532 
5533 static tree
5534 fld_simplified_type (tree t, class free_lang_data_d *fld)
5535 {
5536   if (!t)
5537     return t;
5538   if (POINTER_TYPE_P (t))
5539     return fld_incomplete_type_of (t, fld);
5540   /* FIXME: This triggers verification error, see PR88140.  */
5541   if (TREE_CODE (t) == ARRAY_TYPE && 0)
5542     return fld_process_array_type (t, fld_simplified_type (TREE_TYPE (t), fld),
5543 				   fld_simplified_types, fld);
5544   return t;
5545 }
5546 
5547 /* Reset the expression *EXPR_P, a size or position.
5548 
5549    ??? We could reset all non-constant sizes or positions.  But it's cheap
5550    enough to not do so and refrain from adding workarounds to dwarf2out.c.
5551 
5552    We need to reset self-referential sizes or positions because they cannot
5553    be gimplified and thus can contain a CALL_EXPR after the gimplification
5554    is finished, which will run afoul of LTO streaming.  And they need to be
5555    reset to something essentially dummy but not constant, so as to preserve
5556    the properties of the object they are attached to.  */
5557 
5558 static inline void
5559 free_lang_data_in_one_sizepos (tree *expr_p)
5560 {
5561   tree expr = *expr_p;
5562   if (CONTAINS_PLACEHOLDER_P (expr))
5563     *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5564 }
5565 
5566 
5567 /* Reset all the fields in a binfo node BINFO.  We only keep
5568    BINFO_VTABLE, which is used by gimple_fold_obj_type_ref.  */
5569 
5570 static void
5571 free_lang_data_in_binfo (tree binfo)
5572 {
5573   unsigned i;
5574   tree t;
5575 
5576   gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5577 
5578   BINFO_VIRTUALS (binfo) = NULL_TREE;
5579   BINFO_BASE_ACCESSES (binfo) = NULL;
5580   BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5581   BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5582   BINFO_VPTR_FIELD (binfo) = NULL_TREE;
5583   TREE_PUBLIC (binfo) = 0;
5584 
5585   FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5586     free_lang_data_in_binfo (t);
5587 }
5588 
5589 
5590 /* Reset all language specific information still present in TYPE.  */
5591 
5592 static void
5593 free_lang_data_in_type (tree type, class free_lang_data_d *fld)
5594 {
5595   gcc_assert (TYPE_P (type));
5596 
5597   /* Give the FE a chance to remove its own data first.  */
5598   lang_hooks.free_lang_data (type);
5599 
5600   TREE_LANG_FLAG_0 (type) = 0;
5601   TREE_LANG_FLAG_1 (type) = 0;
5602   TREE_LANG_FLAG_2 (type) = 0;
5603   TREE_LANG_FLAG_3 (type) = 0;
5604   TREE_LANG_FLAG_4 (type) = 0;
5605   TREE_LANG_FLAG_5 (type) = 0;
5606   TREE_LANG_FLAG_6 (type) = 0;
5607 
5608   TYPE_NEEDS_CONSTRUCTING (type) = 0;
5609 
5610   /* Purge non-marked variants from the variants chain, so that they
5611      don't reappear in the IL after free_lang_data.  */
5612   while (TYPE_NEXT_VARIANT (type)
5613 	 && !fld->pset.contains (TYPE_NEXT_VARIANT (type)))
5614     {
5615       tree t = TYPE_NEXT_VARIANT (type);
5616       TYPE_NEXT_VARIANT (type) = TYPE_NEXT_VARIANT (t);
5617       /* Turn the removed types into distinct types.  */
5618       TYPE_MAIN_VARIANT (t) = t;
5619       TYPE_NEXT_VARIANT (t) = NULL_TREE;
5620     }
5621 
5622   if (TREE_CODE (type) == FUNCTION_TYPE)
5623     {
5624       TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
5625       /* Remove the const and volatile qualifiers from arguments.  The
5626 	 C++ front end removes them, but the C front end does not,
5627 	 leading to false ODR violation errors when merging two
5628 	 instances of the same function signature compiled by
5629 	 different front ends.  */
5630       for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5631 	{
5632           TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
5633 	  tree arg_type = TREE_VALUE (p);
5634 
5635 	  if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5636 	    {
5637 	      int quals = TYPE_QUALS (arg_type)
5638 			  & ~TYPE_QUAL_CONST
5639 			  & ~TYPE_QUAL_VOLATILE;
5640 	      TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5641 	      if (!fld->pset.add (TREE_VALUE (p)))
5642 		free_lang_data_in_type (TREE_VALUE (p), fld);
5643 	    }
5644 	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
5645 	  TREE_PURPOSE (p) = NULL;
5646 	}
5647     }
5648   else if (TREE_CODE (type) == METHOD_TYPE)
5649     {
5650       TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
5651       for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5652 	{
5653 	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
5654 	  TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
5655 	  TREE_PURPOSE (p) = NULL;
5656 	}
5657     }
5658   else if (RECORD_OR_UNION_TYPE_P (type))
5659     {
5660       /* Remove members that are not FIELD_DECLs from the field list
5661 	 of an aggregate.  These occur in C++.  */
5662       for (tree *prev = &TYPE_FIELDS (type), member; (member = *prev);)
5663 	if (TREE_CODE (member) == FIELD_DECL)
5664 	  prev = &DECL_CHAIN (member);
5665 	else
5666 	  *prev = DECL_CHAIN (member);
5667 
5668       TYPE_VFIELD (type) = NULL_TREE;
5669 
5670       if (TYPE_BINFO (type))
5671 	{
5672 	  free_lang_data_in_binfo (TYPE_BINFO (type));
5673 	  /* We need to preserve the link to bases and the virtual table for all
5674 	     polymorphic types to keep the devirtualization machinery working.  */
5675 	  if (!BINFO_VTABLE (TYPE_BINFO (type)))
5676 	    TYPE_BINFO (type) = NULL;
5677 	}
5678     }
5679   else if (INTEGRAL_TYPE_P (type)
5680 	   || SCALAR_FLOAT_TYPE_P (type)
5681 	   || FIXED_POINT_TYPE_P (type))
5682     {
5683       if (TREE_CODE (type) == ENUMERAL_TYPE)
5684 	{
5685 	  ENUM_IS_OPAQUE (type) = 0;
5686 	  ENUM_IS_SCOPED (type) = 0;
5687 	  /* Type values are used only for C++ ODR checking.  Drop them
5688 	     for all type variants and non-ODR types.
5689 	     For ODR types the data is freed in free_odr_warning_data.  */
5690 	  if (!TYPE_VALUES (type))
5691 	    ;
5692 	  else if (TYPE_MAIN_VARIANT (type) != type
5693 		   || !type_with_linkage_p (type)
5694 		   || type_in_anonymous_namespace_p (type))
5695 	    TYPE_VALUES (type) = NULL;
5696 	  else
5697 	    register_odr_enum (type);
5698 	}
5699       free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5700       free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5701     }
5702 
5703   TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5704 
5705   free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5706   free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5707 
5708   if (TYPE_CONTEXT (type)
5709       && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5710     {
5711       tree ctx = TYPE_CONTEXT (type);
5712       do
5713 	{
5714 	  ctx = BLOCK_SUPERCONTEXT (ctx);
5715 	}
5716       while (ctx && TREE_CODE (ctx) == BLOCK);
5717       TYPE_CONTEXT (type) = ctx;
5718     }
5719 
5720   TYPE_STUB_DECL (type) = NULL;
5721   TYPE_NAME (type) = fld_simplified_type_name (type);
5722 }
5723 
5724 
5725 /* Return true if DECL may need an assembler name to be set.  */
5726 
5727 static inline bool
5728 need_assembler_name_p (tree decl)
5729 {
5730   /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5731      Rule merging.  This makes type_odr_p return true on those types during
5732      LTO, and by comparing the mangled names we can tell which types are
5733      intended to be equivalent across compilation units.
5734 
5735      We do not store names of type_in_anonymous_namespace_p types.
5736 
5737      Record, union and enumeration types have linkage that allows us to check
5738      type_in_anonymous_namespace_p.  We do not mangle compound types that can
5739      always be compared structurally.
5740 
5741      Similarly for builtin types, we compare properties of their main variant.
5742      A special case is integer types, where mangling does distinguish between
5743      char/signed char/unsigned char etc.  Storing names for these ensures that
5744      e.g. -fno-signed-char/-fsigned-char mismatches are handled well.
5745      See cp/mangle.c:write_builtin_type for details.  */
5746 
5747   if (TREE_CODE (decl) == TYPE_DECL)
5748     {
5749       if (DECL_NAME (decl)
5750 	  && decl == TYPE_NAME (TREE_TYPE (decl))
5751 	  && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5752 	  && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5753 	  && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
5754 	       && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
5755 	      || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
5756 	  && (type_with_linkage_p (TREE_TYPE (decl))
5757 	      || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5758 	  && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5759 	return !DECL_ASSEMBLER_NAME_SET_P (decl);
5760       return false;
5761     }
5762   /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
5763   if (!VAR_OR_FUNCTION_DECL_P (decl))
5764     return false;
5765 
5766   /* If DECL already has its assembler name set, it does not need a
5767      new one.  */
5768   if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5769       || DECL_ASSEMBLER_NAME_SET_P (decl))
5770     return false;
5771 
5772   /* Abstract decls do not need an assembler name.  */
5773   if (DECL_ABSTRACT_P (decl))
5774     return false;
5775 
5776   /* For VAR_DECLs, only static, public and external symbols need an
5777      assembler name.  */
5778   if (VAR_P (decl)
5779       && !TREE_STATIC (decl)
5780       && !TREE_PUBLIC (decl)
5781       && !DECL_EXTERNAL (decl))
5782     return false;
5783 
5784   if (TREE_CODE (decl) == FUNCTION_DECL)
5785     {
5786       /* Do not set assembler name on builtins.  Allow RTL expansion to
5787 	 decide whether to expand inline or via a regular call.  */
5788       if (fndecl_built_in_p (decl)
5789 	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5790 	return false;
5791 
5792       /* Functions represented in the callgraph need an assembler name.  */
5793       if (cgraph_node::get (decl) != NULL)
5794 	return true;
5795 
5796       /* Unused and not public functions don't need an assembler name.  */
5797       if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5798 	return false;
5799     }
5800 
5801   return true;
5802 }
5803 
5804 
5805 /* Reset all language specific information still present in symbol
5806    DECL.  */
5807 
5808 static void
5809 free_lang_data_in_decl (tree decl, class free_lang_data_d *fld)
5810 {
5811   gcc_assert (DECL_P (decl));
5812 
5813   /* Give the FE a chance to remove its own data first.  */
5814   lang_hooks.free_lang_data (decl);
5815 
5816   TREE_LANG_FLAG_0 (decl) = 0;
5817   TREE_LANG_FLAG_1 (decl) = 0;
5818   TREE_LANG_FLAG_2 (decl) = 0;
5819   TREE_LANG_FLAG_3 (decl) = 0;
5820   TREE_LANG_FLAG_4 (decl) = 0;
5821   TREE_LANG_FLAG_5 (decl) = 0;
5822   TREE_LANG_FLAG_6 (decl) = 0;
5823 
5824   free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5825   free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5826   if (TREE_CODE (decl) == FIELD_DECL)
5827     {
5828       DECL_FCONTEXT (decl) = NULL;
5829       free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5830       if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5831 	DECL_QUALIFIER (decl) = NULL_TREE;
5832     }
5833 
5834  if (TREE_CODE (decl) == FUNCTION_DECL)
5835     {
5836       struct cgraph_node *node;
5837       /* Front ends do not set TREE_ADDRESSABLE on public variables even though
5838 	 the address may be taken in another unit, so this flag has no practical
5839 	 use for the middle end.
5840 
5841 	 It would make more sense if front ends set TREE_ADDRESSABLE to 0 only
5842 	 for public objects that indeed cannot be addressed, but that is not
5843 	 the case.  Set the flag to true so we do not get merge failures for,
5844 	 e.g., virtual tables between units that take their address and units
5845 	 that don't.  */
5846       if (TREE_PUBLIC (decl))
5847 	TREE_ADDRESSABLE (decl) = true;
5848       TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
5849       if (!(node = cgraph_node::get (decl))
5850 	  || (!node->definition && !node->clones))
5851 	{
5852 	  if (node && !node->declare_variant_alt)
5853 	    node->release_body ();
5854 	  else
5855 	    {
5856 	      release_function_body (decl);
5857 	      DECL_ARGUMENTS (decl) = NULL;
5858 	      DECL_RESULT (decl) = NULL;
5859 	      DECL_INITIAL (decl) = error_mark_node;
5860 	    }
5861 	}
5862       if (gimple_has_body_p (decl) || (node && node->thunk))
5863 	{
5864 	  tree t;
5865 
5866 	  /* If DECL has a gimple body, then the context for its
5867 	     arguments must be DECL.  Otherwise, it doesn't really
5868 	     matter, as we will not be emitting any code for DECL.  In
5869 	     general, there may be other instances of DECL created by
5870 	     the front end and since PARM_DECLs are generally shared,
5871 	     their DECL_CONTEXT changes as the replicas of DECL are
5872 	     created.  The only time where DECL_CONTEXT is important
5873 	     is for the FUNCTION_DECLs that have a gimple body (since
5874 	     the PARM_DECL will be used in the function's body).  */
5875 	  for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5876 	    DECL_CONTEXT (t) = decl;
5877 	  if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5878 	    DECL_FUNCTION_SPECIFIC_TARGET (decl)
5879 	      = target_option_default_node;
5880 	  if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5881 	    DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5882 	      = optimization_default_node;
5883 	}
5884 
5885       /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5886 	 At this point, it is not needed anymore.  */
5887       DECL_SAVED_TREE (decl) = NULL_TREE;
5888 
5889       /* Clear the abstract origin if it refers to a method.
5890          Otherwise dwarf2out.c will ICE as we splice functions out of
5891          TYPE_FIELDS and thus the origin will not be output
5892          correctly.  */
5893       if (DECL_ABSTRACT_ORIGIN (decl)
5894 	  && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5895 	  && RECORD_OR_UNION_TYPE_P
5896 	       (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5897 	DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5898 
5899       DECL_VINDEX (decl) = NULL_TREE;
5900     }
5901   else if (VAR_P (decl))
5902     {
5903       /* See comment above why we set the flag for functions.  */
5904       if (TREE_PUBLIC (decl))
5905 	TREE_ADDRESSABLE (decl) = true;
5906       if ((DECL_EXTERNAL (decl)
5907 	   && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5908 	  || (decl_function_context (decl) && !TREE_STATIC (decl)))
5909 	DECL_INITIAL (decl) = NULL_TREE;
5910     }
5911   else if (TREE_CODE (decl) == TYPE_DECL)
5912     {
5913       DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5914       DECL_VISIBILITY_SPECIFIED (decl) = 0;
5915       TREE_PUBLIC (decl) = 0;
5916       TREE_PRIVATE (decl) = 0;
5917       DECL_ARTIFICIAL (decl) = 0;
5918       TYPE_DECL_SUPPRESS_DEBUG (decl) = 0;
5919       DECL_INITIAL (decl) = NULL_TREE;
5920       DECL_ORIGINAL_TYPE (decl) = NULL_TREE;
5921       DECL_MODE (decl) = VOIDmode;
5922       SET_DECL_ALIGN (decl, 0);
5923       /* TREE_TYPE is cleared at WPA time in free_odr_warning_data.  */
5924     }
5925   else if (TREE_CODE (decl) == FIELD_DECL)
5926     {
5927       TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
5928       DECL_INITIAL (decl) = NULL_TREE;
5929     }
5930   else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5931            && DECL_INITIAL (decl)
5932            && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5933     {
5934       /* Strip builtins from the translation-unit BLOCK.  We still have targets
5935 	 without builtin_decl_explicit support and also builtins are shared
5936 	 nodes and thus we can't use TREE_CHAIN in multiple lists.  */
5937       tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5938       while (*nextp)
5939 	{
5940 	  tree var = *nextp;
5941 	  if (TREE_CODE (var) == FUNCTION_DECL
5942 	      && fndecl_built_in_p (var))
5943 	    *nextp = TREE_CHAIN (var);
5944 	  else
5945 	    nextp = &TREE_CHAIN (var);
5946         }
5947     }
5948   /* We need to keep field decls associated with their trees.  Otherwise tree
5949      merging may merge some fields and keep others disjoint, which in turn
5950      will not do well with TREE_CHAIN pointers linking them.
5951 
5952      Also do not drop containing types for virtual methods and tables because
5953      these are needed by devirtualization.
5954      C++ destructors are special because C++ front ends sometimes produce a
5955      virtual destructor as an alias of a non-virtual destructor.  In the
5956      devirtualization code we always walk through aliases and we need the
5957      context to be preserved too.  See PR89335.  */
5958   if (TREE_CODE (decl) != FIELD_DECL
5959       && ((TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL)
5960           || (!DECL_VIRTUAL_P (decl)
5961 	      && (TREE_CODE (decl) != FUNCTION_DECL
5962 		  || !DECL_CXX_DESTRUCTOR_P (decl)))))
5963     DECL_CONTEXT (decl) = fld_decl_context (DECL_CONTEXT (decl));
5964 }
5965 
5966 
5967 /* Operand callback helper for free_lang_data_in_node.  *TP is the
5968    subtree operand being considered.  */
5969 
5970 static tree
5971 find_decls_types_r (tree *tp, int *ws, void *data)
5972 {
5973   tree t = *tp;
5974   class free_lang_data_d *fld = (class free_lang_data_d *) data;
5975 
5976   if (TREE_CODE (t) == TREE_LIST)
5977     return NULL_TREE;
5978 
5979   /* Language specific nodes will be removed, so there is no need
5980      to gather anything under them.  */
5981   if (is_lang_specific (t))
5982     {
5983       *ws = 0;
5984       return NULL_TREE;
5985     }
5986 
5987   if (DECL_P (t))
5988     {
5989       /* Note that walk_tree does not traverse every possible field in
5990 	 decls, so we have to do our own traversals here.  */
5991       add_tree_to_fld_list (t, fld);
5992 
5993       fld_worklist_push (DECL_NAME (t), fld);
5994       fld_worklist_push (DECL_CONTEXT (t), fld);
5995       fld_worklist_push (DECL_SIZE (t), fld);
5996       fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5997 
5998       /* We are going to remove everything under DECL_INITIAL for
5999 	 TYPE_DECLs.  No point walking them.  */
6000       if (TREE_CODE (t) != TYPE_DECL)
6001 	fld_worklist_push (DECL_INITIAL (t), fld);
6002 
6003       fld_worklist_push (DECL_ATTRIBUTES (t), fld);
6004       fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
6005 
6006       if (TREE_CODE (t) == FUNCTION_DECL)
6007 	{
6008 	  fld_worklist_push (DECL_ARGUMENTS (t), fld);
6009 	  fld_worklist_push (DECL_RESULT (t), fld);
6010 	}
6011       else if (TREE_CODE (t) == FIELD_DECL)
6012 	{
6013 	  fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
6014 	  fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
6015 	  fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
6016 	  fld_worklist_push (DECL_FCONTEXT (t), fld);
6017 	}
6018 
6019       if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
6020 	  && DECL_HAS_VALUE_EXPR_P (t))
6021 	fld_worklist_push (DECL_VALUE_EXPR (t), fld);
6022 
6023       if (TREE_CODE (t) != FIELD_DECL
6024 	  && TREE_CODE (t) != TYPE_DECL)
6025 	fld_worklist_push (TREE_CHAIN (t), fld);
6026       *ws = 0;
6027     }
6028   else if (TYPE_P (t))
6029     {
6030       /* Note that walk_tree does not traverse every possible field in
6031 	 types, so we have to do our own traversals here.  */
6032       add_tree_to_fld_list (t, fld);
6033 
6034       if (!RECORD_OR_UNION_TYPE_P (t))
6035 	fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
6036       fld_worklist_push (TYPE_SIZE (t), fld);
6037       fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
6038       fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
6039       fld_worklist_push (TYPE_POINTER_TO (t), fld);
6040       fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
6041       fld_worklist_push (TYPE_NAME (t), fld);
6042       /* While we do not stream TYPE_POINTER_TO and TYPE_REFERENCE_TO
6043 	 lists, we may look types up in these lists and use them while
6044 	 optimizing the function body.  Thus we need to free lang data
6045 	 in them.  */
6046       if (TREE_CODE (t) == POINTER_TYPE)
6047         fld_worklist_push (TYPE_NEXT_PTR_TO (t), fld);
6048       if (TREE_CODE (t) == REFERENCE_TYPE)
6049         fld_worklist_push (TYPE_NEXT_REF_TO (t), fld);
6050       if (!POINTER_TYPE_P (t))
6051 	fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
6052       /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types.  */
6053       if (!RECORD_OR_UNION_TYPE_P (t))
6054 	fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
6055       fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
6056       /* Do not walk TYPE_NEXT_VARIANT.  We do not stream it and thus
6057          do not want to reach unused variants this way.  */
6058       if (TYPE_CONTEXT (t))
6059 	{
6060 	  tree ctx = TYPE_CONTEXT (t);
6061 	  /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
6062 	     So push that instead.  */
6063 	  while (ctx && TREE_CODE (ctx) == BLOCK)
6064 	    ctx = BLOCK_SUPERCONTEXT (ctx);
6065 	  fld_worklist_push (ctx, fld);
6066 	}
6067       fld_worklist_push (TYPE_CANONICAL (t), fld);
6068 
6069       if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
6070 	{
6071 	  unsigned i;
6072 	  tree tem;
6073 	  FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
6074 	    fld_worklist_push (TREE_TYPE (tem), fld);
6075 	  fld_worklist_push (BINFO_TYPE (TYPE_BINFO (t)), fld);
6076 	  fld_worklist_push (BINFO_VTABLE (TYPE_BINFO (t)), fld);
6077 	}
6078       if (RECORD_OR_UNION_TYPE_P (t))
6079 	{
6080 	  tree tem;
6081 	  /* Push all TYPE_FIELDS - there can be interleaving interesting
6082 	     and non-interesting things.  */
6083 	  tem = TYPE_FIELDS (t);
6084 	  while (tem)
6085 	    {
6086 	      if (TREE_CODE (tem) == FIELD_DECL)
6087 		fld_worklist_push (tem, fld);
6088 	      tem = TREE_CHAIN (tem);
6089 	    }
6090 	}
6091       if (FUNC_OR_METHOD_TYPE_P (t))
6092 	fld_worklist_push (TYPE_METHOD_BASETYPE (t), fld);
6093 
6094       fld_worklist_push (TYPE_STUB_DECL (t), fld);
6095       *ws = 0;
6096     }
6097   else if (TREE_CODE (t) == BLOCK)
6098     {
6099       for (tree *tem = &BLOCK_VARS (t); *tem; )
6100 	{
6101 	  if (TREE_CODE (*tem) != LABEL_DECL
6102 	      && (TREE_CODE (*tem) != VAR_DECL
6103 		  || !auto_var_in_fn_p (*tem, DECL_CONTEXT (*tem))))
6104 	    {
6105 	      gcc_assert (TREE_CODE (*tem) != RESULT_DECL
6106 			  && TREE_CODE (*tem) != PARM_DECL);
6107 	      *tem = TREE_CHAIN (*tem);
6108 	    }
6109 	  else
6110 	    {
6111 	      fld_worklist_push (*tem, fld);
6112 	      tem = &TREE_CHAIN (*tem);
6113 	    }
6114 	}
6115       for (tree tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
6116 	fld_worklist_push (tem, fld);
6117       fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
6118     }
6119 
6120   if (TREE_CODE (t) != IDENTIFIER_NODE
6121       && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
6122     fld_worklist_push (TREE_TYPE (t), fld);
6123 
6124   return NULL_TREE;
6125 }
6126 
6127 
6128 /* Find decls and types in T.  */
6129 
6130 static void
6131 find_decls_types (tree t, class free_lang_data_d *fld)
6132 {
6133   while (1)
6134     {
6135       if (!fld->pset.contains (t))
6136 	walk_tree (&t, find_decls_types_r, fld, &fld->pset);
6137       if (fld->worklist.is_empty ())
6138 	break;
6139       t = fld->worklist.pop ();
6140     }
6141 }
6142 
6143 /* Translate all the types in LIST into the corresponding runtime
6144    types.  */
6145 
6146 static tree
6147 get_eh_types_for_runtime (tree list)
6148 {
6149   tree head, prev;
6150 
6151   if (list == NULL_TREE)
6152     return NULL_TREE;
6153 
6154   head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6155   prev = head;
6156   list = TREE_CHAIN (list);
6157   while (list)
6158     {
6159       tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6160       TREE_CHAIN (prev) = n;
6161       prev = TREE_CHAIN (prev);
6162       list = TREE_CHAIN (list);
6163     }
6164 
6165   return head;
6166 }
6167 
6168 
6169 /* Find decls and types referenced in EH region R and store them in
6170    FLD->DECLS and FLD->TYPES.  */
6171 
6172 static void
6173 find_decls_types_in_eh_region (eh_region r, class free_lang_data_d *fld)
6174 {
6175   switch (r->type)
6176     {
6177     case ERT_CLEANUP:
6178       break;
6179 
6180     case ERT_TRY:
6181       {
6182 	eh_catch c;
6183 
6184 	/* The types referenced in each catch must first be changed to the
6185 	   EH types used at runtime.  This removes references to FE types
6186 	   in the region.  */
6187 	for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
6188 	  {
6189 	    c->type_list = get_eh_types_for_runtime (c->type_list);
6190 	    walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
6191 	  }
6192       }
6193       break;
6194 
6195     case ERT_ALLOWED_EXCEPTIONS:
6196       r->u.allowed.type_list
6197 	= get_eh_types_for_runtime (r->u.allowed.type_list);
6198       walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
6199       break;
6200 
6201     case ERT_MUST_NOT_THROW:
6202       walk_tree (&r->u.must_not_throw.failure_decl,
6203 		 find_decls_types_r, fld, &fld->pset);
6204       break;
6205     }
6206 }
6207 
6208 
6209 /* Find decls and types referenced in cgraph node N and store them in
6210    FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
6211    look for *every* kind of DECL and TYPE node reachable from N,
6212    including those embedded inside types and decls (e.g., TYPE_DECLs,
6213    NAMESPACE_DECLs, etc.).  */
6214 
6215 static void
6216 find_decls_types_in_node (struct cgraph_node *n, class free_lang_data_d *fld)
6217 {
6218   basic_block bb;
6219   struct function *fn;
6220   unsigned ix;
6221   tree t;
6222 
6223   find_decls_types (n->decl, fld);
6224 
6225   if (!gimple_has_body_p (n->decl))
6226     return;
6227 
6228   gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
6229 
6230   fn = DECL_STRUCT_FUNCTION (n->decl);
6231 
6232   /* Traverse locals. */
6233   FOR_EACH_LOCAL_DECL (fn, ix, t)
6234     find_decls_types (t, fld);
6235 
6236   /* Traverse EH regions in FN.  */
6237   {
6238     eh_region r;
6239     FOR_ALL_EH_REGION_FN (r, fn)
6240       find_decls_types_in_eh_region (r, fld);
6241   }
6242 
6243   /* Traverse every statement in FN.  */
6244   FOR_EACH_BB_FN (bb, fn)
6245     {
6246       gphi_iterator psi;
6247       gimple_stmt_iterator si;
6248       unsigned i;
6249 
6250       for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
6251 	{
6252 	  gphi *phi = psi.phi ();
6253 
6254 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
6255 	    {
6256 	      tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
6257 	      find_decls_types (*arg_p, fld);
6258 	    }
6259 	}
6260 
6261       for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6262 	{
6263 	  gimple *stmt = gsi_stmt (si);
6264 
6265 	  if (is_gimple_call (stmt))
6266 	    find_decls_types (gimple_call_fntype (stmt), fld);
6267 
6268 	  for (i = 0; i < gimple_num_ops (stmt); i++)
6269 	    {
6270 	      tree arg = gimple_op (stmt, i);
6271 	      find_decls_types (arg, fld);
6272 	      /* find_decls_types doesn't walk TREE_PURPOSE of TREE_LISTs,
6273 		 which we need for asm stmts.  */
6274 	      if (arg
6275 		  && TREE_CODE (arg) == TREE_LIST
6276 		  && TREE_PURPOSE (arg)
6277 		  && gimple_code (stmt) == GIMPLE_ASM)
6278 		find_decls_types (TREE_PURPOSE (arg), fld);
6279 	    }
6280 	}
6281     }
6282 }
6283 
6284 
6285 /* Find decls and types referenced in varpool node N and store them in
6286    FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
6287    look for *every* kind of DECL and TYPE node reachable from N,
6288    including those embedded inside types and decls (e.g., TYPE_DECLs,
6289    NAMESPACE_DECLs, etc.).  */
6290 
6291 static void
6292 find_decls_types_in_var (varpool_node *v, class free_lang_data_d *fld)
6293 {
6294   find_decls_types (v->decl, fld);
6295 }
6296 
6297 /* If T needs an assembler name, have one created for it.  */
6298 
6299 void
6300 assign_assembler_name_if_needed (tree t)
6301 {
6302   if (need_assembler_name_p (t))
6303     {
6304       /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
6305 	 diagnostics that use input_location to show locus
6306 	 information.  The problem here is that, at this point,
6307 	 input_location is generally anchored to the end of the file
6308 	 (since the parser is long gone), so we don't have a good
6309 	 position to pin it to.
6310 
6311 	 To alleviate this problem, this uses the location of T's
6312 	 declaration.  Examples of this are
6313 	 testsuite/g++.dg/template/cond2.C and
6314 	 testsuite/g++.dg/template/pr35240.C.  */
6315       location_t saved_location = input_location;
6316       input_location = DECL_SOURCE_LOCATION (t);
6317 
6318       decl_assembler_name (t);
6319 
6320       input_location = saved_location;
6321     }
6322 }
6323 
6324 
6325 /* Free language specific information for every operand and expression
6326    in every node of the call graph.  This process operates in three stages:
6327 
6328    1- Every callgraph node and varpool node is traversed looking for
6329       decls and types embedded in them.  This is a more exhaustive
6330       search than that done by find_referenced_vars, because it will
6331       also collect individual fields, decls embedded in types, etc.
6332 
6333    2- All the decls found are sent to free_lang_data_in_decl.
6334 
6335    3- All the types found are sent to free_lang_data_in_type.
6336 
6337    The ordering between decls and types is important because
6338    free_lang_data_in_decl sets assembler names, which includes
6339    mangling.  So types cannot be freed up until assembler names have
6340    been set up.  */
6341 
6342 static void
6343 free_lang_data_in_cgraph (class free_lang_data_d *fld)
6344 {
6345   struct cgraph_node *n;
6346   varpool_node *v;
6347   tree t;
6348   unsigned i;
6349   alias_pair *p;
6350 
6351   /* Find decls and types in the body of every function in the callgraph.  */
6352   FOR_EACH_FUNCTION (n)
6353     find_decls_types_in_node (n, fld);
6354 
6355   FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
6356     find_decls_types (p->decl, fld);
6357 
6358   /* Find decls and types in every varpool symbol.  */
6359   FOR_EACH_VARIABLE (v)
6360     find_decls_types_in_var (v, fld);
6361 
6362   /* Set the assembler name on every decl found.  We need to do this
6363      now because free_lang_data_in_decl will invalidate data needed
6364      for mangling.  This breaks mangling on interdependent decls.  */
6365   FOR_EACH_VEC_ELT (fld->decls, i, t)
6366     assign_assembler_name_if_needed (t);
6367 
6368   /* Traverse every decl found freeing its language data.  */
6369   FOR_EACH_VEC_ELT (fld->decls, i, t)
6370     free_lang_data_in_decl (t, fld);
6371 
6372   /* Traverse every type found freeing its language data.  */
6373   FOR_EACH_VEC_ELT (fld->types, i, t)
6374     free_lang_data_in_type (t, fld);
6375 }
6376 
6377 
6378 /* Free resources that are used by the FE but are not needed once it is done.  */
6379 
6380 static unsigned
6381 free_lang_data (void)
6382 {
6383   unsigned i;
6384   class free_lang_data_d fld;
6385 
6386   /* If we are the LTO frontend we have freed lang-specific data already.  */
6387   if (in_lto_p
6388       || (!flag_generate_lto && !flag_generate_offload))
6389     {
6390       /* Rebuild type inheritance graph even when not doing LTO to get
6391 	 consistent profile data.  */
6392       rebuild_type_inheritance_graph ();
6393       return 0;
6394     }
6395 
6396   fld_incomplete_types = new hash_map<tree, tree>;
6397   fld_simplified_types = new hash_map<tree, tree>;
6398 
6399   /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one.  */
6400   if (vec_safe_is_empty (all_translation_units))
6401     build_translation_unit_decl (NULL_TREE);
6402 
6403   /* Allocate and assign alias sets to the standard integer types while
6404      the slots still hold the types the way the front ends generated them.  */
6405   for (i = 0; i < itk_none; ++i)
6406     if (integer_types[i])
6407       TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
6408 
6409   /* Traverse the IL resetting language specific information for
6410      operands, expressions, etc.  */
6411   free_lang_data_in_cgraph (&fld);
6412 
6413   /* Create gimple variants for common types.  */
6414   for (unsigned i = 0;
6415        i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
6416        ++i)
6417     builtin_structptr_types[i].node = builtin_structptr_types[i].base;
6418 
6419   /* Reset some langhooks.  Do not reset types_compatible_p, it may
6420      still be used indirectly via the get_alias_set langhook.  */
6421   lang_hooks.dwarf_name = lhd_dwarf_name;
6422   lang_hooks.decl_printable_name = gimple_decl_printable_name;
6423   lang_hooks.gimplify_expr = lhd_gimplify_expr;
6424   lang_hooks.overwrite_decl_assembler_name = lhd_overwrite_decl_assembler_name;
6425   lang_hooks.print_xnode = lhd_print_tree_nothing;
6426   lang_hooks.print_decl = lhd_print_tree_nothing;
6427   lang_hooks.print_type = lhd_print_tree_nothing;
6428   lang_hooks.print_identifier = lhd_print_tree_nothing;
6429 
6430   lang_hooks.tree_inlining.var_mod_type_p = hook_bool_tree_tree_false;
6431 
6432   if (flag_checking)
6433     {
6434       int i;
6435       tree t;
6436 
6437       FOR_EACH_VEC_ELT (fld.types, i, t)
6438 	verify_type (t);
6439     }
6440 
6441   /* We do not want the default decl_assembler_name implementation,
6442      rather if we have fixed everything we want a wrapper around it
6443      asserting that all non-local symbols already got their assembler
6444      name and only produce assembler names for local symbols.  Or rather
6445      make sure we never call decl_assembler_name on local symbols and
6446      devise a separate, middle-end private scheme for it.  */
6447 
6448   /* Reset diagnostic machinery.  */
6449   tree_diagnostics_defaults (global_dc);
6450 
6451   rebuild_type_inheritance_graph ();
6452 
6453   delete fld_incomplete_types;
6454   delete fld_simplified_types;
6455 
6456   return 0;
6457 }
6458 
6459 
6460 namespace {
6461 
6462 const pass_data pass_data_ipa_free_lang_data =
6463 {
6464   SIMPLE_IPA_PASS, /* type */
6465   "*free_lang_data", /* name */
6466   OPTGROUP_NONE, /* optinfo_flags */
6467   TV_IPA_FREE_LANG_DATA, /* tv_id */
6468   0, /* properties_required */
6469   0, /* properties_provided */
6470   0, /* properties_destroyed */
6471   0, /* todo_flags_start */
6472   0, /* todo_flags_finish */
6473 };
6474 
6475 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6476 {
6477 public:
6478   pass_ipa_free_lang_data (gcc::context *ctxt)
6479     : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6480   {}
6481 
6482   /* opt_pass methods: */
6483   virtual unsigned int execute (function *) { return free_lang_data (); }
6484 
6485 }; // class pass_ipa_free_lang_data
6486 
6487 } // anon namespace
6488 
6489 simple_ipa_opt_pass *
6490 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6491 {
6492   return new pass_ipa_free_lang_data (ctxt);
6493 }
6494 
6495 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6496    of the various TYPE_QUAL values.  */
6497 
6498 static void
6499 set_type_quals (tree type, int type_quals)
6500 {
6501   TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6502   TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6503   TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6504   TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6505   TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6506 }
6507 
6508 /* Returns true iff CAND and BASE have equivalent language-specific
6509    qualifiers.  */
6510 
6511 bool
6512 check_lang_type (const_tree cand, const_tree base)
6513 {
6514   if (lang_hooks.types.type_hash_eq == NULL)
6515     return true;
6516   /* type_hash_eq currently only applies to these types.  */
6517   if (TREE_CODE (cand) != FUNCTION_TYPE
6518       && TREE_CODE (cand) != METHOD_TYPE)
6519     return true;
6520   return lang_hooks.types.type_hash_eq (cand, base);
6521 }
6522 
6523 /* This function checks to see if TYPE matches the size of one of the built-in
6524    atomic types, and returns that core atomic type.  */
6525 
6526 static tree
6527 find_atomic_core_type (const_tree type)
6528 {
6529   tree base_atomic_type;
6530 
6531   /* Only handle complete types.  */
6532   if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
6533     return NULL_TREE;
6534 
6535   switch (tree_to_uhwi (TYPE_SIZE (type)))
6536     {
6537     case 8:
6538       base_atomic_type = atomicQI_type_node;
6539       break;
6540 
6541     case 16:
6542       base_atomic_type = atomicHI_type_node;
6543       break;
6544 
6545     case 32:
6546       base_atomic_type = atomicSI_type_node;
6547       break;
6548 
6549     case 64:
6550       base_atomic_type = atomicDI_type_node;
6551       break;
6552 
6553     case 128:
6554       base_atomic_type = atomicTI_type_node;
6555       break;
6556 
6557     default:
6558       base_atomic_type = NULL_TREE;
6559     }
6560 
6561   return base_atomic_type;
6562 }
6563 
6564 /* Returns true iff unqualified CAND and BASE are equivalent.  */
6565 
6566 bool
6567 check_base_type (const_tree cand, const_tree base)
6568 {
6569   if (TYPE_NAME (cand) != TYPE_NAME (base)
6570       /* Apparently this is needed for Objective-C.  */
6571       || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
6572       || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
6573 			        TYPE_ATTRIBUTES (base)))
6574     return false;
6575   /* Check alignment.  */
6576   if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6577       && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
6578     return true;
6579   /* Atomic types increase the minimal alignment.  We must do so as well
6580      or we get duplicated canonical types.  See PR88686.  */
6581   if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
6582     {
6583       /* See if this object can map to a basic atomic type.  */
6584       tree atomic_type = find_atomic_core_type (cand);
6585       if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
6586        return true;
6587     }
6588   return false;
6589 }
6590 
6591 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS.  */
6592 
6593 bool
6594 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6595 {
6596   return (TYPE_QUALS (cand) == type_quals
6597 	  && check_base_type (cand, base)
6598 	  && check_lang_type (cand, base));
6599 }
6600 
6601 /* Returns true iff CAND is equivalent to BASE with ALIGN.  */
6602 
6603 static bool
6604 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6605 {
6606   return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6607 	  && TYPE_NAME (cand) == TYPE_NAME (base)
6608 	  /* Apparently this is needed for Objective-C.  */
6609 	  && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6610 	  /* Check alignment.  */
6611 	  && TYPE_ALIGN (cand) == align
6612 	  /* Check this is a user-aligned type as build_aligned_type
6613 	     would create.  */
6614 	  && TYPE_USER_ALIGN (cand)
6615 	  && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6616 				   TYPE_ATTRIBUTES (base))
6617 	  && check_lang_type (cand, base));
6618 }
6619 
6620 /* Return a version of the TYPE, qualified as indicated by the
6621    TYPE_QUALS, if one exists.  If no qualified version exists yet,
6622    return NULL_TREE.  */
6623 
6624 tree
6625 get_qualified_type (tree type, int type_quals)
6626 {
6627   if (TYPE_QUALS (type) == type_quals)
6628     return type;
6629 
6630   tree mv = TYPE_MAIN_VARIANT (type);
6631   if (check_qualified_type (mv, type, type_quals))
6632     return mv;
6633 
6634   /* Search the chain of variants to see if there is already one there just
6635      like the one we need to have.  If so, use that existing one.  We must
6636      preserve the TYPE_NAME, since there is code that depends on this.  */
6637   for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
6638     if (check_qualified_type (*tp, type, type_quals))
6639       {
6640 	/* Put the found variant at the head of the variant list so
6641 	   frequently searched variants get found faster.  The C++ FE
6642 	   benefits greatly from this.  */
6643 	tree t = *tp;
6644 	*tp = TYPE_NEXT_VARIANT (t);
6645 	TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
6646 	TYPE_NEXT_VARIANT (mv) = t;
6647 	return t;
6648       }
6649 
6650   return NULL_TREE;
6651 }
6652 
6653 /* Like get_qualified_type, but creates the type if it does not
6654    exist.  This function never returns NULL_TREE.  */
6655 
6656 tree
6657 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
6658 {
6659   tree t;
6660 
6661   /* See if we already have the appropriate qualified variant.  */
6662   t = get_qualified_type (type, type_quals);
6663 
6664   /* If not, build it.  */
6665   if (!t)
6666     {
6667       t = build_variant_type_copy (type PASS_MEM_STAT);
6668       set_type_quals (t, type_quals);
6669 
6670       if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6671 	{
6672 	  /* See if this object can map to a basic atomic type.  */
6673 	  tree atomic_type = find_atomic_core_type (type);
6674 	  if (atomic_type)
6675 	    {
6676 	      /* Ensure the alignment of this type is compatible with
6677 		 the required alignment of the atomic type.  */
6678 	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6679 		SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
6680 	    }
6681 	}
6682 
6683       if (TYPE_STRUCTURAL_EQUALITY_P (type))
6684 	/* Propagate structural equality. */
6685 	SET_TYPE_STRUCTURAL_EQUALITY (t);
6686       else if (TYPE_CANONICAL (type) != type)
6687 	/* Build the underlying canonical type, since it is different
6688 	   from TYPE. */
6689 	{
6690 	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6691 	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6692 	}
6693       else
6694 	/* T is its own canonical type. */
6695 	TYPE_CANONICAL (t) = t;
6696 
6697     }
6698 
6699   return t;
6700 }
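
/* Illustrative sketch, not part of the original sources: requesting a
   qualified variant of a standard type.

     tree cv_int = build_qualified_type (integer_type_node,
					 TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   If such a variant already exists on the variant chain of
   integer_type_node, get_qualified_type returns it; otherwise a new variant
   is created and its TYPE_CANONICAL is derived as above.  */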
6701 
6702 /* Create a variant of type T with alignment ALIGN.  */
6703 
6704 tree
6705 build_aligned_type (tree type, unsigned int align)
6706 {
6707   tree t;
6708 
6709   if (TYPE_PACKED (type)
6710       || TYPE_ALIGN (type) == align)
6711     return type;
6712 
6713   for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6714     if (check_aligned_type (t, type, align))
6715       return t;
6716 
6717   t = build_variant_type_copy (type);
6718   SET_TYPE_ALIGN (t, align);
6719   TYPE_USER_ALIGN (t) = 1;
6720 
6721   return t;
6722 }
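
/* Illustrative sketch, not part of the original sources: ALIGN is measured
   in bits, like TYPE_ALIGN.

     tree aligned_char = build_aligned_type (char_type_node,
					     16 * BITS_PER_UNIT);

   The result is a user-aligned variant, so check_aligned_type can find and
   reuse it on later requests for the same alignment.  */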
6723 
6724 /* Create a new distinct copy of TYPE.  The new type is made its own
6725    MAIN_VARIANT. If TYPE requires structural equality checks, the
6726    resulting type requires structural equality checks; otherwise, its
6727    TYPE_CANONICAL points to itself. */
6728 
6729 tree
6730 build_distinct_type_copy (tree type MEM_STAT_DECL)
6731 {
6732   tree t = copy_node (type PASS_MEM_STAT);
6733 
6734   TYPE_POINTER_TO (t) = 0;
6735   TYPE_REFERENCE_TO (t) = 0;
6736 
6737   /* Set the canonical type either to a new equivalence class, or
6738      propagate the need for structural equality checks. */
6739   if (TYPE_STRUCTURAL_EQUALITY_P (type))
6740     SET_TYPE_STRUCTURAL_EQUALITY (t);
6741   else
6742     TYPE_CANONICAL (t) = t;
6743 
6744   /* Make it its own variant.  */
6745   TYPE_MAIN_VARIANT (t) = t;
6746   TYPE_NEXT_VARIANT (t) = 0;
6747 
6748   /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6749      whose TREE_TYPE is not t.  This can also happen in the Ada
6750      frontend when using subtypes.  */
6751 
6752   return t;
6753 }
6754 
6755 /* Create a new variant of TYPE, equivalent but distinct.  This is so
6756    the caller can modify it. TYPE_CANONICAL for the return type will
6757    be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6758    are considered equal by the language itself (or that both types
6759    require structural equality checks). */
6760 
6761 tree
build_variant_type_copy (tree type MEM_STAT_DECL)
6763 {
6764   tree t, m = TYPE_MAIN_VARIANT (type);
6765 
6766   t = build_distinct_type_copy (type PASS_MEM_STAT);
6767 
6768   /* Since we're building a variant, assume that it is a non-semantic
6769      variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6770   TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6771   /* Type variants have no alias set defined.  */
6772   TYPE_ALIAS_SET (t) = -1;
6773 
6774   /* Add the new type to the chain of variants of TYPE.  */
6775   TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6776   TYPE_NEXT_VARIANT (m) = t;
6777   TYPE_MAIN_VARIANT (t) = m;
6778 
6779   return t;
6780 }
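
/* Illustrative contrast between the two copy routines above (a sketch,
   not code taken from an actual caller):

     tree v = build_variant_type_copy (integer_type_node);
       ==> TYPE_MAIN_VARIANT (v) == integer_type_node and v shares
           integer_type_node's TYPE_CANONICAL, so v is merely a variant.

     tree d = build_distinct_type_copy (integer_type_node);
       ==> TYPE_MAIN_VARIANT (d) == d and TYPE_CANONICAL (d) == d, so d
           is a genuinely new type in its own equivalence class.  */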
6781 
/* Return true if the from trees in both tree maps are equal.  */
6783 
6784 int
tree_map_base_eq (const void *va, const void *vb)
6786 {
6787   const struct tree_map_base  *const a = (const struct tree_map_base *) va,
6788     *const b = (const struct tree_map_base *) vb;
6789   return (a->from == b->from);
6790 }
6791 
/* Hash a from tree in a tree_map_base.  */
6793 
6794 unsigned int
tree_map_base_hash (const void *item)
6796 {
6797   return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6798 }
6799 
6800 /* Return true if this tree map structure is marked for garbage collection
6801    purposes.  We simply return true if the from tree is marked, so that this
6802    structure goes away when the from tree goes away.  */
6803 
6804 int
tree_map_base_marked_p (const void *p)
6806 {
6807   return ggc_marked_p (((const struct tree_map_base *) p)->from);
6808 }
6809 
6810 /* Hash a from tree in a tree_map.  */
6811 
6812 unsigned int
tree_map_hash (const void *item)
6814 {
6815   return (((const struct tree_map *) item)->hash);
6816 }
6817 
6818 /* Hash a from tree in a tree_decl_map.  */
6819 
6820 unsigned int
tree_decl_map_hash (const void *item)
6822 {
6823   return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6824 }
6825 
6826 /* Return the initialization priority for DECL.  */
6827 
6828 priority_type
decl_init_priority_lookup (tree decl)
6830 {
6831   symtab_node *snode = symtab_node::get (decl);
6832 
6833   if (!snode)
6834     return DEFAULT_INIT_PRIORITY;
  return snode->get_init_priority ();
6837 }
6838 
6839 /* Return the finalization priority for DECL.  */
6840 
6841 priority_type
decl_fini_priority_lookup (tree decl)
6843 {
6844   cgraph_node *node = cgraph_node::get (decl);
6845 
6846   if (!node)
6847     return DEFAULT_INIT_PRIORITY;
  return node->get_fini_priority ();
6850 }
6851 
6852 /* Set the initialization priority for DECL to PRIORITY.  */
6853 
6854 void
decl_init_priority_insert (tree decl, priority_type priority)
6856 {
6857   struct symtab_node *snode;
6858 
6859   if (priority == DEFAULT_INIT_PRIORITY)
6860     {
6861       snode = symtab_node::get (decl);
6862       if (!snode)
6863 	return;
6864     }
6865   else if (VAR_P (decl))
6866     snode = varpool_node::get_create (decl);
6867   else
6868     snode = cgraph_node::get_create (decl);
6869   snode->set_init_priority (priority);
6870 }
6871 
6872 /* Set the finalization priority for DECL to PRIORITY.  */
6873 
6874 void
decl_fini_priority_insert (tree decl, priority_type priority)
6876 {
6877   struct cgraph_node *node;
6878 
6879   if (priority == DEFAULT_INIT_PRIORITY)
6880     {
6881       node = cgraph_node::get (decl);
6882       if (!node)
6883 	return;
6884     }
6885   else
6886     node = cgraph_node::get_create (decl);
6887   node->set_fini_priority (priority);
6888 }
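
/* Illustrative sketch: when a front end handles something like
   __attribute__((constructor (200))) on a function FNDECL, it can
   record the priority with

     decl_init_priority_insert (fndecl, 200);

   after which decl_init_priority_lookup (fndecl) returns 200, while
   DEFAULT_INIT_PRIORITY is returned for decls with nothing recorded.  */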
6889 
6890 /* Print out the statistics for the DECL_DEBUG_EXPR hash table.  */
6891 
6892 static void
print_debug_expr_statistics (void)
6894 {
6895   fprintf (stderr, "DECL_DEBUG_EXPR  hash: size %ld, %ld elements, %f collisions\n",
6896 	   (long) debug_expr_for_decl->size (),
6897 	   (long) debug_expr_for_decl->elements (),
6898 	   debug_expr_for_decl->collisions ());
6899 }
6900 
6901 /* Print out the statistics for the DECL_VALUE_EXPR hash table.  */
6902 
6903 static void
print_value_expr_statistics (void)
6905 {
6906   fprintf (stderr, "DECL_VALUE_EXPR  hash: size %ld, %ld elements, %f collisions\n",
6907 	   (long) value_expr_for_decl->size (),
6908 	   (long) value_expr_for_decl->elements (),
6909 	   value_expr_for_decl->collisions ());
6910 }
6911 
6912 /* Lookup a debug expression for FROM, and return it if we find one.  */
6913 
6914 tree
decl_debug_expr_lookup (tree from)
6916 {
6917   struct tree_decl_map *h, in;
6918   in.base.from = from;
6919 
6920   h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6921   if (h)
6922     return h->to;
6923   return NULL_TREE;
6924 }
6925 
6926 /* Insert a mapping FROM->TO in the debug expression hashtable.  */
6927 
6928 void
decl_debug_expr_insert (tree from, tree to)
6930 {
6931   struct tree_decl_map *h;
6932 
6933   h = ggc_alloc<tree_decl_map> ();
6934   h->base.from = from;
6935   h->to = to;
6936   *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6937 }
6938 
6939 /* Lookup a value expression for FROM, and return it if we find one.  */
6940 
6941 tree
decl_value_expr_lookup (tree from)
6943 {
6944   struct tree_decl_map *h, in;
6945   in.base.from = from;
6946 
6947   h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6948   if (h)
6949     return h->to;
6950   return NULL_TREE;
6951 }
6952 
6953 /* Insert a mapping FROM->TO in the value expression hashtable.  */
6954 
6955 void
decl_value_expr_insert (tree from, tree to)
6957 {
6958   struct tree_decl_map *h;
6959 
6960   h = ggc_alloc<tree_decl_map> ();
6961   h->base.from = from;
6962   h->to = to;
6963   *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6964 }
6965 
6966 /* Lookup a vector of debug arguments for FROM, and return it if we
6967    find one.  */
6968 
6969 vec<tree, va_gc> **
decl_debug_args_lookup (tree from)
6971 {
6972   struct tree_vec_map *h, in;
6973 
6974   if (!DECL_HAS_DEBUG_ARGS_P (from))
6975     return NULL;
6976   gcc_checking_assert (debug_args_for_decl != NULL);
6977   in.base.from = from;
6978   h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6979   if (h)
6980     return &h->to;
6981   return NULL;
6982 }
6983 
6984 /* Insert a mapping FROM->empty vector of debug arguments in the value
6985    expression hashtable.  */
6986 
6987 vec<tree, va_gc> **
decl_debug_args_insert (tree from)
6989 {
6990   struct tree_vec_map *h;
6991   tree_vec_map **loc;
6992 
6993   if (DECL_HAS_DEBUG_ARGS_P (from))
6994     return decl_debug_args_lookup (from);
6995   if (debug_args_for_decl == NULL)
6996     debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6997   h = ggc_alloc<tree_vec_map> ();
6998   h->base.from = from;
6999   h->to = NULL;
7000   loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
7001   *loc = h;
7002   DECL_HAS_DEBUG_ARGS_P (from) = 1;
7003   return &h->to;
7004 }
7005 
7006 /* Hashing of types so that we don't make duplicates.
7007    The entry point is `type_hash_canon'.  */
7008 
7009 /* Generate the default hash code for TYPE.  This is designed for
7010    speed, rather than maximum entropy.  */
7011 
7012 hashval_t
type_hash_canon_hash (tree type)
7014 {
7015   inchash::hash hstate;
7016 
7017   hstate.add_int (TREE_CODE (type));
7018 
7019   if (TREE_TYPE (type))
7020     hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
7021 
7022   for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
7023     /* Just the identifier is adequate to distinguish.  */
7024     hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
7025 
7026   switch (TREE_CODE (type))
7027     {
7028     case METHOD_TYPE:
7029       hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
7030       /* FALLTHROUGH. */
7031     case FUNCTION_TYPE:
7032       for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7033 	if (TREE_VALUE (t) != error_mark_node)
7034 	  hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
7035       break;
7036 
7037     case OFFSET_TYPE:
7038       hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
7039       break;
7040 
7041     case ARRAY_TYPE:
7042       {
7043 	if (TYPE_DOMAIN (type))
7044 	  hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
7045 	if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
7046 	  {
7047 	    unsigned typeless = TYPE_TYPELESS_STORAGE (type);
7048 	    hstate.add_object (typeless);
7049 	  }
7050       }
7051       break;
7052 
7053     case INTEGER_TYPE:
7054       {
7055 	tree t = TYPE_MAX_VALUE (type);
7056 	if (!t)
7057 	  t = TYPE_MIN_VALUE (type);
7058 	for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7059 	  hstate.add_object (TREE_INT_CST_ELT (t, i));
7060 	break;
7061       }
7062 
7063     case REAL_TYPE:
7064     case FIXED_POINT_TYPE:
7065       {
7066 	unsigned prec = TYPE_PRECISION (type);
7067 	hstate.add_object (prec);
7068 	break;
7069       }
7070 
7071     case VECTOR_TYPE:
7072       hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
7073       break;
7074 
7075     default:
7076       break;
7077     }
7078 
7079   return hstate.end ();
7080 }
7081 
7082 /* These are the Hashtable callback functions.  */
7083 
7084 /* Returns true iff the types are equivalent.  */
7085 
7086 bool
type_cache_hasher::equal (type_hash *a, type_hash *b)
7088 {
7089   /* First test the things that are the same for all types.  */
7090   if (a->hash != b->hash
7091       || TREE_CODE (a->type) != TREE_CODE (b->type)
7092       || TREE_TYPE (a->type) != TREE_TYPE (b->type)
7093       || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
7094 				 TYPE_ATTRIBUTES (b->type))
7095       || (TREE_CODE (a->type) != COMPLEX_TYPE
7096           && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
7097     return 0;
7098 
7099   /* Be careful about comparing arrays before and after the element type
7100      has been completed; don't compare TYPE_ALIGN unless both types are
7101      complete.  */
7102   if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
7103       && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
7104 	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
7105     return 0;
7106 
7107   switch (TREE_CODE (a->type))
7108     {
7109     case VOID_TYPE:
7110     case OPAQUE_TYPE:
7111     case COMPLEX_TYPE:
7112     case POINTER_TYPE:
7113     case REFERENCE_TYPE:
7114     case NULLPTR_TYPE:
7115       return 1;
7116 
7117     case VECTOR_TYPE:
7118       return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
7119 		       TYPE_VECTOR_SUBPARTS (b->type));
7120 
7121     case ENUMERAL_TYPE:
7122       if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
7123 	  && !(TYPE_VALUES (a->type)
7124 	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
7125 	       && TYPE_VALUES (b->type)
7126 	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
7127 	       && type_list_equal (TYPE_VALUES (a->type),
7128 				   TYPE_VALUES (b->type))))
7129 	return 0;
7130 
7131       /* fall through */
7132 
7133     case INTEGER_TYPE:
7134     case REAL_TYPE:
7135     case BOOLEAN_TYPE:
7136       if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
7137 	return false;
7138       return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
7139 	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
7140 				      TYPE_MAX_VALUE (b->type)))
7141 	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
7142 		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
7143 					 TYPE_MIN_VALUE (b->type))));
7144 
7145     case FIXED_POINT_TYPE:
7146       return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
7147 
7148     case OFFSET_TYPE:
7149       return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
7150 
7151     case METHOD_TYPE:
7152       if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
7153 	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7154 	      || (TYPE_ARG_TYPES (a->type)
7155 		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7156 		  && TYPE_ARG_TYPES (b->type)
7157 		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7158 		  && type_list_equal (TYPE_ARG_TYPES (a->type),
7159 				      TYPE_ARG_TYPES (b->type)))))
7160         break;
7161       return 0;
7162     case ARRAY_TYPE:
7163       /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
7164 	 where the flag should be inherited from the element type
7165 	 and can change after ARRAY_TYPEs are created; on non-aggregates
7166 	 compare it and hash it, scalars will never have that flag set
7167 	 and we need to differentiate between arrays created by different
7168 	 front-ends or middle-end created arrays.  */
7169       return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
7170 	      && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
7171 		  || (TYPE_TYPELESS_STORAGE (a->type)
7172 		      == TYPE_TYPELESS_STORAGE (b->type))));
7173 
7174     case RECORD_TYPE:
7175     case UNION_TYPE:
7176     case QUAL_UNION_TYPE:
7177       return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7178 	      || (TYPE_FIELDS (a->type)
7179 		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7180 		  && TYPE_FIELDS (b->type)
7181 		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7182 		  && type_list_equal (TYPE_FIELDS (a->type),
7183 				      TYPE_FIELDS (b->type))));
7184 
7185     case FUNCTION_TYPE:
7186       if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7187 	  || (TYPE_ARG_TYPES (a->type)
7188 	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7189 	      && TYPE_ARG_TYPES (b->type)
7190 	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7191 	      && type_list_equal (TYPE_ARG_TYPES (a->type),
7192 				  TYPE_ARG_TYPES (b->type))))
7193 	break;
7194       return 0;
7195 
7196     default:
7197       return 0;
7198     }
7199 
7200   if (lang_hooks.types.type_hash_eq != NULL)
7201     return lang_hooks.types.type_hash_eq (a->type, b->type);
7202 
7203   return 1;
7204 }
7205 
7206 /* Given TYPE, and HASHCODE its hash code, return the canonical
7207    object for an identical type if one already exists.
7208    Otherwise, return TYPE, and record it as the canonical object.
7209 
7210    To use this function, first create a type of the sort you want.
7211    Then compute its hash code from the fields of the type that
7212    make it different from other similar types.
7213    Then call this function and use the value.  */
7214 
7215 tree
type_hash_canon (unsigned int hashcode, tree type)
7217 {
7218   type_hash in;
7219   type_hash **loc;
7220 
7221   /* The hash table only contains main variants, so ensure that's what we're
7222      being passed.  */
7223   gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7224 
7225   /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7226      must call that routine before comparing TYPE_ALIGNs.  */
7227   layout_type (type);
7228 
7229   in.hash = hashcode;
7230   in.type = type;
7231 
7232   loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7233   if (*loc)
7234     {
7235       tree t1 = ((type_hash *) *loc)->type;
7236       gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
7237 		  && t1 != type);
7238       if (TYPE_UID (type) + 1 == next_type_uid)
7239 	--next_type_uid;
7240       /* Free also min/max values and the cache for integer
7241 	 types.  This can't be done in free_node, as LTO frees
7242 	 those on its own.  */
7243       if (TREE_CODE (type) == INTEGER_TYPE)
7244 	{
7245 	  if (TYPE_MIN_VALUE (type)
7246 	      && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
7247 	    {
7248 	      /* Zero is always in TYPE_CACHED_VALUES.  */
7249 	      if (! TYPE_UNSIGNED (type))
7250 		int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
7251 	      ggc_free (TYPE_MIN_VALUE (type));
7252 	    }
7253 	  if (TYPE_MAX_VALUE (type)
7254 	      && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
7255 	    {
7256 	      int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
7257 	      ggc_free (TYPE_MAX_VALUE (type));
7258 	    }
7259 	  if (TYPE_CACHED_VALUES_P (type))
7260 	    ggc_free (TYPE_CACHED_VALUES (type));
7261 	}
7262       free_node (type);
7263       return t1;
7264     }
7265   else
7266     {
7267       struct type_hash *h;
7268 
7269       h = ggc_alloc<type_hash> ();
7270       h->hash = hashcode;
7271       h->type = type;
7272       *loc = h;
7273 
7274       return type;
7275     }
7276 }
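
/* Illustrative sketch of the pattern described before type_hash_canon
   above (PRECISION stands in for some caller-chosen precision):

     tree t = make_node (INTEGER_TYPE);
     TYPE_PRECISION (t) = precision;
     fixup_signed_type (t);
     hashval_t hash = type_hash_canon_hash (t);
     t = type_hash_canon (hash, t);

   If an identical INTEGER_TYPE was interned earlier, T now points to that
   earlier node and the freshly built one has been freed.  */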
7277 
7278 static void
print_type_hash_statistics (void)
7280 {
7281   fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7282 	   (long) type_hash_table->size (),
7283 	   (long) type_hash_table->elements (),
7284 	   type_hash_table->collisions ());
7285 }
7286 
7287 /* Given two lists of types
7288    (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7289    return 1 if the lists contain the same types in the same order.
7290    Also, the TREE_PURPOSEs must match.  */
7291 
7292 bool
type_list_equal (const_tree l1, const_tree l2)
7294 {
7295   const_tree t1, t2;
7296 
7297   for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7298     if (TREE_VALUE (t1) != TREE_VALUE (t2)
7299 	|| (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7300 	    && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7301 		  && (TREE_TYPE (TREE_PURPOSE (t1))
7302 		      == TREE_TYPE (TREE_PURPOSE (t2))))))
7303       return false;
7304 
7305   return t1 == t2;
7306 }
7307 
7308 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7309    given by TYPE.  If the argument list accepts variable arguments,
7310    then this function counts only the ordinary arguments.  */
7311 
7312 int
type_num_arguments (const_tree fntype)
7314 {
7315   int i = 0;
7316 
7317   for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
7318     /* If the function does not take a variable number of arguments,
7319        the last element in the list will have type `void'.  */
7320     if (VOID_TYPE_P (TREE_VALUE (t)))
7321       break;
7322     else
7323       ++i;
7324 
7325   return i;
7326 }
7327 
7328 /* Return the type of the function TYPE's argument ARGNO if known.
   For a vararg function where ARGNO refers to one of the variadic
   arguments, return NULL_TREE.  Otherwise, return void_type_node for
   an out-of-bounds ARGNO.  */
7332 
7333 tree
type_argument_type (const_tree fntype, unsigned argno)
7335 {
7336   /* Treat zero the same as an out-of-bounds argument number.  */
7337   if (!argno)
7338     return void_type_node;
7339 
7340   function_args_iterator iter;
7341 
7342   tree argtype;
7343   unsigned i = 1;
7344   FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
7345     {
7346       /* A vararg function's argument list ends in a null.  Otherwise,
7347 	 an ordinary function's argument list ends with void.  Return
7348 	 null if ARGNO refers to a vararg argument, void_type_node if
7349 	 it's out of bounds, and the formal argument type otherwise.  */
7350       if (!argtype)
7351 	break;
7352 
7353       if (i == argno || VOID_TYPE_P (argtype))
7354 	return argtype;
7355 
7356       ++i;
7357     }
7358 
7359   return NULL_TREE;
7360 }
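
/* Illustrative sketch: for a function declared as
   int f (float, char *, ...), with FNTYPE its FUNCTION_TYPE,

     type_num_arguments (fntype)    == 2 (only the fixed arguments),
     type_argument_type (fntype, 1) == float_type_node,
     type_argument_type (fntype, 3) == NULL_TREE (a variadic argument),
     type_argument_type (fntype, 0) == void_type_node (out of bounds).  */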
7361 
7362 /* Nonzero if integer constants T1 and T2
7363    represent the same constant value.  */
7364 
7365 int
tree_int_cst_equal (const_tree t1, const_tree t2)
7367 {
7368   if (t1 == t2)
7369     return 1;
7370 
7371   if (t1 == 0 || t2 == 0)
7372     return 0;
7373 
7374   STRIP_ANY_LOCATION_WRAPPER (t1);
7375   STRIP_ANY_LOCATION_WRAPPER (t2);
7376 
7377   if (TREE_CODE (t1) == INTEGER_CST
7378       && TREE_CODE (t2) == INTEGER_CST
7379       && wi::to_widest (t1) == wi::to_widest (t2))
7380     return 1;
7381 
7382   return 0;
7383 }
7384 
7385 /* Return true if T is an INTEGER_CST whose numerical value (extended
7386    according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  */
7387 
7388 bool
tree_fits_shwi_p (const_tree t)
7390 {
7391   return (t != NULL_TREE
7392 	  && TREE_CODE (t) == INTEGER_CST
7393 	  && wi::fits_shwi_p (wi::to_widest (t)));
7394 }
7395 
7396 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7397    value (extended according to TYPE_UNSIGNED) fits in a poly_int64.  */
7398 
7399 bool
tree_fits_poly_int64_p (const_tree t)
7401 {
7402   if (t == NULL_TREE)
7403     return false;
7404   if (POLY_INT_CST_P (t))
7405     {
7406       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7407 	if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
7408 	  return false;
7409       return true;
7410     }
7411   return (TREE_CODE (t) == INTEGER_CST
7412 	  && wi::fits_shwi_p (wi::to_widest (t)));
7413 }
7414 
7415 /* Return true if T is an INTEGER_CST whose numerical value (extended
7416    according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  */
7417 
7418 bool
tree_fits_uhwi_p (const_tree t)
7420 {
7421   return (t != NULL_TREE
7422 	  && TREE_CODE (t) == INTEGER_CST
7423 	  && wi::fits_uhwi_p (wi::to_widest (t)));
7424 }
7425 
7426 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7427    value (extended according to TYPE_UNSIGNED) fits in a poly_uint64.  */
7428 
7429 bool
tree_fits_poly_uint64_p (const_tree t)
7431 {
7432   if (t == NULL_TREE)
7433     return false;
7434   if (POLY_INT_CST_P (t))
7435     {
7436       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7437 	if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
7438 	  return false;
7439       return true;
7440     }
7441   return (TREE_CODE (t) == INTEGER_CST
7442 	  && wi::fits_uhwi_p (wi::to_widest (t)));
7443 }
7444 
7445 /* T is an INTEGER_CST whose numerical value (extended according to
7446    TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  Return that
7447    HOST_WIDE_INT.  */
7448 
7449 HOST_WIDE_INT
tree_to_shwi (const_tree t)
7451 {
7452   gcc_assert (tree_fits_shwi_p (t));
7453   return TREE_INT_CST_LOW (t);
7454 }
7455 
7456 /* T is an INTEGER_CST whose numerical value (extended according to
7457    TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  Return that
7458    HOST_WIDE_INT.  */
7459 
7460 unsigned HOST_WIDE_INT
tree_to_uhwi (const_tree t)
7462 {
7463   gcc_assert (tree_fits_uhwi_p (t));
7464   return TREE_INT_CST_LOW (t);
7465 }
7466 
7467 /* Return the most significant (sign) bit of T.  */
7468 
7469 int
tree_int_cst_sign_bit (const_tree t)
7471 {
7472   unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7473 
7474   return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
7475 }
7476 
7477 /* Return an indication of the sign of the integer constant T.
7478    The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7479    Note that -1 will never be returned if T's type is unsigned.  */
7480 
7481 int
tree_int_cst_sgn (const_tree t)
7483 {
7484   if (wi::to_wide (t) == 0)
7485     return 0;
7486   else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7487     return 1;
7488   else if (wi::neg_p (wi::to_wide (t)))
7489     return -1;
7490   else
7491     return 1;
7492 }
7493 
7494 /* Return the minimum number of bits needed to represent VALUE in a
7495    signed or unsigned type, UNSIGNEDP says which.  */
7496 
7497 unsigned int
tree_int_cst_min_precision (tree value, signop sgn)
7499 {
7500   /* If the value is negative, compute its negative minus 1.  The latter
7501      adjustment is because the absolute value of the largest negative value
7502      is one larger than the largest positive value.  This is equivalent to
7503      a bit-wise negation, so use that operation instead.  */
7504 
7505   if (tree_int_cst_sgn (value) < 0)
7506     value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7507 
7508   /* Return the number of bits needed, taking into account the fact
7509      that we need one more bit for a signed than unsigned type.
7510      If value is 0 or -1, the minimum precision is 1 no matter
7511      whether unsignedp is true or false.  */
7512 
7513   if (integer_zerop (value))
7514     return 1;
7515   else
7516     return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7517 }
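
/* Worked examples for the function above (illustrative only):
   VALUE == 5:  tree_floor_log2 (5) == 2, so 3 bits UNSIGNED, 4 bits SIGNED.
   VALUE == -3: ~(-3) == 2, tree_floor_log2 (2) == 1, so 3 bits SIGNED.
   VALUE == 0 or -1: 1 bit for either sign.  */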
7518 
7519 /* Return truthvalue of whether T1 is the same tree structure as T2.
7520    Return 1 if they are the same.
7521    Return 0 if they are understandably different.
7522    Return -1 if either contains tree structure not understood by
7523    this function.  */
7524 
7525 int
simple_cst_equal (const_tree t1, const_tree t2)
7527 {
7528   enum tree_code code1, code2;
7529   int cmp;
7530   int i;
7531 
7532   if (t1 == t2)
7533     return 1;
7534   if (t1 == 0 || t2 == 0)
7535     return 0;
7536 
7537   /* For location wrappers to be the same, they must be at the same
7538      source location (and wrap the same thing).  */
7539   if (location_wrapper_p (t1) && location_wrapper_p (t2))
7540     {
7541       if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
7542 	return 0;
7543       return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7544     }
7545 
7546   code1 = TREE_CODE (t1);
7547   code2 = TREE_CODE (t2);
7548 
7549   if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7550     {
7551       if (CONVERT_EXPR_CODE_P (code2)
7552 	  || code2 == NON_LVALUE_EXPR)
7553 	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7554       else
7555 	return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7556     }
7557 
7558   else if (CONVERT_EXPR_CODE_P (code2)
7559 	   || code2 == NON_LVALUE_EXPR)
7560     return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7561 
7562   if (code1 != code2)
7563     return 0;
7564 
7565   switch (code1)
7566     {
7567     case INTEGER_CST:
7568       return wi::to_widest (t1) == wi::to_widest (t2);
7569 
7570     case REAL_CST:
7571       return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7572 
7573     case FIXED_CST:
7574       return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7575 
7576     case STRING_CST:
7577       return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7578 	      && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7579 			 TREE_STRING_LENGTH (t1)));
7580 
7581     case CONSTRUCTOR:
7582       {
7583 	unsigned HOST_WIDE_INT idx;
7584 	vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7585 	vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7586 
7587 	if (vec_safe_length (v1) != vec_safe_length (v2))
7588 	  return false;
7589 
7590         for (idx = 0; idx < vec_safe_length (v1); ++idx)
7591 	  /* ??? Should we handle also fields here? */
7592 	  if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7593 	    return false;
7594 	return true;
7595       }
7596 
7597     case SAVE_EXPR:
7598       return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7599 
7600     case CALL_EXPR:
7601       cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7602       if (cmp <= 0)
7603 	return cmp;
7604       if (call_expr_nargs (t1) != call_expr_nargs (t2))
7605 	return 0;
7606       {
7607 	const_tree arg1, arg2;
7608 	const_call_expr_arg_iterator iter1, iter2;
7609 	for (arg1 = first_const_call_expr_arg (t1, &iter1),
7610 	       arg2 = first_const_call_expr_arg (t2, &iter2);
7611 	     arg1 && arg2;
7612 	     arg1 = next_const_call_expr_arg (&iter1),
7613 	       arg2 = next_const_call_expr_arg (&iter2))
7614 	  {
7615 	    cmp = simple_cst_equal (arg1, arg2);
7616 	    if (cmp <= 0)
7617 	      return cmp;
7618 	  }
7619 	return arg1 == arg2;
7620       }
7621 
7622     case TARGET_EXPR:
7623       /* Special case: if either target is an unallocated VAR_DECL,
7624 	 it means that it's going to be unified with whatever the
7625 	 TARGET_EXPR is really supposed to initialize, so treat it
7626 	 as being equivalent to anything.  */
7627       if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7628 	   && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7629 	   && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7630 	  || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7631 	      && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7632 	      && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7633 	cmp = 1;
7634       else
7635 	cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7636 
7637       if (cmp <= 0)
7638 	return cmp;
7639 
7640       return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7641 
7642     case WITH_CLEANUP_EXPR:
7643       cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7644       if (cmp <= 0)
7645 	return cmp;
7646 
      return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7648 
7649     case COMPONENT_REF:
7650       if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7651 	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7652 
7653       return 0;
7654 
7655     case VAR_DECL:
7656     case PARM_DECL:
7657     case CONST_DECL:
7658     case FUNCTION_DECL:
7659       return 0;
7660 
7661     default:
7662       if (POLY_INT_CST_P (t1))
7663 	/* A false return means maybe_ne rather than known_ne.  */
7664 	return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
7665 						TYPE_SIGN (TREE_TYPE (t1))),
7666 			 poly_widest_int::from (poly_int_cst_value (t2),
7667 						TYPE_SIGN (TREE_TYPE (t2))));
7668       break;
7669     }
7670 
7671   /* This general rule works for most tree codes.  All exceptions should be
7672      handled above.  If this is a language-specific tree code, we can't
7673      trust what might be in the operand, so say we don't know
7674      the situation.  */
7675   if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7676     return -1;
7677 
7678   switch (TREE_CODE_CLASS (code1))
7679     {
7680     case tcc_unary:
7681     case tcc_binary:
7682     case tcc_comparison:
7683     case tcc_expression:
7684     case tcc_reference:
7685     case tcc_statement:
7686       cmp = 1;
7687       for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7688 	{
7689 	  cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7690 	  if (cmp <= 0)
7691 	    return cmp;
7692 	}
7693 
7694       return cmp;
7695 
7696     default:
7697       return -1;
7698     }
7699 }
7700 
7701 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7702    Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7703    than U, respectively.  */
7704 
7705 int
compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7707 {
7708   if (tree_int_cst_sgn (t) < 0)
7709     return -1;
7710   else if (!tree_fits_uhwi_p (t))
7711     return 1;
7712   else if (TREE_INT_CST_LOW (t) == u)
7713     return 0;
7714   else if (TREE_INT_CST_LOW (t) < u)
7715     return -1;
7716   else
7717     return 1;
7718 }
7719 
7720 /* Return true if SIZE represents a constant size that is in bounds of
7721    what the middle-end and the backend accepts (covering not more than
7722    half of the address-space).
7723    When PERR is non-null, set *PERR on failure to the description of
7724    why SIZE is not valid.  */
7725 
7726 bool
valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
7728 {
7729   if (POLY_INT_CST_P (size))
7730     {
7731       if (TREE_OVERFLOW (size))
7732 	return false;
7733       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7734 	if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
7735 	  return false;
7736       return true;
7737     }
7738 
7739   cst_size_error error;
7740   if (!perr)
7741     perr = &error;
7742 
7743   if (TREE_CODE (size) != INTEGER_CST)
7744     {
7745       *perr = cst_size_not_constant;
7746       return false;
7747     }
7748 
7749   if (TREE_OVERFLOW_P (size))
7750     {
7751       *perr = cst_size_overflow;
7752       return false;
7753     }
7754 
7755   if (tree_int_cst_sgn (size) < 0)
7756     {
7757       *perr = cst_size_negative;
7758       return false;
7759     }
7760   if (!tree_fits_uhwi_p (size)
7761       || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
7762 	  < wi::to_widest (size) * 2))
7763     {
7764       *perr = cst_size_too_big;
7765       return false;
7766     }
7767 
7768   return true;
7769 }
7770 
7771 /* Return the precision of the type, or for a complex or vector type the
7772    precision of the type of its elements.  */
7773 
7774 unsigned int
element_precision (const_tree type)
7776 {
7777   if (!TYPE_P (type))
7778     type = TREE_TYPE (type);
7779   enum tree_code code = TREE_CODE (type);
7780   if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7781     type = TREE_TYPE (type);
7782 
7783   return TYPE_PRECISION (type);
7784 }
7785 
7786 /* Return true if CODE represents an associative tree code.  Otherwise
7787    return false.  */
7788 bool
associative_tree_code (enum tree_code code)
7790 {
7791   switch (code)
7792     {
7793     case BIT_IOR_EXPR:
7794     case BIT_AND_EXPR:
7795     case BIT_XOR_EXPR:
7796     case PLUS_EXPR:
7797     case MULT_EXPR:
7798     case MIN_EXPR:
7799     case MAX_EXPR:
7800       return true;
7801 
7802     default:
7803       break;
7804     }
7805   return false;
7806 }
7807 
7808 /* Return true if CODE represents a commutative tree code.  Otherwise
7809    return false.  */
7810 bool
commutative_tree_code (enum tree_code code)
7812 {
7813   switch (code)
7814     {
7815     case PLUS_EXPR:
7816     case MULT_EXPR:
7817     case MULT_HIGHPART_EXPR:
7818     case MIN_EXPR:
7819     case MAX_EXPR:
7820     case BIT_IOR_EXPR:
7821     case BIT_XOR_EXPR:
7822     case BIT_AND_EXPR:
7823     case NE_EXPR:
7824     case EQ_EXPR:
7825     case UNORDERED_EXPR:
7826     case ORDERED_EXPR:
7827     case UNEQ_EXPR:
7828     case LTGT_EXPR:
7829     case TRUTH_AND_EXPR:
7830     case TRUTH_XOR_EXPR:
7831     case TRUTH_OR_EXPR:
7832     case WIDEN_MULT_EXPR:
7833     case VEC_WIDEN_MULT_HI_EXPR:
7834     case VEC_WIDEN_MULT_LO_EXPR:
7835     case VEC_WIDEN_MULT_EVEN_EXPR:
7836     case VEC_WIDEN_MULT_ODD_EXPR:
7837       return true;
7838 
7839     default:
7840       break;
7841     }
7842   return false;
7843 }
7844 
7845 /* Return true if CODE represents a ternary tree code for which the
7846    first two operands are commutative.  Otherwise return false.  */
7847 bool
commutative_ternary_tree_code (enum tree_code code)
7849 {
7850   switch (code)
7851     {
7852     case WIDEN_MULT_PLUS_EXPR:
7853     case WIDEN_MULT_MINUS_EXPR:
7854     case DOT_PROD_EXPR:
7855       return true;
7856 
7857     default:
7858       break;
7859     }
7860   return false;
7861 }
7862 
7863 /* Returns true if CODE can overflow.  */
7864 
7865 bool
operation_can_overflow (enum tree_code code)
7867 {
7868   switch (code)
7869     {
7870     case PLUS_EXPR:
7871     case MINUS_EXPR:
7872     case MULT_EXPR:
7873     case LSHIFT_EXPR:
7874       /* Can overflow in various ways.  */
7875       return true;
7876     case TRUNC_DIV_EXPR:
7877     case EXACT_DIV_EXPR:
7878     case FLOOR_DIV_EXPR:
7879     case CEIL_DIV_EXPR:
7880       /* For INT_MIN / -1.  */
7881       return true;
7882     case NEGATE_EXPR:
7883     case ABS_EXPR:
7884       /* For -INT_MIN.  */
7885       return true;
7886     default:
7887       /* These operators cannot overflow.  */
7888       return false;
7889     }
7890 }
7891 
7892 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
   -ftrapv doesn't generate trapping insns for CODE.  */
7894 
7895 bool
operation_no_trapping_overflow (tree type, enum tree_code code)
7897 {
7898   gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7899 
7900   /* We don't generate instructions that trap on overflow for complex or vector
7901      types.  */
7902   if (!INTEGRAL_TYPE_P (type))
7903     return true;
7904 
7905   if (!TYPE_OVERFLOW_TRAPS (type))
7906     return true;
7907 
7908   switch (code)
7909     {
7910     case PLUS_EXPR:
7911     case MINUS_EXPR:
7912     case MULT_EXPR:
7913     case NEGATE_EXPR:
7914     case ABS_EXPR:
7915       /* These operators can overflow, and -ftrapv generates trapping code for
7916 	 these.  */
7917       return false;
7918     case TRUNC_DIV_EXPR:
7919     case EXACT_DIV_EXPR:
7920     case FLOOR_DIV_EXPR:
7921     case CEIL_DIV_EXPR:
7922     case LSHIFT_EXPR:
7923       /* These operators can overflow, but -ftrapv does not generate trapping
7924 	 code for these.  */
7925       return true;
7926     default:
7927       /* These operators cannot overflow.  */
7928       return true;
7929     }
7930 }
7931 
7932 /* Constructors for pointer, array and function types.
7933    (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7934    constructed by language-dependent code, not here.)  */
7935 
7936 /* Construct, lay out and return the type of pointers to TO_TYPE with
7937    mode MODE.  If MODE is VOIDmode, a pointer mode for the address
7938    space of TO_TYPE will be picked.  If CAN_ALIAS_ALL is TRUE,
7939    indicate this type can reference all of memory. If such a type has
7940    already been constructed, reuse it.  */
7941 
7942 tree
build_pointer_type_for_mode (tree to_type, machine_mode mode,
			     bool can_alias_all)
7945 {
7946   tree t;
7947   bool could_alias = can_alias_all;
7948 
7949   if (to_type == error_mark_node)
7950     return error_mark_node;
7951 
7952   if (mode == VOIDmode)
7953     {
7954       addr_space_t as = TYPE_ADDR_SPACE (to_type);
7955       mode = targetm.addr_space.pointer_mode (as);
7956     }
7957 
7958   /* If the pointed-to type has the may_alias attribute set, force
7959      a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
7960   if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7961     can_alias_all = true;
7962 
7963   /* In some cases, languages will have things that aren't a POINTER_TYPE
7964      (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7965      In that case, return that type without regard to the rest of our
7966      operands.
7967 
7968      ??? This is a kludge, but consistent with the way this function has
7969      always operated and there doesn't seem to be a good way to avoid this
7970      at the moment.  */
7971   if (TYPE_POINTER_TO (to_type) != 0
7972       && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7973     return TYPE_POINTER_TO (to_type);
7974 
7975   /* First, if we already have a type for pointers to TO_TYPE and it's
7976      the proper mode, use it.  */
7977   for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7978     if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7979       return t;
7980 
7981   t = make_node (POINTER_TYPE);
7982 
7983   TREE_TYPE (t) = to_type;
7984   SET_TYPE_MODE (t, mode);
7985   TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7986   TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7987   TYPE_POINTER_TO (to_type) = t;
7988 
7989   /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
7990   if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7991     SET_TYPE_STRUCTURAL_EQUALITY (t);
7992   else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7993     TYPE_CANONICAL (t)
7994       = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7995 				     mode, false);
7996 
7997   /* Lay out the type.  This function has many callers that are concerned
7998      with expression-construction, and this simplifies them all.  */
7999   layout_type (t);
8000 
8001   return t;
8002 }
8003 
8004 /* By default build pointers in ptr_mode.  */
8005 
8006 tree
build_pointer_type (tree to_type)
8008 {
8009   return build_pointer_type_for_mode (to_type, VOIDmode, false);
8010 }
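
/* Illustrative sketch: build_pointer_type (char_type_node) yields the
   "char *" type in ptr_mode, reusing a node already chained off
   TYPE_POINTER_TO (char_type_node) when one with the right mode exists.
   A caller needing a specific pointer mode could instead write, e.g.,

     build_pointer_type_for_mode (char_type_node, SImode, false);

   (SImode here is only an example, not a recommendation).  */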
8011 
8012 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */
8013 
8014 tree
build_reference_type_for_mode (tree to_type, machine_mode mode,
			       bool can_alias_all)
8017 {
8018   tree t;
8019   bool could_alias = can_alias_all;
8020 
8021   if (to_type == error_mark_node)
8022     return error_mark_node;
8023 
8024   if (mode == VOIDmode)
8025     {
8026       addr_space_t as = TYPE_ADDR_SPACE (to_type);
8027       mode = targetm.addr_space.pointer_mode (as);
8028     }
8029 
8030   /* If the pointed-to type has the may_alias attribute set, force
8031      a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
8032   if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
8033     can_alias_all = true;
8034 
8035   /* In some cases, languages will have things that aren't a REFERENCE_TYPE
8036      (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
8037      In that case, return that type without regard to the rest of our
8038      operands.
8039 
8040      ??? This is a kludge, but consistent with the way this function has
8041      always operated and there doesn't seem to be a good way to avoid this
8042      at the moment.  */
8043   if (TYPE_REFERENCE_TO (to_type) != 0
8044       && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
8045     return TYPE_REFERENCE_TO (to_type);
8046 
8047   /* First, if we already have a type for pointers to TO_TYPE and it's
8048      the proper mode, use it.  */
8049   for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
8050     if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
8051       return t;
8052 
8053   t = make_node (REFERENCE_TYPE);
8054 
8055   TREE_TYPE (t) = to_type;
8056   SET_TYPE_MODE (t, mode);
8057   TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
8058   TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
8059   TYPE_REFERENCE_TO (to_type) = t;
8060 
8061   /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
8062   if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
8063     SET_TYPE_STRUCTURAL_EQUALITY (t);
8064   else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8065     TYPE_CANONICAL (t)
8066       = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
8067 				       mode, false);
8068 
8069   layout_type (t);
8070 
8071   return t;
8072 }
8073 
8074 
8075 /* Build the node for the type of references-to-TO_TYPE by default
8076    in ptr_mode.  */
8077 
8078 tree
build_reference_type (tree to_type)
8080 {
8081   return build_reference_type_for_mode (to_type, VOIDmode, false);
8082 }
8083 
8084 #define MAX_INT_CACHED_PREC \
8085   (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8086 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8087 
8088 /* Builds a signed or unsigned integer type of precision PRECISION.
8089    Used for C bitfields whose precision does not match that of
8090    built-in target types.  */
8091 tree
build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
				int unsignedp)
8094 {
8095   tree itype, ret;
8096 
8097   if (unsignedp)
8098     unsignedp = MAX_INT_CACHED_PREC + 1;
8099 
8100   if (precision <= MAX_INT_CACHED_PREC)
8101     {
8102       itype = nonstandard_integer_type_cache[precision + unsignedp];
8103       if (itype)
8104 	return itype;
8105     }
8106 
8107   itype = make_node (INTEGER_TYPE);
8108   TYPE_PRECISION (itype) = precision;
8109 
8110   if (unsignedp)
8111     fixup_unsigned_type (itype);
8112   else
8113     fixup_signed_type (itype);
8114 
8115   inchash::hash hstate;
8116   inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8117   ret = type_hash_canon (hstate.end (), itype);
8118   if (precision <= MAX_INT_CACHED_PREC)
8119     nonstandard_integer_type_cache[precision + unsignedp] = ret;
8120 
8121   return ret;
8122 }
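
/* Illustrative sketch: for a C bit-field such as "unsigned int x : 24;"
   a front end can obtain the 24-bit unsigned type with

     tree t = build_nonstandard_integer_type (24, 1);

   Repeated calls with the same precision and signedness return the
   cached node.  */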
8123 
8124 #define MAX_BOOL_CACHED_PREC \
8125   (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8126 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8127 
8128 /* Builds a boolean type of precision PRECISION.
8129    Used for boolean vectors to choose proper vector element size.  */
8130 tree
build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8132 {
8133   tree type;
8134 
8135   if (precision <= MAX_BOOL_CACHED_PREC)
8136     {
8137       type = nonstandard_boolean_type_cache[precision];
8138       if (type)
8139 	return type;
8140     }
8141 
8142   type = make_node (BOOLEAN_TYPE);
8143   TYPE_PRECISION (type) = precision;
8144   fixup_signed_type (type);
8145 
  if (precision <= MAX_BOOL_CACHED_PREC)
8147     nonstandard_boolean_type_cache[precision] = type;
8148 
8149   return type;
8150 }
8151 
8152 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8153    or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL.  If SHARED
8154    is true, reuse such a type that has already been constructed.  */
8155 
8156 static tree
build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8158 {
8159   tree itype = make_node (INTEGER_TYPE);
8160 
8161   TREE_TYPE (itype) = type;
8162 
8163   TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8164   TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8165 
8166   TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8167   SET_TYPE_MODE (itype, TYPE_MODE (type));
8168   TYPE_SIZE (itype) = TYPE_SIZE (type);
8169   TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8170   SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
8171   TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8172   SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
8173 
8174   if (!shared)
8175     return itype;
8176 
8177   if ((TYPE_MIN_VALUE (itype)
8178        && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8179       || (TYPE_MAX_VALUE (itype)
8180 	  && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8181     {
8182       /* Since we cannot reliably merge this type, we need to compare it using
8183 	 structural equality checks.  */
8184       SET_TYPE_STRUCTURAL_EQUALITY (itype);
8185       return itype;
8186     }
8187 
8188   hashval_t hash = type_hash_canon_hash (itype);
8189   itype = type_hash_canon (hash, itype);
8190 
8191   return itype;
8192 }
8193 
8194 /* Wrapper around build_range_type_1 with SHARED set to true.  */
8195 
8196 tree
build_range_type (tree type, tree lowval, tree highval)
8198 {
8199   return build_range_type_1 (type, lowval, highval, true);
8200 }
8201 
8202 /* Wrapper around build_range_type_1 with SHARED set to false.  */
8203 
8204 tree
build_nonshared_range_type (tree type, tree lowval, tree highval)
8206 {
8207   return build_range_type_1 (type, lowval, highval, false);
8208 }
8209 
8210 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8211    MAXVAL should be the maximum value in the domain
8212    (one less than the length of the array).
8213 
8214    The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
   We don't enforce this limit; that is up to the caller (e.g. the
   language front end).
8216    The limit exists because the result is a signed type and we don't handle
8217    sizes that use more than one HOST_WIDE_INT.  */
8218 
8219 tree
build_index_type (tree maxval)
8221 {
8222   return build_range_type (sizetype, size_zero_node, maxval);
8223 }
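
/* Illustrative sketch: the TYPE_DOMAIN of a ten-element array, i.e. the
   index range [0, 9] in sizetype, is obtained with

     tree domain = build_index_type (size_int (9));

   which is exactly what build_array_type_nelts below does for NELTS == 10.  */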
8224 
8225 /* Return true if the debug information for TYPE, a subtype, should be emitted
8226    as a subrange type.  If so, set LOWVAL to the low bound and HIGHVAL to the
8227    high bound, respectively.  Sometimes doing so unnecessarily obfuscates the
8228    debug info and doesn't reflect the source code.  */
8229 
8230 bool
subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8232 {
8233   tree base_type = TREE_TYPE (type), low, high;
8234 
8235   /* Subrange types have a base type which is an integral type.  */
8236   if (!INTEGRAL_TYPE_P (base_type))
8237     return false;
8238 
8239   /* Get the real bounds of the subtype.  */
8240   if (lang_hooks.types.get_subrange_bounds)
8241     lang_hooks.types.get_subrange_bounds (type, &low, &high);
8242   else
8243     {
8244       low = TYPE_MIN_VALUE (type);
8245       high = TYPE_MAX_VALUE (type);
8246     }
8247 
8248   /* If the type and its base type have the same representation and the same
8249      name, then the type is not a subrange but a copy of the base type.  */
8250   if ((TREE_CODE (base_type) == INTEGER_TYPE
8251        || TREE_CODE (base_type) == BOOLEAN_TYPE)
8252       && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8253       && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8254       && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8255       && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8256     return false;
8257 
8258   if (lowval)
8259     *lowval = low;
8260   if (highval)
8261     *highval = high;
8262   return true;
8263 }
8264 
8265 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8266    and number of elements specified by the range of values of INDEX_TYPE.
8267    If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
8268    If SHARED is true, reuse such a type that has already been constructed.
8269    If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type.  */
8270 
8271 static tree
build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
		    bool shared, bool set_canonical)
8274 {
8275   tree t;
8276 
8277   if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8278     {
8279       error ("arrays of functions are not meaningful");
8280       elt_type = integer_type_node;
8281     }
8282 
8283   t = make_node (ARRAY_TYPE);
8284   TREE_TYPE (t) = elt_type;
8285   TYPE_DOMAIN (t) = index_type;
8286   TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8287   TYPE_TYPELESS_STORAGE (t) = typeless_storage;
8288   layout_type (t);
8289 
8290   if (shared)
8291     {
8292       hashval_t hash = type_hash_canon_hash (t);
8293       t = type_hash_canon (hash, t);
8294     }
8295 
8296   if (TYPE_CANONICAL (t) == t && set_canonical)
8297     {
8298       if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8299 	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
8300 	  || in_lto_p)
8301 	SET_TYPE_STRUCTURAL_EQUALITY (t);
8302       else if (TYPE_CANONICAL (elt_type) != elt_type
8303 	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
8304 	TYPE_CANONICAL (t)
8305 	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
8306 				index_type
8307 				? TYPE_CANONICAL (index_type) : NULL_TREE,
8308 				typeless_storage, shared, set_canonical);
8309     }
8310 
8311   return t;
8312 }
8313 
8314 /* Wrapper around build_array_type_1 with SHARED set to true.  */
8315 
8316 tree
build_array_type (tree elt_type, tree index_type, bool typeless_storage)
8318 {
8319   return
8320     build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
8321 }
8322 
8323 /* Wrapper around build_array_type_1 with SHARED set to false.  */
8324 
8325 tree
build_nonshared_array_type (tree elt_type, tree index_type)
8327 {
8328   return build_array_type_1 (elt_type, index_type, false, false, true);
8329 }
8330 
8331 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8332    sizetype.  */
8333 
8334 tree
build_array_type_nelts (tree elt_type, poly_uint64 nelts)
8336 {
8337   return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8338 }
8339 
8340 /* Recursively examines the array elements of TYPE, until a non-array
8341    element type is found.  */
8342 
8343 tree
strip_array_types (tree type)
8345 {
8346   while (TREE_CODE (type) == ARRAY_TYPE)
8347     type = TREE_TYPE (type);
8348 
8349   return type;
8350 }
8351 
8352 /* Computes the canonical argument types from the argument type list
8353    ARGTYPES.
8354 
8355    Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8356    on entry to this function, or if any of the ARGTYPES are
8357    structural.
8358 
8359    Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8360    true on entry to this function, or if any of the ARGTYPES are
8361    non-canonical.
8362 
8363    Returns a canonical argument list, which may be ARGTYPES when the
8364    canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8365    true) or would not differ from ARGTYPES.  */
8366 
8367 static tree
maybe_canonicalize_argtypes (tree argtypes,
			     bool *any_structural_p,
			     bool *any_noncanonical_p)
8371 {
8372   tree arg;
8373   bool any_noncanonical_argtypes_p = false;
8374 
8375   for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8376     {
8377       if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8378 	/* Fail gracefully by stating that the type is structural.  */
8379 	*any_structural_p = true;
8380       else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8381 	*any_structural_p = true;
8382       else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8383 	       || TREE_PURPOSE (arg))
8384 	/* If the argument has a default argument, we consider it
8385 	   non-canonical even though the type itself is canonical.
8386 	   That way, different variants of function and method types
8387 	   with default arguments will all point to the variant with
8388 	   no defaults as their canonical type.  */
8389         any_noncanonical_argtypes_p = true;
8390     }
8391 
8392   if (*any_structural_p)
8393     return argtypes;
8394 
8395   if (any_noncanonical_argtypes_p)
8396     {
8397       /* Build the canonical list of argument types.  */
8398       tree canon_argtypes = NULL_TREE;
8399       bool is_void = false;
8400 
8401       for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8402         {
8403           if (arg == void_list_node)
8404             is_void = true;
8405           else
8406             canon_argtypes = tree_cons (NULL_TREE,
8407                                         TYPE_CANONICAL (TREE_VALUE (arg)),
8408                                         canon_argtypes);
8409         }
8410 
8411       canon_argtypes = nreverse (canon_argtypes);
8412       if (is_void)
8413         canon_argtypes = chainon (canon_argtypes, void_list_node);
8414 
8415       /* There is a non-canonical type.  */
8416       *any_noncanonical_p = true;
8417       return canon_argtypes;
8418     }
8419 
8420   /* The canonical argument types are the same as ARGTYPES.  */
8421   return argtypes;
8422 }
8423 
8424 /* Construct, lay out and return
8425    the type of functions returning type VALUE_TYPE
8426    given arguments of types ARG_TYPES.
8427    ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8428    are data type nodes for the arguments of the function.
8429    If such a type has already been constructed, reuse it.  */
8430 
8431 tree
8432 build_function_type (tree value_type, tree arg_types)
8433 {
8434   tree t;
8435   inchash::hash hstate;
8436   bool any_structural_p, any_noncanonical_p;
8437   tree canon_argtypes;
8438 
8439   gcc_assert (arg_types != error_mark_node);
8440 
8441   if (TREE_CODE (value_type) == FUNCTION_TYPE)
8442     {
8443       error ("function return type cannot be function");
8444       value_type = integer_type_node;
8445     }
8446 
8447   /* Make a node of the sort we want.  */
8448   t = make_node (FUNCTION_TYPE);
8449   TREE_TYPE (t) = value_type;
8450   TYPE_ARG_TYPES (t) = arg_types;
8451 
8452   /* If we already have such a type, use the old one.  */
8453   hashval_t hash = type_hash_canon_hash (t);
8454   t = type_hash_canon (hash, t);
8455 
8456   /* Set up the canonical type. */
8457   any_structural_p   = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8458   any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8459   canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8460 						&any_structural_p,
8461 						&any_noncanonical_p);
8462   if (any_structural_p)
8463     SET_TYPE_STRUCTURAL_EQUALITY (t);
8464   else if (any_noncanonical_p)
8465     TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8466 					      canon_argtypes);
8467 
8468   if (!COMPLETE_TYPE_P (t))
8469     layout_type (t);
8470   return t;
8471 }
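
/* Illustrative sketch (added commentary, not part of the original
   sources): the type of "int f (double)" can be built from an explicit
   TREE_LIST of argument types terminated by void_list_node:

     tree args = tree_cons (NULL_TREE, double_type_node, void_list_node);
     tree fntype = build_function_type (integer_type_node, args);

   Callers typically use the build_function_type_list wrapper below
   instead of building the TREE_LIST by hand.  */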
8472 
8473 /* Build a function type.  The RETURN_TYPE is the type returned by the
8474    function.  If VAARGS is set, no void_type_node is appended to the
8475    list.  ARGP must always be terminated by a NULL_TREE.  */
8476 
8477 static tree
8478 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8479 {
8480   tree t, args, last;
8481 
8482   t = va_arg (argp, tree);
8483   for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8484     args = tree_cons (NULL_TREE, t, args);
8485 
8486   if (vaargs)
8487     {
8488       last = args;
8489       if (args != NULL_TREE)
8490 	args = nreverse (args);
8491       gcc_assert (last != void_list_node);
8492     }
8493   else if (args == NULL_TREE)
8494     args = void_list_node;
8495   else
8496     {
8497       last = args;
8498       args = nreverse (args);
8499       TREE_CHAIN (last) = void_list_node;
8500     }
8501   args = build_function_type (return_type, args);
8502 
8503   return args;
8504 }
8505 
8506 /* Build a function type.  The RETURN_TYPE is the type returned by the
8507    function.  If additional arguments are provided, they are
8508    additional argument types.  The list of argument types must always
8509    be terminated by NULL_TREE.  */
8510 
8511 tree
8512 build_function_type_list (tree return_type, ...)
8513 {
8514   tree args;
8515   va_list p;
8516 
8517   va_start (p, return_type);
8518   args = build_function_type_list_1 (false, return_type, p);
8519   va_end (p);
8520   return args;
8521 }
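
/* For example (added commentary, not part of the original sources), the
   type of "void f (int, int)" is built as

     build_function_type_list (void_type_node, integer_type_node,
			       integer_type_node, NULL_TREE);

   with NULL_TREE marking the end of the argument list.  */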
8522 
8523 /* Build a variable argument function type.  The RETURN_TYPE is the
8524    type returned by the function.  If additional arguments are provided,
8525    they are additional argument types.  The list of argument types must
8526    always be terminated by NULL_TREE.  */
8527 
8528 tree
8529 build_varargs_function_type_list (tree return_type, ...)
8530 {
8531   tree args;
8532   va_list p;
8533 
8534   va_start (p, return_type);
8535   args = build_function_type_list_1 (true, return_type, p);
8536   va_end (p);
8537 
8538   return args;
8539 }
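
/* For example (added commentary, not part of the original sources), a
   printf-like type "int (const char *, ...)" can be built as

     tree cstr = build_pointer_type (build_qualified_type (char_type_node,
							    TYPE_QUAL_CONST));
     tree fntype = build_varargs_function_type_list (integer_type_node,
						     cstr, NULL_TREE);

   The resulting TYPE_ARG_TYPES chain is not terminated by void_list_node,
   which is how varargs prototypes are recognized elsewhere in the
   compiler.  */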
8540 
8541 /* Build a function type.  RETURN_TYPE is the type returned by the
8542    function; VAARGS indicates whether the function takes varargs.  The
8543    function takes N named arguments, the types of which are provided in
8544    ARG_TYPES.  */
8545 
8546 static tree
8547 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8548 			     tree *arg_types)
8549 {
8550   int i;
8551   tree t = vaargs ? NULL_TREE : void_list_node;
8552 
8553   for (i = n - 1; i >= 0; i--)
8554     t = tree_cons (NULL_TREE, arg_types[i], t);
8555 
8556   return build_function_type (return_type, t);
8557 }
8558 
8559 /* Build a function type.  RETURN_TYPE is the type returned by the
8560    function.  The function takes N named arguments, the types of which
8561    are provided in ARG_TYPES.  */
8562 
8563 tree
8564 build_function_type_array (tree return_type, int n, tree *arg_types)
8565 {
8566   return build_function_type_array_1 (false, return_type, n, arg_types);
8567 }
8568 
8569 /* Build a variable argument function type.  RETURN_TYPE is the type
8570    returned by the function.  The function takes N named arguments, the
8571    types of which are provided in ARG_TYPES.  */
8572 
8573 tree
8574 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8575 {
8576   return build_function_type_array_1 (true, return_type, n, arg_types);
8577 }
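
/* For example (added commentary, not part of the original sources), the
   same kinds of signatures can be built from a C array of parameter
   types:

     tree parms[2] = { integer_type_node, double_type_node };
     tree fntype = build_function_type_array (void_type_node, 2, parms);

   gives the type of "void f (int, double)".  */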
8578 
8579 /* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
8580    and ARGTYPES (a TREE_LIST) are the return type and argument types
8581    for the method.  An implicit additional parameter (of type
8582    pointer-to-BASETYPE) is added to the ARGTYPES.  */
8583 
8584 tree
8585 build_method_type_directly (tree basetype,
8586 			    tree rettype,
8587 			    tree argtypes)
8588 {
8589   tree t;
8590   tree ptype;
8591   bool any_structural_p, any_noncanonical_p;
8592   tree canon_argtypes;
8593 
8594   /* Make a node of the sort we want.  */
8595   t = make_node (METHOD_TYPE);
8596 
8597   TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8598   TREE_TYPE (t) = rettype;
8599   ptype = build_pointer_type (basetype);
8600 
8601   /* The actual arglist for this function includes a "hidden" argument
8602      which is "this".  Put it into the list of argument types.  */
8603   argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8604   TYPE_ARG_TYPES (t) = argtypes;
8605 
8606   /* If we already have such a type, use the old one.  */
8607   hashval_t hash = type_hash_canon_hash (t);
8608   t = type_hash_canon (hash, t);
8609 
8610   /* Set up the canonical type. */
8611   any_structural_p
8612     = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8613        || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8614   any_noncanonical_p
8615     = (TYPE_CANONICAL (basetype) != basetype
8616        || TYPE_CANONICAL (rettype) != rettype);
8617   canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8618 						&any_structural_p,
8619 						&any_noncanonical_p);
8620   if (any_structural_p)
8621     SET_TYPE_STRUCTURAL_EQUALITY (t);
8622   else if (any_noncanonical_p)
8623     TYPE_CANONICAL (t)
8624       = build_method_type_directly (TYPE_CANONICAL (basetype),
8625 				    TYPE_CANONICAL (rettype),
8626 				    canon_argtypes);
8627   if (!COMPLETE_TYPE_P (t))
8628     layout_type (t);
8629 
8630   return t;
8631 }
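
/* Illustrative sketch (added commentary, not part of the original
   sources): for a hypothetical RECORD_TYPE class_type, the type of a
   member function "int m (double)" would be

     tree argtypes = tree_cons (NULL_TREE, double_type_node, void_list_node);
     tree mtype = build_method_type_directly (class_type, integer_type_node,
					      argtypes);

   The implicit "this" parameter of type pointer-to-class_type is
   prepended by build_method_type_directly itself.  */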
8632 
8633 /* Construct, lay out and return the type of methods belonging to class
8634    BASETYPE and whose arguments and values are described by TYPE.
8635    If that type exists already, reuse it.
8636    TYPE must be a FUNCTION_TYPE node.  */
8637 
8638 tree
8639 build_method_type (tree basetype, tree type)
8640 {
8641   gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8642 
8643   return build_method_type_directly (basetype,
8644 				     TREE_TYPE (type),
8645 				     TYPE_ARG_TYPES (type));
8646 }
8647 
8648 /* Construct, lay out and return the type of offsets to a value
8649    of type TYPE, within an object of type BASETYPE.
8650    If a suitable offset type exists already, reuse it.  */
8651 
8652 tree
8653 build_offset_type (tree basetype, tree type)
8654 {
8655   tree t;
8656 
8657   /* Make a node of the sort we want.  */
8658   t = make_node (OFFSET_TYPE);
8659 
8660   TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8661   TREE_TYPE (t) = type;
8662 
8663   /* If we already have such a type, use the old one.  */
8664   hashval_t hash = type_hash_canon_hash (t);
8665   t = type_hash_canon (hash, t);
8666 
8667   if (!COMPLETE_TYPE_P (t))
8668     layout_type (t);
8669 
8670   if (TYPE_CANONICAL (t) == t)
8671     {
8672       if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8673 	  || TYPE_STRUCTURAL_EQUALITY_P (type))
8674 	SET_TYPE_STRUCTURAL_EQUALITY (t);
8675       else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8676 	       || TYPE_CANONICAL (type) != type)
8677 	TYPE_CANONICAL (t)
8678 	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8679 			       TYPE_CANONICAL (type));
8680     }
8681 
8682   return t;
8683 }
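
/* Illustrative sketch (added commentary, not part of the original
   sources): the C++ pointer-to-data-member type "int C::*" corresponds to
   an OFFSET_TYPE such as

     tree ptrmem = build_offset_type (c_type, integer_type_node);

   where c_type stands for a hypothetical RECORD_TYPE of class C; the C++
   front end layers its own handling on top of this node.  */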
8684 
8685 /* Create a complex type whose components are COMPONENT_TYPE.
8686 
8687    If NAMED is true, the type is given a TYPE_NAME.  We do not always
8688    do so because this creates a DECL node and thus makes the DECL_UIDs
8689    dependent on the type canonicalization hashtable, which is GC-ed,
8690    so the DECL_UIDs would not be stable wrt garbage collection.  */
8691 
8692 tree
8693 build_complex_type (tree component_type, bool named)
8694 {
8695   gcc_assert (INTEGRAL_TYPE_P (component_type)
8696 	      || SCALAR_FLOAT_TYPE_P (component_type)
8697 	      || FIXED_POINT_TYPE_P (component_type));
8698 
8699   /* Make a node of the sort we want.  */
8700   tree probe = make_node (COMPLEX_TYPE);
8701 
8702   TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
8703 
8704   /* If we already have such a type, use the old one.  */
8705   hashval_t hash = type_hash_canon_hash (probe);
8706   tree t = type_hash_canon (hash, probe);
8707 
8708   if (t == probe)
8709     {
8710       /* We created a new type.  The hash insertion will have laid
8711 	 out the type.  We need to check the canonicalization and
8712 	 maybe set the name.  */
8713       gcc_checking_assert (COMPLETE_TYPE_P (t)
8714 			   && !TYPE_NAME (t)
8715 			   && TYPE_CANONICAL (t) == t);
8716 
8717       if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
8718 	SET_TYPE_STRUCTURAL_EQUALITY (t);
8719       else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
8720 	TYPE_CANONICAL (t)
8721 	  = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
8722 
8723       /* We need to create a name, since complex is a fundamental type.  */
8724       if (named)
8725 	{
8726 	  const char *name = NULL;
8727 
8728 	  if (TREE_TYPE (t) == char_type_node)
8729 	    name = "complex char";
8730 	  else if (TREE_TYPE (t) == signed_char_type_node)
8731 	    name = "complex signed char";
8732 	  else if (TREE_TYPE (t) == unsigned_char_type_node)
8733 	    name = "complex unsigned char";
8734 	  else if (TREE_TYPE (t) == short_integer_type_node)
8735 	    name = "complex short int";
8736 	  else if (TREE_TYPE (t) == short_unsigned_type_node)
8737 	    name = "complex short unsigned int";
8738 	  else if (TREE_TYPE (t) == integer_type_node)
8739 	    name = "complex int";
8740 	  else if (TREE_TYPE (t) == unsigned_type_node)
8741 	    name = "complex unsigned int";
8742 	  else if (TREE_TYPE (t) == long_integer_type_node)
8743 	    name = "complex long int";
8744 	  else if (TREE_TYPE (t) == long_unsigned_type_node)
8745 	    name = "complex long unsigned int";
8746 	  else if (TREE_TYPE (t) == long_long_integer_type_node)
8747 	    name = "complex long long int";
8748 	  else if (TREE_TYPE (t) == long_long_unsigned_type_node)
8749 	    name = "complex long long unsigned int";
8750 
8751 	  if (name != NULL)
8752 	    TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8753 					get_identifier (name), t);
8754 	}
8755     }
8756 
8757   return build_qualified_type (t, TYPE_QUALS (component_type));
8758 }
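
/* For example (added commentary, not part of the original sources),

     tree t = build_complex_type (integer_type_node, true);

   yields a COMPLEX_TYPE whose TREE_TYPE is integer_type_node and, because
   NAMED is true and the component is one of the listed integer types,
   whose TYPE_NAME is a TYPE_DECL for "complex int".  */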
8759 
8760 /* If TYPE is a real or complex floating-point type and the target
8761    does not directly support arithmetic on TYPE then return the wider
8762    type to be used for arithmetic on TYPE.  Otherwise, return
8763    NULL_TREE.  */
8764 
8765 tree
8766 excess_precision_type (tree type)
8767 {
8768   /* The target can give two different responses to the question of
8769      which excess precision mode it would like depending on whether we
8770      are in -fexcess-precision=standard or -fexcess-precision=fast.  */
8771 
8772   enum excess_precision_type requested_type
8773     = (flag_excess_precision == EXCESS_PRECISION_FAST
8774        ? EXCESS_PRECISION_TYPE_FAST
8775        : EXCESS_PRECISION_TYPE_STANDARD);
8776 
8777   enum flt_eval_method target_flt_eval_method
8778     = targetm.c.excess_precision (requested_type);
8779 
8780   /* The target should not ask for unpredictable float evaluation (though
8781      it might advertise that implicitly the evaluation is unpredictable,
8782      but we don't care about that here, it will have been reported
8783      elsewhere).  If it does ask for unpredictable evaluation, we have
8784      nothing to do here.  */
8785   gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
8786 
8787   /* Nothing to do.  The target has asked for all types we know about
8788      to be computed with their native precision and range.  */
8789   if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
8790     return NULL_TREE;
8791 
8792   /* The target will promote this type in a target-dependent way, so excess
8793      precision ought to leave it alone.  */
8794   if (targetm.promoted_type (type) != NULL_TREE)
8795     return NULL_TREE;
8796 
8797   machine_mode float16_type_mode = (float16_type_node
8798 				    ? TYPE_MODE (float16_type_node)
8799 				    : VOIDmode);
8800   machine_mode float_type_mode = TYPE_MODE (float_type_node);
8801   machine_mode double_type_mode = TYPE_MODE (double_type_node);
8802 
8803   switch (TREE_CODE (type))
8804     {
8805     case REAL_TYPE:
8806       {
8807 	machine_mode type_mode = TYPE_MODE (type);
8808 	switch (target_flt_eval_method)
8809 	  {
8810 	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8811 	    if (type_mode == float16_type_mode)
8812 	      return float_type_node;
8813 	    break;
8814 	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8815 	    if (type_mode == float16_type_mode
8816 		|| type_mode == float_type_mode)
8817 	      return double_type_node;
8818 	    break;
8819 	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8820 	    if (type_mode == float16_type_mode
8821 		|| type_mode == float_type_mode
8822 		|| type_mode == double_type_mode)
8823 	      return long_double_type_node;
8824 	    break;
8825 	  default:
8826 	    gcc_unreachable ();
8827 	  }
8828 	break;
8829       }
8830     case COMPLEX_TYPE:
8831       {
8832 	if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8833 	  return NULL_TREE;
8834 	machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
8835 	switch (target_flt_eval_method)
8836 	  {
8837 	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8838 	    if (type_mode == float16_type_mode)
8839 	      return complex_float_type_node;
8840 	    break;
8841 	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8842 	    if (type_mode == float16_type_mode
8843 		|| type_mode == float_type_mode)
8844 	      return complex_double_type_node;
8845 	    break;
8846 	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8847 	    if (type_mode == float16_type_mode
8848 		|| type_mode == float_type_mode
8849 		|| type_mode == double_type_mode)
8850 	      return complex_long_double_type_node;
8851 	    break;
8852 	  default:
8853 	    gcc_unreachable ();
8854 	  }
8855 	break;
8856       }
8857     default:
8858       break;
8859     }
8860 
8861   return NULL_TREE;
8862 }
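
/* For example (added commentary, not part of the original sources), on a
   target whose excess_precision hook answers
   FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE (such as 32-bit x86 using x87
   arithmetic with -fexcess-precision=standard),

     excess_precision_type (float_type_node)

   returns long_double_type_node; front ends then use that wider type for
   intermediate arithmetic.  */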
8863 
8864 /* Return OP, stripped of any conversions to wider types as much as is safe.
8865    Converting the value back to OP's type makes a value equivalent to OP.
8866 
8867    If FOR_TYPE is nonzero, we return a value which, if converted to
8868    type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8869 
8870    OP must have integer, real or enumeral type.  Pointers are not allowed!
8871 
8872    There are some cases where the obvious value we could return
8873    would regenerate to OP if converted to OP's type,
8874    but would not extend like OP to wider types.
8875    If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8876    For example, if OP is (unsigned short)(signed char)-1,
8877    we avoid returning (signed char)-1 if FOR_TYPE is int,
8878    even though extending that to an unsigned short would regenerate OP,
8879    since the result of extending (signed char)-1 to (int)
8880    is different from (int) OP.  */
8881 
8882 tree
8883 get_unwidened (tree op, tree for_type)
8884 {
8885   /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
8886   tree type = TREE_TYPE (op);
8887   unsigned final_prec
8888     = TYPE_PRECISION (for_type != 0 ? for_type : type);
8889   int uns
8890     = (for_type != 0 && for_type != type
8891        && final_prec > TYPE_PRECISION (type)
8892        && TYPE_UNSIGNED (type));
8893   tree win = op;
8894 
8895   while (CONVERT_EXPR_P (op))
8896     {
8897       int bitschange;
8898 
8899       /* TYPE_PRECISION on vector types has different meaning
8900 	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8901 	 so avoid them here.  */
8902       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8903 	break;
8904 
8905       bitschange = TYPE_PRECISION (TREE_TYPE (op))
8906 		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8907 
8908       /* Truncations are many-one so cannot be removed.
8909 	 Unless we are later going to truncate down even farther.  */
8910       if (bitschange < 0
8911 	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8912 	break;
8913 
8914       /* See what's inside this conversion.  If we decide to strip it,
8915 	 we will set WIN.  */
8916       op = TREE_OPERAND (op, 0);
8917 
8918       /* If we have not stripped any zero-extensions (uns is 0),
8919 	 we can strip any kind of extension.
8920 	 If we have previously stripped a zero-extension,
8921 	 only zero-extensions can safely be stripped.
8922 	 Any extension can be stripped if the bits it would produce
8923 	 are all going to be discarded later by truncating to FOR_TYPE.  */
8924 
8925       if (bitschange > 0)
8926 	{
8927 	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8928 	    win = op;
8929 	  /* TYPE_UNSIGNED says whether this is a zero-extension.
8930 	     Let's avoid computing it if it does not affect WIN
8931 	     and if UNS will not be needed again.  */
8932 	  if ((uns
8933 	       || CONVERT_EXPR_P (op))
8934 	      && TYPE_UNSIGNED (TREE_TYPE (op)))
8935 	    {
8936 	      uns = 1;
8937 	      win = op;
8938 	    }
8939 	}
8940     }
8941 
8942   /* If we finally reach a constant, see if it fits in something smaller
8943      and in that case convert it.  */
8944   if (TREE_CODE (win) == INTEGER_CST)
8945     {
8946       tree wtype = TREE_TYPE (win);
8947       unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
8948       if (for_type)
8949 	prec = MAX (prec, final_prec);
8950       if (prec < TYPE_PRECISION (wtype))
8951 	{
8952 	  tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
8953 	  if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
8954 	    win = fold_convert (t, win);
8955 	}
8956     }
8957 
8958   return win;
8959 }
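
/* A minimal sketch (added commentary, not part of the original sources):
   if OP is the tree for (int) c, where c has type unsigned char, then
   get_unwidened (op, NULL_TREE) strips the widening conversion and
   returns the narrower unsigned char operand c.  */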
8960 
8961 /* Return OP or a simpler expression for a narrower value
8962    which can be sign-extended or zero-extended to give back OP.
8963    Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8964    or 0 if the value should be sign-extended.  */
8965 
8966 tree
8967 get_narrower (tree op, int *unsignedp_ptr)
8968 {
8969   int uns = 0;
8970   int first = 1;
8971   tree win = op;
8972   bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8973 
8974   if (TREE_CODE (op) == COMPOUND_EXPR)
8975     {
8976       do
8977 	op = TREE_OPERAND (op, 1);
8978       while (TREE_CODE (op) == COMPOUND_EXPR);
8979       tree ret = get_narrower (op, unsignedp_ptr);
8980       if (ret == op)
8981 	return win;
8982       auto_vec <tree, 16> v;
8983       unsigned int i;
8984       for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
8985 	   op = TREE_OPERAND (op, 1))
8986 	v.safe_push (op);
8987       FOR_EACH_VEC_ELT_REVERSE (v, i, op)
8988 	ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
8989 			  TREE_TYPE (ret), TREE_OPERAND (op, 0),
8990 			  ret);
8991       return ret;
8992     }
8993   while (TREE_CODE (op) == NOP_EXPR)
8994     {
8995       int bitschange
8996 	= (TYPE_PRECISION (TREE_TYPE (op))
8997 	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8998 
8999       /* Truncations are many-one so cannot be removed.  */
9000       if (bitschange < 0)
9001 	break;
9002 
9003       /* See what's inside this conversion.  If we decide to strip it,
9004 	 we will set WIN.  */
9005 
9006       if (bitschange > 0)
9007 	{
9008 	  op = TREE_OPERAND (op, 0);
9009 	  /* An extension: the outermost one can be stripped,
9010 	     but remember whether it is zero or sign extension.  */
9011 	  if (first)
9012 	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
9013 	  /* Otherwise, if a sign extension has been stripped,
9014 	     only sign extensions can now be stripped;
9015 	     if a zero extension has been stripped, only zero-extensions.  */
9016 	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
9017 	    break;
9018 	  first = 0;
9019 	}
9020       else /* bitschange == 0 */
9021 	{
9022 	  /* A change in nominal type can always be stripped, but we must
9023 	     preserve the unsignedness.  */
9024 	  if (first)
9025 	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
9026 	  first = 0;
9027 	  op = TREE_OPERAND (op, 0);
9028 	  /* Keep trying to narrow, but don't assign op to win if it
9029 	     would turn an integral type into something else.  */
9030 	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
9031 	    continue;
9032 	}
9033 
9034       win = op;
9035     }
9036 
9037   if (TREE_CODE (op) == COMPONENT_REF
9038       /* Since type_for_size always gives an integer type.  */
9039       && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
9040       && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
9041       /* Ensure field is laid out already.  */
9042       && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
9043       && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
9044     {
9045       unsigned HOST_WIDE_INT innerprec
9046 	= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
9047       int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
9048 		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
9049       tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
9050 
9051       /* We can get this structure field in a narrower type that fits it,
9052 	 but the resulting extension to its nominal type (a fullword type)
9053 	 must satisfy the same conditions as for other extensions.
9054 
9055 	 Do this only for fields that are aligned (not bit-fields),
9056 	 because when bit-field insns will be used there is no
9057 	 advantage in doing this.  */
9058 
9059       if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
9060 	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
9061 	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
9062 	  && type != 0)
9063 	{
9064 	  if (first)
9065 	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
9066 	  win = fold_convert (type, op);
9067 	}
9068     }
9069 
9070   *unsignedp_ptr = uns;
9071   return win;
9072 }
9073 
9074 /* Return true if integer constant C has a value that is permissible
9075    for TYPE, an integral type.  */
9076 
9077 bool
9078 int_fits_type_p (const_tree c, const_tree type)
9079 {
9080   tree type_low_bound, type_high_bound;
9081   bool ok_for_low_bound, ok_for_high_bound;
9082   signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
9083 
9084   /* Non-standard boolean types can have arbitrary precision but various
9085      transformations assume that they can only take values 0 and +/-1.  */
9086   if (TREE_CODE (type) == BOOLEAN_TYPE)
9087     return wi::fits_to_boolean_p (wi::to_wide (c), type);
9088 
9089 retry:
9090   type_low_bound = TYPE_MIN_VALUE (type);
9091   type_high_bound = TYPE_MAX_VALUE (type);
9092 
9093   /* If at least one bound of the type is a constant integer, we can check
9094      ourselves and maybe make a decision. If no such decision is possible, but
9095      this type is a subtype, try checking against that.  Otherwise, use
9096      fits_to_tree_p, which checks against the precision.
9097 
9098      Compute the status for each possibly constant bound, and return if we see
9099      one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
9100      for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
9101      for "constant known to fit".  */
9102 
9103   /* Check if c >= type_low_bound.  */
9104   if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
9105     {
9106       if (tree_int_cst_lt (c, type_low_bound))
9107 	return false;
9108       ok_for_low_bound = true;
9109     }
9110   else
9111     ok_for_low_bound = false;
9112 
9113   /* Check if c <= type_high_bound.  */
9114   if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
9115     {
9116       if (tree_int_cst_lt (type_high_bound, c))
9117 	return false;
9118       ok_for_high_bound = true;
9119     }
9120   else
9121     ok_for_high_bound = false;
9122 
9123   /* If the constant fits both bounds, the result is known.  */
9124   if (ok_for_low_bound && ok_for_high_bound)
9125     return true;
9126 
9127   /* Perform some generic filtering which may allow making a decision
9128      even if the bounds are not constant.  First, negative integers
9129      never fit in unsigned types.  */
9130   if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
9131     return false;
9132 
9133   /* Second, narrower types always fit in wider ones.  */
9134   if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
9135     return true;
9136 
9137   /* Third, unsigned integers with top bit set never fit signed types.  */
9138   if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
9139     {
9140       int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
9141       if (prec < TYPE_PRECISION (TREE_TYPE (c)))
9142 	{
9143 	  /* When a tree_cst is converted to a wide-int, the precision
9144 	     is taken from the type.  However, if the precision of the
9145 	     mode underneath the type is smaller than that, it is
9146 	     possible that the value will not fit.  The test below
9147 	     fails if any bit is set between the sign bit of the
9148 	     underlying mode and the top bit of the type.  */
9149 	  if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
9150 	    return false;
9151 	}
9152       else if (wi::neg_p (wi::to_wide (c)))
9153 	return false;
9154     }
9155 
9156   /* If we haven't been able to decide at this point, there is nothing more
9157      can check ourselves here.  Look at the base type if we have one and it
9158      has the same precision.  */
9159   if (TREE_CODE (type) == INTEGER_TYPE
9160       && TREE_TYPE (type) != 0
9161       && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9162     {
9163       type = TREE_TYPE (type);
9164       goto retry;
9165     }
9166 
9167   /* Or to fits_to_tree_p, if nothing else.  */
9168   return wi::fits_to_tree_p (wi::to_wide (c), type);
9169 }
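
/* For example (added commentary, not part of the original sources), with
   c = build_int_cst (integer_type_node, 300),
   int_fits_type_p (c, unsigned_char_type_node) is false (255 < 300),
   while int_fits_type_p (c, short_integer_type_node) is true.  */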
9170 
9171 /* Stores bounds of an integer TYPE in MIN and MAX.  If TYPE has non-constant
9172    bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9173    represented (assuming two's-complement arithmetic) within the bit
9174    precision of the type are returned instead.  */
9175 
9176 void
9177 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9178 {
9179   if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9180       && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9181     wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
9182   else
9183     {
9184       if (TYPE_UNSIGNED (type))
9185 	mpz_set_ui (min, 0);
9186       else
9187 	{
9188 	  wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9189 	  wi::to_mpz (mn, min, SIGNED);
9190 	}
9191     }
9192 
9193   if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9194       && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9195     wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
9196   else
9197     {
9198       wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9199       wi::to_mpz (mn, max, TYPE_SIGN (type));
9200     }
9201 }
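
/* Minimal usage sketch (added commentary, not part of the original
   sources):

     mpz_t lo, hi;
     mpz_init (lo);
     mpz_init (hi);
     get_type_static_bounds (unsigned_char_type_node, lo, hi);
     mpz_clear (lo);
     mpz_clear (hi);

   After the call and before the mpz_clear calls, lo is 0 and hi is 255.  */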
9202 
9203 /* Return true if VAR is an automatic variable.  */
9204 
9205 bool
9206 auto_var_p (const_tree var)
9207 {
9208   return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
9209 	    || TREE_CODE (var) == PARM_DECL)
9210 	   && ! TREE_STATIC (var))
9211 	  || TREE_CODE (var) == RESULT_DECL);
9212 }
9213 
9214 /* Return true if VAR is an automatic variable defined in function FN.  */
9215 
9216 bool
9217 auto_var_in_fn_p (const_tree var, const_tree fn)
9218 {
9219   return (DECL_P (var) && DECL_CONTEXT (var) == fn
9220 	  && (auto_var_p (var)
9221 	      || TREE_CODE (var) == LABEL_DECL));
9222 }
9223 
9224 /* Subprogram of following function.  Called by walk_tree.
9225 
9226    Return *TP if it is an automatic variable or parameter of the
9227    function passed in as DATA.  */
9228 
9229 static tree
9230 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9231 {
9232   tree fn = (tree) data;
9233 
9234   if (TYPE_P (*tp))
9235     *walk_subtrees = 0;
9236 
9237   else if (DECL_P (*tp)
9238 	   && auto_var_in_fn_p (*tp, fn))
9239     return *tp;
9240 
9241   return NULL_TREE;
9242 }
9243 
9244 /* Returns true if T is, contains, or refers to a type with variable
9245    size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9246    arguments, but not the return type.  If FN is nonzero, only return
9247    true if a modifier of the type or position of FN is a variable or
9248    parameter inside FN.
9249 
9250    This concept is more general than that of C99 'variably modified types':
9251    in C99, a struct type is never variably modified because a VLA may not
9252    appear as a structure member.  However, in GNU C, code like:
9253 
9254      struct S { int i[f()]; };
9255 
9256    is valid, and other languages may define similar constructs.  */
9257 
9258 bool
9259 variably_modified_type_p (tree type, tree fn)
9260 {
9261   tree t;
9262 
9263 /* Test if T is either variable (if FN is zero) or an expression containing
9264    a variable in FN.  If TYPE isn't gimplified, return true also if
9265    gimplify_one_sizepos would gimplify the expression into a local
9266    variable.  */
9267 #define RETURN_TRUE_IF_VAR(T)						\
9268   do { tree _t = (T);							\
9269     if (_t != NULL_TREE							\
9270 	&& _t != error_mark_node					\
9271 	&& !CONSTANT_CLASS_P (_t)					\
9272 	&& TREE_CODE (_t) != PLACEHOLDER_EXPR				\
9273 	&& (!fn								\
9274 	    || (!TYPE_SIZES_GIMPLIFIED (type)				\
9275 		&& (TREE_CODE (_t) != VAR_DECL				\
9276 		    && !CONTAINS_PLACEHOLDER_P (_t)))			\
9277 	    || walk_tree (&_t, find_var_from_fn, fn, NULL)))		\
9278       return true;  } while (0)
9279 
9280   if (type == error_mark_node)
9281     return false;
9282 
9283   /* If TYPE itself has variable size, it is variably modified.  */
9284   RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9285   RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9286 
9287   switch (TREE_CODE (type))
9288     {
9289     case POINTER_TYPE:
9290     case REFERENCE_TYPE:
9291     case VECTOR_TYPE:
9292       /* Ada can have pointer types referring to themselves indirectly.  */
9293       if (TREE_VISITED (type))
9294 	return false;
9295       TREE_VISITED (type) = true;
9296       if (variably_modified_type_p (TREE_TYPE (type), fn))
9297 	{
9298 	  TREE_VISITED (type) = false;
9299 	  return true;
9300 	}
9301       TREE_VISITED (type) = false;
9302       break;
9303 
9304     case FUNCTION_TYPE:
9305     case METHOD_TYPE:
9306       /* If TYPE is a function type, it is variably modified if the
9307 	 return type is variably modified.  */
9308       if (variably_modified_type_p (TREE_TYPE (type), fn))
9309 	  return true;
9310       break;
9311 
9312     case INTEGER_TYPE:
9313     case REAL_TYPE:
9314     case FIXED_POINT_TYPE:
9315     case ENUMERAL_TYPE:
9316     case BOOLEAN_TYPE:
9317       /* Scalar types are variably modified if their end points
9318 	 aren't constant.  */
9319       RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9320       RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9321       break;
9322 
9323     case RECORD_TYPE:
9324     case UNION_TYPE:
9325     case QUAL_UNION_TYPE:
9326       /* We can't see if any of the fields are variably-modified by the
9327 	 definition we normally use, since that would produce infinite
9328 	 recursion via pointers.  */
9329       /* This is variably modified if some field's type is.  */
9330       for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9331 	if (TREE_CODE (t) == FIELD_DECL)
9332 	  {
9333 	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9334 	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9335 	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9336 
9337 	    /* If the type is a qualified union, then the DECL_QUALIFIER
9338 	       of fields can also be an expression containing a variable.  */
9339 	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
9340 	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9341 
9342 	    /* If the field is a qualified union, then it's only a container
9343 	       for what's inside so we look into it.  That's necessary in LTO
9344 	       mode because the sizes of the field tested above have been set
9345 	       to PLACEHOLDER_EXPRs by free_lang_data.  */
9346 	    if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
9347 		&& variably_modified_type_p (TREE_TYPE (t), fn))
9348 	      return true;
9349 	  }
9350       break;
9351 
9352     case ARRAY_TYPE:
9353       /* Do not call ourselves to avoid infinite recursion.  This is
9354 	 variably modified if the element type is.  */
9355       RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9356       RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9357       break;
9358 
9359     default:
9360       break;
9361     }
9362 
9363   /* The current language may have other cases to check, but in general,
9364      all other types are not variably modified.  */
9365   return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9366 
9367 #undef RETURN_TRUE_IF_VAR
9368 }
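
/* For example (added commentary, not part of the original sources), in
   the C function

     void f (int n) { int a[n]; char (*p)[n]; }

   both the type of a and the type pointed to by p are variably modified,
   so variably_modified_type_p returns true for them when FN is f's
   FUNCTION_DECL, whereas it returns false for a fixed-size type such as
   int[10].  */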
9369 
9370 /* Given a DECL or TYPE, return the scope in which it was declared, or
9371    NULL_TREE if there is no containing scope.  */
9372 
9373 tree
9374 get_containing_scope (const_tree t)
9375 {
9376   return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9377 }
9378 
9379 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL.  */
9380 
9381 const_tree
9382 get_ultimate_context (const_tree decl)
9383 {
9384   while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
9385     {
9386       if (TREE_CODE (decl) == BLOCK)
9387 	decl = BLOCK_SUPERCONTEXT (decl);
9388       else
9389 	decl = get_containing_scope (decl);
9390     }
9391   return decl;
9392 }
9393 
9394 /* Return the innermost context enclosing DECL that is
9395    a FUNCTION_DECL, or zero if none.  */
9396 
9397 tree
9398 decl_function_context (const_tree decl)
9399 {
9400   tree context;
9401 
9402   if (TREE_CODE (decl) == ERROR_MARK)
9403     return 0;
9404 
9405   /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9406      where we look up the function at runtime.  Such functions always take
9407      a first argument of type 'pointer to real context'.
9408 
9409      C++ should really be fixed to use DECL_CONTEXT for the real context,
9410      and use something else for the "virtual context".  */
9411   else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
9412     context
9413       = TYPE_MAIN_VARIANT
9414 	(TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9415   else
9416     context = DECL_CONTEXT (decl);
9417 
9418   while (context && TREE_CODE (context) != FUNCTION_DECL)
9419     {
9420       if (TREE_CODE (context) == BLOCK)
9421 	context = BLOCK_SUPERCONTEXT (context);
9422       else
9423 	context = get_containing_scope (context);
9424     }
9425 
9426   return context;
9427 }
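
/* For example (added commentary, not part of the original sources), for
   the GNU C nested function g in

     void f (void) { void g (void) { } }

   decl_function_context applied to g's FUNCTION_DECL returns f's
   FUNCTION_DECL, while for a file-scope function it returns zero.  */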
9428 
9429 /* Return the innermost context enclosing DECL that is
9430    a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9431    TYPE_DECLs and FUNCTION_DECLs are transparent to this function.  */
9432 
9433 tree
9434 decl_type_context (const_tree decl)
9435 {
9436   tree context = DECL_CONTEXT (decl);
9437 
9438   while (context)
9439     switch (TREE_CODE (context))
9440       {
9441       case NAMESPACE_DECL:
9442       case TRANSLATION_UNIT_DECL:
9443 	return NULL_TREE;
9444 
9445       case RECORD_TYPE:
9446       case UNION_TYPE:
9447       case QUAL_UNION_TYPE:
9448 	return context;
9449 
9450       case TYPE_DECL:
9451       case FUNCTION_DECL:
9452 	context = DECL_CONTEXT (context);
9453 	break;
9454 
9455       case BLOCK:
9456 	context = BLOCK_SUPERCONTEXT (context);
9457 	break;
9458 
9459       default:
9460 	gcc_unreachable ();
9461       }
9462 
9463   return NULL_TREE;
9464 }
9465 
9466 /* CALL is a CALL_EXPR.  Return the declaration for the function
9467    called, or NULL_TREE if the called function cannot be
9468    determined.  */
9469 
9470 tree
9471 get_callee_fndecl (const_tree call)
9472 {
9473   tree addr;
9474 
9475   if (call == error_mark_node)
9476     return error_mark_node;
9477 
9478   /* It's invalid to call this function with anything but a
9479      CALL_EXPR.  */
9480   gcc_assert (TREE_CODE (call) == CALL_EXPR);
9481 
9482   /* The first operand to the CALL is the address of the function
9483      called.  */
9484   addr = CALL_EXPR_FN (call);
9485 
9486   /* If there is no function, return early.  */
9487   if (addr == NULL_TREE)
9488     return NULL_TREE;
9489 
9490   STRIP_NOPS (addr);
9491 
9492   /* If this is a readonly function pointer, extract its initial value.  */
9493   if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9494       && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9495       && DECL_INITIAL (addr))
9496     addr = DECL_INITIAL (addr);
9497 
9498   /* If the address is just `&f' for some function `f', then we know
9499      that `f' is being called.  */
9500   if (TREE_CODE (addr) == ADDR_EXPR
9501       && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9502     return TREE_OPERAND (addr, 0);
9503 
9504   /* We couldn't figure out what was being called.  */
9505   return NULL_TREE;
9506 }
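
/* For example (added commentary, not part of the original sources), for
   the C call "foo (1)" the CALL_EXPR_FN is the ADDR_EXPR &foo, and
   get_callee_fndecl returns foo's FUNCTION_DECL; for an indirect call
   through a non-const function pointer variable it returns NULL_TREE.  */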
9507 
9508 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9509    return the associated function code, otherwise return CFN_LAST.  */
9510 
9511 combined_fn
9512 get_call_combined_fn (const_tree call)
9513 {
9514   /* It's invalid to call this function with anything but a CALL_EXPR.  */
9515   gcc_assert (TREE_CODE (call) == CALL_EXPR);
9516 
9517   if (!CALL_EXPR_FN (call))
9518     return as_combined_fn (CALL_EXPR_IFN (call));
9519 
9520   tree fndecl = get_callee_fndecl (call);
9521   if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9522     return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9523 
9524   return CFN_LAST;
9525 }
9526 
9527 /* Comparator of indices based on tree_node_counts.  */
9528 
9529 static int
9530 tree_nodes_cmp (const void *p1, const void *p2)
9531 {
9532   const unsigned *n1 = (const unsigned *)p1;
9533   const unsigned *n2 = (const unsigned *)p2;
9534 
9535   return tree_node_counts[*n1] - tree_node_counts[*n2];
9536 }
9537 
9538 /* Comparator of indices based on tree_code_counts.  */
9539 
9540 static int
9541 tree_codes_cmp (const void *p1, const void *p2)
9542 {
9543   const unsigned *n1 = (const unsigned *)p1;
9544   const unsigned *n2 = (const unsigned *)p2;
9545 
9546   return tree_code_counts[*n1] - tree_code_counts[*n2];
9547 }
9548 
9549 #define TREE_MEM_USAGE_SPACES 40
9550 
9551 /* Print debugging information about tree nodes generated during the compile,
9552    and any language-specific information.  */
9553 
9554 void
9555 dump_tree_statistics (void)
9556 {
9557   if (GATHER_STATISTICS)
9558     {
9559       uint64_t total_nodes, total_bytes;
9560       fprintf (stderr, "\nKind                   Nodes      Bytes\n");
9561       mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9562       total_nodes = total_bytes = 0;
9563 
9564       {
9565 	auto_vec<unsigned> indices (all_kinds);
9566 	for (unsigned i = 0; i < all_kinds; i++)
9567 	  indices.quick_push (i);
9568 	indices.qsort (tree_nodes_cmp);
9569 
9570 	for (unsigned i = 0; i < (int) all_kinds; i++)
9571 	  {
9572 	    unsigned j = indices[i];
9573 	    fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
9574 		     tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
9575 		     SIZE_AMOUNT (tree_node_sizes[j]));
9576 	    total_nodes += tree_node_counts[j];
9577 	    total_bytes += tree_node_sizes[j];
9578 	  }
9579 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9580 	fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
9581 		 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
9582 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9583       }
9584 
9585       {
9586 	fprintf (stderr, "Code                              Nodes\n");
9587 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9588 
9589 	auto_vec<unsigned> indices (MAX_TREE_CODES);
9590 	for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9591 	  indices.quick_push (i);
9592 	indices.qsort (tree_codes_cmp);
9593 
9594 	for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9595 	  {
9596 	    unsigned j = indices[i];
9597 	    fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
9598 		     get_tree_code_name ((enum tree_code) j),
9599 		     SIZE_AMOUNT (tree_code_counts[j]));
9600 	  }
9601 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9602 	fprintf (stderr, "\n");
9603 	ssanames_print_statistics ();
9604 	fprintf (stderr, "\n");
9605 	phinodes_print_statistics ();
9606 	fprintf (stderr, "\n");
9607       }
9608     }
9609   else
9610     fprintf (stderr, "(No per-node statistics)\n");
9611 
9612   print_type_hash_statistics ();
9613   print_debug_expr_statistics ();
9614   print_value_expr_statistics ();
9615   lang_hooks.print_statistics ();
9616 }
9617 
9618 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9619 
9620 /* Generate a crc32 of the low BYTES bytes of VALUE.  */
9621 
9622 unsigned
9623 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
9624 {
9625   /* This relies on the raw feedback's top 4 bits being zero.  */
9626 #define FEEDBACK(X) ((X) * 0x04c11db7)
9627 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
9628 		     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
9629   static const unsigned syndromes[16] =
9630     {
9631       SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
9632       SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
9633       SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
9634       SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
9635     };
9636 #undef FEEDBACK
9637 #undef SYNDROME
9638 
9639   value <<= (32 - bytes * 8);
9640   for (unsigned ix = bytes * 2; ix--; value <<= 4)
9641     {
9642       unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
9643 
9644       chksum = (chksum << 4) ^ feedback;
9645     }
9646 
9647   return chksum;
9648 }
9649 
9650 /* Generate a crc32 of a string.  */
9651 
9652 unsigned
9653 crc32_string (unsigned chksum, const char *string)
9654 {
9655   do
9656     chksum = crc32_byte (chksum, *string);
9657   while (*string++);
9658   return chksum;
9659 }
9660 
9661 /* P is a string that will be used in a symbol.  Mask out any characters
9662    that are not valid in that context.  */
9663 
9664 void
9665 clean_symbol_name (char *p)
9666 {
9667   for (; *p; p++)
9668     if (! (ISALNUM (*p)
9669 #ifndef NO_DOLLAR_IN_LABEL	/* this for `$'; unlikely, but... -- kr */
9670 	    || *p == '$'
9671 #endif
9672 #ifndef NO_DOT_IN_LABEL		/* this for `.'; unlikely, but...  */
9673 	    || *p == '.'
9674 #endif
9675 	   ))
9676       *p = '_';
9677 }
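
/* For example (added commentary, not part of the original sources), given
   a writable copy of "my-file.c", clean_symbol_name rewrites it to
   "my_file_c" on targets where neither '$' nor '.' may appear in labels,
   and to "my_file.c" where '.' is allowed.  */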
9678 
9679 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH.  */
9680 
9681 /* Create a unique anonymous identifier.  The identifier is still a
9682    valid assembly label.  */
9683 
9684 tree
9685 make_anon_name ()
9686 {
9687   const char *fmt =
9688 #if !defined (NO_DOT_IN_LABEL)
9689     "."
9690 #elif !defined (NO_DOLLAR_IN_LABEL)
9691     "$"
9692 #else
9693     "_"
9694 #endif
9695     "_anon_%d";
9696 
9697   char buf[24];
9698   int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
9699   gcc_checking_assert (len < int (sizeof (buf)));
9700 
9701   tree id = get_identifier_with_length (buf, len);
9702   IDENTIFIER_ANON_P (id) = true;
9703 
9704   return id;
9705 }
9706 
9707 /* Generate a name for a special-purpose function.
9708    The generated name may need to be unique across the whole link.
9709    Changes to this function may also require corresponding changes to
9710    xstrdup_mask_random.
9711    TYPE is some string to identify the purpose of this function to the
9712    linker or collect2; it must start with an uppercase letter,
9713    one of:
9714    I - for constructors
9715    D - for destructors
9716    N - for C++ anonymous namespaces
9717    F - for DWARF unwind frame information.  */
9718 
9719 tree
9720 get_file_function_name (const char *type)
9721 {
9722   char *buf;
9723   const char *p;
9724   char *q;
9725 
9726   /* If we already have a name we know to be unique, just use that.  */
9727   if (first_global_object_name)
9728     p = q = ASTRDUP (first_global_object_name);
9729   /* If the target is handling the constructors/destructors, they
9730      will be local to this file and the name is only necessary for
9731      debugging purposes.
9732      We also assign sub_I and sub_D suffixes to constructors called from
9733      the global static constructors.  These are always local.  */
9734   else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9735 	   || (strncmp (type, "sub_", 4) == 0
9736 	       && (type[4] == 'I' || type[4] == 'D')))
9737     {
9738       const char *file = main_input_filename;
9739       if (! file)
9740 	file = LOCATION_FILE (input_location);
9741       /* Just use the file's basename, because the full pathname
9742 	 might be quite long.  */
9743       p = q = ASTRDUP (lbasename (file));
9744     }
9745   else
9746     {
9747       /* Otherwise, the name must be unique across the entire link.
9748 	 We don't have anything that we know to be unique to this translation
9749 	 unit, so use what we do have and throw in some randomness.  */
9750       unsigned len;
9751       const char *name = weak_global_object_name;
9752       const char *file = main_input_filename;
9753 
9754       if (! name)
9755 	name = "";
9756       if (! file)
9757 	file = LOCATION_FILE (input_location);
9758 
9759       len = strlen (file);
9760       q = (char *) alloca (9 + 19 + len + 1);
9761       memcpy (q, file, len + 1);
9762 
9763       snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9764 		crc32_string (0, name), get_random_seed (false));
9765 
9766       p = q;
9767     }
9768 
9769   clean_symbol_name (q);
9770   buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9771 			 + strlen (type));
9772 
9773   /* Set up the name of the file-level functions we may need.
9774      Use a global object (which is already required to be unique over
9775      the program) rather than the file name (which imposes extra
9776      constraints).  */
9777   sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9778 
9779   return get_identifier (buf);
9780 }
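
/* For example (added commentary, not part of the original sources), when
   first_global_object_name is "bar", a request for a constructor name,
   get_file_function_name ("I"), produces the identifier "_GLOBAL__I_bar"
   via FILE_FUNCTION_FORMAT.  */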
9781 
9782 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9783 
9784 /* Complain that the tree code of NODE does not match the expected 0
9785    terminated list of trailing codes. The trailing code list can be
9786    empty, for a more vague error message.  FILE, LINE, and FUNCTION
9787    are of the caller.  */
9788 
9789 void
9790 tree_check_failed (const_tree node, const char *file,
9791 		   int line, const char *function, ...)
9792 {
9793   va_list args;
9794   const char *buffer;
9795   unsigned length = 0;
9796   enum tree_code code;
9797 
9798   va_start (args, function);
9799   while ((code = (enum tree_code) va_arg (args, int)))
9800     length += 4 + strlen (get_tree_code_name (code));
9801   va_end (args);
9802   if (length)
9803     {
9804       char *tmp;
9805       va_start (args, function);
9806       length += strlen ("expected ");
9807       buffer = tmp = (char *) alloca (length);
9808       length = 0;
9809       while ((code = (enum tree_code) va_arg (args, int)))
9810 	{
9811 	  const char *prefix = length ? " or " : "expected ";
9812 
9813 	  strcpy (tmp + length, prefix);
9814 	  length += strlen (prefix);
9815 	  strcpy (tmp + length, get_tree_code_name (code));
9816 	  length += strlen (get_tree_code_name (code));
9817 	}
9818       va_end (args);
9819     }
9820   else
9821     buffer = "unexpected node";
9822 
9823   internal_error ("tree check: %s, have %s in %s, at %s:%d",
9824 		  buffer, get_tree_code_name (TREE_CODE (node)),
9825 		  function, trim_filename (file), line);
9826 }
9827 
9828 /* Complain that the tree code of NODE does match the expected 0
9829    terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9830    the caller.  */
9831 
9832 void
9833 tree_not_check_failed (const_tree node, const char *file,
9834 		       int line, const char *function, ...)
9835 {
9836   va_list args;
9837   char *buffer;
9838   unsigned length = 0;
9839   enum tree_code code;
9840 
9841   va_start (args, function);
9842   while ((code = (enum tree_code) va_arg (args, int)))
9843     length += 4 + strlen (get_tree_code_name (code));
9844   va_end (args);
9845   va_start (args, function);
9846   buffer = (char *) alloca (length);
9847   length = 0;
9848   while ((code = (enum tree_code) va_arg (args, int)))
9849     {
9850       if (length)
9851 	{
9852 	  strcpy (buffer + length, " or ");
9853 	  length += 4;
9854 	}
9855       strcpy (buffer + length, get_tree_code_name (code));
9856       length += strlen (get_tree_code_name (code));
9857     }
9858   va_end (args);
9859 
9860   internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9861 		  buffer, get_tree_code_name (TREE_CODE (node)),
9862 		  function, trim_filename (file), line);
9863 }
9864 
9865 /* Similar to tree_check_failed, except that we check for a class of tree
9866    code, given in CL.  */
9867 
9868 void
9869 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9870 			 const char *file, int line, const char *function)
9871 {
9872   internal_error
9873     ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9874      TREE_CODE_CLASS_STRING (cl),
9875      TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9876      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9877 }
9878 
9879 /* Similar to tree_check_failed, except that instead of specifying a
9880    dozen codes, use the knowledge that they're all sequential.  */
9881 
9882 void
9883 tree_range_check_failed (const_tree node, const char *file, int line,
9884 			 const char *function, enum tree_code c1,
9885 			 enum tree_code c2)
9886 {
9887   char *buffer;
9888   unsigned length = 0;
9889   unsigned int c;
9890 
9891   for (c = c1; c <= c2; ++c)
9892     length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9893 
9894   length += strlen ("expected ");
9895   buffer = (char *) alloca (length);
9896   length = 0;
9897 
9898   for (c = c1; c <= c2; ++c)
9899     {
9900       const char *prefix = length ? " or " : "expected ";
9901 
9902       strcpy (buffer + length, prefix);
9903       length += strlen (prefix);
9904       strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9905       length += strlen (get_tree_code_name ((enum tree_code) c));
9906     }
9907 
9908   internal_error ("tree check: %s, have %s in %s, at %s:%d",
9909 		  buffer, get_tree_code_name (TREE_CODE (node)),
9910 		  function, trim_filename (file), line);
9911 }
9912 
9913 
9914 /* Similar to tree_check_failed, except that we check that a tree does
9915    not belong to the specified class of codes, given in CL.  */
9916 
9917 void
9918 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9919 			     const char *file, int line, const char *function)
9920 {
9921   internal_error
9922     ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9923      TREE_CODE_CLASS_STRING (cl),
9924      TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9925      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9926 }
9927 
9928 
9929 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes.  */
9930 
9931 void
9932 omp_clause_check_failed (const_tree node, const char *file, int line,
9933                          const char *function, enum omp_clause_code code)
9934 {
9935   internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
9936 		  "in %s, at %s:%d",
9937 		  omp_clause_code_name[code],
9938 		  get_tree_code_name (TREE_CODE (node)),
9939 		  function, trim_filename (file), line);
9940 }
9941 
9942 
9943 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes.  */
9944 
9945 void
9946 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9947 			       const char *function, enum omp_clause_code c1,
9948 			       enum omp_clause_code c2)
9949 {
9950   char *buffer;
9951   unsigned length = 0;
9952   unsigned int c;
9953 
9954   for (c = c1; c <= c2; ++c)
9955     length += 4 + strlen (omp_clause_code_name[c]);
9956 
9957   length += strlen ("expected ");
9958   buffer = (char *) alloca (length);
9959   length = 0;
9960 
9961   for (c = c1; c <= c2; ++c)
9962     {
9963       const char *prefix = length ? " or " : "expected ";
9964 
9965       strcpy (buffer + length, prefix);
9966       length += strlen (prefix);
9967       strcpy (buffer + length, omp_clause_code_name[c]);
9968       length += strlen (omp_clause_code_name[c]);
9969     }
9970 
9971   internal_error ("tree check: %s, have %s in %s, at %s:%d",
9972 		  buffer, omp_clause_code_name[TREE_CODE (node)],
9973 		  function, trim_filename (file), line);
9974 }
9975 
9976 
9977 #undef DEFTREESTRUCT
9978 #define DEFTREESTRUCT(VAL, NAME) NAME,
9979 
9980 static const char *ts_enum_names[] = {
9981 #include "treestruct.def"
9982 };
9983 #undef DEFTREESTRUCT
9984 
9985 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9986 
9987 /* Similar to tree_class_check_failed, except that we check whether
9988    NODE's code contains the tree structure identified by EN.  */
9989 
9990 void
9991 tree_contains_struct_check_failed (const_tree node,
9992 				   const enum tree_node_structure_enum en,
9993 				   const char *file, int line,
9994 				   const char *function)
9995 {
9996   internal_error
9997     ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9998      TS_ENUM_NAME (en),
9999      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
10000 }
10001 
10002 
10003 /* Similar to above, except that the check is for the bounds of a
10004    TREE_INT_CST's (dynamically sized) vector of elements.  */
10005 
10006 void
10007 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
10008 			       const char *function)
10009 {
10010   internal_error
10011     ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
10012      "at %s:%d",
10013      idx + 1, len, function, trim_filename (file), line);
10014 }
10015 
10016 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
10017    (dynamically sized) vector.  */
10018 
10019 void
10020 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
10021 			   const char *function)
10022 {
10023   internal_error
10024     ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
10025      idx + 1, len, function, trim_filename (file), line);
10026 }
10027 
10028 /* Similar to above, except that the check is for the bounds of the operand
10029    vector of an expression node EXP.  */
10030 
10031 void
10032 tree_operand_check_failed (int idx, const_tree exp, const char *file,
10033 			   int line, const char *function)
10034 {
10035   enum tree_code code = TREE_CODE (exp);
10036   internal_error
10037     ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
10038      idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
10039      function, trim_filename (file), line);
10040 }
10041 
10042 /* Similar to above, except that the check is for the number of
10043    operands of an OMP_CLAUSE node.  */
10044 
10045 void
10046 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
10047 			         int line, const char *function)
10048 {
10049   internal_error
10050     ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
10051      "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
10052      omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
10053      trim_filename (file), line);
10054 }
10055 #endif /* ENABLE_TREE_CHECKING */
10056 
10057 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
10058    and mapped to the machine mode MODE.  Initialize its fields and build
10059    the information necessary for debugging output.  */
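
/* As a rough illustration (assuming a target that provides V4SImode),
   a caller wanting a vector of four ints could write

     tree v4si = make_vector_type (integer_type_node, 4, V4SImode);

   while passing VOIDmode instead leaves the mode for layout_type to
   choose.  */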
10060 
10061 static tree
10062 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
10063 {
10064   tree t;
10065   tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
10066 
10067   t = make_node (VECTOR_TYPE);
10068   TREE_TYPE (t) = mv_innertype;
10069   SET_TYPE_VECTOR_SUBPARTS (t, nunits);
10070   SET_TYPE_MODE (t, mode);
10071 
10072   if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
10073     SET_TYPE_STRUCTURAL_EQUALITY (t);
10074   else if ((TYPE_CANONICAL (mv_innertype) != innertype
10075 	    || mode != VOIDmode)
10076 	   && !VECTOR_BOOLEAN_TYPE_P (t))
10077     TYPE_CANONICAL (t)
10078       = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
10079 
10080   layout_type (t);
10081 
10082   hashval_t hash = type_hash_canon_hash (t);
10083   t = type_hash_canon (hash, t);
10084 
10085   /* We have built a main variant, based on the main variant of the
10086      inner type. Use it to build the variant we return.  */
10087   if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
10088       && TREE_TYPE (t) != innertype)
10089     return build_type_attribute_qual_variant (t,
10090 					      TYPE_ATTRIBUTES (innertype),
10091 					      TYPE_QUALS (innertype));
10092 
10093   return t;
10094 }
10095 
10096 static tree
10097 make_or_reuse_type (unsigned size, int unsignedp)
10098 {
10099   int i;
10100 
10101   if (size == INT_TYPE_SIZE)
10102     return unsignedp ? unsigned_type_node : integer_type_node;
10103   if (size == CHAR_TYPE_SIZE)
10104     return unsignedp ? unsigned_char_type_node : signed_char_type_node;
10105   if (size == SHORT_TYPE_SIZE)
10106     return unsignedp ? short_unsigned_type_node : short_integer_type_node;
10107   if (size == LONG_TYPE_SIZE)
10108     return unsignedp ? long_unsigned_type_node : long_integer_type_node;
10109   if (size == LONG_LONG_TYPE_SIZE)
10110     return (unsignedp ? long_long_unsigned_type_node
10111             : long_long_integer_type_node);
10112 
10113   for (i = 0; i < NUM_INT_N_ENTS; i ++)
10114     if (size == int_n_data[i].bitsize
10115 	&& int_n_enabled_p[i])
10116       return (unsignedp ? int_n_trees[i].unsigned_type
10117 	      : int_n_trees[i].signed_type);
10118 
10119   if (unsignedp)
10120     return make_unsigned_type (size);
10121   else
10122     return make_signed_type (size);
10123 }
10124 
10125 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP.  */
10126 
10127 static tree
10128 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
10129 {
10130   if (satp)
10131     {
10132       if (size == SHORT_FRACT_TYPE_SIZE)
10133 	return unsignedp ? sat_unsigned_short_fract_type_node
10134 			 : sat_short_fract_type_node;
10135       if (size == FRACT_TYPE_SIZE)
10136 	return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
10137       if (size == LONG_FRACT_TYPE_SIZE)
10138 	return unsignedp ? sat_unsigned_long_fract_type_node
10139 			 : sat_long_fract_type_node;
10140       if (size == LONG_LONG_FRACT_TYPE_SIZE)
10141 	return unsignedp ? sat_unsigned_long_long_fract_type_node
10142 			 : sat_long_long_fract_type_node;
10143     }
10144   else
10145     {
10146       if (size == SHORT_FRACT_TYPE_SIZE)
10147 	return unsignedp ? unsigned_short_fract_type_node
10148 			 : short_fract_type_node;
10149       if (size == FRACT_TYPE_SIZE)
10150 	return unsignedp ? unsigned_fract_type_node : fract_type_node;
10151       if (size == LONG_FRACT_TYPE_SIZE)
10152 	return unsignedp ? unsigned_long_fract_type_node
10153 			 : long_fract_type_node;
10154       if (size == LONG_LONG_FRACT_TYPE_SIZE)
10155 	return unsignedp ? unsigned_long_long_fract_type_node
10156 			 : long_long_fract_type_node;
10157     }
10158 
10159   return make_fract_type (size, unsignedp, satp);
10160 }
10161 
10162 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP.  */
10163 
10164 static tree
10165 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
10166 {
10167   if (satp)
10168     {
10169       if (size == SHORT_ACCUM_TYPE_SIZE)
10170 	return unsignedp ? sat_unsigned_short_accum_type_node
10171 			 : sat_short_accum_type_node;
10172       if (size == ACCUM_TYPE_SIZE)
10173 	return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
10174       if (size == LONG_ACCUM_TYPE_SIZE)
10175 	return unsignedp ? sat_unsigned_long_accum_type_node
10176 			 : sat_long_accum_type_node;
10177       if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10178 	return unsignedp ? sat_unsigned_long_long_accum_type_node
10179 			 : sat_long_long_accum_type_node;
10180     }
10181   else
10182     {
10183       if (size == SHORT_ACCUM_TYPE_SIZE)
10184 	return unsignedp ? unsigned_short_accum_type_node
10185 			 : short_accum_type_node;
10186       if (size == ACCUM_TYPE_SIZE)
10187 	return unsignedp ? unsigned_accum_type_node : accum_type_node;
10188       if (size == LONG_ACCUM_TYPE_SIZE)
10189 	return unsignedp ? unsigned_long_accum_type_node
10190 			 : long_accum_type_node;
10191       if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10192 	return unsignedp ? unsigned_long_long_accum_type_node
10193 			 : long_long_accum_type_node;
10194     }
10195 
10196   return make_accum_type (size, unsignedp, satp);
10197 }
10198 
10199 
10200 /* Create an atomic variant node for TYPE.  This routine is called
10201    during initialization of data types to create the 5 basic atomic
10202    types. The generic build_variant_type function requires these to
10203    already be set up in order to function properly, so cannot be
10204    called from there.  If ALIGN is non-zero, then ensure alignment is
10205    overridden to this value.  */
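/* The five nodes built this way are atomicQI_type_node through
   atomicTI_type_node; they are created in build_common_tree_nodes below
   from the corresponding unsigned intQI..intTI nodes.  */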
10206 
10207 static tree
10208 build_atomic_base (tree type, unsigned int align)
10209 {
10210   tree t;
10211 
10212   /* Make sure it's not already registered.  */
10213   if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
10214     return t;
10215 
10216   t = build_variant_type_copy (type);
10217   set_type_quals (t, TYPE_QUAL_ATOMIC);
10218 
10219   if (align)
10220     SET_TYPE_ALIGN (t, align);
10221 
10222   return t;
10223 }
10224 
10225 /* Information about the _FloatN and _FloatNx types.  This must be in
10226    the same order as the corresponding TI_* enum values.  */
10227 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
10228   {
10229     { 16, false },
10230     { 32, false },
10231     { 64, false },
10232     { 128, false },
10233     { 32, true },
10234     { 64, true },
10235     { 128, true },
10236   };
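
/* Read together with the corresponding TI_* enum values, the entries
   above describe, in order, _Float16, _Float32, _Float64, _Float128,
   _Float32x, _Float64x and _Float128x.  */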
10237 
10238 
10239 /* Create nodes for all integer types (and error_mark_node) using the sizes
10240    of C datatypes.  SIGNED_CHAR specifies whether char is signed.  */
10241 
10242 void
10243 build_common_tree_nodes (bool signed_char)
10244 {
10245   int i;
10246 
10247   error_mark_node = make_node (ERROR_MARK);
10248   TREE_TYPE (error_mark_node) = error_mark_node;
10249 
10250   initialize_sizetypes ();
10251 
10252   /* Define both `signed char' and `unsigned char'.  */
10253   signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10254   TYPE_STRING_FLAG (signed_char_type_node) = 1;
10255   unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10256   TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10257 
10258   /* Define `char', which is like either `signed char' or `unsigned char'
10259      but not the same as either.  */
10260   char_type_node
10261     = (signed_char
10262        ? make_signed_type (CHAR_TYPE_SIZE)
10263        : make_unsigned_type (CHAR_TYPE_SIZE));
10264   TYPE_STRING_FLAG (char_type_node) = 1;
10265 
10266   short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10267   short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10268   integer_type_node = make_signed_type (INT_TYPE_SIZE);
10269   unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10270   long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10271   long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10272   long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10273   long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10274 
10275   for (i = 0; i < NUM_INT_N_ENTS; i ++)
10276     {
10277       int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10278       int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10279 
10280       if (int_n_enabled_p[i])
10281 	{
10282 	  integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10283 	  integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10284 	}
10285     }
10286 
10287   /* Define a boolean type.  This type only represents boolean values but
10288      may be larger than char depending on the value of BOOL_TYPE_SIZE.  */
10289   boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10290   TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10291   TYPE_PRECISION (boolean_type_node) = 1;
10292   TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10293 
10294   /* Define what type to use for size_t.  */
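  /* For instance, on a typical LP64 target SIZE_TYPE is
     "long unsigned int", so size_type_node ends up being
     long_unsigned_type_node.  */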
10295   if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10296     size_type_node = unsigned_type_node;
10297   else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10298     size_type_node = long_unsigned_type_node;
10299   else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10300     size_type_node = long_long_unsigned_type_node;
10301   else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10302     size_type_node = short_unsigned_type_node;
10303   else
10304     {
10305       int i;
10306 
10307       size_type_node = NULL_TREE;
10308       for (i = 0; i < NUM_INT_N_ENTS; i++)
10309 	if (int_n_enabled_p[i])
10310 	  {
10311 	    char name[50], altname[50];
10312 	    sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10313 	    sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
10314 
10315 	    if (strcmp (name, SIZE_TYPE) == 0
10316 		|| strcmp (altname, SIZE_TYPE) == 0)
10317 	      {
10318 		size_type_node = int_n_trees[i].unsigned_type;
10319 	      }
10320 	  }
10321       if (size_type_node == NULL_TREE)
10322 	gcc_unreachable ();
10323     }
10324 
10325   /* Define what type to use for ptrdiff_t.  */
10326   if (strcmp (PTRDIFF_TYPE, "int") == 0)
10327     ptrdiff_type_node = integer_type_node;
10328   else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
10329     ptrdiff_type_node = long_integer_type_node;
10330   else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
10331     ptrdiff_type_node = long_long_integer_type_node;
10332   else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
10333     ptrdiff_type_node = short_integer_type_node;
10334   else
10335     {
10336       ptrdiff_type_node = NULL_TREE;
10337       for (int i = 0; i < NUM_INT_N_ENTS; i++)
10338 	if (int_n_enabled_p[i])
10339 	  {
10340 	    char name[50], altname[50];
10341 	    sprintf (name, "__int%d", int_n_data[i].bitsize);
10342 	    sprintf (altname, "__int%d__", int_n_data[i].bitsize);
10343 
10344 	    if (strcmp (name, PTRDIFF_TYPE) == 0
10345 		|| strcmp (altname, PTRDIFF_TYPE) == 0)
10346 	      ptrdiff_type_node = int_n_trees[i].signed_type;
10347 	  }
10348       if (ptrdiff_type_node == NULL_TREE)
10349 	gcc_unreachable ();
10350     }
10351 
10352   /* Fill in the rest of the sized types.  Reuse existing type nodes
10353      when possible.  */
10354   intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10355   intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10356   intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10357   intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10358   intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10359 
10360   unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10361   unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10362   unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10363   unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10364   unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10365 
10366   /* Don't call build_qualified_type for atomics.  That routine does
10367      special processing for atomics, and until they are initialized
10368      it's better not to make that call.
10369 
10370      Check to see if there is a target override for atomic types.  */
10371 
10372   atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10373 					targetm.atomic_align_for_mode (QImode));
10374   atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10375 					targetm.atomic_align_for_mode (HImode));
10376   atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10377 					targetm.atomic_align_for_mode (SImode));
10378   atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10379 					targetm.atomic_align_for_mode (DImode));
10380   atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10381 					targetm.atomic_align_for_mode (TImode));
10382 
10383   access_public_node = get_identifier ("public");
10384   access_protected_node = get_identifier ("protected");
10385   access_private_node = get_identifier ("private");
10386 
10387   /* Define these next since types below may use them.  */
10388   integer_zero_node = build_int_cst (integer_type_node, 0);
10389   integer_one_node = build_int_cst (integer_type_node, 1);
10390   integer_three_node = build_int_cst (integer_type_node, 3);
10391   integer_minus_one_node = build_int_cst (integer_type_node, -1);
10392 
10393   size_zero_node = size_int (0);
10394   size_one_node = size_int (1);
10395   bitsize_zero_node = bitsize_int (0);
10396   bitsize_one_node = bitsize_int (1);
10397   bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10398 
10399   boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10400   boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10401 
10402   void_type_node = make_node (VOID_TYPE);
10403   layout_type (void_type_node);
10404 
10405   /* We are not going to have real types in C with less than byte alignment,
10406      so we might as well not have any types that claim to have it.  */
10407   SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
10408   TYPE_USER_ALIGN (void_type_node) = 0;
10409 
10410   void_node = make_node (VOID_CST);
10411   TREE_TYPE (void_node) = void_type_node;
10412 
10413   null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10414   layout_type (TREE_TYPE (null_pointer_node));
10415 
10416   ptr_type_node = build_pointer_type (void_type_node);
10417   const_ptr_type_node
10418     = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10419   for (unsigned i = 0;
10420        i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
10421        ++i)
10422     builtin_structptr_types[i].node = builtin_structptr_types[i].base;
10423 
10424   pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10425 
10426   float_type_node = make_node (REAL_TYPE);
10427   TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10428   layout_type (float_type_node);
10429 
10430   double_type_node = make_node (REAL_TYPE);
10431   TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10432   layout_type (double_type_node);
10433 
10434   long_double_type_node = make_node (REAL_TYPE);
10435   TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10436   layout_type (long_double_type_node);
10437 
10438   for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10439     {
10440       int n = floatn_nx_types[i].n;
10441       bool extended = floatn_nx_types[i].extended;
10442       scalar_float_mode mode;
10443       if (!targetm.floatn_mode (n, extended).exists (&mode))
10444 	continue;
10445       int precision = GET_MODE_PRECISION (mode);
10446       /* Work around the rs6000 KFmode having precision 113 not
10447 	 128.  */
10448       const struct real_format *fmt = REAL_MODE_FORMAT (mode);
10449       gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
10450       int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
10451       if (!extended)
10452 	gcc_assert (min_precision == n);
10453       if (precision < min_precision)
10454 	precision = min_precision;
10455       FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
10456       TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
10457       layout_type (FLOATN_NX_TYPE_NODE (i));
10458       SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
10459     }
10460 
10461   float_ptr_type_node = build_pointer_type (float_type_node);
10462   double_ptr_type_node = build_pointer_type (double_type_node);
10463   long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10464   integer_ptr_type_node = build_pointer_type (integer_type_node);
10465 
10466   /* Fixed size integer types.  */
10467   uint16_type_node = make_or_reuse_type (16, 1);
10468   uint32_type_node = make_or_reuse_type (32, 1);
10469   uint64_type_node = make_or_reuse_type (64, 1);
10470   if (targetm.scalar_mode_supported_p (TImode))
10471     uint128_type_node = make_or_reuse_type (128, 1);
10472 
10473   /* Decimal float types. */
10474   if (targetm.decimal_float_supported_p ())
10475     {
10476       dfloat32_type_node = make_node (REAL_TYPE);
10477       TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10478       SET_TYPE_MODE (dfloat32_type_node, SDmode);
10479       layout_type (dfloat32_type_node);
10480 
10481       dfloat64_type_node = make_node (REAL_TYPE);
10482       TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10483       SET_TYPE_MODE (dfloat64_type_node, DDmode);
10484       layout_type (dfloat64_type_node);
10485 
10486       dfloat128_type_node = make_node (REAL_TYPE);
10487       TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10488       SET_TYPE_MODE (dfloat128_type_node, TDmode);
10489       layout_type (dfloat128_type_node);
10490     }
10491 
10492   complex_integer_type_node = build_complex_type (integer_type_node, true);
10493   complex_float_type_node = build_complex_type (float_type_node, true);
10494   complex_double_type_node = build_complex_type (double_type_node, true);
10495   complex_long_double_type_node = build_complex_type (long_double_type_node,
10496 						      true);
10497 
10498   for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10499     {
10500       if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
10501 	COMPLEX_FLOATN_NX_TYPE_NODE (i)
10502 	  = build_complex_type (FLOATN_NX_TYPE_NODE (i));
10503     }
10504 
10505 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned.  */
10506 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10507   sat_ ## KIND ## _type_node = \
10508     make_sat_signed_ ## KIND ## _type (SIZE); \
10509   sat_unsigned_ ## KIND ## _type_node = \
10510     make_sat_unsigned_ ## KIND ## _type (SIZE); \
10511   KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10512   unsigned_ ## KIND ## _type_node = \
10513     make_unsigned_ ## KIND ## _type (SIZE);
10514 
10515 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10516   sat_ ## WIDTH ## KIND ## _type_node = \
10517     make_sat_signed_ ## KIND ## _type (SIZE); \
10518   sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10519     make_sat_unsigned_ ## KIND ## _type (SIZE); \
10520   WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10521   unsigned_ ## WIDTH ## KIND ## _type_node = \
10522     make_unsigned_ ## KIND ## _type (SIZE);
10523 
10524 /* Make fixed-point type nodes based on four different widths.  */
10525 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10526   MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10527   MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10528   MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10529   MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10530 
10531 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned.  */
10532 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10533   NAME ## _type_node = \
10534     make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10535   u ## NAME ## _type_node = \
10536     make_or_reuse_unsigned_ ## KIND ## _type \
10537       (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10538   sat_ ## NAME ## _type_node = \
10539     make_or_reuse_sat_signed_ ## KIND ## _type \
10540       (GET_MODE_BITSIZE (MODE ## mode)); \
10541   sat_u ## NAME ## _type_node = \
10542     make_or_reuse_sat_unsigned_ ## KIND ## _type \
10543       (GET_MODE_BITSIZE (U ## MODE ## mode));
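
/* For example, MAKE_FIXED_MODE_NODE (fract, qq, QQ) below initializes
   qq_type_node, uqq_type_node, sat_qq_type_node and sat_uqq_type_node
   from the bit sizes of QQmode and UQQmode.  */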
10544 
10545   /* Fixed-point type and mode nodes.  */
10546   MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10547   MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10548   MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10549   MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10550   MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10551   MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10552   MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10553   MAKE_FIXED_MODE_NODE (accum, ha, HA)
10554   MAKE_FIXED_MODE_NODE (accum, sa, SA)
10555   MAKE_FIXED_MODE_NODE (accum, da, DA)
10556   MAKE_FIXED_MODE_NODE (accum, ta, TA)
10557 
10558   {
10559     tree t = targetm.build_builtin_va_list ();
10560 
10561     /* Many back-ends define record types without setting TYPE_NAME.
10562        If we copied the record type here, we'd keep the original
10563        record type without a name.  This breaks name mangling.  So,
10564        don't copy record types and let c_common_nodes_and_builtins()
10565        declare the type to be __builtin_va_list.  */
10566     if (TREE_CODE (t) != RECORD_TYPE)
10567       t = build_variant_type_copy (t);
10568 
10569     va_list_type_node = t;
10570   }
10571 
10572   /* SCEV analyzer global shared trees.  */
10573   chrec_dont_know = make_node (SCEV_NOT_KNOWN);
10574   TREE_TYPE (chrec_dont_know) = void_type_node;
10575   chrec_known = make_node (SCEV_KNOWN);
10576   TREE_TYPE (chrec_known) = void_type_node;
10577 }
10578 
10579 /* Modify DECL for given flags.
10580    TM_PURE attribute is set only on types, so the function will modify
10581    DECL's type when ECF_TM_PURE is used.  */
10582 
10583 void
10584 set_call_expr_flags (tree decl, int flags)
10585 {
10586   if (flags & ECF_NOTHROW)
10587     TREE_NOTHROW (decl) = 1;
10588   if (flags & ECF_CONST)
10589     TREE_READONLY (decl) = 1;
10590   if (flags & ECF_PURE)
10591     DECL_PURE_P (decl) = 1;
10592   if (flags & ECF_LOOPING_CONST_OR_PURE)
10593     DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10594   if (flags & ECF_NOVOPS)
10595     DECL_IS_NOVOPS (decl) = 1;
10596   if (flags & ECF_NORETURN)
10597     TREE_THIS_VOLATILE (decl) = 1;
10598   if (flags & ECF_MALLOC)
10599     DECL_IS_MALLOC (decl) = 1;
10600   if (flags & ECF_RETURNS_TWICE)
10601     DECL_IS_RETURNS_TWICE (decl) = 1;
10602   if (flags & ECF_LEAF)
10603     DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10604 					NULL, DECL_ATTRIBUTES (decl));
10605   if (flags & ECF_COLD)
10606     DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
10607 					NULL, DECL_ATTRIBUTES (decl));
10608   if (flags & ECF_RET1)
10609     DECL_ATTRIBUTES (decl)
10610       = tree_cons (get_identifier ("fn spec"),
10611 		   build_tree_list (NULL_TREE, build_string (2, "1 ")),
10612 		   DECL_ATTRIBUTES (decl));
10613   if ((flags & ECF_TM_PURE) && flag_tm)
10614     apply_tm_attr (decl, get_identifier ("transaction_pure"));
10615   /* Looping const or pure is implied by noreturn.
10616      There is currently no way to declare looping const or looping pure alone.  */
10617   gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10618 	      || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10619 }
10620 
10621 
10622 /* A subroutine of build_common_builtin_nodes.  Define a builtin function.  */
10623 
10624 static void
10625 local_define_builtin (const char *name, tree type, enum built_in_function code,
10626                       const char *library_name, int ecf_flags)
10627 {
10628   tree decl;
10629 
10630   decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10631 			       library_name, NULL_TREE);
10632   set_call_expr_flags (decl, ecf_flags);
10633 
10634   set_builtin_decl (code, decl, true);
10635 }
10636 
10637 /* Call this function after instantiating all builtins that the language
10638    front end cares about.  This will build the rest of the builtins
10639    and internal functions that are relied upon by the tree optimizers and
10640    the middle-end.  */
10641 
10642 void
10643 build_common_builtin_nodes (void)
10644 {
10645   tree tmp, ftype;
10646   int ecf_flags;
10647 
10648   if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
10649       || !builtin_decl_explicit_p (BUILT_IN_ABORT))
10650     {
10651       ftype = build_function_type (void_type_node, void_list_node);
10652       if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10653 	local_define_builtin ("__builtin_unreachable", ftype,
10654 			      BUILT_IN_UNREACHABLE,
10655 			      "__builtin_unreachable",
10656 			      ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10657 			      | ECF_CONST | ECF_COLD);
10658       if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
10659 	local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
10660 			      "abort",
10661 			      ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
10662     }
10663 
10664   if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10665       || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10666     {
10667       ftype = build_function_type_list (ptr_type_node,
10668 					ptr_type_node, const_ptr_type_node,
10669 					size_type_node, NULL_TREE);
10670 
10671       if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10672 	local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10673 			      "memcpy", ECF_NOTHROW | ECF_LEAF);
10674       if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10675 	local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10676 			      "memmove", ECF_NOTHROW | ECF_LEAF);
10677     }
10678 
10679   if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10680     {
10681       ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10682 					const_ptr_type_node, size_type_node,
10683 					NULL_TREE);
10684       local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10685 			    "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10686     }
10687 
10688   if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10689     {
10690       ftype = build_function_type_list (ptr_type_node,
10691 					ptr_type_node, integer_type_node,
10692 					size_type_node, NULL_TREE);
10693       local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10694 			    "memset", ECF_NOTHROW | ECF_LEAF);
10695     }
10696 
10697   /* If we're checking the stack, `alloca' can throw.  */
10698   const int alloca_flags
10699     = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
10700 
10701   if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10702     {
10703       ftype = build_function_type_list (ptr_type_node,
10704 					size_type_node, NULL_TREE);
10705       local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10706 			    "alloca", alloca_flags);
10707     }
10708 
10709   ftype = build_function_type_list (ptr_type_node, size_type_node,
10710 				    size_type_node, NULL_TREE);
10711   local_define_builtin ("__builtin_alloca_with_align", ftype,
10712 			BUILT_IN_ALLOCA_WITH_ALIGN,
10713 			"__builtin_alloca_with_align",
10714 			alloca_flags);
10715 
10716   ftype = build_function_type_list (ptr_type_node, size_type_node,
10717 				    size_type_node, size_type_node, NULL_TREE);
10718   local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
10719 			BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
10720 			"__builtin_alloca_with_align_and_max",
10721 			alloca_flags);
10722 
10723   ftype = build_function_type_list (void_type_node,
10724 				    ptr_type_node, ptr_type_node,
10725 				    ptr_type_node, NULL_TREE);
10726   local_define_builtin ("__builtin_init_trampoline", ftype,
10727 			BUILT_IN_INIT_TRAMPOLINE,
10728 			"__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10729   local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10730 			BUILT_IN_INIT_HEAP_TRAMPOLINE,
10731 			"__builtin_init_heap_trampoline",
10732 			ECF_NOTHROW | ECF_LEAF);
10733   local_define_builtin ("__builtin_init_descriptor", ftype,
10734 			BUILT_IN_INIT_DESCRIPTOR,
10735 			"__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
10736 
10737   ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10738   local_define_builtin ("__builtin_adjust_trampoline", ftype,
10739 			BUILT_IN_ADJUST_TRAMPOLINE,
10740 			"__builtin_adjust_trampoline",
10741 			ECF_CONST | ECF_NOTHROW);
10742   local_define_builtin ("__builtin_adjust_descriptor", ftype,
10743 			BUILT_IN_ADJUST_DESCRIPTOR,
10744 			"__builtin_adjust_descriptor",
10745 			ECF_CONST | ECF_NOTHROW);
10746 
10747   ftype = build_function_type_list (void_type_node,
10748 				    ptr_type_node, ptr_type_node, NULL_TREE);
10749   if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
10750     local_define_builtin ("__builtin___clear_cache", ftype,
10751 			  BUILT_IN_CLEAR_CACHE,
10752 			  "__clear_cache",
10753 			  ECF_NOTHROW);
10754 
10755   local_define_builtin ("__builtin_nonlocal_goto", ftype,
10756 			BUILT_IN_NONLOCAL_GOTO,
10757 			"__builtin_nonlocal_goto",
10758 			ECF_NORETURN | ECF_NOTHROW);
10759 
10760   ftype = build_function_type_list (void_type_node,
10761 				    ptr_type_node, ptr_type_node, NULL_TREE);
10762   local_define_builtin ("__builtin_setjmp_setup", ftype,
10763 			BUILT_IN_SETJMP_SETUP,
10764 			"__builtin_setjmp_setup", ECF_NOTHROW);
10765 
10766   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10767   local_define_builtin ("__builtin_setjmp_receiver", ftype,
10768 			BUILT_IN_SETJMP_RECEIVER,
10769 			"__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10770 
10771   ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10772   local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10773 			"__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10774 
10775   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10776   local_define_builtin ("__builtin_stack_restore", ftype,
10777 			BUILT_IN_STACK_RESTORE,
10778 			"__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10779 
10780   ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10781 				    const_ptr_type_node, size_type_node,
10782 				    NULL_TREE);
10783   local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
10784 			"__builtin_memcmp_eq",
10785 			ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10786 
10787   local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
10788 			"__builtin_strncmp_eq",
10789 			ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10790 
10791   local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
10792 			"__builtin_strcmp_eq",
10793 			ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10794 
10795   /* If there's a possibility that we might use the ARM EABI, build the
10796     alternate __cxa_end_cleanup node used to resume from C++.  */
10797   if (targetm.arm_eabi_unwinder)
10798     {
10799       ftype = build_function_type_list (void_type_node, NULL_TREE);
10800       local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10801 			    BUILT_IN_CXA_END_CLEANUP,
10802 			    "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10803     }
10804 
10805   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10806   local_define_builtin ("__builtin_unwind_resume", ftype,
10807 			BUILT_IN_UNWIND_RESUME,
10808 			((targetm_common.except_unwind_info (&global_options)
10809 			  == UI_SJLJ)
10810 			 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10811 			ECF_NORETURN);
10812 
10813   if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10814     {
10815       ftype = build_function_type_list (ptr_type_node, integer_type_node,
10816 					NULL_TREE);
10817       local_define_builtin ("__builtin_return_address", ftype,
10818 			    BUILT_IN_RETURN_ADDRESS,
10819 			    "__builtin_return_address",
10820 			    ECF_NOTHROW);
10821     }
10822 
10823   if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10824       || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10825     {
10826       ftype = build_function_type_list (void_type_node, ptr_type_node,
10827 					ptr_type_node, NULL_TREE);
10828       if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10829 	local_define_builtin ("__cyg_profile_func_enter", ftype,
10830 			      BUILT_IN_PROFILE_FUNC_ENTER,
10831 			      "__cyg_profile_func_enter", 0);
10832       if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10833 	local_define_builtin ("__cyg_profile_func_exit", ftype,
10834 			      BUILT_IN_PROFILE_FUNC_EXIT,
10835 			      "__cyg_profile_func_exit", 0);
10836     }
10837 
10838   /* The exception object and filter values from the runtime.  The argument
10839      must be zero before exception lowering, i.e. from the front end.  After
10840      exception lowering, it will be the region number for the exception
10841      landing pad.  These functions are PURE instead of CONST to prevent
10842      them from being hoisted past the exception edge that will initialize
10843      its value in the landing pad.  */
10844   ftype = build_function_type_list (ptr_type_node,
10845 				    integer_type_node, NULL_TREE);
10846   ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10847   /* Only use TM_PURE if we have TM language support.  */
10848   if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10849     ecf_flags |= ECF_TM_PURE;
10850   local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10851 			"__builtin_eh_pointer", ecf_flags);
10852 
10853   tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10854   ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10855   local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10856 			"__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10857 
10858   ftype = build_function_type_list (void_type_node,
10859 				    integer_type_node, integer_type_node,
10860 				    NULL_TREE);
10861   local_define_builtin ("__builtin_eh_copy_values", ftype,
10862 			BUILT_IN_EH_COPY_VALUES,
10863 			"__builtin_eh_copy_values", ECF_NOTHROW);
10864 
10865   /* Complex multiplication and division.  These are handled as builtins
10866      rather than optabs because emit_library_call_value doesn't support
10867      complex.  Further, we can do slightly better with folding these
10868      beasties if the real and complex parts of the arguments are separate.  */
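  /* Concretely, the loop below registers libgcc-style helpers such as
     __mulsc3 / __divsc3 for SCmode and __muldc3 / __divdc3 for DCmode
     (or __gnu_-prefixed names when targetm.libfunc_gnu_prefix is set),
     each taking the four scalar real/imaginary operands.  */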
10869   {
10870     int mode;
10871 
10872     for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10873       {
10874 	char mode_name_buf[4], *q;
10875 	const char *p;
10876 	enum built_in_function mcode, dcode;
10877 	tree type, inner_type;
10878 	const char *prefix = "__";
10879 
10880 	if (targetm.libfunc_gnu_prefix)
10881 	  prefix = "__gnu_";
10882 
10883 	type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10884 	if (type == NULL)
10885 	  continue;
10886 	inner_type = TREE_TYPE (type);
10887 
10888 	ftype = build_function_type_list (type, inner_type, inner_type,
10889 					  inner_type, inner_type, NULL_TREE);
10890 
10891         mcode = ((enum built_in_function)
10892 		 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10893         dcode = ((enum built_in_function)
10894 		 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10895 
10896         for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10897 	  *q = TOLOWER (*p);
10898 	*q = '\0';
10899 
10900 	/* For -ftrapping-math these should throw from a former
10901 	   -fnon-call-exceptions stmt.  */
10902 	built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10903 					NULL);
10904         local_define_builtin (built_in_names[mcode], ftype, mcode,
10905 			      built_in_names[mcode],
10906 			      ECF_CONST | ECF_LEAF);
10907 
10908 	built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10909 					NULL);
10910         local_define_builtin (built_in_names[dcode], ftype, dcode,
10911 			      built_in_names[dcode],
10912 			      ECF_CONST | ECF_LEAF);
10913       }
10914   }
10915 
10916   init_internal_fns ();
10917 }
10918 
10919 /* HACK.  GROSS.  This is absolutely disgusting.  I wish there was a
10920    better way.
10921 
10922    If we requested a pointer to a vector, build up the pointers that
10923    we stripped off while looking for the inner type.  Similarly for
10924    return values from functions.
10925 
10926    The argument TYPE is the top of the chain, and BOTTOM is the
10927    new type which we will point to.  */
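/* For instance, if TYPE is "float *" and BOTTOM is a vector-of-float
   type, the result is a pointer to that vector type, with each level's
   qualifiers and attributes reapplied on the way back up.  */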
10928 
10929 tree
10930 reconstruct_complex_type (tree type, tree bottom)
10931 {
10932   tree inner, outer;
10933 
10934   if (TREE_CODE (type) == POINTER_TYPE)
10935     {
10936       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10937       outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10938 					   TYPE_REF_CAN_ALIAS_ALL (type));
10939     }
10940   else if (TREE_CODE (type) == REFERENCE_TYPE)
10941     {
10942       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10943       outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10944 					     TYPE_REF_CAN_ALIAS_ALL (type));
10945     }
10946   else if (TREE_CODE (type) == ARRAY_TYPE)
10947     {
10948       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10949       outer = build_array_type (inner, TYPE_DOMAIN (type));
10950     }
10951   else if (TREE_CODE (type) == FUNCTION_TYPE)
10952     {
10953       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10954       outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10955     }
10956   else if (TREE_CODE (type) == METHOD_TYPE)
10957     {
10958       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10959       /* The build_method_type_directly() routine prepends 'this' to the
10960          argument list, so we must compensate by getting rid of it.  */
10961       outer
10962 	= build_method_type_directly
10963 	    (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10964 	     inner,
10965 	     TREE_CHAIN (TYPE_ARG_TYPES (type)));
10966     }
10967   else if (TREE_CODE (type) == OFFSET_TYPE)
10968     {
10969       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10970       outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10971     }
10972   else
10973     return bottom;
10974 
10975   return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10976 					    TYPE_QUALS (type));
10977 }
10978 
10979 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10980    the inner type.  */
10981 tree
10982 build_vector_type_for_mode (tree innertype, machine_mode mode)
10983 {
10984   poly_int64 nunits;
10985   unsigned int bitsize;
10986 
10987   switch (GET_MODE_CLASS (mode))
10988     {
10989     case MODE_VECTOR_BOOL:
10990     case MODE_VECTOR_INT:
10991     case MODE_VECTOR_FLOAT:
10992     case MODE_VECTOR_FRACT:
10993     case MODE_VECTOR_UFRACT:
10994     case MODE_VECTOR_ACCUM:
10995     case MODE_VECTOR_UACCUM:
10996       nunits = GET_MODE_NUNITS (mode);
10997       break;
10998 
10999     case MODE_INT:
11000       /* Check that there are no leftover bits.  */
11001       bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
11002       gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
11003       nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
11004       break;
11005 
11006     default:
11007       gcc_unreachable ();
11008     }
11009 
11010   return make_vector_type (innertype, nunits, mode);
11011 }
11012 
11013 /* Similarly, but takes the inner type and number of units, which must be
11014    a power of two.  */
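/* E.g. build_vector_type (float_type_node, 4) produces a four-element
   float vector type whose mode is left for layout_type to determine.  */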
11015 
11016 tree
11017 build_vector_type (tree innertype, poly_int64 nunits)
11018 {
11019   return make_vector_type (innertype, nunits, VOIDmode);
11020 }
11021 
11022 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE.  */
11023 
11024 tree
11025 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
11026 {
11027   gcc_assert (mask_mode != BLKmode);
11028 
11029   unsigned HOST_WIDE_INT esize;
11030   if (VECTOR_MODE_P (mask_mode))
11031     {
11032       poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
11033       esize = vector_element_size (vsize, nunits);
11034     }
11035   else
11036     esize = 1;
11037 
11038   tree bool_type = build_nonstandard_boolean_type (esize);
11039 
11040   return make_vector_type (bool_type, nunits, mask_mode);
11041 }
11042 
11043 /* Build a vector type that holds one boolean result for each element of
11044    vector type VECTYPE.  The public interface for this operation is
11045    truth_type_for.  */
11046 
11047 static tree
11048 build_truth_vector_type_for (tree vectype)
11049 {
11050   machine_mode vector_mode = TYPE_MODE (vectype);
11051   poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
11052 
11053   machine_mode mask_mode;
11054   if (VECTOR_MODE_P (vector_mode)
11055       && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
11056     return build_truth_vector_type_for_mode (nunits, mask_mode);
11057 
11058   poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
11059   unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
11060   tree bool_type = build_nonstandard_boolean_type (esize);
11061 
11062   return make_vector_type (bool_type, nunits, VOIDmode);
11063 }
11064 
11065 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
11066    set.  */
11067 
11068 tree
11069 build_opaque_vector_type (tree innertype, poly_int64 nunits)
11070 {
11071   tree t = make_vector_type (innertype, nunits, VOIDmode);
11072   tree cand;
11073   /* We always build the non-opaque variant before the opaque one,
11074      so if it already exists, it is TYPE_NEXT_VARIANT of this one.  */
11075   cand = TYPE_NEXT_VARIANT (t);
11076   if (cand
11077       && TYPE_VECTOR_OPAQUE (cand)
11078       && check_qualified_type (cand, t, TYPE_QUALS (t)))
11079     return cand;
11080   /* Otherwise build a variant type and make sure to queue it after
11081      the non-opaque type.  */
11082   cand = build_distinct_type_copy (t);
11083   TYPE_VECTOR_OPAQUE (cand) = true;
11084   TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
11085   TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
11086   TYPE_NEXT_VARIANT (t) = cand;
11087   TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
11088   return cand;
11089 }
11090 
11091 /* Return the value of element I of VECTOR_CST T as a wide_int.  */
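/* As a worked example of the encoding handled below: a stepped
   VECTOR_CST such as { 0, 1, 2, 3, ... } is encoded with a single
   pattern of three elements { 0, 1, 2 }, so for I == 5 we get
   count == 5, v1 == 1, v2 == 2 and the result 2 + (5 - 2) * (2 - 1),
   i.e. 5.  */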
11092 
11093 static poly_wide_int
11094 vector_cst_int_elt (const_tree t, unsigned int i)
11095 {
11096   /* First handle elements that are directly encoded.  */
11097   unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
11098   if (i < encoded_nelts)
11099     return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
11100 
11101   /* Identify the pattern that contains element I and work out the index of
11102      the last encoded element for that pattern.  */
11103   unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
11104   unsigned int pattern = i % npatterns;
11105   unsigned int count = i / npatterns;
11106   unsigned int final_i = encoded_nelts - npatterns + pattern;
11107 
11108   /* If there are no steps, the final encoded value is the right one.  */
11109   if (!VECTOR_CST_STEPPED_P (t))
11110     return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
11111 
11112   /* Otherwise work out the value from the last two encoded elements.  */
11113   tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
11114   tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
11115   poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
11116   return wi::to_poly_wide (v2) + (count - 2) * diff;
11117 }
11118 
11119 /* Return the value of element I of VECTOR_CST T.  */
11120 
11121 tree
11122 vector_cst_elt (const_tree t, unsigned int i)
11123 {
11124   /* First handle elements that are directly encoded.  */
11125   unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
11126   if (i < encoded_nelts)
11127     return VECTOR_CST_ENCODED_ELT (t, i);
11128 
11129   /* If there are no steps, the final encoded value is the right one.  */
11130   if (!VECTOR_CST_STEPPED_P (t))
11131     {
11132       /* Identify the pattern that contains element I and work out the index of
11133 	 the last encoded element for that pattern.  */
11134       unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
11135       unsigned int pattern = i % npatterns;
11136       unsigned int final_i = encoded_nelts - npatterns + pattern;
11137       return VECTOR_CST_ENCODED_ELT (t, final_i);
11138     }
11139 
11140   /* Otherwise work out the value from the last two encoded elements.  */
11141   return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
11142 			   vector_cst_int_elt (t, i));
11143 }
11144 
11145 /* Given an initializer INIT, return TRUE if INIT is zero or some
11146    aggregate of zeros.  Otherwise return FALSE.  If NONZERO is not
11147    null, set *NONZERO if and only if INIT is known not to be all
11148    zeros.  A return value of false combined with *NONZERO being false
11149    means that INIT may, but need not, be all zeros.  All other
11150    combinations give a definitive answer.  */
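/* A minimal usage sketch of the NONZERO out-parameter:

     bool nonzero;
     if (initializer_zerop (init, &nonzero))
       ...    INIT is known to be all zeros.
     else if (nonzero)
       ...    INIT definitely contains a nonzero element.
     else
       ...    nothing is known either way.  */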
11151 
11152 bool
11153 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
11154 {
11155   bool dummy;
11156   if (!nonzero)
11157     nonzero = &dummy;
11158 
11159   /* Conservatively clear NONZERO and set it only if INIT is definitely
11160      not all zero.  */
11161   *nonzero = false;
11162 
11163   STRIP_NOPS (init);
11164 
11165   unsigned HOST_WIDE_INT off = 0;
11166 
11167   switch (TREE_CODE (init))
11168     {
11169     case INTEGER_CST:
11170       if (integer_zerop (init))
11171 	return true;
11172 
11173       *nonzero = true;
11174       return false;
11175 
11176     case REAL_CST:
11177       /* ??? Note that this is not correct for C4X float formats.  There,
11178 	 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
11179 	 negative exponent.  */
11180       if (real_zerop (init)
11181 	  && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
11182 	return true;
11183 
11184       *nonzero = true;
11185       return false;
11186 
11187     case FIXED_CST:
11188       if (fixed_zerop (init))
11189 	return true;
11190 
11191       *nonzero = true;
11192       return false;
11193 
11194     case COMPLEX_CST:
11195       if (integer_zerop (init)
11196 	  || (real_zerop (init)
11197 	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
11198 	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
11199 	return true;
11200 
11201       *nonzero = true;
11202       return false;
11203 
11204     case VECTOR_CST:
11205       if (VECTOR_CST_NPATTERNS (init) == 1
11206 	  && VECTOR_CST_DUPLICATE_P (init)
11207 	  && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
11208 	return true;
11209 
11210       *nonzero = true;
11211       return false;
11212 
11213     case CONSTRUCTOR:
11214       {
11215 	if (TREE_CLOBBER_P (init))
11216 	  return false;
11217 
11218 	unsigned HOST_WIDE_INT idx;
11219 	tree elt;
11220 
11221 	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
11222 	  if (!initializer_zerop (elt, nonzero))
11223 	    return false;
11224 
11225 	return true;
11226       }
11227 
11228     case MEM_REF:
11229       {
11230 	tree arg = TREE_OPERAND (init, 0);
11231 	if (TREE_CODE (arg) != ADDR_EXPR)
11232 	  return false;
11233 	tree offset = TREE_OPERAND (init, 1);
11234 	if (TREE_CODE (offset) != INTEGER_CST
11235 	    || !tree_fits_uhwi_p (offset))
11236 	  return false;
11237 	off = tree_to_uhwi (offset);
11238 	if (INT_MAX < off)
11239 	  return false;
11240 	arg = TREE_OPERAND (arg, 0);
11241 	if (TREE_CODE (arg) != STRING_CST)
11242 	  return false;
11243 	init = arg;
11244       }
11245       /* Fall through.  */
11246 
11247     case STRING_CST:
11248       {
11249 	gcc_assert (off <= INT_MAX);
11250 
11251 	int i = off;
11252 	int n = TREE_STRING_LENGTH (init);
11253 	if (n <= i)
11254 	  return false;
11255 
11256 	/* We need to loop through all elements to handle cases like
11257 	   "\0" and "\0foobar".  */
11258 	for (i = 0; i < n; ++i)
11259 	  if (TREE_STRING_POINTER (init)[i] != '\0')
11260 	    {
11261 	      *nonzero = true;
11262 	      return false;
11263 	    }
11264 
11265 	return true;
11266       }
11267 
11268     default:
11269       return false;
11270     }
11271 }
11272 
11273 /* Return true if EXPR is an initializer expression in which every element
11274    is a constant that is numerically equal to 0 or 1.  The elements do not
11275    need to be equal to each other.  */
11276 
11277 bool
11278 initializer_each_zero_or_onep (const_tree expr)
11279 {
11280   STRIP_ANY_LOCATION_WRAPPER (expr);
11281 
11282   switch (TREE_CODE (expr))
11283     {
11284     case INTEGER_CST:
11285       return integer_zerop (expr) || integer_onep (expr);
11286 
11287     case REAL_CST:
11288       return real_zerop (expr) || real_onep (expr);
11289 
11290     case VECTOR_CST:
11291       {
11292 	unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
11293 	if (VECTOR_CST_STEPPED_P (expr)
11294 	    && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
11295 	  return false;
11296 
11297 	for (unsigned int i = 0; i < nelts; ++i)
11298 	  {
11299 	    tree elt = vector_cst_elt (expr, i);
11300 	    if (!initializer_each_zero_or_onep (elt))
11301 	      return false;
11302 	  }
11303 
11304 	return true;
11305       }
11306 
11307     default:
11308       return false;
11309     }
11310 }
11311 
11312 /* Check that vector VEC consists entirely of equal elements and that
11313    the number of elements matches the type of VEC.
11314    Return the first element of the vector,
11315    or NULL_TREE if the vector is not uniform.  */
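/* For example, the constant vector { 7, 7, 7, 7 } is uniform and yields
   the INTEGER_CST 7, whereas { 1, 2, 1, 2 } yields NULL_TREE.  */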
11316 tree
11317 uniform_vector_p (const_tree vec)
11318 {
11319   tree first, t;
11320   unsigned HOST_WIDE_INT i, nelts;
11321 
11322   if (vec == NULL_TREE)
11323     return NULL_TREE;
11324 
11325   gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
11326 
11327   if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
11328     return TREE_OPERAND (vec, 0);
11329 
11330   else if (TREE_CODE (vec) == VECTOR_CST)
11331     {
11332       if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
11333 	return VECTOR_CST_ENCODED_ELT (vec, 0);
11334       return NULL_TREE;
11335     }
11336 
11337   else if (TREE_CODE (vec) == CONSTRUCTOR
11338 	   && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
11339     {
11340       first = error_mark_node;
11341 
11342       FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
11343         {
11344           if (i == 0)
11345             {
11346               first = t;
11347               continue;
11348             }
11349 	  if (!operand_equal_p (first, t, 0))
11350 	    return NULL_TREE;
11351         }
11352       if (i != nelts)
11353 	return NULL_TREE;
11354 
11355       return first;
11356     }
11357 
11358   return NULL_TREE;
11359 }
11360 
11361 /* If the argument is an INTEGER_CST, return it.  If the argument is a vector
11362    with all elements the same INTEGER_CST, return that INTEGER_CST.  Otherwise
11363    return NULL_TREE.
11364    Look through location wrappers.  */
11365 
11366 tree
11367 uniform_integer_cst_p (tree t)
11368 {
11369   STRIP_ANY_LOCATION_WRAPPER (t);
11370 
11371   if (TREE_CODE (t) == INTEGER_CST)
11372     return t;
11373 
11374   if (VECTOR_TYPE_P (TREE_TYPE (t)))
11375     {
11376       t = uniform_vector_p (t);
11377       if (t && TREE_CODE (t) == INTEGER_CST)
11378 	return t;
11379     }
11380 
11381   return NULL_TREE;
11382 }
11383 
11384 /* If VECTOR_CST T has a single nonzero element, return the index of that
11385    element, otherwise return -1.  */
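/* Worked example: for a VECTOR_CST with elements { 0, 0, 5, 0 } this
   returns 2; for { 0, 3, 5, 0 } or { 1, 1, 1, 1 } it returns -1 because
   more than one element is nonzero.  */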
11386 
11387 int
11388 single_nonzero_element (const_tree t)
11389 {
11390   unsigned HOST_WIDE_INT nelts;
11391   unsigned int repeat_nelts;
11392   if (VECTOR_CST_NELTS (t).is_constant (&nelts))
11393     repeat_nelts = nelts;
11394   else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
11395     {
11396       nelts = vector_cst_encoded_nelts (t);
11397       repeat_nelts = VECTOR_CST_NPATTERNS (t);
11398     }
11399   else
11400     return -1;
11401 
11402   int res = -1;
11403   for (unsigned int i = 0; i < nelts; ++i)
11404     {
11405       tree elt = vector_cst_elt (t, i);
11406       if (!integer_zerop (elt) && !real_zerop (elt))
11407 	{
11408 	  if (res >= 0 || i >= repeat_nelts)
11409 	    return -1;
11410 	  res = i;
11411 	}
11412     }
11413   return res;
11414 }
11415 
11416 /* Build an empty statement at location LOC.  */
11417 
11418 tree
11419 build_empty_stmt (location_t loc)
11420 {
11421   tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
11422   SET_EXPR_LOCATION (t, loc);
11423   return t;
11424 }
11425 
11426 
11427 /* Build an OpenMP clause with code CODE.  LOC is the location of the
11428    clause.  */
11429 
11430 tree
11431 build_omp_clause (location_t loc, enum omp_clause_code code)
11432 {
11433   tree t;
11434   int size, length;
11435 
11436   length = omp_clause_num_ops[code];
11437   size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
11438 
11439   record_node_allocation_statistics (OMP_CLAUSE, size);
11440 
11441   t = (tree) ggc_internal_alloc (size);
11442   memset (t, 0, size);
11443   TREE_SET_CODE (t, OMP_CLAUSE);
11444   OMP_CLAUSE_SET_CODE (t, code);
11445   OMP_CLAUSE_LOCATION (t) = loc;
11446 
11447   return t;
11448 }
11449 
11450 /* Build a tcc_vl_exp object with code CODE and room for LEN operands.  LEN
11451    includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
11452    Except for the CODE and operand count field, other storage for the
11453    object is initialized to zeros.  */
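/* For instance, build_call_1 below allocates a CALL_EXPR for NARGS
   arguments as build_vl_exp (CALL_EXPR, nargs + 3): one slot for the
   implicit operand count plus slots for the callee and the static
   chain.  */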
11454 
11455 tree
11456 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
11457 {
11458   tree t;
11459   int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
11460 
11461   gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
11462   gcc_assert (len >= 1);
11463 
11464   record_node_allocation_statistics (code, length);
11465 
11466   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
11467 
11468   TREE_SET_CODE (t, code);
11469 
11470   /* Can't use TREE_OPERAND to store the length because if checking is
11471      enabled, it will try to check the length before we store it.  :-P  */
11472   t->exp.operands[0] = build_int_cst (sizetype, len);
11473 
11474   return t;
11475 }
11476 
11477 /* Helper function for build_call_* functions; build a CALL_EXPR with
11478    indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
11479    the argument slots.  */
11480 
11481 static tree
11482 build_call_1 (tree return_type, tree fn, int nargs)
11483 {
11484   tree t;
11485 
11486   t = build_vl_exp (CALL_EXPR, nargs + 3);
11487   TREE_TYPE (t) = return_type;
11488   CALL_EXPR_FN (t) = fn;
11489   CALL_EXPR_STATIC_CHAIN (t) = NULL;
11490 
11491   return t;
11492 }
11493 
11494 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11495    FN and a null static chain slot.  NARGS is the number of call arguments
11496    which are specified as "..." arguments.  */
11497 
11498 tree
11499 build_call_nary (tree return_type, tree fn, int nargs, ...)
11500 {
11501   tree ret;
11502   va_list args;
11503   va_start (args, nargs);
11504   ret = build_call_valist (return_type, fn, nargs, args);
11505   va_end (args);
11506   return ret;
11507 }
11508 
11509 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11510    FN and a null static chain slot.  NARGS is the number of call arguments
11511    which are specified as a va_list ARGS.  */
11512 
11513 tree
11514 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
11515 {
11516   tree t;
11517   int i;
11518 
11519   t = build_call_1 (return_type, fn, nargs);
11520   for (i = 0; i < nargs; i++)
11521     CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11522   process_call_operands (t);
11523   return t;
11524 }
11525 
11526 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11527    FN and a null static chain slot.  NARGS is the number of call arguments
11528    which are specified as a tree array ARGS.  */
11529 
11530 tree
11531 build_call_array_loc (location_t loc, tree return_type, tree fn,
11532 		      int nargs, const tree *args)
11533 {
11534   tree t;
11535   int i;
11536 
11537   t = build_call_1 (return_type, fn, nargs);
11538   for (i = 0; i < nargs; i++)
11539     CALL_EXPR_ARG (t, i) = args[i];
11540   process_call_operands (t);
11541   SET_EXPR_LOCATION (t, loc);
11542   return t;
11543 }
11544 
11545 /* Like build_call_array, but takes a vec.  */
11546 
11547 tree
11548 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11549 {
11550   tree ret, t;
11551   unsigned int ix;
11552 
11553   ret = build_call_1 (return_type, fn, vec_safe_length (args));
11554   FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11555     CALL_EXPR_ARG (ret, ix) = t;
11556   process_call_operands (ret);
11557   return ret;
11558 }
11559 
11560 /* Conveniently construct a function call expression.  FNDECL names the
11561    function to be called and N arguments are passed in the array
11562    ARGARRAY.  */
11563 
11564 tree
11565 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11566 {
11567   tree fntype = TREE_TYPE (fndecl);
11568   tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11569 
11570   return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11571 }
11572 
11573 /* Conveniently construct a function call expression.  FNDECL names the
11574    function to be called and the arguments are passed in the vector
11575    VEC.  */
11576 
11577 tree
11578 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11579 {
11580   return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11581 				    vec_safe_address (vec));
11582 }
11583 
11584 
11585 /* Conveniently construct a function call expression.  FNDECL names the
11586    function to be called, N is the number of arguments, and the "..."
11587    parameters are the argument expressions.  */
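/* Illustrative sketch (FNDECL, LOC and the argument values are
   hypothetical): a call "f (1, 2)" to a function decl FNDECL can be
   built as

     tree call = build_call_expr_loc (loc, fndecl, 2,
				      build_int_cst (integer_type_node, 1),
				      build_int_cst (integer_type_node, 2));  */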
11588 
11589 tree
11590 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11591 {
11592   va_list ap;
11593   tree *argarray = XALLOCAVEC (tree, n);
11594   int i;
11595 
11596   va_start (ap, n);
11597   for (i = 0; i < n; i++)
11598     argarray[i] = va_arg (ap, tree);
11599   va_end (ap);
11600   return build_call_expr_loc_array (loc, fndecl, n, argarray);
11601 }
11602 
11603 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...).  Duplicated because
11604    varargs macros aren't supported by all bootstrap compilers.  */
11605 
11606 tree
11607 build_call_expr (tree fndecl, int n, ...)
11608 {
11609   va_list ap;
11610   tree *argarray = XALLOCAVEC (tree, n);
11611   int i;
11612 
11613   va_start (ap, n);
11614   for (i = 0; i < n; i++)
11615     argarray[i] = va_arg (ap, tree);
11616   va_end (ap);
11617   return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11618 }
11619 
11620 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11621    type TYPE.  This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11622    It will get gimplified later into an ordinary internal function.  */
11623 
11624 tree
11625 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11626 				    tree type, int n, const tree *args)
11627 {
11628   tree t = build_call_1 (type, NULL_TREE, n);
11629   for (int i = 0; i < n; ++i)
11630     CALL_EXPR_ARG (t, i) = args[i];
11631   SET_EXPR_LOCATION (t, loc);
11632   CALL_EXPR_IFN (t) = ifn;
11633   process_call_operands (t);
11634   return t;
11635 }
11636 
11637 /* Build an internal call expression.  This is just like CALL_EXPR, except
11638    its CALL_EXPR_FN is NULL.  It will get gimplified later into an ordinary
11639    internal function.  */
11640 
11641 tree
11642 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11643 			      tree type, int n, ...)
11644 {
11645   va_list ap;
11646   tree *argarray = XALLOCAVEC (tree, n);
11647   int i;
11648 
11649   va_start (ap, n);
11650   for (i = 0; i < n; i++)
11651     argarray[i] = va_arg (ap, tree);
11652   va_end (ap);
11653   return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11654 }
11655 
11656 /* Return a function call to FN, if the target is guaranteed to support it,
11657    or null otherwise.
11658 
11659    N is the number of arguments, passed in the "...", and TYPE is the
11660    type of the return value.  */
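/* Illustrative sketch; CFN_SQRT is used here only as an example
   combined_fn, and LOC/ARG are hypothetical:

     tree call = maybe_build_call_expr_loc (loc, CFN_SQRT,
					    double_type_node, 1, arg);
     if (!call)
       ... fall back to some other expansion ...  */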
11661 
11662 tree
11663 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11664 			   int n, ...)
11665 {
11666   va_list ap;
11667   tree *argarray = XALLOCAVEC (tree, n);
11668   int i;
11669 
11670   va_start (ap, n);
11671   for (i = 0; i < n; i++)
11672     argarray[i] = va_arg (ap, tree);
11673   va_end (ap);
11674   if (internal_fn_p (fn))
11675     {
11676       internal_fn ifn = as_internal_fn (fn);
11677       if (direct_internal_fn_p (ifn))
11678 	{
11679 	  tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11680 	  if (!direct_internal_fn_supported_p (ifn, types,
11681 					       OPTIMIZE_FOR_BOTH))
11682 	    return NULL_TREE;
11683 	}
11684       return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11685     }
11686   else
11687     {
11688       tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11689       if (!fndecl)
11690 	return NULL_TREE;
11691       return build_call_expr_loc_array (loc, fndecl, n, argarray);
11692     }
11693 }
11694 
11695 /* Return a function call to the appropriate builtin alloca variant.
11696 
11697    SIZE is the size to be allocated.  ALIGN, if non-zero, is the requested
11698    alignment of the allocated area.  MAX_SIZE, if non-negative, is an upper
11699    bound for SIZE in case it is not a fixed value.  */
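/* Restating the cases handled below:
     MAX_SIZE >= 0              -> __builtin_alloca_with_align_and_max
     MAX_SIZE < 0, ALIGN > 0    -> __builtin_alloca_with_align
     MAX_SIZE < 0, ALIGN == 0   -> __builtin_alloca  */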
11700 
11701 tree
11702 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
11703 {
11704   if (max_size >= 0)
11705     {
11706       tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
11707       return
11708 	build_call_expr (t, 3, size, size_int (align), size_int (max_size));
11709     }
11710   else if (align > 0)
11711     {
11712       tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11713       return build_call_expr (t, 2, size, size_int (align));
11714     }
11715   else
11716     {
11717       tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
11718       return build_call_expr (t, 1, size);
11719     }
11720 }
11721 
11722 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
11723    if SIZE == -1) and return a tree node representing a char* pointer to
11724    it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)).  When STR is nonnull
11725    the STRING_CST value is the LEN bytes at STR (the representation
11726    of the string, which may be wide).  Otherwise it's all zeros.  */
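/* Illustrative sketch: the address of the narrow string "hi" (including
   its terminating NUL, hence length 3) can be built as

     tree p = build_string_literal (3, "hi");

   leaving ELTYPE and SIZE at their defaults; the result is an ADDR_EXPR
   whose type is a pointer to const char.  */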
11727 
11728 tree
11729 build_string_literal (unsigned len, const char *str /* = NULL */,
11730 		      tree eltype /* = char_type_node */,
11731 		      unsigned HOST_WIDE_INT size /* = -1 */)
11732 {
11733   tree t = build_string (len, str);
11734   /* Set the maximum valid index based on the string length or SIZE.  */
11735   unsigned HOST_WIDE_INT maxidx
11736     = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
11737 
11738   tree index = build_index_type (size_int (maxidx));
11739   eltype = build_type_variant (eltype, 1, 0);
11740   tree type = build_array_type (eltype, index);
11741   TREE_TYPE (t) = type;
11742   TREE_CONSTANT (t) = 1;
11743   TREE_READONLY (t) = 1;
11744   TREE_STATIC (t) = 1;
11745 
11746   type = build_pointer_type (eltype);
11747   t = build1 (ADDR_EXPR, type,
11748 	      build4 (ARRAY_REF, eltype,
11749 		      t, integer_zero_node, NULL_TREE, NULL_TREE));
11750   return t;
11751 }
11752 
11753 
11754 
11755 /* Return true if T (assumed to be a DECL) must be assigned a memory
11756    location.  */
11757 
11758 bool
11759 needs_to_live_in_memory (const_tree t)
11760 {
11761   return (TREE_ADDRESSABLE (t)
11762 	  || is_global_var (t)
11763 	  || (TREE_CODE (t) == RESULT_DECL
11764 	      && !DECL_BY_REFERENCE (t)
11765 	      && aggregate_value_p (t, current_function_decl)));
11766 }
11767 
11768 /* Return the value of constant X, sign-extended.  */
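/* Worked example: for an 8-bit constant whose low bits are 0xff the
   result is the HOST_WIDE_INT -1 (bit 7 is set, so the value is
   sign-extended), while 0x7f yields 127.  */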
11769 
11770 HOST_WIDE_INT
11771 int_cst_value (const_tree x)
11772 {
11773   unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11774   unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11775 
11776   /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
11777   gcc_assert (cst_and_fits_in_hwi (x));
11778 
11779   if (bits < HOST_BITS_PER_WIDE_INT)
11780     {
11781       bool negative = ((val >> (bits - 1)) & 1) != 0;
11782       if (negative)
11783 	val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
11784       else
11785 	val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
11786     }
11787 
11788   return val;
11789 }
11790 
11791 /* If TYPE is an integral or pointer type, return an integer type with
11792    the same precision which is unsigned iff UNSIGNEDP is true, or itself
11793    if TYPE is already an integer type of signedness UNSIGNEDP.
11794    If TYPE is a floating-point type, return an integer type with the same
11795    bitsize and with the signedness given by UNSIGNEDP; this is useful
11796    when doing bit-level operations on a floating-point value.  */
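/* For example (typical target assumed), passing UNSIGNEDP == 1 with a
   32-bit "int" yields a 32-bit unsigned integer type; with a vector of
   "int" it yields the corresponding unsigned vector type; and with
   "float" it yields a 32-bit integer type of the requested signedness,
   suitable for bit-level manipulation of the representation.  */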
11797 
11798 tree
11799 signed_or_unsigned_type_for (int unsignedp, tree type)
11800 {
11801   if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11802     return type;
11803 
11804   if (TREE_CODE (type) == VECTOR_TYPE)
11805     {
11806       tree inner = TREE_TYPE (type);
11807       tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11808       if (!inner2)
11809 	return NULL_TREE;
11810       if (inner == inner2)
11811 	return type;
11812       return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11813     }
11814 
11815   if (TREE_CODE (type) == COMPLEX_TYPE)
11816     {
11817       tree inner = TREE_TYPE (type);
11818       tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11819       if (!inner2)
11820 	return NULL_TREE;
11821       if (inner == inner2)
11822 	return type;
11823       return build_complex_type (inner2);
11824     }
11825 
11826   unsigned int bits;
11827   if (INTEGRAL_TYPE_P (type)
11828       || POINTER_TYPE_P (type)
11829       || TREE_CODE (type) == OFFSET_TYPE)
11830     bits = TYPE_PRECISION (type);
11831   else if (TREE_CODE (type) == REAL_TYPE)
11832     bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11833   else
11834     return NULL_TREE;
11835 
11836   return build_nonstandard_integer_type (bits, unsignedp);
11837 }
11838 
11839 /* If TYPE is an integral or pointer type, return an integer type with
11840    the same precision which is unsigned, or itself if TYPE is already an
11841    unsigned integer type.  If TYPE is a floating-point type, return an
11842    unsigned integer type with the same bitsize as TYPE.  */
11843 
11844 tree
11845 unsigned_type_for (tree type)
11846 {
11847   return signed_or_unsigned_type_for (1, type);
11848 }
11849 
11850 /* If TYPE is an integral or pointer type, return an integer type with
11851    the same precision which is signed, or itself if TYPE is already a
11852    signed integer type.  If TYPE is a floating-point type, return a
11853    signed integer type with the same bitsize as TYPE.  */
11854 
11855 tree
11856 signed_type_for (tree type)
11857 {
11858   return signed_or_unsigned_type_for (0, type);
11859 }
11860 
11861 /* If TYPE is a vector type, return a signed integer vector type with the
11862    same width and number of subparts. Otherwise return boolean_type_node.  */
11863 
11864 tree
11865 truth_type_for (tree type)
11866 {
11867   if (TREE_CODE (type) == VECTOR_TYPE)
11868     {
11869       if (VECTOR_BOOLEAN_TYPE_P (type))
11870 	return type;
11871       return build_truth_vector_type_for (type);
11872     }
11873   else
11874     return boolean_type_node;
11875 }
11876 
11877 /* Returns the largest value obtainable by casting something in INNER type to
11878    OUTER type.  */
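/* Worked example (assuming an 8-bit signed char and a 16-bit unsigned
   short): casting a signed char to unsigned short can yield at most
   65535, e.g. (unsigned short) (signed char) -1, so the bound is 65535;
   casting an unsigned char to short can yield at most 255.  */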
11879 
11880 tree
11881 upper_bound_in_type (tree outer, tree inner)
11882 {
11883   unsigned int det = 0;
11884   unsigned oprec = TYPE_PRECISION (outer);
11885   unsigned iprec = TYPE_PRECISION (inner);
11886   unsigned prec;
11887 
11888   /* Compute a unique number for every combination.  */
11889   det |= (oprec > iprec) ? 4 : 0;
11890   det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11891   det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11892 
11893   /* Determine the exponent to use.  */
11894   switch (det)
11895     {
11896     case 0:
11897     case 1:
11898       /* oprec <= iprec, outer: signed, inner: don't care.  */
11899       prec = oprec - 1;
11900       break;
11901     case 2:
11902     case 3:
11903       /* oprec <= iprec, outer: unsigned, inner: don't care.  */
11904       prec = oprec;
11905       break;
11906     case 4:
11907       /* oprec > iprec, outer: signed, inner: signed.  */
11908       prec = iprec - 1;
11909       break;
11910     case 5:
11911       /* oprec > iprec, outer: signed, inner: unsigned.  */
11912       prec = iprec;
11913       break;
11914     case 6:
11915       /* oprec > iprec, outer: unsigned, inner: signed.  */
11916       prec = oprec;
11917       break;
11918     case 7:
11919       /* oprec > iprec, outer: unsigned, inner: unsigned.  */
11920       prec = iprec;
11921       break;
11922     default:
11923       gcc_unreachable ();
11924     }
11925 
11926   return wide_int_to_tree (outer,
11927 			   wi::mask (prec, false, TYPE_PRECISION (outer)));
11928 }
11929 
11930 /* Returns the smallest value obtainable by casting something in INNER type to
11931    OUTER type.  */
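/* Worked example: the smallest value obtainable by casting an "int" to an
   8-bit signed char is -128 (the precision is narrowed, so the bound is
   -2^(oprec-1)), while casting anything to an unsigned type can always
   yield 0.  */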
11932 
11933 tree
11934 lower_bound_in_type (tree outer, tree inner)
11935 {
11936   unsigned oprec = TYPE_PRECISION (outer);
11937   unsigned iprec = TYPE_PRECISION (inner);
11938 
11939   /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11940      and obtain 0.  */
11941   if (TYPE_UNSIGNED (outer)
11942       /* If we are widening something of an unsigned type, OUTER type
11943 	 contains all values of INNER type.  In particular, both INNER
11944 	 and OUTER types have zero in common.  */
11945       || (oprec > iprec && TYPE_UNSIGNED (inner)))
11946     return build_int_cst (outer, 0);
11947   else
11948     {
11949       /* If we are widening a signed type to another signed type, we
11950 	 want to obtain -2^(iprec-1).  If we are keeping the
11951 	 precision or narrowing to a signed type, we want to obtain
11952 	 -2^(oprec-1).  */
11953       unsigned prec = oprec > iprec ? iprec : oprec;
11954       return wide_int_to_tree (outer,
11955 			       wi::mask (prec - 1, true,
11956 					 TYPE_PRECISION (outer)));
11957     }
11958 }
11959 
11960 /* Return nonzero if two operands that are suitable for PHI nodes are
11961    necessarily equal.  Specifically, both ARG0 and ARG1 must be either
11962    SSA_NAME or invariant.  Note that this is strictly an optimization.
11963    That is, callers of this function can directly call operand_equal_p
11964    and get the same result, only slower.  */
11965 
11966 int
11967 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11968 {
11969   if (arg0 == arg1)
11970     return 1;
11971   if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11972     return 0;
11973   return operand_equal_p (arg0, arg1, 0);
11974 }
11975 
11976 /* Returns the number of zeros at the end of the binary representation of X.  */
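/* For example, for X == 24 (binary 11000) the result is the constant 3;
   for X == 1 it is 0.  */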
11977 
11978 tree
11979 num_ending_zeros (const_tree x)
11980 {
11981   return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
11982 }
11983 
11984 
11985 #define WALK_SUBTREE(NODE)				\
11986   do							\
11987     {							\
11988       result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
11989       if (result)					\
11990 	return result;					\
11991     }							\
11992   while (0)
11993 
11994 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11995    to be walked whenever a type is seen in the tree.  The rest of the operands
11996    and the return value are as for walk_tree.  */
11997 
11998 static tree
11999 walk_type_fields (tree type, walk_tree_fn func, void *data,
12000 		  hash_set<tree> *pset, walk_tree_lh lh)
12001 {
12002   tree result = NULL_TREE;
12003 
12004   switch (TREE_CODE (type))
12005     {
12006     case POINTER_TYPE:
12007     case REFERENCE_TYPE:
12008     case VECTOR_TYPE:
12009       /* We have to worry about mutually recursive pointers.  These can't
12010 	 be written in C.  They can in Ada.  It's pathological, but
12011 	 there's an ACATS test (c38102a) that checks it.  Deal with this
12012 	 by checking if we're pointing to another pointer, that one
12013 	 points to another pointer, that one does too, and we have no htab.
12014 	 If so, get a hash table.  We check three levels deep to avoid
12015 	 the cost of the hash table if we don't need one.  */
12016       if (POINTER_TYPE_P (TREE_TYPE (type))
12017 	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
12018 	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
12019 	  && !pset)
12020 	{
12021 	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
12022 						 func, data);
12023 	  if (result)
12024 	    return result;
12025 
12026 	  break;
12027 	}
12028 
12029       /* fall through */
12030 
12031     case COMPLEX_TYPE:
12032       WALK_SUBTREE (TREE_TYPE (type));
12033       break;
12034 
12035     case METHOD_TYPE:
12036       WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
12037 
12038       /* Fall through.  */
12039 
12040     case FUNCTION_TYPE:
12041       WALK_SUBTREE (TREE_TYPE (type));
12042       {
12043 	tree arg;
12044 
12045 	/* We never want to walk into default arguments.  */
12046 	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
12047 	  WALK_SUBTREE (TREE_VALUE (arg));
12048       }
12049       break;
12050 
12051     case ARRAY_TYPE:
12052       /* Don't follow this node's type if it is a pointer, for fear that
12053 	 we'll have infinite recursion.  If we have a PSET, then we
12054 	 need not fear.  */
12055       if (pset
12056 	  || (!POINTER_TYPE_P (TREE_TYPE (type))
12057 	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
12058 	WALK_SUBTREE (TREE_TYPE (type));
12059       WALK_SUBTREE (TYPE_DOMAIN (type));
12060       break;
12061 
12062     case OFFSET_TYPE:
12063       WALK_SUBTREE (TREE_TYPE (type));
12064       WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
12065       break;
12066 
12067     default:
12068       break;
12069     }
12070 
12071   return NULL_TREE;
12072 }
12073 
12074 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal.  FUNC is
12075    called with the DATA and the address of each sub-tree.  If FUNC returns a
12076    non-NULL value, the traversal is stopped, and the value returned by FUNC
12077    is returned.  If PSET is non-NULL it is used to record the nodes visited,
12078    and to avoid visiting a node more than once.  */
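/* Illustrative sketch of a caller (the helper below is hypothetical and
   not defined anywhere in GCC): a callback returning non-NULL stops the
   walk, so the first SSA_NAME in an expression can be found with

     static tree
     find_ssa_name_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
		      void *data ATTRIBUTE_UNUSED)
     {
       if (TREE_CODE (*tp) == SSA_NAME)
	 return *tp;
       return NULL_TREE;
     }

     tree name = walk_tree (&expr, find_ssa_name_r, NULL, NULL);  */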
12079 
12080 tree
12081 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
12082 	     hash_set<tree> *pset, walk_tree_lh lh)
12083 {
12084   enum tree_code code;
12085   int walk_subtrees;
12086   tree result;
12087 
12088 #define WALK_SUBTREE_TAIL(NODE)				\
12089   do							\
12090     {							\
12091        tp = & (NODE);					\
12092        goto tail_recurse;				\
12093     }							\
12094   while (0)
12095 
12096  tail_recurse:
12097   /* Skip empty subtrees.  */
12098   if (!*tp)
12099     return NULL_TREE;
12100 
12101   /* Don't walk the same tree twice, if the user has requested
12102      that we avoid doing so.  */
12103   if (pset && pset->add (*tp))
12104     return NULL_TREE;
12105 
12106   /* Call the function.  */
12107   walk_subtrees = 1;
12108   result = (*func) (tp, &walk_subtrees, data);
12109 
12110   /* If we found something, return it.  */
12111   if (result)
12112     return result;
12113 
12114   code = TREE_CODE (*tp);
12115 
12116   /* Even if we didn't, FUNC may have decided that there was nothing
12117      interesting below this point in the tree.  */
12118   if (!walk_subtrees)
12119     {
12120       /* But we still need to check our siblings.  */
12121       if (code == TREE_LIST)
12122 	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
12123       else if (code == OMP_CLAUSE)
12124 	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12125       else
12126 	return NULL_TREE;
12127     }
12128 
12129   if (lh)
12130     {
12131       result = (*lh) (tp, &walk_subtrees, func, data, pset);
12132       if (result || !walk_subtrees)
12133         return result;
12134     }
12135 
12136   switch (code)
12137     {
12138     case ERROR_MARK:
12139     case IDENTIFIER_NODE:
12140     case INTEGER_CST:
12141     case REAL_CST:
12142     case FIXED_CST:
12143     case STRING_CST:
12144     case BLOCK:
12145     case PLACEHOLDER_EXPR:
12146     case SSA_NAME:
12147     case FIELD_DECL:
12148     case RESULT_DECL:
12149       /* None of these have subtrees other than those already walked
12150 	 above.  */
12151       break;
12152 
12153     case TREE_LIST:
12154       WALK_SUBTREE (TREE_VALUE (*tp));
12155       WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
12156       break;
12157 
12158     case TREE_VEC:
12159       {
12160 	int len = TREE_VEC_LENGTH (*tp);
12161 
12162 	if (len == 0)
12163 	  break;
12164 
12165 	/* Walk all elements but the first.  */
12166 	while (--len)
12167 	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
12168 
12169 	/* Now walk the first one as a tail call.  */
12170 	WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
12171       }
12172 
12173     case VECTOR_CST:
12174       {
12175 	unsigned len = vector_cst_encoded_nelts (*tp);
12176 	if (len == 0)
12177 	  break;
12178 	/* Walk all elements but the first.  */
12179 	while (--len)
12180 	  WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp, len));
12181 	/* Now walk the first one as a tail call.  */
12182 	WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp, 0));
12183       }
12184 
12185     case COMPLEX_CST:
12186       WALK_SUBTREE (TREE_REALPART (*tp));
12187       WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
12188 
12189     case CONSTRUCTOR:
12190       {
12191 	unsigned HOST_WIDE_INT idx;
12192 	constructor_elt *ce;
12193 
12194 	for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
12195 	     idx++)
12196 	  WALK_SUBTREE (ce->value);
12197       }
12198       break;
12199 
12200     case SAVE_EXPR:
12201       WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
12202 
12203     case BIND_EXPR:
12204       {
12205 	tree decl;
12206 	for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
12207 	  {
12208 	    /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
12209 	       into declarations that are just mentioned, rather than
12210 	       declared; they don't really belong to this part of the tree.
12211 	       And, we can see cycles: the initializer for a declaration
12212 	       can refer to the declaration itself.  */
12213 	    WALK_SUBTREE (DECL_INITIAL (decl));
12214 	    WALK_SUBTREE (DECL_SIZE (decl));
12215 	    WALK_SUBTREE (DECL_SIZE_UNIT (decl));
12216 	  }
12217 	WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
12218       }
12219 
12220     case STATEMENT_LIST:
12221       {
12222 	tree_stmt_iterator i;
12223 	for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
12224 	  WALK_SUBTREE (*tsi_stmt_ptr (i));
12225       }
12226       break;
12227 
12228     case OMP_CLAUSE:
12229       switch (OMP_CLAUSE_CODE (*tp))
12230 	{
12231 	case OMP_CLAUSE_GANG:
12232 	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12233 	  /* FALLTHRU */
12234 
12235 	case OMP_CLAUSE_ASYNC:
12236 	case OMP_CLAUSE_WAIT:
12237 	case OMP_CLAUSE_WORKER:
12238 	case OMP_CLAUSE_VECTOR:
12239 	case OMP_CLAUSE_NUM_GANGS:
12240 	case OMP_CLAUSE_NUM_WORKERS:
12241 	case OMP_CLAUSE_VECTOR_LENGTH:
12242 	case OMP_CLAUSE_PRIVATE:
12243 	case OMP_CLAUSE_SHARED:
12244 	case OMP_CLAUSE_FIRSTPRIVATE:
12245 	case OMP_CLAUSE_COPYIN:
12246 	case OMP_CLAUSE_COPYPRIVATE:
12247 	case OMP_CLAUSE_FINAL:
12248 	case OMP_CLAUSE_IF:
12249 	case OMP_CLAUSE_NUM_THREADS:
12250 	case OMP_CLAUSE_SCHEDULE:
12251 	case OMP_CLAUSE_UNIFORM:
12252 	case OMP_CLAUSE_DEPEND:
12253 	case OMP_CLAUSE_NONTEMPORAL:
12254 	case OMP_CLAUSE_NUM_TEAMS:
12255 	case OMP_CLAUSE_THREAD_LIMIT:
12256 	case OMP_CLAUSE_DEVICE:
12257 	case OMP_CLAUSE_DIST_SCHEDULE:
12258 	case OMP_CLAUSE_SAFELEN:
12259 	case OMP_CLAUSE_SIMDLEN:
12260 	case OMP_CLAUSE_ORDERED:
12261 	case OMP_CLAUSE_PRIORITY:
12262 	case OMP_CLAUSE_GRAINSIZE:
12263 	case OMP_CLAUSE_NUM_TASKS:
12264 	case OMP_CLAUSE_HINT:
12265 	case OMP_CLAUSE_TO_DECLARE:
12266 	case OMP_CLAUSE_LINK:
12267 	case OMP_CLAUSE_DETACH:
12268 	case OMP_CLAUSE_USE_DEVICE_PTR:
12269 	case OMP_CLAUSE_USE_DEVICE_ADDR:
12270 	case OMP_CLAUSE_IS_DEVICE_PTR:
12271 	case OMP_CLAUSE_INCLUSIVE:
12272 	case OMP_CLAUSE_EXCLUSIVE:
12273 	case OMP_CLAUSE__LOOPTEMP_:
12274 	case OMP_CLAUSE__REDUCTEMP_:
12275 	case OMP_CLAUSE__CONDTEMP_:
12276 	case OMP_CLAUSE__SCANTEMP_:
12277 	case OMP_CLAUSE__SIMDUID_:
12278 	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
12279 	  /* FALLTHRU */
12280 
12281 	case OMP_CLAUSE_INDEPENDENT:
12282 	case OMP_CLAUSE_NOWAIT:
12283 	case OMP_CLAUSE_DEFAULT:
12284 	case OMP_CLAUSE_UNTIED:
12285 	case OMP_CLAUSE_MERGEABLE:
12286 	case OMP_CLAUSE_PROC_BIND:
12287 	case OMP_CLAUSE_DEVICE_TYPE:
12288 	case OMP_CLAUSE_INBRANCH:
12289 	case OMP_CLAUSE_NOTINBRANCH:
12290 	case OMP_CLAUSE_FOR:
12291 	case OMP_CLAUSE_PARALLEL:
12292 	case OMP_CLAUSE_SECTIONS:
12293 	case OMP_CLAUSE_TASKGROUP:
12294 	case OMP_CLAUSE_NOGROUP:
12295 	case OMP_CLAUSE_THREADS:
12296 	case OMP_CLAUSE_SIMD:
12297 	case OMP_CLAUSE_DEFAULTMAP:
12298 	case OMP_CLAUSE_ORDER:
12299 	case OMP_CLAUSE_BIND:
12300 	case OMP_CLAUSE_AUTO:
12301 	case OMP_CLAUSE_SEQ:
12302 	case OMP_CLAUSE_TILE:
12303 	case OMP_CLAUSE__SIMT_:
12304 	case OMP_CLAUSE_IF_PRESENT:
12305 	case OMP_CLAUSE_FINALIZE:
12306 	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12307 
12308 	case OMP_CLAUSE_LASTPRIVATE:
12309 	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12310 	  WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
12311 	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12312 
12313 	case OMP_CLAUSE_COLLAPSE:
12314 	  {
12315 	    int i;
12316 	    for (i = 0; i < 3; i++)
12317 	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12318 	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12319 	  }
12320 
12321 	case OMP_CLAUSE_LINEAR:
12322 	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12323 	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
12324 	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
12325 	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12326 
12327 	case OMP_CLAUSE_ALIGNED:
12328 	case OMP_CLAUSE_ALLOCATE:
12329 	case OMP_CLAUSE_FROM:
12330 	case OMP_CLAUSE_TO:
12331 	case OMP_CLAUSE_MAP:
12332 	case OMP_CLAUSE__CACHE_:
12333 	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12334 	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12335 	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12336 
12337 	case OMP_CLAUSE_REDUCTION:
12338 	case OMP_CLAUSE_TASK_REDUCTION:
12339 	case OMP_CLAUSE_IN_REDUCTION:
12340 	  {
12341 	    int i;
12342 	    for (i = 0; i < 5; i++)
12343 	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12344 	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12345 	  }
12346 
12347 	default:
12348 	  gcc_unreachable ();
12349 	}
12350       break;
12351 
12352     case TARGET_EXPR:
12353       {
12354 	int i, len;
12355 
12356 	/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
12357 	   But, we only want to walk once.  */
12358 	len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
12359 	for (i = 0; i < len; ++i)
12360 	  WALK_SUBTREE (TREE_OPERAND (*tp, i));
12361 	WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
12362       }
12363 
12364     case DECL_EXPR:
12365       /* If this is a TYPE_DECL, walk into the fields of the type that it's
12366 	 defining.  We only want to walk into these fields of a type in this
12367 	 case and not in the general case of a mere reference to the type.
12368 
12369 	 The criterion is as follows: if the field can be an expression, it
12370 	 must be walked only here.  This should be in keeping with the fields
12371 	 that are directly gimplified in gimplify_type_sizes in order for the
12372 	 mark/copy-if-shared/unmark machinery of the gimplifier to work with
12373 	 variable-sized types.
12374 
12375 	 Note that DECLs get walked as part of processing the BIND_EXPR.  */
12376       if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
12377 	{
12378 	  /* Call the function for the decl so e.g. copy_tree_body_r can
12379 	     replace it with the remapped one.  */
12380 	  result = (*func) (&DECL_EXPR_DECL (*tp), &walk_subtrees, data);
12381 	  if (result || !walk_subtrees)
12382 	    return result;
12383 
12384 	  tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
12385 	  if (TREE_CODE (*type_p) == ERROR_MARK)
12386 	    return NULL_TREE;
12387 
12388 	  /* Call the function for the type.  See if it returns anything or
12389 	     doesn't want us to continue.  If we are to continue, walk both
12390 	     the normal fields and those for the declaration case.  */
12391 	  result = (*func) (type_p, &walk_subtrees, data);
12392 	  if (result || !walk_subtrees)
12393 	    return result;
12394 
12395 	  /* But do not walk a pointed-to type since it may itself need to
12396 	     be walked in the declaration case if it isn't anonymous.  */
12397 	  if (!POINTER_TYPE_P (*type_p))
12398 	    {
12399 	      result = walk_type_fields (*type_p, func, data, pset, lh);
12400 	      if (result)
12401 		return result;
12402 	    }
12403 
12404 	  /* If this is a record type, also walk the fields.  */
12405 	  if (RECORD_OR_UNION_TYPE_P (*type_p))
12406 	    {
12407 	      tree field;
12408 
12409 	      for (field = TYPE_FIELDS (*type_p); field;
12410 		   field = DECL_CHAIN (field))
12411 		{
12412 		  /* We'd like to look at the type of the field, but we can
12413 		     easily get infinite recursion.  So assume it's pointed
12414 		     to elsewhere in the tree.  Also, ignore things that
12415 		     aren't fields.  */
12416 		  if (TREE_CODE (field) != FIELD_DECL)
12417 		    continue;
12418 
12419 		  WALK_SUBTREE (DECL_FIELD_OFFSET (field));
12420 		  WALK_SUBTREE (DECL_SIZE (field));
12421 		  WALK_SUBTREE (DECL_SIZE_UNIT (field));
12422 		  if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
12423 		    WALK_SUBTREE (DECL_QUALIFIER (field));
12424 		}
12425 	    }
12426 
12427 	  /* Same for scalar types.  */
12428 	  else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
12429 		   || TREE_CODE (*type_p) == ENUMERAL_TYPE
12430 		   || TREE_CODE (*type_p) == INTEGER_TYPE
12431 		   || TREE_CODE (*type_p) == FIXED_POINT_TYPE
12432 		   || TREE_CODE (*type_p) == REAL_TYPE)
12433 	    {
12434 	      WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
12435 	      WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
12436 	    }
12437 
12438 	  WALK_SUBTREE (TYPE_SIZE (*type_p));
12439 	  WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
12440 	}
12441       /* FALLTHRU */
12442 
12443     default:
12444       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
12445 	{
12446 	  int i, len;
12447 
12448 	  /* Walk over all the sub-trees of this operand.  */
12449 	  len = TREE_OPERAND_LENGTH (*tp);
12450 
12451 	  /* Go through the subtrees.  We need to do this in forward order so
12452 	     that the scope of a FOR_EXPR is handled properly.  */
12453 	  if (len)
12454 	    {
12455 	      for (i = 0; i < len - 1; ++i)
12456 		WALK_SUBTREE (TREE_OPERAND (*tp, i));
12457 	      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
12458 	    }
12459 	}
12460       /* If this is a type, walk the needed fields in the type.  */
12461       else if (TYPE_P (*tp))
12462 	return walk_type_fields (*tp, func, data, pset, lh);
12463       break;
12464     }
12465 
12466   /* We didn't find what we were looking for.  */
12467   return NULL_TREE;
12468 
12469 #undef WALK_SUBTREE_TAIL
12470 }
12471 #undef WALK_SUBTREE
12472 
12473 /* Like walk_tree, but does not walk duplicate nodes more than once.  */
12474 
12475 tree
12476 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
12477 				walk_tree_lh lh)
12478 {
12479   tree result;
12480 
12481   hash_set<tree> pset;
12482   result = walk_tree_1 (tp, func, data, &pset, lh);
12483   return result;
12484 }
12485 
12486 
12487 tree
12488 tree_block (tree t)
12489 {
12490   const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12491 
12492   if (IS_EXPR_CODE_CLASS (c))
12493     return LOCATION_BLOCK (t->exp.locus);
12494   gcc_unreachable ();
12495   return NULL;
12496 }
12497 
12498 void
12499 tree_set_block (tree t, tree b)
12500 {
12501   const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12502 
12503   if (IS_EXPR_CODE_CLASS (c))
12504     {
12505       t->exp.locus = set_block (t->exp.locus, b);
12506     }
12507   else
12508     gcc_unreachable ();
12509 }
12510 
12511 /* Create a nameless artificial label and put it in the current
12512    function context.  The label has a location of LOC.  Returns the
12513    newly created label.  */
12514 
12515 tree
12516 create_artificial_label (location_t loc)
12517 {
12518   tree lab = build_decl (loc,
12519       			 LABEL_DECL, NULL_TREE, void_type_node);
12520 
12521   DECL_ARTIFICIAL (lab) = 1;
12522   DECL_IGNORED_P (lab) = 1;
12523   DECL_CONTEXT (lab) = current_function_decl;
12524   return lab;
12525 }
12526 
12527 /*  Given a tree, try to return a useful variable name that we can use
12528     to prefix a temporary that is being assigned the value of the tree.
12529     E.g. given  <temp> = &A, return A.  */
12530 
12531 const char *
12532 get_name (tree t)
12533 {
12534   tree stripped_decl;
12535 
12536   stripped_decl = t;
12537   STRIP_NOPS (stripped_decl);
12538   if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
12539     return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
12540   else if (TREE_CODE (stripped_decl) == SSA_NAME)
12541     {
12542       tree name = SSA_NAME_IDENTIFIER (stripped_decl);
12543       if (!name)
12544 	return NULL;
12545       return IDENTIFIER_POINTER (name);
12546     }
12547   else
12548     {
12549       switch (TREE_CODE (stripped_decl))
12550 	{
12551 	case ADDR_EXPR:
12552 	  return get_name (TREE_OPERAND (stripped_decl, 0));
12553 	default:
12554 	  return NULL;
12555 	}
12556     }
12557 }
12558 
12559 /* Return true if FNTYPE has a variable argument list.  */
12560 
12561 bool
12562 stdarg_p (const_tree fntype)
12563 {
12564   function_args_iterator args_iter;
12565   tree n = NULL_TREE, t;
12566 
12567   if (!fntype)
12568     return false;
12569 
12570   FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
12571     {
12572       n = t;
12573     }
12574 
12575   return n != NULL_TREE && n != void_type_node;
12576 }
12577 
12578 /* Return true if FNTYPE has a prototype.  */
12579 
12580 bool
12581 prototype_p (const_tree fntype)
12582 {
12583   tree t;
12584 
12585   gcc_assert (fntype != NULL_TREE);
12586 
12587   t = TYPE_ARG_TYPES (fntype);
12588   return (t != NULL_TREE);
12589 }
12590 
12591 /* If BLOCK is inlined from an __attribute__((__artificial__))
12592    routine, return a pointer to the location from which it has been
12593    called.  */
12594 location_t *
12595 block_nonartificial_location (tree block)
12596 {
12597   location_t *ret = NULL;
12598 
12599   while (block && TREE_CODE (block) == BLOCK
12600 	 && BLOCK_ABSTRACT_ORIGIN (block))
12601     {
12602       tree ao = BLOCK_ABSTRACT_ORIGIN (block);
12603       if (TREE_CODE (ao) == FUNCTION_DECL)
12604 	{
12605 	  /* If AO is an artificial inline, point RET to the
12606 	     call site locus at which it has been inlined and continue
12607 	     the loop, in case AO's caller is also an artificial
12608 	     inline.  */
12609 	  if (DECL_DECLARED_INLINE_P (ao)
12610 	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
12611 	    ret = &BLOCK_SOURCE_LOCATION (block);
12612 	  else
12613 	    break;
12614 	}
12615       else if (TREE_CODE (ao) != BLOCK)
12616 	break;
12617 
12618       block = BLOCK_SUPERCONTEXT (block);
12619     }
12620   return ret;
12621 }
12622 
12623 
12624 /* If EXP is inlined from an __attribute__((__artificial__))
12625    function, return the location of the original call expression.  */
12626 
12627 location_t
12628 tree_nonartificial_location (tree exp)
12629 {
12630   location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
12631 
12632   if (loc)
12633     return *loc;
12634   else
12635     return EXPR_LOCATION (exp);
12636 }
12637 
12638 /* Return the location into which EXP has been inlined.  Analogous
12639    to tree_nonartificial_location() above but not limited to artificial
12640    functions declared inline.  If SYSTEM_HEADER is true, return
12641    the macro expansion point of the location if it's in a system header.  */
12642 
12643 location_t
12644 tree_inlined_location (tree exp, bool system_header /* = true */)
12645 {
12646   location_t loc = UNKNOWN_LOCATION;
12647 
12648   tree block = TREE_BLOCK (exp);
12649 
12650   while (block && TREE_CODE (block) == BLOCK
12651 	 && BLOCK_ABSTRACT_ORIGIN (block))
12652     {
12653       tree ao = BLOCK_ABSTRACT_ORIGIN (block);
12654       if (TREE_CODE (ao) == FUNCTION_DECL)
12655 	loc = BLOCK_SOURCE_LOCATION (block);
12656       else if (TREE_CODE (ao) != BLOCK)
12657 	break;
12658 
12659       block = BLOCK_SUPERCONTEXT (block);
12660     }
12661 
12662   if (loc == UNKNOWN_LOCATION)
12663     {
12664       loc = EXPR_LOCATION (exp);
12665       if (system_header)
12666 	/* Only consider macro expansion when the block traversal failed
12667 	   to find a location.  Otherwise it's not relevant.  */
12668 	return expansion_point_location_if_in_system_header (loc);
12669     }
12670 
12671   return loc;
12672 }
12673 
12674 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
12675    nodes.  */
12676 
12677 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE.  */
12678 
12679 hashval_t
12680 cl_option_hasher::hash (tree x)
12681 {
12682   const_tree const t = x;
12683   const char *p;
12684   size_t i;
12685   size_t len = 0;
12686   hashval_t hash = 0;
12687 
12688   if (TREE_CODE (t) == OPTIMIZATION_NODE)
12689     {
12690       p = (const char *)TREE_OPTIMIZATION (t);
12691       len = sizeof (struct cl_optimization);
12692     }
12693 
12694   else if (TREE_CODE (t) == TARGET_OPTION_NODE)
12695     return cl_target_option_hash (TREE_TARGET_OPTION (t));
12696 
12697   else
12698     gcc_unreachable ();
12699 
12700   /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
12701      something else.  */
12702   for (i = 0; i < len; i++)
12703     if (p[i])
12704       hash = (hash << 4) ^ ((i << 2) | p[i]);
12705 
12706   return hash;
12707 }
12708 
12709 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
12710    TARGET_OPTION tree node) is the same as that given by *Y, a node of
12711    the same kind.  */
12712 
12713 bool
12714 cl_option_hasher::equal (tree x, tree y)
12715 {
12716   const_tree const xt = x;
12717   const_tree const yt = y;
12718 
12719   if (TREE_CODE (xt) != TREE_CODE (yt))
12720     return 0;
12721 
12722   if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12723     return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
12724 				      TREE_OPTIMIZATION (yt));
12725   else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12726     return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12727 				TREE_TARGET_OPTION (yt));
12728   else
12729     gcc_unreachable ();
12730 }
12731 
12732 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET.  */
12733 
12734 tree
12735 build_optimization_node (struct gcc_options *opts,
12736 			 struct gcc_options *opts_set)
12737 {
12738   tree t;
12739 
12740   /* Use the cache of optimization nodes.  */
12741 
12742   cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12743 			opts, opts_set);
12744 
12745   tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12746   t = *slot;
12747   if (!t)
12748     {
12749       /* Insert this one into the hash table.  */
12750       t = cl_optimization_node;
12751       *slot = t;
12752 
12753       /* Make a new node for next time round.  */
12754       cl_optimization_node = make_node (OPTIMIZATION_NODE);
12755     }
12756 
12757   return t;
12758 }
12759 
12760 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET.  */
12761 
12762 tree
12763 build_target_option_node (struct gcc_options *opts,
12764 			  struct gcc_options *opts_set)
12765 {
12766   tree t;
12767 
12768   /* Use the cache of optimization nodes.  */
12769 
12770   cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12771 			 opts, opts_set);
12772 
12773   tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12774   t = *slot;
12775   if (!t)
12776     {
12777       /* Insert this one into the hash table.  */
12778       t = cl_target_option_node;
12779       *slot = t;
12780 
12781       /* Make a new node for next time round.  */
12782       cl_target_option_node = make_node (TARGET_OPTION_NODE);
12783     }
12784 
12785   return t;
12786 }
12787 
12788 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12789    so that they aren't saved during PCH writing.  */
12790 
12791 void
12792 prepare_target_option_nodes_for_pch (void)
12793 {
12794   hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12795   for (; iter != cl_option_hash_table->end (); ++iter)
12796     if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12797       TREE_TARGET_GLOBALS (*iter) = NULL;
12798 }
12799 
12800 /* Determine the "ultimate origin" of a block.  */
12801 
12802 tree
12803 block_ultimate_origin (const_tree block)
12804 {
12805   tree origin = BLOCK_ABSTRACT_ORIGIN (block);
12806 
12807   if (origin == NULL_TREE)
12808     return NULL_TREE;
12809   else
12810     {
12811       gcc_checking_assert ((DECL_P (origin)
12812 			    && DECL_ORIGIN (origin) == origin)
12813 			   || BLOCK_ORIGIN (origin) == origin);
12814       return origin;
12815     }
12816 }
12817 
12818 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12819    no instruction.  */
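/* For example, a conversion between "int" and "unsigned int" is a nop
   (equal precision), whereas "short" to "int" is not; casts between
   pointers to different address spaces are never nops; for aggregates
   and floats the machine modes are compared instead.  */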
12820 
12821 bool
12822 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12823 {
12824   /* Do not strip casts into or out of differing address spaces.  */
12825   if (POINTER_TYPE_P (outer_type)
12826       && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12827     {
12828       if (!POINTER_TYPE_P (inner_type)
12829 	  || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12830 	      != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12831 	return false;
12832     }
12833   else if (POINTER_TYPE_P (inner_type)
12834 	   && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12835     {
12836       /* We already know that outer_type is not a pointer with
12837 	 a non-generic address space.  */
12838       return false;
12839     }
12840 
12841   /* Use precision rather than machine mode when we can, which gives
12842      the correct answer even for submode (bit-field) types.  */
12843   if ((INTEGRAL_TYPE_P (outer_type)
12844        || POINTER_TYPE_P (outer_type)
12845        || TREE_CODE (outer_type) == OFFSET_TYPE)
12846       && (INTEGRAL_TYPE_P (inner_type)
12847 	  || POINTER_TYPE_P (inner_type)
12848 	  || TREE_CODE (inner_type) == OFFSET_TYPE))
12849     return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12850 
12851   /* Otherwise fall back on comparing machine modes (e.g. for
12852      aggregate types, floats).  */
12853   return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
12854 }
12855 
12856 /* Return true iff conversion in EXP generates no instruction.  Mark
12857    it inline so that we fully inline into the stripping functions even
12858    though we have two uses of this function.  */
12859 
12860 static inline bool
12861 tree_nop_conversion (const_tree exp)
12862 {
12863   tree outer_type, inner_type;
12864 
12865   if (location_wrapper_p (exp))
12866     return true;
12867   if (!CONVERT_EXPR_P (exp)
12868       && TREE_CODE (exp) != NON_LVALUE_EXPR)
12869     return false;
12870 
12871   outer_type = TREE_TYPE (exp);
12872   inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12873   if (!inner_type || inner_type == error_mark_node)
12874     return false;
12875 
12876   return tree_nop_conversion_p (outer_type, inner_type);
12877 }
12878 
12879 /* Return true iff conversion in EXP generates no instruction.  Don't
12880    consider conversions changing the signedness.  */
12881 
12882 static bool
12883 tree_sign_nop_conversion (const_tree exp)
12884 {
12885   tree outer_type, inner_type;
12886 
12887   if (!tree_nop_conversion (exp))
12888     return false;
12889 
12890   outer_type = TREE_TYPE (exp);
12891   inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12892 
12893   return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12894 	  && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12895 }
12896 
12897 /* Strip conversions from EXP according to tree_nop_conversion and
12898    return the resulting expression.  */
12899 
12900 tree
12901 tree_strip_nop_conversions (tree exp)
12902 {
12903   while (tree_nop_conversion (exp))
12904     exp = TREE_OPERAND (exp, 0);
12905   return exp;
12906 }
12907 
12908 /* Strip conversions from EXP according to tree_sign_nop_conversion
12909    and return the resulting expression.  */
12910 
12911 tree
12912 tree_strip_sign_nop_conversions (tree exp)
12913 {
12914   while (tree_sign_nop_conversion (exp))
12915     exp = TREE_OPERAND (exp, 0);
12916   return exp;
12917 }
12918 
12919 /* Strip any floating point extensions from EXP and return the result.  */
12920 tree
12921 strip_float_extensions (tree exp)
12922 {
12923   tree sub, expt, subt;
12924 
12925   /*  For a floating point constant, look up the narrowest type that can hold
12926       it properly and handle it like (type)(narrowest_type)constant.
12927       This way we can optimize for instance a=a*2.0 where "a" is float
12928       but 2.0 is a double constant.  */
12929   if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12930     {
12931       REAL_VALUE_TYPE orig;
12932       tree type = NULL;
12933 
12934       orig = TREE_REAL_CST (exp);
12935       if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12936 	  && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12937 	type = float_type_node;
12938       else if (TYPE_PRECISION (TREE_TYPE (exp))
12939 	       > TYPE_PRECISION (double_type_node)
12940 	       && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12941 	type = double_type_node;
12942       if (type)
12943 	return build_real_truncate (type, orig);
12944     }
12945 
12946   if (!CONVERT_EXPR_P (exp))
12947     return exp;
12948 
12949   sub = TREE_OPERAND (exp, 0);
12950   subt = TREE_TYPE (sub);
12951   expt = TREE_TYPE (exp);
12952 
12953   if (!FLOAT_TYPE_P (subt))
12954     return exp;
12955 
12956   if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12957     return exp;
12958 
12959   if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12960     return exp;
12961 
12962   return strip_float_extensions (sub);
12963 }
12964 
12965 /* Strip out all handled components that produce invariant
12966    offsets.  */
12967 
12968 const_tree
12969 strip_invariant_refs (const_tree op)
12970 {
12971   while (handled_component_p (op))
12972     {
12973       switch (TREE_CODE (op))
12974 	{
12975 	case ARRAY_REF:
12976 	case ARRAY_RANGE_REF:
12977 	  if (!is_gimple_constant (TREE_OPERAND (op, 1))
12978 	      || TREE_OPERAND (op, 2) != NULL_TREE
12979 	      || TREE_OPERAND (op, 3) != NULL_TREE)
12980 	    return NULL;
12981 	  break;
12982 
12983 	case COMPONENT_REF:
12984 	  if (TREE_OPERAND (op, 2) != NULL_TREE)
12985 	    return NULL;
12986 	  break;
12987 
12988 	default:;
12989 	}
12990       op = TREE_OPERAND (op, 0);
12991     }
12992 
12993   return op;
12994 }
12995 
12996 static GTY(()) tree gcc_eh_personality_decl;
12997 
12998 /* Return the GCC personality function decl.  */
12999 
13000 tree
13001 lhd_gcc_personality (void)
13002 {
13003   if (!gcc_eh_personality_decl)
13004     gcc_eh_personality_decl = build_personality_function ("gcc");
13005   return gcc_eh_personality_decl;
13006 }
13007 
13008 /* TARGET is a call target of a GIMPLE call statement
13009    (obtained by gimple_call_fn).  Return true if it is
13010    an OBJ_TYPE_REF representing a virtual call of a C++ method.
13011    (As opposed to an OBJ_TYPE_REF representing objc calls
13012    through a cast where middle-end devirtualization machinery
13013    can't apply.)  FOR_DUMP_P is true when being called from
13014    the dump routines.  */
13015 
13016 bool
13017 virtual_method_call_p (const_tree target, bool for_dump_p)
13018 {
13019   if (TREE_CODE (target) != OBJ_TYPE_REF)
13020     return false;
13021   tree t = TREE_TYPE (target);
13022   gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
13023   t = TREE_TYPE (t);
13024   if (TREE_CODE (t) == FUNCTION_TYPE)
13025     return false;
13026   gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
13027   /* If we do not have BINFO associated, it means that type was built
13028      without devirtualization enabled.  Do not consider this a virtual
13029      call.  */
13030   if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
13031     return false;
13032   return true;
13033 }
13034 
13035 /* Lookup sub-BINFO of BINFO of TYPE at offset POS.  */
13036 
13037 static tree
13038 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
13039 {
13040   unsigned int i;
13041   tree base_binfo, b;
13042 
13043   for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
13044     if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
13045 	&& types_same_for_odr (TREE_TYPE (base_binfo), type))
13046       return base_binfo;
13047     else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
13048       return b;
13049   return NULL;
13050 }
13051 
13052 /* Try to find a base info of BINFO that would have its field decl at offset
13053    OFFSET within the BINFO type and which is of EXPECTED_TYPE.  If it can be
13054    found, return it, otherwise return NULL_TREE.  */
13055 
13056 tree
13057 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
13058 {
13059   tree type = BINFO_TYPE (binfo);
13060 
13061   while (true)
13062     {
13063       HOST_WIDE_INT pos, size;
13064       tree fld;
13065       int i;
13066 
13067       if (types_same_for_odr (type, expected_type))
13068 	  return binfo;
13069       if (maybe_lt (offset, 0))
13070 	return NULL_TREE;
13071 
13072       for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
13073 	{
13074 	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
13075 	    continue;
13076 
13077 	  pos = int_bit_position (fld);
13078 	  size = tree_to_uhwi (DECL_SIZE (fld));
13079 	  if (known_in_range_p (offset, pos, size))
13080 	    break;
13081 	}
13082       if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
13083 	return NULL_TREE;
13084 
13085       /* Offset 0 indicates the primary base, whose vtable contents are
13086 	 represented in the binfo for the derived class.  */
13087       else if (maybe_ne (offset, 0))
13088 	{
13089 	  tree found_binfo = NULL, base_binfo;
13090 	  /* Offsets in BINFO are in bytes relative to the whole structure
13091 	     while POS is in bits relative to the containing field.  */
13092 	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
13093 			     / BITS_PER_UNIT);
13094 
13095 	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
13096 	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
13097 		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
13098 	      {
13099 		found_binfo = base_binfo;
13100 		break;
13101 	      }
13102 	  if (found_binfo)
13103 	    binfo = found_binfo;
13104 	  else
13105 	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
13106 					    binfo_offset);
13107 	 }
13108 
13109       type = TREE_TYPE (fld);
13110       offset -= pos;
13111     }
13112 }
13113 
13114 /* Returns true if X is a typedef decl.  */
13115 
13116 bool
13117 is_typedef_decl (const_tree x)
13118 {
13119   return (x && TREE_CODE (x) == TYPE_DECL
13120           && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
13121 }
13122 
13123 /* Returns true iff TYPE is a type variant created for a typedef. */
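/* For instance, after 'typedef int myint;' the type node for 'myint' is
   a variant of 'int' whose TYPE_NAME is a TYPE_DECL with a non-null
   DECL_ORIGINAL_TYPE, so typedef_variant_p is true for it.  */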
13124 
13125 bool
13126 typedef_variant_p (const_tree type)
13127 {
13128   return is_typedef_decl (TYPE_NAME (type));
13129 }
13130 
13131 /* PR 84195: Replace control characters in "unescaped" with their
13132    escaped equivalents.  Allow newlines if -fmessage-length has
13133    been set to a non-zero value.  This is done here, rather than
13134    where the attribute is recorded as the message length can
13135    change between these two locations.  */
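/* For instance, with -fmessage-length=0 (no line wrapping) an attribute
   message such as "do not use\nuse bar instead" is rewritten as
   "do not use\\nuse bar instead", while with a non-zero -fmessage-length
   the embedded newline is left in place.  */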
13136 
13137 void
13138 escaped_string::escape (const char *unescaped)
13139 {
13140   char *escaped;
13141   size_t i, new_i, len;
13142 
13143   if (m_owned)
13144     free (m_str);
13145 
13146   m_str = const_cast<char *> (unescaped);
13147   m_owned = false;
13148 
13149   if (unescaped == NULL || *unescaped == 0)
13150     return;
13151 
13152   len = strlen (unescaped);
13153   escaped = NULL;
13154   new_i = 0;
13155 
13156   for (i = 0; i < len; i++)
13157     {
13158       char c = unescaped[i];
13159 
13160       if (!ISCNTRL (c))
13161 	{
13162 	  if (escaped)
13163 	    escaped[new_i++] = c;
13164 	  continue;
13165 	}
13166 
13167       if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
13168 	{
13169 	  if (escaped == NULL)
13170 	    {
13171 	      /* We only allocate space for a new string if we
13172 		 actually encounter a control character that
13173 		 needs replacing.  */
13174 	      escaped = (char *) xmalloc (len * 2 + 1);
13175 	      strncpy (escaped, unescaped, i);
13176 	      new_i = i;
13177 	    }
13178 
13179 	  escaped[new_i++] = '\\';
13180 
13181 	  switch (c)
13182 	    {
13183 	    case '\a': escaped[new_i++] = 'a'; break;
13184 	    case '\b': escaped[new_i++] = 'b'; break;
13185 	    case '\f': escaped[new_i++] = 'f'; break;
13186 	    case '\n': escaped[new_i++] = 'n'; break;
13187 	    case '\r': escaped[new_i++] = 'r'; break;
13188 	    case '\t': escaped[new_i++] = 't'; break;
13189 	    case '\v': escaped[new_i++] = 'v'; break;
13190 	    default:   escaped[new_i++] = '?'; break;
13191 	    }
13192 	}
13193       else if (escaped)
13194 	escaped[new_i++] = c;
13195     }
13196 
13197   if (escaped)
13198     {
13199       escaped[new_i] = 0;
13200       m_str = escaped;
13201       m_owned = true;
13202     }
13203 }
13204 
13205 /* Warn about a use of an identifier which was marked deprecated.  Returns
13206    whether a warning was given.  */
13207 
13208 bool
13209 warn_deprecated_use (tree node, tree attr)
13210 {
13211   escaped_string msg;
13212 
13213   if (node == 0 || !warn_deprecated_decl)
13214     return false;
13215 
13216   if (!attr)
13217     {
13218       if (DECL_P (node))
13219 	attr = DECL_ATTRIBUTES (node);
13220       else if (TYPE_P (node))
13221 	{
13222 	  tree decl = TYPE_STUB_DECL (node);
13223 	  if (decl)
13224 	    attr = lookup_attribute ("deprecated",
13225 				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
13226 	}
13227     }
13228 
13229   if (attr)
13230     attr = lookup_attribute ("deprecated", attr);
13231 
13232   if (attr)
13233     msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
13234 
13235   bool w = false;
13236   if (DECL_P (node))
13237     {
13238       auto_diagnostic_group d;
13239       if (msg)
13240 	w = warning (OPT_Wdeprecated_declarations,
13241 		     "%qD is deprecated: %s", node, (const char *) msg);
13242       else
13243 	w = warning (OPT_Wdeprecated_declarations,
13244 		     "%qD is deprecated", node);
13245       if (w)
13246 	inform (DECL_SOURCE_LOCATION (node), "declared here");
13247     }
13248   else if (TYPE_P (node))
13249     {
13250       tree what = NULL_TREE;
13251       tree decl = TYPE_STUB_DECL (node);
13252 
13253       if (TYPE_NAME (node))
13254 	{
13255 	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
13256 	    what = TYPE_NAME (node);
13257 	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
13258 		   && DECL_NAME (TYPE_NAME (node)))
13259 	    what = DECL_NAME (TYPE_NAME (node));
13260 	}
13261 
13262       auto_diagnostic_group d;
13263       if (what)
13264 	{
13265 	  if (msg)
13266 	    w = warning (OPT_Wdeprecated_declarations,
13267 			 "%qE is deprecated: %s", what, (const char *) msg);
13268 	  else
13269 	    w = warning (OPT_Wdeprecated_declarations,
13270 			 "%qE is deprecated", what);
13271 	}
13272       else
13273 	{
13274 	  if (msg)
13275 	    w = warning (OPT_Wdeprecated_declarations,
13276 			 "type is deprecated: %s", (const char *) msg);
13277 	  else
13278 	    w = warning (OPT_Wdeprecated_declarations,
13279 			 "type is deprecated");
13280 	}
13281 
13282       if (w && decl)
13283 	inform (DECL_SOURCE_LOCATION (decl), "declared here");
13284     }
13285 
13286   return w;
13287 }
13288 
13289 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
13290    somewhere in it.  */
13291 
13292 bool
13293 contains_bitfld_component_ref_p (const_tree ref)
13294 {
13295   while (handled_component_p (ref))
13296     {
13297       if (TREE_CODE (ref) == COMPONENT_REF
13298           && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
13299         return true;
13300       ref = TREE_OPERAND (ref, 0);
13301     }
13302 
13303   return false;
13304 }
13305 
13306 /* Try to determine whether a TRY_CATCH expression can fall through.
13307    This is a subroutine of block_may_fallthru.  */
13308 
13309 static bool
13310 try_catch_may_fallthru (const_tree stmt)
13311 {
13312   tree_stmt_iterator i;
13313 
13314   /* If the TRY block can fall through, the whole TRY_CATCH can
13315      fall through.  */
13316   if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
13317     return true;
13318 
13319   i = tsi_start (TREE_OPERAND (stmt, 1));
13320   switch (TREE_CODE (tsi_stmt (i)))
13321     {
13322     case CATCH_EXPR:
13323       /* We expect to see a sequence of CATCH_EXPR trees, each with a
13324 	 catch expression and a body.  The whole TRY_CATCH may fall
13325 	 through iff any of the catch bodies falls through.  */
13326       for (; !tsi_end_p (i); tsi_next (&i))
13327 	{
13328 	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
13329 	    return true;
13330 	}
13331       return false;
13332 
13333     case EH_FILTER_EXPR:
13334       /* The exception filter expression only matters if there is an
13335 	 exception.  If the exception does not match EH_FILTER_TYPES,
13336 	 we will execute EH_FILTER_FAILURE, and we will fall through
13337 	 if that falls through.  If the exception does match
13338 	 EH_FILTER_TYPES, the stack unwinder will continue up the
13339 	 stack, so we will not fall through.  We don't know whether we
13340 	 will throw an exception which matches EH_FILTER_TYPES or not,
13341 	 so we just ignore EH_FILTER_TYPES and assume that we might
13342 	 throw an exception which doesn't match.  */
13343       return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
13344 
13345     default:
13346       /* This case represents statements to be executed when an
13347 	 exception occurs.  Those statements are implicitly followed
13348 	 by a RESX statement to resume execution after the exception.
13349 	 So in this case the TRY_CATCH never falls through.  */
13350       return false;
13351     }
13352 }
13353 
13354 /* Try to determine if we can fall out of the bottom of BLOCK.  This guess
13355    need not be 100% accurate; simply be conservative and return true if we
13356    don't know.  This is used only to avoid stupidly generating extra code.
13357    If we're wrong, we'll just delete the extra code later.  */
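/* For instance, a block whose last statement is a RETURN_EXPR or GOTO_EXPR
   cannot fall through, a block ending in a plain MODIFY_EXPR can, and a
   block ending in a call to a noreturn function cannot.  */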
13358 
13359 bool
13360 block_may_fallthru (const_tree block)
13361 {
13362   /* This CONST_CAST is okay because expr_last returns its argument
13363      unmodified and we assign it to a const_tree.  */
13364   const_tree stmt = expr_last (CONST_CAST_TREE (block));
13365 
13366   switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
13367     {
13368     case GOTO_EXPR:
13369     case RETURN_EXPR:
13370       /* Easy cases.  If the last statement of the block implies
13371 	 control transfer, then we can't fall through.  */
13372       return false;
13373 
13374     case SWITCH_EXPR:
13375       /* If there is a default: label or case labels cover all possible
13376 	 SWITCH_COND values, then the SWITCH_EXPR will transfer control
13377 	 to some case label in all cases and all we care is whether the
13378 	 SWITCH_BODY falls through.  */
13379       if (SWITCH_ALL_CASES_P (stmt))
13380 	return block_may_fallthru (SWITCH_BODY (stmt));
13381       return true;
13382 
13383     case COND_EXPR:
13384       if (block_may_fallthru (COND_EXPR_THEN (stmt)))
13385 	return true;
13386       return block_may_fallthru (COND_EXPR_ELSE (stmt));
13387 
13388     case BIND_EXPR:
13389       return block_may_fallthru (BIND_EXPR_BODY (stmt));
13390 
13391     case TRY_CATCH_EXPR:
13392       return try_catch_may_fallthru (stmt);
13393 
13394     case TRY_FINALLY_EXPR:
13395       /* The finally clause is always executed after the try clause,
13396 	 so if it does not fall through, then the try-finally will not
13397 	 fall through.  Otherwise, if the try clause does not fall
13398 	 through, then when the finally clause falls through it will
13399 	 resume execution wherever the try clause was going.  So the
13400 	 whole try-finally will only fall through if both the try
13401 	 clause and the finally clause fall through.  */
13402       return (block_may_fallthru (TREE_OPERAND (stmt, 0))
13403 	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));
13404 
13405     case EH_ELSE_EXPR:
13406       return block_may_fallthru (TREE_OPERAND (stmt, 0));
13407 
13408     case MODIFY_EXPR:
13409       if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
13410 	stmt = TREE_OPERAND (stmt, 1);
13411       else
13412 	return true;
13413       /* FALLTHRU */
13414 
13415     case CALL_EXPR:
13416       /* Functions that do not return do not fall through.  */
13417       return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
13418 
13419     case CLEANUP_POINT_EXPR:
13420       return block_may_fallthru (TREE_OPERAND (stmt, 0));
13421 
13422     case TARGET_EXPR:
13423       return block_may_fallthru (TREE_OPERAND (stmt, 1));
13424 
13425     case ERROR_MARK:
13426       return true;
13427 
13428     default:
13429       return lang_hooks.block_may_fallthru (stmt);
13430     }
13431 }
13432 
13433 /* True if we are using EH to handle cleanups.  */
13434 static bool using_eh_for_cleanups_flag = false;
13435 
13436 /* This routine is called from front ends to indicate eh should be used for
13437    cleanups.  */
13438 void
13439 using_eh_for_cleanups (void)
13440 {
13441   using_eh_for_cleanups_flag = true;
13442 }
13443 
13444 /* Query whether EH is used for cleanups.  */
13445 bool
13446 using_eh_for_cleanups_p (void)
13447 {
13448   return using_eh_for_cleanups_flag;
13449 }
13450 
13451 /* Wrapper for tree_code_name to ensure that the tree code is valid.  */
13452 const char *
13453 get_tree_code_name (enum tree_code code)
13454 {
13455   const char *invalid = "<invalid tree code>";
13456 
13457   /* The tree_code enum promotes to signed, but we could be getting
13458      invalid values, so force an unsigned comparison.  */
13459   if (unsigned (code) >= MAX_TREE_CODES)
13460     {
13461       if (code == 0xa5a5)
13462 	return "ggc_freed";
13463       return invalid;
13464     }
13465 
13466   return tree_code_name[code];
13467 }
13468 
13469 /* Drops the TREE_OVERFLOW flag from T.  */
13470 
13471 tree
13472 drop_tree_overflow (tree t)
13473 {
13474   gcc_checking_assert (TREE_OVERFLOW (t));
13475 
13476   /* For tree codes with a sharing machinery re-build the result.  */
13477   if (poly_int_tree_p (t))
13478     return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
13479 
13480   /* For VECTOR_CST, remove the overflow bits from the encoded elements
13481      and canonicalize the result.  */
13482   if (TREE_CODE (t) == VECTOR_CST)
13483     {
13484       tree_vector_builder builder;
13485       builder.new_unary_operation (TREE_TYPE (t), t, true);
13486       unsigned int count = builder.encoded_nelts ();
13487       for (unsigned int i = 0; i < count; ++i)
13488 	{
13489 	  tree elt = VECTOR_CST_ELT (t, i);
13490 	  if (TREE_OVERFLOW (elt))
13491 	    elt = drop_tree_overflow (elt);
13492 	  builder.quick_push (elt);
13493 	}
13494       return builder.build ();
13495     }
13496 
13497   /* Otherwise, as all tcc_constants are possibly shared, copy the node
13498      and drop the flag.  */
13499   t = copy_node (t);
13500   TREE_OVERFLOW (t) = 0;
13501 
13502   /* For constants that contain nested constants, drop the flag
13503      from those as well.  */
13504   if (TREE_CODE (t) == COMPLEX_CST)
13505     {
13506       if (TREE_OVERFLOW (TREE_REALPART (t)))
13507 	TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
13508       if (TREE_OVERFLOW (TREE_IMAGPART (t)))
13509 	TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
13510     }
13511 
13512   return t;
13513 }
13514 
13515 /* Given a memory reference expression T, return its base address.
13516    The base address of a memory reference expression is the main
13517    object being referenced.  For instance, the base address for
13518    'array[i].fld[j]' is 'array'.  You can think of this as stripping
13519    away the offset part from a memory address.
13520 
13521    This function calls handled_component_p to strip away all the inner
13522    parts of the memory reference until it reaches the base object.  */
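/* For instance, for a reference like 'MEM[(struct S *)&a + 8]' the base
   address is the declaration 'a' itself, because the ADDR_EXPR inside the
   MEM_REF is looked through as well.  */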
13523 
13524 tree
13525 get_base_address (tree t)
13526 {
13527   while (handled_component_p (t))
13528     t = TREE_OPERAND (t, 0);
13529 
13530   if ((TREE_CODE (t) == MEM_REF
13531        || TREE_CODE (t) == TARGET_MEM_REF)
13532       && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
13533     t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
13534 
13535   /* ???  Either the alias oracle or all callers need to properly deal
13536      with WITH_SIZE_EXPRs before we can look through those.  */
13537   if (TREE_CODE (t) == WITH_SIZE_EXPR)
13538     return NULL_TREE;
13539 
13540   return t;
13541 }
13542 
13543 /* Return a tree of sizetype representing the size, in bytes, of the element
13544    of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
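/* For instance, for 'a[i]' where 'a' has type 'int[10]' and no explicit
   size operand is recorded in the ARRAY_REF, this returns TYPE_SIZE_UNIT
   of 'int', e.g. 4 on typical targets.  */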
13545 
13546 tree
13547 array_ref_element_size (tree exp)
13548 {
13549   tree aligned_size = TREE_OPERAND (exp, 3);
13550   tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
13551   location_t loc = EXPR_LOCATION (exp);
13552 
13553   /* If a size was specified in the ARRAY_REF, it's the size measured
13554      in alignment units of the element type.  So multiply by that value.  */
13555   if (aligned_size)
13556     {
13557       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13558 	 sizetype from another type of the same width and signedness.  */
13559       if (TREE_TYPE (aligned_size) != sizetype)
13560 	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
13561       return size_binop_loc (loc, MULT_EXPR, aligned_size,
13562 			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
13563     }
13564 
13565   /* Otherwise, take the size from that of the element type.  Substitute
13566      any PLACEHOLDER_EXPR that we have.  */
13567   else
13568     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
13569 }
13570 
13571 /* Return a tree representing the lower bound of the array mentioned in
13572    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
13573 
13574 tree
13575 array_ref_low_bound (tree exp)
13576 {
13577   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13578 
13579   /* If a lower bound is specified in EXP, use it.  */
13580   if (TREE_OPERAND (exp, 2))
13581     return TREE_OPERAND (exp, 2);
13582 
13583   /* Otherwise, if there is a domain type and it has a lower bound, use it,
13584      substituting for a PLACEHOLDER_EXPR as needed.  */
13585   if (domain_type && TYPE_MIN_VALUE (domain_type))
13586     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
13587 
13588   /* Otherwise, return a zero of the appropriate type.  */
13589   tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
13590   return (idxtype == error_mark_node
13591 	  ? integer_zero_node : build_int_cst (idxtype, 0));
13592 }
13593 
13594 /* Return a tree representing the upper bound of the array mentioned in
13595    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
13596 
13597 tree
13598 array_ref_up_bound (tree exp)
13599 {
13600   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13601 
13602   /* If there is a domain type and it has an upper bound, use it, substituting
13603      for a PLACEHOLDER_EXPR as needed.  */
13604   if (domain_type && TYPE_MAX_VALUE (domain_type))
13605     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
13606 
13607   /* Otherwise fail.  */
13608   return NULL_TREE;
13609 }
13610 
13611 /* Returns true if REF is an array reference, component reference,
13612    or memory reference to an array at the end of a structure.
13613    If this is the case, the array may be allocated larger
13614    than its upper bound implies.  */
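/* For instance, given 'struct S { int n; char data[1]; } *p;' the reference
   'p->data[i]' refers to an array at the end of a structure, so the object
   may have been allocated with more than one trailing element.  */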
13615 
13616 bool
13617 array_at_struct_end_p (tree ref)
13618 {
13619   tree atype;
13620 
13621   if (TREE_CODE (ref) == ARRAY_REF
13622       || TREE_CODE (ref) == ARRAY_RANGE_REF)
13623     {
13624       atype = TREE_TYPE (TREE_OPERAND (ref, 0));
13625       ref = TREE_OPERAND (ref, 0);
13626     }
13627   else if (TREE_CODE (ref) == COMPONENT_REF
13628 	   && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
13629     atype = TREE_TYPE (TREE_OPERAND (ref, 1));
13630   else if (TREE_CODE (ref) == MEM_REF)
13631     {
13632       tree arg = TREE_OPERAND (ref, 0);
13633       if (TREE_CODE (arg) == ADDR_EXPR)
13634 	arg = TREE_OPERAND (arg, 0);
13635       tree argtype = TREE_TYPE (arg);
13636       if (TREE_CODE (argtype) == RECORD_TYPE)
13637 	{
13638 	  if (tree fld = last_field (argtype))
13639 	    {
13640 	      atype = TREE_TYPE (fld);
13641 	      if (TREE_CODE (atype) != ARRAY_TYPE)
13642 		return false;
13643 	      if (VAR_P (arg) && DECL_SIZE (fld))
13644 		return false;
13645 	    }
13646 	  else
13647 	    return false;
13648 	}
13649       else
13650 	return false;
13651     }
13652   else
13653     return false;
13654 
13655   if (TREE_CODE (ref) == STRING_CST)
13656     return false;
13657 
13658   tree ref_to_array = ref;
13659   while (handled_component_p (ref))
13660     {
13661       /* If the reference chain contains a component reference to a
13662          non-union type and another field follows it, the reference
13663 	 is not at the end of a structure.  */
13664       if (TREE_CODE (ref) == COMPONENT_REF)
13665 	{
13666 	  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
13667 	    {
13668 	      tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
13669 	      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
13670 		nextf = DECL_CHAIN (nextf);
13671 	      if (nextf)
13672 		return false;
13673 	    }
13674 	}
13675       /* If we have a multi-dimensional array we do not consider
13676          a non-innermost dimension as flex array if the whole
13677 	 multi-dimensional array is at struct end.
13678 	 Same for an array of aggregates with a trailing array
13679 	 member.  */
13680       else if (TREE_CODE (ref) == ARRAY_REF)
13681 	return false;
13682       else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
13683 	;
13684       /* If we view an underlying object as something else, then what we
13685          gathered up to now is all we have to rely on.  */
13686       else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
13687 	break;
13688       else
13689 	gcc_unreachable ();
13690 
13691       ref = TREE_OPERAND (ref, 0);
13692     }
13693 
13694   /* The array now is at struct end.  Treat flexible arrays as
13695      always subject to extension, even into just padding constrained by
13696      an underlying decl.  */
13697   if (! TYPE_SIZE (atype)
13698       || ! TYPE_DOMAIN (atype)
13699       || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13700     return true;
13701 
13702   if (TREE_CODE (ref) == MEM_REF
13703       && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
13704     ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
13705 
13706   /* If the reference is based on a declared entity, the size of the array
13707      is constrained by its given domain.  (Do not trust commons PR/69368).  */
13708   if (DECL_P (ref)
13709       && !(flag_unconstrained_commons
13710 	   && VAR_P (ref) && DECL_COMMON (ref))
13711       && DECL_SIZE_UNIT (ref)
13712       && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
13713     {
13714       /* Check whether the array domain covers all of the available
13715          padding.  */
13716       poly_int64 offset;
13717       if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
13718 	  || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
13719           || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
13720 	return true;
13721       if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
13722 	return true;
13723 
13724       /* If at least one extra element fits it is a flexarray.  */
13725       if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13726 		     - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
13727 		     + 2)
13728 		    * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
13729 		    wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
13730 	return true;
13731 
13732       return false;
13733     }
13734 
13735   return true;
13736 }
13737 
13738 /* Return a tree representing the offset, in bytes, of the field referenced
13739    by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */
13740 
13741 tree
13742 component_ref_field_offset (tree exp)
13743 {
13744   tree aligned_offset = TREE_OPERAND (exp, 2);
13745   tree field = TREE_OPERAND (exp, 1);
13746   location_t loc = EXPR_LOCATION (exp);
13747 
13748   /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13749      in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
13750      value.  */
13751   if (aligned_offset)
13752     {
13753       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13754 	 sizetype from another type of the same width and signedness.  */
13755       if (TREE_TYPE (aligned_offset) != sizetype)
13756 	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13757       return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13758 			     size_int (DECL_OFFSET_ALIGN (field)
13759 				       / BITS_PER_UNIT));
13760     }
13761 
13762   /* Otherwise, take the offset from that of the field.  Substitute
13763      any PLACEHOLDER_EXPR that we have.  */
13764   else
13765     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13766 }
13767 
13768 /* Given the initializer INIT, return the initializer for the field
13769    DECL if it exists, otherwise null.  Used to obtain the initializer
13770    for a flexible array member and determine its size.  */
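/* For instance, for
     struct S { int n; char a[]; } s = { 3, { 'x', 'y', 'z' } };
   looking up the FIELD_DECL of 'a' in the initializer of 's' yields the
   CONSTRUCTOR holding the three character elements.  */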
13771 
13772 static tree
13773 get_initializer_for (tree init, tree decl)
13774 {
13775   STRIP_NOPS (init);
13776 
13777   tree fld, fld_init;
13778   unsigned HOST_WIDE_INT i;
13779   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
13780     {
13781       if (decl == fld)
13782 	return fld_init;
13783 
13784       if (TREE_CODE (fld) == CONSTRUCTOR)
13785 	{
13786 	  fld_init = get_initializer_for (fld_init, decl);
13787 	  if (fld_init)
13788 	    return fld_init;
13789 	}
13790     }
13791 
13792   return NULL_TREE;
13793 }
13794 
13795 /* Determines the size of the member referenced by the COMPONENT_REF
13796    REF, using its initializer expression if necessary in order to
13797    determine the size of an initialized flexible array member.
13798    If non-null, set *SAM when REF refers to an interior zero-length
13799    array or a trailing one-element array.
13800    Returns the size as sizetype (which might be zero for an object
13801    with an uninitialized flexible array member) or null if the size
13802    cannot be determined.  */
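/* For instance, with
     struct S { int n; char a[]; } s = { 1, { 'x', 'y', 'z' } };
   the size of the reference 's.a' is determined from the initializer and
   is 3, while for an extern declaration of the same type with no
   initializer it cannot be determined and null is returned.  */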
13803 
13804 tree
13805 component_ref_size (tree ref, special_array_member *sam /* = NULL */)
13806 {
13807   gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
13808 
13809   special_array_member sambuf;
13810   if (!sam)
13811     sam = &sambuf;
13812   *sam = special_array_member::none;
13813 
13814   /* The object/argument referenced by the COMPONENT_REF and its type.  */
13815   tree arg = TREE_OPERAND (ref, 0);
13816   tree argtype = TREE_TYPE (arg);
13817   /* The referenced member.  */
13818   tree member = TREE_OPERAND (ref, 1);
13819 
13820   tree memsize = DECL_SIZE_UNIT (member);
13821   if (memsize)
13822     {
13823       tree memtype = TREE_TYPE (member);
13824       if (TREE_CODE (memtype) != ARRAY_TYPE)
13825 	/* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
13826 	   to the type of a class with a virtual base which doesn't
13827 	   reflect the size of the virtual's members (see pr97595).
13828 	   If that's the case fail for now and implement something
13829 	   more robust in the future.  */
13830 	return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
13831 		? memsize : NULL_TREE);
13832 
13833       bool trailing = array_at_struct_end_p (ref);
13834       bool zero_length = integer_zerop (memsize);
13835       if (!trailing && !zero_length)
13836 	/* MEMBER is either an interior array or is an array with
13837 	   more than one element.  */
13838 	return memsize;
13839 
13840       if (zero_length)
13841 	{
13842 	  if (trailing)
13843 	    *sam = special_array_member::trail_0;
13844 	  else
13845 	    {
13846 	      *sam = special_array_member::int_0;
13847 	      memsize = NULL_TREE;
13848 	    }
13849 	}
13850 
13851       if (!zero_length)
13852 	if (tree dom = TYPE_DOMAIN (memtype))
13853 	  if (tree min = TYPE_MIN_VALUE (dom))
13854 	    if (tree max = TYPE_MAX_VALUE (dom))
13855 	      if (TREE_CODE (min) == INTEGER_CST
13856 		  && TREE_CODE (max) == INTEGER_CST)
13857 		{
13858 		  offset_int minidx = wi::to_offset (min);
13859 		  offset_int maxidx = wi::to_offset (max);
13860 		  offset_int neltsm1 = maxidx - minidx;
13861 		  if (neltsm1 > 0)
13862 		    /* MEMBER is an array with more than one element.  */
13863 		    return memsize;
13864 
13865 		  if (neltsm1 == 0)
13866 		    *sam = special_array_member::trail_1;
13867 		}
13868 
13869       /* For a reference to a zero- or one-element array member of a union
13870 	 use the size of the union instead of the size of the member.  */
13871       if (TREE_CODE (argtype) == UNION_TYPE)
13872 	memsize = TYPE_SIZE_UNIT (argtype);
13873     }
13874 
13875   /* MEMBER is either a bona fide flexible array member, or a zero-length
13876      array member, or an array of length one treated as such.  */
13877 
13878   /* If the reference is to a declared object and the member a true
13879      flexible array, try to determine its size from its initializer.  */
13880   poly_int64 baseoff = 0;
13881   tree base = get_addr_base_and_unit_offset (ref, &baseoff);
13882   if (!base || !VAR_P (base))
13883     {
13884       if (*sam != special_array_member::int_0)
13885 	return NULL_TREE;
13886 
13887       if (TREE_CODE (arg) != COMPONENT_REF)
13888 	return NULL_TREE;
13889 
13890       base = arg;
13891       while (TREE_CODE (base) == COMPONENT_REF)
13892 	base = TREE_OPERAND (base, 0);
13893       baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
13894     }
13895 
13896   /* BASE is the declared object of which MEMBER is either a member
13897      or that is cast to ARGTYPE (e.g., a char buffer used to store
13898      an ARGTYPE object).  */
13899   tree basetype = TREE_TYPE (base);
13900 
13901   /* Determine the base type of the referenced object.  If it's
13902      the same as ARGTYPE and MEMBER has a known size, return it.  */
13903   tree bt = basetype;
13904   if (*sam != special_array_member::int_0)
13905     while (TREE_CODE (bt) == ARRAY_TYPE)
13906       bt = TREE_TYPE (bt);
13907   bool typematch = useless_type_conversion_p (argtype, bt);
13908   if (memsize && typematch)
13909     return memsize;
13910 
13911   memsize = NULL_TREE;
13912 
13913   if (typematch)
13914     /* MEMBER is a true flexible array member.  Compute its size from
13915        the initializer of the BASE object if it has one.  */
13916     if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
13917       if (init != error_mark_node)
13918 	{
13919 	  init = get_initializer_for (init, member);
13920 	  if (init)
13921 	    {
13922 	      memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
13923 	      if (tree refsize = TYPE_SIZE_UNIT (argtype))
13924 		{
13925 		  /* Use the larger of the initializer size and the tail
13926 		     padding in the enclosing struct.  */
13927 		  poly_int64 rsz = tree_to_poly_int64 (refsize);
13928 		  rsz -= baseoff;
13929 		  if (known_lt (tree_to_poly_int64 (memsize), rsz))
13930 		    memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
13931 		}
13932 
13933 	      baseoff = 0;
13934 	    }
13935 	}
13936 
13937   if (!memsize)
13938     {
13939       if (typematch)
13940 	{
13941 	  if (DECL_P (base)
13942 	      && DECL_EXTERNAL (base)
13943 	      && bt == basetype
13944 	      && *sam != special_array_member::int_0)
13945 	    /* The size of a flexible array member of an extern struct
13946 	       with no initializer cannot be determined (it's defined
13947 	       in another translation unit and can have an initializer
13948 	       with an arbitrary number of elements).  */
13949 	    return NULL_TREE;
13950 
13951 	  /* Use the size of the base struct or, for interior zero-length
13952 	     arrays, the size of the enclosing type.  */
13953 	  memsize = TYPE_SIZE_UNIT (bt);
13954 	}
13955       else if (DECL_P (base))
13956 	/* Use the size of the BASE object (possibly an array of some
13957 	   other type such as char used to store the struct).  */
13958 	memsize = DECL_SIZE_UNIT (base);
13959       else
13960 	return NULL_TREE;
13961     }
13962 
13963   /* If the flexible array member has a known size use the greater
13964      of it and the tail padding in the enclosing struct.
13965      Otherwise, when the size of the flexible array member is unknown
13966      and the referenced object is not a struct, use the size of its
13967      type when known.  This detects sizes of array buffers when cast
13968      to struct types with flexible array members.  */
13969   if (memsize)
13970     {
13971       poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
13972       if (known_lt (baseoff, memsz64))
13973 	{
13974 	  memsz64 -= baseoff;
13975 	  return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
13976 	}
13977       return size_zero_node;
13978     }
13979 
13980   /* Return "don't know" for an external non-array object since its
13981      flexible array member can be initialized to have any number of
13982      elements.  Otherwise, return zero because the flexible array
13983      member has no elements.  */
13984   return (DECL_P (base)
13985 	  && DECL_EXTERNAL (base)
13986 	  && (!typematch
13987 	      || TREE_CODE (basetype) != ARRAY_TYPE)
13988 	  ? NULL_TREE : size_zero_node);
13989 }
13990 
13991 /* Return the machine mode of T.  For vectors, returns the mode of the
13992    inner type.  The main use case is to feed the result to HONOR_NANS,
13993    avoiding the BLKmode that a direct TYPE_MODE (T) might return.  */
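/* For instance, for a 'vector(4) float' type this returns the mode of
   'float' (e.g. SFmode) rather than the mode of the whole vector, and for
   '_Complex double' it returns the mode of 'double'.  */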
13994 
13995 machine_mode
13996 element_mode (const_tree t)
13997 {
13998   if (!TYPE_P (t))
13999     t = TREE_TYPE (t);
14000   if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
14001     t = TREE_TYPE (t);
14002   return TYPE_MODE (t);
14003 }
14004 
14005 /* Vector types need to re-check the target flags each time we report
14006    the machine mode.  We need to do this because attribute target can
14007    change the result of vector_mode_supported_p and have_regs_of_mode
14008    on a per-function basis.  Thus the TYPE_MODE of a VECTOR_TYPE can
14009    change on a per-function basis.  */
14010 /* ??? Possibly a better solution is to run through all the types
14011    referenced by a function and re-compute the TYPE_MODE once, rather
14012    than make the TYPE_MODE macro call a function.  */
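/* For instance, a vector type whose mode is V4SImode but which is used in
   a function whose target attribute disables the required vector ISA is
   reported with a same-sized scalar integer mode (e.g. TImode) when such a
   mode is available in registers, and with BLKmode otherwise.  */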
14013 
14014 machine_mode
14015 vector_type_mode (const_tree t)
14016 {
14017   machine_mode mode;
14018 
14019   gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
14020 
14021   mode = t->type_common.mode;
14022   if (VECTOR_MODE_P (mode)
14023       && (!targetm.vector_mode_supported_p (mode)
14024 	  || !have_regs_of_mode[mode]))
14025     {
14026       scalar_int_mode innermode;
14027 
14028       /* For integers, try mapping it to a same-sized scalar mode.  */
14029       if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
14030 	{
14031 	  poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
14032 			     * GET_MODE_BITSIZE (innermode));
14033 	  scalar_int_mode mode;
14034 	  if (int_mode_for_size (size, 0).exists (&mode)
14035 	      && have_regs_of_mode[mode])
14036 	    return mode;
14037 	}
14038 
14039       return BLKmode;
14040     }
14041 
14042   return mode;
14043 }
14044 
14045 /* Return the size in bits of each element of vector type TYPE.  */
14046 
14047 unsigned int
14048 vector_element_bits (const_tree type)
14049 {
14050   gcc_checking_assert (VECTOR_TYPE_P (type));
14051   if (VECTOR_BOOLEAN_TYPE_P (type))
14052     return TYPE_PRECISION (TREE_TYPE (type));
14053   return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
14054 }
14055 
14056 /* Calculate the size in bits of each element of vector type TYPE
14057    and return the result as a tree of type bitsizetype.  */
14058 
14059 tree
14060 vector_element_bits_tree (const_tree type)
14061 {
14062   gcc_checking_assert (VECTOR_TYPE_P (type));
14063   if (VECTOR_BOOLEAN_TYPE_P (type))
14064     return bitsize_int (vector_element_bits (type));
14065   return TYPE_SIZE (TREE_TYPE (type));
14066 }
14067 
14068 /* Verify that basic properties of T match TV and thus T can be a variant of
14069    TV.  TV should be the more specified variant (i.e. the main variant).  */
14070 
14071 static bool
14072 verify_type_variant (const_tree t, tree tv)
14073 {
14074   /* Type variant can differ by:
14075 
14076      - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
14077                    ENCODE_QUAL_ADDR_SPACE.
14078      - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
14079        in this case some values may not be set in the variant types
14080        (see TYPE_COMPLETE_P checks).
14081      - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
14082      - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
14083      - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
14084      - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
14085      - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
14086        this is necessary to make it possible to merge types from different TUs
14087      - arrays, pointers and references may have TREE_TYPE that is a variant
14088        of TREE_TYPE of their main variants.
14089      - aggregates may have new TYPE_FIELDS list that list variants of
14090        the main variant TYPE_FIELDS.
14091      - vector types may differ by TYPE_VECTOR_OPAQUE
14092    */
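  /* For instance, 'const int' is a variant of 'int' differing only in
     TYPE_QUALS, and the type introduced by 'typedef int T;' is a variant
     differing only by its TYPE_NAME.  */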
14093 
14094   /* Convenience macro for matching individual fields.  */
14095 #define verify_variant_match(flag)					    \
14096   do {									    \
14097     if (flag (tv) != flag (t))						    \
14098       {									    \
14099 	error ("type variant differs by %s", #flag);			    \
14100 	debug_tree (tv);						    \
14101 	return false;							    \
14102       }									    \
14103   } while (false)
14104 
14105   /* tree_base checks.  */
14106 
14107   verify_variant_match (TREE_CODE);
14108   /* FIXME: Ada builds non-artificial variants of artificial types.  */
14109   if (TYPE_ARTIFICIAL (tv) && 0)
14110     verify_variant_match (TYPE_ARTIFICIAL);
14111   if (POINTER_TYPE_P (tv))
14112     verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
14113   /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds.  */
14114   verify_variant_match (TYPE_UNSIGNED);
14115   verify_variant_match (TYPE_PACKED);
14116   if (TREE_CODE (t) == REFERENCE_TYPE)
14117     verify_variant_match (TYPE_REF_IS_RVALUE);
14118   if (AGGREGATE_TYPE_P (t))
14119     verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
14120   else
14121     verify_variant_match (TYPE_SATURATING);
14122   /* FIXME: This check triggers during the libstdc++ build.  */
14123   if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
14124     verify_variant_match (TYPE_FINAL_P);
14125 
14126   /* tree_type_common checks.  */
14127 
14128   if (COMPLETE_TYPE_P (t))
14129     {
14130       verify_variant_match (TYPE_MODE);
14131       if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
14132 	  && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
14133 	verify_variant_match (TYPE_SIZE);
14134       if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
14135 	  && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
14136 	  && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
14137 	{
14138 	  gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
14139 					TYPE_SIZE_UNIT (tv), 0));
14140 	  error ("type variant has different %<TYPE_SIZE_UNIT%>");
14141 	  debug_tree (tv);
14142 	  error ("type variant%'s %<TYPE_SIZE_UNIT%>");
14143 	  debug_tree (TYPE_SIZE_UNIT (tv));
14144 	  error ("type%'s %<TYPE_SIZE_UNIT%>");
14145 	  debug_tree (TYPE_SIZE_UNIT (t));
14146 	  return false;
14147 	}
14148       verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
14149     }
14150   verify_variant_match (TYPE_PRECISION);
14151   if (RECORD_OR_UNION_TYPE_P (t))
14152     verify_variant_match (TYPE_TRANSPARENT_AGGR);
14153   else if (TREE_CODE (t) == ARRAY_TYPE)
14154     verify_variant_match (TYPE_NONALIASED_COMPONENT);
14155   /* During LTO we merge variant lists from different translation units
14156      that may differ by TYPE_CONTEXT, which in turn may point
14157      to TRANSLATION_UNIT_DECL.
14158      Ada also builds variants of types with different TYPE_CONTEXT.   */
14159   if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
14160     verify_variant_match (TYPE_CONTEXT);
14161   if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
14162     verify_variant_match (TYPE_STRING_FLAG);
14163   if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
14164     verify_variant_match (TYPE_CXX_ODR_P);
14165   if (TYPE_ALIAS_SET_KNOWN_P (t))
14166     {
14167       error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
14168       debug_tree (tv);
14169       return false;
14170     }
14171 
14172   /* tree_type_non_common checks.  */
14173 
14174   /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14175      and dangle the pointer from time to time.  */
14176   if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
14177       && (in_lto_p || !TYPE_VFIELD (tv)
14178 	  || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
14179     {
14180       error ("type variant has different %<TYPE_VFIELD%>");
14181       debug_tree (tv);
14182       return false;
14183     }
14184   if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
14185        || TREE_CODE (t) == INTEGER_TYPE
14186        || TREE_CODE (t) == BOOLEAN_TYPE
14187        || TREE_CODE (t) == REAL_TYPE
14188        || TREE_CODE (t) == FIXED_POINT_TYPE)
14189     {
14190       verify_variant_match (TYPE_MAX_VALUE);
14191       verify_variant_match (TYPE_MIN_VALUE);
14192     }
14193   if (TREE_CODE (t) == METHOD_TYPE)
14194     verify_variant_match (TYPE_METHOD_BASETYPE);
14195   if (TREE_CODE (t) == OFFSET_TYPE)
14196     verify_variant_match (TYPE_OFFSET_BASETYPE);
14197   if (TREE_CODE (t) == ARRAY_TYPE)
14198     verify_variant_match (TYPE_ARRAY_MAX_SIZE);
14199   /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
14200      or even type's main variant.  This is needed to make bootstrap pass
14201      and the bug seems new in GCC 5.
14202      C++ FE should be updated to make this consistent and we should check
14203      that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
14204      is a match with main variant.
14205 
14206      Also disable the check for Java for now because of a parser hack that
14207      builds a dummy BINFO first and then sometimes replaces it with the real
14208      BINFO in some of the copies.  */
14209   if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
14210       && TYPE_BINFO (t) != TYPE_BINFO (tv)
14211       /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
14212 	 Since there is no cheap way to tell a C++ type from a Java type without
14213 	 LTO, do the checking at LTO time only.  */
14214       && (in_lto_p && odr_type_p (t)))
14215     {
14216       error ("type variant has different %<TYPE_BINFO%>");
14217       debug_tree (tv);
14218       error ("type variant%'s %<TYPE_BINFO%>");
14219       debug_tree (TYPE_BINFO (tv));
14220       error ("type%'s %<TYPE_BINFO%>");
14221       debug_tree (TYPE_BINFO (t));
14222       return false;
14223     }
14224 
14225   /* Check various uses of TYPE_VALUES_RAW.  */
14226   if (TREE_CODE (t) == ENUMERAL_TYPE
14227       && TYPE_VALUES (t))
14228     verify_variant_match (TYPE_VALUES);
14229   else if (TREE_CODE (t) == ARRAY_TYPE)
14230     verify_variant_match (TYPE_DOMAIN);
14231   /* Permit incomplete variants of complete type.  While FEs may complete
14232      all variants, this does not happen for C++ templates in all cases.  */
14233   else if (RECORD_OR_UNION_TYPE_P (t)
14234 	   && COMPLETE_TYPE_P (t)
14235 	   && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
14236     {
14237       tree f1, f2;
14238 
14239       /* Fortran builds qualified variants as new records with items of
14240 	 qualified type.  Verify that they look the same.  */
14241       for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
14242 	   f1 && f2;
14243 	   f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
14244 	if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
14245 	    || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
14246 		 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
14247 		/* FIXME: gfc_nonrestricted_type builds all types as variants
14248 		   with exception of pointer types.  It deeply copies the type
14249 		   which means that we may end up with a variant type
14250 		   referring non-variant pointer.  We may change it to
14251 		   produce types as variants, too, like
14252 		   objc_get_protocol_qualified_type does.  */
14253 		&& !POINTER_TYPE_P (TREE_TYPE (f1)))
14254 	    || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
14255 	    || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
14256 	  break;
14257       if (f1 || f2)
14258 	{
14259 	  error ("type variant has different %<TYPE_FIELDS%>");
14260 	  debug_tree (tv);
14261 	  error ("first mismatch is field");
14262 	  debug_tree (f1);
14263 	  error ("and field");
14264 	  debug_tree (f2);
14265           return false;
14266 	}
14267     }
14268   else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
14269     verify_variant_match (TYPE_ARG_TYPES);
14270   /* For C++ the qualified variant of array type is really an array type
14271      of qualified TREE_TYPE.
14272      objc builds variants of pointer where pointer to type is a variant, too
14273      in objc_get_protocol_qualified_type.  */
14274   if (TREE_TYPE (t) != TREE_TYPE (tv)
14275       && ((TREE_CODE (t) != ARRAY_TYPE
14276 	   && !POINTER_TYPE_P (t))
14277 	  || TYPE_MAIN_VARIANT (TREE_TYPE (t))
14278 	     != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
14279     {
14280       error ("type variant has different %<TREE_TYPE%>");
14281       debug_tree (tv);
14282       error ("type variant%'s %<TREE_TYPE%>");
14283       debug_tree (TREE_TYPE (tv));
14284       error ("type%'s %<TREE_TYPE%>");
14285       debug_tree (TREE_TYPE (t));
14286       return false;
14287     }
14288   if (type_with_alias_set_p (t)
14289       && !gimple_canonical_types_compatible_p (t, tv, false))
14290     {
14291       error ("type is not compatible with its variant");
14292       debug_tree (tv);
14293       error ("type variant%'s %<TREE_TYPE%>");
14294       debug_tree (TREE_TYPE (tv));
14295       error ("type%'s %<TREE_TYPE%>");
14296       debug_tree (TREE_TYPE (t));
14297       return false;
14298     }
14299   return true;
14300 #undef verify_variant_match
14301 }
14302 
14303 
14304 /* The TYPE_CANONICAL merging machinery.  It should closely resemble
14305    the middle-end types_compatible_p function.  It needs to avoid
14306    claiming types are different for types that should be treated
14307    the same with respect to TBAA.  Canonical types are also used
14308    for IL consistency checks via the useless_type_conversion_p
14309    predicate which does not handle all type kinds itself but falls
14310    back to pointer-comparison of TYPE_CANONICAL for aggregates
14311    for example.  */
14312 
14313 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
14314    type calculation because we need to allow inter-operability between signed
14315    and unsigned variants.  */
14316 
14317 bool
14318 type_with_interoperable_signedness (const_tree type)
14319 {
14320   /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
14321      signed char and unsigned char.  Similarly the Fortran FE builds
14322      C_SIZE_T as a signed type, while C defines it unsigned.  */
14323 
14324   return tree_code_for_canonical_type_merging (TREE_CODE (type))
14325 	   == INTEGER_TYPE
14326          && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
14327 	     || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
14328 }
14329 
14330 /* Return true iff T1 and T2 are structurally identical as far as
14331    TBAA is concerned.
14332    This function is used both by lto.c canonical type merging and by the
14333    verifier.  If TRUST_TYPE_CANONICAL we do not look into structure of types
14334    that have TYPE_CANONICAL defined and assume them equivalent.  This is useful
14335    only for LTO because only in these cases does TYPE_CANONICAL equivalence
14336    correspond to the one defined by gimple_canonical_types_compatible_p.  */
14337 
14338 bool
14339 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
14340 				     bool trust_type_canonical)
14341 {
14342   /* Type variants should be same as the main variant.  When not doing sanity
14343      checking to verify this fact, go to main variants and save some work.  */
14344   if (trust_type_canonical)
14345     {
14346       t1 = TYPE_MAIN_VARIANT (t1);
14347       t2 = TYPE_MAIN_VARIANT (t2);
14348     }
14349 
14350   /* Check first for the obvious case of pointer identity.  */
14351   if (t1 == t2)
14352     return true;
14353 
14354   /* Check that we have two types to compare.  */
14355   if (t1 == NULL_TREE || t2 == NULL_TREE)
14356     return false;
14357 
14358   /* We consider complete types always compatible with incomplete types.
14359      This does not make sense for canonical type calculation and thus we
14360      need to ensure that we are never called on it.
14361 
14362      FIXME: For more correctness the function probably should have three modes
14363 	1) mode assuming that types are complete, matching their structure
14364 	2) mode allowing incomplete types but producing equivalence classes
14365 	   and thus ignoring all info from complete types
14366 	3) mode allowing incomplete types to match complete but checking
14367 	   compatibility between complete types.
14368 
14369      1 and 2 can be used for canonical type calculation. 3 is the real
14370      definition of type compatibility that can be used i.e. for warnings during
14371      declaration merging.  */
14372 
14373   gcc_assert (!trust_type_canonical
14374 	      || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
14375 
14376   /* If the types have been previously registered and found equal
14377      they still are.  */
14378 
14379   if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
14380       && trust_type_canonical)
14381     {
14382       /* Do not use TYPE_CANONICAL of pointer types.  For LTO streamed types
14383 	 they are always NULL, but they are set to non-NULL for types
14384 	 constructed by build_pointer_type and variants.  In this case the
14385 	 TYPE_CANONICAL is more fine-grained than the equivalence we test (where
14386 	 all pointers are considered equal).  Be sure not to return false
14387 	 negatives.  */
14388       gcc_checking_assert (canonical_type_used_p (t1)
14389 			   && canonical_type_used_p (t2));
14390       return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
14391     }
14392 
14393   /* For types where we do ODR based TBAA the canonical type is always
14394      set correctly, so we know that types are different if their
14395      canonical types do not match.  */
14396   if (trust_type_canonical
14397       && (odr_type_p (t1) && odr_based_tbaa_p (t1))
14398 	  != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
14399     return false;
14400 
14401   /* Can't be the same type if the types don't have the same code.  */
14402   enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
14403   if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
14404     return false;
14405 
14406   /* Qualifiers do not matter for canonical type comparison purposes.  */
14407 
14408   /* Void types and nullptr types are always the same.  */
14409   if (TREE_CODE (t1) == VOID_TYPE
14410       || TREE_CODE (t1) == NULLPTR_TYPE)
14411     return true;
14412 
14413   /* Can't be the same type if they have different mode.  */
14414   if (TYPE_MODE (t1) != TYPE_MODE (t2))
14415     return false;
14416 
14417   /* Non-aggregate types can be handled cheaply.  */
14418   if (INTEGRAL_TYPE_P (t1)
14419       || SCALAR_FLOAT_TYPE_P (t1)
14420       || FIXED_POINT_TYPE_P (t1)
14421       || TREE_CODE (t1) == VECTOR_TYPE
14422       || TREE_CODE (t1) == COMPLEX_TYPE
14423       || TREE_CODE (t1) == OFFSET_TYPE
14424       || POINTER_TYPE_P (t1))
14425     {
14426       /* Can't be the same type if they have different precision.  */
14427       if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
14428 	return false;
14429 
14430       /* In some cases the signed and unsigned types are required to be
14431 	 inter-operable.  */
14432       if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
14433 	  && !type_with_interoperable_signedness (t1))
14434 	return false;
14435 
14436       /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
14437 	 interoperable with "signed char".  Unless all frontends are revisited
14438 	 to agree on these types, we must ignore the flag completely.  */
14439 
14440       /* The Fortran standard defines the C_PTR type, which is compatible with
14441 	 every C pointer.  For this reason we need to glob all pointers into one.
14442 	 Still, pointers in different address spaces are not compatible.  */
14443       if (POINTER_TYPE_P (t1))
14444 	{
14445 	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
14446 	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
14447 	    return false;
14448 	}
14449 
14450       /* Tail-recurse to components.  */
14451       if (TREE_CODE (t1) == VECTOR_TYPE
14452 	  || TREE_CODE (t1) == COMPLEX_TYPE)
14453 	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
14454 						    TREE_TYPE (t2),
14455 						    trust_type_canonical);
14456 
14457       return true;
14458     }
14459 
14460   /* Do type-specific comparisons.  */
14461   switch (TREE_CODE (t1))
14462     {
14463     case ARRAY_TYPE:
14464       /* Array types are the same if the element types are the same and
14465 	 the number of elements is the same.  */
14466       if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14467 						trust_type_canonical)
14468 	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
14469 	  || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
14470 	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
14471 	return false;
14472       else
14473 	{
14474 	  tree i1 = TYPE_DOMAIN (t1);
14475 	  tree i2 = TYPE_DOMAIN (t2);
14476 
14477 	  /* For an incomplete external array, the type domain can be
14478  	     NULL_TREE.  Check this condition also.  */
14479 	  if (i1 == NULL_TREE && i2 == NULL_TREE)
14480 	    return true;
14481 	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
14482 	    return false;
14483 	  else
14484 	    {
14485 	      tree min1 = TYPE_MIN_VALUE (i1);
14486 	      tree min2 = TYPE_MIN_VALUE (i2);
14487 	      tree max1 = TYPE_MAX_VALUE (i1);
14488 	      tree max2 = TYPE_MAX_VALUE (i2);
14489 
14490 	      /* The minimum/maximum values have to be the same.  */
14491 	      if ((min1 == min2
14492 		   || (min1 && min2
14493 		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
14494 			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
14495 		           || operand_equal_p (min1, min2, 0))))
14496 		  && (max1 == max2
14497 		      || (max1 && max2
14498 			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
14499 			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
14500 			      || operand_equal_p (max1, max2, 0)))))
14501 		return true;
14502 	      else
14503 		return false;
14504 	    }
14505 	}
14506 
14507     case METHOD_TYPE:
14508     case FUNCTION_TYPE:
14509       /* Function types are the same if the return type and argument types
14510 	 are the same.  */
14511       if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14512 						trust_type_canonical))
14513 	return false;
14514 
14515       if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
14516 	return true;
14517       else
14518 	{
14519 	  tree parms1, parms2;
14520 
14521 	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
14522 	       parms1 && parms2;
14523 	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
14524 	    {
14525 	      if (!gimple_canonical_types_compatible_p
14526 		     (TREE_VALUE (parms1), TREE_VALUE (parms2),
14527 		      trust_type_canonical))
14528 		return false;
14529 	    }
14530 
14531 	  if (parms1 || parms2)
14532 	    return false;
14533 
14534 	  return true;
14535 	}
14536 
14537     case RECORD_TYPE:
14538     case UNION_TYPE:
14539     case QUAL_UNION_TYPE:
14540       {
14541 	tree f1, f2;
14542 
14543 	/* Don't try to compare variants of an incomplete type, before
14544 	   TYPE_FIELDS has been copied around.  */
14545 	if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
14546 	  return true;
14547 
14548 
14549 	if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
14550 	  return false;
14551 
14552 	/* For aggregate types, all the fields must be the same.  */
14553 	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
14554 	     f1 || f2;
14555 	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
14556 	  {
14557 	    /* Skip non-fields and zero-sized fields.  */
14558 	    while (f1 && (TREE_CODE (f1) != FIELD_DECL
14559 			  || (DECL_SIZE (f1)
14560 			      && integer_zerop (DECL_SIZE (f1)))))
14561 	      f1 = TREE_CHAIN (f1);
14562 	    while (f2 && (TREE_CODE (f2) != FIELD_DECL
14563 			  || (DECL_SIZE (f2)
14564 			      && integer_zerop (DECL_SIZE (f2)))))
14565 	      f2 = TREE_CHAIN (f2);
14566 	    if (!f1 || !f2)
14567 	      break;
14568 	    /* The fields must have the same name, offset and type.  */
14569 	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
14570 		|| !gimple_compare_field_offset (f1, f2)
14571 		|| !gimple_canonical_types_compatible_p
14572 		      (TREE_TYPE (f1), TREE_TYPE (f2),
14573 		       trust_type_canonical))
14574 	      return false;
14575 	  }
14576 
14577 	/* If one aggregate has more fields than the other, they
14578 	   are not the same.  */
14579 	if (f1 || f2)
14580 	  return false;
14581 
14582 	return true;
14583       }
14584 
14585     default:
14586       /* Consider all types with language specific trees in them mutually
14587 	 compatible.  This is executed only from verify_type and false
14588          positives can be tolerated.  */
14589       gcc_assert (!in_lto_p);
14590       return true;
14591     }
14592 }
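
/* A hedged illustration of the equivalence the function above computes (the
   exact classes also depend on TYPE_MODE and on TRUST_TYPE_CANONICAL, so
   this is a sketch rather than a specification):

     int * vs. struct S *	  compatible: all pointers are globbed
				  together, except across address spaces;
     int vs. unsigned int	  compatible only when
				  type_with_interoperable_signedness holds;
     struct {int a;} vs.
     struct {int a; short b;}	  incompatible: the field lists differ.  */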
14593 
14594 /* Verify type T.  */
14595 
14596 void
14597 verify_type (const_tree t)
14598 {
14599   bool error_found = false;
14600   tree mv = TYPE_MAIN_VARIANT (t);
14601   if (!mv)
14602     {
14603       error ("main variant is not defined");
14604       error_found = true;
14605     }
14606   else if (mv != TYPE_MAIN_VARIANT (mv))
14607     {
14608       error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
14609       debug_tree (mv);
14610       error_found = true;
14611     }
14612   else if (t != mv && !verify_type_variant (t, mv))
14613     error_found = true;
14614 
14615   tree ct = TYPE_CANONICAL (t);
14616   if (!ct)
14617     ;
14618   else if (TYPE_CANONICAL (ct) != ct)
14619     {
14620       error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
14621       debug_tree (ct);
14622       error_found = true;
14623     }
14624   /* Method and function types cannot be used to address memory and thus
14625      TYPE_CANONICAL really matters only for determining useless conversions.
14626 
14627      FIXME: the C++ FE produces declarations of builtin functions that are not
14628      compatible with main variants.  */
14629   else if (TREE_CODE (t) == FUNCTION_TYPE)
14630     ;
14631   else if (t != ct
14632 	   /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14633 	      with variably sized arrays because their sizes were possibly
14634 	      gimplified to different variables.  */
14635 	   && !variably_modified_type_p (ct, NULL)
14636 	   && !gimple_canonical_types_compatible_p (t, ct, false)
14637 	   && COMPLETE_TYPE_P (t))
14638     {
14639       error ("%<TYPE_CANONICAL%> is not compatible");
14640       debug_tree (ct);
14641       error_found = true;
14642     }
14643 
14644   if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
14645       && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
14646     {
14647       error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14648       debug_tree (ct);
14649       error_found = true;
14650     }
14651   if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14652    {
14653       error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14654       debug_tree (ct);
14655       debug_tree (TYPE_MAIN_VARIANT (ct));
14656       error_found = true;
14657    }
14658 
14659 
14660   /* Check various uses of TYPE_MIN_VALUE_RAW.  */
14661   if (RECORD_OR_UNION_TYPE_P (t))
14662     {
14663       /* FIXME: the C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14664 	 and dangles the pointer from time to time.  */
14665       if (TYPE_VFIELD (t)
14666 	  && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14667 	  && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14668 	{
14669 	  error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14670 	  debug_tree (TYPE_VFIELD (t));
14671 	  error_found = true;
14672 	}
14673     }
14674   else if (TREE_CODE (t) == POINTER_TYPE)
14675     {
14676       if (TYPE_NEXT_PTR_TO (t)
14677 	  && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14678 	{
14679 	  error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14680 	  debug_tree (TYPE_NEXT_PTR_TO (t));
14681 	  error_found = true;
14682 	}
14683     }
14684   else if (TREE_CODE (t) == REFERENCE_TYPE)
14685     {
14686       if (TYPE_NEXT_REF_TO (t)
14687 	  && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14688 	{
14689 	  error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14690 	  debug_tree (TYPE_NEXT_REF_TO (t));
14691 	  error_found = true;
14692 	}
14693     }
14694   else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14695 	   || TREE_CODE (t) == FIXED_POINT_TYPE)
14696     {
14697       /* FIXME: The following check should pass:
14698 	  useless_type_conversion_p (const_cast <tree> (t),
14699 				     TREE_TYPE (TYPE_MIN_VALUE (t)))
14700 	 but does not for C sizetypes in LTO.  */
14701     }
14702 
14703   /* Check various uses of TYPE_MAXVAL_RAW.  */
14704   if (RECORD_OR_UNION_TYPE_P (t))
14705     {
14706       if (!TYPE_BINFO (t))
14707 	;
14708       else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14709 	{
14710 	  error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14711 	  debug_tree (TYPE_BINFO (t));
14712 	  error_found = true;
14713 	}
14714       else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14715 	{
14716 	  error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14717 	  debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14718 	  error_found = true;
14719 	}
14720     }
14721   else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14722     {
14723       if (TYPE_METHOD_BASETYPE (t)
14724 	  && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14725 	  && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14726 	{
14727 	  error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14728 	  debug_tree (TYPE_METHOD_BASETYPE (t));
14729 	  error_found = true;
14730 	}
14731     }
14732   else if (TREE_CODE (t) == OFFSET_TYPE)
14733     {
14734       if (TYPE_OFFSET_BASETYPE (t)
14735 	  && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14736 	  && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14737 	{
14738 	  error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14739 	  debug_tree (TYPE_OFFSET_BASETYPE (t));
14740 	  error_found = true;
14741 	}
14742     }
14743   else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14744 	   || TREE_CODE (t) == FIXED_POINT_TYPE)
14745     {
14746       /* FIXME: The following check should pass:
14747 	  useless_type_conversion_p (const_cast <tree> (t),
14748 				     TREE_TYPE (TYPE_MAX_VALUE (t)))
14749 	 but does not for C sizetypes in LTO.  */
14750     }
14751   else if (TREE_CODE (t) == ARRAY_TYPE)
14752     {
14753       if (TYPE_ARRAY_MAX_SIZE (t)
14754 	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14755         {
14756 	  error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14757 	  debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14758 	  error_found = true;
14759         }
14760     }
14761   else if (TYPE_MAX_VALUE_RAW (t))
14762     {
14763       error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14764       debug_tree (TYPE_MAX_VALUE_RAW (t));
14765       error_found = true;
14766     }
14767 
14768   if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14769     {
14770       error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14771       debug_tree (TYPE_LANG_SLOT_1 (t));
14772       error_found = true;
14773     }
14774 
14775   /* Check various uses of TYPE_VALUES_RAW.  */
14776   if (TREE_CODE (t) == ENUMERAL_TYPE)
14777     for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14778       {
14779 	tree value = TREE_VALUE (l);
14780 	tree name = TREE_PURPOSE (l);
14781 
14782 	/* The C FE produces INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
14783 	   CONST_DECL of ENUMERAL_TYPE.  */
14784 	if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14785 	  {
14786 	    error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14787 	    debug_tree (value);
14788 	    debug_tree (name);
14789 	    error_found = true;
14790 	  }
14791 	if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14792 	    && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14793 	  {
14794 	    error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14795 		   "to the enum");
14796 	    debug_tree (value);
14797 	    debug_tree (name);
14798 	    error_found = true;
14799 	  }
14800 	if (TREE_CODE (name) != IDENTIFIER_NODE)
14801 	  {
14802 	    error ("enum value name is not %<IDENTIFIER_NODE%>");
14803 	    debug_tree (value);
14804 	    debug_tree (name);
14805 	    error_found = true;
14806 	  }
14807       }
14808   else if (TREE_CODE (t) == ARRAY_TYPE)
14809     {
14810       if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14811 	{
14812 	  error ("array %<TYPE_DOMAIN%> is not integer type");
14813 	  debug_tree (TYPE_DOMAIN (t));
14814 	  error_found = true;
14815 	}
14816     }
14817   else if (RECORD_OR_UNION_TYPE_P (t))
14818     {
14819       if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14820 	{
14821 	  error ("%<TYPE_FIELDS%> defined in incomplete type");
14822 	  error_found = true;
14823 	}
14824       for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14825 	{
14826 	  /* TODO: verify properties of decls.  */
14827 	  if (TREE_CODE (fld) == FIELD_DECL)
14828 	    ;
14829 	  else if (TREE_CODE (fld) == TYPE_DECL)
14830 	    ;
14831 	  else if (TREE_CODE (fld) == CONST_DECL)
14832 	    ;
14833 	  else if (VAR_P (fld))
14834 	    ;
14835 	  else if (TREE_CODE (fld) == TEMPLATE_DECL)
14836 	    ;
14837 	  else if (TREE_CODE (fld) == USING_DECL)
14838 	    ;
14839 	  else if (TREE_CODE (fld) == FUNCTION_DECL)
14840 	    ;
14841 	  else
14842 	    {
14843 	      error ("wrong tree in %<TYPE_FIELDS%> list");
14844 	      debug_tree (fld);
14845 	      error_found = true;
14846 	    }
14847 	}
14848     }
14849   else if (TREE_CODE (t) == INTEGER_TYPE
14850 	   || TREE_CODE (t) == BOOLEAN_TYPE
14851 	   || TREE_CODE (t) == OFFSET_TYPE
14852 	   || TREE_CODE (t) == REFERENCE_TYPE
14853 	   || TREE_CODE (t) == NULLPTR_TYPE
14854 	   || TREE_CODE (t) == POINTER_TYPE)
14855     {
14856       if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14857 	{
14858 	  error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14859 		 "is %p",
14860 		 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14861 	  error_found = true;
14862 	}
14863       else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14864 	{
14865 	  error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14866 	  debug_tree (TYPE_CACHED_VALUES (t));
14867 	  error_found = true;
14868 	}
14869       /* Verify just enough of the cache to ensure that no one copied it to a
14870 	 new type.  All copying should go through copy_node, which should clear it.  */
14871       else if (TYPE_CACHED_VALUES_P (t))
14872 	{
14873 	  int i;
14874 	  for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14875 	    if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14876 		&& TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14877 	      {
14878 		error ("wrong %<TYPE_CACHED_VALUES%> entry");
14879 		debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14880 		error_found = true;
14881 		break;
14882 	      }
14883 	}
14884     }
14885   else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14886     for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14887       {
14888 	/* C++ FE uses TREE_PURPOSE to store initial values.  */
14889 	if (TREE_PURPOSE (l) && in_lto_p)
14890 	  {
14891 	    error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14892 	    debug_tree (l);
14893 	    error_found = true;
14894 	  }
14895 	if (!TYPE_P (TREE_VALUE (l)))
14896 	  {
14897 	    error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14898 	    debug_tree (l);
14899 	    error_found = true;
14900 	  }
14901       }
14902   else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14903     {
14904       error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14905       debug_tree (TYPE_VALUES_RAW (t));
14906       error_found = true;
14907     }
14908   if (TREE_CODE (t) != INTEGER_TYPE
14909       && TREE_CODE (t) != BOOLEAN_TYPE
14910       && TREE_CODE (t) != OFFSET_TYPE
14911       && TREE_CODE (t) != REFERENCE_TYPE
14912       && TREE_CODE (t) != NULLPTR_TYPE
14913       && TREE_CODE (t) != POINTER_TYPE
14914       && TYPE_CACHED_VALUES_P (t))
14915     {
14916       error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14917       error_found = true;
14918     }
14919 
14920   /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14921      TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
14922      of a type.  */
14923   if (TREE_CODE (t) == METHOD_TYPE
14924       && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14925     {
14926 	error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14927 	error_found = true;
14928     }
14929 
14930   if (error_found)
14931     {
14932       debug_tree (const_cast <tree> (t));
14933       internal_error ("%qs failed", __func__);
14934     }
14935 }
14936 
14937 
14938 /* Return 1 if ARG, interpreted as signed in its precision, is known to be
14939    always non-negative, 2 if ARG is known to be always negative, or 3 if
14940    ARG may be positive or negative.  */
14941 
14942 int
14943 get_range_pos_neg (tree arg)
14944 {
14945   if (arg == error_mark_node)
14946     return 3;
14947 
14948   int prec = TYPE_PRECISION (TREE_TYPE (arg));
14949   int cnt = 0;
14950   if (TREE_CODE (arg) == INTEGER_CST)
14951     {
14952       wide_int w = wi::sext (wi::to_wide (arg), prec);
14953       if (wi::neg_p (w))
14954 	return 2;
14955       else
14956 	return 1;
14957     }
14958   while (CONVERT_EXPR_P (arg)
14959 	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14960 	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14961     {
14962       arg = TREE_OPERAND (arg, 0);
14963       /* Narrower value zero extended into wider type
14964 	 will always result in positive values.  */
14965       if (TYPE_UNSIGNED (TREE_TYPE (arg))
14966 	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14967 	return 1;
14968       prec = TYPE_PRECISION (TREE_TYPE (arg));
14969       if (++cnt > 30)
14970 	return 3;
14971     }
14972 
14973   if (TREE_CODE (arg) != SSA_NAME)
14974     return 3;
14975   wide_int arg_min, arg_max;
14976   while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
14977     {
14978       gimple *g = SSA_NAME_DEF_STMT (arg);
14979       if (is_gimple_assign (g)
14980 	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14981 	{
14982 	  tree t = gimple_assign_rhs1 (g);
14983 	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14984 	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14985 	    {
14986 	      if (TYPE_UNSIGNED (TREE_TYPE (t))
14987 		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14988 		return 1;
14989 	      prec = TYPE_PRECISION (TREE_TYPE (t));
14990 	      arg = t;
14991 	      if (++cnt > 30)
14992 		return 3;
14993 	      continue;
14994 	    }
14995 	}
14996       return 3;
14997     }
14998   if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14999     {
15000       /* For unsigned values, the "positive" range comes
15001 	 below the "negative" range.  */
15002       if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
15003 	return 1;
15004       if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
15005 	return 2;
15006     }
15007   else
15008     {
15009       if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
15010 	return 1;
15011       if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
15012 	return 2;
15013     }
15014   return 3;
15015 }
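
/* Usage sketch (a hypothetical caller, shown only to make the encoding of
   the return value explicit):

     int pn = get_range_pos_neg (op);
     bool may_be_nonnegative = (pn & 1) != 0;
     bool may_be_negative = (pn & 2) != 0;

   i.e. bit 0 stands for "can be non-negative", bit 1 for "can be negative",
   and 3 means nothing is known about the sign.  */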
15016 
15017 
15018 
15019 
15020 /* Return true if ARG is marked with the nonnull attribute in the
15021    current function signature.  */
15022 
15023 bool
15024 nonnull_arg_p (const_tree arg)
15025 {
15026   tree t, attrs, fntype;
15027   unsigned HOST_WIDE_INT arg_num;
15028 
15029   gcc_assert (TREE_CODE (arg) == PARM_DECL
15030 	      && (POINTER_TYPE_P (TREE_TYPE (arg))
15031 		  || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
15032 
15033   /* The static chain decl is always non-NULL.  */
15034   if (arg == cfun->static_chain_decl)
15035     return true;
15036 
15037   /* The THIS argument of a method is always non-NULL.  */
15038   if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
15039       && arg == DECL_ARGUMENTS (cfun->decl)
15040       && flag_delete_null_pointer_checks)
15041     return true;
15042 
15043   /* Values passed by reference are always non-NULL.  */
15044   if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
15045       && flag_delete_null_pointer_checks)
15046     return true;
15047 
15048   fntype = TREE_TYPE (cfun->decl);
15049   for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
15050     {
15051       attrs = lookup_attribute ("nonnull", attrs);
15052 
15053       /* If "nonnull" wasn't specified, we know nothing about the argument.  */
15054       if (attrs == NULL_TREE)
15055 	return false;
15056 
15057       /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
15058       if (TREE_VALUE (attrs) == NULL_TREE)
15059 	return true;
15060 
15061       /* Get the position number for ARG in the function signature.  */
15062       for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
15063 	   t;
15064 	   t = DECL_CHAIN (t), arg_num++)
15065 	{
15066 	  if (t == arg)
15067 	    break;
15068 	}
15069 
15070       gcc_assert (t == arg);
15071 
15072       /* Now see if ARG_NUM is mentioned in the nonnull list.  */
15073       for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
15074 	{
15075 	  if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
15076 	    return true;
15077 	}
15078     }
15079 
15080   return false;
15081 }
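
/* For example (an illustration only), while compiling the body of a
   function declared as

     void f (int *p, int *q) __attribute__ ((nonnull (2)));

   nonnull_arg_p returns false for the PARM_DECL of P and true for the
   PARM_DECL of Q, since the attribute operands are 1-based.  */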
15082 
15083 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
15084    information.  */
15085 
15086 location_t
15087 set_block (location_t loc, tree block)
15088 {
15089   location_t pure_loc = get_pure_location (loc);
15090   source_range src_range = get_range_from_loc (line_table, loc);
15091   return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
15092 }
15093 
15094 location_t
15095 set_source_range (tree expr, location_t start, location_t finish)
15096 {
15097   source_range src_range;
15098   src_range.m_start = start;
15099   src_range.m_finish = finish;
15100   return set_source_range (expr, src_range);
15101 }
15102 
15103 location_t
15104 set_source_range (tree expr, source_range src_range)
15105 {
15106   if (!EXPR_P (expr))
15107     return UNKNOWN_LOCATION;
15108 
15109   location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
15110   location_t adhoc = COMBINE_LOCATION_DATA (line_table,
15111 					    pure_loc,
15112 					    src_range,
15113 					    NULL);
15114   SET_EXPR_LOCATION (expr, adhoc);
15115   return adhoc;
15116 }
15117 
15118 /* Return EXPR, potentially wrapped in a location wrapper node at LOC,
15119    if !CAN_HAVE_LOCATION_P (expr).
15120 
15121    NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
15122    VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
15123 
15124    Wrapper nodes can be identified using location_wrapper_p.  */
15125 
15126 tree
15127 maybe_wrap_with_location (tree expr, location_t loc)
15128 {
15129   if (expr == NULL)
15130     return NULL;
15131   if (loc == UNKNOWN_LOCATION)
15132     return expr;
15133   if (CAN_HAVE_LOCATION_P (expr))
15134     return expr;
15135   /* We should only be adding wrappers for constants and for decls,
15136      or for some exceptional tree nodes (e.g. BASELINK in the C++ FE).  */
15137   gcc_assert (CONSTANT_CLASS_P (expr)
15138 	      || DECL_P (expr)
15139 	      || EXCEPTIONAL_CLASS_P (expr));
15140 
15141   /* For now, don't add wrappers to exceptional tree nodes, to minimize
15142      any impact of the wrapper nodes.  */
15143   if (EXCEPTIONAL_CLASS_P (expr))
15144     return expr;
15145 
15146   /* Compiler-generated temporary variables don't need a wrapper.  */
15147   if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
15148     return expr;
15149 
15150   /* If any auto_suppress_location_wrappers are active, don't create
15151      wrappers.  */
15152   if (suppress_location_wrappers > 0)
15153     return expr;
15154 
15155   tree_code code
15156     = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
15157 	|| (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
15158        ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
15159   tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
15160   /* Mark this node as being a wrapper.  */
15161   EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
15162   return wrapper;
15163 }
15164 
15165 int suppress_location_wrappers;
15166 
15167 /* Return the name of combined function FN, for debugging purposes.  */
15168 
15169 const char *
15170 combined_fn_name (combined_fn fn)
15171 {
15172   if (builtin_fn_p (fn))
15173     {
15174       tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
15175       return IDENTIFIER_POINTER (DECL_NAME (fndecl));
15176     }
15177   else
15178     return internal_fn_name (as_internal_fn (fn));
15179 }
15180 
15181 /* Return a bitmap with a bit set corresponding to each argument in
15182    a function call type FNTYPE declared with attribute nonnull,
15183    or null if none of the function's arguments are nonnull.  The caller
15184    must free the bitmap.  */
15185 
15186 bitmap
15187 get_nonnull_args (const_tree fntype)
15188 {
15189   if (fntype == NULL_TREE)
15190     return NULL;
15191 
15192   bitmap argmap = NULL;
15193   if (TREE_CODE (fntype) == METHOD_TYPE)
15194     {
15195       /* The this pointer in C++ non-static member functions is
15196 	 implicitly nonnull whether or not it's declared as such.  */
15197       argmap = BITMAP_ALLOC (NULL);
15198       bitmap_set_bit (argmap, 0);
15199     }
15200 
15201   tree attrs = TYPE_ATTRIBUTES (fntype);
15202   if (!attrs)
15203     return argmap;
15204 
15205   /* A function declaration can specify multiple attribute nonnull,
15206      each with zero or more arguments.  The loop below creates a bitmap
15207      representing a union of all the arguments.  An empty (but non-null)
15208      bitmap means that all arguments have been declared nonnull.  */
15209   for ( ; attrs; attrs = TREE_CHAIN (attrs))
15210     {
15211       attrs = lookup_attribute ("nonnull", attrs);
15212       if (!attrs)
15213 	break;
15214 
15215       if (!argmap)
15216 	argmap = BITMAP_ALLOC (NULL);
15217 
15218       if (!TREE_VALUE (attrs))
15219 	{
15220 	  /* Clear the bitmap in case a previous attribute nonnull
15221 	     set it and this one overrides it for all arguments.  */
15222 	  bitmap_clear (argmap);
15223 	  return argmap;
15224 	}
15225 
15226       /* Iterate over the indices of the format arguments declared nonnull
15227 	 and set a bit for each.  */
15228       for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
15229 	{
15230 	  unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
15231 	  bitmap_set_bit (argmap, val);
15232 	}
15233     }
15234 
15235   return argmap;
15236 }
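
/* A small usage sketch (hypothetical caller): the bitmap uses zero-based
   argument positions, an allocated-but-empty bitmap means every argument
   is nonnull, and the caller owns the bitmap:

     if (bitmap nonnull = get_nonnull_args (fntype))
       {
	 bool all_nonnull = bitmap_empty_p (nonnull);
	 bool second_nonnull = all_nonnull || bitmap_bit_p (nonnull, 1);
	 BITMAP_FREE (nonnull);
       }  */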
15237 
15238 /* Return true if TYPE is an empty type: a record or union whose fields are
15239    all padding or themselves of empty type, or an array with no elements or
   with an empty element type.  */
15240 
15241 bool
15242 is_empty_type (const_tree type)
15243 {
15244   if (RECORD_OR_UNION_TYPE_P (type))
15245     {
15246       for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
15247 	if (TREE_CODE (field) == FIELD_DECL
15248 	    && !DECL_PADDING_P (field)
15249 	    && !is_empty_type (TREE_TYPE (field)))
15250 	  return false;
15251       return true;
15252     }
15253   else if (TREE_CODE (type) == ARRAY_TYPE)
15254     return (integer_minus_onep (array_type_nelts (type))
15255 	    || TYPE_DOMAIN (type) == NULL_TREE
15256 	    || is_empty_type (TREE_TYPE (type)));
15257   return false;
15258 }
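
/* For instance (an illustration only, in C++ terms):

     struct E {};			is_empty_type -> true
     struct W { struct E e; };		true: its only field is itself empty
     struct F { int i; };		false: it has a non-empty field  */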
15259 
15260 /* Implement TARGET_EMPTY_RECORD_P.  Return true if TYPE is an empty type
15261    that shouldn't be passed via stack.  */
15262 
15263 bool
15264 default_is_empty_record (const_tree type)
15265 {
15266   if (!abi_version_at_least (12))
15267     return false;
15268 
15269   if (type == error_mark_node)
15270     return false;
15271 
15272   if (TREE_ADDRESSABLE (type))
15273     return false;
15274 
15275   return is_empty_type (TYPE_MAIN_VARIANT (type));
15276 }
15277 
15278 /* Determine whether TYPE is a structure with a flexible array member,
15279    or a union containing such a structure (possibly recursively).  */
15280 
15281 bool
15282 flexible_array_type_p (const_tree type)
15283 {
15284   tree x, last;
15285   switch (TREE_CODE (type))
15286     {
15287     case RECORD_TYPE:
15288       last = NULL_TREE;
15289       for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
15290 	if (TREE_CODE (x) == FIELD_DECL)
15291 	  last = x;
15292       if (last == NULL_TREE)
15293 	return false;
15294       if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
15295 	  && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
15296 	  && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
15297 	  && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
15298 	return true;
15299       return false;
15300     case UNION_TYPE:
15301       for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
15302 	{
15303 	  if (TREE_CODE (x) == FIELD_DECL
15304 	      && flexible_array_type_p (TREE_TYPE (x)))
15305 	    return true;
15306 	}
15307       return false;
15308     default:
15309       return false;
15310   }
15311 }
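
/* For example (illustration only):

     struct S { int n; char tail[]; };	  flexible_array_type_p -> true
     struct T { int n; char tail[4]; };	  false: the trailing array is bounded
     union U { struct S s; int i; };	  true: it contains such a structure

   Only the last FIELD_DECL of a RECORD_TYPE is considered, and unions are
   searched recursively, as the loop above does.  */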
15312 
15313 /* Like int_size_in_bytes, but handle empty records specially.  */
15314 
15315 HOST_WIDE_INT
15316 arg_int_size_in_bytes (const_tree type)
15317 {
15318   return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
15319 }
15320 
15321 /* Like size_in_bytes, but handle empty records specially.  */
15322 
15323 tree
15324 arg_size_in_bytes (const_tree type)
15325 {
15326   return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
15327 }
15328 
15329 /* Return true if an expression with CODE has to have the same result type as
15330    its first operand.  */
15331 
15332 bool
15333 expr_type_first_operand_type_p (tree_code code)
15334 {
15335   switch (code)
15336     {
15337     case NEGATE_EXPR:
15338     case ABS_EXPR:
15339     case BIT_NOT_EXPR:
15340     case PAREN_EXPR:
15341     case CONJ_EXPR:
15342 
15343     case PLUS_EXPR:
15344     case MINUS_EXPR:
15345     case MULT_EXPR:
15346     case TRUNC_DIV_EXPR:
15347     case CEIL_DIV_EXPR:
15348     case FLOOR_DIV_EXPR:
15349     case ROUND_DIV_EXPR:
15350     case TRUNC_MOD_EXPR:
15351     case CEIL_MOD_EXPR:
15352     case FLOOR_MOD_EXPR:
15353     case ROUND_MOD_EXPR:
15354     case RDIV_EXPR:
15355     case EXACT_DIV_EXPR:
15356     case MIN_EXPR:
15357     case MAX_EXPR:
15358     case BIT_IOR_EXPR:
15359     case BIT_XOR_EXPR:
15360     case BIT_AND_EXPR:
15361 
15362     case LSHIFT_EXPR:
15363     case RSHIFT_EXPR:
15364     case LROTATE_EXPR:
15365     case RROTATE_EXPR:
15366       return true;
15367 
15368     default:
15369       return false;
15370     }
15371 }
15372 
15373 /* Return a typenode for the "standard" C type with a given name.  */
15374 tree
15375 get_typenode_from_name (const char *name)
15376 {
15377   if (name == NULL || *name == '\0')
15378     return NULL_TREE;
15379 
15380   if (strcmp (name, "char") == 0)
15381     return char_type_node;
15382   if (strcmp (name, "unsigned char") == 0)
15383     return unsigned_char_type_node;
15384   if (strcmp (name, "signed char") == 0)
15385     return signed_char_type_node;
15386 
15387   if (strcmp (name, "short int") == 0)
15388     return short_integer_type_node;
15389   if (strcmp (name, "short unsigned int") == 0)
15390     return short_unsigned_type_node;
15391 
15392   if (strcmp (name, "int") == 0)
15393     return integer_type_node;
15394   if (strcmp (name, "unsigned int") == 0)
15395     return unsigned_type_node;
15396 
15397   if (strcmp (name, "long int") == 0)
15398     return long_integer_type_node;
15399   if (strcmp (name, "long unsigned int") == 0)
15400     return long_unsigned_type_node;
15401 
15402   if (strcmp (name, "long long int") == 0)
15403     return long_long_integer_type_node;
15404   if (strcmp (name, "long long unsigned int") == 0)
15405     return long_long_unsigned_type_node;
15406 
15407   gcc_unreachable ();
15408 }
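
/* E.g. get_typenode_from_name ("unsigned int") returns unsigned_type_node;
   any name outside the list above is a caller bug and hits the
   gcc_unreachable above.  */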
15409 
15410 /* List of pointer types used to declare builtins before we have seen their
15411    real declaration.
15412 
15413    Keep the size up to date in tree.h !  */
15414 const builtin_structptr_type builtin_structptr_types[6] =
15415 {
15416   { fileptr_type_node, ptr_type_node, "FILE" },
15417   { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
15418   { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
15419   { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
15420   { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
15421   { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
15422 };
15423 
15424 /* Return the maximum object size.  */
15425 
15426 tree
15427 max_object_size (void)
15428 {
15429   /* To do: Make this a configurable parameter.  */
15430   return TYPE_MAX_VALUE (ptrdiff_type_node);
15431 }
15432 
15433 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
15434    parameter default to false and that weeds out error_mark_node.  */
15435 
15436 bool
15437 verify_type_context (location_t loc, type_context_kind context,
15438 		     const_tree type, bool silent_p)
15439 {
15440   if (type == error_mark_node)
15441     return true;
15442 
15443   gcc_assert (TYPE_P (type));
15444   return (!targetm.verify_type_context
15445 	  || targetm.verify_type_context (loc, context, type, silent_p));
15446 }
15447 
15448 /* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
15449    delete operators.  */
15450 
15451 bool
15452 valid_new_delete_pair_p (tree new_asm, tree delete_asm)
15453 {
15454   const char *new_name = IDENTIFIER_POINTER (new_asm);
15455   const char *delete_name = IDENTIFIER_POINTER (delete_asm);
15456   unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
15457   unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);
15458 
15459   if (new_len < 5 || delete_len < 6)
15460     return false;
15461   if (new_name[0] == '_')
15462     ++new_name, --new_len;
15463   if (new_name[0] == '_')
15464     ++new_name, --new_len;
15465   if (delete_name[0] == '_')
15466     ++delete_name, --delete_len;
15467   if (delete_name[0] == '_')
15468     ++delete_name, --delete_len;
15469   if (new_len < 4 || delete_len < 5)
15470     return false;
15471   /* *_len is now just the length after initial underscores.  */
15472   if (new_name[0] != 'Z' || new_name[1] != 'n')
15473     return false;
15474   if (delete_name[0] != 'Z' || delete_name[1] != 'd')
15475     return false;
15476   /* _Znw must match _Zdl, _Zna must match _Zda.  */
15477   if ((new_name[2] != 'w' || delete_name[2] != 'l')
15478       && (new_name[2] != 'a' || delete_name[2] != 'a'))
15479     return false;
15480   /* 'j', 'm' and 'y' correspond to size_t.  */
15481   if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
15482     return false;
15483   if (delete_name[3] != 'P' || delete_name[4] != 'v')
15484     return false;
15485   if (new_len == 4
15486       || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
15487     {
15488       /* _ZnXY or _ZnXYRKSt9nothrow_t matches
15489 	 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t.  */
15490       if (delete_len == 5)
15491 	return true;
15492       if (delete_len == 6 && delete_name[5] == new_name[3])
15493 	return true;
15494       if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
15495 	return true;
15496     }
15497   else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
15498 	   || (new_len == 33
15499 	       && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
15500     {
15501       /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
15502 	 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
15503 	 _ZdXPvSt11align_val_tRKSt9nothrow_t.  */
15504       if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
15505 	return true;
15506       if (delete_len == 21
15507 	  && delete_name[5] == new_name[3]
15508 	  && !memcmp (delete_name + 6, "St11align_val_t", 15))
15509 	return true;
15510       if (delete_len == 34
15511 	  && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
15512 	return true;
15513     }
15514   return false;
15515 }
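
/* Some concrete pairs this accepts, assuming an LP64 target where size_t
   mangles as 'm' (given purely as an illustration):

     _Znwm / _ZdlPv			operator new (size_t)
					/ operator delete (void *)
     _Znam / _ZdaPvm			operator new[] (size_t)
					/ sized operator delete[]
     _ZnwmSt11align_val_t / _ZdlPvSt11align_val_t    the aligned forms

   whereas e.g. _Znwm paired with _ZdaPv is rejected, because the scalar
   ('w') and array ('a') forms must agree.  */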
15516 
15517 #if CHECKING_P
15518 
15519 namespace selftest {
15520 
15521 /* Selftests for tree.  */
15522 
15523 /* Verify that integer constants are sane.  */
15524 
15525 static void
15526 test_integer_constants ()
15527 {
15528   ASSERT_TRUE (integer_type_node != NULL);
15529   ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15530 
15531   tree type = integer_type_node;
15532 
15533   tree zero = build_zero_cst (type);
15534   ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15535   ASSERT_EQ (type, TREE_TYPE (zero));
15536 
15537   tree one = build_int_cst (type, 1);
15538   ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15539   ASSERT_EQ (type, TREE_TYPE (one));
15540 }
15541 
15542 /* Verify identifiers.  */
15543 
15544 static void
15545 test_identifiers ()
15546 {
15547   tree identifier = get_identifier ("foo");
15548   ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15549   ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15550 }
15551 
15552 /* Verify LABEL_DECL.  */
15553 
15554 static void
15555 test_labels ()
15556 {
15557   tree identifier = get_identifier ("err");
15558   tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15559 				identifier, void_type_node);
15560   ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15561   ASSERT_FALSE (FORCED_LABEL (label_decl));
15562 }
15563 
15564 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15565    are given by VALS.  */
15566 
15567 static tree
15568 build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
15569 {
15570   gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15571   tree_vector_builder builder (type, vals.length (), 1);
15572   builder.splice (vals);
15573   return builder.build ();
15574 }
15575 
15576 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED.  */
15577 
15578 static void
15579 check_vector_cst (vec<tree> expected, tree actual)
15580 {
15581   ASSERT_KNOWN_EQ (expected.length (),
15582 		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15583   for (unsigned int i = 0; i < expected.length (); ++i)
15584     ASSERT_EQ (wi::to_wide (expected[i]),
15585 	       wi::to_wide (vector_cst_elt (actual, i)));
15586 }
15587 
15588 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15589    and that its elements match EXPECTED.  */
15590 
15591 static void
15592 check_vector_cst_duplicate (vec<tree> expected, tree actual,
15593 			    unsigned int npatterns)
15594 {
15595   ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15596   ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
15597   ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
15598   ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
15599   ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15600   check_vector_cst (expected, actual);
15601 }
15602 
15603 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15604    and NPATTERNS background elements, and that its elements match
15605    EXPECTED.  */
15606 
15607 static void
15608 check_vector_cst_fill (vec<tree> expected, tree actual,
15609 		       unsigned int npatterns)
15610 {
15611   ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15612   ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
15613   ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
15614   ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15615   ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15616   check_vector_cst (expected, actual);
15617 }
15618 
15619 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15620    and that its elements match EXPECTED.  */
15621 
15622 static void
15623 check_vector_cst_stepped (vec<tree> expected, tree actual,
15624 			  unsigned int npatterns)
15625 {
15626   ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15627   ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
15628   ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
15629   ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15630   ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
15631   check_vector_cst (expected, actual);
15632 }
15633 
15634 /* Test the creation of VECTOR_CSTs.  */
15635 
15636 static void
15637 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
15638 {
15639   auto_vec<tree, 8> elements (8);
15640   elements.quick_grow (8);
15641   tree element_type = build_nonstandard_integer_type (16, true);
15642   tree vector_type = build_vector_type (element_type, 8);
15643 
15644   /* Test a simple linear series with a base of 0 and a step of 1:
15645      { 0, 1, 2, 3, 4, 5, 6, 7 }.  */
15646   for (unsigned int i = 0; i < 8; ++i)
15647     elements[i] = build_int_cst (element_type, i);
15648   tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
15649   check_vector_cst_stepped (elements, vector, 1);
15650 
15651   /* Try the same with the first element replaced by 100:
15652      { 100, 1, 2, 3, 4, 5, 6, 7 }.  */
15653   elements[0] = build_int_cst (element_type, 100);
15654   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15655   check_vector_cst_stepped (elements, vector, 1);
15656 
15657   /* Try a series that wraps around.
15658      { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }.  */
15659   for (unsigned int i = 1; i < 8; ++i)
15660     elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
15661   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15662   check_vector_cst_stepped (elements, vector, 1);
15663 
15664   /* Try a downward series:
15665      { 100, 79, 78, 77, 76, 75, 74, 73 }.  */
15666   for (unsigned int i = 1; i < 8; ++i)
15667     elements[i] = build_int_cst (element_type, 80 - i);
15668   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15669   check_vector_cst_stepped (elements, vector, 1);
15670 
15671   /* Try two interleaved series with different bases and steps:
15672      { 100, 53, 66, 206, 62, 212, 58, 218 }.  */
15673   elements[1] = build_int_cst (element_type, 53);
15674   for (unsigned int i = 2; i < 8; i += 2)
15675     {
15676       elements[i] = build_int_cst (element_type, 70 - i * 2);
15677       elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
15678     }
15679   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15680   check_vector_cst_stepped (elements, vector, 2);
15681 
15682   /* Try a duplicated value:
15683      { 100, 100, 100, 100, 100, 100, 100, 100 }.  */
15684   for (unsigned int i = 1; i < 8; ++i)
15685     elements[i] = elements[0];
15686   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15687   check_vector_cst_duplicate (elements, vector, 1);
15688 
15689   /* Try an interleaved duplicated value:
15690      { 100, 55, 100, 55, 100, 55, 100, 55 }.  */
15691   elements[1] = build_int_cst (element_type, 55);
15692   for (unsigned int i = 2; i < 8; ++i)
15693     elements[i] = elements[i - 2];
15694   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15695   check_vector_cst_duplicate (elements, vector, 2);
15696 
15697   /* Try a duplicated value with 2 exceptions
15698      { 41, 97, 100, 55, 100, 55, 100, 55 }.  */
15699   elements[0] = build_int_cst (element_type, 41);
15700   elements[1] = build_int_cst (element_type, 97);
15701   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15702   check_vector_cst_fill (elements, vector, 2);
15703 
15704   /* Try with and without a step
15705      { 41, 97, 100, 21, 100, 35, 100, 49 }.  */
15706   for (unsigned int i = 3; i < 8; i += 2)
15707     elements[i] = build_int_cst (element_type, i * 7);
15708   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15709   check_vector_cst_stepped (elements, vector, 2);
15710 
15711   /* Try a fully-general constant:
15712      { 41, 97, 100, 21, 100, 9990, 100, 49 }.  */
15713   elements[5] = build_int_cst (element_type, 9990);
15714   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15715   check_vector_cst_fill (elements, vector, 4);
15716 }
15717 
15718 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15719    Helper function for test_location_wrappers, to deal with STRIP_NOPS
15720    modifying its argument in-place.  */
15721 
15722 static void
15723 check_strip_nops (tree node, tree expected)
15724 {
15725   STRIP_NOPS (node);
15726   ASSERT_EQ (expected, node);
15727 }
15728 
15729 /* Verify location wrappers.  */
15730 
15731 static void
15732 test_location_wrappers ()
15733 {
15734   location_t loc = BUILTINS_LOCATION;
15735 
15736   ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
15737 
15738   /* Wrapping a constant.  */
15739   tree int_cst = build_int_cst (integer_type_node, 42);
15740   ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
15741   ASSERT_FALSE (location_wrapper_p (int_cst));
15742 
15743   tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
15744   ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
15745   ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
15746   ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
15747 
15748   /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION.  */
15749   ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
15750 
15751   /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P.  */
15752   tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
15753   ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
15754   ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
15755 
15756   /* Wrapping a STRING_CST.  */
15757   tree string_cst = build_string (4, "foo");
15758   ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
15759   ASSERT_FALSE (location_wrapper_p (string_cst));
15760 
15761   tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
15762   ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
15763   ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
15764   ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
15765   ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
15766 
15767 
15768   /* Wrapping a variable.  */
15769   tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
15770 			     get_identifier ("some_int_var"),
15771 			     integer_type_node);
15772   ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
15773   ASSERT_FALSE (location_wrapper_p (int_var));
15774 
15775   tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
15776   ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
15777   ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
15778   ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
15779 
15780   /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15781      wrapper.  */
15782   tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
15783   ASSERT_FALSE (location_wrapper_p (r_cast));
15784   ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
15785 
15786   /* Verify that STRIP_NOPS removes wrappers.  */
15787   check_strip_nops (wrapped_int_cst, int_cst);
15788   check_strip_nops (wrapped_string_cst, string_cst);
15789   check_strip_nops (wrapped_int_var, int_var);
15790 }
15791 
15792 /* Test various tree predicates.  Verify that location wrappers don't
15793    affect the results.  */
15794 
15795 static void
15796 test_predicates ()
15797 {
15798   /* Build various constants and wrappers around them.  */
15799 
15800   location_t loc = BUILTINS_LOCATION;
15801 
15802   tree i_0 = build_int_cst (integer_type_node, 0);
15803   tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
15804 
15805   tree i_1 = build_int_cst (integer_type_node, 1);
15806   tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
15807 
15808   tree i_m1 = build_int_cst (integer_type_node, -1);
15809   tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
15810 
15811   tree f_0 = build_real_from_int_cst (float_type_node, i_0);
15812   tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
15813   tree f_1 = build_real_from_int_cst (float_type_node, i_1);
15814   tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
15815   tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
15816   tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
15817 
15818   tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
15819   tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
15820   tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
15821 
15822   tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
15823   tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
15824   tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
15825 
15826   /* TODO: vector constants.  */
15827 
15828   /* Test integer_onep.  */
15829   ASSERT_FALSE (integer_onep (i_0));
15830   ASSERT_FALSE (integer_onep (wr_i_0));
15831   ASSERT_TRUE (integer_onep (i_1));
15832   ASSERT_TRUE (integer_onep (wr_i_1));
15833   ASSERT_FALSE (integer_onep (i_m1));
15834   ASSERT_FALSE (integer_onep (wr_i_m1));
15835   ASSERT_FALSE (integer_onep (f_0));
15836   ASSERT_FALSE (integer_onep (wr_f_0));
15837   ASSERT_FALSE (integer_onep (f_1));
15838   ASSERT_FALSE (integer_onep (wr_f_1));
15839   ASSERT_FALSE (integer_onep (f_m1));
15840   ASSERT_FALSE (integer_onep (wr_f_m1));
15841   ASSERT_FALSE (integer_onep (c_i_0));
15842   ASSERT_TRUE (integer_onep (c_i_1));
15843   ASSERT_FALSE (integer_onep (c_i_m1));
15844   ASSERT_FALSE (integer_onep (c_f_0));
15845   ASSERT_FALSE (integer_onep (c_f_1));
15846   ASSERT_FALSE (integer_onep (c_f_m1));
15847 
15848   /* Test integer_zerop.  */
15849   ASSERT_TRUE (integer_zerop (i_0));
15850   ASSERT_TRUE (integer_zerop (wr_i_0));
15851   ASSERT_FALSE (integer_zerop (i_1));
15852   ASSERT_FALSE (integer_zerop (wr_i_1));
15853   ASSERT_FALSE (integer_zerop (i_m1));
15854   ASSERT_FALSE (integer_zerop (wr_i_m1));
15855   ASSERT_FALSE (integer_zerop (f_0));
15856   ASSERT_FALSE (integer_zerop (wr_f_0));
15857   ASSERT_FALSE (integer_zerop (f_1));
15858   ASSERT_FALSE (integer_zerop (wr_f_1));
15859   ASSERT_FALSE (integer_zerop (f_m1));
15860   ASSERT_FALSE (integer_zerop (wr_f_m1));
15861   ASSERT_TRUE (integer_zerop (c_i_0));
15862   ASSERT_FALSE (integer_zerop (c_i_1));
15863   ASSERT_FALSE (integer_zerop (c_i_m1));
15864   ASSERT_FALSE (integer_zerop (c_f_0));
15865   ASSERT_FALSE (integer_zerop (c_f_1));
15866   ASSERT_FALSE (integer_zerop (c_f_m1));
15867 
15868   /* Test integer_all_onesp.  */
15869   ASSERT_FALSE (integer_all_onesp (i_0));
15870   ASSERT_FALSE (integer_all_onesp (wr_i_0));
15871   ASSERT_FALSE (integer_all_onesp (i_1));
15872   ASSERT_FALSE (integer_all_onesp (wr_i_1));
15873   ASSERT_TRUE (integer_all_onesp (i_m1));
15874   ASSERT_TRUE (integer_all_onesp (wr_i_m1));
15875   ASSERT_FALSE (integer_all_onesp (f_0));
15876   ASSERT_FALSE (integer_all_onesp (wr_f_0));
15877   ASSERT_FALSE (integer_all_onesp (f_1));
15878   ASSERT_FALSE (integer_all_onesp (wr_f_1));
15879   ASSERT_FALSE (integer_all_onesp (f_m1));
15880   ASSERT_FALSE (integer_all_onesp (wr_f_m1));
15881   ASSERT_FALSE (integer_all_onesp (c_i_0));
15882   ASSERT_FALSE (integer_all_onesp (c_i_1));
15883   ASSERT_FALSE (integer_all_onesp (c_i_m1));
15884   ASSERT_FALSE (integer_all_onesp (c_f_0));
15885   ASSERT_FALSE (integer_all_onesp (c_f_1));
15886   ASSERT_FALSE (integer_all_onesp (c_f_m1));
15887 
15888   /* Test integer_minus_onep.  */
15889   ASSERT_FALSE (integer_minus_onep (i_0));
15890   ASSERT_FALSE (integer_minus_onep (wr_i_0));
15891   ASSERT_FALSE (integer_minus_onep (i_1));
15892   ASSERT_FALSE (integer_minus_onep (wr_i_1));
15893   ASSERT_TRUE (integer_minus_onep (i_m1));
15894   ASSERT_TRUE (integer_minus_onep (wr_i_m1));
15895   ASSERT_FALSE (integer_minus_onep (f_0));
15896   ASSERT_FALSE (integer_minus_onep (wr_f_0));
15897   ASSERT_FALSE (integer_minus_onep (f_1));
15898   ASSERT_FALSE (integer_minus_onep (wr_f_1));
15899   ASSERT_FALSE (integer_minus_onep (f_m1));
15900   ASSERT_FALSE (integer_minus_onep (wr_f_m1));
15901   ASSERT_FALSE (integer_minus_onep (c_i_0));
15902   ASSERT_FALSE (integer_minus_onep (c_i_1));
15903   ASSERT_TRUE (integer_minus_onep (c_i_m1));
15904   ASSERT_FALSE (integer_minus_onep (c_f_0));
15905   ASSERT_FALSE (integer_minus_onep (c_f_1));
15906   ASSERT_FALSE (integer_minus_onep (c_f_m1));

  /* Test integer_each_onep.  */
  ASSERT_FALSE (integer_each_onep (i_0));
  ASSERT_FALSE (integer_each_onep (wr_i_0));
  ASSERT_TRUE (integer_each_onep (i_1));
  ASSERT_TRUE (integer_each_onep (wr_i_1));
  ASSERT_FALSE (integer_each_onep (i_m1));
  ASSERT_FALSE (integer_each_onep (wr_i_m1));
  ASSERT_FALSE (integer_each_onep (f_0));
  ASSERT_FALSE (integer_each_onep (wr_f_0));
  ASSERT_FALSE (integer_each_onep (f_1));
  ASSERT_FALSE (integer_each_onep (wr_f_1));
  ASSERT_FALSE (integer_each_onep (f_m1));
  ASSERT_FALSE (integer_each_onep (wr_f_m1));
  ASSERT_FALSE (integer_each_onep (c_i_0));
  ASSERT_FALSE (integer_each_onep (c_i_1));
  ASSERT_FALSE (integer_each_onep (c_i_m1));
  ASSERT_FALSE (integer_each_onep (c_f_0));
  ASSERT_FALSE (integer_each_onep (c_f_1));
  ASSERT_FALSE (integer_each_onep (c_f_m1));

  /* Test integer_truep.  */
  ASSERT_FALSE (integer_truep (i_0));
  ASSERT_FALSE (integer_truep (wr_i_0));
  ASSERT_TRUE (integer_truep (i_1));
  ASSERT_TRUE (integer_truep (wr_i_1));
  ASSERT_FALSE (integer_truep (i_m1));
  ASSERT_FALSE (integer_truep (wr_i_m1));
  ASSERT_FALSE (integer_truep (f_0));
  ASSERT_FALSE (integer_truep (wr_f_0));
  ASSERT_FALSE (integer_truep (f_1));
  ASSERT_FALSE (integer_truep (wr_f_1));
  ASSERT_FALSE (integer_truep (f_m1));
  ASSERT_FALSE (integer_truep (wr_f_m1));
  ASSERT_FALSE (integer_truep (c_i_0));
  ASSERT_TRUE (integer_truep (c_i_1));
  ASSERT_FALSE (integer_truep (c_i_m1));
  ASSERT_FALSE (integer_truep (c_f_0));
  ASSERT_FALSE (integer_truep (c_f_1));
  ASSERT_FALSE (integer_truep (c_f_m1));

  /* Test integer_nonzerop.  */
  ASSERT_FALSE (integer_nonzerop (i_0));
  ASSERT_FALSE (integer_nonzerop (wr_i_0));
  ASSERT_TRUE (integer_nonzerop (i_1));
  ASSERT_TRUE (integer_nonzerop (wr_i_1));
  ASSERT_TRUE (integer_nonzerop (i_m1));
  ASSERT_TRUE (integer_nonzerop (wr_i_m1));
  ASSERT_FALSE (integer_nonzerop (f_0));
  ASSERT_FALSE (integer_nonzerop (wr_f_0));
  ASSERT_FALSE (integer_nonzerop (f_1));
  ASSERT_FALSE (integer_nonzerop (wr_f_1));
  ASSERT_FALSE (integer_nonzerop (f_m1));
  ASSERT_FALSE (integer_nonzerop (wr_f_m1));
  ASSERT_FALSE (integer_nonzerop (c_i_0));
  ASSERT_TRUE (integer_nonzerop (c_i_1));
  ASSERT_TRUE (integer_nonzerop (c_i_m1));
  ASSERT_FALSE (integer_nonzerop (c_f_0));
  ASSERT_FALSE (integer_nonzerop (c_f_1));
  ASSERT_FALSE (integer_nonzerop (c_f_m1));

  /* Test real_zerop.  */
  ASSERT_FALSE (real_zerop (i_0));
  ASSERT_FALSE (real_zerop (wr_i_0));
  ASSERT_FALSE (real_zerop (i_1));
  ASSERT_FALSE (real_zerop (wr_i_1));
  ASSERT_FALSE (real_zerop (i_m1));
  ASSERT_FALSE (real_zerop (wr_i_m1));
  ASSERT_TRUE (real_zerop (f_0));
  ASSERT_TRUE (real_zerop (wr_f_0));
  ASSERT_FALSE (real_zerop (f_1));
  ASSERT_FALSE (real_zerop (wr_f_1));
  ASSERT_FALSE (real_zerop (f_m1));
  ASSERT_FALSE (real_zerop (wr_f_m1));
  ASSERT_FALSE (real_zerop (c_i_0));
  ASSERT_FALSE (real_zerop (c_i_1));
  ASSERT_FALSE (real_zerop (c_i_m1));
  ASSERT_TRUE (real_zerop (c_f_0));
  ASSERT_FALSE (real_zerop (c_f_1));
  ASSERT_FALSE (real_zerop (c_f_m1));

  /* Test real_onep.  */
  ASSERT_FALSE (real_onep (i_0));
  ASSERT_FALSE (real_onep (wr_i_0));
  ASSERT_FALSE (real_onep (i_1));
  ASSERT_FALSE (real_onep (wr_i_1));
  ASSERT_FALSE (real_onep (i_m1));
  ASSERT_FALSE (real_onep (wr_i_m1));
  ASSERT_FALSE (real_onep (f_0));
  ASSERT_FALSE (real_onep (wr_f_0));
  ASSERT_TRUE (real_onep (f_1));
  ASSERT_TRUE (real_onep (wr_f_1));
  ASSERT_FALSE (real_onep (f_m1));
  ASSERT_FALSE (real_onep (wr_f_m1));
  ASSERT_FALSE (real_onep (c_i_0));
  ASSERT_FALSE (real_onep (c_i_1));
  ASSERT_FALSE (real_onep (c_i_m1));
  ASSERT_FALSE (real_onep (c_f_0));
  ASSERT_TRUE (real_onep (c_f_1));
  ASSERT_FALSE (real_onep (c_f_m1));

  /* Test real_minus_onep.  */
  ASSERT_FALSE (real_minus_onep (i_0));
  ASSERT_FALSE (real_minus_onep (wr_i_0));
  ASSERT_FALSE (real_minus_onep (i_1));
  ASSERT_FALSE (real_minus_onep (wr_i_1));
  ASSERT_FALSE (real_minus_onep (i_m1));
  ASSERT_FALSE (real_minus_onep (wr_i_m1));
  ASSERT_FALSE (real_minus_onep (f_0));
  ASSERT_FALSE (real_minus_onep (wr_f_0));
  ASSERT_FALSE (real_minus_onep (f_1));
  ASSERT_FALSE (real_minus_onep (wr_f_1));
  ASSERT_TRUE (real_minus_onep (f_m1));
  ASSERT_TRUE (real_minus_onep (wr_f_m1));
  ASSERT_FALSE (real_minus_onep (c_i_0));
  ASSERT_FALSE (real_minus_onep (c_i_1));
  ASSERT_FALSE (real_minus_onep (c_i_m1));
  ASSERT_FALSE (real_minus_onep (c_f_0));
  ASSERT_FALSE (real_minus_onep (c_f_1));
  ASSERT_TRUE (real_minus_onep (c_f_m1));

  /* Test zerop.  */
  ASSERT_TRUE (zerop (i_0));
  ASSERT_TRUE (zerop (wr_i_0));
  ASSERT_FALSE (zerop (i_1));
  ASSERT_FALSE (zerop (wr_i_1));
  ASSERT_FALSE (zerop (i_m1));
  ASSERT_FALSE (zerop (wr_i_m1));
  ASSERT_TRUE (zerop (f_0));
  ASSERT_TRUE (zerop (wr_f_0));
  ASSERT_FALSE (zerop (f_1));
  ASSERT_FALSE (zerop (wr_f_1));
  ASSERT_FALSE (zerop (f_m1));
  ASSERT_FALSE (zerop (wr_f_m1));
  ASSERT_TRUE (zerop (c_i_0));
  ASSERT_FALSE (zerop (c_i_1));
  ASSERT_FALSE (zerop (c_i_m1));
  ASSERT_TRUE (zerop (c_f_0));
  ASSERT_FALSE (zerop (c_f_1));
  ASSERT_FALSE (zerop (c_f_m1));
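  /* zerop accepts a zero of any of these shapes: integer, real, and
     the complex forms of both.  */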

  /* Test tree_expr_nonnegative_p.  */
  ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
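  /* All of the complex constants are rejected above, including zero
     and one: presumably the predicate does not analyze COMPLEX_CST
     nodes, since ordering is not meaningful for complex values.  */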

  /* Test tree_expr_nonzero_p.  */
  ASSERT_FALSE (tree_expr_nonzero_p (i_0));
  ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonzero_p (i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));

  /* Test integer_valued_real_p.  */
  ASSERT_FALSE (integer_valued_real_p (i_0));
  ASSERT_TRUE (integer_valued_real_p (f_0));
  ASSERT_TRUE (integer_valued_real_p (wr_f_0));
  ASSERT_TRUE (integer_valued_real_p (f_1));
  ASSERT_TRUE (integer_valued_real_p (wr_f_1));

  /* Test integer_pow2p.  */
  ASSERT_FALSE (integer_pow2p (i_0));
  ASSERT_TRUE (integer_pow2p (i_1));
  ASSERT_TRUE (integer_pow2p (wr_i_1));
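  /* 1 is 2^0 and therefore counts as a power of two; 0 does not.  */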

  /* Test uniform_integer_cst_p.  */
  ASSERT_TRUE (uniform_integer_cst_p (i_0));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
  ASSERT_TRUE (uniform_integer_cst_p (i_1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
  ASSERT_TRUE (uniform_integer_cst_p (i_m1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (f_0));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (f_1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
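  /* Only the scalar integer constants (location-wrapped or not)
     qualify here; the uniform vector-duplicate case the predicate is
     also meant to handle is not exercised by this test.  */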
}

/* Check that string escaping works correctly.  */

static void
test_escaped_strings (void)
{
  int saved_cutoff;
  escaped_string msg;

  msg.escape (NULL);
  /* ASSERT_STREQ does not accept NULL as a valid test
     result, so we have to use ASSERT_EQ instead.  */
  ASSERT_EQ (NULL, (const char *) msg);

  msg.escape ("");
  ASSERT_STREQ ("", (const char *) msg);

  msg.escape ("foobar");
  ASSERT_STREQ ("foobar", (const char *) msg);

  /* Ensure that we have -fmessage-length set to 0.  */
  saved_cutoff = pp_line_cutoff (global_dc->printer);
  pp_line_cutoff (global_dc->printer) = 0;

  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);

  /* Now repeat the tests with -fmessage-length set to 5.  */
  pp_line_cutoff (global_dc->printer) = 5;

  /* Note that the newline is not translated into an escape.  */
  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
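  /* In other words, once a message-length limit is in effect the
     newline is left alone (the pretty-printer can wrap there), while
     the other control characters are still escaped.  */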

  /* Restore the original message length setting.  */
  pp_line_cutoff (global_dc->printer) = saved_cutoff;
}

/* Run all of the selftests within this file.  */

void
tree_c_tests ()
{
  test_integer_constants ();
  test_identifiers ();
  test_labels ();
  test_vector_cst_patterns ();
  test_location_wrappers ();
  test_predicates ();
  test_escaped_strings ();
}
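
/* tree_c_tests is expected to be invoked from the shared selftest
   harness (it is declared in selftest.h) when GCC is built with
   checking enabled.  */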

} // namespace selftest

#endif /* CHECKING_P */

#include "gt-tree.h"