1 /* Language-dependent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2018 Free Software Foundation, Inc.
3 Hacked by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "cp-tree.h"
26 #include "gimple-expr.h"
27 #include "cgraph.h"
28 #include "stor-layout.h"
29 #include "print-tree.h"
30 #include "tree-iterator.h"
31 #include "tree-inline.h"
32 #include "debug.h"
33 #include "convert.h"
34 #include "gimplify.h"
35 #include "stringpool.h"
36 #include "attribs.h"
37 #include "flags.h"
38 #include "selftest.h"
39
40 static tree bot_manip (tree *, int *, void *);
41 static tree bot_replace (tree *, int *, void *);
42 static hashval_t list_hash_pieces (tree, tree, tree);
43 static tree build_target_expr (tree, tree, tsubst_flags_t);
44 static tree count_trees_r (tree *, int *, void *);
45 static tree verify_stmt_tree_r (tree *, int *, void *);
46 static tree build_local_temp (tree);
47
48 static tree handle_init_priority_attribute (tree *, tree, tree, int, bool *);
49 static tree handle_abi_tag_attribute (tree *, tree, tree, int, bool *);
50
51 /* If REF is an lvalue, returns the kind of lvalue that REF is.
52 Otherwise, returns clk_none. */
53
54 cp_lvalue_kind
55 lvalue_kind (const_tree ref)
56 {
57 cp_lvalue_kind op1_lvalue_kind = clk_none;
58 cp_lvalue_kind op2_lvalue_kind = clk_none;
59
60 /* Expressions of reference type are sometimes wrapped in
61 INDIRECT_REFs. INDIRECT_REFs are just internal compiler
62 representation, not part of the language, so we have to look
63 through them. */
64 if (REFERENCE_REF_P (ref))
65 return lvalue_kind (TREE_OPERAND (ref, 0));
66
67 if (TREE_TYPE (ref)
68 && TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
69 {
70 /* unnamed rvalue references are rvalues */
71 if (TYPE_REF_IS_RVALUE (TREE_TYPE (ref))
72 && TREE_CODE (ref) != PARM_DECL
73 && !VAR_P (ref)
74 && TREE_CODE (ref) != COMPONENT_REF
75 /* Functions are always lvalues. */
76 && TREE_CODE (TREE_TYPE (TREE_TYPE (ref))) != FUNCTION_TYPE)
77 return clk_rvalueref;
78
79 /* lvalue references and named rvalue references are lvalues. */
80 return clk_ordinary;
81 }
82
83 if (ref == current_class_ptr)
84 return clk_none;
85
86 switch (TREE_CODE (ref))
87 {
88 case SAVE_EXPR:
89 return clk_none;
90 /* preincrements and predecrements are valid lvals, provided
91 what they refer to are valid lvals. */
92 case PREINCREMENT_EXPR:
93 case PREDECREMENT_EXPR:
94 case TRY_CATCH_EXPR:
95 case REALPART_EXPR:
96 case IMAGPART_EXPR:
97 return lvalue_kind (TREE_OPERAND (ref, 0));
98
99 case MEMBER_REF:
100 case DOTSTAR_EXPR:
101 if (TREE_CODE (ref) == MEMBER_REF)
102 op1_lvalue_kind = clk_ordinary;
103 else
104 op1_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 0));
105 if (TYPE_PTRMEMFUNC_P (TREE_TYPE (TREE_OPERAND (ref, 1))))
106 op1_lvalue_kind = clk_none;
107 return op1_lvalue_kind;
108
109 case COMPONENT_REF:
110 if (BASELINK_P (TREE_OPERAND (ref, 1)))
111 {
112 tree fn = BASELINK_FUNCTIONS (TREE_OPERAND (ref, 1));
113
114 /* For a static member function, recurse on the BASELINK; we can get
115 here e.g. from reference_binding. If BASELINK_FUNCTIONS is
116 OVERLOAD, the overload is resolved first if possible through
117 resolve_address_of_overloaded_function. */
118 if (TREE_CODE (fn) == FUNCTION_DECL && DECL_STATIC_FUNCTION_P (fn))
119 return lvalue_kind (TREE_OPERAND (ref, 1));
120 }
121 op1_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 0));
122 /* Look at the member designator. */
123 if (!op1_lvalue_kind)
124 ;
125 else if (is_overloaded_fn (TREE_OPERAND (ref, 1)))
126 /* The "field" can be a FUNCTION_DECL or an OVERLOAD in some
127 situations. If we're seeing a COMPONENT_REF, it's a non-static
128 member, so it isn't an lvalue. */
129 op1_lvalue_kind = clk_none;
130 else if (TREE_CODE (TREE_OPERAND (ref, 1)) != FIELD_DECL)
131 /* This can be IDENTIFIER_NODE in a template. */;
132 else if (DECL_C_BIT_FIELD (TREE_OPERAND (ref, 1)))
133 {
134 /* Clear the ordinary bit. If this object was a class
135 rvalue we want to preserve that information. */
136 op1_lvalue_kind &= ~clk_ordinary;
137 /* The lvalue is for a bitfield. */
138 op1_lvalue_kind |= clk_bitfield;
139 }
140 else if (DECL_PACKED (TREE_OPERAND (ref, 1)))
141 op1_lvalue_kind |= clk_packed;
142
143 return op1_lvalue_kind;
144
145 case STRING_CST:
146 case COMPOUND_LITERAL_EXPR:
147 return clk_ordinary;
148
149 case CONST_DECL:
150 /* CONST_DECL without TREE_STATIC are enumeration values and
151 thus not lvalues. With TREE_STATIC they are used by ObjC++
152 in objc_build_string_object and need to be considered as
153 lvalues. */
154 if (! TREE_STATIC (ref))
155 return clk_none;
156 /* FALLTHRU */
157 case VAR_DECL:
158 if (VAR_P (ref) && DECL_HAS_VALUE_EXPR_P (ref))
159 return lvalue_kind (DECL_VALUE_EXPR (CONST_CAST_TREE (ref)));
160
161 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
162 && DECL_LANG_SPECIFIC (ref)
163 && DECL_IN_AGGR_P (ref))
164 return clk_none;
165 /* FALLTHRU */
166 case INDIRECT_REF:
167 case ARROW_EXPR:
168 case ARRAY_REF:
169 case PARM_DECL:
170 case RESULT_DECL:
171 case PLACEHOLDER_EXPR:
172 return clk_ordinary;
173
174 /* A scope ref in a template, left as SCOPE_REF to support later
175 access checking. */
176 case SCOPE_REF:
177 gcc_assert (!type_dependent_expression_p (CONST_CAST_TREE (ref)));
178 {
179 tree op = TREE_OPERAND (ref, 1);
180 if (TREE_CODE (op) == FIELD_DECL)
181 return (DECL_C_BIT_FIELD (op) ? clk_bitfield : clk_ordinary);
182 else
183 return lvalue_kind (op);
184 }
185
186 case MAX_EXPR:
187 case MIN_EXPR:
188 /* Disallow <? and >? as lvalues if either argument side-effects. */
189 if (TREE_SIDE_EFFECTS (TREE_OPERAND (ref, 0))
190 || TREE_SIDE_EFFECTS (TREE_OPERAND (ref, 1)))
191 return clk_none;
192 op1_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 0));
193 op2_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 1));
194 break;
195
196 case COND_EXPR:
197 if (processing_template_decl)
198 {
199 /* Within templates, a REFERENCE_TYPE will indicate whether
200 the COND_EXPR result is an ordinary lvalue or rvalueref.
201 Since REFERENCE_TYPEs are handled above, if we reach this
202 point, we know we got a plain rvalue. Unless we have a
203 type-dependent expr, that is, but we shouldn't be testing
204 lvalueness if we can't even tell the types yet! */
205 gcc_assert (!type_dependent_expression_p (CONST_CAST_TREE (ref)));
206 if (CLASS_TYPE_P (TREE_TYPE (ref))
207 || TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
208 return clk_class;
209 else
210 return clk_none;
211 }
212 op1_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 1)
213 ? TREE_OPERAND (ref, 1)
214 : TREE_OPERAND (ref, 0));
215 op2_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 2));
216 break;
217
218 case MODOP_EXPR:
219 /* We expect to see unlowered MODOP_EXPRs only during
220 template processing. */
221 gcc_assert (processing_template_decl);
222 return clk_ordinary;
223
224 case MODIFY_EXPR:
225 case TYPEID_EXPR:
226 return clk_ordinary;
227
228 case COMPOUND_EXPR:
229 return lvalue_kind (TREE_OPERAND (ref, 1));
230
231 case TARGET_EXPR:
232 return clk_class;
233
234 case VA_ARG_EXPR:
235 return (CLASS_TYPE_P (TREE_TYPE (ref)) ? clk_class : clk_none);
236
237 case CALL_EXPR:
238 /* We can see calls outside of TARGET_EXPR in templates. */
239 if (CLASS_TYPE_P (TREE_TYPE (ref)))
240 return clk_class;
241 return clk_none;
242
243 case FUNCTION_DECL:
244 /* All functions (except non-static-member functions) are
245 lvalues. */
246 return (DECL_NONSTATIC_MEMBER_FUNCTION_P (ref)
247 ? clk_none : clk_ordinary);
248
249 case BASELINK:
250 /* We now represent a reference to a single static member function
251 with a BASELINK. */
252 /* This CONST_CAST is okay because BASELINK_FUNCTIONS returns
253 its argument unmodified and we assign it to a const_tree. */
254 return lvalue_kind (BASELINK_FUNCTIONS (CONST_CAST_TREE (ref)));
255
256 case NON_DEPENDENT_EXPR:
257 case PAREN_EXPR:
258 return lvalue_kind (TREE_OPERAND (ref, 0));
259
260 case VIEW_CONVERT_EXPR:
261 if (location_wrapper_p (ref))
262 return lvalue_kind (TREE_OPERAND (ref, 0));
263 /* Fallthrough. */
264
265 default:
266 if (!TREE_TYPE (ref))
267 return clk_none;
268 if (CLASS_TYPE_P (TREE_TYPE (ref))
269 || TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
270 return clk_class;
271 break;
272 }
273
274 /* If one operand is not an lvalue at all, then this expression is
275 not an lvalue. */
276 if (!op1_lvalue_kind || !op2_lvalue_kind)
277 return clk_none;
278
279 /* Otherwise, it's an lvalue, and it has all the odd properties
280 contributed by either operand. */
281 op1_lvalue_kind = op1_lvalue_kind | op2_lvalue_kind;
282 /* It's not an ordinary lvalue if it involves any other kind. */
283 if ((op1_lvalue_kind & ~clk_ordinary) != clk_none)
284 op1_lvalue_kind &= ~clk_ordinary;
285 /* It can't be both a pseudo-lvalue and a non-addressable lvalue.
286 A COND_EXPR of those should be wrapped in a TARGET_EXPR. */
287 if ((op1_lvalue_kind & (clk_rvalueref|clk_class))
288 && (op1_lvalue_kind & (clk_bitfield|clk_packed)))
289 op1_lvalue_kind = clk_none;
290 return op1_lvalue_kind;
291 }
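
/* Illustrative examples only (an informal reading of the cases above,
   not an additional specification):

     int i;  struct A { int b : 3; } a;

     i                      -> clk_ordinary   (named object)
     a.b                    -> clk_bitfield   (bit-field member)
     static_cast<int&&>(i)  -> clk_rvalueref  (xvalue)
     A()                    -> clk_class      (class prvalue)
     42                     -> clk_none       (non-class prvalue)  */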
292
293 /* Returns the kind of lvalue that REF is, in the sense of [basic.lval]. */
294
295 cp_lvalue_kind
296 real_lvalue_p (const_tree ref)
297 {
298 cp_lvalue_kind kind = lvalue_kind (ref);
299 if (kind & (clk_rvalueref|clk_class))
300 return clk_none;
301 else
302 return kind;
303 }
304
305 /* c-common wants us to return bool. */
306
307 bool
308 lvalue_p (const_tree t)
309 {
310 return real_lvalue_p (t);
311 }
312
313 /* This differs from lvalue_p in that xvalues are included. */
314
315 bool
316 glvalue_p (const_tree ref)
317 {
318 cp_lvalue_kind kind = lvalue_kind (ref);
319 if (kind & clk_class)
320 return false;
321 else
322 return (kind != clk_none);
323 }
324
325 /* This differs from glvalue_p in that class prvalues are included. */
326
327 bool
328 obvalue_p (const_tree ref)
329 {
330 return (lvalue_kind (ref) != clk_none);
331 }
332
333 /* Returns true if REF is an xvalue (the result of dereferencing an rvalue
334 reference), false otherwise. */
335
336 bool
337 xvalue_p (const_tree ref)
338 {
339 return (lvalue_kind (ref) == clk_rvalueref);
340 }
341
342 /* True if REF is a bit-field. */
343
344 bool
345 bitfield_p (const_tree ref)
346 {
347 return (lvalue_kind (ref) & clk_bitfield);
348 }
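
/* For orientation only, the wrappers above roughly track the
   [basic.lval] taxonomy: lvalue_p accepts lvalues, xvalue_p accepts
   xvalues, glvalue_p accepts both, and obvalue_p additionally accepts
   class (and array) prvalues, which lvalue_kind marks clk_class.
   E.g. given "std::string s;", lvalue_p holds for "s" and glvalue_p
   holds for "std::move (s)", while glvalue_p does not hold for
   "std::string ()".  */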
349
350 /* C++-specific version of stabilize_reference. */
351
352 tree
353 cp_stabilize_reference (tree ref)
354 {
355 switch (TREE_CODE (ref))
356 {
357 case NON_DEPENDENT_EXPR:
358 /* We aren't actually evaluating this. */
359 return ref;
360
361 /* We need to treat specially anything stabilize_reference doesn't
362 handle specifically. */
363 case VAR_DECL:
364 case PARM_DECL:
365 case RESULT_DECL:
366 CASE_CONVERT:
367 case FLOAT_EXPR:
368 case FIX_TRUNC_EXPR:
369 case INDIRECT_REF:
370 case COMPONENT_REF:
371 case BIT_FIELD_REF:
372 case ARRAY_REF:
373 case ARRAY_RANGE_REF:
374 case ERROR_MARK:
375 break;
376 default:
377 cp_lvalue_kind kind = lvalue_kind (ref);
378 if ((kind & ~clk_class) != clk_none)
379 {
380 tree type = unlowered_expr_type (ref);
381 bool rval = !!(kind & clk_rvalueref);
382 type = cp_build_reference_type (type, rval);
383 /* This inhibits warnings in, eg, cxx_mark_addressable
384 (c++/60955). */
385 warning_sentinel s (extra_warnings);
386 ref = build_static_cast (type, ref, tf_error);
387 }
388 }
389
390 return stabilize_reference (ref);
391 }
392
393 /* Test whether DECL is a builtin that may appear in a
394 constant-expression. */
395
396 bool
397 builtin_valid_in_constant_expr_p (const_tree decl)
398 {
399 if (!(TREE_CODE (decl) == FUNCTION_DECL
400 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL))
401 /* Not a built-in. */
402 return false;
403 switch (DECL_FUNCTION_CODE (decl))
404 {
405 /* These always have constant results like the corresponding
406 macros/symbol. */
407 case BUILT_IN_FILE:
408 case BUILT_IN_FUNCTION:
409 case BUILT_IN_LINE:
410
411 /* The following built-ins are valid in constant expressions
412 when their arguments are. */
413 case BUILT_IN_ADD_OVERFLOW_P:
414 case BUILT_IN_SUB_OVERFLOW_P:
415 case BUILT_IN_MUL_OVERFLOW_P:
416
417 /* These have constant results even if their operands are
418 non-constant. */
419 case BUILT_IN_CONSTANT_P:
420 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
421 return true;
422 default:
423 return false;
424 }
425 }
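
/* A short sketch of what the whitelist above permits; both of these
   are accepted in a constant expression:

     constexpr const char *file = __builtin_FILE ();
     constexpr bool known = __builtin_constant_p (42);  */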
426
427 /* Build a TARGET_EXPR, initializing the DECL with the VALUE. */
428
429 static tree
430 build_target_expr (tree decl, tree value, tsubst_flags_t complain)
431 {
432 tree t;
433 tree type = TREE_TYPE (decl);
434
435 value = mark_rvalue_use (value);
436
437 gcc_checking_assert (VOID_TYPE_P (TREE_TYPE (value))
438 || TREE_TYPE (decl) == TREE_TYPE (value)
439 /* On ARM ctors return 'this'. */
440 || (TYPE_PTR_P (TREE_TYPE (value))
441 && TREE_CODE (value) == CALL_EXPR)
442 || useless_type_conversion_p (TREE_TYPE (decl),
443 TREE_TYPE (value)));
444
445 if (complain & tf_no_cleanup)
446 /* The caller is building a new-expr and does not need a cleanup. */
447 t = NULL_TREE;
448 else
449 {
450 t = cxx_maybe_build_cleanup (decl, complain);
451 if (t == error_mark_node)
452 return error_mark_node;
453 }
454 t = build4 (TARGET_EXPR, type, decl, value, t, NULL_TREE);
455 if (EXPR_HAS_LOCATION (value))
456 SET_EXPR_LOCATION (t, EXPR_LOCATION (value));
457 /* We always set TREE_SIDE_EFFECTS so that expand_expr does not
458 ignore the TARGET_EXPR. If there really turn out to be no
459 side-effects, then the optimizer should be able to get rid of
460 whatever code is generated anyhow. */
461 TREE_SIDE_EFFECTS (t) = 1;
462
463 return t;
464 }
465
466 /* Return an undeclared local temporary of type TYPE for use in building a
467 TARGET_EXPR. */
468
469 static tree
470 build_local_temp (tree type)
471 {
472 tree slot = build_decl (input_location,
473 VAR_DECL, NULL_TREE, type);
474 DECL_ARTIFICIAL (slot) = 1;
475 DECL_IGNORED_P (slot) = 1;
476 DECL_CONTEXT (slot) = current_function_decl;
477 layout_decl (slot, 0);
478 return slot;
479 }
480
481 /* Set various status flags when building an AGGR_INIT_EXPR object T. */
482
483 static void
484 process_aggr_init_operands (tree t)
485 {
486 bool side_effects;
487
488 side_effects = TREE_SIDE_EFFECTS (t);
489 if (!side_effects)
490 {
491 int i, n;
492 n = TREE_OPERAND_LENGTH (t);
493 for (i = 1; i < n; i++)
494 {
495 tree op = TREE_OPERAND (t, i);
496 if (op && TREE_SIDE_EFFECTS (op))
497 {
498 side_effects = 1;
499 break;
500 }
501 }
502 }
503 TREE_SIDE_EFFECTS (t) = side_effects;
504 }
505
506 /* Build an AGGR_INIT_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE,
507 FN, and SLOT. NARGS is the number of call arguments which are specified
508 as a tree array ARGS. */
509
510 static tree
511 build_aggr_init_array (tree return_type, tree fn, tree slot, int nargs,
512 tree *args)
513 {
514 tree t;
515 int i;
516
517 t = build_vl_exp (AGGR_INIT_EXPR, nargs + 3);
518 TREE_TYPE (t) = return_type;
519 AGGR_INIT_EXPR_FN (t) = fn;
520 AGGR_INIT_EXPR_SLOT (t) = slot;
521 for (i = 0; i < nargs; i++)
522 AGGR_INIT_EXPR_ARG (t, i) = args[i];
523 process_aggr_init_operands (t);
524 return t;
525 }
526
527 /* INIT is a CALL_EXPR or AGGR_INIT_EXPR which needs info about its
528 target. TYPE is the type to be initialized.
529
530 Build an AGGR_INIT_EXPR to represent the initialization. This function
531 differs from build_cplus_new in that an AGGR_INIT_EXPR can only be used
532 to initialize another object, whereas a TARGET_EXPR can either
533 initialize another object or create its own temporary object, and as a
534 result building up a TARGET_EXPR requires that the type's destructor be
535 callable. */
536
537 tree
538 build_aggr_init_expr (tree type, tree init)
539 {
540 tree fn;
541 tree slot;
542 tree rval;
543 int is_ctor;
544
545 /* Don't build AGGR_INIT_EXPR in a template. */
546 if (processing_template_decl)
547 return init;
548
549 fn = cp_get_callee (init);
550 if (fn == NULL_TREE)
551 return convert (type, init);
552
553 is_ctor = (TREE_CODE (fn) == ADDR_EXPR
554 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
555 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0)));
556
557 /* We split the CALL_EXPR into its function and its arguments here.
558 Then, in expand_expr, we put them back together. The reason for
559 this is that this expression might be a default argument
560 expression. In that case, we need a new temporary every time the
561 expression is used. That's what break_out_target_exprs does; it
562 replaces every AGGR_INIT_EXPR with a copy that uses a fresh
563 temporary slot. Then, expand_expr builds up a call-expression
564 using the new slot. */
565
566 /* If we don't need to use a constructor to create an object of this
567 type, don't mess with AGGR_INIT_EXPR. */
568 if (is_ctor || TREE_ADDRESSABLE (type))
569 {
570 slot = build_local_temp (type);
571
572 if (TREE_CODE (init) == CALL_EXPR)
573 {
574 rval = build_aggr_init_array (void_type_node, fn, slot,
575 call_expr_nargs (init),
576 CALL_EXPR_ARGP (init));
577 AGGR_INIT_FROM_THUNK_P (rval)
578 = CALL_FROM_THUNK_P (init);
579 }
580 else
581 {
582 rval = build_aggr_init_array (void_type_node, fn, slot,
583 aggr_init_expr_nargs (init),
584 AGGR_INIT_EXPR_ARGP (init));
585 AGGR_INIT_FROM_THUNK_P (rval)
586 = AGGR_INIT_FROM_THUNK_P (init);
587 }
588 TREE_SIDE_EFFECTS (rval) = 1;
589 AGGR_INIT_VIA_CTOR_P (rval) = is_ctor;
590 TREE_NOTHROW (rval) = TREE_NOTHROW (init);
591 CALL_EXPR_OPERATOR_SYNTAX (rval) = CALL_EXPR_OPERATOR_SYNTAX (init);
592 CALL_EXPR_ORDERED_ARGS (rval) = CALL_EXPR_ORDERED_ARGS (init);
593 CALL_EXPR_REVERSE_ARGS (rval) = CALL_EXPR_REVERSE_ARGS (init);
594 }
595 else
596 rval = init;
597
598 return rval;
599 }
600
601 /* INIT is a CALL_EXPR or AGGR_INIT_EXPR which needs info about its
602 target. TYPE is the type that this initialization should appear to
603 have.
604
605 Build an encapsulation of the initialization to perform
606 and return it so that it can be processed by language-independent
607 and language-specific expression expanders. */
608
609 tree
610 build_cplus_new (tree type, tree init, tsubst_flags_t complain)
611 {
612 tree rval = build_aggr_init_expr (type, init);
613 tree slot;
614
615 if (!complete_type_or_maybe_complain (type, init, complain))
616 return error_mark_node;
617
618 /* Make sure that we're not trying to create an instance of an
619 abstract class. */
620 if (abstract_virtuals_error_sfinae (NULL_TREE, type, complain))
621 return error_mark_node;
622
623 if (TREE_CODE (rval) == AGGR_INIT_EXPR)
624 slot = AGGR_INIT_EXPR_SLOT (rval);
625 else if (TREE_CODE (rval) == CALL_EXPR
626 || TREE_CODE (rval) == CONSTRUCTOR)
627 slot = build_local_temp (type);
628 else
629 return rval;
630
631 rval = build_target_expr (slot, rval, complain);
632
633 if (rval != error_mark_node)
634 TARGET_EXPR_IMPLICIT_P (rval) = 1;
635
636 return rval;
637 }
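
/* Informal sketch of the division of labor (assuming a class S with a
   nontrivial destructor and a function "S make_s ();"): for
   "S s = make_s ();" the call is rewritten by build_aggr_init_expr as
   an AGGR_INIT_EXPR naming make_s and a slot, which can only
   initialize a designated object, and build_cplus_new then wraps it
   in a TARGET_EXPR for that slot so the result can also serve as its
   own temporary.  */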
638
639 /* Subroutine of build_vec_init_expr: Build up a single element
640 initialization as a proxy for the full array initialization to get things
641 marked as used and any appropriate diagnostics.
642
643 Since we're deferring building the actual constructor calls until
644 gimplification time, we need to build one now and throw it away so
645 that the relevant constructor gets mark_used before cgraph decides
646 what functions are needed. Here we assume that init is either
647 NULL_TREE, void_type_node (indicating value-initialization), or
648 another array to copy. */
649
650 static tree
651 build_vec_init_elt (tree type, tree init, tsubst_flags_t complain)
652 {
653 tree inner_type = strip_array_types (type);
654 vec<tree, va_gc> *argvec;
655
656 if (integer_zerop (array_type_nelts_total (type))
657 || !CLASS_TYPE_P (inner_type))
658 /* No interesting initialization to do. */
659 return integer_zero_node;
660 else if (init == void_type_node)
661 return build_value_init (inner_type, complain);
662
663 gcc_assert (init == NULL_TREE
664 || (same_type_ignoring_top_level_qualifiers_p
665 (type, TREE_TYPE (init))));
666
667 argvec = make_tree_vector ();
668 if (init)
669 {
670 tree init_type = strip_array_types (TREE_TYPE (init));
671 tree dummy = build_dummy_object (init_type);
672 if (!lvalue_p (init))
673 dummy = move (dummy);
674 argvec->quick_push (dummy);
675 }
676 init = build_special_member_call (NULL_TREE, complete_ctor_identifier,
677 &argvec, inner_type, LOOKUP_NORMAL,
678 complain);
679 release_tree_vector (argvec);
680
681 /* For a trivial constructor, build_over_call creates a TARGET_EXPR. But
682 we don't want one here because we aren't creating a temporary. */
683 if (TREE_CODE (init) == TARGET_EXPR)
684 init = TARGET_EXPR_INITIAL (init);
685
686 return init;
687 }
688
689 /* Return a TARGET_EXPR which expresses the initialization of an array to
690 be named later, either default-initialization or copy-initialization
691 from another array of the same type. */
692
693 tree
694 build_vec_init_expr (tree type, tree init, tsubst_flags_t complain)
695 {
696 tree slot;
697 bool value_init = false;
698 tree elt_init = build_vec_init_elt (type, init, complain);
699
700 if (init == void_type_node)
701 {
702 value_init = true;
703 init = NULL_TREE;
704 }
705
706 slot = build_local_temp (type);
707 init = build2 (VEC_INIT_EXPR, type, slot, init);
708 TREE_SIDE_EFFECTS (init) = true;
709 SET_EXPR_LOCATION (init, input_location);
710
711 if (cxx_dialect >= cxx11
712 && potential_constant_expression (elt_init))
713 VEC_INIT_EXPR_IS_CONSTEXPR (init) = true;
714 VEC_INIT_EXPR_VALUE_INIT (init) = value_init;
715
716 return init;
717 }
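
/* Shape of the result, informally: a VEC_INIT_EXPR of TYPE with
   operands SLOT and INIT, with VEC_INIT_EXPR_VALUE_INIT set when INIT
   was void_type_node (value-initialization) and
   VEC_INIT_EXPR_IS_CONSTEXPR set when the single-element proxy
   initialization is a potential constant expression.  */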
718
719 /* Give a helpful diagnostic for a non-constexpr VEC_INIT_EXPR in a context
720 that requires a constant expression. */
721
722 void
723 diagnose_non_constexpr_vec_init (tree expr)
724 {
725 tree type = TREE_TYPE (VEC_INIT_EXPR_SLOT (expr));
726 tree init, elt_init;
727 if (VEC_INIT_EXPR_VALUE_INIT (expr))
728 init = void_type_node;
729 else
730 init = VEC_INIT_EXPR_INIT (expr);
731
732 elt_init = build_vec_init_elt (type, init, tf_warning_or_error);
733 require_potential_constant_expression (elt_init);
734 }
735
736 tree
737 build_array_copy (tree init)
738 {
739 return build_vec_init_expr (TREE_TYPE (init), init, tf_warning_or_error);
740 }
741
742 /* Build a TARGET_EXPR using INIT to initialize a new temporary of the
743 indicated TYPE. */
744
745 tree
746 build_target_expr_with_type (tree init, tree type, tsubst_flags_t complain)
747 {
748 gcc_assert (!VOID_TYPE_P (type));
749
750 if (TREE_CODE (init) == TARGET_EXPR
751 || init == error_mark_node)
752 return init;
753 else if (CLASS_TYPE_P (type) && type_has_nontrivial_copy_init (type)
754 && !VOID_TYPE_P (TREE_TYPE (init))
755 && TREE_CODE (init) != COND_EXPR
756 && TREE_CODE (init) != CONSTRUCTOR
757 && TREE_CODE (init) != VA_ARG_EXPR)
758 /* We need to build up a copy constructor call. A void initializer
759 means we're being called from bot_manip. COND_EXPR is a special
760 case because we already have copies on the arms and we don't want
761 another one here. A CONSTRUCTOR is aggregate initialization, which
762 is handled separately. A VA_ARG_EXPR is magic creation of an
763 aggregate; there's no additional work to be done. */
764 return force_rvalue (init, complain);
765
766 return force_target_expr (type, init, complain);
767 }
768
769 /* Like the above function, but without the checking. This function should
770 only be used by code which is deliberately trying to subvert the type
771 system, such as call_builtin_trap. Or build_over_call, to avoid
772 infinite recursion. */
773
774 tree
775 force_target_expr (tree type, tree init, tsubst_flags_t complain)
776 {
777 tree slot;
778
779 gcc_assert (!VOID_TYPE_P (type));
780
781 slot = build_local_temp (type);
782 return build_target_expr (slot, init, complain);
783 }
784
785 /* Like build_target_expr_with_type, but use the type of INIT. */
786
787 tree
788 get_target_expr_sfinae (tree init, tsubst_flags_t complain)
789 {
790 if (TREE_CODE (init) == AGGR_INIT_EXPR)
791 return build_target_expr (AGGR_INIT_EXPR_SLOT (init), init, complain);
792 else if (TREE_CODE (init) == VEC_INIT_EXPR)
793 return build_target_expr (VEC_INIT_EXPR_SLOT (init), init, complain);
794 else
795 {
796 init = convert_bitfield_to_declared_type (init);
797 return build_target_expr_with_type (init, TREE_TYPE (init), complain);
798 }
799 }
800
801 tree
802 get_target_expr (tree init)
803 {
804 return get_target_expr_sfinae (init, tf_warning_or_error);
805 }
806
807 /* If EXPR is a bitfield reference, convert it to the declared type of
808 the bitfield, and return the resulting expression. Otherwise,
809 return EXPR itself. */
810
811 tree
812 convert_bitfield_to_declared_type (tree expr)
813 {
814 tree bitfield_type;
815
816 bitfield_type = is_bitfield_expr_with_lowered_type (expr);
817 if (bitfield_type)
818 expr = convert_to_integer_nofold (TYPE_MAIN_VARIANT (bitfield_type),
819 expr);
820 return expr;
821 }
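
/* For instance, given "struct B { int i : 3; } b;", the expression
   "b.i" may carry a lowered small integral type internally; this
   function hands back the same expression converted to the declared
   type, plain int.  (Illustrative example only.)  */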
822
823 /* EXPR is being used in an rvalue context. Return a version of EXPR
824 that is marked as an rvalue. */
825
826 tree
827 rvalue (tree expr)
828 {
829 tree type;
830
831 if (error_operand_p (expr))
832 return expr;
833
834 expr = mark_rvalue_use (expr);
835
836 /* [basic.lval]
837
838 Non-class rvalues always have cv-unqualified types. */
839 type = TREE_TYPE (expr);
840 if (!CLASS_TYPE_P (type) && cv_qualified_p (type))
841 type = cv_unqualified (type);
842
843 /* We need to do this for rvalue refs as well to get the right answer
844 from decltype; see c++/36628. */
845 if (!processing_template_decl && glvalue_p (expr))
846 expr = build1 (NON_LVALUE_EXPR, type, expr);
847 else if (type != TREE_TYPE (expr))
848 expr = build_nop (type, expr);
849
850 return expr;
851 }
852
853
854 struct cplus_array_info
855 {
856 tree type;
857 tree domain;
858 };
859
860 struct cplus_array_hasher : ggc_ptr_hash<tree_node>
861 {
862 typedef cplus_array_info *compare_type;
863
864 static hashval_t hash (tree t);
865 static bool equal (tree, cplus_array_info *);
866 };
867
868 /* Hash an ARRAY_TYPE. K is really of type `tree'. */
869
870 hashval_t
871 cplus_array_hasher::hash (tree t)
872 {
873 hashval_t hash;
874
875 hash = TYPE_UID (TREE_TYPE (t));
876 if (TYPE_DOMAIN (t))
877 hash ^= TYPE_UID (TYPE_DOMAIN (t));
878 return hash;
879 }
880
881 /* Compare two ARRAY_TYPEs. K1 is really of type `tree', K2 is really
882 of type `cplus_array_info*'. */
883
884 bool
885 cplus_array_hasher::equal (tree t1, cplus_array_info *t2)
886 {
887 return (TREE_TYPE (t1) == t2->type && TYPE_DOMAIN (t1) == t2->domain);
888 }
889
890 /* Hash table containing dependent array types, which are unsuitable for
891 the language-independent type hash table. */
892 static GTY (()) hash_table<cplus_array_hasher> *cplus_array_htab;
893
894 /* Build an ARRAY_TYPE without laying it out. */
895
896 static tree
897 build_min_array_type (tree elt_type, tree index_type)
898 {
899 tree t = cxx_make_type (ARRAY_TYPE);
900 TREE_TYPE (t) = elt_type;
901 TYPE_DOMAIN (t) = index_type;
902 return t;
903 }
904
905 /* Set TYPE_CANONICAL like build_array_type_1, but using
906 build_cplus_array_type. */
907
908 static void
909 set_array_type_canon (tree t, tree elt_type, tree index_type)
910 {
911 /* Set the canonical type for this new node. */
912 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
913 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
914 SET_TYPE_STRUCTURAL_EQUALITY (t);
915 else if (TYPE_CANONICAL (elt_type) != elt_type
916 || (index_type && TYPE_CANONICAL (index_type) != index_type))
917 TYPE_CANONICAL (t)
918 = build_cplus_array_type (TYPE_CANONICAL (elt_type),
919 index_type
920 ? TYPE_CANONICAL (index_type) : index_type);
921 else
922 TYPE_CANONICAL (t) = t;
923 }
924
925 /* Like build_array_type, but handle special C++ semantics: an array of a
926 variant element type is a variant of the array of the main variant of
927 the element type. */
928
929 tree
930 build_cplus_array_type (tree elt_type, tree index_type)
931 {
932 tree t;
933
934 if (elt_type == error_mark_node || index_type == error_mark_node)
935 return error_mark_node;
936
937 bool dependent = (uses_template_parms (elt_type)
938 || (index_type && uses_template_parms (index_type)));
939
940 if (elt_type != TYPE_MAIN_VARIANT (elt_type))
941 /* Start with an array of the TYPE_MAIN_VARIANT. */
942 t = build_cplus_array_type (TYPE_MAIN_VARIANT (elt_type),
943 index_type);
944 else if (dependent)
945 {
946 /* Since type_hash_canon calls layout_type, we need to use our own
947 hash table. */
948 cplus_array_info cai;
949 hashval_t hash;
950
951 if (cplus_array_htab == NULL)
952 cplus_array_htab = hash_table<cplus_array_hasher>::create_ggc (61);
953
954 hash = TYPE_UID (elt_type);
955 if (index_type)
956 hash ^= TYPE_UID (index_type);
957 cai.type = elt_type;
958 cai.domain = index_type;
959
960 tree *e = cplus_array_htab->find_slot_with_hash (&cai, hash, INSERT);
961 if (*e)
962 /* We have found the type: we're done. */
963 return (tree) *e;
964 else
965 {
966 /* Build a new array type. */
967 t = build_min_array_type (elt_type, index_type);
968
969 /* Store it in the hash table. */
970 *e = t;
971
972 /* Set the canonical type for this new node. */
973 set_array_type_canon (t, elt_type, index_type);
974 }
975 }
976 else
977 {
978 bool typeless_storage
979 = (elt_type == unsigned_char_type_node
980 || elt_type == signed_char_type_node
981 || elt_type == char_type_node
982 || (TREE_CODE (elt_type) == ENUMERAL_TYPE
983 && TYPE_CONTEXT (elt_type) == std_node
984 && !strcmp ("byte", TYPE_NAME_STRING (elt_type))));
985 t = build_array_type (elt_type, index_type, typeless_storage);
986 }
987
988 /* Now check whether we already have this array variant. */
989 if (elt_type != TYPE_MAIN_VARIANT (elt_type))
990 {
991 tree m = t;
992 for (t = m; t; t = TYPE_NEXT_VARIANT (t))
993 if (TREE_TYPE (t) == elt_type
994 && TYPE_NAME (t) == NULL_TREE
995 && TYPE_ATTRIBUTES (t) == NULL_TREE)
996 break;
997 if (!t)
998 {
999 t = build_min_array_type (elt_type, index_type);
1000 set_array_type_canon (t, elt_type, index_type);
1001 if (!dependent)
1002 {
1003 layout_type (t);
1004 /* Make sure sizes are shared with the main variant.
1005 layout_type can't be called after setting TYPE_NEXT_VARIANT,
1006 as it will overwrite alignment etc. of all variants. */
1007 TYPE_SIZE (t) = TYPE_SIZE (m);
1008 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (m);
1009 TYPE_TYPELESS_STORAGE (t) = TYPE_TYPELESS_STORAGE (m);
1010 }
1011
1012 TYPE_MAIN_VARIANT (t) = m;
1013 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
1014 TYPE_NEXT_VARIANT (m) = t;
1015 }
1016 }
1017
1018 /* Avoid spurious warnings with VLAs (c++/54583). */
1019 if (TYPE_SIZE (t) && EXPR_P (TYPE_SIZE (t)))
1020 TREE_NO_WARNING (TYPE_SIZE (t)) = 1;
1021
1022 /* Push these needs up to the ARRAY_TYPE so that initialization takes
1023 place more easily. */
1024 bool needs_ctor = (TYPE_NEEDS_CONSTRUCTING (t)
1025 = TYPE_NEEDS_CONSTRUCTING (elt_type));
1026 bool needs_dtor = (TYPE_HAS_NONTRIVIAL_DESTRUCTOR (t)
1027 = TYPE_HAS_NONTRIVIAL_DESTRUCTOR (elt_type));
1028
1029 if (!dependent && t == TYPE_MAIN_VARIANT (t)
1030 && !COMPLETE_TYPE_P (t) && COMPLETE_TYPE_P (elt_type))
1031 {
1032 /* The element type has been completed since the last time we saw
1033 this array type; update the layout and 'tor flags for any variants
1034 that need it. */
1035 layout_type (t);
1036 for (tree v = TYPE_NEXT_VARIANT (t); v; v = TYPE_NEXT_VARIANT (v))
1037 {
1038 TYPE_NEEDS_CONSTRUCTING (v) = needs_ctor;
1039 TYPE_HAS_NONTRIVIAL_DESTRUCTOR (v) = needs_dtor;
1040 }
1041 }
1042
1043 return t;
1044 }
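
/* Example of the C++-specific variant handling above: asking for
   "const int[5]" first builds the main variant "int[5]" and then
   chains the qualified array off it as a variant, sharing
   TYPE_MAIN_VARIANT, TYPE_SIZE and layout.  (Informal summary.)  */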
1045
1046 /* Return an ARRAY_TYPE with element type ELT and length N. */
1047
1048 tree
1049 build_array_of_n_type (tree elt, int n)
1050 {
1051 return build_cplus_array_type (elt, build_index_type (size_int (n - 1)));
1052 }
1053
1054 /* True iff T is an N3639 array of runtime bound (VLA). These were approved
1055 for C++14 but then removed. This should only be used for N3639
1056 specifically; code wondering more generally if something is a VLA should use
1057 vla_type_p. */
1058
1059 bool
1060 array_of_runtime_bound_p (tree t)
1061 {
1062 if (!t || TREE_CODE (t) != ARRAY_TYPE)
1063 return false;
1064 if (variably_modified_type_p (TREE_TYPE (t), NULL_TREE))
1065 return false;
1066 tree dom = TYPE_DOMAIN (t);
1067 if (!dom)
1068 return false;
1069 tree max = TYPE_MAX_VALUE (dom);
1070 return (!potential_rvalue_constant_expression (max)
1071 || (!value_dependent_expression_p (max) && !TREE_CONSTANT (max)));
1072 }
1073
1074 /* True iff T is a variable length array. */
1075
1076 bool
1077 vla_type_p (tree t)
1078 {
1079 for (; t && TREE_CODE (t) == ARRAY_TYPE;
1080 t = TREE_TYPE (t))
1081 if (tree dom = TYPE_DOMAIN (t))
1082 {
1083 tree max = TYPE_MAX_VALUE (dom);
1084 if (!potential_rvalue_constant_expression (max)
1085 || (!value_dependent_expression_p (max) && !TREE_CONSTANT (max)))
1086 return true;
1087 }
1088 return false;
1089 }
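
/* A hedged illustration of the distinction drawn above: inside
   "void f (int n)", the type of "int a[n]" satisfies vla_type_p
   (its bound is neither constant nor value-dependent), whereas
   "T a[N]" with a value-dependent template parameter N does not.  */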
1090
1091 /* Return a reference type node referring to TO_TYPE. If RVAL is
1092 true, return an rvalue reference type, otherwise return an lvalue
1093 reference type. If a type node exists, reuse it, otherwise create
1094 a new one. */
1095 tree
1096 cp_build_reference_type (tree to_type, bool rval)
1097 {
1098 tree lvalue_ref, t;
1099
1100 if (to_type == error_mark_node)
1101 return error_mark_node;
1102
1103 if (TREE_CODE (to_type) == REFERENCE_TYPE)
1104 {
1105 rval = rval && TYPE_REF_IS_RVALUE (to_type);
1106 to_type = TREE_TYPE (to_type);
1107 }
1108
1109 lvalue_ref = build_reference_type (to_type);
1110 if (!rval)
1111 return lvalue_ref;
1112
1113 /* This code to create rvalue reference types is based on and tied
1114 to the code creating lvalue reference types in the middle-end
1115 functions build_reference_type_for_mode and build_reference_type.
1116
1117 It works by putting the rvalue reference type nodes after the
1118 lvalue reference nodes in the TYPE_NEXT_REF_TO linked list, so
1119 they will effectively be ignored by the middle end. */
1120
1121 for (t = lvalue_ref; (t = TYPE_NEXT_REF_TO (t)); )
1122 if (TYPE_REF_IS_RVALUE (t))
1123 return t;
1124
1125 t = build_distinct_type_copy (lvalue_ref);
1126
1127 TYPE_REF_IS_RVALUE (t) = true;
1128 TYPE_NEXT_REF_TO (t) = TYPE_NEXT_REF_TO (lvalue_ref);
1129 TYPE_NEXT_REF_TO (lvalue_ref) = t;
1130
1131 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
1132 SET_TYPE_STRUCTURAL_EQUALITY (t);
1133 else if (TYPE_CANONICAL (to_type) != to_type)
1134 TYPE_CANONICAL (t)
1135 = cp_build_reference_type (TYPE_CANONICAL (to_type), rval);
1136 else
1137 TYPE_CANONICAL (t) = t;
1138
1139 layout_type (t);
1140
1141 return t;
1142
1143 }
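
/* Reference collapsing falls out of the REFERENCE_TYPE check above:
   if TO_TYPE is already "int&", cp_build_reference_type (to_type,
   /*rval=*/true) yields "int&" again, matching the language rule
   "T& &&" -> "T&".  (Illustrative example only.)  */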
1144
1145 /* Returns EXPR cast to rvalue reference type, like std::move. */
1146
1147 tree
1148 move (tree expr)
1149 {
1150 tree type = TREE_TYPE (expr);
1151 gcc_assert (TREE_CODE (type) != REFERENCE_TYPE);
1152 type = cp_build_reference_type (type, /*rval*/true);
1153 return build_static_cast (type, expr, tf_warning_or_error);
1154 }
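
/* E.g. for "X x;", move (x) builds the equivalent of
   static_cast<X&&> (x); this internal helper mirrors std::move
   without going through template instantiation.  */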
1155
1156 /* Used by the C++ front end to build qualified array types. However,
1157 the C version of this function does not properly maintain canonical
1158 types (which are not used in C). */
1159 tree
1160 c_build_qualified_type (tree type, int type_quals, tree /* orig_qual_type */,
1161 size_t /* orig_qual_indirect */)
1162 {
1163 return cp_build_qualified_type (type, type_quals);
1164 }
1165
1166
1167 /* Make a variant of TYPE, qualified with the TYPE_QUALS. Handles
1168 arrays correctly. In particular, if TYPE is an array of T's, and
1169 TYPE_QUALS is non-empty, returns an array of qualified T's.
1170
1171 FLAGS determines how to deal with ill-formed qualifications. If
1172 tf_ignore_bad_quals is set, then bad qualifications are dropped
1173 (this is permitted if TYPE was introduced via a typedef or template
1174 type parameter). If bad qualifications are dropped and tf_warning
1175 is set, then a warning is issued for non-const qualifications. If
1176 tf_ignore_bad_quals is not set and tf_error is not set, we
1177 return error_mark_node. Otherwise, we issue an error, and ignore
1178 the qualifications.
1179
1180 Qualification of a reference type is valid when the reference came
1181 via a typedef or template type argument. [dcl.ref] No such
1182 dispensation is provided for qualifying a function type. [dcl.fct]
1183 DR 295 queries this and the proposed resolution brings it into line
1184 with qualifying a reference. We implement the DR. We also behave
1185 in a similar manner for restricting non-pointer types. */
1186
1187 tree
1188 cp_build_qualified_type_real (tree type,
1189 int type_quals,
1190 tsubst_flags_t complain)
1191 {
1192 tree result;
1193 int bad_quals = TYPE_UNQUALIFIED;
1194
1195 if (type == error_mark_node)
1196 return type;
1197
1198 if (type_quals == cp_type_quals (type))
1199 return type;
1200
1201 if (TREE_CODE (type) == ARRAY_TYPE)
1202 {
1203 /* In C++, the qualification really applies to the array element
1204 type. Obtain the appropriately qualified element type. */
1205 tree t;
1206 tree element_type
1207 = cp_build_qualified_type_real (TREE_TYPE (type),
1208 type_quals,
1209 complain);
1210
1211 if (element_type == error_mark_node)
1212 return error_mark_node;
1213
1214 /* See if we already have an identically qualified type. Tests
1215 should be equivalent to those in check_qualified_type. */
1216 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
1217 if (TREE_TYPE (t) == element_type
1218 && TYPE_NAME (t) == TYPE_NAME (type)
1219 && TYPE_CONTEXT (t) == TYPE_CONTEXT (type)
1220 && attribute_list_equal (TYPE_ATTRIBUTES (t),
1221 TYPE_ATTRIBUTES (type)))
1222 break;
1223
1224 if (!t)
1225 {
1226 t = build_cplus_array_type (element_type, TYPE_DOMAIN (type));
1227
1228 /* Keep the typedef name. */
1229 if (TYPE_NAME (t) != TYPE_NAME (type))
1230 {
1231 t = build_variant_type_copy (t);
1232 TYPE_NAME (t) = TYPE_NAME (type);
1233 SET_TYPE_ALIGN (t, TYPE_ALIGN (type));
1234 TYPE_USER_ALIGN (t) = TYPE_USER_ALIGN (type);
1235 }
1236 }
1237
1238 /* Even if we already had this variant, we update
1239 TYPE_NEEDS_CONSTRUCTING and TYPE_HAS_NONTRIVIAL_DESTRUCTOR in case
1240 they changed since the variant was originally created.
1241
1242 This seems hokey; if there is some way to use a previous
1243 variant *without* coming through here,
1244 TYPE_NEEDS_CONSTRUCTING will never be updated. */
1245 TYPE_NEEDS_CONSTRUCTING (t)
1246 = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (element_type));
1247 TYPE_HAS_NONTRIVIAL_DESTRUCTOR (t)
1248 = TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TYPE_MAIN_VARIANT (element_type));
1249 return t;
1250 }
1251 else if (TREE_CODE (type) == TYPE_PACK_EXPANSION)
1252 {
1253 tree t = PACK_EXPANSION_PATTERN (type);
1254
1255 t = cp_build_qualified_type_real (t, type_quals, complain);
1256 return make_pack_expansion (t, complain);
1257 }
1258
1259 /* A reference or method type shall not be cv-qualified.
1260 [dcl.ref], [dcl.fct]. This used to be an error, but as of DR 295
1261 (in CD1) we always ignore extra cv-quals on functions. */
1262 if (type_quals & (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE)
1263 && (TREE_CODE (type) == REFERENCE_TYPE
1264 || TREE_CODE (type) == FUNCTION_TYPE
1265 || TREE_CODE (type) == METHOD_TYPE))
1266 {
1267 if (TREE_CODE (type) == REFERENCE_TYPE)
1268 bad_quals |= type_quals & (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);
1269 type_quals &= ~(TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);
1270 }
1271
1272 /* But preserve any function-cv-quals on a FUNCTION_TYPE. */
1273 if (TREE_CODE (type) == FUNCTION_TYPE)
1274 type_quals |= type_memfn_quals (type);
1275
1276 /* A restrict-qualified type must be a pointer (or reference)
1277 to object or incomplete type. */
1278 if ((type_quals & TYPE_QUAL_RESTRICT)
1279 && TREE_CODE (type) != TEMPLATE_TYPE_PARM
1280 && TREE_CODE (type) != TYPENAME_TYPE
1281 && !POINTER_TYPE_P (type))
1282 {
1283 bad_quals |= TYPE_QUAL_RESTRICT;
1284 type_quals &= ~TYPE_QUAL_RESTRICT;
1285 }
1286
1287 if (bad_quals == TYPE_UNQUALIFIED
1288 || (complain & tf_ignore_bad_quals))
1289 /*OK*/;
1290 else if (!(complain & tf_error))
1291 return error_mark_node;
1292 else
1293 {
1294 tree bad_type = build_qualified_type (ptr_type_node, bad_quals);
1295 error ("%qV qualifiers cannot be applied to %qT",
1296 bad_type, type);
1297 }
1298
1299 /* Retrieve (or create) the appropriately qualified variant. */
1300 result = build_qualified_type (type, type_quals);
1301
1302 /* Preserve exception specs and ref-qualifier since build_qualified_type
1303 doesn't know about them. */
1304 if (TREE_CODE (result) == FUNCTION_TYPE
1305 || TREE_CODE (result) == METHOD_TYPE)
1306 {
1307 result = build_exception_variant (result, TYPE_RAISES_EXCEPTIONS (type));
1308 result = build_ref_qualified_type (result, type_memfn_rqual (type));
1309 }
1310
1311 return result;
1312 }
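
/* Two illustrative consequences of the handling above: qualifying
   "int[5]" with const yields an array of const int, and applying
   __restrict__ to a non-pointer, non-dependent type is diagnosed
   (or the qualifier is silently dropped under tf_ignore_bad_quals,
   or error_mark_node is returned without a diagnostic in SFINAE
   contexts).  */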
1313
1314 /* Return TYPE with const and volatile removed. */
1315
1316 tree
1317 cv_unqualified (tree type)
1318 {
1319 int quals;
1320
1321 if (type == error_mark_node)
1322 return type;
1323
1324 quals = cp_type_quals (type);
1325 quals &= ~(TYPE_QUAL_CONST|TYPE_QUAL_VOLATILE);
1326 return cp_build_qualified_type (type, quals);
1327 }
1328
1329 /* Subroutine of strip_typedefs. We want to apply to RESULT the attributes
1330 from ATTRIBS that affect type identity, and no others. If any are not
1331 applied, set *remove_attributes to true. */
1332
1333 static tree
1334 apply_identity_attributes (tree result, tree attribs, bool *remove_attributes)
1335 {
1336 tree first_ident = NULL_TREE;
1337 tree new_attribs = NULL_TREE;
1338 tree *p = &new_attribs;
1339
1340 if (OVERLOAD_TYPE_P (result))
1341 {
1342 /* On classes and enums all attributes are ingrained. */
1343 gcc_assert (attribs == TYPE_ATTRIBUTES (result));
1344 return result;
1345 }
1346
1347 for (tree a = attribs; a; a = TREE_CHAIN (a))
1348 {
1349 const attribute_spec *as
1350 = lookup_attribute_spec (get_attribute_name (a));
1351 if (as && as->affects_type_identity)
1352 {
1353 if (!first_ident)
1354 first_ident = a;
1355 else if (first_ident == error_mark_node)
1356 {
1357 *p = tree_cons (TREE_PURPOSE (a), TREE_VALUE (a), NULL_TREE);
1358 p = &TREE_CHAIN (*p);
1359 }
1360 }
1361 else if (first_ident)
1362 {
1363 for (tree a2 = first_ident; a2; a2 = TREE_CHAIN (a2))
1364 {
1365 *p = tree_cons (TREE_PURPOSE (a2), TREE_VALUE (a2), NULL_TREE);
1366 p = &TREE_CHAIN (*p);
1367 }
1368 first_ident = error_mark_node;
1369 }
1370 }
1371 if (first_ident != error_mark_node)
1372 new_attribs = first_ident;
1373
1374 if (first_ident == attribs)
1375 /* All attributes affected type identity. */;
1376 else
1377 *remove_attributes = true;
1378
1379 return cp_build_type_attribute_variant (result, new_attribs);
1380 }
1381
1382 /* Builds a qualified variant of T that is not a typedef variant.
1383 E.g. consider the following declarations:
1384 typedef const int ConstInt;
1385 typedef ConstInt* PtrConstInt;
1386 If T is PtrConstInt, this function returns a type representing
1387 const int*.
1388 In other words, if T is a typedef, the function returns the underlying type.
1389 The cv-qualification and attributes of the type returned match the
1390 input type.
1391 They will always be compatible types.
1392 The returned type is built so that all of its subtypes
1393 recursively have their typedefs stripped as well.
1394
1395 This is different from just returning TYPE_CANONICAL (T)
1396 for several reasons:
1397 * If T is a type that needs structural equality
1398 its TYPE_CANONICAL (T) will be NULL.
1399 * TYPE_CANONICAL (T) doesn't carry type attributes
1400 and loses template parameter names.
1401
1402 If REMOVE_ATTRIBUTES is non-null, also strip attributes that don't
1403 affect type identity, and set the referent to true if any were
1404 stripped. */
1405
1406 tree
1407 strip_typedefs (tree t, bool *remove_attributes)
1408 {
1409 tree result = NULL, type = NULL, t0 = NULL;
1410
1411 if (!t || t == error_mark_node)
1412 return t;
1413
1414 if (TREE_CODE (t) == TREE_LIST)
1415 {
1416 bool changed = false;
1417 vec<tree,va_gc> *vec = make_tree_vector ();
1418 tree r = t;
1419 for (; t; t = TREE_CHAIN (t))
1420 {
1421 gcc_assert (!TREE_PURPOSE (t));
1422 tree elt = strip_typedefs (TREE_VALUE (t), remove_attributes);
1423 if (elt != TREE_VALUE (t))
1424 changed = true;
1425 vec_safe_push (vec, elt);
1426 }
1427 if (changed)
1428 r = build_tree_list_vec (vec);
1429 release_tree_vector (vec);
1430 return r;
1431 }
1432
1433 gcc_assert (TYPE_P (t));
1434
1435 if (t == TYPE_CANONICAL (t))
1436 return t;
1437
1438 if (dependent_alias_template_spec_p (t))
1439 /* DR 1558: However, if the template-id is dependent, subsequent
1440 template argument substitution still applies to the template-id. */
1441 return t;
1442
1443 switch (TREE_CODE (t))
1444 {
1445 case POINTER_TYPE:
1446 type = strip_typedefs (TREE_TYPE (t), remove_attributes);
1447 result = build_pointer_type (type);
1448 break;
1449 case REFERENCE_TYPE:
1450 type = strip_typedefs (TREE_TYPE (t), remove_attributes);
1451 result = cp_build_reference_type (type, TYPE_REF_IS_RVALUE (t));
1452 break;
1453 case OFFSET_TYPE:
1454 t0 = strip_typedefs (TYPE_OFFSET_BASETYPE (t), remove_attributes);
1455 type = strip_typedefs (TREE_TYPE (t), remove_attributes);
1456 result = build_offset_type (t0, type);
1457 break;
1458 case RECORD_TYPE:
1459 if (TYPE_PTRMEMFUNC_P (t))
1460 {
1461 t0 = strip_typedefs (TYPE_PTRMEMFUNC_FN_TYPE (t), remove_attributes);
1462 result = build_ptrmemfunc_type (t0);
1463 }
1464 break;
1465 case ARRAY_TYPE:
1466 type = strip_typedefs (TREE_TYPE (t), remove_attributes);
1467 t0 = strip_typedefs (TYPE_DOMAIN (t), remove_attributes);
1468 result = build_cplus_array_type (type, t0);
1469 break;
1470 case FUNCTION_TYPE:
1471 case METHOD_TYPE:
1472 {
1473 tree arg_types = NULL, arg_node, arg_node2, arg_type;
1474 bool changed;
1475
1476 /* Because we stomp on TREE_PURPOSE of TYPE_ARG_TYPES in many places
1477 around the compiler (e.g. cp_parser_late_parsing_default_args), we
1478 can't expect that re-hashing a function type will find a previous
1479 equivalent type, so try to reuse the input type if nothing has
1480 changed. If the type is itself a variant, that will change. */
1481 bool is_variant = typedef_variant_p (t);
1482 if (remove_attributes
1483 && (TYPE_ATTRIBUTES (t) || TYPE_USER_ALIGN (t)))
1484 is_variant = true;
1485
1486 type = strip_typedefs (TREE_TYPE (t), remove_attributes);
1487 tree canon_spec = (flag_noexcept_type
1488 ? canonical_eh_spec (TYPE_RAISES_EXCEPTIONS (t))
1489 : NULL_TREE);
1490 changed = (type != TREE_TYPE (t) || is_variant
1491 || TYPE_RAISES_EXCEPTIONS (t) != canon_spec);
1492
1493 for (arg_node = TYPE_ARG_TYPES (t);
1494 arg_node;
1495 arg_node = TREE_CHAIN (arg_node))
1496 {
1497 if (arg_node == void_list_node)
1498 break;
1499 arg_type = strip_typedefs (TREE_VALUE (arg_node),
1500 remove_attributes);
1501 gcc_assert (arg_type);
1502 if (arg_type == TREE_VALUE (arg_node) && !changed)
1503 continue;
1504
1505 if (!changed)
1506 {
1507 changed = true;
1508 for (arg_node2 = TYPE_ARG_TYPES (t);
1509 arg_node2 != arg_node;
1510 arg_node2 = TREE_CHAIN (arg_node2))
1511 arg_types
1512 = tree_cons (TREE_PURPOSE (arg_node2),
1513 TREE_VALUE (arg_node2), arg_types);
1514 }
1515
1516 arg_types
1517 = tree_cons (TREE_PURPOSE (arg_node), arg_type, arg_types);
1518 }
1519
1520 if (!changed)
1521 return t;
1522
1523 if (arg_types)
1524 arg_types = nreverse (arg_types);
1525
1526 /* A list of parameters not ending with an ellipsis
1527 must end with void_list_node. */
1528 if (arg_node)
1529 arg_types = chainon (arg_types, void_list_node);
1530
1531 if (TREE_CODE (t) == METHOD_TYPE)
1532 {
1533 tree class_type = TREE_TYPE (TREE_VALUE (arg_types));
1534 gcc_assert (class_type);
1535 result =
1536 build_method_type_directly (class_type, type,
1537 TREE_CHAIN (arg_types));
1538 result
1539 = build_ref_qualified_type (result, type_memfn_rqual (t));
1540 }
1541 else
1542 {
1543 result = build_function_type (type,
1544 arg_types);
1545 result = apply_memfn_quals (result,
1546 type_memfn_quals (t),
1547 type_memfn_rqual (t));
1548 }
1549
1550 if (canon_spec)
1551 result = build_exception_variant (result, canon_spec);
1552 if (TYPE_HAS_LATE_RETURN_TYPE (t))
1553 TYPE_HAS_LATE_RETURN_TYPE (result) = 1;
1554 }
1555 break;
1556 case TYPENAME_TYPE:
1557 {
1558 bool changed = false;
1559 tree fullname = TYPENAME_TYPE_FULLNAME (t);
1560 if (TREE_CODE (fullname) == TEMPLATE_ID_EXPR
1561 && TREE_OPERAND (fullname, 1))
1562 {
1563 tree args = TREE_OPERAND (fullname, 1);
1564 tree new_args = copy_node (args);
1565 for (int i = 0; i < TREE_VEC_LENGTH (args); ++i)
1566 {
1567 tree arg = TREE_VEC_ELT (args, i);
1568 tree strip_arg;
1569 if (TYPE_P (arg))
1570 strip_arg = strip_typedefs (arg, remove_attributes);
1571 else
1572 strip_arg = strip_typedefs_expr (arg, remove_attributes);
1573 TREE_VEC_ELT (new_args, i) = strip_arg;
1574 if (strip_arg != arg)
1575 changed = true;
1576 }
1577 if (changed)
1578 {
1579 NON_DEFAULT_TEMPLATE_ARGS_COUNT (new_args)
1580 = NON_DEFAULT_TEMPLATE_ARGS_COUNT (args);
1581 fullname
1582 = lookup_template_function (TREE_OPERAND (fullname, 0),
1583 new_args);
1584 }
1585 else
1586 ggc_free (new_args);
1587 }
1588 tree ctx = strip_typedefs (TYPE_CONTEXT (t), remove_attributes);
1589 if (!changed && ctx == TYPE_CONTEXT (t) && !typedef_variant_p (t))
1590 return t;
1591 tree name = fullname;
1592 if (TREE_CODE (fullname) == TEMPLATE_ID_EXPR)
1593 name = TREE_OPERAND (fullname, 0);
1594 /* Use build_typename_type rather than make_typename_type because we
1595 don't want to resolve it here, just strip typedefs. */
1596 result = build_typename_type (ctx, name, fullname, typename_type);
1597 }
1598 break;
1599 case DECLTYPE_TYPE:
1600 result = strip_typedefs_expr (DECLTYPE_TYPE_EXPR (t),
1601 remove_attributes);
1602 if (result == DECLTYPE_TYPE_EXPR (t))
1603 result = NULL_TREE;
1604 else
1605 result = (finish_decltype_type
1606 (result,
1607 DECLTYPE_TYPE_ID_EXPR_OR_MEMBER_ACCESS_P (t),
1608 tf_none));
1609 break;
1610 case UNDERLYING_TYPE:
1611 type = strip_typedefs (UNDERLYING_TYPE_TYPE (t), remove_attributes);
1612 result = finish_underlying_type (type);
1613 break;
1614 default:
1615 break;
1616 }
1617
1618 if (!result)
1619 {
1620 if (typedef_variant_p (t))
1621 {
1622 /* Explicitly get the underlying type, as TYPE_MAIN_VARIANT doesn't
1623 strip typedefs with attributes. */
1624 result = TYPE_MAIN_VARIANT (DECL_ORIGINAL_TYPE (TYPE_NAME (t)));
1625 result = strip_typedefs (result);
1626 }
1627 else
1628 result = TYPE_MAIN_VARIANT (t);
1629 }
1630 gcc_assert (!typedef_variant_p (result));
1631
1632 if (COMPLETE_TYPE_P (result) && !COMPLETE_TYPE_P (t))
1633 /* If RESULT is complete and T isn't, it's likely the case that T
1634 is a variant of RESULT which hasn't been updated yet. Skip the
1635 attribute handling. */;
1636 else
1637 {
1638 if (TYPE_USER_ALIGN (t) != TYPE_USER_ALIGN (result)
1639 || TYPE_ALIGN (t) != TYPE_ALIGN (result))
1640 {
1641 gcc_assert (TYPE_USER_ALIGN (t));
1642 if (remove_attributes)
1643 *remove_attributes = true;
1644 else
1645 {
1646 if (TYPE_ALIGN (t) == TYPE_ALIGN (result))
1647 result = build_variant_type_copy (result);
1648 else
1649 result = build_aligned_type (result, TYPE_ALIGN (t));
1650 TYPE_USER_ALIGN (result) = true;
1651 }
1652 }
1653
1654 if (TYPE_ATTRIBUTES (t))
1655 {
1656 if (remove_attributes)
1657 result = apply_identity_attributes (result, TYPE_ATTRIBUTES (t),
1658 remove_attributes);
1659 else
1660 result = cp_build_type_attribute_variant (result,
1661 TYPE_ATTRIBUTES (t));
1662 }
1663 }
1664
1665 return cp_build_qualified_type (result, cp_type_quals (t));
1666 }
1667
1668 /* Like strip_typedefs above, but works on expressions, so that in
1669
1670 template<class T> struct A
1671 {
1672 typedef T TT;
1673 B<sizeof(TT)> b;
1674 };
1675
1676 sizeof(TT) is replaced by sizeof(T). */
1677
1678 tree
1679 strip_typedefs_expr (tree t, bool *remove_attributes)
1680 {
1681 unsigned i,n;
1682 tree r, type, *ops;
1683 enum tree_code code;
1684
1685 if (t == NULL_TREE || t == error_mark_node)
1686 return t;
1687
1688 if (DECL_P (t) || CONSTANT_CLASS_P (t))
1689 return t;
1690
1691 /* Some expressions have type operands, so let's handle types here rather
1692 than check TYPE_P in multiple places below. */
1693 if (TYPE_P (t))
1694 return strip_typedefs (t, remove_attributes);
1695
1696 code = TREE_CODE (t);
1697 switch (code)
1698 {
1699 case IDENTIFIER_NODE:
1700 case TEMPLATE_PARM_INDEX:
1701 case OVERLOAD:
1702 case BASELINK:
1703 case ARGUMENT_PACK_SELECT:
1704 return t;
1705
1706 case TRAIT_EXPR:
1707 {
1708 tree type1 = strip_typedefs (TRAIT_EXPR_TYPE1 (t), remove_attributes);
1709 tree type2 = strip_typedefs (TRAIT_EXPR_TYPE2 (t), remove_attributes);
1710 if (type1 == TRAIT_EXPR_TYPE1 (t)
1711 && type2 == TRAIT_EXPR_TYPE2 (t))
1712 return t;
1713 r = copy_node (t);
1714 TRAIT_EXPR_TYPE1 (r) = type1;
1715 TRAIT_EXPR_TYPE2 (r) = type2;
1716 return r;
1717 }
1718
1719 case TREE_LIST:
1720 {
1721 vec<tree, va_gc> *vec = make_tree_vector ();
1722 bool changed = false;
1723 tree it;
1724 for (it = t; it; it = TREE_CHAIN (it))
1725 {
1726 tree val = strip_typedefs_expr (TREE_VALUE (it), remove_attributes);
1727 vec_safe_push (vec, val);
1728 if (val != TREE_VALUE (it))
1729 changed = true;
1730 gcc_assert (TREE_PURPOSE (it) == NULL_TREE);
1731 }
1732 if (changed)
1733 {
1734 r = NULL_TREE;
1735 FOR_EACH_VEC_ELT_REVERSE (*vec, i, it)
1736 r = tree_cons (NULL_TREE, it, r);
1737 }
1738 else
1739 r = t;
1740 release_tree_vector (vec);
1741 return r;
1742 }
1743
1744 case TREE_VEC:
1745 {
1746 bool changed = false;
1747 vec<tree, va_gc> *vec = make_tree_vector ();
1748 n = TREE_VEC_LENGTH (t);
1749 vec_safe_reserve (vec, n);
1750 for (i = 0; i < n; ++i)
1751 {
1752 tree op = strip_typedefs_expr (TREE_VEC_ELT (t, i),
1753 remove_attributes);
1754 vec->quick_push (op);
1755 if (op != TREE_VEC_ELT (t, i))
1756 changed = true;
1757 }
1758 if (changed)
1759 {
1760 r = copy_node (t);
1761 for (i = 0; i < n; ++i)
1762 TREE_VEC_ELT (r, i) = (*vec)[i];
1763 NON_DEFAULT_TEMPLATE_ARGS_COUNT (r)
1764 = NON_DEFAULT_TEMPLATE_ARGS_COUNT (t);
1765 }
1766 else
1767 r = t;
1768 release_tree_vector (vec);
1769 return r;
1770 }
1771
1772 case CONSTRUCTOR:
1773 {
1774 bool changed = false;
1775 vec<constructor_elt, va_gc> *vec
1776 = vec_safe_copy (CONSTRUCTOR_ELTS (t));
1777 n = CONSTRUCTOR_NELTS (t);
1778 type = strip_typedefs (TREE_TYPE (t), remove_attributes);
1779 for (i = 0; i < n; ++i)
1780 {
1781 constructor_elt *e = &(*vec)[i];
1782 tree op = strip_typedefs_expr (e->value, remove_attributes);
1783 if (op != e->value)
1784 {
1785 changed = true;
1786 e->value = op;
1787 }
1788 gcc_checking_assert
1789 (e->index == strip_typedefs_expr (e->index, remove_attributes));
1790 }
1791
1792 if (!changed && type == TREE_TYPE (t))
1793 {
1794 vec_free (vec);
1795 return t;
1796 }
1797 else
1798 {
1799 r = copy_node (t);
1800 TREE_TYPE (r) = type;
1801 CONSTRUCTOR_ELTS (r) = vec;
1802 return r;
1803 }
1804 }
1805
1806 case LAMBDA_EXPR:
1807 error ("lambda-expression in a constant expression");
1808 return error_mark_node;
1809
1810 case STATEMENT_LIST:
1811 error ("statement-expression in a constant expression");
1812 return error_mark_node;
1813
1814 default:
1815 break;
1816 }
1817
1818 gcc_assert (EXPR_P (t));
1819
1820 n = cp_tree_operand_length (t);
1821 ops = XALLOCAVEC (tree, n);
1822 type = TREE_TYPE (t);
1823
1824 switch (code)
1825 {
1826 CASE_CONVERT:
1827 case IMPLICIT_CONV_EXPR:
1828 case DYNAMIC_CAST_EXPR:
1829 case STATIC_CAST_EXPR:
1830 case CONST_CAST_EXPR:
1831 case REINTERPRET_CAST_EXPR:
1832 case CAST_EXPR:
1833 case NEW_EXPR:
1834 type = strip_typedefs (type, remove_attributes);
1835 /* fallthrough */
1836
1837 default:
1838 for (i = 0; i < n; ++i)
1839 ops[i] = strip_typedefs_expr (TREE_OPERAND (t, i), remove_attributes);
1840 break;
1841 }
1842
1843 /* If nothing changed, return t. */
1844 for (i = 0; i < n; ++i)
1845 if (ops[i] != TREE_OPERAND (t, i))
1846 break;
1847 if (i == n && type == TREE_TYPE (t))
1848 return t;
1849
1850 r = copy_node (t);
1851 TREE_TYPE (r) = type;
1852 for (i = 0; i < n; ++i)
1853 TREE_OPERAND (r, i) = ops[i];
1854 return r;
1855 }
1856
1857 /* Makes a copy of BINFO and TYPE, which is to be inherited into a
1858 graph dominated by T. If BINFO is NULL, TYPE is a dependent base,
1859 and we do a shallow copy. If BINFO is non-NULL, we do a deep copy.
1860 VIRT indicates whether TYPE is inherited virtually or not.
1861 IGO_PREV points at the previous binfo of the inheritance graph
1862 order chain. The newly copied binfo's TREE_CHAIN forms this
1863 ordering.
1864
1865 The CLASSTYPE_VBASECLASSES vector of T is constructed in the
1866 correct order. That is in the order the bases themselves should be
1867 constructed in.
1868
1869 The BINFO_INHERITANCE of a virtual base class points to the binfo
1870 of the most derived type. ??? We could probably change this so that
1871 BINFO_INHERITANCE becomes synonymous with BINFO_PRIMARY, and hence
1872 remove a field. They currently can only differ for primary
1873 virtual bases. */
1874
1875 tree
1876 copy_binfo (tree binfo, tree type, tree t, tree *igo_prev, int virt)
1877 {
1878 tree new_binfo;
1879
1880 if (virt)
1881 {
1882 /* See if we've already made this virtual base. */
1883 new_binfo = binfo_for_vbase (type, t);
1884 if (new_binfo)
1885 return new_binfo;
1886 }
1887
1888 new_binfo = make_tree_binfo (binfo ? BINFO_N_BASE_BINFOS (binfo) : 0);
1889 BINFO_TYPE (new_binfo) = type;
1890
1891 /* Chain it into the inheritance graph. */
1892 TREE_CHAIN (*igo_prev) = new_binfo;
1893 *igo_prev = new_binfo;
1894
1895 if (binfo && !BINFO_DEPENDENT_BASE_P (binfo))
1896 {
1897 int ix;
1898 tree base_binfo;
1899
1900 gcc_assert (SAME_BINFO_TYPE_P (BINFO_TYPE (binfo), type));
1901
1902 BINFO_OFFSET (new_binfo) = BINFO_OFFSET (binfo);
1903 BINFO_VIRTUALS (new_binfo) = BINFO_VIRTUALS (binfo);
1904
1905 /* We do not need to copy the accesses, as they are read only. */
1906 BINFO_BASE_ACCESSES (new_binfo) = BINFO_BASE_ACCESSES (binfo);
1907
1908 /* Recursively copy base binfos of BINFO. */
1909 for (ix = 0; BINFO_BASE_ITERATE (binfo, ix, base_binfo); ix++)
1910 {
1911 tree new_base_binfo;
1912 new_base_binfo = copy_binfo (base_binfo, BINFO_TYPE (base_binfo),
1913 t, igo_prev,
1914 BINFO_VIRTUAL_P (base_binfo));
1915
1916 if (!BINFO_INHERITANCE_CHAIN (new_base_binfo))
1917 BINFO_INHERITANCE_CHAIN (new_base_binfo) = new_binfo;
1918 BINFO_BASE_APPEND (new_binfo, new_base_binfo);
1919 }
1920 }
1921 else
1922 BINFO_DEPENDENT_BASE_P (new_binfo) = 1;
1923
1924 if (virt)
1925 {
1926 /* Push it onto the list after any virtual bases it contains
1927 will have been pushed. */
1928 CLASSTYPE_VBASECLASSES (t)->quick_push (new_binfo);
1929 BINFO_VIRTUAL_P (new_binfo) = 1;
1930 BINFO_INHERITANCE_CHAIN (new_binfo) = TYPE_BINFO (t);
1931 }
1932
1933 return new_binfo;
1934 }
1935
1936 /* Hashing of lists so that we don't make duplicates.
1937 The entry point is `hash_tree_cons'. */
1938
1939 struct list_proxy
1940 {
1941 tree purpose;
1942 tree value;
1943 tree chain;
1944 };
1945
1946 struct list_hasher : ggc_ptr_hash<tree_node>
1947 {
1948 typedef list_proxy *compare_type;
1949
1950 static hashval_t hash (tree);
1951 static bool equal (tree, list_proxy *);
1952 };
1953
1954 /* Now here is the hash table. When recording a list, it is added
1955 to the slot whose index is the hash code mod the table size.
1956 Note that the hash table is used for several kinds of lists.
1957 While all these live in the same table, they are completely independent,
1958 and the hash code is computed differently for each of these. */
1959
1960 static GTY (()) hash_table<list_hasher> *list_hash_table;
1961
1962 /* Compare ENTRY (an entry in the hash table) with DATA (a list_proxy
1963 for a node we are thinking about adding). */
1964
1965 bool
1966 list_hasher::equal (tree t, list_proxy *proxy)
1967 {
1968 return (TREE_VALUE (t) == proxy->value
1969 && TREE_PURPOSE (t) == proxy->purpose
1970 && TREE_CHAIN (t) == proxy->chain);
1971 }
1972
1973 /* Compute a hash code for a list (chain of TREE_LIST nodes
1974 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
1975 TREE_COMMON slots), by adding the hash codes of the individual entries. */
1976
1977 static hashval_t
1978 list_hash_pieces (tree purpose, tree value, tree chain)
1979 {
1980 hashval_t hashcode = 0;
1981
1982 if (chain)
1983 hashcode += TREE_HASH (chain);
1984
1985 if (value)
1986 hashcode += TREE_HASH (value);
1987 else
1988 hashcode += 1007;
1989 if (purpose)
1990 hashcode += TREE_HASH (purpose);
1991 else
1992 hashcode += 1009;
1993 return hashcode;
1994 }
1995
1996 /* Hash an already existing TREE_LIST. */
1997
1998 hashval_t
1999 list_hasher::hash (tree t)
2000 {
2001 return list_hash_pieces (TREE_PURPOSE (t),
2002 TREE_VALUE (t),
2003 TREE_CHAIN (t));
2004 }
2005
2006 /* Given list components PURPOSE, VALUE, and CHAIN, return the canonical
2007 object for an identical list if one already exists. Otherwise, build a
2008 new one, and record it as the canonical object. */
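/* Illustrative sketch (editorial, not part of the original sources): because
   the table canonicalizes on the (PURPOSE, VALUE, CHAIN) triple, building the
   same list twice yields the very same node, so such lists can be compared by
   pointer identity:

     tree l1 = hash_tree_cons (NULL_TREE, integer_type_node, NULL_TREE);
     tree l2 = hash_tree_cons (NULL_TREE, integer_type_node, NULL_TREE);
     gcc_assert (l1 == l2);  */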
2009
2010 tree
2011 hash_tree_cons (tree purpose, tree value, tree chain)
2012 {
2013 int hashcode = 0;
2014 tree *slot;
2015 struct list_proxy proxy;
2016
2017 /* Hash the list node. */
2018 hashcode = list_hash_pieces (purpose, value, chain);
2019 /* Create a proxy for the TREE_LIST we would like to create. We
2020 don't actually create it so as to avoid creating garbage. */
2021 proxy.purpose = purpose;
2022 proxy.value = value;
2023 proxy.chain = chain;
2024 /* See if it is already in the table. */
2025 slot = list_hash_table->find_slot_with_hash (&proxy, hashcode, INSERT);
2026 /* If not, create a new node. */
2027 if (!*slot)
2028 *slot = tree_cons (purpose, value, chain);
2029 return (tree) *slot;
2030 }
2031
2032 /* Constructor for hashed lists. */
2033
2034 tree
2035 hash_tree_chain (tree value, tree chain)
2036 {
2037 return hash_tree_cons (NULL_TREE, value, chain);
2038 }
2039
2040 void
2041 debug_binfo (tree elem)
2042 {
2043 HOST_WIDE_INT n;
2044 tree virtuals;
2045
2046 fprintf (stderr, "type \"%s\", offset = " HOST_WIDE_INT_PRINT_DEC
2047 "\nvtable type:\n",
2048 TYPE_NAME_STRING (BINFO_TYPE (elem)),
2049 TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
2050 debug_tree (BINFO_TYPE (elem));
2051 if (BINFO_VTABLE (elem))
2052 fprintf (stderr, "vtable decl \"%s\"\n",
2053 IDENTIFIER_POINTER (DECL_NAME (get_vtbl_decl_for_binfo (elem))));
2054 else
2055 fprintf (stderr, "no vtable decl yet\n");
2056 fprintf (stderr, "virtuals:\n");
2057 virtuals = BINFO_VIRTUALS (elem);
2058 n = 0;
2059
2060 while (virtuals)
2061 {
2062 tree fndecl = TREE_VALUE (virtuals);
2063 fprintf (stderr, "%s [%ld =? %ld]\n",
2064 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
2065 (long) n, (long) TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
2066 ++n;
2067 virtuals = TREE_CHAIN (virtuals);
2068 }
2069 }
2070
2071 /* Build a representation for the qualified name SCOPE::NAME. TYPE is
2072 the type of the result expression, if known, or NULL_TREE if the
2073 resulting expression is type-dependent. If TEMPLATE_P is true,
2074 NAME is known to be a template because the user explicitly used the
2075 "template" keyword after the "::".
2076
2077 All SCOPE_REFs should be built by use of this function. */
2078
2079 tree
2080 build_qualified_name (tree type, tree scope, tree name, bool template_p)
2081 {
2082 tree t;
2083 if (type == error_mark_node
2084 || scope == error_mark_node
2085 || name == error_mark_node)
2086 return error_mark_node;
2087 gcc_assert (TREE_CODE (name) != SCOPE_REF);
2088 t = build2 (SCOPE_REF, type, scope, name);
2089 QUALIFIED_NAME_IS_TEMPLATE (t) = template_p;
2090 PTRMEM_OK_P (t) = true;
2091 if (type)
2092 t = convert_from_reference (t);
2093 return t;
2094 }
2095
2096 /* Like check_qualified_type, but also check ref-qualifier and exception
2097 specification. */
2098
2099 static bool
2100 cp_check_qualified_type (const_tree cand, const_tree base, int type_quals,
2101 cp_ref_qualifier rqual, tree raises)
2102 {
2103 return (TYPE_QUALS (cand) == type_quals
2104 && check_base_type (cand, base)
2105 && comp_except_specs (raises, TYPE_RAISES_EXCEPTIONS (cand),
2106 ce_exact)
2107 && type_memfn_rqual (cand) == rqual);
2108 }
2109
2110 /* Build the FUNCTION_TYPE or METHOD_TYPE with the ref-qualifier RQUAL. */
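/* For illustration (editorial example), the member functions in

     struct X {
       void f () &;    // REF_QUAL_LVALUE
       void g () &&;   // REF_QUAL_RVALUE
     };

   have METHOD_TYPEs that differ from the unqualified form only in their
   ref-qualifier; this function produces such a variant on demand.  */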
2111
2112 tree
2113 build_ref_qualified_type (tree type, cp_ref_qualifier rqual)
2114 {
2115 tree t;
2116
2117 if (rqual == type_memfn_rqual (type))
2118 return type;
2119
2120 int type_quals = TYPE_QUALS (type);
2121 tree raises = TYPE_RAISES_EXCEPTIONS (type);
2122 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
2123 if (cp_check_qualified_type (t, type, type_quals, rqual, raises))
2124 return t;
2125
2126 t = build_variant_type_copy (type);
2127 switch (rqual)
2128 {
2129 case REF_QUAL_RVALUE:
2130 FUNCTION_RVALUE_QUALIFIED (t) = 1;
2131 FUNCTION_REF_QUALIFIED (t) = 1;
2132 break;
2133 case REF_QUAL_LVALUE:
2134 FUNCTION_RVALUE_QUALIFIED (t) = 0;
2135 FUNCTION_REF_QUALIFIED (t) = 1;
2136 break;
2137 default:
2138 FUNCTION_REF_QUALIFIED (t) = 0;
2139 break;
2140 }
2141
2142 if (TYPE_STRUCTURAL_EQUALITY_P (type))
2143 /* Propagate structural equality. */
2144 SET_TYPE_STRUCTURAL_EQUALITY (t);
2145 else if (TYPE_CANONICAL (type) != type)
2146 /* Build the underlying canonical type, since it is different
2147 from TYPE. */
2148 TYPE_CANONICAL (t) = build_ref_qualified_type (TYPE_CANONICAL (type),
2149 rqual);
2150 else
2151 /* T is its own canonical type. */
2152 TYPE_CANONICAL (t) = t;
2153
2154 return t;
2155 }
2156
2157 /* Cache of free ovl nodes. Uses OVL_FUNCTION for chaining. */
2158 static GTY((deletable)) tree ovl_cache;
2159
2160 /* Make a raw overload node containing FN. */
2161
2162 tree
2163 ovl_make (tree fn, tree next)
2164 {
2165 tree result = ovl_cache;
2166
2167 if (result)
2168 {
2169 ovl_cache = OVL_FUNCTION (result);
2170 /* Zap the flags. */
2171 memset (result, 0, sizeof (tree_base));
2172 TREE_SET_CODE (result, OVERLOAD);
2173 }
2174 else
2175 result = make_node (OVERLOAD);
2176
2177 if (TREE_CODE (fn) == OVERLOAD)
2178 OVL_NESTED_P (result) = true;
2179
2180 TREE_TYPE (result) = (next || TREE_CODE (fn) == TEMPLATE_DECL
2181 ? unknown_type_node : TREE_TYPE (fn));
2182 OVL_FUNCTION (result) = fn;
2183 OVL_CHAIN (result) = next;
2184 return result;
2185 }
2186
2187 static tree
2188 ovl_copy (tree ovl)
2189 {
2190 tree result = ovl_cache;
2191
2192 if (result)
2193 {
2194 ovl_cache = OVL_FUNCTION (result);
2195 /* Zap the flags. */
2196 memset (result, 0, sizeof (tree_base));
2197 TREE_SET_CODE (result, OVERLOAD);
2198 }
2199 else
2200 result = make_node (OVERLOAD);
2201
2202 gcc_checking_assert (!OVL_NESTED_P (ovl) && OVL_USED_P (ovl));
2203 TREE_TYPE (result) = TREE_TYPE (ovl);
2204 OVL_FUNCTION (result) = OVL_FUNCTION (ovl);
2205 OVL_CHAIN (result) = OVL_CHAIN (ovl);
2206 OVL_HIDDEN_P (result) = OVL_HIDDEN_P (ovl);
2207 OVL_USING_P (result) = OVL_USING_P (ovl);
2208 OVL_LOOKUP_P (result) = OVL_LOOKUP_P (ovl);
2209
2210 return result;
2211 }
2212
2213 /* Add FN to the (potentially NULL) overload set MAYBE_OVL. USING_P is
2214 true if FN is added via a using-declaration. We also pay attention to
2215 DECL_HIDDEN_P. Overloads are ordered as hidden, using, regular. */
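/* A rough editorial illustration of the three buckets:

     struct S { friend void f (S); };  // hidden (found only by ADL for now)
     namespace N { void f (int); }
     using N::f;                       // via a using-declaration
     void f (double);                  // regular declaration

   All of these land in the same namespace-scope overload set for 'f',
   kept in the order hidden, using, regular.  */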
2216
2217 tree
2218 ovl_insert (tree fn, tree maybe_ovl, bool using_p)
2219 {
2220 bool copying = false; /* Checking use only. */
2221 bool hidden_p = DECL_HIDDEN_P (fn);
2222 int weight = (hidden_p << 1) | (using_p << 0);
2223
2224 tree result = NULL_TREE;
2225 tree insert_after = NULL_TREE;
2226
2227 /* Find insertion point. */
2228 while (maybe_ovl && TREE_CODE (maybe_ovl) == OVERLOAD
2229 && (weight < ((OVL_HIDDEN_P (maybe_ovl) << 1)
2230 | (OVL_USING_P (maybe_ovl) << 0))))
2231 {
2232 gcc_checking_assert (!OVL_LOOKUP_P (maybe_ovl)
2233 && (!copying || OVL_USED_P (maybe_ovl)));
2234 if (OVL_USED_P (maybe_ovl))
2235 {
2236 copying = true;
2237 maybe_ovl = ovl_copy (maybe_ovl);
2238 if (insert_after)
2239 OVL_CHAIN (insert_after) = maybe_ovl;
2240 }
2241 if (!result)
2242 result = maybe_ovl;
2243 insert_after = maybe_ovl;
2244 maybe_ovl = OVL_CHAIN (maybe_ovl);
2245 }
2246
2247 tree trail = fn;
2248 if (maybe_ovl || using_p || hidden_p || TREE_CODE (fn) == TEMPLATE_DECL)
2249 {
2250 trail = ovl_make (fn, maybe_ovl);
2251 if (hidden_p)
2252 OVL_HIDDEN_P (trail) = true;
2253 if (using_p)
2254 OVL_USING_P (trail) = true;
2255 }
2256
2257 if (insert_after)
2258 {
2259 OVL_CHAIN (insert_after) = trail;
2260 TREE_TYPE (insert_after) = unknown_type_node;
2261 }
2262 else
2263 result = trail;
2264
2265 return result;
2266 }
2267
2268 /* Skip any hidden names at the beginning of OVL. */
2269
2270 tree
2271 ovl_skip_hidden (tree ovl)
2272 {
2273 for (;
2274 ovl && TREE_CODE (ovl) == OVERLOAD && OVL_HIDDEN_P (ovl);
2275 ovl = OVL_CHAIN (ovl))
2276 gcc_checking_assert (DECL_HIDDEN_P (OVL_FUNCTION (ovl)));
2277
2278 if (ovl && TREE_CODE (ovl) != OVERLOAD && DECL_HIDDEN_P (ovl))
2279 {
2280 /* Any hidden functions should have been wrapped in an
2281 overload, but injected friend classes will not. */
2282 gcc_checking_assert (!DECL_DECLARES_FUNCTION_P (ovl));
2283 ovl = NULL_TREE;
2284 }
2285
2286 return ovl;
2287 }
2288
2289 /* NODE is an OVL_HIDDEN_P node which is now revealed. */
2290
2291 tree
2292 ovl_iterator::reveal_node (tree overload, tree node)
2293 {
2294 /* We cannot have returned NODE as part of a lookup overload, so it
2295 cannot be USED. */
2296 gcc_checking_assert (!OVL_USED_P (node));
2297
2298 OVL_HIDDEN_P (node) = false;
2299 if (tree chain = OVL_CHAIN (node))
2300 if (TREE_CODE (chain) == OVERLOAD
2301 && (OVL_USING_P (chain) || OVL_HIDDEN_P (chain)))
2302 {
2303 /* The node needs moving, and the simplest way is to remove it
2304 and reinsert. */
2305 overload = remove_node (overload, node);
2306 overload = ovl_insert (OVL_FUNCTION (node), overload);
2307 }
2308 return overload;
2309 }
2310
2311 /* NODE is on the overloads of OVL. Remove it. If a predecessor is
2312 OVL_USED_P we must copy OVL nodes, because those are immutable.
2313 The removed node is unaltered and may continue to be iterated
2314 from (i.e. it is safe to remove a node from an overload one is
2315 currently iterating over). */
2316
2317 tree
2318 ovl_iterator::remove_node (tree overload, tree node)
2319 {
2320 bool copying = false; /* Checking use only. */
2321
2322 tree *slot = &overload;
2323 while (*slot != node)
2324 {
2325 tree probe = *slot;
2326 gcc_checking_assert (!OVL_LOOKUP_P (probe)
2327 && (!copying || OVL_USED_P (probe)));
2328 if (OVL_USED_P (probe))
2329 {
2330 copying = true;
2331 probe = ovl_copy (probe);
2332 *slot = probe;
2333 }
2334
2335 slot = &OVL_CHAIN (probe);
2336 }
2337
2338 /* Stitch out NODE. We don't have to worry about now making a
2339 singleton overload (and consequently maybe setting its type),
2340 because all uses of this function will be followed by inserting a
2341 new node that must follow the place we've cut this out from. */
2342 if (TREE_CODE (node) != OVERLOAD)
2343 /* Cloned inherited ctors don't mark themselves as via_using. */
2344 *slot = NULL_TREE;
2345 else
2346 *slot = OVL_CHAIN (node);
2347
2348 return overload;
2349 }
2350
2351 /* Mark or unmark a lookup set. */
2352
2353 void
2354 lookup_mark (tree ovl, bool val)
2355 {
2356 for (lkp_iterator iter (ovl); iter; ++iter)
2357 {
2358 gcc_checking_assert (LOOKUP_SEEN_P (*iter) != val);
2359 LOOKUP_SEEN_P (*iter) = val;
2360 }
2361 }
2362
2363 /* Add a set of new FNS into a lookup. */
2364
2365 tree
2366 lookup_add (tree fns, tree lookup)
2367 {
2368 if (lookup || TREE_CODE (fns) == TEMPLATE_DECL)
2369 {
2370 lookup = ovl_make (fns, lookup);
2371 OVL_LOOKUP_P (lookup) = true;
2372 }
2373 else
2374 lookup = fns;
2375
2376 return lookup;
2377 }
2378
2379 /* FNS is a new overload set, add them to LOOKUP, if they are not
2380 already present there. */
2381
2382 tree
2383 lookup_maybe_add (tree fns, tree lookup, bool deduping)
2384 {
2385 if (deduping)
2386 for (tree next, probe = fns; probe; probe = next)
2387 {
2388 tree fn = probe;
2389 next = NULL_TREE;
2390
2391 if (TREE_CODE (probe) == OVERLOAD)
2392 {
2393 fn = OVL_FUNCTION (probe);
2394 next = OVL_CHAIN (probe);
2395 }
2396
2397 if (!LOOKUP_SEEN_P (fn))
2398 LOOKUP_SEEN_P (fn) = true;
2399 else
2400 {
2401 /* This function was already seen. Insert all the
2402 predecessors onto the lookup. */
2403 for (; fns != probe; fns = OVL_CHAIN (fns))
2404 {
2405 lookup = lookup_add (OVL_FUNCTION (fns), lookup);
2406 /* Propagate OVL_USING, but OVL_HIDDEN doesn't matter. */
2407 if (OVL_USING_P (fns))
2408 OVL_USING_P (lookup) = true;
2409 }
2410
2411 /* And now skip this function. */
2412 fns = next;
2413 }
2414 }
2415
2416 if (fns)
2417 /* We ended in a set of new functions. Add them all in one go. */
2418 lookup = lookup_add (fns, lookup);
2419
2420 return lookup;
2421 }
2422
2423 /* Regular overload OVL is part of a kept lookup. Mark the nodes on
2424 it as immutable. */
2425
2426 static void
2427 ovl_used (tree ovl)
2428 {
2429 for (;
2430 ovl && TREE_CODE (ovl) == OVERLOAD
2431 && !OVL_USED_P (ovl);
2432 ovl = OVL_CHAIN (ovl))
2433 {
2434 gcc_checking_assert (!OVL_LOOKUP_P (ovl));
2435 OVL_USED_P (ovl) = true;
2436 }
2437 }
2438
2439 /* If KEEP is true, preserve the contents of a lookup so that it is
2440 available for a later instantiation. Otherwise release the LOOKUP
2441 nodes for reuse. */
2442
2443 void
2444 lookup_keep (tree lookup, bool keep)
2445 {
2446 for (;
2447 lookup && TREE_CODE (lookup) == OVERLOAD
2448 && OVL_LOOKUP_P (lookup) && !OVL_USED_P (lookup);
2449 lookup = OVL_CHAIN (lookup))
2450 if (keep)
2451 {
2452 OVL_USED_P (lookup) = true;
2453 ovl_used (OVL_FUNCTION (lookup));
2454 }
2455 else
2456 {
2457 OVL_FUNCTION (lookup) = ovl_cache;
2458 ovl_cache = lookup;
2459 }
2460
2461 if (keep)
2462 ovl_used (lookup);
2463 }
2464
2465 /* LIST is a TREE_LIST whose TREE_VALUEs may be OVERLOADS that need
2466 keeping, or may be ignored. */
2467
2468 void
2469 lookup_list_keep (tree list, bool keep)
2470 {
2471 for (; list; list = TREE_CHAIN (list))
2472 {
2473 tree v = TREE_VALUE (list);
2474 if (TREE_CODE (v) == OVERLOAD)
2475 lookup_keep (v, keep);
2476 }
2477 }
2478
2479 /* Returns nonzero if X is an expression for a (possibly overloaded)
2480 function. If "f" is a function or function template, "f", "c->f",
2481 "c.f", "C::f", and "f<int>" will all be considered possibly
2482 overloaded functions. Returns 2 if the function is actually
2483 overloaded, i.e., if it is impossible to know the type of the
2484 function without performing overload resolution. */
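/* Editorial examples of the return values, assuming the argument is the
   tree naming 'f' or 'g' after lookup:

     void f (int);                    // just this one: returns 1
     void f (double);                 // now the set for 'f' returns 2
     template<class T> void g (T);    // returns 2: type needs deduction
   */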
2485
2486 int
2487 is_overloaded_fn (tree x)
2488 {
2489 /* A baselink is also considered an overloaded function. */
2490 if (TREE_CODE (x) == OFFSET_REF
2491 || TREE_CODE (x) == COMPONENT_REF)
2492 x = TREE_OPERAND (x, 1);
2493 x = MAYBE_BASELINK_FUNCTIONS (x);
2494 if (TREE_CODE (x) == TEMPLATE_ID_EXPR)
2495 x = TREE_OPERAND (x, 0);
2496
2497 if (DECL_FUNCTION_TEMPLATE_P (OVL_FIRST (x))
2498 || (TREE_CODE (x) == OVERLOAD && !OVL_SINGLE_P (x)))
2499 return 2;
2500
2501 return (TREE_CODE (x) == FUNCTION_DECL
2502 || TREE_CODE (x) == OVERLOAD);
2503 }
2504
2505 /* X is the CALL_EXPR_FN of a CALL_EXPR. If X represents a dependent name
2506 (14.6.2), return the IDENTIFIER_NODE for that name. Otherwise, return
2507 NULL_TREE. */
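/* For example (editorial), in

     template<class T> void g (T t) { frob (t); }

   the call's CALL_EXPR_FN is the bare IDENTIFIER_NODE "frob" because the
   name is dependent, and this function returns that identifier.  */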
2508
2509 tree
2510 dependent_name (tree x)
2511 {
2512 if (identifier_p (x))
2513 return x;
2514 if (TREE_CODE (x) == TEMPLATE_ID_EXPR)
2515 x = TREE_OPERAND (x, 0);
2516 if (TREE_CODE (x) == OVERLOAD || TREE_CODE (x) == FUNCTION_DECL)
2517 return OVL_NAME (x);
2518 return NULL_TREE;
2519 }
2520
2521 /* Returns true iff X is an expression for an overloaded function
2522 whose type cannot be known without performing overload
2523 resolution. */
2524
2525 bool
2526 really_overloaded_fn (tree x)
2527 {
2528 return is_overloaded_fn (x) == 2;
2529 }
2530
2531 /* Get the overload set FROM refers to. */
2532
2533 tree
2534 get_fns (tree from)
2535 {
2536 /* A baselink is also considered an overloaded function. */
2537 if (TREE_CODE (from) == OFFSET_REF
2538 || TREE_CODE (from) == COMPONENT_REF)
2539 from = TREE_OPERAND (from, 1);
2540 if (BASELINK_P (from))
2541 from = BASELINK_FUNCTIONS (from);
2542 if (TREE_CODE (from) == TEMPLATE_ID_EXPR)
2543 from = TREE_OPERAND (from, 0);
2544 gcc_assert (TREE_CODE (from) == OVERLOAD
2545 || TREE_CODE (from) == FUNCTION_DECL);
2546 return from;
2547 }
2548
2549 /* Return the first function of the overload set FROM refers to. */
2550
2551 tree
2552 get_first_fn (tree from)
2553 {
2554 return OVL_FIRST (get_fns (from));
2555 }
2556
2557 /* Return the scope where the overloaded functions OVL were found. */
2558
2559 tree
2560 ovl_scope (tree ovl)
2561 {
2562 if (TREE_CODE (ovl) == OFFSET_REF
2563 || TREE_CODE (ovl) == COMPONENT_REF)
2564 ovl = TREE_OPERAND (ovl, 1);
2565 if (TREE_CODE (ovl) == BASELINK)
2566 return BINFO_TYPE (BASELINK_BINFO (ovl));
2567 if (TREE_CODE (ovl) == TEMPLATE_ID_EXPR)
2568 ovl = TREE_OPERAND (ovl, 0);
2569 /* Skip using-declarations. */
2570 lkp_iterator iter (ovl);
2571 do
2572 ovl = *iter;
2573 while (iter.using_p () && ++iter);
2574
2575 return CP_DECL_CONTEXT (ovl);
2576 }
2577
2578 #define PRINT_RING_SIZE 4
2579
2580 static const char *
2581 cxx_printable_name_internal (tree decl, int v, bool translate)
2582 {
2583 static unsigned int uid_ring[PRINT_RING_SIZE];
2584 static char *print_ring[PRINT_RING_SIZE];
2585 static bool trans_ring[PRINT_RING_SIZE];
2586 static int ring_counter;
2587 int i;
2588
2589 /* Only cache functions. */
2590 if (v < 2
2591 || TREE_CODE (decl) != FUNCTION_DECL
2592 || DECL_LANG_SPECIFIC (decl) == 0)
2593 return lang_decl_name (decl, v, translate);
2594
2595 /* See if this print name is lying around. */
2596 for (i = 0; i < PRINT_RING_SIZE; i++)
2597 if (uid_ring[i] == DECL_UID (decl) && translate == trans_ring[i])
2598 /* yes, so return it. */
2599 return print_ring[i];
2600
2601 if (++ring_counter == PRINT_RING_SIZE)
2602 ring_counter = 0;
2603
2604 if (current_function_decl != NULL_TREE)
2605 {
2606 /* There may be both translated and untranslated versions of the
2607 name cached. */
2608 for (i = 0; i < 2; i++)
2609 {
2610 if (uid_ring[ring_counter] == DECL_UID (current_function_decl))
2611 ring_counter += 1;
2612 if (ring_counter == PRINT_RING_SIZE)
2613 ring_counter = 0;
2614 }
2615 gcc_assert (uid_ring[ring_counter] != DECL_UID (current_function_decl));
2616 }
2617
2618 free (print_ring[ring_counter]);
2619
2620 print_ring[ring_counter] = xstrdup (lang_decl_name (decl, v, translate));
2621 uid_ring[ring_counter] = DECL_UID (decl);
2622 trans_ring[ring_counter] = translate;
2623 return print_ring[ring_counter];
2624 }
2625
2626 const char *
2627 cxx_printable_name (tree decl, int v)
2628 {
2629 return cxx_printable_name_internal (decl, v, false);
2630 }
2631
2632 const char *
2633 cxx_printable_name_translate (tree decl, int v)
2634 {
2635 return cxx_printable_name_internal (decl, v, true);
2636 }
2637
2638 /* Return the canonical version of exception-specification RAISES for a C++17
2639 function type, for use in type comparison and building TYPE_CANONICAL. */
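/* For example (C++17 type-identity view, editorial):

     void f () throw ();                        // canonicalized to noexcept
     void g () noexcept (false);                // canonicalized to no spec
     template<class T> void h () noexcept (T::value);  // kept as-is
   */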
2640
2641 tree
2642 canonical_eh_spec (tree raises)
2643 {
2644 if (raises == NULL_TREE)
2645 return raises;
2646 else if (DEFERRED_NOEXCEPT_SPEC_P (raises)
2647 || uses_template_parms (raises)
2648 || uses_template_parms (TREE_PURPOSE (raises)))
2649 /* Keep a dependent or deferred exception specification. */
2650 return raises;
2651 else if (nothrow_spec_p (raises))
2652 /* throw() -> noexcept. */
2653 return noexcept_true_spec;
2654 else
2655 /* For C++17 type matching, anything else -> nothing. */
2656 return NULL_TREE;
2657 }
2658
2659 /* Build the FUNCTION_TYPE or METHOD_TYPE which may throw exceptions
2660 listed in RAISES. */
2661
2662 tree
2663 build_exception_variant (tree type, tree raises)
2664 {
2665 tree v;
2666 int type_quals;
2667
2668 if (comp_except_specs (raises, TYPE_RAISES_EXCEPTIONS (type), ce_exact))
2669 return type;
2670
2671 type_quals = TYPE_QUALS (type);
2672 cp_ref_qualifier rqual = type_memfn_rqual (type);
2673 for (v = TYPE_MAIN_VARIANT (type); v; v = TYPE_NEXT_VARIANT (v))
2674 if (cp_check_qualified_type (v, type, type_quals, rqual, raises))
2675 return v;
2676
2677 /* Need to build a new variant. */
2678 v = build_variant_type_copy (type);
2679 TYPE_RAISES_EXCEPTIONS (v) = raises;
2680
2681 if (!flag_noexcept_type)
2682 /* The exception-specification is not part of the canonical type. */
2683 return v;
2684
2685 /* Canonicalize the exception specification. */
2686 tree cr = canonical_eh_spec (raises);
2687
2688 if (TYPE_STRUCTURAL_EQUALITY_P (type))
2689 /* Propagate structural equality. */
2690 SET_TYPE_STRUCTURAL_EQUALITY (v);
2691 else if (TYPE_CANONICAL (type) != type || cr != raises)
2692 /* Build the underlying canonical type, since it is different
2693 from TYPE. */
2694 TYPE_CANONICAL (v) = build_exception_variant (TYPE_CANONICAL (type), cr);
2695 else
2696 /* T is its own canonical type. */
2697 TYPE_CANONICAL (v) = v;
2698
2699 return v;
2700 }
2701
2702 /* Given a TEMPLATE_TEMPLATE_PARM node T, create a new
2703 BOUND_TEMPLATE_TEMPLATE_PARM bound with NEWARGS as its template
2704 arguments. */
2705
2706 tree
2707 bind_template_template_parm (tree t, tree newargs)
2708 {
2709 tree decl = TYPE_NAME (t);
2710 tree t2;
2711
2712 t2 = cxx_make_type (BOUND_TEMPLATE_TEMPLATE_PARM);
2713 decl = build_decl (input_location,
2714 TYPE_DECL, DECL_NAME (decl), NULL_TREE);
2715
2716 /* These nodes have to be created to reflect new TYPE_DECL and template
2717 arguments. */
2718 TEMPLATE_TYPE_PARM_INDEX (t2) = copy_node (TEMPLATE_TYPE_PARM_INDEX (t));
2719 TEMPLATE_PARM_DECL (TEMPLATE_TYPE_PARM_INDEX (t2)) = decl;
2720 TEMPLATE_TEMPLATE_PARM_TEMPLATE_INFO (t2)
2721 = build_template_info (TEMPLATE_TEMPLATE_PARM_TEMPLATE_DECL (t), newargs);
2722
2723 TREE_TYPE (decl) = t2;
2724 TYPE_NAME (t2) = decl;
2725 TYPE_STUB_DECL (t2) = decl;
2726 TYPE_SIZE (t2) = 0;
2727 SET_TYPE_STRUCTURAL_EQUALITY (t2);
2728
2729 return t2;
2730 }
2731
2732 /* Called from count_trees via walk_tree. */
2733
2734 static tree
2735 count_trees_r (tree *tp, int *walk_subtrees, void *data)
2736 {
2737 ++*((int *) data);
2738
2739 if (TYPE_P (*tp))
2740 *walk_subtrees = 0;
2741
2742 return NULL_TREE;
2743 }
2744
2745 /* Debugging function for measuring the rough complexity of a tree
2746 representation. */
2747
2748 int
2749 count_trees (tree t)
2750 {
2751 int n_trees = 0;
2752 cp_walk_tree_without_duplicates (&t, count_trees_r, &n_trees);
2753 return n_trees;
2754 }
2755
2756 /* Called from verify_stmt_tree via walk_tree. */
2757
2758 static tree
2759 verify_stmt_tree_r (tree* tp, int * /*walk_subtrees*/, void* data)
2760 {
2761 tree t = *tp;
2762 hash_table<nofree_ptr_hash <tree_node> > *statements
2763 = static_cast <hash_table<nofree_ptr_hash <tree_node> > *> (data);
2764 tree_node **slot;
2765
2766 if (!STATEMENT_CODE_P (TREE_CODE (t)))
2767 return NULL_TREE;
2768
2769 /* If this statement is already present in the hash table, then
2770 there is a circularity in the statement tree. */
2771 gcc_assert (!statements->find (t));
2772
2773 slot = statements->find_slot (t, INSERT);
2774 *slot = t;
2775
2776 return NULL_TREE;
2777 }
2778
2779 /* Debugging function to check that the statement T has not been
2780 corrupted. For now, this function simply checks that T contains no
2781 circularities. */
2782
2783 void
2784 verify_stmt_tree (tree t)
2785 {
2786 hash_table<nofree_ptr_hash <tree_node> > statements (37);
2787 cp_walk_tree (&t, verify_stmt_tree_r, &statements, NULL);
2788 }
2789
2790 /* Check if the type T depends on a type with no linkage and if so, return
2791 it. If RELAXED_P then do not consider a class type declared within
2792 a vague-linkage function to have no linkage. */
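/* For instance (editorial example), at namespace scope

     enum { E } e;          // unnamed enumeration, no linkage

   no_linkage_check on the type of 'e' returns that ENUMERAL_TYPE, while
   for a plain 'int' it returns NULL_TREE.  */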
2793
2794 tree
2795 no_linkage_check (tree t, bool relaxed_p)
2796 {
2797 tree r;
2798
2799 /* There's no point in checking linkage on template functions; we
2800 can't know their complete types. */
2801 if (processing_template_decl)
2802 return NULL_TREE;
2803
2804 switch (TREE_CODE (t))
2805 {
2806 case RECORD_TYPE:
2807 if (TYPE_PTRMEMFUNC_P (t))
2808 goto ptrmem;
2809 /* Lambda types that don't have mangling scope have no linkage. We
2810 check CLASSTYPE_LAMBDA_EXPR for error_mark_node because
2811 when we get here from pushtag none of the lambda information is
2812 set up yet, so we want to assume that the lambda has linkage and
2813 fix it up later if not. */
2814 if (CLASSTYPE_LAMBDA_EXPR (t)
2815 && CLASSTYPE_LAMBDA_EXPR (t) != error_mark_node
2816 && LAMBDA_TYPE_EXTRA_SCOPE (t) == NULL_TREE)
2817 return t;
2818 /* Fall through. */
2819 case UNION_TYPE:
2820 if (!CLASS_TYPE_P (t))
2821 return NULL_TREE;
2822 /* Fall through. */
2823 case ENUMERAL_TYPE:
2824 /* Only treat unnamed types as having no linkage if they're at
2825 namespace scope. This is core issue 966. */
2826 if (TYPE_UNNAMED_P (t) && TYPE_NAMESPACE_SCOPE_P (t))
2827 return t;
2828
2829 for (r = CP_TYPE_CONTEXT (t); ; )
2830 {
2831 /* If we're a nested type of a !TREE_PUBLIC class, we might not
2832 have linkage, or we might just be in an anonymous namespace.
2833 If we're in a TREE_PUBLIC class, we have linkage. */
2834 if (TYPE_P (r) && !TREE_PUBLIC (TYPE_NAME (r)))
2835 return no_linkage_check (TYPE_CONTEXT (t), relaxed_p);
2836 else if (TREE_CODE (r) == FUNCTION_DECL)
2837 {
2838 if (!relaxed_p || !vague_linkage_p (r))
2839 return t;
2840 else
2841 r = CP_DECL_CONTEXT (r);
2842 }
2843 else
2844 break;
2845 }
2846
2847 return NULL_TREE;
2848
2849 case ARRAY_TYPE:
2850 case POINTER_TYPE:
2851 case REFERENCE_TYPE:
2852 case VECTOR_TYPE:
2853 return no_linkage_check (TREE_TYPE (t), relaxed_p);
2854
2855 case OFFSET_TYPE:
2856 ptrmem:
2857 r = no_linkage_check (TYPE_PTRMEM_POINTED_TO_TYPE (t),
2858 relaxed_p);
2859 if (r)
2860 return r;
2861 return no_linkage_check (TYPE_PTRMEM_CLASS_TYPE (t), relaxed_p);
2862
2863 case METHOD_TYPE:
2864 case FUNCTION_TYPE:
2865 {
2866 tree parm = TYPE_ARG_TYPES (t);
2867 if (TREE_CODE (t) == METHOD_TYPE)
2868 /* The 'this' pointer isn't interesting; a method has the same
2869 linkage (or lack thereof) as its enclosing class. */
2870 parm = TREE_CHAIN (parm);
2871 for (;
2872 parm && parm != void_list_node;
2873 parm = TREE_CHAIN (parm))
2874 {
2875 r = no_linkage_check (TREE_VALUE (parm), relaxed_p);
2876 if (r)
2877 return r;
2878 }
2879 return no_linkage_check (TREE_TYPE (t), relaxed_p);
2880 }
2881
2882 default:
2883 return NULL_TREE;
2884 }
2885 }
2886
2887 extern int depth_reached;
2888
2889 void
2890 cxx_print_statistics (void)
2891 {
2892 print_template_statistics ();
2893 if (GATHER_STATISTICS)
2894 fprintf (stderr, "maximum template instantiation depth reached: %d\n",
2895 depth_reached);
2896 }
2897
2898 /* Return, as an INTEGER_CST node, the number of elements for TYPE
2899 (which is an ARRAY_TYPE). This counts only elements of the top
2900 array. */
2901
2902 tree
2903 array_type_nelts_top (tree type)
2904 {
2905 return fold_build2_loc (input_location,
2906 PLUS_EXPR, sizetype,
2907 array_type_nelts (type),
2908 size_one_node);
2909 }
2910
2911 /* Return, as an INTEGER_CST node, the number of elements for TYPE
2912 (which is an ARRAY_TYPE). This one is a recursive count of all
2913 ARRAY_TYPEs that are clumped together. */
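/* For instance (editorial example), for the type of

     int a[2][3];

   array_type_nelts_top yields 2 (the top array's element count) and
   array_type_nelts_total yields 6 (2 * 3), both as INTEGER_CSTs.  */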
2914
2915 tree
2916 array_type_nelts_total (tree type)
2917 {
2918 tree sz = array_type_nelts_top (type);
2919 type = TREE_TYPE (type);
2920 while (TREE_CODE (type) == ARRAY_TYPE)
2921 {
2922 tree n = array_type_nelts_top (type);
2923 sz = fold_build2_loc (input_location,
2924 MULT_EXPR, sizetype, sz, n);
2925 type = TREE_TYPE (type);
2926 }
2927 return sz;
2928 }
2929
2930 struct bot_data
2931 {
2932 splay_tree target_remap;
2933 bool clear_location;
2934 };
2935
2936 /* Called from break_out_target_exprs via cp_walk_tree. */
2937
2938 static tree
2939 bot_manip (tree* tp, int* walk_subtrees, void* data_)
2940 {
2941 bot_data &data = *(bot_data*)data_;
2942 splay_tree target_remap = data.target_remap;
2943 tree t = *tp;
2944
2945 if (!TYPE_P (t) && TREE_CONSTANT (t) && !TREE_SIDE_EFFECTS (t))
2946 {
2947 /* There can't be any TARGET_EXPRs or their slot variables below this
2948 point. But we must make a copy, in case subsequent processing
2949 alters any part of it. For example, during gimplification a cast
2950 of the form (T) &X::f (where "f" is a member function) will lead
2951 to replacing the PTRMEM_CST for &X::f with a VAR_DECL. */
2952 *walk_subtrees = 0;
2953 *tp = unshare_expr (t);
2954 return NULL_TREE;
2955 }
2956 if (TREE_CODE (t) == TARGET_EXPR)
2957 {
2958 tree u;
2959
2960 if (TREE_CODE (TREE_OPERAND (t, 1)) == AGGR_INIT_EXPR)
2961 {
2962 u = build_cplus_new (TREE_TYPE (t), TREE_OPERAND (t, 1),
2963 tf_warning_or_error);
2964 if (u == error_mark_node)
2965 return u;
2966 if (AGGR_INIT_ZERO_FIRST (TREE_OPERAND (t, 1)))
2967 AGGR_INIT_ZERO_FIRST (TREE_OPERAND (u, 1)) = true;
2968 }
2969 else
2970 u = build_target_expr_with_type (TREE_OPERAND (t, 1), TREE_TYPE (t),
2971 tf_warning_or_error);
2972
2973 TARGET_EXPR_IMPLICIT_P (u) = TARGET_EXPR_IMPLICIT_P (t);
2974 TARGET_EXPR_LIST_INIT_P (u) = TARGET_EXPR_LIST_INIT_P (t);
2975 TARGET_EXPR_DIRECT_INIT_P (u) = TARGET_EXPR_DIRECT_INIT_P (t);
2976
2977 /* Map the old variable to the new one. */
2978 splay_tree_insert (target_remap,
2979 (splay_tree_key) TREE_OPERAND (t, 0),
2980 (splay_tree_value) TREE_OPERAND (u, 0));
2981
2982 TREE_OPERAND (u, 1) = break_out_target_exprs (TREE_OPERAND (u, 1),
2983 data.clear_location);
2984 if (TREE_OPERAND (u, 1) == error_mark_node)
2985 return error_mark_node;
2986
2987 /* Replace the old expression with the new version. */
2988 *tp = u;
2989 /* We don't have to go below this point; the recursive call to
2990 break_out_target_exprs will have handled anything below this
2991 point. */
2992 *walk_subtrees = 0;
2993 return NULL_TREE;
2994 }
2995 if (TREE_CODE (*tp) == SAVE_EXPR)
2996 {
2997 t = *tp;
2998 splay_tree_node n = splay_tree_lookup (target_remap,
2999 (splay_tree_key) t);
3000 if (n)
3001 {
3002 *tp = (tree)n->value;
3003 *walk_subtrees = 0;
3004 }
3005 else
3006 {
3007 copy_tree_r (tp, walk_subtrees, NULL);
3008 splay_tree_insert (target_remap,
3009 (splay_tree_key)t,
3010 (splay_tree_value)*tp);
3011 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
3012 splay_tree_insert (target_remap,
3013 (splay_tree_key)*tp,
3014 (splay_tree_value)*tp);
3015 }
3016 return NULL_TREE;
3017 }
3018
3019 /* Make a copy of this node. */
3020 t = copy_tree_r (tp, walk_subtrees, NULL);
3021 if (TREE_CODE (*tp) == CALL_EXPR)
3022 if (!processing_template_decl)
3023 set_flags_from_callee (*tp);
3024 if (data.clear_location && EXPR_HAS_LOCATION (*tp))
3025 SET_EXPR_LOCATION (*tp, input_location);
3026 return t;
3027 }
3028
3029 /* Replace all remapped VAR_DECLs in T with their new equivalents.
3030 DATA_ points to a bot_data whose target_remap splay tree maps the old
3031 variables to the new ones. */
3032
3033 static tree
3034 bot_replace (tree* t, int* /*walk_subtrees*/, void* data_)
3035 {
3036 bot_data &data = *(bot_data*)data_;
3037 splay_tree target_remap = data.target_remap;
3038
3039 if (VAR_P (*t))
3040 {
3041 splay_tree_node n = splay_tree_lookup (target_remap,
3042 (splay_tree_key) *t);
3043 if (n)
3044 *t = (tree) n->value;
3045 }
3046 else if (TREE_CODE (*t) == PARM_DECL
3047 && DECL_NAME (*t) == this_identifier
3048 && !DECL_CONTEXT (*t))
3049 {
3050 /* In an NSDMI we need to replace the 'this' parameter we used for
3051 parsing with the real one for this function. */
3052 *t = current_class_ptr;
3053 }
3054 else if (TREE_CODE (*t) == CONVERT_EXPR
3055 && CONVERT_EXPR_VBASE_PATH (*t))
3056 {
3057 /* In an NSDMI build_base_path defers building conversions to virtual
3058 bases, and we handle it here. */
3059 tree basetype = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (*t)));
3060 vec<tree, va_gc> *vbases = CLASSTYPE_VBASECLASSES (current_class_type);
3061 int i; tree binfo;
3062 FOR_EACH_VEC_SAFE_ELT (vbases, i, binfo)
3063 if (BINFO_TYPE (binfo) == basetype)
3064 break;
3065 *t = build_base_path (PLUS_EXPR, TREE_OPERAND (*t, 0), binfo, true,
3066 tf_warning_or_error);
3067 }
3068
3069 return NULL_TREE;
3070 }
3071
3072 /* When we parse a default argument expression, we may create
3073 temporary variables via TARGET_EXPRs. When we actually use the
3074 default-argument expression, we make a copy of the expression
3075 and replace the temporaries with appropriate local versions.
3076
3077 If CLEAR_LOCATION is true, override any EXPR_LOCATION with
3078 input_location. */
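/* A minimal sketch of the situation handled here (editorial):

     struct S { S (); };
     void f (S s = S ());        // parsing builds a TARGET_EXPR temporary
     void g () { f (); f (); }   // each call must get its own temporary

   Every use of the default argument therefore copies the expression and
   remaps the TARGET_EXPR slot variables to fresh locals.  */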
3079
3080 tree
3081 break_out_target_exprs (tree t, bool clear_location /* = false */)
3082 {
3083 static int target_remap_count;
3084 static splay_tree target_remap;
3085
3086 if (!target_remap_count++)
3087 target_remap = splay_tree_new (splay_tree_compare_pointers,
3088 /*splay_tree_delete_key_fn=*/NULL,
3089 /*splay_tree_delete_value_fn=*/NULL);
3090 bot_data data = { target_remap, clear_location };
3091 if (cp_walk_tree (&t, bot_manip, &data, NULL) == error_mark_node)
3092 t = error_mark_node;
3093 cp_walk_tree (&t, bot_replace, &data, NULL);
3094
3095 if (!--target_remap_count)
3096 {
3097 splay_tree_delete (target_remap);
3098 target_remap = NULL;
3099 }
3100
3101 return t;
3102 }
3103
3104 /* Build an expression for the subobject of OBJ at CONSTRUCTOR index INDEX,
3105 which we expect to have type TYPE. */
3106
3107 tree
3108 build_ctor_subob_ref (tree index, tree type, tree obj)
3109 {
3110 if (index == NULL_TREE)
3111 /* Can't refer to a particular member of a vector. */
3112 obj = NULL_TREE;
3113 else if (TREE_CODE (index) == INTEGER_CST)
3114 obj = cp_build_array_ref (input_location, obj, index, tf_none);
3115 else
3116 obj = build_class_member_access_expr (obj, index, NULL_TREE,
3117 /*reference*/false, tf_none);
3118 if (obj)
3119 {
3120 tree objtype = TREE_TYPE (obj);
3121 if (TREE_CODE (objtype) == ARRAY_TYPE && !TYPE_DOMAIN (objtype))
3122 {
3123 /* When the destination object refers to a flexible array member
3124 verify that it matches the type of the source object except
3125 for its domain and qualifiers. */
3126 gcc_assert (comptypes (TYPE_MAIN_VARIANT (type),
3127 TYPE_MAIN_VARIANT (objtype),
3128 COMPARE_REDECLARATION));
3129 }
3130 else
3131 gcc_assert (same_type_ignoring_top_level_qualifiers_p (type, objtype));
3132 }
3133
3134 return obj;
3135 }
3136
3137 struct replace_placeholders_t
3138 {
3139 tree obj; /* The object to be substituted for a PLACEHOLDER_EXPR. */
3140 tree exp; /* The outermost exp. */
3141 bool seen; /* Whether we've encountered a PLACEHOLDER_EXPR. */
3142 hash_set<tree> *pset; /* To avoid walking same trees multiple times. */
3143 };
3144
3145 /* Like substitute_placeholder_in_expr, but handle C++ tree codes and
3146 build up subexpressions as we go deeper. */
3147
3148 static tree
3149 replace_placeholders_r (tree* t, int* walk_subtrees, void* data_)
3150 {
3151 replace_placeholders_t *d = static_cast<replace_placeholders_t*>(data_);
3152 tree obj = d->obj;
3153
3154 if (TYPE_P (*t) || TREE_CONSTANT (*t))
3155 {
3156 *walk_subtrees = false;
3157 return NULL_TREE;
3158 }
3159
3160 switch (TREE_CODE (*t))
3161 {
3162 case PLACEHOLDER_EXPR:
3163 {
3164 tree x = obj;
3165 for (; !same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (*t),
3166 TREE_TYPE (x));
3167 x = TREE_OPERAND (x, 0))
3168 gcc_assert (handled_component_p (x));
3169 *t = unshare_expr (x);
3170 *walk_subtrees = false;
3171 d->seen = true;
3172 }
3173 break;
3174
3175 case CONSTRUCTOR:
3176 {
3177 constructor_elt *ce;
3178 vec<constructor_elt,va_gc> *v = CONSTRUCTOR_ELTS (*t);
3179 /* Don't walk into CONSTRUCTOR_PLACEHOLDER_BOUNDARY ctors
3180 other than the d->exp one, those have PLACEHOLDER_EXPRs
3181 related to another object. */
3182 if ((CONSTRUCTOR_PLACEHOLDER_BOUNDARY (*t)
3183 && *t != d->exp)
3184 || d->pset->add (*t))
3185 {
3186 *walk_subtrees = false;
3187 return NULL_TREE;
3188 }
3189 for (unsigned i = 0; vec_safe_iterate (v, i, &ce); ++i)
3190 {
3191 tree *valp = &ce->value;
3192 tree type = TREE_TYPE (*valp);
3193 tree subob = obj;
3194
3195 /* Elements with RANGE_EXPR index shouldn't have any
3196 placeholders in them. */
3197 if (ce->index && TREE_CODE (ce->index) == RANGE_EXPR)
3198 continue;
3199
3200 if (TREE_CODE (*valp) == CONSTRUCTOR
3201 && AGGREGATE_TYPE_P (type))
3202 {
3203 /* If we're looking at the initializer for OBJ, then build
3204 a sub-object reference. If we're looking at an
3205 initializer for another object, just pass OBJ down. */
3206 if (same_type_ignoring_top_level_qualifiers_p
3207 (TREE_TYPE (*t), TREE_TYPE (obj)))
3208 subob = build_ctor_subob_ref (ce->index, type, obj);
3209 if (TREE_CODE (*valp) == TARGET_EXPR)
3210 valp = &TARGET_EXPR_INITIAL (*valp);
3211 }
3212 d->obj = subob;
3213 cp_walk_tree (valp, replace_placeholders_r, data_, NULL);
3214 d->obj = obj;
3215 }
3216 *walk_subtrees = false;
3217 break;
3218 }
3219
3220 default:
3221 if (d->pset->add (*t))
3222 *walk_subtrees = false;
3223 break;
3224 }
3225
3226 return NULL_TREE;
3227 }
3228
3229 /* Replace PLACEHOLDER_EXPRs in EXP with object OBJ. SEEN_P is set if
3230 a PLACEHOLDER_EXPR has been encountered. */
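/* A rough editorial illustration (C++14 aggregates with NSDMIs):

     struct A { int x = 1; int y = x + 1; };  // 'x' in the second NSDMI
                                              // stands for "this object"
     A a = {};   // y's initializer must now refer to 'a' itself

   References to the object under construction inside such initializers
   are represented with PLACEHOLDER_EXPRs, which this function rewrites
   to refer to OBJ.  */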
3231
3232 tree
3233 replace_placeholders (tree exp, tree obj, bool *seen_p)
3234 {
3235 /* This is only relevant for C++14. */
3236 if (cxx_dialect < cxx14)
3237 return exp;
3238
3239 /* If the object isn't a (member of a) class, do nothing. */
3240 tree op0 = obj;
3241 while (TREE_CODE (op0) == COMPONENT_REF)
3242 op0 = TREE_OPERAND (op0, 0);
3243 if (!CLASS_TYPE_P (strip_array_types (TREE_TYPE (op0))))
3244 return exp;
3245
3246 tree *tp = &exp;
3247 if (TREE_CODE (exp) == TARGET_EXPR)
3248 tp = &TARGET_EXPR_INITIAL (exp);
3249 hash_set<tree> pset;
3250 replace_placeholders_t data = { obj, *tp, false, &pset };
3251 cp_walk_tree (tp, replace_placeholders_r, &data, NULL);
3252 if (seen_p)
3253 *seen_p = data.seen;
3254 return exp;
3255 }
3256
3257 /* Callback function for find_placeholders. */
3258
3259 static tree
3260 find_placeholders_r (tree *t, int *walk_subtrees, void *)
3261 {
3262 if (TYPE_P (*t) || TREE_CONSTANT (*t))
3263 {
3264 *walk_subtrees = false;
3265 return NULL_TREE;
3266 }
3267
3268 switch (TREE_CODE (*t))
3269 {
3270 case PLACEHOLDER_EXPR:
3271 return *t;
3272
3273 case CONSTRUCTOR:
3274 if (CONSTRUCTOR_PLACEHOLDER_BOUNDARY (*t))
3275 *walk_subtrees = false;
3276 break;
3277
3278 default:
3279 break;
3280 }
3281
3282 return NULL_TREE;
3283 }
3284
3285 /* Return true if EXP contains a PLACEHOLDER_EXPR. Don't walk into
3286 ctors with CONSTRUCTOR_PLACEHOLDER_BOUNDARY flag set. */
3287
3288 bool
3289 find_placeholders (tree exp)
3290 {
3291 /* This is only relevant for C++14. */
3292 if (cxx_dialect < cxx14)
3293 return false;
3294
3295 return cp_walk_tree_without_duplicates (&exp, find_placeholders_r, NULL);
3296 }
3297
3298 /* Similar to `build_nt', but for template definitions of dependent
3299 expressions. */
3300
3301 tree
3302 build_min_nt_loc (location_t loc, enum tree_code code, ...)
3303 {
3304 tree t;
3305 int length;
3306 int i;
3307 va_list p;
3308
3309 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
3310
3311 va_start (p, code);
3312
3313 t = make_node (code);
3314 SET_EXPR_LOCATION (t, loc);
3315 length = TREE_CODE_LENGTH (code);
3316
3317 for (i = 0; i < length; i++)
3318 {
3319 tree x = va_arg (p, tree);
3320 TREE_OPERAND (t, i) = x;
3321 if (x && TREE_CODE (x) == OVERLOAD)
3322 lookup_keep (x, true);
3323 }
3324
3325 va_end (p);
3326 return t;
3327 }
3328
3329 /* Similar to `build', but for template definitions. */
3330
3331 tree
3332 build_min (enum tree_code code, tree tt, ...)
3333 {
3334 tree t;
3335 int length;
3336 int i;
3337 va_list p;
3338
3339 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
3340
3341 va_start (p, tt);
3342
3343 t = make_node (code);
3344 length = TREE_CODE_LENGTH (code);
3345 TREE_TYPE (t) = tt;
3346
3347 for (i = 0; i < length; i++)
3348 {
3349 tree x = va_arg (p, tree);
3350 TREE_OPERAND (t, i) = x;
3351 if (x)
3352 {
3353 if (!TYPE_P (x) && TREE_SIDE_EFFECTS (x))
3354 TREE_SIDE_EFFECTS (t) = 1;
3355 if (TREE_CODE (x) == OVERLOAD)
3356 lookup_keep (x, true);
3357 }
3358 }
3359
3360 va_end (p);
3361
3362 if (code == CAST_EXPR)
3363 /* The single operand is a TREE_LIST, which we have to check. */
3364 lookup_list_keep (TREE_OPERAND (t, 0), true);
3365
3366 return t;
3367 }
3368
3369 /* Similar to `build', but for template definitions of non-dependent
3370 expressions. NON_DEP is the non-dependent expression that has been
3371 built. */
3372
3373 tree
3374 build_min_non_dep (enum tree_code code, tree non_dep, ...)
3375 {
3376 tree t;
3377 int length;
3378 int i;
3379 va_list p;
3380
3381 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
3382
3383 va_start (p, non_dep);
3384
3385 if (REFERENCE_REF_P (non_dep))
3386 non_dep = TREE_OPERAND (non_dep, 0);
3387
3388 t = make_node (code);
3389 length = TREE_CODE_LENGTH (code);
3390 TREE_TYPE (t) = unlowered_expr_type (non_dep);
3391 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (non_dep);
3392
3393 for (i = 0; i < length; i++)
3394 {
3395 tree x = va_arg (p, tree);
3396 TREE_OPERAND (t, i) = x;
3397 if (x && TREE_CODE (x) == OVERLOAD)
3398 lookup_keep (x, true);
3399 }
3400
3401 if (code == COMPOUND_EXPR && TREE_CODE (non_dep) != COMPOUND_EXPR)
3402 /* This should not be considered a COMPOUND_EXPR, because it
3403 resolves to an overload. */
3404 COMPOUND_EXPR_OVERLOADED (t) = 1;
3405
3406 va_end (p);
3407 return convert_from_reference (t);
3408 }
3409
3410 /* Similar to `build_min_nt_loc', but for call expressions. */
3411
3412 tree
3413 build_min_nt_call_vec (tree fn, vec<tree, va_gc> *args)
3414 {
3415 tree ret, t;
3416 unsigned int ix;
3417
3418 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
3419 CALL_EXPR_FN (ret) = fn;
3420 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
3421 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
3422 {
3423 CALL_EXPR_ARG (ret, ix) = t;
3424 if (TREE_CODE (t) == OVERLOAD)
3425 lookup_keep (t, true);
3426 }
3427 return ret;
3428 }
3429
3430 /* Similar to `build_min_nt_call_vec', but for template definitions of
3431 non-dependent expressions. NON_DEP is the non-dependent expression
3432 that has been built. */
3433
3434 tree
3435 build_min_non_dep_call_vec (tree non_dep, tree fn, vec<tree, va_gc> *argvec)
3436 {
3437 tree t = build_min_nt_call_vec (fn, argvec);
3438 if (REFERENCE_REF_P (non_dep))
3439 non_dep = TREE_OPERAND (non_dep, 0);
3440 TREE_TYPE (t) = TREE_TYPE (non_dep);
3441 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (non_dep);
3442 return convert_from_reference (t);
3443 }
3444
3445 /* Similar to build_min_non_dep, but for expressions that have been resolved to
3446 a call to an operator overload. OP is the operator that has been
3447 overloaded. NON_DEP is the non-dependent expression that's been built,
3448 which should be a CALL_EXPR or an INDIRECT_REF to a CALL_EXPR. OVERLOAD is
3449 the overload that NON_DEP is calling. */
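/* For instance (editorial example), inside

     struct S { S operator+ (int) const; };
     template<class T> S f (S s) { return s + 1; }

   's + 1' is non-dependent and has already been resolved to
   S::operator+, so the template tree for the expression is rebuilt here
   as a call to that overload.  */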
3450
3451 tree
3452 build_min_non_dep_op_overload (enum tree_code op,
3453 tree non_dep,
3454 tree overload, ...)
3455 {
3456 va_list p;
3457 int nargs, expected_nargs;
3458 tree fn, call;
3459 vec<tree, va_gc> *args;
3460
3461 non_dep = extract_call_expr (non_dep);
3462
3463 nargs = call_expr_nargs (non_dep);
3464
3465 expected_nargs = cp_tree_code_length (op);
3466 if ((op == POSTINCREMENT_EXPR
3467 || op == POSTDECREMENT_EXPR)
3468 /* With -fpermissive non_dep could be operator++(). */
3469 && (!flag_permissive || nargs != expected_nargs))
3470 expected_nargs += 1;
3471 gcc_assert (nargs == expected_nargs);
3472
3473 args = make_tree_vector ();
3474 va_start (p, overload);
3475
3476 if (TREE_CODE (TREE_TYPE (overload)) == FUNCTION_TYPE)
3477 {
3478 fn = overload;
3479 for (int i = 0; i < nargs; i++)
3480 {
3481 tree arg = va_arg (p, tree);
3482 vec_safe_push (args, arg);
3483 }
3484 }
3485 else if (TREE_CODE (TREE_TYPE (overload)) == METHOD_TYPE)
3486 {
3487 tree object = va_arg (p, tree);
3488 tree binfo = TYPE_BINFO (TREE_TYPE (object));
3489 tree method = build_baselink (binfo, binfo, overload, NULL_TREE);
3490 fn = build_min (COMPONENT_REF, TREE_TYPE (overload),
3491 object, method, NULL_TREE);
3492 for (int i = 1; i < nargs; i++)
3493 {
3494 tree arg = va_arg (p, tree);
3495 vec_safe_push (args, arg);
3496 }
3497 }
3498 else
3499 gcc_unreachable ();
3500
3501 va_end (p);
3502 call = build_min_non_dep_call_vec (non_dep, fn, args);
3503 release_tree_vector (args);
3504
3505 tree call_expr = extract_call_expr (call);
3506 KOENIG_LOOKUP_P (call_expr) = KOENIG_LOOKUP_P (non_dep);
3507 CALL_EXPR_OPERATOR_SYNTAX (call_expr) = true;
3508 CALL_EXPR_ORDERED_ARGS (call_expr) = CALL_EXPR_ORDERED_ARGS (non_dep);
3509 CALL_EXPR_REVERSE_ARGS (call_expr) = CALL_EXPR_REVERSE_ARGS (non_dep);
3510
3511 return call;
3512 }
3513
3514 /* Return a new tree vec copied from VEC, with ELT inserted at index IDX. */
3515
3516 vec<tree, va_gc> *
3517 vec_copy_and_insert (vec<tree, va_gc> *old_vec, tree elt, unsigned idx)
3518 {
3519 unsigned len = vec_safe_length (old_vec);
3520 gcc_assert (idx <= len);
3521
3522 vec<tree, va_gc> *new_vec = NULL;
3523 vec_alloc (new_vec, len + 1);
3524
3525 unsigned i;
3526 for (i = 0; i < len; ++i)
3527 {
3528 if (i == idx)
3529 new_vec->quick_push (elt);
3530 new_vec->quick_push ((*old_vec)[i]);
3531 }
3532 if (i == idx)
3533 new_vec->quick_push (elt);
3534
3535 return new_vec;
3536 }
3537
3538 tree
3539 get_type_decl (tree t)
3540 {
3541 if (TREE_CODE (t) == TYPE_DECL)
3542 return t;
3543 if (TYPE_P (t))
3544 return TYPE_STUB_DECL (t);
3545 gcc_assert (t == error_mark_node);
3546 return t;
3547 }
3548
3549 /* Returns the namespace that contains DECL, whether directly or
3550 indirectly. */
3551
3552 tree
3553 decl_namespace_context (tree decl)
3554 {
3555 while (1)
3556 {
3557 if (TREE_CODE (decl) == NAMESPACE_DECL)
3558 return decl;
3559 else if (TYPE_P (decl))
3560 decl = CP_DECL_CONTEXT (TYPE_MAIN_DECL (decl));
3561 else
3562 decl = CP_DECL_CONTEXT (decl);
3563 }
3564 }
3565
3566 /* Returns true if decl is within an anonymous namespace, however deeply
3567 nested, or false otherwise. */
3568
3569 bool
3570 decl_anon_ns_mem_p (const_tree decl)
3571 {
3572 while (TREE_CODE (decl) != NAMESPACE_DECL)
3573 {
3574 /* Classes inside anonymous namespaces have TREE_PUBLIC == 0. */
3575 if (TYPE_P (decl))
3576 return !TREE_PUBLIC (TYPE_MAIN_DECL (decl));
3577
3578 decl = CP_DECL_CONTEXT (decl);
3579 }
3580 return !TREE_PUBLIC (decl);
3581 }
3582
3583 /* Subroutine of cp_tree_equal: t1 and t2 are the CALL_EXPR_FNs of two
3584 CALL_EXPRS. Return whether they are equivalent. */
3585
3586 static bool
3587 called_fns_equal (tree t1, tree t2)
3588 {
3589 /* Core 1321: dependent names are equivalent even if the overload sets
3590 are different. But do compare explicit template arguments. */
3591 tree name1 = dependent_name (t1);
3592 tree name2 = dependent_name (t2);
3593 if (name1 || name2)
3594 {
3595 tree targs1 = NULL_TREE, targs2 = NULL_TREE;
3596
3597 if (name1 != name2)
3598 return false;
3599
3600 if (TREE_CODE (t1) == TEMPLATE_ID_EXPR)
3601 targs1 = TREE_OPERAND (t1, 1);
3602 if (TREE_CODE (t2) == TEMPLATE_ID_EXPR)
3603 targs2 = TREE_OPERAND (t2, 1);
3604 return cp_tree_equal (targs1, targs2);
3605 }
3606 else
3607 return cp_tree_equal (t1, t2);
3608 }
3609
3610 /* Return whether T1 is the same tree structure as T2.
3611 Return true if they are the same, false if they are different. */
3612
3613 bool
3614 cp_tree_equal (tree t1, tree t2)
3615 {
3616 enum tree_code code1, code2;
3617
3618 if (t1 == t2)
3619 return true;
3620 if (!t1 || !t2)
3621 return false;
3622
3623 code1 = TREE_CODE (t1);
3624 code2 = TREE_CODE (t2);
3625
3626 if (code1 != code2)
3627 return false;
3628
3629 if (CONSTANT_CLASS_P (t1)
3630 && !same_type_p (TREE_TYPE (t1), TREE_TYPE (t2)))
3631 return false;
3632
3633 switch (code1)
3634 {
3635 case VOID_CST:
3636 /* There's only a single VOID_CST node, so we should never reach
3637 here. */
3638 gcc_unreachable ();
3639
3640 case INTEGER_CST:
3641 return tree_int_cst_equal (t1, t2);
3642
3643 case REAL_CST:
3644 return real_equal (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
3645
3646 case STRING_CST:
3647 return TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
3648 && !memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
3649 TREE_STRING_LENGTH (t1));
3650
3651 case FIXED_CST:
3652 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1),
3653 TREE_FIXED_CST (t2));
3654
3655 case COMPLEX_CST:
3656 return cp_tree_equal (TREE_REALPART (t1), TREE_REALPART (t2))
3657 && cp_tree_equal (TREE_IMAGPART (t1), TREE_IMAGPART (t2));
3658
3659 case VECTOR_CST:
3660 return operand_equal_p (t1, t2, OEP_ONLY_CONST);
3661
3662 case CONSTRUCTOR:
3663 /* We need to do this when determining whether or not two
3664 non-type pointer to member function template arguments
3665 are the same. */
3666 if (!same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))
3667 || CONSTRUCTOR_NELTS (t1) != CONSTRUCTOR_NELTS (t2))
3668 return false;
3669 {
3670 tree field, value;
3671 unsigned int i;
3672 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, field, value)
3673 {
3674 constructor_elt *elt2 = CONSTRUCTOR_ELT (t2, i);
3675 if (!cp_tree_equal (field, elt2->index)
3676 || !cp_tree_equal (value, elt2->value))
3677 return false;
3678 }
3679 }
3680 return true;
3681
3682 case TREE_LIST:
3683 if (!cp_tree_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2)))
3684 return false;
3685 if (!cp_tree_equal (TREE_VALUE (t1), TREE_VALUE (t2)))
3686 return false;
3687 return cp_tree_equal (TREE_CHAIN (t1), TREE_CHAIN (t2));
3688
3689 case SAVE_EXPR:
3690 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
3691
3692 case CALL_EXPR:
3693 {
3694 tree arg1, arg2;
3695 call_expr_arg_iterator iter1, iter2;
3696 if (!called_fns_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2)))
3697 return false;
3698 for (arg1 = first_call_expr_arg (t1, &iter1),
3699 arg2 = first_call_expr_arg (t2, &iter2);
3700 arg1 && arg2;
3701 arg1 = next_call_expr_arg (&iter1),
3702 arg2 = next_call_expr_arg (&iter2))
3703 if (!cp_tree_equal (arg1, arg2))
3704 return false;
3705 if (arg1 || arg2)
3706 return false;
3707 return true;
3708 }
3709
3710 case TARGET_EXPR:
3711 {
3712 tree o1 = TREE_OPERAND (t1, 0);
3713 tree o2 = TREE_OPERAND (t2, 0);
3714
3715 /* Special case: if either target is an unallocated VAR_DECL,
3716 it means that it's going to be unified with whatever the
3717 TARGET_EXPR is really supposed to initialize, so treat it
3718 as being equivalent to anything. */
3719 if (VAR_P (o1) && DECL_NAME (o1) == NULL_TREE
3720 && !DECL_RTL_SET_P (o1))
3721 /*Nop*/;
3722 else if (VAR_P (o2) && DECL_NAME (o2) == NULL_TREE
3723 && !DECL_RTL_SET_P (o2))
3724 /*Nop*/;
3725 else if (!cp_tree_equal (o1, o2))
3726 return false;
3727
3728 return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
3729 }
3730
3731 case PARM_DECL:
3732 /* This case arises when comparing uses of parameters in late-specified
3733 return types with an out-of-class definition of the function, but it
3734 can also come up for expressions that involve 'this' in a member
3735 function template. */
3736
3737 if (comparing_specializations && !CONSTRAINT_VAR_P (t1))
3738 /* When comparing hash table entries, only an exact match is
3739 good enough; we don't want to replace 'this' with the
3740 version from another function. But be more flexible
3741 with local parameters in a requires-expression. */
3742 return false;
3743
3744 if (same_type_p (TREE_TYPE (t1), TREE_TYPE (t2)))
3745 {
3746 if (DECL_ARTIFICIAL (t1) ^ DECL_ARTIFICIAL (t2))
3747 return false;
3748 if (CONSTRAINT_VAR_P (t1) ^ CONSTRAINT_VAR_P (t2))
3749 return false;
3750 if (DECL_ARTIFICIAL (t1)
3751 || (DECL_PARM_LEVEL (t1) == DECL_PARM_LEVEL (t2)
3752 && DECL_PARM_INDEX (t1) == DECL_PARM_INDEX (t2)))
3753 return true;
3754 }
3755 return false;
3756
3757 case VAR_DECL:
3758 case CONST_DECL:
3759 case FIELD_DECL:
3760 case FUNCTION_DECL:
3761 case TEMPLATE_DECL:
3762 case IDENTIFIER_NODE:
3763 case SSA_NAME:
3764 return false;
3765
3766 case BASELINK:
3767 return (BASELINK_BINFO (t1) == BASELINK_BINFO (t2)
3768 && BASELINK_ACCESS_BINFO (t1) == BASELINK_ACCESS_BINFO (t2)
3769 && BASELINK_QUALIFIED_P (t1) == BASELINK_QUALIFIED_P (t2)
3770 && cp_tree_equal (BASELINK_FUNCTIONS (t1),
3771 BASELINK_FUNCTIONS (t2)));
3772
3773 case TEMPLATE_PARM_INDEX:
3774 return (TEMPLATE_PARM_IDX (t1) == TEMPLATE_PARM_IDX (t2)
3775 && TEMPLATE_PARM_LEVEL (t1) == TEMPLATE_PARM_LEVEL (t2)
3776 && (TEMPLATE_PARM_PARAMETER_PACK (t1)
3777 == TEMPLATE_PARM_PARAMETER_PACK (t2))
3778 && same_type_p (TREE_TYPE (TEMPLATE_PARM_DECL (t1)),
3779 TREE_TYPE (TEMPLATE_PARM_DECL (t2))));
3780
3781 case TEMPLATE_ID_EXPR:
3782 return (cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0))
3783 && cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)));
3784
3785 case CONSTRAINT_INFO:
3786 return cp_tree_equal (CI_ASSOCIATED_CONSTRAINTS (t1),
3787 CI_ASSOCIATED_CONSTRAINTS (t2));
3788
3789 case CHECK_CONSTR:
3790 return (CHECK_CONSTR_CONCEPT (t1) == CHECK_CONSTR_CONCEPT (t2)
3791 && comp_template_args (CHECK_CONSTR_ARGS (t1),
3792 CHECK_CONSTR_ARGS (t2)));
3793
3794 case TREE_VEC:
3795 {
3796 unsigned ix;
3797 if (TREE_VEC_LENGTH (t1) != TREE_VEC_LENGTH (t2))
3798 return false;
3799 for (ix = TREE_VEC_LENGTH (t1); ix--;)
3800 if (!cp_tree_equal (TREE_VEC_ELT (t1, ix),
3801 TREE_VEC_ELT (t2, ix)))
3802 return false;
3803 return true;
3804 }
3805
3806 case SIZEOF_EXPR:
3807 case ALIGNOF_EXPR:
3808 {
3809 tree o1 = TREE_OPERAND (t1, 0);
3810 tree o2 = TREE_OPERAND (t2, 0);
3811
3812 if (code1 == SIZEOF_EXPR)
3813 {
3814 if (SIZEOF_EXPR_TYPE_P (t1))
3815 o1 = TREE_TYPE (o1);
3816 if (SIZEOF_EXPR_TYPE_P (t2))
3817 o2 = TREE_TYPE (o2);
3818 }
3819 else if (ALIGNOF_EXPR_STD_P (t1) != ALIGNOF_EXPR_STD_P (t2))
3820 return false;
3821
3822 if (TREE_CODE (o1) != TREE_CODE (o2))
3823 return false;
3824 if (TYPE_P (o1))
3825 return same_type_p (o1, o2);
3826 else
3827 return cp_tree_equal (o1, o2);
3828 }
3829
3830 case MODOP_EXPR:
3831 {
3832 tree t1_op1, t2_op1;
3833
3834 if (!cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)))
3835 return false;
3836
3837 t1_op1 = TREE_OPERAND (t1, 1);
3838 t2_op1 = TREE_OPERAND (t2, 1);
3839 if (TREE_CODE (t1_op1) != TREE_CODE (t2_op1))
3840 return false;
3841
3842 return cp_tree_equal (TREE_OPERAND (t1, 2), TREE_OPERAND (t2, 2));
3843 }
3844
3845 case PTRMEM_CST:
3846 /* Two pointer-to-members are the same if they point to the same
3847 field or function in the same class. */
3848 if (PTRMEM_CST_MEMBER (t1) != PTRMEM_CST_MEMBER (t2))
3849 return false;
3850
3851 return same_type_p (PTRMEM_CST_CLASS (t1), PTRMEM_CST_CLASS (t2));
3852
3853 case OVERLOAD:
3854 {
3855 /* Two overloads. Must be exactly the same set of decls. */
3856 lkp_iterator first (t1);
3857 lkp_iterator second (t2);
3858
3859 for (; first && second; ++first, ++second)
3860 if (*first != *second)
3861 return false;
3862 return !(first || second);
3863 }
3864
3865 case TRAIT_EXPR:
3866 if (TRAIT_EXPR_KIND (t1) != TRAIT_EXPR_KIND (t2))
3867 return false;
3868 return same_type_p (TRAIT_EXPR_TYPE1 (t1), TRAIT_EXPR_TYPE1 (t2))
3869 && cp_tree_equal (TRAIT_EXPR_TYPE2 (t1), TRAIT_EXPR_TYPE2 (t2));
3870
3871 case CAST_EXPR:
3872 case STATIC_CAST_EXPR:
3873 case REINTERPRET_CAST_EXPR:
3874 case CONST_CAST_EXPR:
3875 case DYNAMIC_CAST_EXPR:
3876 case IMPLICIT_CONV_EXPR:
3877 case NEW_EXPR:
3878 CASE_CONVERT:
3879 case NON_LVALUE_EXPR:
3880 case VIEW_CONVERT_EXPR:
3881 if (!same_type_p (TREE_TYPE (t1), TREE_TYPE (t2)))
3882 return false;
3883 /* Now compare operands as usual. */
3884 break;
3885
3886 case DEFERRED_NOEXCEPT:
3887 return (cp_tree_equal (DEFERRED_NOEXCEPT_PATTERN (t1),
3888 DEFERRED_NOEXCEPT_PATTERN (t2))
3889 && comp_template_args (DEFERRED_NOEXCEPT_ARGS (t1),
3890 DEFERRED_NOEXCEPT_ARGS (t2)));
3891 break;
3892
3893 case USING_DECL:
3894 if (DECL_DEPENDENT_P (t1) && DECL_DEPENDENT_P (t2))
3895 return (cp_tree_equal (USING_DECL_SCOPE (t1),
3896 USING_DECL_SCOPE (t2))
3897 && cp_tree_equal (DECL_NAME (t1),
3898 DECL_NAME (t2)));
3899 return false;
3900
3901 default:
3902 break;
3903 }
3904
3905 switch (TREE_CODE_CLASS (code1))
3906 {
3907 case tcc_unary:
3908 case tcc_binary:
3909 case tcc_comparison:
3910 case tcc_expression:
3911 case tcc_vl_exp:
3912 case tcc_reference:
3913 case tcc_statement:
3914 {
3915 int i, n;
3916
3917 n = cp_tree_operand_length (t1);
3918 if (TREE_CODE_CLASS (code1) == tcc_vl_exp
3919 && n != TREE_OPERAND_LENGTH (t2))
3920 return false;
3921
3922 for (i = 0; i < n; ++i)
3923 if (!cp_tree_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i)))
3924 return false;
3925
3926 return true;
3927 }
3928
3929 case tcc_type:
3930 return same_type_p (t1, t2);
3931 default:
3932 gcc_unreachable ();
3933 }
3934 /* We can get here with --disable-checking. */
3935 return false;
3936 }
3937
3938 /* The type of ARG when used as an lvalue. */
3939
3940 tree
3941 lvalue_type (tree arg)
3942 {
3943 tree type = TREE_TYPE (arg);
3944 return type;
3945 }
3946
3947 /* The type of ARG for printing error messages; denote lvalues with
3948 reference types. */
3949
3950 tree
3951 error_type (tree arg)
3952 {
3953 tree type = TREE_TYPE (arg);
3954
3955 if (TREE_CODE (type) == ARRAY_TYPE)
3956 ;
3957 else if (TREE_CODE (type) == ERROR_MARK)
3958 ;
3959 else if (lvalue_p (arg))
3960 type = build_reference_type (lvalue_type (arg));
3961 else if (MAYBE_CLASS_TYPE_P (type))
3962 type = lvalue_type (arg);
3963
3964 return type;
3965 }
3966
3967 /* Does FUNCTION use a variable-length argument list? */
3968
3969 int
3970 varargs_function_p (const_tree function)
3971 {
3972 return stdarg_p (TREE_TYPE (function));
3973 }
3974
3975 /* Returns 1 if decl is a member of a class. */
3976
3977 int
3978 member_p (const_tree decl)
3979 {
3980 const_tree const ctx = DECL_CONTEXT (decl);
3981 return (ctx && TYPE_P (ctx));
3982 }
3983
3984 /* Create a placeholder for member access where we don't actually have an
3985 object that the access is against. */
3986
3987 tree
3988 build_dummy_object (tree type)
3989 {
3990 tree decl = build1 (CONVERT_EXPR, build_pointer_type (type), void_node);
3991 return cp_build_fold_indirect_ref (decl);
3992 }
3993
3994 /* We've gotten a reference to a member of TYPE. Return *this if appropriate,
3995 or a dummy object otherwise. If BINFOP is non-0, it is filled with the
3996 binfo path from current_class_type to TYPE, or 0. */
3997
3998 tree
3999 maybe_dummy_object (tree type, tree* binfop)
4000 {
4001 tree decl, context;
4002 tree binfo;
4003 tree current = current_nonlambda_class_type ();
4004
4005 if (current
4006 && (binfo = lookup_base (current, type, ba_any, NULL,
4007 tf_warning_or_error)))
4008 context = current;
4009 else
4010 {
4011 /* Reference from a nested class member function. */
4012 context = type;
4013 binfo = TYPE_BINFO (type);
4014 }
4015
4016 if (binfop)
4017 *binfop = binfo;
4018
4019 if (current_class_ref
4020 /* current_class_ref might not correspond to current_class_type if
4021 we're in tsubst_default_argument or a lambda-declarator; in either
4022 case, we want to use current_class_ref if it matches CONTEXT. */
4023 && (same_type_ignoring_top_level_qualifiers_p
4024 (TREE_TYPE (current_class_ref), context)))
4025 decl = current_class_ref;
4026 else
4027 decl = build_dummy_object (context);
4028
4029 return decl;
4030 }
4031
4032 /* Returns 1 if OB is a placeholder object, or a pointer to one. */
4033
4034 int
4035 is_dummy_object (const_tree ob)
4036 {
4037 if (INDIRECT_REF_P (ob))
4038 ob = TREE_OPERAND (ob, 0);
4039 return (TREE_CODE (ob) == CONVERT_EXPR
4040 && TREE_OPERAND (ob, 0) == void_node);
4041 }
4042
4043 /* Returns 1 iff type T is something we want to treat as a scalar type for
4044 the purpose of deciding whether it is trivial/POD/standard-layout. */
4045
4046 bool
4047 scalarish_type_p (const_tree t)
4048 {
4049 if (t == error_mark_node)
4050 return 1;
4051
4052 return (SCALAR_TYPE_P (t) || VECTOR_TYPE_P (t));
4053 }
4054
4055 /* Returns true iff T requires non-trivial default initialization. */
4056
4057 bool
4058 type_has_nontrivial_default_init (const_tree t)
4059 {
4060 t = strip_array_types (CONST_CAST_TREE (t));
4061
4062 if (CLASS_TYPE_P (t))
4063 return TYPE_HAS_COMPLEX_DFLT (t);
4064 else
4065 return 0;
4066 }
4067
4068 /* Track classes with only deleted copy/move constructors so that we can warn
4069 if they are used in call/return by value. */
4070
4071 static GTY(()) hash_set<tree>* deleted_copy_types;
4072 static void
4073 remember_deleted_copy (const_tree t)
4074 {
4075 if (!deleted_copy_types)
4076 deleted_copy_types = hash_set<tree>::create_ggc(37);
4077 deleted_copy_types->add (CONST_CAST_TREE (t));
4078 }
4079 void
4080 maybe_warn_parm_abi (tree t, location_t loc)
4081 {
4082 if (!deleted_copy_types
4083 || !deleted_copy_types->contains (t))
4084 return;
4085
4086 if ((flag_abi_version == 12 || warn_abi_version == 12)
4087 && classtype_has_non_deleted_move_ctor (t))
4088 {
4089 bool w;
4090 if (flag_abi_version > 12)
4091 w = warning_at (loc, OPT_Wabi, "-fabi-version=13 (GCC 8.2) fixes the "
4092 "calling convention for %qT, which was accidentally "
4093 "changed in 8.1", t);
4094 else
4095 w = warning_at (loc, OPT_Wabi, "-fabi-version=12 (GCC 8.1) accident"
4096 "ally changes the calling convention for %qT", t);
4097 if (w)
4098 inform (location_of (t), " declared here");
4099 return;
4100 }
4101
4102 if (warning_at (loc, OPT_Wabi, "the calling convention for %qT changes in "
4103 "-fabi-version=13 (GCC 8.2)", t))
4104 inform (location_of (t), " because all of its copy and move "
4105 "constructors are deleted");
4106 }
4107
4108 /* Returns true iff copying an object of type T (including via move
4109 constructor) is non-trivial. That is, T has a non-trivial copy
4110 constructor or a non-trivial move constructor, or all of its copy/move
4111 constructors are deleted. This function implements the ABI notion of
4112 non-trivial copy, which has diverged from the one in the standard. */
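/* For example (illustrative):

     struct S { S (const S&) = delete; S (S&&) = delete; int i; };

   is trivially copyable as far as the standard is concerned, but under
   the ABI rule implemented here it is treated as non-trivial (all of its
   copy/move constructors are deleted), so it is passed and returned by
   invisible reference rather than in registers.  */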
4113
4114 bool
4115 type_has_nontrivial_copy_init (const_tree type)
4116 {
4117 tree t = strip_array_types (CONST_CAST_TREE (type));
4118
4119 if (CLASS_TYPE_P (t))
4120 {
4121 gcc_assert (COMPLETE_TYPE_P (t));
4122
4123 if (TYPE_HAS_COMPLEX_COPY_CTOR (t)
4124 || TYPE_HAS_COMPLEX_MOVE_CTOR (t))
4125 /* Nontrivial. */
4126 return true;
4127
4128 if (cxx_dialect < cxx11)
4129 /* No deleted functions before C++11. */
4130 return false;
4131
4132 /* Before ABI v12 we did a bitwise copy of types with only deleted
4133 copy/move constructors. */
4134 if (!abi_version_at_least (12)
4135 && !(warn_abi && abi_version_crosses (12)))
4136 return false;
4137
4138 bool saw_copy = false;
4139 bool saw_non_deleted = false;
4140 bool saw_non_deleted_move = false;
4141
4142 if (CLASSTYPE_LAZY_MOVE_CTOR (t))
4143 saw_copy = saw_non_deleted = true;
4144 else if (CLASSTYPE_LAZY_COPY_CTOR (t))
4145 {
4146 saw_copy = true;
4147 if (classtype_has_move_assign_or_move_ctor_p (t, true))
4148 /* [class.copy]/8 If the class definition declares a move
4149 constructor or move assignment operator, the implicitly declared
4150 copy constructor is defined as deleted.... */;
4151 else
4152 /* Any other reason the implicitly-declared function would be
4153 deleted would also cause TYPE_HAS_COMPLEX_COPY_CTOR to be
4154 set. */
4155 saw_non_deleted = true;
4156 }
4157
4158 if (!saw_non_deleted)
4159 for (ovl_iterator iter (CLASSTYPE_CONSTRUCTORS (t)); iter; ++iter)
4160 {
4161 tree fn = *iter;
4162 if (copy_fn_p (fn))
4163 {
4164 saw_copy = true;
4165 if (!DECL_DELETED_FN (fn))
4166 {
4167 /* Not deleted, therefore trivial. */
4168 saw_non_deleted = true;
4169 break;
4170 }
4171 }
4172 else if (move_fn_p (fn))
4173 if (!DECL_DELETED_FN (fn))
4174 saw_non_deleted_move = true;
4175 }
4176
4177 gcc_assert (saw_copy);
4178
4179 /* ABI v12 buggily ignored move constructors. */
4180 bool v11nontriv = false;
4181 bool v12nontriv = !saw_non_deleted;
4182 bool v13nontriv = !saw_non_deleted && !saw_non_deleted_move;
4183 bool nontriv = (abi_version_at_least (13) ? v13nontriv
4184 : flag_abi_version == 12 ? v12nontriv
4185 : v11nontriv);
4186 bool warn_nontriv = (warn_abi_version >= 13 ? v13nontriv
4187 : warn_abi_version == 12 ? v12nontriv
4188 : v11nontriv);
4189 if (nontriv != warn_nontriv)
4190 remember_deleted_copy (t);
4191
4192 return nontriv;
4193 }
4194 else
4195 return 0;
4196 }
4197
4198 /* Returns 1 iff type T is a trivially copyable type, as defined in
4199 [basic.types] and [class]. */
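/* Illustrative examples (the class names are made up):

     struct A { int i; double d; };     // yes: implicit copy/move, trivial dtor
     struct B { B (const B&); ~B (); }; // no: user-provided copy ctor and dtor

   Note that this predicate also rejects volatile-qualified scalar types;
   see the CP_TYPE_VOLATILE_P check below.  */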
4200
4201 bool
4202 trivially_copyable_p (const_tree t)
4203 {
4204 t = strip_array_types (CONST_CAST_TREE (t));
4205
4206 if (CLASS_TYPE_P (t))
4207 return ((!TYPE_HAS_COPY_CTOR (t)
4208 || !TYPE_HAS_COMPLEX_COPY_CTOR (t))
4209 && !TYPE_HAS_COMPLEX_MOVE_CTOR (t)
4210 && (!TYPE_HAS_COPY_ASSIGN (t)
4211 || !TYPE_HAS_COMPLEX_COPY_ASSIGN (t))
4212 && !TYPE_HAS_COMPLEX_MOVE_ASSIGN (t)
4213 && TYPE_HAS_TRIVIAL_DESTRUCTOR (t));
4214 else
4215 return !CP_TYPE_VOLATILE_P (t) && scalarish_type_p (t);
4216 }
4217
4218 /* Returns 1 iff type T is a trivial type, as defined in [basic.types] and
4219 [class]. */
4220
4221 bool
4222 trivial_type_p (const_tree t)
4223 {
4224 t = strip_array_types (CONST_CAST_TREE (t));
4225
4226 if (CLASS_TYPE_P (t))
4227 return (TYPE_HAS_TRIVIAL_DFLT (t)
4228 && trivially_copyable_p (t));
4229 else
4230 return scalarish_type_p (t);
4231 }
4232
4233 /* Returns 1 iff type T is a POD type, as defined in [basic.types]. */
4234
4235 bool
4236 pod_type_p (const_tree t)
4237 {
4238 /* This CONST_CAST is okay because strip_array_types returns its
4239 argument unmodified and we assign it to a const_tree. */
4240 t = strip_array_types (CONST_CAST_TREE(t));
4241
4242 if (!CLASS_TYPE_P (t))
4243 return scalarish_type_p (t);
4244 else if (cxx_dialect > cxx98)
4245 /* [class]/10: A POD struct is a class that is both a trivial class and a
4246 standard-layout class, and has no non-static data members of type
4247 non-POD struct, non-POD union (or array of such types).
4248
4249 We don't need to check individual members because if a member is
4250 non-std-layout or non-trivial, the class will be too. */
4251 return (std_layout_type_p (t) && trivial_type_p (t));
4252 else
4253 /* The C++98 definition of POD is different. */
4254 return !CLASSTYPE_NON_LAYOUT_POD_P (t);
4255 }
4256
4257 /* Returns true iff T is POD for the purpose of layout, as defined in the
4258 C++ ABI. */
4259
4260 bool
4261 layout_pod_type_p (const_tree t)
4262 {
4263 t = strip_array_types (CONST_CAST_TREE (t));
4264
4265 if (CLASS_TYPE_P (t))
4266 return !CLASSTYPE_NON_LAYOUT_POD_P (t);
4267 else
4268 return scalarish_type_p (t);
4269 }
4270
4271 /* Returns true iff T is a standard-layout type, as defined in
4272 [basic.types]. */
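/* Illustrative examples (the class names are made up):

     struct A { int i; int j; };              // standard-layout
     struct B { int i; private: int j; };     // no: mixed access control
     struct C : A { virtual void f (); };     // no: virtual functions

   The class cases are answered from the precomputed CLASSTYPE_NON_STD_LAYOUT
   flag below.  */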
4273
4274 bool
4275 std_layout_type_p (const_tree t)
4276 {
4277 t = strip_array_types (CONST_CAST_TREE (t));
4278
4279 if (CLASS_TYPE_P (t))
4280 return !CLASSTYPE_NON_STD_LAYOUT (t);
4281 else
4282 return scalarish_type_p (t);
4283 }
4284
4285 static bool record_has_unique_obj_representations (const_tree, const_tree);
4286
4287 /* Returns true iff T satisfies std::has_unique_object_representations<T>,
4288 as defined in [meta.unary.prop]. */
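/* Illustrative examples (the class names are made up):

     struct A { int i; int j; };    // yes: no padding between or after members
     struct B { char c; int i; };   // no: padding bytes after c

   assuming a typical target where int is wider than char and has stricter
   alignment.  */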
4289
4290 bool
4291 type_has_unique_obj_representations (const_tree t)
4292 {
4293 bool ret;
4294
4295 t = strip_array_types (CONST_CAST_TREE (t));
4296
4297 if (!trivially_copyable_p (t))
4298 return false;
4299
4300 if (CLASS_TYPE_P (t) && CLASSTYPE_UNIQUE_OBJ_REPRESENTATIONS_SET (t))
4301 return CLASSTYPE_UNIQUE_OBJ_REPRESENTATIONS (t);
4302
4303 switch (TREE_CODE (t))
4304 {
4305 case INTEGER_TYPE:
4306 case POINTER_TYPE:
4307 case REFERENCE_TYPE:
4308 /* If some backend has any padding in these types, we should add
4309 a target hook for this and handle it there. */
4310 return true;
4311
4312 case BOOLEAN_TYPE:
4313 /* For bool, values other than 0 and 1 should only appear as a
4314 result of undefined behavior. */
4315 return true;
4316
4317 case ENUMERAL_TYPE:
4318 return type_has_unique_obj_representations (ENUM_UNDERLYING_TYPE (t));
4319
4320 case REAL_TYPE:
4321 /* XFmode certainly contains padding on x86, which the CPU doesn't store
4322 when storing long double values, so for that we have to return false.
4323 Other kinds of floating point values are questionable due to +.0/-.0
4324 and NaNs, so let's play it safe for now. */
4325 return false;
4326
4327 case FIXED_POINT_TYPE:
4328 return false;
4329
4330 case OFFSET_TYPE:
4331 return true;
4332
4333 case COMPLEX_TYPE:
4334 case VECTOR_TYPE:
4335 return type_has_unique_obj_representations (TREE_TYPE (t));
4336
4337 case RECORD_TYPE:
4338 ret = record_has_unique_obj_representations (t, TYPE_SIZE (t));
4339 if (CLASS_TYPE_P (t))
4340 {
4341 CLASSTYPE_UNIQUE_OBJ_REPRESENTATIONS_SET (t) = 1;
4342 CLASSTYPE_UNIQUE_OBJ_REPRESENTATIONS (t) = ret;
4343 }
4344 return ret;
4345
4346 case UNION_TYPE:
4347 ret = true;
4348 bool any_fields;
4349 any_fields = false;
4350 for (tree field = TYPE_FIELDS (t); field; field = DECL_CHAIN (field))
4351 if (TREE_CODE (field) == FIELD_DECL)
4352 {
4353 any_fields = true;
4354 if (!type_has_unique_obj_representations (TREE_TYPE (field))
4355 || simple_cst_equal (DECL_SIZE (field), TYPE_SIZE (t)) != 1)
4356 {
4357 ret = false;
4358 break;
4359 }
4360 }
4361 if (!any_fields && !integer_zerop (TYPE_SIZE (t)))
4362 ret = false;
4363 if (CLASS_TYPE_P (t))
4364 {
4365 CLASSTYPE_UNIQUE_OBJ_REPRESENTATIONS_SET (t) = 1;
4366 CLASSTYPE_UNIQUE_OBJ_REPRESENTATIONS (t) = ret;
4367 }
4368 return ret;
4369
4370 case NULLPTR_TYPE:
4371 return false;
4372
4373 case ERROR_MARK:
4374 return false;
4375
4376 default:
4377 gcc_unreachable ();
4378 }
4379 }
4380
4381 /* Helper function for type_has_unique_obj_representations. */
4382
4383 static bool
4384 record_has_unique_obj_representations (const_tree t, const_tree sz)
4385 {
4386 for (tree field = TYPE_FIELDS (t); field; field = DECL_CHAIN (field))
4387 if (TREE_CODE (field) != FIELD_DECL)
4388 ;
4389 /* For bases, can't use type_has_unique_obj_representations here, as in
4390 struct S { int i : 24; S (); };
4391 struct T : public S { int j : 8; T (); };
4392 S doesn't have unique obj representations, but T does. */
4393 else if (DECL_FIELD_IS_BASE (field))
4394 {
4395 if (!record_has_unique_obj_representations (TREE_TYPE (field),
4396 DECL_SIZE (field)))
4397 return false;
4398 }
4399 else if (DECL_C_BIT_FIELD (field))
4400 {
4401 tree btype = DECL_BIT_FIELD_TYPE (field);
4402 if (!type_has_unique_obj_representations (btype))
4403 return false;
4404 }
4405 else if (!type_has_unique_obj_representations (TREE_TYPE (field)))
4406 return false;
4407
4408 offset_int cur = 0;
4409 for (tree field = TYPE_FIELDS (t); field; field = DECL_CHAIN (field))
4410 if (TREE_CODE (field) == FIELD_DECL)
4411 {
4412 offset_int fld = wi::to_offset (DECL_FIELD_OFFSET (field));
4413 offset_int bitpos = wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
4414 fld = fld * BITS_PER_UNIT + bitpos;
4415 if (cur != fld)
4416 return false;
4417 if (DECL_SIZE (field))
4418 {
4419 offset_int size = wi::to_offset (DECL_SIZE (field));
4420 cur += size;
4421 }
4422 }
4423 if (cur != wi::to_offset (sz))
4424 return false;
4425
4426 return true;
4427 }
4428
4429 /* Nonzero iff type T is a class template implicit specialization. */
4430
4431 bool
4432 class_tmpl_impl_spec_p (const_tree t)
4433 {
4434 return CLASS_TYPE_P (t) && CLASSTYPE_TEMPLATE_INSTANTIATION (t);
4435 }
4436
4437 /* Returns 1 iff zero initialization of type T means actually storing
4438 zeros in it. */
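/* For example (illustrative): given

     struct A { int i; };
     int A::*pm = nullptr;

   the null pointer-to-data-member is represented as -1 rather than as all
   zero bits, so zero_init_p is false for the type int A::*.  */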
4439
4440 int
4441 zero_init_p (const_tree t)
4442 {
4443 /* This CONST_CAST is okay because strip_array_types returns its
4444 argument unmodified and we assign it to a const_tree. */
4445 t = strip_array_types (CONST_CAST_TREE(t));
4446
4447 if (t == error_mark_node)
4448 return 1;
4449
4450 /* NULL pointers to data members are initialized with -1. */
4451 if (TYPE_PTRDATAMEM_P (t))
4452 return 0;
4453
4454 /* Classes that contain types that can't be zero-initialized cannot
4455 be zero-initialized themselves. */
4456 if (CLASS_TYPE_P (t) && CLASSTYPE_NON_ZERO_INIT_P (t))
4457 return 0;
4458
4459 return 1;
4460 }
4461
4462 /* Handle the C++17 [[nodiscard]] attribute, which is similar to the GNU
4463 warn_unused_result attribute. */
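/* For example (illustrative):

     [[nodiscard]] int f ();
     struct [[nodiscard]] status { int code; };

   a call whose result is discarded, such as a bare "f ();" or a discarded
   call to a function returning status, is diagnosed as an unused result.  */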
4464
4465 static tree
4466 handle_nodiscard_attribute (tree *node, tree name, tree /*args*/,
4467 int /*flags*/, bool *no_add_attrs)
4468 {
4469 if (TREE_CODE (*node) == FUNCTION_DECL)
4470 {
4471 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
4472 warning (OPT_Wattributes, "%qE attribute applied to %qD with void "
4473 "return type", name, *node);
4474 }
4475 else if (OVERLOAD_TYPE_P (*node))
4476 /* OK */;
4477 else
4478 {
4479 warning (OPT_Wattributes, "%qE attribute can only be applied to "
4480 "functions or to class or enumeration types", name);
4481 *no_add_attrs = true;
4482 }
4483 return NULL_TREE;
4484 }
4485
4486 /* Table of valid C++ attributes. */
4487 const struct attribute_spec cxx_attribute_table[] =
4488 {
4489 /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
4490 affects_type_identity, handler, exclude } */
4491 { "init_priority", 1, 1, true, false, false, false,
4492 handle_init_priority_attribute, NULL },
4493 { "abi_tag", 1, -1, false, false, false, true,
4494 handle_abi_tag_attribute, NULL },
4495 { NULL, 0, 0, false, false, false, false, NULL, NULL }
4496 };
4497
4498 /* Table of C++ standard attributes. */
4499 const struct attribute_spec std_attribute_table[] =
4500 {
4501 /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
4502 affects_type_identity, handler, exclude } */
4503 { "maybe_unused", 0, 0, false, false, false, false,
4504 handle_unused_attribute, NULL },
4505 { "nodiscard", 0, 0, false, false, false, false,
4506 handle_nodiscard_attribute, NULL },
4507 { NULL, 0, 0, false, false, false, false, NULL, NULL }
4508 };
4509
4510 /* Handle an "init_priority" attribute; arguments as in
4511 struct attribute_spec.handler. */
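/* For example (illustrative), the attribute is written on a file-scope
   object of class type:

     struct Logger { Logger (); };
     Logger logger __attribute__ ((init_priority (2000)));

   where objects with a lower priority number are constructed earlier.  */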
4512 static tree
4513 handle_init_priority_attribute (tree* node,
4514 tree name,
4515 tree args,
4516 int /*flags*/,
4517 bool* no_add_attrs)
4518 {
4519 tree initp_expr = TREE_VALUE (args);
4520 tree decl = *node;
4521 tree type = TREE_TYPE (decl);
4522 int pri;
4523
4524 STRIP_NOPS (initp_expr);
4525 initp_expr = default_conversion (initp_expr);
4526 if (initp_expr)
4527 initp_expr = maybe_constant_value (initp_expr);
4528
4529 if (!initp_expr || TREE_CODE (initp_expr) != INTEGER_CST)
4530 {
4531 error ("requested init_priority is not an integer constant");
4532 cxx_constant_value (initp_expr);
4533 *no_add_attrs = true;
4534 return NULL_TREE;
4535 }
4536
4537 pri = TREE_INT_CST_LOW (initp_expr);
4538
4539 type = strip_array_types (type);
4540
4541 if (decl == NULL_TREE
4542 || !VAR_P (decl)
4543 || !TREE_STATIC (decl)
4544 || DECL_EXTERNAL (decl)
4545 || (TREE_CODE (type) != RECORD_TYPE
4546 && TREE_CODE (type) != UNION_TYPE)
4547 /* Static objects in functions are initialized the
4548 first time control passes through that
4549 function. This is not precise enough to pin down an
4550 init_priority value, so don't allow it. */
4551 || current_function_decl)
4552 {
4553 error ("can only use %qE attribute on file-scope definitions "
4554 "of objects of class type", name);
4555 *no_add_attrs = true;
4556 return NULL_TREE;
4557 }
4558
4559 if (pri > MAX_INIT_PRIORITY || pri <= 0)
4560 {
4561 error ("requested init_priority is out of range");
4562 *no_add_attrs = true;
4563 return NULL_TREE;
4564 }
4565
4566 /* Check for init_priorities that are reserved for
4567 language and runtime support implementations. */
4568 if (pri <= MAX_RESERVED_INIT_PRIORITY)
4569 {
4570 warning
4571 (0, "requested init_priority is reserved for internal use");
4572 }
4573
4574 if (SUPPORTS_INIT_PRIORITY)
4575 {
4576 SET_DECL_INIT_PRIORITY (decl, pri);
4577 DECL_HAS_INIT_PRIORITY_P (decl) = 1;
4578 return NULL_TREE;
4579 }
4580 else
4581 {
4582 error ("%qE attribute is not supported on this platform", name);
4583 *no_add_attrs = true;
4584 return NULL_TREE;
4585 }
4586 }
4587
4588 /* DECL is being redeclared; the old declaration had the abi tags in OLD,
4589 and the new one has the tags in NEW_. Give an error if there are tags
4590 in NEW_ that weren't in OLD. */
4591
4592 bool
4593 check_abi_tag_redeclaration (const_tree decl, const_tree old, const_tree new_)
4594 {
4595 if (old && TREE_CODE (TREE_VALUE (old)) == TREE_LIST)
4596 old = TREE_VALUE (old);
4597 if (new_ && TREE_CODE (TREE_VALUE (new_)) == TREE_LIST)
4598 new_ = TREE_VALUE (new_);
4599 bool err = false;
4600 for (const_tree t = new_; t; t = TREE_CHAIN (t))
4601 {
4602 tree str = TREE_VALUE (t);
4603 for (const_tree in = old; in; in = TREE_CHAIN (in))
4604 {
4605 tree ostr = TREE_VALUE (in);
4606 if (cp_tree_equal (str, ostr))
4607 goto found;
4608 }
4609 error ("redeclaration of %qD adds abi tag %qE", decl, str);
4610 err = true;
4611 found:;
4612 }
4613 if (err)
4614 {
4615 inform (DECL_SOURCE_LOCATION (decl), "previous declaration here");
4616 return false;
4617 }
4618 return true;
4619 }
4620
4621 /* The abi_tag attribute with the name NAME was given ARGS. If they are
4622 ill-formed, give an error and return false; otherwise, return true. */
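/* For example (illustrative):

     struct __attribute__ ((abi_tag ("cxx11"))) S { };   // OK
     struct __attribute__ ((abi_tag (1))) T { };         // error: not a string literal
     struct __attribute__ ((abi_tag ("1bad"))) U { };    // error: not a valid identifier
   */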
4623
4624 bool
4625 check_abi_tag_args (tree args, tree name)
4626 {
4627 if (!args)
4628 {
4629 error ("the %qE attribute requires arguments", name);
4630 return false;
4631 }
4632 for (tree arg = args; arg; arg = TREE_CHAIN (arg))
4633 {
4634 tree elt = TREE_VALUE (arg);
4635 if (TREE_CODE (elt) != STRING_CST
4636 || (!same_type_ignoring_top_level_qualifiers_p
4637 (strip_array_types (TREE_TYPE (elt)),
4638 char_type_node)))
4639 {
4640 error ("arguments to the %qE attribute must be narrow string "
4641 "literals", name);
4642 return false;
4643 }
4644 const char *begin = TREE_STRING_POINTER (elt);
4645 const char *end = begin + TREE_STRING_LENGTH (elt);
4646 for (const char *p = begin; p != end; ++p)
4647 {
4648 char c = *p;
4649 if (p == begin)
4650 {
4651 if (!ISALPHA (c) && c != '_')
4652 {
4653 error ("arguments to the %qE attribute must contain valid "
4654 "identifiers", name);
4655 inform (input_location, "%<%c%> is not a valid first "
4656 "character for an identifier", c);
4657 return false;
4658 }
4659 }
4660 else if (p == end - 1)
4661 gcc_assert (c == 0);
4662 else
4663 {
4664 if (!ISALNUM (c) && c != '_')
4665 {
4666 error ("arguments to the %qE attribute must contain valid "
4667 "identifiers", name);
4668 inform (input_location, "%<%c%> is not a valid character "
4669 "in an identifier", c);
4670 return false;
4671 }
4672 }
4673 }
4674 }
4675 return true;
4676 }
4677
4678 /* Handle an "abi_tag" attribute; arguments as in
4679 struct attribute_spec.handler. */
4680
4681 static tree
4682 handle_abi_tag_attribute (tree* node, tree name, tree args,
4683 int flags, bool* no_add_attrs)
4684 {
4685 if (!check_abi_tag_args (args, name))
4686 goto fail;
4687
4688 if (TYPE_P (*node))
4689 {
4690 if (!OVERLOAD_TYPE_P (*node))
4691 {
4692 error ("%qE attribute applied to non-class, non-enum type %qT",
4693 name, *node);
4694 goto fail;
4695 }
4696 else if (!(flags & (int)ATTR_FLAG_TYPE_IN_PLACE))
4697 {
4698 error ("%qE attribute applied to %qT after its definition",
4699 name, *node);
4700 goto fail;
4701 }
4702 else if (CLASS_TYPE_P (*node)
4703 && CLASSTYPE_TEMPLATE_INSTANTIATION (*node))
4704 {
4705 warning (OPT_Wattributes, "ignoring %qE attribute applied to "
4706 "template instantiation %qT", name, *node);
4707 goto fail;
4708 }
4709 else if (CLASS_TYPE_P (*node)
4710 && CLASSTYPE_TEMPLATE_SPECIALIZATION (*node))
4711 {
4712 warning (OPT_Wattributes, "ignoring %qE attribute applied to "
4713 "template specialization %qT", name, *node);
4714 goto fail;
4715 }
4716
4717 tree attributes = TYPE_ATTRIBUTES (*node);
4718 tree decl = TYPE_NAME (*node);
4719
4720 /* Make sure all declarations have the same abi tags. */
4721 if (DECL_SOURCE_LOCATION (decl) != input_location)
4722 {
4723 if (!check_abi_tag_redeclaration (decl,
4724 lookup_attribute ("abi_tag",
4725 attributes),
4726 args))
4727 goto fail;
4728 }
4729 }
4730 else
4731 {
4732 if (!VAR_OR_FUNCTION_DECL_P (*node))
4733 {
4734 error ("%qE attribute applied to non-function, non-variable %qD",
4735 name, *node);
4736 goto fail;
4737 }
4738 else if (DECL_LANGUAGE (*node) == lang_c)
4739 {
4740 error ("%qE attribute applied to extern \"C\" declaration %qD",
4741 name, *node);
4742 goto fail;
4743 }
4744 }
4745
4746 return NULL_TREE;
4747
4748 fail:
4749 *no_add_attrs = true;
4750 return NULL_TREE;
4751 }
4752
4753 /* Return a new PTRMEM_CST of the indicated TYPE. The MEMBER is the
4754 thing pointed to by the constant. */
4755
4756 tree
4757 make_ptrmem_cst (tree type, tree member)
4758 {
4759 tree ptrmem_cst = make_node (PTRMEM_CST);
4760 TREE_TYPE (ptrmem_cst) = type;
4761 PTRMEM_CST_MEMBER (ptrmem_cst) = member;
4762 return ptrmem_cst;
4763 }
4764
4765 /* Build a variant of TYPE that has the indicated ATTRIBUTES. May
4766 return an existing type if an appropriate type already exists. */
4767
4768 tree
4769 cp_build_type_attribute_variant (tree type, tree attributes)
4770 {
4771 tree new_type;
4772
4773 new_type = build_type_attribute_variant (type, attributes);
4774 if (TREE_CODE (new_type) == FUNCTION_TYPE
4775 || TREE_CODE (new_type) == METHOD_TYPE)
4776 {
4777 new_type = build_exception_variant (new_type,
4778 TYPE_RAISES_EXCEPTIONS (type));
4779 new_type = build_ref_qualified_type (new_type,
4780 type_memfn_rqual (type));
4781 }
4782
4783 /* Making a new main variant of a class type is broken. */
4784 gcc_assert (!CLASS_TYPE_P (type) || new_type == type);
4785
4786 return new_type;
4787 }
4788
4789 /* Return TRUE if TYPE1 and TYPE2 are identical for type hashing purposes.
4790 Called only after doing all language independent checks. */
4791
4792 bool
4793 cxx_type_hash_eq (const_tree typea, const_tree typeb)
4794 {
4795 gcc_assert (TREE_CODE (typea) == FUNCTION_TYPE
4796 || TREE_CODE (typea) == METHOD_TYPE);
4797
4798 if (type_memfn_rqual (typea) != type_memfn_rqual (typeb))
4799 return false;
4800 return comp_except_specs (TYPE_RAISES_EXCEPTIONS (typea),
4801 TYPE_RAISES_EXCEPTIONS (typeb), ce_exact);
4802 }
4803
4804 /* Copy the language-specific type variant modifiers from TYPEB to TYPEA. For
4805 C++, these are the exception-specifier and ref-qualifier. */
4806
4807 tree
4808 cxx_copy_lang_qualifiers (const_tree typea, const_tree typeb)
4809 {
4810 tree type = CONST_CAST_TREE (typea);
4811 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4812 {
4813 type = build_exception_variant (type, TYPE_RAISES_EXCEPTIONS (typeb));
4814 type = build_ref_qualified_type (type, type_memfn_rqual (typeb));
4815 }
4816 return type;
4817 }
4818
4819 /* Apply FUNC to all language-specific sub-trees of TP in a pre-order
4820 traversal. Called from walk_tree. */
4821
4822 tree
4823 cp_walk_subtrees (tree *tp, int *walk_subtrees_p, walk_tree_fn func,
4824 void *data, hash_set<tree> *pset)
4825 {
4826 enum tree_code code = TREE_CODE (*tp);
4827 tree result;
4828
4829 #define WALK_SUBTREE(NODE) \
4830 do \
4831 { \
4832 result = cp_walk_tree (&(NODE), func, data, pset); \
4833 if (result) goto out; \
4834 } \
4835 while (0)
4836
4837 /* Not one of the easy cases. We must explicitly go through the
4838 children. */
4839 result = NULL_TREE;
4840 switch (code)
4841 {
4842 case DEFAULT_ARG:
4843 case TEMPLATE_TEMPLATE_PARM:
4844 case BOUND_TEMPLATE_TEMPLATE_PARM:
4845 case UNBOUND_CLASS_TEMPLATE:
4846 case TEMPLATE_PARM_INDEX:
4847 case TEMPLATE_TYPE_PARM:
4848 case TYPENAME_TYPE:
4849 case TYPEOF_TYPE:
4850 case UNDERLYING_TYPE:
4851 /* None of these have subtrees other than those already walked
4852 above. */
4853 *walk_subtrees_p = 0;
4854 break;
4855
4856 case BASELINK:
4857 if (BASELINK_QUALIFIED_P (*tp))
4858 WALK_SUBTREE (BINFO_TYPE (BASELINK_ACCESS_BINFO (*tp)));
4859 WALK_SUBTREE (BASELINK_FUNCTIONS (*tp));
4860 *walk_subtrees_p = 0;
4861 break;
4862
4863 case PTRMEM_CST:
4864 WALK_SUBTREE (TREE_TYPE (*tp));
4865 *walk_subtrees_p = 0;
4866 break;
4867
4868 case TREE_LIST:
4869 WALK_SUBTREE (TREE_PURPOSE (*tp));
4870 break;
4871
4872 case OVERLOAD:
4873 WALK_SUBTREE (OVL_FUNCTION (*tp));
4874 WALK_SUBTREE (OVL_CHAIN (*tp));
4875 *walk_subtrees_p = 0;
4876 break;
4877
4878 case USING_DECL:
4879 WALK_SUBTREE (DECL_NAME (*tp));
4880 WALK_SUBTREE (USING_DECL_SCOPE (*tp));
4881 WALK_SUBTREE (USING_DECL_DECLS (*tp));
4882 *walk_subtrees_p = 0;
4883 break;
4884
4885 case RECORD_TYPE:
4886 if (TYPE_PTRMEMFUNC_P (*tp))
4887 WALK_SUBTREE (TYPE_PTRMEMFUNC_FN_TYPE_RAW (*tp));
4888 break;
4889
4890 case TYPE_ARGUMENT_PACK:
4891 case NONTYPE_ARGUMENT_PACK:
4892 {
4893 tree args = ARGUMENT_PACK_ARGS (*tp);
4894 int i, len = TREE_VEC_LENGTH (args);
4895 for (i = 0; i < len; i++)
4896 WALK_SUBTREE (TREE_VEC_ELT (args, i));
4897 }
4898 break;
4899
4900 case TYPE_PACK_EXPANSION:
4901 WALK_SUBTREE (TREE_TYPE (*tp));
4902 WALK_SUBTREE (PACK_EXPANSION_EXTRA_ARGS (*tp));
4903 *walk_subtrees_p = 0;
4904 break;
4905
4906 case EXPR_PACK_EXPANSION:
4907 WALK_SUBTREE (TREE_OPERAND (*tp, 0));
4908 WALK_SUBTREE (PACK_EXPANSION_EXTRA_ARGS (*tp));
4909 *walk_subtrees_p = 0;
4910 break;
4911
4912 case CAST_EXPR:
4913 case REINTERPRET_CAST_EXPR:
4914 case STATIC_CAST_EXPR:
4915 case CONST_CAST_EXPR:
4916 case DYNAMIC_CAST_EXPR:
4917 case IMPLICIT_CONV_EXPR:
4918 if (TREE_TYPE (*tp))
4919 WALK_SUBTREE (TREE_TYPE (*tp));
4920
4921 {
4922 int i;
4923 for (i = 0; i < TREE_CODE_LENGTH (TREE_CODE (*tp)); ++i)
4924 WALK_SUBTREE (TREE_OPERAND (*tp, i));
4925 }
4926 *walk_subtrees_p = 0;
4927 break;
4928
4929 case TRAIT_EXPR:
4930 WALK_SUBTREE (TRAIT_EXPR_TYPE1 (*tp));
4931 WALK_SUBTREE (TRAIT_EXPR_TYPE2 (*tp));
4932 *walk_subtrees_p = 0;
4933 break;
4934
4935 case DECLTYPE_TYPE:
4936 WALK_SUBTREE (DECLTYPE_TYPE_EXPR (*tp));
4937 *walk_subtrees_p = 0;
4938 break;
4939
4940 case REQUIRES_EXPR:
4941 // Only recurse through the nested expression. Do not
4942 // walk the parameter list. Doing so causes false
4943 // positives in the pack expansion checker since the
4944 // requires parameters are introduced as pack expansions.
4945 WALK_SUBTREE (TREE_OPERAND (*tp, 1));
4946 *walk_subtrees_p = 0;
4947 break;
4948
4949 case DECL_EXPR:
4950 /* User variables should be mentioned in BIND_EXPR_VARS
4951 and their initializers and sizes walked when walking
4952 the containing BIND_EXPR. Compiler temporaries are
4953 handled here. And also normal variables in templates,
4954 since do_poplevel doesn't build a BIND_EXPR then. */
4955 if (VAR_P (TREE_OPERAND (*tp, 0))
4956 && (processing_template_decl
4957 || (DECL_ARTIFICIAL (TREE_OPERAND (*tp, 0))
4958 && !TREE_STATIC (TREE_OPERAND (*tp, 0)))))
4959 {
4960 tree decl = TREE_OPERAND (*tp, 0);
4961 WALK_SUBTREE (DECL_INITIAL (decl));
4962 WALK_SUBTREE (DECL_SIZE (decl));
4963 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
4964 }
4965 break;
4966
4967 case LAMBDA_EXPR:
4968 /* Don't walk into the body of the lambda, but the capture initializers
4969 are part of the enclosing context. */
4970 for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (*tp); cap;
4971 cap = TREE_CHAIN (cap))
4972 WALK_SUBTREE (TREE_VALUE (cap));
4973 break;
4974
4975 default:
4976 return NULL_TREE;
4977 }
4978
4979 /* We didn't find what we were looking for. */
4980 out:
4981 return result;
4982
4983 #undef WALK_SUBTREE
4984 }
4985
4986 /* Like save_expr, but for C++. */
4987
4988 tree
4989 cp_save_expr (tree expr)
4990 {
4991 /* There is no reason to create a SAVE_EXPR within a template; if
4992 needed, we can create the SAVE_EXPR when instantiating the
4993 template. Furthermore, the middle-end cannot handle C++-specific
4994 tree codes. */
4995 if (processing_template_decl)
4996 return expr;
4997 return save_expr (expr);
4998 }
4999
5000 /* Initialize tree.c. */
5001
5002 void
5003 init_tree (void)
5004 {
5005 list_hash_table = hash_table<list_hasher>::create_ggc (61);
5006 register_scoped_attributes (std_attribute_table, NULL);
5007 }
5008
5009 /* Returns the kind of special function that DECL (a FUNCTION_DECL)
5010 is. Note that sfk_none is zero, so this function can be used as a
5011 predicate to test whether or not DECL is a special function. */
5012
5013 special_function_kind
5014 special_function_p (const_tree decl)
5015 {
5016 /* Rather than doing all this stuff with magic names, we should
5017 probably have a field of type `special_function_kind' in
5018 DECL_LANG_SPECIFIC. */
5019 if (DECL_INHERITED_CTOR (decl))
5020 return sfk_inheriting_constructor;
5021 if (DECL_COPY_CONSTRUCTOR_P (decl))
5022 return sfk_copy_constructor;
5023 if (DECL_MOVE_CONSTRUCTOR_P (decl))
5024 return sfk_move_constructor;
5025 if (DECL_CONSTRUCTOR_P (decl))
5026 return sfk_constructor;
5027 if (DECL_ASSIGNMENT_OPERATOR_P (decl)
5028 && DECL_OVERLOADED_OPERATOR_IS (decl, NOP_EXPR))
5029 {
5030 if (copy_fn_p (decl))
5031 return sfk_copy_assignment;
5032 if (move_fn_p (decl))
5033 return sfk_move_assignment;
5034 }
5035 if (DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (decl))
5036 return sfk_destructor;
5037 if (DECL_COMPLETE_DESTRUCTOR_P (decl))
5038 return sfk_complete_destructor;
5039 if (DECL_BASE_DESTRUCTOR_P (decl))
5040 return sfk_base_destructor;
5041 if (DECL_DELETING_DESTRUCTOR_P (decl))
5042 return sfk_deleting_destructor;
5043 if (DECL_CONV_FN_P (decl))
5044 return sfk_conversion;
5045 if (deduction_guide_p (decl))
5046 return sfk_deduction_guide;
5047
5048 return sfk_none;
5049 }
5050
5051 /* Returns nonzero if TYPE is a character type, including wchar_t. */
5052
5053 int
5054 char_type_p (tree type)
5055 {
5056 return (same_type_p (type, char_type_node)
5057 || same_type_p (type, unsigned_char_type_node)
5058 || same_type_p (type, signed_char_type_node)
5059 || same_type_p (type, char16_type_node)
5060 || same_type_p (type, char32_type_node)
5061 || same_type_p (type, wchar_type_node));
5062 }
5063
5064 /* Returns the kind of linkage associated with the indicated DECL. The
5065 value returned is as specified by the language standard; it is
5066 independent of implementation details regarding template
5067 instantiation, etc. For example, it is possible that a declaration
5068 to which this function assigns external linkage would not show up
5069 as a global symbol when you run `nm' on the resulting object file. */
5070
5071 linkage_kind
5072 decl_linkage (tree decl)
5073 {
5074 /* This function doesn't attempt to calculate the linkage from first
5075 principles as given in [basic.link]. Instead, it makes use of
5076 the fact that we have already set TREE_PUBLIC appropriately, and
5077 then handles a few special cases. Ideally, we would calculate
5078 linkage first, and then transform that into a concrete
5079 implementation. */
5080
5081 /* Things that don't have names have no linkage. */
5082 if (!DECL_NAME (decl))
5083 return lk_none;
5084
5085 /* Fields have no linkage. */
5086 if (TREE_CODE (decl) == FIELD_DECL)
5087 return lk_none;
5088
5089 /* Things that are TREE_PUBLIC have external linkage. */
5090 if (TREE_PUBLIC (decl))
5091 return lk_external;
5092
5093 /* maybe_thunk_body clears TREE_PUBLIC on the maybe-in-charge 'tor variants;
5094 check one of the "clones" for the real linkage. */
5095 if ((DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (decl)
5096 || DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (decl))
5097 && DECL_CHAIN (decl)
5098 && DECL_CLONED_FUNCTION_P (DECL_CHAIN (decl)))
5099 return decl_linkage (DECL_CHAIN (decl));
5100
5101 if (TREE_CODE (decl) == NAMESPACE_DECL)
5102 return lk_external;
5103
5104 /* Linkage of a CONST_DECL depends on the linkage of the enumeration
5105 type. */
5106 if (TREE_CODE (decl) == CONST_DECL)
5107 return decl_linkage (TYPE_NAME (DECL_CONTEXT (decl)));
5108
5109 /* Things in local scope do not have linkage, if they don't have
5110 TREE_PUBLIC set. */
5111 if (decl_function_context (decl))
5112 return lk_none;
5113
5114 /* Members of the anonymous namespace also have TREE_PUBLIC unset, but
5115 are considered to have external linkage for language purposes, as do
5116 template instantiations on targets without weak symbols. DECLs really
5117 meant to have internal linkage have DECL_THIS_STATIC set. */
5118 if (TREE_CODE (decl) == TYPE_DECL)
5119 return lk_external;
5120 if (VAR_OR_FUNCTION_DECL_P (decl))
5121 {
5122 if (!DECL_THIS_STATIC (decl))
5123 return lk_external;
5124
5125 /* Static data members and static member functions from classes
5126 in anonymous namespace also don't have TREE_PUBLIC set. */
5127 if (DECL_CLASS_CONTEXT (decl))
5128 return lk_external;
5129 }
5130
5131 /* Everything else has internal linkage. */
5132 return lk_internal;
5133 }
5134
5135 /* Returns the storage duration of the object or reference associated with
5136 the indicated DECL, which should be a VAR_DECL or PARM_DECL. */
5137
5138 duration_kind
5139 decl_storage_duration (tree decl)
5140 {
5141 if (TREE_CODE (decl) == PARM_DECL)
5142 return dk_auto;
5143 if (TREE_CODE (decl) == FUNCTION_DECL)
5144 return dk_static;
5145 gcc_assert (VAR_P (decl));
5146 if (!TREE_STATIC (decl)
5147 && !DECL_EXTERNAL (decl))
5148 return dk_auto;
5149 if (CP_DECL_THREAD_LOCAL_P (decl))
5150 return dk_thread;
5151 return dk_static;
5152 }
5153
5154 /* EXP is an expression that we want to pre-evaluate. Returns (in
5155 *INITP) an expression that will perform the pre-evaluation. The
5156 value returned by this function is a side-effect free expression
5157 equivalent to the pre-evaluated expression. Callers must ensure
5158 that *INITP is evaluated before EXP. */
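/* A typical usage pattern (a sketch, mirroring stabilize_call below):

     tree init;
     arg = stabilize_expr (arg, &init);
     ... ARG can now be used several times without re-evaluating it ...
     inits = add_stmt_to_compound (inits, init);

   INIT may be NULL_TREE, which add_stmt_to_compound handles.  */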
5159
5160 tree
5161 stabilize_expr (tree exp, tree* initp)
5162 {
5163 tree init_expr;
5164
5165 if (!TREE_SIDE_EFFECTS (exp))
5166 init_expr = NULL_TREE;
5167 else if (VOID_TYPE_P (TREE_TYPE (exp)))
5168 {
5169 init_expr = exp;
5170 exp = void_node;
5171 }
5172 /* There are no expressions with REFERENCE_TYPE, but there can be call
5173 arguments with such a type; just treat it as a pointer. */
5174 else if (TREE_CODE (TREE_TYPE (exp)) == REFERENCE_TYPE
5175 || SCALAR_TYPE_P (TREE_TYPE (exp))
5176 || !glvalue_p (exp))
5177 {
5178 init_expr = get_target_expr (exp);
5179 exp = TARGET_EXPR_SLOT (init_expr);
5180 if (CLASS_TYPE_P (TREE_TYPE (exp)))
5181 exp = move (exp);
5182 else
5183 exp = rvalue (exp);
5184 }
5185 else
5186 {
5187 bool xval = !lvalue_p (exp);
5188 exp = cp_build_addr_expr (exp, tf_warning_or_error);
5189 init_expr = get_target_expr (exp);
5190 exp = TARGET_EXPR_SLOT (init_expr);
5191 exp = cp_build_fold_indirect_ref (exp);
5192 if (xval)
5193 exp = move (exp);
5194 }
5195 *initp = init_expr;
5196
5197 gcc_assert (!TREE_SIDE_EFFECTS (exp));
5198 return exp;
5199 }
5200
5201 /* Add NEW_EXPR, an expression whose value we don't care about, after the
5202 similar expression ORIG. */
5203
5204 tree
5205 add_stmt_to_compound (tree orig, tree new_expr)
5206 {
5207 if (!new_expr || !TREE_SIDE_EFFECTS (new_expr))
5208 return orig;
5209 if (!orig || !TREE_SIDE_EFFECTS (orig))
5210 return new_expr;
5211 return build2 (COMPOUND_EXPR, void_type_node, orig, new_expr);
5212 }
5213
5214 /* Like stabilize_expr, but for a call whose arguments we want to
5215 pre-evaluate. CALL is modified in place to use the pre-evaluated
5216 arguments, while, upon return, *INITP contains an expression to
5217 compute the arguments. */
5218
5219 void
5220 stabilize_call (tree call, tree *initp)
5221 {
5222 tree inits = NULL_TREE;
5223 int i;
5224 int nargs = call_expr_nargs (call);
5225
5226 if (call == error_mark_node || processing_template_decl)
5227 {
5228 *initp = NULL_TREE;
5229 return;
5230 }
5231
5232 gcc_assert (TREE_CODE (call) == CALL_EXPR);
5233
5234 for (i = 0; i < nargs; i++)
5235 {
5236 tree init;
5237 CALL_EXPR_ARG (call, i) =
5238 stabilize_expr (CALL_EXPR_ARG (call, i), &init);
5239 inits = add_stmt_to_compound (inits, init);
5240 }
5241
5242 *initp = inits;
5243 }
5244
5245 /* Like stabilize_expr, but for an AGGR_INIT_EXPR whose arguments we want
5246 to pre-evaluate. CALL is modified in place to use the pre-evaluated
5247 arguments, while, upon return, *INITP contains an expression to
5248 compute the arguments. */
5249
5250 static void
5251 stabilize_aggr_init (tree call, tree *initp)
5252 {
5253 tree inits = NULL_TREE;
5254 int i;
5255 int nargs = aggr_init_expr_nargs (call);
5256
5257 if (call == error_mark_node)
5258 return;
5259
5260 gcc_assert (TREE_CODE (call) == AGGR_INIT_EXPR);
5261
5262 for (i = 0; i < nargs; i++)
5263 {
5264 tree init;
5265 AGGR_INIT_EXPR_ARG (call, i) =
5266 stabilize_expr (AGGR_INIT_EXPR_ARG (call, i), &init);
5267 inits = add_stmt_to_compound (inits, init);
5268 }
5269
5270 *initp = inits;
5271 }
5272
5273 /* Like stabilize_expr, but for an initialization.
5274
5275 If the initialization is for an object of class type, this function
5276 takes care not to introduce additional temporaries.
5277
5278 Returns TRUE iff the expression was successfully pre-evaluated,
5279 i.e., if INIT is now side-effect free, except for, possibly, a
5280 single call to a constructor. */
5281
5282 bool
5283 stabilize_init (tree init, tree *initp)
5284 {
5285 tree t = init;
5286
5287 *initp = NULL_TREE;
5288
5289 if (t == error_mark_node || processing_template_decl)
5290 return true;
5291
5292 if (TREE_CODE (t) == INIT_EXPR)
5293 t = TREE_OPERAND (t, 1);
5294 if (TREE_CODE (t) == TARGET_EXPR)
5295 t = TARGET_EXPR_INITIAL (t);
5296
5297 /* If the RHS can be stabilized without breaking copy elision, stabilize
5298 it. We specifically don't stabilize class prvalues here because that
5299 would mean an extra copy, but they might be stabilized below. */
5300 if (TREE_CODE (init) == INIT_EXPR
5301 && TREE_CODE (t) != CONSTRUCTOR
5302 && TREE_CODE (t) != AGGR_INIT_EXPR
5303 && (SCALAR_TYPE_P (TREE_TYPE (t))
5304 || glvalue_p (t)))
5305 {
5306 TREE_OPERAND (init, 1) = stabilize_expr (t, initp);
5307 return true;
5308 }
5309
5310 if (TREE_CODE (t) == COMPOUND_EXPR
5311 && TREE_CODE (init) == INIT_EXPR)
5312 {
5313 tree last = expr_last (t);
5314 /* Handle stabilizing the EMPTY_CLASS_EXPR pattern. */
5315 if (!TREE_SIDE_EFFECTS (last))
5316 {
5317 *initp = t;
5318 TREE_OPERAND (init, 1) = last;
5319 return true;
5320 }
5321 }
5322
5323 if (TREE_CODE (t) == CONSTRUCTOR)
5324 {
5325 /* Aggregate initialization: stabilize each of the field
5326 initializers. */
5327 unsigned i;
5328 constructor_elt *ce;
5329 bool good = true;
5330 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (t);
5331 for (i = 0; vec_safe_iterate (v, i, &ce); ++i)
5332 {
5333 tree type = TREE_TYPE (ce->value);
5334 tree subinit;
5335 if (TREE_CODE (type) == REFERENCE_TYPE
5336 || SCALAR_TYPE_P (type))
5337 ce->value = stabilize_expr (ce->value, &subinit);
5338 else if (!stabilize_init (ce->value, &subinit))
5339 good = false;
5340 *initp = add_stmt_to_compound (*initp, subinit);
5341 }
5342 return good;
5343 }
5344
5345 if (TREE_CODE (t) == CALL_EXPR)
5346 {
5347 stabilize_call (t, initp);
5348 return true;
5349 }
5350
5351 if (TREE_CODE (t) == AGGR_INIT_EXPR)
5352 {
5353 stabilize_aggr_init (t, initp);
5354 return true;
5355 }
5356
5357 /* The initialization is being performed via a bitwise copy -- and
5358 the item copied may have side effects. */
5359 return !TREE_SIDE_EFFECTS (init);
5360 }
5361
5362 /* Returns true if a cast to TYPE may appear in an integral constant
5363 expression. */
5364
5365 bool
5366 cast_valid_in_integral_constant_expression_p (tree type)
5367 {
5368 return (INTEGRAL_OR_ENUMERATION_TYPE_P (type)
5369 || cxx_dialect >= cxx11
5370 || dependent_type_p (type)
5371 || type == error_mark_node);
5372 }
5373
5374 /* Return true if we need to fix linkage information of DECL. */
5375
5376 static bool
5377 cp_fix_function_decl_p (tree decl)
5378 {
5379 /* Skip if DECL is not externally visible. */
5380 if (!TREE_PUBLIC (decl))
5381 return false;
5382
5383 /* We need to fix DECL if it appears to be exported but with no
5384 function body. Thunks do not have CFGs and we may need to
5385 handle them specially later. */
5386 if (!gimple_has_body_p (decl)
5387 && !DECL_THUNK_P (decl)
5388 && !DECL_EXTERNAL (decl))
5389 {
5390 struct cgraph_node *node = cgraph_node::get (decl);
5391
5392 /* Don't fix same_body aliases. Although they don't have their own
5393 CFG, they share it with what they alias to. */
5394 if (!node || !node->alias
5395 || !vec_safe_length (node->ref_list.references))
5396 return true;
5397 }
5398
5399 return false;
5400 }
5401
5402 /* Clean the C++ specific parts of the tree T. */
5403
5404 void
5405 cp_free_lang_data (tree t)
5406 {
5407 if (TREE_CODE (t) == METHOD_TYPE
5408 || TREE_CODE (t) == FUNCTION_TYPE)
5409 {
5410 /* Default args are not interesting anymore. */
5411 tree argtypes = TYPE_ARG_TYPES (t);
5412 while (argtypes)
5413 {
5414 TREE_PURPOSE (argtypes) = 0;
5415 argtypes = TREE_CHAIN (argtypes);
5416 }
5417 }
5418 else if (TREE_CODE (t) == FUNCTION_DECL
5419 && cp_fix_function_decl_p (t))
5420 {
5421 /* If T is used in this translation unit at all, the definition
5422 must exist somewhere else since we have decided to not emit it
5423 in this TU. So make it an external reference. */
5424 DECL_EXTERNAL (t) = 1;
5425 TREE_STATIC (t) = 0;
5426 }
5427 if (TREE_CODE (t) == NAMESPACE_DECL)
5428 /* We do not need the leftover chaining of namespaces from the
5429 binding level. */
5430 DECL_CHAIN (t) = NULL_TREE;
5431 }
5432
5433 /* Stub for c-common. Please keep in sync with c-decl.c.
5434 FIXME: If address space support is target specific, then this
5435 should be a C target hook. But currently this is not possible,
5436 because this function is called via REGISTER_TARGET_PRAGMAS. */
5437 void
5438 c_register_addr_space (const char * /*word*/, addr_space_t /*as*/)
5439 {
5440 }
5441
5442 /* Return the number of operands in T that we care about for things like
5443 mangling. */
5444
5445 int
5446 cp_tree_operand_length (const_tree t)
5447 {
5448 enum tree_code code = TREE_CODE (t);
5449
5450 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
5451 return VL_EXP_OPERAND_LENGTH (t);
5452
5453 return cp_tree_code_length (code);
5454 }
5455
5456 /* Like cp_tree_operand_length, but takes a tree_code CODE. */
5457
5458 int
5459 cp_tree_code_length (enum tree_code code)
5460 {
5461 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5462
5463 switch (code)
5464 {
5465 case PREINCREMENT_EXPR:
5466 case PREDECREMENT_EXPR:
5467 case POSTINCREMENT_EXPR:
5468 case POSTDECREMENT_EXPR:
5469 return 1;
5470
5471 case ARRAY_REF:
5472 return 2;
5473
5474 case EXPR_PACK_EXPANSION:
5475 return 1;
5476
5477 default:
5478 return TREE_CODE_LENGTH (code);
5479 }
5480 }
5481
5482 /* Wrapper around warn_deprecated_use that doesn't warn for
5483 current_class_type. */
5484
5485 void
5486 cp_warn_deprecated_use (tree node)
5487 {
5488 if (TYPE_P (node)
5489 && current_class_type
5490 && TYPE_MAIN_VARIANT (node) == current_class_type)
5491 return;
5492 warn_deprecated_use (node, NULL_TREE);
5493 }
5494
5495 /* Implement -Wzero_as_null_pointer_constant. Return true if the
5496 conditions for the warning hold, false otherwise. */
5497 bool
5498 maybe_warn_zero_as_null_pointer_constant (tree expr, location_t loc)
5499 {
5500 if (c_inhibit_evaluation_warnings == 0
5501 && !NULLPTR_TYPE_P (TREE_TYPE (expr)))
5502 {
5503 warning_at (loc, OPT_Wzero_as_null_pointer_constant,
5504 "zero as null pointer constant");
5505 return true;
5506 }
5507 return false;
5508 }
5509
5510 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
5511 /* Complain that some language-specific thing hanging off a tree
5512 node has been accessed improperly. */
5513
5514 void
5515 lang_check_failed (const char* file, int line, const char* function)
5516 {
5517 internal_error ("lang_* check: failed in %s, at %s:%d",
5518 function, trim_filename (file), line);
5519 }
5520 #endif /* ENABLE_TREE_CHECKING */
5521
5522 #if CHECKING_P
5523
5524 namespace selftest {
5525
5526 /* Verify that lvalue_kind () works, for various expressions,
5527 and that location wrappers don't affect the results. */
5528
5529 static void
5530 test_lvalue_kind ()
5531 {
5532 location_t loc = BUILTINS_LOCATION;
5533
5534 /* Verify constants and parameters, without and with
5535 location wrappers. */
5536 tree int_cst = build_int_cst (integer_type_node, 42);
5537 ASSERT_EQ (clk_none, lvalue_kind (int_cst));
5538
5539 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
5540 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
5541 ASSERT_EQ (clk_none, lvalue_kind (wrapped_int_cst));
5542
5543 tree string_lit = build_string (4, "foo");
5544 TREE_TYPE (string_lit) = char_array_type_node;
5545 string_lit = fix_string_type (string_lit);
5546 ASSERT_EQ (clk_ordinary, lvalue_kind (string_lit));
5547
5548 tree wrapped_string_lit = maybe_wrap_with_location (string_lit, loc);
5549 ASSERT_TRUE (location_wrapper_p (wrapped_string_lit));
5550 ASSERT_EQ (clk_ordinary, lvalue_kind (wrapped_string_lit));
5551
5552 tree parm = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5553 get_identifier ("some_parm"),
5554 integer_type_node);
5555 ASSERT_EQ (clk_ordinary, lvalue_kind (parm));
5556
5557 tree wrapped_parm = maybe_wrap_with_location (parm, loc);
5558 ASSERT_TRUE (location_wrapper_p (wrapped_parm));
5559 ASSERT_EQ (clk_ordinary, lvalue_kind (wrapped_parm));
5560
5561 /* Verify that lvalue_kind of std::move on a parm isn't
5562 affected by location wrappers. */
5563 tree rvalue_ref_of_parm = move (parm);
5564 ASSERT_EQ (clk_rvalueref, lvalue_kind (rvalue_ref_of_parm));
5565 tree rvalue_ref_of_wrapped_parm = move (wrapped_parm);
5566 ASSERT_EQ (clk_rvalueref, lvalue_kind (rvalue_ref_of_wrapped_parm));
5567 }
5568
5569 /* Run all of the selftests within this file. */
5570
5571 void
5572 cp_tree_c_tests ()
5573 {
5574 test_lvalue_kind ();
5575 }
5576
5577 } // namespace selftest
5578
5579 #endif /* #if CHECKING_P */
5580
5581
5582 #include "gt-cp-tree.h"
5583