1 /* Output variables, constants and external declarations, for GNU compiler.
2 Copyright (C) 1987-2018 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* This file handles generation of all the assembler code
22 *except* the instructions of a function.
23 This includes declarations of variables and their initial values.
24
25 We also output the assembler code for constants stored in memory
26 and are responsible for combining constants with the same value. */
27
28 #include "config.h"
29 #include "system.h"
30 #include "coretypes.h"
31 #include "backend.h"
32 #include "target.h"
33 #include "rtl.h"
34 #include "tree.h"
35 #include "predict.h"
36 #include "memmodel.h"
37 #include "tm_p.h"
38 #include "stringpool.h"
39 #include "regs.h"
40 #include "emit-rtl.h"
41 #include "cgraph.h"
42 #include "diagnostic-core.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "varasm.h"
46 #include "flags.h"
47 #include "stmt.h"
48 #include "expr.h"
49 #include "expmed.h"
50 #include "output.h"
51 #include "langhooks.h"
52 #include "debug.h"
53 #include "common/common-target.h"
54 #include "stringpool.h"
55 #include "attribs.h"
56 #include "asan.h"
57 #include "rtl-iter.h"
58 #include "file-prefix-map.h" /* remap_debug_filename() */
59
60 #ifdef XCOFF_DEBUGGING_INFO
61 #include "xcoffout.h" /* Needed for external data declarations. */
62 #endif
63
/* The (assembler) name of the first globally-visible object output.  */
extern GTY(()) const char *first_global_object_name;
extern GTY(()) const char *weak_global_object_name;

/* Definitions for the two names declared (with GC roots) above.  */
const char *first_global_object_name;
const char *weak_global_object_name;

/* Opaque types; the full definitions appear later in this file.  */
struct addr_const;
struct constant_descriptor_rtx;
struct rtx_constant_pool;

/* Number of deferred constants for the current function; stored in
   per-function state so it resets with each function.  */
#define n_deferred_constants (crtl->varasm.deferred_constants)
76
/* Number for making the label on the next
   constant that is stored in memory.  */

static GTY(()) int const_labelno;

/* Carry information from ASM_DECLARE_OBJECT_NAME
   to ASM_FINISH_DECLARE_OBJECT.  */

int size_directive_output;

/* The last decl for which assemble_variable was called,
   if it did ASM_DECLARE_OBJECT_NAME.
   If the last call to assemble_variable didn't do that,
   this holds 0.  */

tree last_assemble_variable_decl;

/* The following global variable indicates if the first basic block
   in a function belongs to the cold partition or not.  */

bool first_function_block_is_cold;

/* Whether we saw any functions with no_split_stack.  */

static bool saw_no_split_stack;

/* Forward declarations for static helpers defined later in this file.  */
static const char *strip_reg_name (const char *);
static int contains_pointers_p (tree);
#ifdef ASM_OUTPUT_EXTERNAL
static bool incorporeal_function_p (tree);
#endif
static void decode_addr_const (tree, struct addr_const *);
static hashval_t const_hash_1 (const tree);
static int compare_constant (const tree, const tree);
static void output_constant_def_contents (rtx);
static void output_addressed_constants (tree);
static unsigned HOST_WIDE_INT output_constant (tree, unsigned HOST_WIDE_INT,
					       unsigned int, bool);
static void globalize_decl (tree);
static bool decl_readonly_section_1 (enum section_category);
#ifdef BSS_SECTION_ASM_OP
#ifdef ASM_OUTPUT_ALIGNED_BSS
static void asm_output_aligned_bss (FILE *, tree, const char *,
				    unsigned HOST_WIDE_INT, int)
     ATTRIBUTE_UNUSED;
#endif
#endif /* BSS_SECTION_ASM_OP */
static void mark_weak (tree);
static void output_constant_pool (const char *, tree);
static void handle_vtv_comdat_section (section *, const_tree);
127
/* Well-known sections, each one associated with some sort of *_ASM_OP.  */
section *text_section;
section *data_section;
section *readonly_data_section;
section *sdata_section;
section *ctors_section;
section *dtors_section;
section *bss_section;
section *sbss_section;

/* Various forms of common section.  All are guaranteed to be nonnull.  */
section *tls_comm_section;
section *comm_section;
section *lcomm_section;

/* A SECTION_NOSWITCH section used for declaring global BSS variables.
   May be null.  */
section *bss_noswitch_section;

/* The section that holds the main exception table, when known.  The section
   is set either by the target's init_sections hook or by the first call to
   switch_to_exception_section.  */
section *exception_section;

/* The section that holds the DWARF2 frame unwind information, when known.
   The section is set either by the target's init_sections hook or by the
   first call to switch_to_eh_frame_section.  */
section *eh_frame_section;

/* asm_out_file's current section.  This is NULL if no section has yet
   been selected or if we lose track of what the current section is.  */
section *in_section;

/* True if code for the current function is currently being directed
   at the cold section.  */
bool in_cold_section_p;

/* The following global holds the "function name" for the code in the
   cold section of a function, if hot/cold function splitting is enabled
   and there was actually code that went into the cold section.  A
   pseudo function name is needed for the cold section of code for some
   debugging tools that perform symbolization.  */
tree cold_function_name = NULL_TREE;

/* A linked list of all the unnamed sections.  */
static GTY(()) section *unnamed_sections;

/* Return a nonzero value if DECL has a section attribute.  */
#define IN_NAMED_SECTION(DECL) \
  (VAR_OR_FUNCTION_DECL_P (DECL) && DECL_SECTION_NAME (DECL) != NULL)
178
/* Hasher for the table of named sections: entries are hashed and
   compared by section name.  */
struct section_hasher : ggc_ptr_hash<section>
{
  typedef const char *compare_type;

  static hashval_t hash (section *);
  static bool equal (section *, const char *);
};

/* Hash table of named sections.  */
static GTY(()) hash_table<section_hasher> *section_htab;

/* Hasher for the table of object_blocks: entries are keyed by their
   section.  */
struct object_block_hasher : ggc_ptr_hash<object_block>
{
  typedef const section *compare_type;

  static hashval_t hash (object_block *);
  static bool equal (object_block *, const section *);
};

/* A table of object_blocks, indexed by section.  */
static GTY(()) hash_table<object_block_hasher> *object_block_htab;

/* The next number to use for internal anchor labels.  */
static GTY(()) int anchor_labelno;

/* A pool of constants that can be shared between functions.  */
static GTY(()) struct rtx_constant_pool *shared_constant_pool;
206
207 /* Helper routines for maintaining section_htab. */
208
209 bool
equal(section * old,const char * new_name)210 section_hasher::equal (section *old, const char *new_name)
211 {
212 return strcmp (old->named.name, new_name) == 0;
213 }
214
215 hashval_t
hash(section * old)216 section_hasher::hash (section *old)
217 {
218 return htab_hash_string (old->named.name);
219 }
220
221 /* Return a hash value for section SECT. */
222
223 static hashval_t
hash_section(section * sect)224 hash_section (section *sect)
225 {
226 if (sect->common.flags & SECTION_NAMED)
227 return htab_hash_string (sect->named.name);
228 return sect->common.flags & ~SECTION_DECLARED;
229 }
230
231 /* Helper routines for maintaining object_block_htab. */
232
233 inline bool
equal(object_block * old,const section * new_section)234 object_block_hasher::equal (object_block *old, const section *new_section)
235 {
236 return old->sect == new_section;
237 }
238
239 hashval_t
hash(object_block * old)240 object_block_hasher::hash (object_block *old)
241 {
242 return hash_section (old->sect);
243 }
244
245 /* Return a new unnamed section with the given fields. */
246
247 section *
get_unnamed_section(unsigned int flags,void (* callback)(const void *),const void * data)248 get_unnamed_section (unsigned int flags, void (*callback) (const void *),
249 const void *data)
250 {
251 section *sect;
252
253 sect = ggc_alloc<section> ();
254 sect->unnamed.common.flags = flags | SECTION_UNNAMED;
255 sect->unnamed.callback = callback;
256 sect->unnamed.data = data;
257 sect->unnamed.next = unnamed_sections;
258
259 unnamed_sections = sect;
260 return sect;
261 }
262
263 /* Return a SECTION_NOSWITCH section with the given fields. */
264
265 static section *
get_noswitch_section(unsigned int flags,noswitch_section_callback callback)266 get_noswitch_section (unsigned int flags, noswitch_section_callback callback)
267 {
268 section *sect;
269
270 sect = ggc_alloc<section> ();
271 sect->noswitch.common.flags = flags | SECTION_NOSWITCH;
272 sect->noswitch.callback = callback;
273
274 return sect;
275 }
276
/* Return the named section structure associated with NAME.  Create
   a new section with the given fields if no such structure exists.
   DECL is the declaration associated with the section (may be null);
   it is recorded on new sections and used for conflict diagnostics
   on existing ones.  */

section *
get_section (const char *name, unsigned int flags, tree decl)
{
  section *sect, **slot;

  slot = section_htab->find_slot_with_hash (name, htab_hash_string (name),
					    INSERT);
  flags |= SECTION_NAMED;
  if (*slot == NULL)
    {
      /* First request for this name: allocate and register a fresh
	 named section.  */
      sect = ggc_alloc<section> ();
      sect->named.common.flags = flags;
      sect->named.name = ggc_strdup (name);
      sect->named.decl = decl;
      *slot = sect;
    }
  else
    {
      sect = *slot;
      /* It is fine if one of the sections has SECTION_NOTYPE as long as
	 the other has none of the contrary flags (see the logic at the end
	 of default_section_type_flags, below).  Reconcile by giving both
	 sides SECTION_NOTYPE.  */
      if (((sect->common.flags ^ flags) & SECTION_NOTYPE)
	  && !((sect->common.flags | flags)
	       & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE
		  | (HAVE_COMDAT_GROUP ? SECTION_LINKONCE : 0))))
	{
	  sect->common.flags |= SECTION_NOTYPE;
	  flags |= SECTION_NOTYPE;
	}
      /* Any remaining flag mismatch is a conflict, unless one side has
	 already been flagged SECTION_OVERRIDE (i.e. we already errored).  */
      if ((sect->common.flags & ~SECTION_DECLARED) != flags
	  && ((sect->common.flags | flags) & SECTION_OVERRIDE) == 0)
	{
	  /* It is fine if one of the section flags is
	     SECTION_WRITE | SECTION_RELRO and the other has none of these
	     flags (i.e. read-only) in named sections and either the
	     section hasn't been declared yet or has been declared as writable.
	     In that case just make sure the resulting flags are
	     SECTION_WRITE | SECTION_RELRO, ie. writable only because of
	     relocations.  */
	  if (((sect->common.flags ^ flags) & (SECTION_WRITE | SECTION_RELRO))
	      == (SECTION_WRITE | SECTION_RELRO)
	      && (sect->common.flags
		  & ~(SECTION_DECLARED | SECTION_WRITE | SECTION_RELRO))
	      == (flags & ~(SECTION_WRITE | SECTION_RELRO))
	      && ((sect->common.flags & SECTION_DECLARED) == 0
		  || (sect->common.flags & SECTION_WRITE)))
	    {
	      sect->common.flags |= (SECTION_WRITE | SECTION_RELRO);
	      return sect;
	    }
	  /* Sanity check user variables for flag changes.  Prefer the
	     two-decl diagnostic when both decls are known and differ.  */
	  if (sect->named.decl != NULL
	      && DECL_P (sect->named.decl)
	      && decl != sect->named.decl)
	    {
	      if (decl != NULL && DECL_P (decl))
		error ("%+qD causes a section type conflict with %qD",
		       decl, sect->named.decl);
	      else
		error ("section type conflict with %qD", sect->named.decl);
	      inform (DECL_SOURCE_LOCATION (sect->named.decl),
		      "%qD was declared here", sect->named.decl);
	    }
	  else if (decl != NULL && DECL_P (decl))
	    error ("%+qD causes a section type conflict", decl);
	  else
	    error ("section type conflict");
	  /* Make sure we don't error about one section multiple times.  */
	  sect->common.flags |= SECTION_OVERRIDE;
	}
    }
  return sect;
}
354
355 /* Return true if the current compilation mode benefits from having
356 objects grouped into blocks. */
357
358 static bool
use_object_blocks_p(void)359 use_object_blocks_p (void)
360 {
361 return flag_section_anchors;
362 }
363
364 /* Return the object_block structure for section SECT. Create a new
365 structure if we haven't created one already. Return null if SECT
366 itself is null. */
367
368 static struct object_block *
get_block_for_section(section * sect)369 get_block_for_section (section *sect)
370 {
371 struct object_block *block;
372
373 if (sect == NULL)
374 return NULL;
375
376 object_block **slot
377 = object_block_htab->find_slot_with_hash (sect, hash_section (sect),
378 INSERT);
379 block = *slot;
380 if (block == NULL)
381 {
382 block = ggc_cleared_alloc<object_block> ();
383 block->sect = sect;
384 *slot = block;
385 }
386 return block;
387 }
388
389 /* Create a symbol with label LABEL and place it at byte offset
390 OFFSET in BLOCK. OFFSET can be negative if the symbol's offset
391 is not yet known. LABEL must be a garbage-collected string. */
392
393 static rtx
create_block_symbol(const char * label,struct object_block * block,HOST_WIDE_INT offset)394 create_block_symbol (const char *label, struct object_block *block,
395 HOST_WIDE_INT offset)
396 {
397 rtx symbol;
398 unsigned int size;
399
400 /* Create the extended SYMBOL_REF. */
401 size = RTX_HDR_SIZE + sizeof (struct block_symbol);
402 symbol = (rtx) ggc_internal_alloc (size);
403
404 /* Initialize the normal SYMBOL_REF fields. */
405 memset (symbol, 0, size);
406 PUT_CODE (symbol, SYMBOL_REF);
407 PUT_MODE (symbol, Pmode);
408 XSTR (symbol, 0) = label;
409 SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_HAS_BLOCK_INFO;
410
411 /* Initialize the block_symbol stuff. */
412 SYMBOL_REF_BLOCK (symbol) = block;
413 SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;
414
415 return symbol;
416 }
417
418 /* Return a section with a particular name and with whatever SECTION_*
419 flags section_type_flags deems appropriate. The name of the section
420 is taken from NAME if nonnull, otherwise it is taken from DECL's
421 DECL_SECTION_NAME. DECL is the decl associated with the section
422 (see the section comment for details) and RELOC is as for
423 section_type_flags. */
424
425 section *
get_named_section(tree decl,const char * name,int reloc)426 get_named_section (tree decl, const char *name, int reloc)
427 {
428 unsigned int flags;
429
430 if (name == NULL)
431 {
432 gcc_assert (decl && DECL_P (decl) && DECL_SECTION_NAME (decl));
433 name = DECL_SECTION_NAME (decl);
434 }
435
436 flags = targetm.section_type_flags (decl, name, reloc);
437 return get_section (name, flags, decl);
438 }
439
/* Worker for resolve_unique_section: mark symtab node N as living in an
   implicitly chosen (compiler-generated) section.  DATA is unused.
   Returns false so the symbol-and-aliases walk continues.  */

static bool
set_implicit_section (struct symtab_node *n, void *data ATTRIBUTE_UNUSED)
{
  n->implicit_section = true;
  return false;
}
448
449 /* If required, set DECL_SECTION_NAME to a unique name. */
450
451 void
resolve_unique_section(tree decl,int reloc ATTRIBUTE_UNUSED,int flag_function_or_data_sections)452 resolve_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED,
453 int flag_function_or_data_sections)
454 {
455 if (DECL_SECTION_NAME (decl) == NULL
456 && targetm_common.have_named_sections
457 && (flag_function_or_data_sections
458 || DECL_COMDAT_GROUP (decl)))
459 {
460 targetm.asm_out.unique_section (decl, reloc);
461 if (DECL_SECTION_NAME (decl))
462 symtab_node::get (decl)->call_for_symbol_and_aliases
463 (set_implicit_section, NULL, true);
464 }
465 }
466
467 #ifdef BSS_SECTION_ASM_OP
468
469 #ifdef ASM_OUTPUT_ALIGNED_BSS
470
/* Utility function for targets to use in implementing
   ASM_OUTPUT_ALIGNED_BSS: switch to the BSS section, emit alignment,
   declare or label NAME for DECL, and reserve SIZE bytes.
   ??? It is believed that this function will work in most cases so such
   support is localized here.  */

static void
asm_output_aligned_bss (FILE *file, tree decl ATTRIBUTE_UNUSED,
			const char *name, unsigned HOST_WIDE_INT size,
			int align)
{
  switch_to_section (bss_section);
  /* ALIGN arrives in bits; the directive wants a log2 byte count.  */
  ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
#ifdef ASM_DECLARE_OBJECT_NAME
  last_assemble_variable_decl = decl;
  ASM_DECLARE_OBJECT_NAME (file, name, decl);
#else
  /* Standard thing is just output label for the object.  */
  ASM_OUTPUT_LABEL (file, name);
#endif /* ASM_DECLARE_OBJECT_NAME */
  /* Reserve at least one byte so the label refers to real storage.  */
  ASM_OUTPUT_SKIP (file, size ? size : 1);
}
492
493 #endif
494
495 #endif /* BSS_SECTION_ASM_OP */
496
497 #ifndef USE_SELECT_SECTION_FOR_FUNCTIONS
498 /* Return the hot section for function DECL. Return text_section for
499 null DECLs. */
500
501 static section *
hot_function_section(tree decl)502 hot_function_section (tree decl)
503 {
504 if (decl != NULL_TREE
505 && DECL_SECTION_NAME (decl) != NULL
506 && targetm_common.have_named_sections)
507 return get_named_section (decl, NULL, 0);
508 else
509 return text_section;
510 }
511 #endif
512
/* Return a text section for DECL.

   If DECL is NULL or has no DECL_SECTION_NAME, return the section named
   TEXT_SECTION_NAME.  If DECL has an explicit section name and
   NAMED_SECTION_SUFFIX is non-NULL, return a section named by
   concatenating the stripped section name with NAMED_SECTION_SUFFIX.
   If DECL's section name is implicit, return a section named
   "TEXT_SECTION_NAME.<stripped assembler name>".  */

section *
get_named_text_section (tree decl,
			const char *text_section_name,
			const char *named_section_suffix)
{
  if (decl && DECL_SECTION_NAME (decl))
    {
      if (named_section_suffix)
	{
	  const char *dsn = DECL_SECTION_NAME (decl);
	  const char *stripped_name;
	  char *name, *buffer;

	  /* Copy the section name into a writable stack buffer before
	     stripping target encoding from it.  */
	  name = (char *) alloca (strlen (dsn) + 1);
	  memcpy (name, dsn,
		  strlen (dsn) + 1);

	  stripped_name = targetm.strip_name_encoding (name);

	  buffer = ACONCAT ((stripped_name, named_section_suffix, NULL));
	  return get_named_section (decl, buffer, 0);
	}
      else if (symtab_node::get (decl)->implicit_section)
	{
	  const char *name;

	  /* Do not try to split gnu_linkonce functions.  This gets somewhat
	     slippery.  */
	  if (DECL_COMDAT_GROUP (decl) && !HAVE_COMDAT_GROUP)
	    return NULL;
	  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
	  name = targetm.strip_name_encoding (name);
	  return get_named_section (decl, ACONCAT ((text_section_name, ".",
						    name, NULL)), 0);
	}
      else
	/* Explicit user section with no suffix requested: let the caller
	   fall back to other section-selection logic.  */
	return NULL;
    }
  return get_named_section (decl, text_section_name, 0);
}
561
/* Choose a named function section for DECL based on its frequency FREQ.
   STARTUP and EXIT say whether the function is only called at program
   startup or exit, respectively.  Returns NULL when no special section
   should be used.  */

section *
default_function_section (tree decl, enum node_frequency freq,
			  bool startup, bool exit)
{
#if defined HAVE_LD_EH_GC_SECTIONS && defined HAVE_LD_EH_GC_SECTIONS_BUG
  /* Old GNU linkers have buggy --gc-section support, which sometimes
     results in .gcc_except_table* sections being garbage collected.  */
  if (decl
      && symtab_node::get (decl)->implicit_section)
    return NULL;
#endif

  if (!flag_reorder_functions
      || !targetm_common.have_named_sections)
    return NULL;
  /* Startup code should go to startup subsection unless it is
     unlikely executed (this happens especially with function splitting
     where we can split away unnecessary parts of static constructors).  */
  if (startup && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
    {
      /* If we have a profile, or the LTO phase is executed, we do not
	 need these ELF sections.  */
      if (!in_lto_p || !flag_profile_values)
	return get_named_text_section (decl, ".text.startup", NULL);
      else
	return NULL;
    }

  /* Similarly for exit.  */
  if (exit && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
    return get_named_text_section (decl, ".text.exit", NULL);

  /* Group cold functions together, similarly for hot code.  */
  switch (freq)
    {
    case NODE_FREQUENCY_UNLIKELY_EXECUTED:
      return get_named_text_section (decl, ".text.unlikely", NULL);
    case NODE_FREQUENCY_HOT:
      /* If we have a profile, or the LTO phase is executed, we do not
	 need these ELF sections.  */
      if (!in_lto_p || !flag_profile_values)
	return get_named_text_section (decl, ".text.hot", NULL);
      /* FALLTHRU */
    default:
      return NULL;
    }
}
611
612 /* Return the section for function DECL.
613
614 If DECL is NULL_TREE, return the text section. We can be passed
615 NULL_TREE under some circumstances by dbxout.c at least.
616
617 If FORCE_COLD is true, return cold function section ignoring
618 the frequency info of cgraph_node. */
619
620 static section *
function_section_1(tree decl,bool force_cold)621 function_section_1 (tree decl, bool force_cold)
622 {
623 section *section = NULL;
624 enum node_frequency freq = NODE_FREQUENCY_NORMAL;
625 bool startup = false, exit = false;
626
627 if (decl)
628 {
629 struct cgraph_node *node = cgraph_node::get (decl);
630
631 if (node)
632 {
633 freq = node->frequency;
634 startup = node->only_called_at_startup;
635 exit = node->only_called_at_exit;
636 }
637 }
638 if (force_cold)
639 freq = NODE_FREQUENCY_UNLIKELY_EXECUTED;
640
641 #ifdef USE_SELECT_SECTION_FOR_FUNCTIONS
642 if (decl != NULL_TREE
643 && DECL_SECTION_NAME (decl) != NULL)
644 {
645 if (targetm.asm_out.function_section)
646 section = targetm.asm_out.function_section (decl, freq,
647 startup, exit);
648 if (section)
649 return section;
650 return get_named_section (decl, NULL, 0);
651 }
652 else
653 return targetm.asm_out.select_section
654 (decl, freq == NODE_FREQUENCY_UNLIKELY_EXECUTED,
655 symtab_node::get (decl)->definition_alignment ());
656 #else
657 if (targetm.asm_out.function_section)
658 section = targetm.asm_out.function_section (decl, freq, startup, exit);
659 if (section)
660 return section;
661 return hot_function_section (decl);
662 #endif
663 }
664
665 /* Return the section for function DECL.
666
667 If DECL is NULL_TREE, return the text section. We can be passed
668 NULL_TREE under some circumstances by dbxout.c at least. */
669
670 section *
function_section(tree decl)671 function_section (tree decl)
672 {
673 /* Handle cases where function splitting code decides
674 to put function entry point into unlikely executed section
675 despite the fact that the function itself is not cold
676 (i.e. it is called rarely but contains a hot loop that is
677 better to live in hot subsection for the code locality). */
678 return function_section_1 (decl,
679 first_function_block_is_cold);
680 }
681
682 /* Return the section for the current function, take IN_COLD_SECTION_P
683 into account. */
684
685 section *
current_function_section(void)686 current_function_section (void)
687 {
688 return function_section_1 (current_function_decl, in_cold_section_p);
689 }
690
691 /* Tell assembler to switch to unlikely-to-be-executed text section. */
692
693 section *
unlikely_text_section(void)694 unlikely_text_section (void)
695 {
696 return function_section_1 (current_function_decl, true);
697 }
698
699 /* When called within a function context, return true if the function
700 has been assigned a cold text section and if SECT is that section.
701 When called outside a function context, return true if SECT is the
702 default cold section. */
703
704 bool
unlikely_text_section_p(section * sect)705 unlikely_text_section_p (section *sect)
706 {
707 return sect == function_section_1 (current_function_decl, true);
708 }
709
710 /* Switch to the other function partition (if inside of hot section
711 into cold section, otherwise into the hot section). */
712
713 void
switch_to_other_text_partition(void)714 switch_to_other_text_partition (void)
715 {
716 in_cold_section_p = !in_cold_section_p;
717 switch_to_section (current_function_section ());
718 }
719
/* Return the read-only data section associated with function DECL.
   For functions placed in named sections this tries to derive a
   matching read-only section name; otherwise it falls back to the
   shared readonly_data_section.  */

section *
default_function_rodata_section (tree decl)
{
  if (decl != NULL_TREE && DECL_SECTION_NAME (decl))
    {
      const char *name = DECL_SECTION_NAME (decl);

      if (DECL_COMDAT_GROUP (decl) && HAVE_COMDAT_GROUP)
	{
	  const char *dot;
	  size_t len;
	  char* rname;

	  /* Reuse the per-function suffix (everything from the second
	     dot-component on) after a ".rodata" prefix.  */
	  dot = strchr (name + 1, '.');
	  if (!dot)
	    dot = name;
	  /* ".rodata" is 7 chars, +1 for the NUL terminator.  */
	  len = strlen (dot) + 8;
	  rname = (char *) alloca (len);

	  strcpy (rname, ".rodata");
	  strcat (rname, dot);
	  return get_section (rname, SECTION_LINKONCE, decl);
	}
      /* For .gnu.linkonce.t.foo we want to use .gnu.linkonce.r.foo.  */
      else if (DECL_COMDAT_GROUP (decl)
	       && strncmp (name, ".gnu.linkonce.t.", 16) == 0)
	{
	  size_t len = strlen (name) + 1;
	  char *rname = (char *) alloca (len);

	  /* Only the 't' at index 14 changes to 'r'.  */
	  memcpy (rname, name, len);
	  rname[14] = 'r';
	  return get_section (rname, SECTION_LINKONCE, decl);
	}
      /* For .text.foo we want to use .rodata.foo.  */
      else if (flag_function_sections && flag_data_sections
	       && strncmp (name, ".text.", 6) == 0)
	{
	  size_t len = strlen (name) + 1;
	  /* ".rodata" is two characters longer than ".text".  */
	  char *rname = (char *) alloca (len + 2);

	  memcpy (rname, ".rodata", 7);
	  memcpy (rname + 7, name + 5, len - 5);
	  return get_section (rname, 0, decl);
	}
    }

  return readonly_data_section;
}
771
/* Return the read-only data section associated with function DECL
   for targets where that section should be always the single
   readonly data section.  DECL is ignored.  */

section *
default_no_function_rodata_section (tree decl ATTRIBUTE_UNUSED)
{
  return readonly_data_section;
}
781
782 /* A subroutine of mergeable_string_section and mergeable_constant_section. */
783
784 static const char *
function_mergeable_rodata_prefix(void)785 function_mergeable_rodata_prefix (void)
786 {
787 section *s = targetm.asm_out.function_rodata_section (current_function_decl);
788 if (SECTION_STYLE (s) == SECTION_NAMED)
789 return s->named.name;
790 else
791 return targetm.asm_out.mergeable_rodata_prefix;
792 }
793
/* Return the section to use for string merging.  DECL is the STRING_CST,
   ALIGN its alignment in bits, FLAGS the base section flags.  Falls back
   to readonly_data_section when the string is not mergeable.  */

static section *
mergeable_string_section (tree decl ATTRIBUTE_UNUSED,
			  unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
			  unsigned int flags ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT len;

  if (HAVE_GAS_SHF_MERGE && flag_merge_constants
      && TREE_CODE (decl) == STRING_CST
      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
      && align <= 256
      && (len = int_size_in_bytes (TREE_TYPE (decl))) > 0
      && TREE_STRING_LENGTH (decl) >= len)
    {
      scalar_int_mode mode;
      unsigned int modesize;
      const char *str;
      HOST_WIDE_INT i;
      int j, unit;
      const char *prefix = function_mergeable_rodata_prefix ();
      char *name = (char *) alloca (strlen (prefix) + 30);

      mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (TREE_TYPE (decl)));
      modesize = GET_MODE_BITSIZE (mode);
      /* Mergeable sections only support power-of-two element sizes
	 from 1 to 32 bytes.  */
      if (modesize >= 8 && modesize <= 256
	  && (modesize & (modesize - 1)) == 0)
	{
	  if (align < modesize)
	    align = modesize;

	  str = TREE_STRING_POINTER (decl);
	  unit = GET_MODE_SIZE (mode);

	  /* Check for embedded NUL characters.  */
	  for (i = 0; i < len; i += unit)
	    {
	      for (j = 0; j < unit; j++)
		if (str[i + j] != '\0')
		  break;
	      if (j == unit)
		break;
	    }
	  /* The scan stops at the first all-zero unit; a mergeable string
	     must have its only NUL unit exactly at the end.  */
	  if (i == len - unit)
	    {
	      sprintf (name, "%s.str%d.%d", prefix,
		       modesize / 8, (int) (align / 8));
	      flags |= (modesize / 8) | SECTION_MERGE | SECTION_STRINGS;
	      return get_section (name, flags, NULL);
	    }
	}
    }

  return readonly_data_section;
}
850
851 /* Return the section to use for constant merging. */
852
853 section *
mergeable_constant_section(machine_mode mode ATTRIBUTE_UNUSED,unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,unsigned int flags ATTRIBUTE_UNUSED)854 mergeable_constant_section (machine_mode mode ATTRIBUTE_UNUSED,
855 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
856 unsigned int flags ATTRIBUTE_UNUSED)
857 {
858 if (HAVE_GAS_SHF_MERGE && flag_merge_constants
859 && mode != VOIDmode
860 && mode != BLKmode
861 && known_le (GET_MODE_BITSIZE (mode), align)
862 && align >= 8
863 && align <= 256
864 && (align & (align - 1)) == 0)
865 {
866 const char *prefix = function_mergeable_rodata_prefix ();
867 char *name = (char *) alloca (strlen (prefix) + 30);
868
869 sprintf (name, "%s.cst%d", prefix, (int) (align / 8));
870 flags |= (align / 8) | SECTION_MERGE;
871 return get_section (name, flags, NULL);
872 }
873 return readonly_data_section;
874 }
875
/* Given NAME, a putative register name, discard any customary prefixes.  */

static const char *
strip_reg_name (const char *name)
{
#ifdef REGISTER_PREFIX
  /* Drop the target's configured register prefix, if present.  */
  size_t prefix_len = strlen (REGISTER_PREFIX);
  if (strncmp (name, REGISTER_PREFIX, prefix_len) == 0)
    name += prefix_len;
#endif
  /* Also skip the conventional '%' or '#' prefix.  */
  if (*name == '%' || *name == '#')
    ++name;
  return name;
}
889
890 /* The user has asked for a DECL to have a particular name. Set (or
891 change) it in such a way that we don't prefix an underscore to
892 it. */
893 void
set_user_assembler_name(tree decl,const char * name)894 set_user_assembler_name (tree decl, const char *name)
895 {
896 char *starred = (char *) alloca (strlen (name) + 2);
897 starred[0] = '*';
898 strcpy (starred + 1, name);
899 symtab->change_decl_assembler_name (decl, get_identifier (starred));
900 SET_DECL_RTL (decl, NULL_RTX);
901 }
902
/* Decode an `asm' spec for a declaration as a register name.
   Return the register number, or -1 if nothing specified,
   or -2 if the ASMSPEC is not `cc' or `memory' and is not recognized,
   or -3 if ASMSPEC is `cc' and is not recognized,
   or -4 if ASMSPEC is `memory' and is not recognized.
   Accept an exact spelling or a decimal number.
   Prefixes such as % are optional.
   *PNREGS is set to the number of hard registers clobbered (usually 1,
   but possibly more for OVERLAPPING_REGISTER_NAMES entries).  */

int
decode_reg_name_and_count (const char *asmspec, int *pnregs)
{
  /* Presume just one register is clobbered.  */
  *pnregs = 1;

  if (asmspec != 0)
    {
      int i;

      /* Get rid of confusing prefixes.  */
      asmspec = strip_reg_name (asmspec);

      /* Allow a decimal number as a "register name".  */
      for (i = strlen (asmspec) - 1; i >= 0; i--)
	if (! ISDIGIT (asmspec[i]))
	  break;
      /* i < 0 here means every character was a digit.  */
      if (asmspec[0] != 0 && i < 0)
	{
	  i = atoi (asmspec);
	  if (i < FIRST_PSEUDO_REGISTER && i >= 0 && reg_names[i][0])
	    return i;
	  else
	    return -2;
	}

      /* Exact match against the canonical register names.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (reg_names[i][0]
	    && ! strcmp (asmspec, strip_reg_name (reg_names[i])))
	  return i;

#ifdef OVERLAPPING_REGISTER_NAMES
      /* Target-defined names that map to a (first register, count)
	 pair rather than a single register.  */
      {
	static const struct
	{
	  const char *const name;
	  const int number;
	  const int nregs;
	} table[] = OVERLAPPING_REGISTER_NAMES;

	for (i = 0; i < (int) ARRAY_SIZE (table); i++)
	  if (table[i].name[0]
	      && ! strcmp (asmspec, table[i].name))
	    {
	      *pnregs = table[i].nregs;
	      return table[i].number;
	    }
      }
#endif /* OVERLAPPING_REGISTER_NAMES */

#ifdef ADDITIONAL_REGISTER_NAMES
      /* Target-defined aliases for single registers.  */
      {
	static const struct { const char *const name; const int number; } table[]
	  = ADDITIONAL_REGISTER_NAMES;

	for (i = 0; i < (int) ARRAY_SIZE (table); i++)
	  if (table[i].name[0]
	      && ! strcmp (asmspec, table[i].name)
	      && reg_names[table[i].number][0])
	    return table[i].number;
      }
#endif /* ADDITIONAL_REGISTER_NAMES */

      if (!strcmp (asmspec, "memory"))
	return -4;

      if (!strcmp (asmspec, "cc"))
	return -3;

      return -2;
    }

  return -1;
}
985
986 int
decode_reg_name(const char * name)987 decode_reg_name (const char *name)
988 {
989 int count;
990 return decode_reg_name_and_count (name, &count);
991 }
992
993
994 /* Return true if DECL's initializer is suitable for a BSS section. */
995
996 bool
bss_initializer_p(const_tree decl,bool named)997 bss_initializer_p (const_tree decl, bool named)
998 {
999 /* Do not put non-common constants into the .bss section, they belong in
1000 a readonly section, except when NAMED is true. */
1001 return ((!TREE_READONLY (decl) || DECL_COMMON (decl) || named)
1002 && (DECL_INITIAL (decl) == NULL
1003 /* In LTO we have no errors in program; error_mark_node is used
1004 to mark offlined constructors. */
1005 || (DECL_INITIAL (decl) == error_mark_node
1006 && !in_lto_p)
1007 || (flag_zero_initialized_in_bss
1008 && initializer_zerop (DECL_INITIAL (decl)))));
1009 }
1010
1011 /* Compute the alignment of variable specified by DECL.
1012 DONT_OUTPUT_DATA is from assemble_variable. */
1013
void
align_variable (tree decl, bool dont_output_data)
{
  unsigned int align = DECL_ALIGN (decl);

  /* In the case for initialing an array whose length isn't specified,
     where we have not yet been able to do the layout,
     figure out the proper alignment now.  */
  if (dont_output_data && DECL_SIZE (decl) == 0
      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    align = MAX (align, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));

  /* Some object file formats have a maximum alignment which they support.
     In particular, a.out format supports a maximum alignment of 4.  */
  if (align > MAX_OFILE_ALIGNMENT)
    {
      error ("alignment of %q+D is greater than maximum object "
	     "file alignment %d", decl,
	     MAX_OFILE_ALIGNMENT/BITS_PER_UNIT);
      align = MAX_OFILE_ALIGNMENT;
    }

  /* User-specified alignment (via attribute/pragma) is taken as-is;
     only compute ABI/optimization alignments otherwise.  */
  if (! DECL_USER_ALIGN (decl))
    {
#ifdef DATA_ABI_ALIGNMENT
      unsigned int data_abi_align
	= DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
      /* For backwards compatibility, don't assume the ABI alignment for
	 TLS variables.  */
      if (! DECL_THREAD_LOCAL_P (decl) || data_abi_align <= BITS_PER_WORD)
	align = data_abi_align;
#endif

      /* On some machines, it is good to increase alignment sometimes.
	 But as DECL_ALIGN is used both for actually emitting the variable
	 and for code accessing the variable as guaranteed alignment, we
	 can only increase the alignment if it is a performance optimization
	 if the references to it must bind to the current definition.  */
      if (decl_binds_to_current_def_p (decl)
	  && !DECL_VIRTUAL_P (decl))
	{
#ifdef DATA_ALIGNMENT
	  unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
	  /* Don't increase alignment too much for TLS variables - TLS space
	     is too precious.  */
	  if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
	    align = data_align;
#endif
	  if (DECL_INITIAL (decl) != 0
	      /* In LTO we have no errors in program; error_mark_node is used
		 to mark offlined constructors.  */
	      && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
	    {
	      /* Let the target bump alignment based on the initializer
		 (e.g. aligning string constants for wider accesses).  */
	      unsigned int const_align
		= targetm.constant_alignment (DECL_INITIAL (decl), align);
	      /* Don't increase alignment too much for TLS variables - TLS
		 space is too precious.  */
	      if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
		align = const_align;
	    }
	}
    }

  /* Reset the alignment in case we have made it tighter, so we can benefit
     from it in get_pointer_alignment.  */
  SET_DECL_ALIGN (decl, align);
}
1081
1082 /* Return DECL_ALIGN (decl), possibly increased for optimization purposes
1083 beyond what align_variable returned. */
1084
static unsigned int
get_variable_align (tree decl)
{
  unsigned int align = DECL_ALIGN (decl);

  /* For user aligned vars or static vars align_variable already did
     everything.  */
  if (DECL_USER_ALIGN (decl) || !TREE_PUBLIC (decl))
    return align;

#ifdef DATA_ABI_ALIGNMENT
  /* align_variable skipped the ABI alignment for TLS variables (for
     backwards compatibility); apply it here for emission purposes.  */
  if (DECL_THREAD_LOCAL_P (decl))
    align = DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
#endif

  /* For decls that bind to the current definition, align_variable
     did also everything, except for not assuming ABI required alignment
     of TLS variables.  For other vars, increase the alignment here
     as an optimization.  */
  if (!decl_binds_to_current_def_p (decl))
    {
      /* On some machines, it is good to increase alignment sometimes.  */
#ifdef DATA_ALIGNMENT
      unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
      /* Don't increase alignment too much for TLS variables - TLS space
	 is too precious.  */
      if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
	align = data_align;
#endif
      if (DECL_INITIAL (decl) != 0
	  /* In LTO we have no errors in program; error_mark_node is used
	     to mark offlined constructors.  */
	  && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
	{
	  unsigned int const_align
	    = targetm.constant_alignment (DECL_INITIAL (decl), align);
	  /* Don't increase alignment too much for TLS variables - TLS space
	     is too precious.  */
	  if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
	    align = const_align;
	}
    }

  return align;
}
1130
1131 /* Return the section into which the given VAR_DECL or CONST_DECL
1132 should be placed. PREFER_NOSWITCH_P is true if a noswitch
1133 section should be used wherever possible. */
1134
section *
get_variable_section (tree decl, bool prefer_noswitch_p)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  int reloc;
  /* Aliases resolve to their ultimate target's section decisions.  */
  varpool_node *vnode = varpool_node::get (decl);
  if (vnode)
    {
      vnode = vnode->ultimate_alias_target ();
      decl = vnode->decl;
    }

  if (TREE_TYPE (decl) != error_mark_node)
    as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

  /* We need the constructor to figure out reloc flag.  */
  if (vnode)
    vnode->get_constructor ();

  if (DECL_COMMON (decl))
    {
      /* If the decl has been given an explicit section name, or it resides
	 in a non-generic address space, then it isn't common, and shouldn't
	 be handled as such.  */
      gcc_assert (DECL_SECTION_NAME (decl) == NULL
		  && ADDR_SPACE_GENERIC_P (as));
      if (DECL_THREAD_LOCAL_P (decl))
	return tls_comm_section;
      else if (TREE_PUBLIC (decl) && bss_initializer_p (decl))
	return comm_section;
    }

  /* Classify the initializer's relocation needs; an erroneous
     initializer is treated pessimistically when the type may hold
     pointers.  */
  if (DECL_INITIAL (decl) == error_mark_node)
    reloc = contains_pointers_p (TREE_TYPE (decl)) ? 3 : 0;
  else if (DECL_INITIAL (decl))
    reloc = compute_reloc_for_constant (DECL_INITIAL (decl));
  else
    reloc = 0;

  resolve_unique_section (decl, reloc, flag_data_sections);
  if (IN_NAMED_SECTION (decl))
    {
      section *sect = get_named_section (decl, NULL, reloc);

      /* A user-named BSS-flagged section can only hold zero
	 initializers; diagnose and poison the initializer otherwise.  */
      if ((sect->common.flags & SECTION_BSS)
	  && !bss_initializer_p (decl, true))
	{
	  error_at (DECL_SOURCE_LOCATION (decl),
		    "only zero initializers are allowed in section %qs",
		    sect->named.name);
	  DECL_INITIAL (decl) = error_mark_node;
	}
      return sect;
    }

  if (ADDR_SPACE_GENERIC_P (as)
      && !DECL_THREAD_LOCAL_P (decl)
      && !(prefer_noswitch_p && targetm.have_switchable_bss_sections)
      && bss_initializer_p (decl))
    {
      /* ASan-protected globals need redzones and so cannot use the
	 local-common shortcut.  */
      if (!TREE_PUBLIC (decl)
	  && !((flag_sanitize & SANITIZE_ADDRESS)
	       && asan_protect_global (decl)))
	return lcomm_section;
      if (bss_noswitch_section)
	return bss_noswitch_section;
    }

  /* Fall back to the target's generic section-selection hook.  */
  return targetm.asm_out.select_section (decl, reloc,
					 get_variable_align (decl));
}
1206
1207 /* Return the block into which object_block DECL should be placed. */
1208
1209 static struct object_block *
get_block_for_decl(tree decl)1210 get_block_for_decl (tree decl)
1211 {
1212 section *sect;
1213
1214 if (VAR_P (decl))
1215 {
1216 /* The object must be defined in this translation unit. */
1217 if (DECL_EXTERNAL (decl))
1218 return NULL;
1219
1220 /* There's no point using object blocks for something that is
1221 isolated by definition. */
1222 if (DECL_COMDAT_GROUP (decl))
1223 return NULL;
1224 }
1225
1226 /* We can only calculate block offsets if the decl has a known
1227 constant size. */
1228 if (DECL_SIZE_UNIT (decl) == NULL)
1229 return NULL;
1230 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (decl)))
1231 return NULL;
1232
1233 /* Find out which section should contain DECL. We cannot put it into
1234 an object block if it requires a standalone definition. */
1235 if (VAR_P (decl))
1236 align_variable (decl, 0);
1237 sect = get_variable_section (decl, true);
1238 if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
1239 return NULL;
1240
1241 return get_block_for_section (sect);
1242 }
1243
1244 /* Make sure block symbol SYMBOL is in block BLOCK. */
1245
1246 static void
change_symbol_block(rtx symbol,struct object_block * block)1247 change_symbol_block (rtx symbol, struct object_block *block)
1248 {
1249 if (block != SYMBOL_REF_BLOCK (symbol))
1250 {
1251 gcc_assert (SYMBOL_REF_BLOCK_OFFSET (symbol) < 0);
1252 SYMBOL_REF_BLOCK (symbol) = block;
1253 }
1254 }
1255
1256 /* Return true if it is possible to put DECL in an object_block. */
1257
1258 static bool
use_blocks_for_decl_p(tree decl)1259 use_blocks_for_decl_p (tree decl)
1260 {
1261 struct symtab_node *snode;
1262
1263 /* Only data DECLs can be placed into object blocks. */
1264 if (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
1265 return false;
1266
1267 /* DECL_INITIAL (decl) set to decl is a hack used for some decls that
1268 are never used from code directly and we never want object block handling
1269 for those. */
1270 if (DECL_INITIAL (decl) == decl)
1271 return false;
1272
1273 /* If this decl is an alias, then we don't want to emit a
1274 definition. */
1275 if (VAR_P (decl)
1276 && (snode = symtab_node::get (decl)) != NULL
1277 && snode->alias)
1278 return false;
1279
1280 return targetm.use_blocks_for_decl_p (decl);
1281 }
1282
1283 /* Follow the IDENTIFIER_TRANSPARENT_ALIAS chain starting at *ALIAS
1284 until we find an identifier that is not itself a transparent alias.
1285 Modify the alias passed to it by reference (and all aliases on the
1286 way to the ultimate target), such that they do not have to be
1287 followed again, and return the ultimate target of the alias
1288 chain. */
1289
1290 static inline tree
ultimate_transparent_alias_target(tree * alias)1291 ultimate_transparent_alias_target (tree *alias)
1292 {
1293 tree target = *alias;
1294
1295 if (IDENTIFIER_TRANSPARENT_ALIAS (target))
1296 {
1297 gcc_assert (TREE_CHAIN (target));
1298 target = ultimate_transparent_alias_target (&TREE_CHAIN (target));
1299 gcc_assert (! IDENTIFIER_TRANSPARENT_ALIAS (target)
1300 && ! TREE_CHAIN (target));
1301 *alias = target;
1302 }
1303
1304 return target;
1305 }
1306
1307 /* Create the DECL_RTL for a VAR_DECL or FUNCTION_DECL. DECL should
1308 have static storage duration. In other words, it should not be an
1309 automatic variable, including PARM_DECLs.
1310
1311 There is, however, one exception: this function handles variables
1312 explicitly placed in a particular register by the user.
1313
1314 This is never called for PARM_DECL nodes. */
1315
void
make_decl_rtl (tree decl)
{
  const char *name = 0;
  int reg_number;
  tree id;
  rtx x;

  /* Check that we are not being given an automatic variable.  */
  gcc_assert (TREE_CODE (decl) != PARM_DECL
	      && TREE_CODE (decl) != RESULT_DECL);

  /* A weak alias has TREE_PUBLIC set but not the other bits.  */
  gcc_assert (!VAR_P (decl)
	      || TREE_STATIC (decl)
	      || TREE_PUBLIC (decl)
	      || DECL_EXTERNAL (decl)
	      || DECL_REGISTER (decl));

  /* And that we were not given a type or a label.  */
  gcc_assert (TREE_CODE (decl) != TYPE_DECL
	      && TREE_CODE (decl) != LABEL_DECL);

  /* For a duplicate declaration, we can be called twice on the
     same DECL node.  Don't discard the RTL already made.  */
  if (DECL_RTL_SET_P (decl))
    {
      /* If the old RTL had the wrong mode, fix the mode.  */
      x = DECL_RTL (decl);
      if (GET_MODE (x) != DECL_MODE (decl))
	SET_DECL_RTL (decl, adjust_address_nv (x, DECL_MODE (decl), 0));

      if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
	return;

      /* ??? Another way to do this would be to maintain a hashed
	 table of such critters.  Instead of adding stuff to a DECL
	 to give certain attributes to it, we could use an external
	 hash map from DECL to set of attributes.  */

      /* Let the target reassign the RTL if it wants.
	 This is necessary, for example, when one machine specific
	 decl attribute overrides another.  */
      targetm.encode_section_info (decl, DECL_RTL (decl), false);

      /* If the symbol has a SYMBOL_REF_BLOCK field, update it based
	 on the new decl information.  */
      if (MEM_P (x)
	  && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (x, 0)))
	change_symbol_block (XEXP (x, 0), get_block_for_decl (decl));

      return;
    }

  /* If this variable belongs to the global constant pool, retrieve the
     pre-computed RTL or recompute it in LTO mode.  */
  if (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
    {
      SET_DECL_RTL (decl, output_constant_def (DECL_INITIAL (decl), 1));
      return;
    }

  id = DECL_ASSEMBLER_NAME (decl);
  /* Instrumentation clones use their ultimate alias target's name.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && cgraph_node::get (decl)
      && cgraph_node::get (decl)->instrumentation_clone)
    ultimate_transparent_alias_target (&id);
  name = IDENTIFIER_POINTER (id);

  /* A '*' prefix on the assembler name marks an explicitly specified
     name; a register variable without one never had its register
     spelled out.  */
  if (name[0] != '*' && TREE_CODE (decl) != FUNCTION_DECL
      && DECL_REGISTER (decl))
    {
      error ("register name not specified for %q+D", decl);
    }
  else if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
    {
      /* Skip the '*' prefix to get the register spelling.  */
      const char *asmspec = name+1;
      machine_mode mode = DECL_MODE (decl);
      reg_number = decode_reg_name (asmspec);
      /* First detect errors in declaring global registers.  */
      if (reg_number == -1)
	error ("register name not specified for %q+D", decl);
      else if (reg_number < 0)
	error ("invalid register name for %q+D", decl);
      else if (mode == BLKmode)
	error ("data type of %q+D isn%'t suitable for a register",
	       decl);
      else if (!in_hard_reg_set_p (accessible_reg_set, mode, reg_number))
	error ("the register specified for %q+D cannot be accessed"
	       " by the current target", decl);
      else if (!in_hard_reg_set_p (operand_reg_set, mode, reg_number))
	error ("the register specified for %q+D is not general enough"
	       " to be used as a register variable", decl);
      else if (!targetm.hard_regno_mode_ok (reg_number, mode))
	error ("register specified for %q+D isn%'t suitable for data type",
	       decl);
      /* Now handle properly declared static register variables.  */
      else
	{
	  int nregs;

	  if (DECL_INITIAL (decl) != 0 && TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = 0;
	      error ("global register variable has initial value");
	    }
	  if (TREE_THIS_VOLATILE (decl))
	    warning (OPT_Wvolatile_register_var,
		     "optimization may eliminate reads and/or "
		     "writes to register variables");

	  /* If the user specified one of the eliminables registers here,
	     e.g., FRAME_POINTER_REGNUM, we don't want to get this variable
	     confused with that register and be eliminated.  This usage is
	     somewhat suspect...  */

	  SET_DECL_RTL (decl, gen_raw_REG (mode, reg_number));
	  ORIGINAL_REGNO (DECL_RTL (decl)) = reg_number;
	  REG_USERVAR_P (DECL_RTL (decl)) = 1;

	  if (TREE_STATIC (decl))
	    {
	      /* Make this register global, so not usable for anything
		 else.  */
#ifdef ASM_DECLARE_REGISTER_GLOBAL
	      name = IDENTIFIER_POINTER (DECL_NAME (decl));
	      ASM_DECLARE_REGISTER_GLOBAL (asm_out_file, decl, reg_number, name);
#endif
	      /* Globalize every hard register the variable spans.  */
	      nregs = hard_regno_nregs (reg_number, mode);
	      while (nregs > 0)
		globalize_reg (decl, reg_number + --nregs);
	    }

	  /* As a register variable, it has no section.  */
	  return;
	}
      /* Avoid internal errors from invalid register
	 specifications.  */
      SET_DECL_ASSEMBLER_NAME (decl, NULL_TREE);
      DECL_HARD_REGISTER (decl) = 0;
      /* Also avoid SSA inconsistencies by pretending this is an external
	 decl now.  */
      DECL_EXTERNAL (decl) = 1;
      return;
    }
  /* Now handle ordinary static variables and functions (in memory).
     Also handle vars declared register invalidly.  */
  else if (name[0] == '*')
    {
#ifdef REGISTER_PREFIX
      /* Warn if an explicit assembler name collides with a register
	 name (only meaningful on targets with a register prefix).  */
      if (strlen (REGISTER_PREFIX) != 0)
	{
	  reg_number = decode_reg_name (name);
	  if (reg_number >= 0 || reg_number == -3)
	    error ("register name given for non-register variable %q+D", decl);
	}
#endif
    }

  /* Specifying a section attribute on a variable forces it into a
     non-.bss section, and thus it cannot be common.  */
  /* FIXME: In general this code should not be necessary because
     visibility pass is doing the same work.  But notice_global_symbol
     is called early and it needs to make DECL_RTL to get the name.
     we take care of recomputing the DECL_RTL after visibility is changed.  */
  if (VAR_P (decl)
      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
      && DECL_SECTION_NAME (decl) != NULL
      && DECL_INITIAL (decl) == NULL_TREE
      && DECL_COMMON (decl))
    DECL_COMMON (decl) = 0;

  /* Variables can't be both common and weak.  */
  if (VAR_P (decl) && DECL_WEAK (decl))
    DECL_COMMON (decl) = 0;

  /* Build the SYMBOL_REF, either as a block symbol or a plain one in
     the address mode of the decl's address space.  */
  if (use_object_blocks_p () && use_blocks_for_decl_p (decl))
    x = create_block_symbol (name, get_block_for_decl (decl), -1);
  else
    {
      machine_mode address_mode = Pmode;
      if (TREE_TYPE (decl) != error_mark_node)
	{
	  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
	  address_mode = targetm.addr_space.address_mode (as);
	}
      x = gen_rtx_SYMBOL_REF (address_mode, name);
    }
  SYMBOL_REF_WEAK (x) = DECL_WEAK (decl);
  SET_SYMBOL_REF_DECL (x, decl);

  /* Wrap the symbol in a MEM of the decl's mode.  */
  x = gen_rtx_MEM (DECL_MODE (decl), x);
  if (TREE_CODE (decl) != FUNCTION_DECL)
    set_mem_attributes (x, decl, 1);
  SET_DECL_RTL (decl, x);

  /* Optionally set flags or add text to the name to record information
     such as that it is a function name.
     If the name is changed, the macro ASM_OUTPUT_LABELREF
     will have to know how to strip this information.  */
  targetm.encode_section_info (decl, DECL_RTL (decl), true);
}
1519
1520 /* Like make_decl_rtl, but inhibit creation of new alias sets when
1521 calling make_decl_rtl. Also, reset DECL_RTL before returning the
1522 rtl. */
1523
rtx
make_decl_rtl_for_debug (tree decl)
{
  unsigned int save_aliasing_flag;
  rtx rtl;

  /* RTL already exists: nothing special to do.  */
  if (DECL_RTL_SET_P (decl))
    return DECL_RTL (decl);

  /* Kludge alert!  Somewhere down the call chain, make_decl_rtl will
     call new_alias_set.  If running with -fcompare-debug, sometimes
     we do not want to create alias sets that will throw the alias
     numbers off in the comparison dumps.  So... clearing
     flag_strict_aliasing will keep new_alias_set() from creating a
     new set.  */
  save_aliasing_flag = flag_strict_aliasing;
  flag_strict_aliasing = 0;

  /* NOTE(review): since DECL_RTL_SET_P was false above, this read is
     what actually triggers make_decl_rtl — presumably via DECL_RTL's
     lazy-creation path; confirm against the DECL_RTL macro in
     tree.h/emit-rtl.  */
  rtl = DECL_RTL (decl);
  /* Reset DECL_RTL back, as various parts of the compiler expects
     DECL_RTL set meaning it is actually going to be output.  */
  SET_DECL_RTL (decl, NULL);

  flag_strict_aliasing = save_aliasing_flag;
  return rtl;
}
1550
1551 /* Output a string of literal assembler code
1552 for an `asm' keyword used between functions. */
1553
1554 void
assemble_asm(tree string)1555 assemble_asm (tree string)
1556 {
1557 const char *p;
1558 app_enable ();
1559
1560 if (TREE_CODE (string) == ADDR_EXPR)
1561 string = TREE_OPERAND (string, 0);
1562
1563 p = TREE_STRING_POINTER (string);
1564 fprintf (asm_out_file, "%s%s\n", p[0] == '\t' ? "" : "\t", p);
1565 }
1566
/* Write the address of the entity given by SYMBOL to SEC.  */
void
assemble_addr_to_section (rtx symbol, section *sec)
{
  switch_to_section (sec);
  /* Pointers must be emitted at pointer alignment.  */
  assemble_align (POINTER_SIZE);
  /* Emit SYMBOL's address as one pointer-sized, aligned integer.  */
  assemble_integer (symbol, POINTER_SIZE_UNITS, POINTER_SIZE, 1);
}
1575
1576 /* Return the numbered .ctors.N (if CONSTRUCTOR_P) or .dtors.N (if
1577 not) section for PRIORITY. */
1578 section *
get_cdtor_priority_section(int priority,bool constructor_p)1579 get_cdtor_priority_section (int priority, bool constructor_p)
1580 {
1581 /* Buffer conservatively large enough for the full range of a 32-bit
1582 int plus the text below. */
1583 char buf[18];
1584
1585 /* ??? This only works reliably with the GNU linker. */
1586 sprintf (buf, "%s.%.5u",
1587 constructor_p ? ".ctors" : ".dtors",
1588 /* Invert the numbering so the linker puts us in the proper
1589 order; constructors are run from right to left, and the
1590 linker sorts in increasing order. */
1591 MAX_INIT_PRIORITY - priority);
1592 return get_section (buf, SECTION_WRITE, NULL);
1593 }
1594
1595 void
default_named_section_asm_out_destructor(rtx symbol,int priority)1596 default_named_section_asm_out_destructor (rtx symbol, int priority)
1597 {
1598 section *sec;
1599
1600 if (priority != DEFAULT_INIT_PRIORITY)
1601 sec = get_cdtor_priority_section (priority,
1602 /*constructor_p=*/false);
1603 else
1604 sec = get_section (".dtors", SECTION_WRITE, NULL);
1605
1606 assemble_addr_to_section (symbol, sec);
1607 }
1608
#ifdef DTORS_SECTION_ASM_OP
/* Emit SYMBOL's address into the target's dedicated destructor
   section.  PRIORITY is unused here.  */
void
default_dtor_section_asm_out_destructor (rtx symbol,
					 int priority ATTRIBUTE_UNUSED)
{
  assemble_addr_to_section (symbol, dtors_section);
}
#endif
1617
1618 void
default_named_section_asm_out_constructor(rtx symbol,int priority)1619 default_named_section_asm_out_constructor (rtx symbol, int priority)
1620 {
1621 section *sec;
1622
1623 if (priority != DEFAULT_INIT_PRIORITY)
1624 sec = get_cdtor_priority_section (priority,
1625 /*constructor_p=*/true);
1626 else
1627 sec = get_section (".ctors", SECTION_WRITE, NULL);
1628
1629 assemble_addr_to_section (symbol, sec);
1630 }
1631
#ifdef CTORS_SECTION_ASM_OP
/* Emit SYMBOL's address into the target's dedicated constructor
   section.  PRIORITY is unused here.  */
void
default_ctor_section_asm_out_constructor (rtx symbol,
					  int priority ATTRIBUTE_UNUSED)
{
  assemble_addr_to_section (symbol, ctors_section);
}
#endif
1640
1641 /* CONSTANT_POOL_BEFORE_FUNCTION may be defined as an expression with
1642 a nonzero value if the constant pool should be output before the
1643 start of the function, or a zero value if the pool should output
1644 after the end of the function. The default is to put it before the
1645 start. */
1646
1647 #ifndef CONSTANT_POOL_BEFORE_FUNCTION
1648 #define CONSTANT_POOL_BEFORE_FUNCTION 1
1649 #endif
1650
1651 /* DECL is an object (either VAR_DECL or FUNCTION_DECL) which is going
1652 to be output to assembler.
1653 Set first_global_object_name and weak_global_object_name as appropriate. */
1654
1655 void
notice_global_symbol(tree decl)1656 notice_global_symbol (tree decl)
1657 {
1658 const char **t = &first_global_object_name;
1659
1660 if (first_global_object_name
1661 || !TREE_PUBLIC (decl)
1662 || DECL_EXTERNAL (decl)
1663 || !DECL_NAME (decl)
1664 || (VAR_P (decl) && DECL_HARD_REGISTER (decl))
1665 || (TREE_CODE (decl) != FUNCTION_DECL
1666 && (!VAR_P (decl)
1667 || (DECL_COMMON (decl)
1668 && (DECL_INITIAL (decl) == 0
1669 || DECL_INITIAL (decl) == error_mark_node)))))
1670 return;
1671
1672 /* We win when global object is found, but it is useful to know about weak
1673 symbol as well so we can produce nicer unique names. */
1674 if (DECL_WEAK (decl) || DECL_ONE_ONLY (decl) || flag_shlib)
1675 t = &weak_global_object_name;
1676
1677 if (!*t)
1678 {
1679 tree id = DECL_ASSEMBLER_NAME (decl);
1680 ultimate_transparent_alias_target (&id);
1681 *t = ggc_strdup (targetm.strip_name_encoding (IDENTIFIER_POINTER (id)));
1682 }
1683 }
1684
1685 /* If not using flag_reorder_blocks_and_partition, decide early whether the
1686 current function goes into the cold section, so that targets can use
1687 current_function_section during RTL expansion. DECL describes the
1688 function. */
1689
1690 void
decide_function_section(tree decl)1691 decide_function_section (tree decl)
1692 {
1693 first_function_block_is_cold = false;
1694
1695 if (DECL_SECTION_NAME (decl))
1696 {
1697 struct cgraph_node *node = cgraph_node::get (current_function_decl);
1698 /* Calls to function_section rely on first_function_block_is_cold
1699 being accurate. */
1700 first_function_block_is_cold = (node
1701 && node->frequency
1702 == NODE_FREQUENCY_UNLIKELY_EXECUTED);
1703 }
1704
1705 in_cold_section_p = first_function_block_is_cold;
1706 }
1707
1708 /* Get the function's name, as described by its RTL. This may be
1709 different from the DECL_NAME name used in the source file. */
1710 const char *
get_fnname_from_decl(tree decl)1711 get_fnname_from_decl (tree decl)
1712 {
1713 rtx x = DECL_RTL (decl);
1714 gcc_assert (MEM_P (x));
1715 x = XEXP (x, 0);
1716 gcc_assert (GET_CODE (x) == SYMBOL_REF);
1717 return XSTR (x, 0);
1718 }
1719
1720 /* Output assembler code for the constant pool of a function and associated
1721 with defining the name of the function. DECL describes the function.
1722 NAME is the function's name. For the constant pool, we use the current
1723 constant pool data. */
1724
void
assemble_start_function (tree decl, const char *fnname)
{
  int align;
  char tmp_label[100];
  bool hot_label_written = false;

  /* For hot/cold-partitioned functions, generate the four begin/end
     labels that delimit the hot and cold text regions for debug info.  */
  if (crtl->has_bb_partition)
    {
      ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTB", const_labelno);
      crtl->subsections.hot_section_label = ggc_strdup (tmp_label);
      ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDB", const_labelno);
      crtl->subsections.cold_section_label = ggc_strdup (tmp_label);
      ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTE", const_labelno);
      crtl->subsections.hot_section_end_label = ggc_strdup (tmp_label);
      ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDE", const_labelno);
      crtl->subsections.cold_section_end_label = ggc_strdup (tmp_label);
      const_labelno++;
      cold_function_name = NULL_TREE;
    }
  else
    {
      crtl->subsections.hot_section_label = NULL;
      crtl->subsections.cold_section_label = NULL;
      crtl->subsections.hot_section_end_label = NULL;
      crtl->subsections.cold_section_end_label = NULL;
    }

  /* The following code does not need preprocessing in the assembler.  */

  app_disable ();

  if (CONSTANT_POOL_BEFORE_FUNCTION)
    output_constant_pool (fnname, decl);

  align = symtab_node::get (decl)->definition_alignment ();

  /* Make sure the not and cold text (code) sections are properly
     aligned.  This is necessary here in the case where the function
     has both hot and cold sections, because we don't want to re-set
     the alignment when the section switch happens mid-function.  */

  if (crtl->has_bb_partition)
    {
      first_function_block_is_cold = false;

      switch_to_section (unlikely_text_section ());
      assemble_align (align);
      ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_label);

      /* When the function starts with a cold section, we need to explicitly
	 align the hot section and write out the hot section label.
	 But if the current function is a thunk, we do not have a CFG.  */
      if (!cfun->is_thunk
	  && BB_PARTITION (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) == BB_COLD_PARTITION)
	{
	  switch_to_section (text_section);
	  assemble_align (align);
	  ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
	  hot_label_written = true;
	  first_function_block_is_cold = true;
	}
      in_cold_section_p = first_function_block_is_cold;
    }


  /* Switch to the correct text section for the start of the function.  */

  switch_to_section (function_section (decl));
  if (crtl->has_bb_partition && !hot_label_written)
    ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);

  /* Tell assembler to move to target machine's alignment for functions.  */
  align = floor_log2 (align / BITS_PER_UNIT);
  if (align > 0)
    {
      ASM_OUTPUT_ALIGN (asm_out_file, align);
    }

  /* Handle a user-specified function alignment.
     Note that we still need to align to DECL_ALIGN, as above,
     because ASM_OUTPUT_MAX_SKIP_ALIGN might not do any alignment at all.  */
  if (! DECL_USER_ALIGN (decl)
      && align_functions_log > align
      && optimize_function_for_speed_p (cfun))
    {
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
      int align_log = align_functions_log;
#endif
      int max_skip = align_functions - 1;
      /* -flimit-function-alignment: don't pad more than the function's
	 own length, when that length is known.  */
      if (flag_limit_function_alignment && crtl->max_insn_address > 0
	  && max_skip >= crtl->max_insn_address)
	max_skip = crtl->max_insn_address - 1;

#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
      ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file, align_log, max_skip);
#else
      ASM_OUTPUT_ALIGN (asm_out_file, align_functions_log);
#endif
    }

#ifdef ASM_OUTPUT_FUNCTION_PREFIX
  ASM_OUTPUT_FUNCTION_PREFIX (asm_out_file, fnname);
#endif

  if (!DECL_IGNORED_P (decl))
    (*debug_hooks->begin_function) (decl);

  /* Make function name accessible from other files, if appropriate.  */

  if (TREE_PUBLIC (decl)
      || (cgraph_node::get (decl)->instrumentation_clone
	  && cgraph_node::get (decl)->instrumented_version
	  && TREE_PUBLIC (cgraph_node::get (decl)->instrumented_version->decl)))
    {
      notice_global_symbol (decl);

      globalize_decl (decl);

      maybe_assemble_visibility (decl);
    }

  if (DECL_PRESERVE_P (decl))
    targetm.asm_out.mark_decl_preserved (fnname);

  /* Determine the patchable-function-entry area: start with the
     command-line defaults, then let a per-function attribute
     override them.  */
  unsigned HOST_WIDE_INT patch_area_size = function_entry_patch_area_size;
  unsigned HOST_WIDE_INT patch_area_entry = function_entry_patch_area_start;

  tree patchable_function_entry_attr
    = lookup_attribute ("patchable_function_entry", DECL_ATTRIBUTES (decl));
  if (patchable_function_entry_attr)
    {
      tree pp_val = TREE_VALUE (patchable_function_entry_attr);
      tree patchable_function_entry_value1 = TREE_VALUE (pp_val);

      if (tree_fits_uhwi_p (patchable_function_entry_value1))
	patch_area_size = tree_to_uhwi (patchable_function_entry_value1);
      else
	gcc_unreachable ();

      /* The optional second attribute argument gives the number of NOPs
	 placed before the entry label.  */
      patch_area_entry = 0;
      if (list_length (pp_val) > 1)
	{
	  tree patchable_function_entry_value2 =
	    TREE_VALUE (TREE_CHAIN (pp_val));

	  if (tree_fits_uhwi_p (patchable_function_entry_value2))
	    patch_area_entry = tree_to_uhwi (patchable_function_entry_value2);
	  else
	    gcc_unreachable ();
	}
    }

  /* An entry offset past the total area is meaningless; warn (unless
     the area itself is empty) and fall back to offset 0.  */
  if (patch_area_entry > patch_area_size)
    {
      if (patch_area_size > 0)
	warning (OPT_Wattributes, "Patchable function entry > size");
      patch_area_entry = 0;
    }

  /* Emit the patching area before the entry label, if any.  */
  if (patch_area_entry > 0)
    targetm.asm_out.print_patchable_function_entry (asm_out_file,
						    patch_area_entry, true);

  /* Do any machine/system dependent processing of the function name.  */
#ifdef ASM_DECLARE_FUNCTION_NAME
  ASM_DECLARE_FUNCTION_NAME (asm_out_file, fnname, current_function_decl);
#else
  /* Standard thing is just output label for the function.  */
  ASM_OUTPUT_FUNCTION_LABEL (asm_out_file, fnname, current_function_decl);
#endif /* ASM_DECLARE_FUNCTION_NAME */

  /* And the area after the label.  Record it if we haven't done so yet.  */
  if (patch_area_size > patch_area_entry)
    targetm.asm_out.print_patchable_function_entry (asm_out_file,
						    patch_area_size-patch_area_entry,
						    patch_area_entry == 0);

  if (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (decl)))
    saw_no_split_stack = true;
}
1907
1908 /* Output assembler code associated with defining the size of the
1909 function. DECL describes the function. NAME is the function's name. */
1910
void
assemble_end_function (tree decl, const char *fnname ATTRIBUTE_UNUSED)
{
#ifdef ASM_DECLARE_FUNCTION_SIZE
  /* We could have switched section in the middle of the function.  */
  if (crtl->has_bb_partition)
    switch_to_section (function_section (decl));
  ASM_DECLARE_FUNCTION_SIZE (asm_out_file, fnname, decl);
#endif
  /* If the constant pool goes after the function, emit it now and
     return to the function's section afterwards.  */
  if (! CONSTANT_POOL_BEFORE_FUNCTION)
    {
      output_constant_pool (fnname, decl);
      switch_to_section (function_section (decl)); /* need to switch back */
    }
  /* Output labels for end of hot/cold text sections (to be used by
     debug info.)  */
  if (crtl->has_bb_partition)
    {
      section *save_text_section;

      /* Remember the current section so it can be restored on exit.  */
      save_text_section = in_section;
      switch_to_section (unlikely_text_section ());
#ifdef ASM_DECLARE_COLD_FUNCTION_SIZE
      /* Emit a size directive for the cold part, if one was emitted.  */
      if (cold_function_name != NULL_TREE)
	ASM_DECLARE_COLD_FUNCTION_SIZE (asm_out_file,
					IDENTIFIER_POINTER (cold_function_name),
					decl);
#endif
      ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_end_label);
      /* The hot part ends in text_section when the function's first
	 block was cold, otherwise in the function's own section.  */
      if (first_function_block_is_cold)
	switch_to_section (text_section);
      else
	switch_to_section (function_section (decl));
      ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_end_label);
      switch_to_section (save_text_section);
    }
}
1948
1949 /* Assemble code to leave SIZE bytes of zeros. */
1950
void
assemble_zeros (unsigned HOST_WIDE_INT size)
{
  /* Do no output if -fsyntax-only.  */
  if (flag_syntax_only)
    return;

#ifdef ASM_NO_SKIP_IN_TEXT
  /* The `space' pseudo in the text section outputs nop insns rather than 0s,
     so we must output 0s explicitly in the text section.  */
  if (ASM_NO_SKIP_IN_TEXT && (in_section->common.flags & SECTION_CODE) != 0)
    {
      unsigned HOST_WIDE_INT i;
      /* Emit SIZE individual zero bytes.  */
      for (i = 0; i < size; i++)
	assemble_integer (const0_rtx, 1, BITS_PER_UNIT, 1);
    }
  else
#endif
    /* Otherwise a single skip directive suffices; omit it entirely when
       SIZE is zero.  */
    if (size > 0)
      ASM_OUTPUT_SKIP (asm_out_file, size);
}
1972
1973 /* Assemble an alignment pseudo op for an ALIGN-bit boundary. */
1974
1975 void
assemble_align(int align)1976 assemble_align (int align)
1977 {
1978 if (align > BITS_PER_UNIT)
1979 {
1980 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
1981 }
1982 }
1983
1984 /* Assemble a string constant with the specified C string as contents. */
1985
1986 void
assemble_string(const char * p,int size)1987 assemble_string (const char *p, int size)
1988 {
1989 int pos = 0;
1990 int maximum = 2000;
1991
1992 /* If the string is very long, split it up. */
1993
1994 while (pos < size)
1995 {
1996 int thissize = size - pos;
1997 if (thissize > maximum)
1998 thissize = maximum;
1999
2000 ASM_OUTPUT_ASCII (asm_out_file, p, thissize);
2001
2002 pos += thissize;
2003 p += thissize;
2004 }
2005 }
2006
2007
2008 /* A noswitch_section_callback for lcomm_section. */
2009
static bool
emit_local (tree decl ATTRIBUTE_UNUSED,
	    const char *name ATTRIBUTE_UNUSED,
	    unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
	    unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
  /* Emit DECL as local common storage.  Prefer the target directives
     that carry an explicit alignment; the plain ASM_OUTPUT_LOCAL form
     only receives the pre-ROUNDED size.  Return true iff the alignment
     was emitted, so the caller (assemble_noswitch_variable) knows
     whether it still needs to diagnose an unsatisfiable alignment.  */
#if defined ASM_OUTPUT_ALIGNED_DECL_LOCAL
  unsigned int align = symtab_node::get (decl)->definition_alignment ();
  ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, decl, name,
				 size, align);
  return true;
#elif defined ASM_OUTPUT_ALIGNED_LOCAL
  unsigned int align = symtab_node::get (decl)->definition_alignment ();
  ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, align);
  return true;
#else
  ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
  return false;
#endif
}
2030
2031 /* A noswitch_section_callback for bss_noswitch_section. */
2032
#if defined ASM_OUTPUT_ALIGNED_BSS
static bool
emit_bss (tree decl ATTRIBUTE_UNUSED,
	  const char *name ATTRIBUTE_UNUSED,
	  unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
	  unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
  /* Emit DECL into BSS with its variable alignment.  ROUNDED is unused
     because the aligned directive carries the alignment itself; return
     true to tell the caller the alignment was honored.  */
  ASM_OUTPUT_ALIGNED_BSS (asm_out_file, decl, name, size,
			  get_variable_align (decl));
  return true;
}
#endif
2045
2046 /* A noswitch_section_callback for comm_section. */
2047
static bool
emit_common (tree decl ATTRIBUTE_UNUSED,
	     const char *name ATTRIBUTE_UNUSED,
	     unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
	     unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
  /* Emit DECL as common storage.  Prefer the directives that carry an
     explicit alignment; the plain ASM_OUTPUT_COMMON form only receives
     the pre-ROUNDED size.  Return true iff the alignment was emitted,
     so the caller can diagnose an unsatisfiable alignment otherwise.  */
#if defined ASM_OUTPUT_ALIGNED_DECL_COMMON
  ASM_OUTPUT_ALIGNED_DECL_COMMON (asm_out_file, decl, name,
				  size, get_variable_align (decl));
  return true;
#elif defined ASM_OUTPUT_ALIGNED_COMMON
  ASM_OUTPUT_ALIGNED_COMMON (asm_out_file, name, size,
			     get_variable_align (decl));
  return true;
#else
  ASM_OUTPUT_COMMON (asm_out_file, name, size, rounded);
  return false;
#endif
}
2067
2068 /* A noswitch_section_callback for tls_comm_section. */
2069
static bool
emit_tls_common (tree decl ATTRIBUTE_UNUSED,
		 const char *name ATTRIBUTE_UNUSED,
		 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
		 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_TLS_COMMON
  /* Emit DECL as thread-local common storage.  */
  ASM_OUTPUT_TLS_COMMON (asm_out_file, decl, name, size);
  return true;
#else
  /* A target without the directive cannot express TLS common data;
     report it as unimplemented rather than miscompiling.  */
  sorry ("thread-local COMMON data not implemented");
  return true;
#endif
}
2084
2085 /* Assemble DECL given that it belongs in SECTION_NOSWITCH section SECT.
2086 NAME is the name of DECL's SYMBOL_REF. */
2087
2088 static void
assemble_noswitch_variable(tree decl,const char * name,section * sect,unsigned int align)2089 assemble_noswitch_variable (tree decl, const char *name, section *sect,
2090 unsigned int align)
2091 {
2092 unsigned HOST_WIDE_INT size, rounded;
2093
2094 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2095 rounded = size;
2096
2097 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_protect_global (decl))
2098 size += asan_red_zone_size (size);
2099
2100 /* Don't allocate zero bytes of common,
2101 since that means "undefined external" in the linker. */
2102 if (size == 0)
2103 rounded = 1;
2104
2105 /* Round size up to multiple of BIGGEST_ALIGNMENT bits
2106 so that each uninitialized object starts on such a boundary. */
2107 rounded += (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1;
2108 rounded = (rounded / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2109 * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));
2110
2111 if (!sect->noswitch.callback (decl, name, size, rounded)
2112 && (unsigned HOST_WIDE_INT) (align / BITS_PER_UNIT) > rounded)
2113 error ("requested alignment for %q+D is greater than "
2114 "implemented alignment of %wu", decl, rounded);
2115 }
2116
2117 /* A subroutine of assemble_variable. Output the label and contents of
2118 DECL, whose address is a SYMBOL_REF with name NAME. DONT_OUTPUT_DATA
2119 is as for assemble_variable. */
2120
static void
assemble_variable_contents (tree decl, const char *name,
			    bool dont_output_data)
{
  /* Do any machine/system dependent processing of the object.  */
#ifdef ASM_DECLARE_OBJECT_NAME
  last_assemble_variable_decl = decl;
  ASM_DECLARE_OBJECT_NAME (asm_out_file, name, decl);
#else
  /* Standard thing is just output label for the object.  */
  ASM_OUTPUT_LABEL (asm_out_file, name);
#endif /* ASM_DECLARE_OBJECT_NAME */

  if (!dont_output_data)
    {
      /* Caller is supposed to use varpool_get_constructor when it wants
	 to output the body.  */
      gcc_assert (!in_lto_p || DECL_INITIAL (decl) != error_mark_node);
      /* A missing, erroneous or all-zero initializer is emitted as a
	 block of zeros rather than an explicit constant.  */
      if (DECL_INITIAL (decl)
	  && DECL_INITIAL (decl) != error_mark_node
	  && !initializer_zerop (DECL_INITIAL (decl)))
	/* Output the actual data.  */
	output_constant (DECL_INITIAL (decl),
			 tree_to_uhwi (DECL_SIZE_UNIT (decl)),
			 get_variable_align (decl),
			 false);
      else
	/* Leave space for it.  */
	assemble_zeros (tree_to_uhwi (DECL_SIZE_UNIT (decl)));
      /* Let the target emit anything it needs after the object.  */
      targetm.asm_out.decl_end ();
    }
}
2153
2154 /* Write out assembly for the variable DECL, which is not defined in
2155 the current translation unit. */
2156 void
assemble_undefined_decl(tree decl)2157 assemble_undefined_decl (tree decl)
2158 {
2159 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2160 targetm.asm_out.assemble_undefined_decl (asm_out_file, name, decl);
2161 }
2162
2163 /* Assemble everything that is needed for a variable or function declaration.
2164 Not used for automatic variables, and not used for function definitions.
2165 Should not be called for variables of incomplete structure type.
2166
2167 TOP_LEVEL is nonzero if this variable has file scope.
2168 AT_END is nonzero if this is the special handling, at end of compilation,
2169 to define things that have had only tentative definitions.
2170 DONT_OUTPUT_DATA if nonzero means don't actually output the
2171 initial value (that will be done by the caller). */
2172
2173 void
assemble_variable(tree decl,int top_level ATTRIBUTE_UNUSED,int at_end ATTRIBUTE_UNUSED,int dont_output_data)2174 assemble_variable (tree decl, int top_level ATTRIBUTE_UNUSED,
2175 int at_end ATTRIBUTE_UNUSED, int dont_output_data)
2176 {
2177 const char *name;
2178 rtx decl_rtl, symbol;
2179 section *sect;
2180 unsigned int align;
2181 bool asan_protected = false;
2182
2183 /* This function is supposed to handle VARIABLES. Ensure we have one. */
2184 gcc_assert (VAR_P (decl));
2185
2186 /* Emulated TLS had better not get this far. */
2187 gcc_checking_assert (targetm.have_tls || !DECL_THREAD_LOCAL_P (decl));
2188
2189 last_assemble_variable_decl = 0;
2190
2191 /* Normally no need to say anything here for external references,
2192 since assemble_external is called by the language-specific code
2193 when a declaration is first seen. */
2194
2195 if (DECL_EXTERNAL (decl))
2196 return;
2197
2198 /* Do nothing for global register variables. */
2199 if (DECL_RTL_SET_P (decl) && REG_P (DECL_RTL (decl)))
2200 {
2201 TREE_ASM_WRITTEN (decl) = 1;
2202 return;
2203 }
2204
2205 /* If type was incomplete when the variable was declared,
2206 see if it is complete now. */
2207
2208 if (DECL_SIZE (decl) == 0)
2209 layout_decl (decl, 0);
2210
2211 /* Still incomplete => don't allocate it; treat the tentative defn
2212 (which is what it must have been) as an `extern' reference. */
2213
2214 if (!dont_output_data && DECL_SIZE (decl) == 0)
2215 {
2216 error ("storage size of %q+D isn%'t known", decl);
2217 TREE_ASM_WRITTEN (decl) = 1;
2218 return;
2219 }
2220
2221 /* The first declaration of a variable that comes through this function
2222 decides whether it is global (in C, has external linkage)
2223 or local (in C, has internal linkage). So do nothing more
2224 if this function has already run. */
2225
2226 if (TREE_ASM_WRITTEN (decl))
2227 return;
2228
2229 /* Make sure targetm.encode_section_info is invoked before we set
2230 ASM_WRITTEN. */
2231 decl_rtl = DECL_RTL (decl);
2232
2233 TREE_ASM_WRITTEN (decl) = 1;
2234
2235 /* Do no output if -fsyntax-only. */
2236 if (flag_syntax_only)
2237 return;
2238
2239 if (! dont_output_data
2240 && ! valid_constant_size_p (DECL_SIZE_UNIT (decl)))
2241 {
2242 error ("size of variable %q+D is too large", decl);
2243 return;
2244 }
2245
2246 gcc_assert (MEM_P (decl_rtl));
2247 gcc_assert (GET_CODE (XEXP (decl_rtl, 0)) == SYMBOL_REF);
2248 symbol = XEXP (decl_rtl, 0);
2249
2250 /* If this symbol belongs to the tree constant pool, output the constant
2251 if it hasn't already been written. */
2252 if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
2253 {
2254 tree decl = SYMBOL_REF_DECL (symbol);
2255 if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
2256 output_constant_def_contents (symbol);
2257 return;
2258 }
2259
2260 app_disable ();
2261
2262 name = XSTR (symbol, 0);
2263 if (TREE_PUBLIC (decl) && DECL_NAME (decl))
2264 notice_global_symbol (decl);
2265
2266 /* Compute the alignment of this data. */
2267
2268 align_variable (decl, dont_output_data);
2269
2270 if ((flag_sanitize & SANITIZE_ADDRESS)
2271 && asan_protect_global (decl))
2272 {
2273 asan_protected = true;
2274 SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
2275 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
2276 }
2277
2278 set_mem_align (decl_rtl, DECL_ALIGN (decl));
2279
2280 align = get_variable_align (decl);
2281
2282 if (TREE_PUBLIC (decl))
2283 maybe_assemble_visibility (decl);
2284
2285 if (DECL_PRESERVE_P (decl))
2286 targetm.asm_out.mark_decl_preserved (name);
2287
2288 /* First make the assembler name(s) global if appropriate. */
2289 sect = get_variable_section (decl, false);
2290 if (TREE_PUBLIC (decl)
2291 && (sect->common.flags & SECTION_COMMON) == 0)
2292 globalize_decl (decl);
2293
2294 /* Output any data that we will need to use the address of. */
2295 if (DECL_INITIAL (decl) && DECL_INITIAL (decl) != error_mark_node)
2296 output_addressed_constants (DECL_INITIAL (decl));
2297
2298 /* dbxout.c needs to know this. */
2299 if (sect && (sect->common.flags & SECTION_CODE) != 0)
2300 DECL_IN_TEXT_SECTION (decl) = 1;
2301
2302 /* If the decl is part of an object_block, make sure that the decl
2303 has been positioned within its block, but do not write out its
2304 definition yet. output_object_blocks will do that later. */
2305 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
2306 {
2307 gcc_assert (!dont_output_data);
2308 place_block_symbol (symbol);
2309 }
2310 else if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
2311 assemble_noswitch_variable (decl, name, sect, align);
2312 else
2313 {
2314 /* Special-case handling of vtv comdat sections. */
2315 if (sect->named.name
2316 && (strcmp (sect->named.name, ".vtable_map_vars") == 0))
2317 handle_vtv_comdat_section (sect, decl);
2318 else
2319 switch_to_section (sect);
2320 if (align > BITS_PER_UNIT)
2321 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
2322 assemble_variable_contents (decl, name, dont_output_data);
2323 if (asan_protected)
2324 {
2325 unsigned HOST_WIDE_INT int size
2326 = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2327 assemble_zeros (asan_red_zone_size (size));
2328 }
2329 }
2330 }
2331
2332
2333 /* Given a function declaration (FN_DECL), this function assembles the
2334 function into the .preinit_array section. */
2335
2336 void
assemble_vtv_preinit_initializer(tree fn_decl)2337 assemble_vtv_preinit_initializer (tree fn_decl)
2338 {
2339 section *sect;
2340 unsigned flags = SECTION_WRITE;
2341 rtx symbol = XEXP (DECL_RTL (fn_decl), 0);
2342
2343 flags |= SECTION_NOTYPE;
2344 sect = get_section (".preinit_array", flags, fn_decl);
2345 switch_to_section (sect);
2346 assemble_addr_to_section (symbol, sect);
2347 }
2348
2349 /* Return 1 if type TYPE contains any pointers. */
2350
2351 static int
contains_pointers_p(tree type)2352 contains_pointers_p (tree type)
2353 {
2354 switch (TREE_CODE (type))
2355 {
2356 case POINTER_TYPE:
2357 case REFERENCE_TYPE:
2358 /* I'm not sure whether OFFSET_TYPE needs this treatment,
2359 so I'll play safe and return 1. */
2360 case OFFSET_TYPE:
2361 return 1;
2362
2363 case RECORD_TYPE:
2364 case UNION_TYPE:
2365 case QUAL_UNION_TYPE:
2366 {
2367 tree fields;
2368 /* For a type that has fields, see if the fields have pointers. */
2369 for (fields = TYPE_FIELDS (type); fields; fields = DECL_CHAIN (fields))
2370 if (TREE_CODE (fields) == FIELD_DECL
2371 && contains_pointers_p (TREE_TYPE (fields)))
2372 return 1;
2373 return 0;
2374 }
2375
2376 case ARRAY_TYPE:
2377 /* An array type contains pointers if its element type does. */
2378 return contains_pointers_p (TREE_TYPE (type));
2379
2380 default:
2381 return 0;
2382 }
2383 }
2384
/* We delay assemble_external processing until
   the compilation unit is finalized.  This is the best we can do for
   right now (i.e. stage 3 of GCC 4.0) - the right thing is to delay
   it all the way to final.  See PR 17982 for further discussion.  */
static GTY(()) tree pending_assemble_externals;

#ifdef ASM_OUTPUT_EXTERNAL
/* Some targets delay some output to final using TARGET_ASM_FILE_END.
   As a result, assemble_external can be called after the list of externals
   is processed and the pointer set destroyed.  */
static bool pending_assemble_externals_processed;

/* Avoid O(external_decls**2) lookups in the pending_assemble_externals
   TREE_LIST in assemble_external.  Not GTY-rooted: it is freed by
   process_pending_assemble_externals.  */
static hash_set<tree> *pending_assemble_externals_set;
2400
2401 /* True if DECL is a function decl for which no out-of-line copy exists.
2402 It is assumed that DECL's assembler name has been set. */
2403
2404 static bool
incorporeal_function_p(tree decl)2405 incorporeal_function_p (tree decl)
2406 {
2407 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
2408 {
2409 const char *name;
2410
2411 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2412 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2413 return true;
2414
2415 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2416 /* Atomic or sync builtins which have survived this far will be
2417 resolved externally and therefore are not incorporeal. */
2418 if (strncmp (name, "__builtin_", 10) == 0)
2419 return true;
2420 }
2421 return false;
2422 }
2423
2424 /* Actually do the tests to determine if this is necessary, and invoke
2425 ASM_OUTPUT_EXTERNAL. */
static void
assemble_external_real (tree decl)
{
  rtx rtl = DECL_RTL (decl);

  /* Emit the declaration only once (SYMBOL_REF_USED guards repeats),
     and never for builtins with no out-of-line body.  */
  if (MEM_P (rtl) && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
      && !SYMBOL_REF_USED (XEXP (rtl, 0))
      && !incorporeal_function_p (decl))
    {
      /* Some systems do require some output.  */
      SYMBOL_REF_USED (XEXP (rtl, 0)) = 1;
      ASM_OUTPUT_EXTERNAL (asm_out_file, decl, XSTR (XEXP (rtl, 0), 0));
    }
}
2440 #endif
2441
/* Drain the queue of delayed external declarations, emitting each one,
   then free the duplicate-suppression set.  After this runs,
   assemble_external emits declarations immediately (see
   pending_assemble_externals_processed).  */

void
process_pending_assemble_externals (void)
{
#ifdef ASM_OUTPUT_EXTERNAL
  tree list;
  for (list = pending_assemble_externals; list; list = TREE_CHAIN (list))
    assemble_external_real (TREE_VALUE (list));

  pending_assemble_externals = 0;
  pending_assemble_externals_processed = true;
  delete pending_assemble_externals_set;
  /* Null the pointer so any erroneous later use faults predictably
     instead of dereferencing freed memory.  */
  pending_assemble_externals_set = NULL;
#endif
}
2455
/* This TREE_LIST contains any weak symbol declarations waiting
   to be emitted.  Entries are appended by assemble_external.  */
static GTY(()) tree weak_decls;
2459
2460 /* Output something to declare an external symbol to the assembler,
2461 and qualifiers such as weakness. (Most assemblers don't need
2462 extern declaration, so we normally output nothing.) Do nothing if
2463 DECL is not external. */
2464
void
assemble_external (tree decl ATTRIBUTE_UNUSED)
{
  /* Make sure that the ASM_OUT_FILE is open.
     If it's not, we should not be calling this function.  */
  gcc_assert (asm_out_file);

  /* In a perfect world, the following condition would be true.
     Sadly, the Go front end emit assembly *from the front end*,
     bypassing the call graph.  See PR52739.  Fix before GCC 4.8.  */
#if 0
  /* This function should only be called if we are expanding, or have
     expanded, to RTL.
     Ideally, only final.c would be calling this function, but it is
     not clear whether that would break things somehow.  See PR 17982
     for further discussion.  */
  gcc_assert (state == EXPANSION
	      || state == FINISHED);
#endif

  /* Only public external declarations need annotation.  */
  if (!DECL_P (decl) || !DECL_EXTERNAL (decl) || !TREE_PUBLIC (decl))
    return;

  /* We want to output annotation for weak and external symbols at
     very last to check if they are references or not.  */

  if (TARGET_SUPPORTS_WEAK
      && DECL_WEAK (decl)
      /* TREE_STATIC is a weird and abused creature which is not
	 generally the right test for whether an entity has been
	 locally emitted, inlined or otherwise not-really-extern, but
	 for declarations that can be weak, it happens to be
	 match.  */
      && !TREE_STATIC (decl)
      && lookup_attribute ("weak", DECL_ATTRIBUTES (decl))
      && value_member (decl, weak_decls) == NULL_TREE)
    weak_decls = tree_cons (NULL, decl, weak_decls);

#ifdef ASM_OUTPUT_EXTERNAL
  /* Once the pending list has been processed and freed, emit directly;
     otherwise queue the decl, using the hash set to skip duplicates.  */
  if (pending_assemble_externals_processed)
    {
      assemble_external_real (decl);
      return;
    }

  if (! pending_assemble_externals_set->add (decl))
    pending_assemble_externals = tree_cons (NULL, decl,
					    pending_assemble_externals);
#endif
}
2515
2516 /* Similar, for calling a library function FUN. */
2517
2518 void
assemble_external_libcall(rtx fun)2519 assemble_external_libcall (rtx fun)
2520 {
2521 /* Declare library function name external when first used, if nec. */
2522 if (! SYMBOL_REF_USED (fun))
2523 {
2524 SYMBOL_REF_USED (fun) = 1;
2525 targetm.asm_out.external_libcall (fun);
2526 }
2527 }
2528
2529 /* Assemble a label named NAME. */
2530
void
assemble_label (FILE *file, const char *name)
{
  /* Thin wrapper over the target's label-definition directive.  */
  ASM_OUTPUT_LABEL (file, name);
}
2536
2537 /* Set the symbol_referenced flag for ID. */
void
mark_referenced (tree id)
{
  /* ID is an identifier node (see assemble_name, which passes the
     result of maybe_get_identifier here).  */
  TREE_SYMBOL_REFERENCED (id) = 1;
}
2543
2544 /* Set the symbol_referenced flag for DECL and notify callgraph. */
2545 void
mark_decl_referenced(tree decl)2546 mark_decl_referenced (tree decl)
2547 {
2548 if (TREE_CODE (decl) == FUNCTION_DECL)
2549 {
2550 /* Extern inline functions don't become needed when referenced.
2551 If we know a method will be emitted in other TU and no new
2552 functions can be marked reachable, just use the external
2553 definition. */
2554 struct cgraph_node *node = cgraph_node::get_create (decl);
2555 if (!DECL_EXTERNAL (decl)
2556 && !node->definition)
2557 node->mark_force_output ();
2558 }
2559 else if (VAR_P (decl))
2560 {
2561 varpool_node *node = varpool_node::get_create (decl);
2562 /* C++ frontend use mark_decl_references to force COMDAT variables
2563 to be output that might appear dead otherwise. */
2564 node->force_output = true;
2565 }
2566 /* else do nothing - we can get various sorts of CST nodes here,
2567 which do not need to be marked. */
2568 }
2569
2570
2571 /* Output to FILE (an assembly file) a reference to NAME. If NAME
2572 starts with a *, the rest of NAME is output verbatim. Otherwise
2573 NAME is transformed in a target-specific way (usually by the
2574 addition of an underscore). */
2575
2576 void
assemble_name_raw(FILE * file,const char * name)2577 assemble_name_raw (FILE *file, const char *name)
2578 {
2579 if (name[0] == '*')
2580 fputs (&name[1], file);
2581 else
2582 ASM_OUTPUT_LABELREF (file, name);
2583 }
2584
2585 /* Like assemble_name_raw, but should be used when NAME might refer to
2586 an entity that is also represented as a tree (like a function or
2587 variable). If NAME does refer to such an entity, that entity will
2588 be marked as referenced. */
2589
2590 void
assemble_name(FILE * file,const char * name)2591 assemble_name (FILE *file, const char *name)
2592 {
2593 const char *real_name;
2594 tree id;
2595
2596 real_name = targetm.strip_name_encoding (name);
2597
2598 id = maybe_get_identifier (real_name);
2599 if (id)
2600 {
2601 tree id_orig = id;
2602
2603 mark_referenced (id);
2604 ultimate_transparent_alias_target (&id);
2605 if (id != id_orig)
2606 name = IDENTIFIER_POINTER (id);
2607 gcc_assert (! TREE_CHAIN (id));
2608 }
2609
2610 assemble_name_raw (file, name);
2611 }
2612
2613 /* Allocate SIZE bytes writable static space with a gensym name
2614 and return an RTX to refer to its address. */
2615
rtx
assemble_static_space (unsigned HOST_WIDE_INT size)
{
  char name[17];
  const char *namestring;
  rtx x;

  /* Generate a fresh internal label ("LF<n>") for this block.  */
  ASM_GENERATE_INTERNAL_LABEL (name, "LF", const_labelno);
  ++const_labelno;
  namestring = ggc_strdup (name);

  x = gen_rtx_SYMBOL_REF (Pmode, namestring);
  SYMBOL_REF_FLAGS (x) = SYMBOL_FLAG_LOCAL;

  /* Emit the storage, preferring the directive that carries an
     explicit alignment.  */
#ifdef ASM_OUTPUT_ALIGNED_DECL_LOCAL
  ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, NULL_TREE, name, size,
				 BIGGEST_ALIGNMENT);
#else
#ifdef ASM_OUTPUT_ALIGNED_LOCAL
  ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, BIGGEST_ALIGNMENT);
#else
  {
    /* Round size up to multiple of BIGGEST_ALIGNMENT bits
       so that each uninitialized object starts on such a boundary.  */
    /* Variable `rounded' might or might not be used in ASM_OUTPUT_LOCAL.  */
    unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED
      = ((size + (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1)
	 / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)
	 * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));
    ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
  }
#endif
#endif
  return x;
}
2651
2652 /* Assemble the static constant template for function entry trampolines.
2653 This is done at most once per compilation.
2654 Returns an RTX for the address of the template. */
2655
/* Cached MEM referring to the emitted template; built at most once.  */
static GTY(()) rtx initial_trampoline;

rtx
assemble_trampoline_template (void)
{
  char label[256];
  const char *name;
  int align;
  rtx symbol;

  gcc_assert (targetm.asm_out.trampoline_template != NULL);

  /* Reuse the template if it was already emitted.  */
  if (initial_trampoline)
    return initial_trampoline;

  /* By default, put trampoline templates in read-only data section.  */

#ifdef TRAMPOLINE_SECTION
  switch_to_section (TRAMPOLINE_SECTION);
#else
  switch_to_section (readonly_data_section);
#endif

  /* Write the assembler code to define one.  */
  align = floor_log2 (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
  if (align > 0)
    ASM_OUTPUT_ALIGN (asm_out_file, align);

  targetm.asm_out.internal_label (asm_out_file, "LTRAMP", 0);
  targetm.asm_out.trampoline_template (asm_out_file);

  /* Record the rtl to refer to it.  */
  ASM_GENERATE_INTERNAL_LABEL (label, "LTRAMP", 0);
  name = ggc_strdup (label);
  symbol = gen_rtx_SYMBOL_REF (Pmode, name);
  SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_LOCAL;

  /* Wrap the label in a BLKmode MEM carrying alignment and size.  */
  initial_trampoline = gen_const_mem (BLKmode, symbol);
  set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
  set_mem_size (initial_trampoline, TRAMPOLINE_SIZE);

  return initial_trampoline;
}
2699
2700 /* A and B are either alignments or offsets. Return the minimum alignment
2701 that may be assumed after adding the two together. */
2702
static inline unsigned
min_align (unsigned int a, unsigned int b)
{
  /* The least significant set bit of A|B is the strongest power-of-two
     alignment guaranteed for the sum.  */
  return least_bit_hwi (a | b);
}
2708
2709 /* Return the assembler directive for creating a given kind of integer
2710 object. SIZE is the number of bytes in the object and ALIGNED_P
2711 indicates whether it is known to be aligned. Return NULL if the
2712 assembly dialect has no such directive.
2713
2714 The returned string should be printed at the start of a new line and
2715 be followed immediately by the object's initial value. */
2716
2717 const char *
integer_asm_op(int size,int aligned_p)2718 integer_asm_op (int size, int aligned_p)
2719 {
2720 struct asm_int_op *ops;
2721
2722 if (aligned_p)
2723 ops = &targetm.asm_out.aligned_op;
2724 else
2725 ops = &targetm.asm_out.unaligned_op;
2726
2727 switch (size)
2728 {
2729 case 1:
2730 return targetm.asm_out.byte_op;
2731 case 2:
2732 return ops->hi;
2733 case 4:
2734 return ops->si;
2735 case 8:
2736 return ops->di;
2737 case 16:
2738 return ops->ti;
2739 default:
2740 return NULL;
2741 }
2742 }
2743
2744 /* Use directive OP to assemble an integer object X. Print OP at the
2745 start of the line, followed immediately by the value of X. */
2746
2747 void
assemble_integer_with_op(const char * op,rtx x)2748 assemble_integer_with_op (const char *op, rtx x)
2749 {
2750 fputs (op, asm_out_file);
2751 output_addr_const (asm_out_file, x);
2752 fputc ('\n', asm_out_file);
2753 }
2754
2755 /* The default implementation of the asm_out.integer target hook. */
2756
2757 bool
default_assemble_integer(rtx x ATTRIBUTE_UNUSED,unsigned int size ATTRIBUTE_UNUSED,int aligned_p ATTRIBUTE_UNUSED)2758 default_assemble_integer (rtx x ATTRIBUTE_UNUSED,
2759 unsigned int size ATTRIBUTE_UNUSED,
2760 int aligned_p ATTRIBUTE_UNUSED)
2761 {
2762 const char *op = integer_asm_op (size, aligned_p);
2763 /* Avoid GAS bugs for large values. Specifically negative values whose
2764 absolute value fits in a bfd_vma, but not in a bfd_signed_vma. */
2765 if (size > UNITS_PER_WORD && size > POINTER_SIZE_UNITS)
2766 return false;
2767 return op && (assemble_integer_with_op (op, x), true);
2768 }
2769
2770 /* Assemble the integer constant X into an object of SIZE bytes. ALIGN is
2771 the alignment of the integer in bits. Return 1 if we were able to output
2772 the constant, otherwise 0. We must be able to output the constant,
2773 if FORCE is nonzero. */
2774
bool
assemble_integer (rtx x, unsigned int size, unsigned int align, int force)
{
  int aligned_p;

  /* The object counts as aligned if ALIGN covers its natural alignment
     (capped at BIGGEST_ALIGNMENT).  */
  aligned_p = (align >= MIN (size * BITS_PER_UNIT, BIGGEST_ALIGNMENT));

  /* See if the target hook can handle this kind of object.  */
  if (targetm.asm_out.integer (x, size, aligned_p))
    return true;

  /* If the object is a multi-byte one, try splitting it up.  Split
     it into words it if is multi-word, otherwise split it into bytes.  */
  if (size > 1)
    {
      machine_mode omode, imode;
      unsigned int subalign;
      unsigned int subsize, i;
      enum mode_class mclass;

      subsize = size > UNITS_PER_WORD? UNITS_PER_WORD : 1;
      subalign = MIN (align, subsize * BITS_PER_UNIT);
      /* CONST_FIXED pieces must be split in their own mode class so
	 simplify_subreg can extract them.  */
      if (GET_CODE (x) == CONST_FIXED)
	mclass = GET_MODE_CLASS (GET_MODE (x));
      else
	mclass = MODE_INT;

      omode = mode_for_size (subsize * BITS_PER_UNIT, mclass, 0).require ();
      imode = mode_for_size (size * BITS_PER_UNIT, mclass, 0).require ();

      /* Emit each piece recursively; stop at the first piece that
	 cannot be extracted or emitted.  */
      for (i = 0; i < size; i += subsize)
	{
	  rtx partial = simplify_subreg (omode, x, imode, i);
	  if (!partial || !assemble_integer (partial, subsize, subalign, 0))
	    break;
	}
      if (i == size)
	return true;

      /* If we've printed some of it, but not all of it, there's no going
	 back now.  */
      gcc_assert (!i);
    }

  gcc_assert (!force);

  return false;
}
2823
2824 /* Assemble the floating-point constant D into an object of size MODE. ALIGN
2825 is the alignment of the constant in bits. If REVERSE is true, D is output
2826 in reverse storage order. */
2827
void
assemble_real (REAL_VALUE_TYPE d, scalar_float_mode mode, unsigned int align,
	       bool reverse)
{
  long data[4] = {0, 0, 0, 0};
  int bitsize, nelts, nunits, units_per;
  rtx elt;

  /* This is hairy.  We have a quantity of known size.  real_to_target
     will put it into an array of *host* longs, 32 bits per element
     (even if long is more than 32 bits).  We need to determine the
     number of array elements that are occupied (nelts) and the number
     of *target* min-addressable units that will be occupied in the
     object file (nunits).  We cannot assume that 32 divides the
     mode's bitsize (size * BITS_PER_UNIT) evenly.

     size * BITS_PER_UNIT is used here to make sure that padding bits
     (which might appear at either end of the value; real_to_target
     will include the padding bits in its output array) are included.  */

  nunits = GET_MODE_SIZE (mode);
  bitsize = nunits * BITS_PER_UNIT;
  nelts = CEIL (bitsize, 32);
  units_per = 32 / BITS_PER_UNIT;

  real_to_target (data, &d, mode);

  /* Put out the first word with the specified alignment.  In reverse
     storage order, the words are emitted last-to-first and each word's
     bytes are flipped as well.  */
  if (reverse)
    elt = flip_storage_order (SImode, gen_int_mode (data[nelts - 1], SImode));
  else
    elt = GEN_INT (data[0]);
  assemble_integer (elt, MIN (nunits, units_per), align, 1);
  nunits -= units_per;

  /* Subsequent words need only 32-bit alignment.  */
  align = min_align (align, 32);

  for (int i = 1; i < nelts; i++)
    {
      if (reverse)
	elt = flip_storage_order (SImode,
				  gen_int_mode (data[nelts - 1 - i], SImode));
      else
	elt = GEN_INT (data[i]);
      /* MIN handles a final partial word when 32 does not divide the
	 mode's size evenly.  */
      assemble_integer (elt, MIN (nunits, units_per), align, 1);
      nunits -= units_per;
    }
}
2877
2878 /* Given an expression EXP with a constant value,
2879 reduce it to the sum of an assembler symbol and an integer.
2880 Store them both in the structure *VALUE.
2881 EXP must be reducible. */
2882
/* Decomposed form of a constant address: a base rtx plus a byte offset.  */
struct addr_const {
  rtx base;		/* SYMBOL_REF or LABEL_REF of the base object.  */
  poly_int64 offset;	/* Byte offset from BASE.  */
};
2887
static void
decode_addr_const (tree exp, struct addr_const *value)
{
  tree target = TREE_OPERAND (exp, 0);
  poly_int64 offset = 0;
  rtx x;

  /* Strip component, array and memory references off TARGET,
     accumulating the byte offset each level contributes, until we
     reach the underlying base object.  */
  while (1)
    {
      poly_int64 bytepos;
      if (TREE_CODE (target) == COMPONENT_REF
	  && poly_int_tree_p (byte_position (TREE_OPERAND (target, 1)),
			      &bytepos))
	{
	  offset += bytepos;
	  target = TREE_OPERAND (target, 0);
	}
      else if (TREE_CODE (target) == ARRAY_REF
	       || TREE_CODE (target) == ARRAY_RANGE_REF)
	{
	  /* Truncate big offset.  */
	  offset
	    += (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (target)))
		* wi::to_poly_widest (TREE_OPERAND (target, 1)).force_shwi ());
	  target = TREE_OPERAND (target, 0);
	}
      else if (TREE_CODE (target) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (target, 0)) == ADDR_EXPR)
	{
	  offset += mem_ref_offset (target).force_shwi ();
	  target = TREE_OPERAND (TREE_OPERAND (target, 0), 0);
	}
      else if (TREE_CODE (target) == INDIRECT_REF
	       && TREE_CODE (TREE_OPERAND (target, 0)) == NOP_EXPR
	       && TREE_CODE (TREE_OPERAND (TREE_OPERAND (target, 0), 0))
		  == ADDR_EXPR)
	/* *(type *)&X collapses to X and contributes no offset.  */
	target = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (target, 0), 0), 0);
      else
	break;
    }

  /* Obtain the MEM whose address is the base symbol.  */
  switch (TREE_CODE (target))
    {
    case VAR_DECL:
    case FUNCTION_DECL:
      x = DECL_RTL (target);
      break;

    case LABEL_DECL:
      x = gen_rtx_MEM (FUNCTION_MODE,
		       gen_rtx_LABEL_REF (Pmode, force_label_rtx (target)));
      break;

    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case COMPLEX_CST:
    case CONSTRUCTOR:
    case INTEGER_CST:
      /* Pooled constants get a label of their own; defer its output.  */
      x = output_constant_def (target, 1);
      break;

    case INDIRECT_REF:
      /* This deals with absolute addresses.  */
      offset += tree_to_shwi (TREE_OPERAND (target, 0));
      x = gen_rtx_MEM (QImode,
		       gen_rtx_SYMBOL_REF (Pmode, "origin of addresses"));
      break;

    case COMPOUND_LITERAL_EXPR:
      gcc_assert (COMPOUND_LITERAL_EXPR_DECL (target));
      x = DECL_RTL (COMPOUND_LITERAL_EXPR_DECL (target));
      break;

    default:
      gcc_unreachable ();
    }

  /* All cases above produced a MEM; its address is the base we report.  */
  gcc_assert (MEM_P (x));
  x = XEXP (x, 0);

  value->base = x;
  value->offset = offset;
}
2972
/* Hash table mapping tree constants to their descriptors; shared by all
   functions and garbage-collected.  */
static GTY(()) hash_table<tree_descriptor_hasher> *const_desc_htab;

static void maybe_output_constant_def_contents (struct constant_descriptor_tree *, int);
2976
2977 /* Constant pool accessor function. */
2978
hash_table<tree_descriptor_hasher> *
constant_pool_htab (void)
{
  /* Expose the file-scope tree-constant descriptor table.  */
  return const_desc_htab;
}
2984
2985 /* Compute a hash code for a constant expression. */
2986
hashval_t
tree_descriptor_hasher::hash (constant_descriptor_tree *ptr)
{
  /* The hash was computed once (by const_hash_1) and cached.  */
  return ptr->hash;
}
2992
/* Recursively compute a hash value for constant expression EXP,
   consistent with compare_constant below.  */

static hashval_t
const_hash_1 (const tree exp)
{
  const char *p;
  hashval_t hi;
  int len, i;
  enum tree_code code = TREE_CODE (exp);

  /* Either set P and LEN to the address and len of something to hash and
     exit the switch or return a value.  */

  switch (code)
    {
    case INTEGER_CST:
      /* Hash the raw HOST_WIDE_INT elements of the constant.  */
      p = (char *) &TREE_INT_CST_ELT (exp, 0);
      len = TREE_INT_CST_NUNITS (exp) * sizeof (HOST_WIDE_INT);
      break;

    case REAL_CST:
      return real_hash (TREE_REAL_CST_PTR (exp));

    case FIXED_CST:
      return fixed_hash (TREE_FIXED_CST_PTR (exp));

    case STRING_CST:
      p = TREE_STRING_POINTER (exp);
      len = TREE_STRING_LENGTH (exp);
      break;

    case COMPLEX_CST:
      return (const_hash_1 (TREE_REALPART (exp)) * 5
	      + const_hash_1 (TREE_IMAGPART (exp)));

    case VECTOR_CST:
      {
	/* Hash the encoding parameters and the encoded elements.  */
	hi = 7 + VECTOR_CST_NPATTERNS (exp);
	hi = hi * 563 + VECTOR_CST_NELTS_PER_PATTERN (exp);
	unsigned int count = vector_cst_encoded_nelts (exp);
	for (unsigned int i = 0; i < count; ++i)
	  hi = hi * 563 + const_hash_1 (VECTOR_CST_ENCODED_ELT (exp, i));
	return hi;
      }

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	tree value;

	/* Seed with the object's size, then fold in each element.  */
	hi = 5 + int_size_in_bytes (TREE_TYPE (exp));

	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	  if (value)
	    hi = hi * 603 + const_hash_1 (value);

	return hi;
      }

    case ADDR_EXPR:
    case FDESC_EXPR:
      {
	struct addr_const value;

	decode_addr_const (exp, &value);
	switch (GET_CODE (value.base))
	  {
	  case SYMBOL_REF:
	    /* Don't hash the address of the SYMBOL_REF;
	       only use the offset and the symbol name.  */
	    hi = value.offset.coeffs[0];
	    p = XSTR (value.base, 0);
	    for (i = 0; p[i] != 0; i++)
	      hi = ((hi * 613) + (unsigned) (p[i]));
	    break;

	  case LABEL_REF:
	    hi = (value.offset.coeffs[0]
		  + CODE_LABEL_NUMBER (label_ref_label (value.base)) * 13);
	    break;

	  default:
	    gcc_unreachable ();
	  }
      }
      return hi;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
      return (const_hash_1 (TREE_OPERAND (exp, 0)) * 9
	      + const_hash_1 (TREE_OPERAND (exp, 1)));

    CASE_CONVERT:
      return const_hash_1 (TREE_OPERAND (exp, 0)) * 7 + 2;

    default:
      /* A language specific constant. Just hash the code.  */
      return code;
    }

  /* Compute hashing function.  */
  hi = len;
  for (i = 0; i < len; i++)
    hi = ((hi * 613) + (unsigned) (p[i]));

  return hi;
}
3099
3100 /* Wrapper of compare_constant, for the htab interface. */
3101 bool
equal(constant_descriptor_tree * c1,constant_descriptor_tree * c2)3102 tree_descriptor_hasher::equal (constant_descriptor_tree *c1,
3103 constant_descriptor_tree *c2)
3104 {
3105 if (c1->hash != c2->hash)
3106 return 0;
3107 return compare_constant (c1->value, c2->value);
3108 }
3109
3110 /* Compare t1 and t2, and return 1 only if they are known to result in
3111 the same bit pattern on output. */
3112
static int
compare_constant (const tree t1, const tree t2)
{
  enum tree_code typecode;

  /* Two NULLs compare equal; a NULL never equals a non-NULL.  */
  if (t1 == NULL_TREE)
    return t2 == NULL_TREE;
  if (t2 == NULL_TREE)
    return 0;

  if (TREE_CODE (t1) != TREE_CODE (t2))
    return 0;

  switch (TREE_CODE (t1))
    {
    case INTEGER_CST:
      /* Integer constants are the same only if the same width of type.  */
      if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
	return 0;
      if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
	return 0;
      return tree_int_cst_equal (t1, t2);

    case REAL_CST:
      /* Real constants are the same only if the same width of type.  In
	 addition to the same width, we need to check whether the modes are the
	 same.  There might be two floating point modes that are the same size
	 but have different representations, such as the PowerPC that has 2
	 different 128-bit floating point types (IBM extended double and IEEE
	 128-bit floating point).  */
      if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
	return 0;
      if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
	return 0;
      return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));

    case FIXED_CST:
      /* Fixed constants are the same only if the same width of type.  */
      if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
	return 0;

      return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));

    case STRING_CST:
      /* Strings must have the same mode, length and contents.  */
      if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
	return 0;

      return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
	      && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
			   TREE_STRING_LENGTH (t1)));

    case COMPLEX_CST:
      return (compare_constant (TREE_REALPART (t1), TREE_REALPART (t2))
	      && compare_constant (TREE_IMAGPART (t1), TREE_IMAGPART (t2)));

    case VECTOR_CST:
      {
	/* Vectors are equal when their encodings (patterns, elements per
	   pattern, encoded elements) are pairwise equal.  */
	if (VECTOR_CST_NPATTERNS (t1)
	    != VECTOR_CST_NPATTERNS (t2))
	  return 0;

	if (VECTOR_CST_NELTS_PER_PATTERN (t1)
	    != VECTOR_CST_NELTS_PER_PATTERN (t2))
	  return 0;

	unsigned int count = vector_cst_encoded_nelts (t1);
	for (unsigned int i = 0; i < count; ++i)
	  if (!compare_constant (VECTOR_CST_ENCODED_ELT (t1, i),
				 VECTOR_CST_ENCODED_ELT (t2, i)))
	    return 0;

	return 1;
      }

    case CONSTRUCTOR:
      {
	vec<constructor_elt, va_gc> *v1, *v2;
	unsigned HOST_WIDE_INT idx;

	typecode = TREE_CODE (TREE_TYPE (t1));
	if (typecode != TREE_CODE (TREE_TYPE (t2)))
	  return 0;

	if (typecode == ARRAY_TYPE)
	  {
	    HOST_WIDE_INT size_1 = int_size_in_bytes (TREE_TYPE (t1));
	    /* For arrays, check that mode, size and storage order match.  */
	    if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
		|| size_1 == -1
		|| size_1 != int_size_in_bytes (TREE_TYPE (t2))
		|| TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t1))
		   != TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t2)))
	      return 0;
	  }
	else
	  {
	    /* For record and union constructors, require exact type
	       equality.  */
	    if (TREE_TYPE (t1) != TREE_TYPE (t2))
	      return 0;
	  }

	v1 = CONSTRUCTOR_ELTS (t1);
	v2 = CONSTRUCTOR_ELTS (t2);
	if (vec_safe_length (v1) != vec_safe_length (v2))
	  return 0;

	for (idx = 0; idx < vec_safe_length (v1); ++idx)
	  {
	    constructor_elt *c1 = &(*v1)[idx];
	    constructor_elt *c2 = &(*v2)[idx];

	    /* Check that each value is the same...  */
	    if (!compare_constant (c1->value, c2->value))
	      return 0;
	    /* ... and that they apply to the same fields!  */
	    if (typecode == ARRAY_TYPE)
	      {
		if (!compare_constant (c1->index, c2->index))
		  return 0;
	      }
	    else
	      {
		if (c1->index != c2->index)
		  return 0;
	      }
	  }

	return 1;
      }

    case ADDR_EXPR:
    case FDESC_EXPR:
      {
	/* Compare decomposed base-plus-offset forms of the addresses.  */
	struct addr_const value1, value2;
	enum rtx_code code;
	int ret;

	decode_addr_const (t1, &value1);
	decode_addr_const (t2, &value2);

	if (maybe_ne (value1.offset, value2.offset))
	  return 0;

	code = GET_CODE (value1.base);
	if (code != GET_CODE (value2.base))
	  return 0;

	switch (code)
	  {
	  case SYMBOL_REF:
	    ret = (strcmp (XSTR (value1.base, 0), XSTR (value2.base, 0)) == 0);
	    break;

	  case LABEL_REF:
	    ret = (CODE_LABEL_NUMBER (label_ref_label (value1.base))
		   == CODE_LABEL_NUMBER (label_ref_label (value2.base)));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	return ret;
      }

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
    case RANGE_EXPR:
      return (compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0))
	      && compare_constant (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)));

    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      return compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

    default:
      /* Unknown codes are conservatively considered unequal.  */
      return 0;
    }

  gcc_unreachable ();
}
3295
3296 /* Return the section into which constant EXP should be placed. */
3297
3298 static section *
get_constant_section(tree exp,unsigned int align)3299 get_constant_section (tree exp, unsigned int align)
3300 {
3301 return targetm.asm_out.select_section (exp,
3302 compute_reloc_for_constant (exp),
3303 align);
3304 }
3305
3306 /* Return the size of constant EXP in bytes. */
3307
3308 static HOST_WIDE_INT
get_constant_size(tree exp)3309 get_constant_size (tree exp)
3310 {
3311 HOST_WIDE_INT size;
3312
3313 size = int_size_in_bytes (TREE_TYPE (exp));
3314 if (TREE_CODE (exp) == STRING_CST)
3315 size = MAX (TREE_STRING_LENGTH (exp), size);
3316 return size;
3317 }
3318
3319 /* Subroutine of output_constant_def:
3320 No constant equal to EXP is known to have been output.
3321 Make a constant descriptor to enter EXP in the hash table.
3322 Assign the label number and construct RTL to refer to the
3323 constant's location in memory.
3324 Caller is responsible for updating the hash table. */
3325
static struct constant_descriptor_tree *
build_constant_desc (tree exp)
{
  struct constant_descriptor_tree *desc;
  rtx symbol, rtl;
  char label[256];
  int labelno;
  tree decl;

  desc = ggc_alloc<constant_descriptor_tree> ();
  desc->value = exp;

  /* Create a string containing the label name, in LABEL.  */
  labelno = const_labelno++;
  ASM_GENERATE_INTERNAL_LABEL (label, "LC", labelno);

  /* Construct the VAR_DECL associated with the constant.  */
  decl = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (label),
		     TREE_TYPE (exp));
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_READONLY (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_ADDRESSABLE (decl) = 1;
  /* We don't set the RTL yet as this would cause varpool to assume that the
     variable is referenced.  Moreover, it would just be dropped in LTO mode.
     Instead we set the flag that will be recognized in make_decl_rtl.  */
  DECL_IN_CONSTANT_POOL (decl) = 1;
  DECL_INITIAL (decl) = desc->value;
  /* ??? targetm.constant_alignment hasn't been updated for vector types on
     most architectures so use DATA_ALIGNMENT as well, except for strings.  */
  if (TREE_CODE (exp) == STRING_CST)
    SET_DECL_ALIGN (decl, targetm.constant_alignment (exp, DECL_ALIGN (decl)));
  else
    align_variable (decl, 0);

  /* Now construct the SYMBOL_REF and the MEM.  */
  if (use_object_blocks_p ())
    {
      /* Constant-pool decls use their own alignment rather than the
	 symtab definition alignment.  */
      int align = (TREE_CODE (decl) == CONST_DECL
		   || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
		   ? DECL_ALIGN (decl)
		   : symtab_node::get (decl)->definition_alignment ());
      section *sect = get_constant_section (exp, align);
      symbol = create_block_symbol (ggc_strdup (label),
				    get_block_for_section (sect), -1);
    }
  else
    symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
  SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
  SET_SYMBOL_REF_DECL (symbol, decl);
  TREE_CONSTANT_POOL_ADDRESS_P (symbol) = 1;

  rtl = gen_const_mem (TYPE_MODE (TREE_TYPE (exp)), symbol);
  set_mem_attributes (rtl, exp, 1);
  set_mem_alias_set (rtl, 0);

  /* Putting EXP into the literal pool might have imposed a different
     alignment which should be visible in the RTX as well.  */
  set_mem_align (rtl, DECL_ALIGN (decl));

  /* We cannot share RTX'es in pool entries.
     Mark this piece of RTL as required for unsharing.  */
  RTX_FLAG (rtl, used) = 1;

  /* Set flags or add text to the name to record information, such as
     that it is a local symbol.  If the name is changed, the macro
     ASM_OUTPUT_LABELREF will have to know how to strip this
     information.  This call might invalidate our local variable
     SYMBOL; we can't use it afterward.  */
  targetm.encode_section_info (exp, rtl, true);

  desc->rtl = rtl;

  return desc;
}
3402
3403 /* Return an rtx representing a reference to constant data in memory
3404 for the constant expression EXP.
3405
3406 If assembler code for such a constant has already been output,
3407 return an rtx to refer to it.
3408 Otherwise, output such a constant in memory
3409 and generate an rtx for it.
3410
3411 If DEFER is nonzero, this constant can be deferred and output only
3412 if referenced in the function after all optimizations.
3413
3414 `const_desc_table' records which constants already have label strings. */
3415
rtx
output_constant_def (tree exp, int defer)
{
  struct constant_descriptor_tree *desc;
  struct constant_descriptor_tree key;

  /* Look up EXP in the table of constant descriptors.  If we didn't find
     it, create a new one.  */
  key.value = exp;
  key.hash = const_hash_1 (exp);
  constant_descriptor_tree **loc
    = const_desc_htab->find_slot_with_hash (&key, key.hash, INSERT);

  desc = *loc;
  if (desc == 0)
    {
      desc = build_constant_desc (exp);
      desc->hash = key.hash;
      *loc = desc;
    }

  /* Emit the constant now, or record it for later, as DEFER allows.  */
  maybe_output_constant_def_contents (desc, defer);
  return desc->rtl;
}
3440
3441 /* Subroutine of output_constant_def: Decide whether or not we need to
3442 output the constant DESC now, and if so, do it. */
static void
maybe_output_constant_def_contents (struct constant_descriptor_tree *desc,
				    int defer)
{
  rtx symbol = XEXP (desc->rtl, 0);
  tree exp = desc->value;

  /* With -fsyntax-only no assembly is ever produced.  */
  if (flag_syntax_only)
    return;

  if (TREE_ASM_WRITTEN (exp))
    /* Already output; don't do it again.  */
    return;

  /* We can always defer constants as long as the context allows
     doing so.  */
  if (defer)
    {
      /* Increment n_deferred_constants if it exists.  It needs to be at
	 least as large as the number of constants actually referred to
	 by the function.  If it's too small we'll stop looking too early
	 and fail to emit constants; if it's too large we'll only look
	 through the entire function when we could have stopped earlier.  */
      if (cfun)
	n_deferred_constants++;
      return;
    }

  output_constant_def_contents (symbol);
}
3473
3474 /* Subroutine of output_constant_def_contents. Output the definition
3475 of constant EXP, which is pointed to by label LABEL. ALIGN is the
3476 constant's alignment in bits. */
3477
3478 static void
assemble_constant_contents(tree exp,const char * label,unsigned int align)3479 assemble_constant_contents (tree exp, const char *label, unsigned int align)
3480 {
3481 HOST_WIDE_INT size;
3482
3483 size = get_constant_size (exp);
3484
3485 /* Do any machine/system dependent processing of the constant. */
3486 targetm.asm_out.declare_constant_name (asm_out_file, label, exp, size);
3487
3488 /* Output the value of EXP. */
3489 output_constant (exp, size, align, false);
3490
3491 targetm.asm_out.decl_end ();
3492 }
3493
3494 /* We must output the constant data referred to by SYMBOL; do so. */
3495
static void
output_constant_def_contents (rtx symbol)
{
  tree decl = SYMBOL_REF_DECL (symbol);
  tree exp = DECL_INITIAL (decl);
  bool asan_protected = false;

  /* Make sure any other constants whose addresses appear in EXP
     are assigned label numbers.  */
  output_addressed_constants (exp);

  /* We are no longer deferring this constant.  */
  TREE_ASM_WRITTEN (decl) = TREE_ASM_WRITTEN (exp) = 1;

  /* Under ASan, protected string constants get a red zone after
     their data; widen the alignment accordingly.  */
  if ((flag_sanitize & SANITIZE_ADDRESS)
      && TREE_CODE (exp) == STRING_CST
      && asan_protect_global (exp))
    {
      asan_protected = true;
      SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
				 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
    }

  /* If the constant is part of an object block, make sure that the
     decl has been positioned within its block, but do not write out
     its definition yet.  output_object_blocks will do that later.  */
  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
    place_block_symbol (symbol);
  else
    {
      /* Constant-pool decls use their own alignment rather than the
	 symtab definition alignment.  */
      int align = (TREE_CODE (decl) == CONST_DECL
		   || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
		   ? DECL_ALIGN (decl)
		   : symtab_node::get (decl)->definition_alignment ());
      switch_to_section (get_constant_section (exp, align));
      if (align > BITS_PER_UNIT)
	ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
      assemble_constant_contents (exp, XSTR (symbol, 0), align);
      if (asan_protected)
	{
	  /* Pad with zeros up to the ASan red zone boundary.  */
	  HOST_WIDE_INT size = get_constant_size (exp);
	  assemble_zeros (asan_red_zone_size (size));
	}
    }
}
3541
3542 /* Look up EXP in the table of constant descriptors. Return the rtl
3543 if it has been emitted, else null. */
3544
3545 rtx
lookup_constant_def(tree exp)3546 lookup_constant_def (tree exp)
3547 {
3548 struct constant_descriptor_tree key;
3549
3550 key.value = exp;
3551 key.hash = const_hash_1 (exp);
3552 constant_descriptor_tree *desc
3553 = const_desc_htab->find_with_hash (&key, key.hash);
3554
3555 return (desc ? desc->rtl : NULL_RTX);
3556 }
3557
3558 /* Return a tree representing a reference to constant data in memory
3559 for the constant expression EXP.
3560
3561 This is the counterpart of output_constant_def at the Tree level. */
3562
tree
tree_output_constant_def (tree exp)
{
  struct constant_descriptor_tree *desc, key;
  tree decl;

  /* Look up EXP in the table of constant descriptors.  If we didn't find
     it, create a new one.  */
  key.value = exp;
  key.hash = const_hash_1 (exp);
  constant_descriptor_tree **loc
    = const_desc_htab->find_slot_with_hash (&key, key.hash, INSERT);

  desc = *loc;
  if (desc == 0)
    {
      desc = build_constant_desc (exp);
      desc->hash = key.hash;
      *loc = desc;
    }

  /* Hand the underlying VAR_DECL to varpool so it is emitted.  */
  decl = SYMBOL_REF_DECL (XEXP (desc->rtl, 0));
  varpool_node::finalize_decl (decl);
  return decl;
}
3588
/* Descriptor for one RTX constant-pool entry.  */
struct GTY((chain_next ("%h.next"), for_user)) constant_descriptor_rtx {
  struct constant_descriptor_rtx *next;	/* Next entry, in pool-offset order.  */
  rtx mem;		/* MEM referring to this pool entry.  */
  rtx sym;		/* SYMBOL_REF labelling the entry ("LC" label).  */
  rtx constant;		/* The constant rtx itself.  */
  HOST_WIDE_INT offset;	/* Byte offset within the pool.  */
  hashval_t hash;	/* Cached hash of CONSTANT.  */
  fixed_size_mode mode;	/* Mode of the constant.  */
  unsigned int align;	/* Required alignment, in bits.  */
  int labelno;		/* Internal label number used for SYM.  */
  int mark;		/* Nonzero once output (see get_pool_constant_mark).  */
};
3601
/* Hasher for the RTX constant-pool hash tables (const_rtx_htab).  */
struct const_rtx_desc_hasher : ggc_ptr_hash<constant_descriptor_rtx>
{
  static hashval_t hash (constant_descriptor_rtx *);
  static bool equal (constant_descriptor_rtx *, constant_descriptor_rtx *);
};
3607
3608 /* Used in the hash tables to avoid outputting the same constant
3609 twice. Unlike 'struct constant_descriptor_tree', RTX constants
3610 are output once per function, not once per file. */
3611 /* ??? Only a few targets need per-function constant pools. Most
3612 can use one per-file pool. Should add a targetm bit to tell the
3613 difference. */
3614
struct GTY(()) rtx_constant_pool {
  /* Pointers to first and last constant in pool, as ordered by offset.  */
  struct constant_descriptor_rtx *first;
  struct constant_descriptor_rtx *last;

  /* Hash facility for making memory-constants from constant rtl-expressions.
     It is used on RISC machines where immediate integer arguments and
     constant addresses are restricted so that such constants must be stored
     in memory.  */
  hash_table<const_rtx_desc_hasher> *const_rtx_htab;

  /* Current offset in constant pool (does not include any
     machine-specific header).  Measured in bytes; advanced and aligned
     by force_const_mem as entries are added.  */
  HOST_WIDE_INT offset;
};
3630
3631 /* Hash and compare functions for const_rtx_htab. */
3632
hashval_t
const_rtx_desc_hasher::hash (constant_descriptor_rtx *desc)
{
  /* The hash was computed once (by const_rtx_hash) and cached.  */
  return desc->hash;
}
3638
3639 bool
equal(constant_descriptor_rtx * x,constant_descriptor_rtx * y)3640 const_rtx_desc_hasher::equal (constant_descriptor_rtx *x,
3641 constant_descriptor_rtx *y)
3642 {
3643 if (x->mode != y->mode)
3644 return 0;
3645 return rtx_equal_p (x->constant, y->constant);
3646 }
3647
3648 /* Hash one component of a constant. */
3649
static hashval_t
const_rtx_hash_1 (const_rtx x)
{
  unsigned HOST_WIDE_INT hwi;
  machine_mode mode;
  enum rtx_code code;
  hashval_t h;
  int i;

  /* Seed the hash with the rtx code and mode.  */
  code = GET_CODE (x);
  mode = GET_MODE (x);
  h = (hashval_t) code * 1048573 + mode;

  switch (code)
    {
    case CONST_INT:
      hwi = INTVAL (x);

    fold_hwi:
      /* Fold a HOST_WIDE_INT into the hashval_t-sized accumulator,
	 XORing in one hashval_t-wide chunk at a time in case
	 HOST_WIDE_INT is wider than hashval_t.  */
      {
	int shift = sizeof (hashval_t) * CHAR_BIT;
	const int n = sizeof (HOST_WIDE_INT) / sizeof (hashval_t);

	h ^= (hashval_t) hwi;
	for (i = 1; i < n; ++i)
	  {
	    hwi >>= shift;
	    h ^= (hashval_t) hwi;
	  }
      }
      break;

    case CONST_WIDE_INT:
      /* XOR all elements together, then fold as a single HWI.  */
      hwi = 0;
      {
	for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
	  hwi ^= CONST_WIDE_INT_ELT (x, i);
	goto fold_hwi;
      }

    case CONST_DOUBLE:
      if (TARGET_SUPPORTS_WIDE_INT == 0 && mode == VOIDmode)
	{
	  /* VOIDmode CONST_DOUBLE holds a double-word integer.  */
	  hwi = CONST_DOUBLE_LOW (x) ^ CONST_DOUBLE_HIGH (x);
	  goto fold_hwi;
	}
      else
	h ^= real_hash (CONST_DOUBLE_REAL_VALUE (x));
      break;

    case CONST_FIXED:
      h ^= fixed_hash (CONST_FIXED_VALUE (x));
      break;

    case SYMBOL_REF:
      h ^= htab_hash_string (XSTR (x, 0));
      break;

    case LABEL_REF:
      h = h * 251 + CODE_LABEL_NUMBER (label_ref_label (x));
      break;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      h = h * 251 + XINT (x, 1);
      break;

    default:
      /* Other codes contribute only their code and mode (hashed above);
	 their operands are visited separately by const_rtx_hash.  */
      break;
    }

  return h;
}
3723
3724 /* Compute a hash value for X, which should be a constant. */
3725
3726 static hashval_t
const_rtx_hash(rtx x)3727 const_rtx_hash (rtx x)
3728 {
3729 hashval_t h = 0;
3730 subrtx_iterator::array_type array;
3731 FOR_EACH_SUBRTX (iter, array, x, ALL)
3732 h = h * 509 + const_rtx_hash_1 (*iter);
3733 return h;
3734 }
3735
3736
3737 /* Create and return a new rtx constant pool. */
3738
3739 static struct rtx_constant_pool *
create_constant_pool(void)3740 create_constant_pool (void)
3741 {
3742 struct rtx_constant_pool *pool;
3743
3744 pool = ggc_alloc<rtx_constant_pool> ();
3745 pool->const_rtx_htab = hash_table<const_rtx_desc_hasher>::create_ggc (31);
3746 pool->first = NULL;
3747 pool->last = NULL;
3748 pool->offset = 0;
3749 return pool;
3750 }
3751
3752 /* Initialize constant pool hashing for a new function. */
3753
3754 void
init_varasm_status(void)3755 init_varasm_status (void)
3756 {
3757 crtl->varasm.pool = create_constant_pool ();
3758 crtl->varasm.deferred_constants = 0;
3759 }
3760
3761 /* Given a MINUS expression, simplify it if both sides
3762 include the same symbol. */
3763
3764 rtx
simplify_subtraction(rtx x)3765 simplify_subtraction (rtx x)
3766 {
3767 rtx r = simplify_rtx (x);
3768 return r ? r : x;
3769 }
3770
3771 /* Given a constant rtx X, make (or find) a memory constant for its value
3772 and return a MEM rtx to refer to it in memory. IN_MODE is the mode
3773 of X. */
3774
3775 rtx
force_const_mem(machine_mode in_mode,rtx x)3776 force_const_mem (machine_mode in_mode, rtx x)
3777 {
3778 struct constant_descriptor_rtx *desc, tmp;
3779 struct rtx_constant_pool *pool;
3780 char label[256];
3781 rtx def, symbol;
3782 hashval_t hash;
3783 unsigned int align;
3784 constant_descriptor_rtx **slot;
3785 fixed_size_mode mode;
3786
3787 /* We can't force variable-sized objects to memory. */
3788 if (!is_a <fixed_size_mode> (in_mode, &mode))
3789 return NULL_RTX;
3790
3791 /* If we're not allowed to drop X into the constant pool, don't. */
3792 if (targetm.cannot_force_const_mem (mode, x))
3793 return NULL_RTX;
3794
3795 /* Record that this function has used a constant pool entry. */
3796 crtl->uses_const_pool = 1;
3797
3798 /* Decide which pool to use. */
3799 pool = (targetm.use_blocks_for_constant_p (mode, x)
3800 ? shared_constant_pool
3801 : crtl->varasm.pool);
3802
3803 /* Lookup the value in the hashtable. */
3804 tmp.constant = x;
3805 tmp.mode = mode;
3806 hash = const_rtx_hash (x);
3807 slot = pool->const_rtx_htab->find_slot_with_hash (&tmp, hash, INSERT);
3808 desc = *slot;
3809
3810 /* If the constant was already present, return its memory. */
3811 if (desc)
3812 return copy_rtx (desc->mem);
3813
3814 /* Otherwise, create a new descriptor. */
3815 desc = ggc_alloc<constant_descriptor_rtx> ();
3816 *slot = desc;
3817
3818 /* Align the location counter as required by EXP's data type. */
3819 machine_mode align_mode = (mode == VOIDmode ? word_mode : mode);
3820 align = targetm.static_rtx_alignment (align_mode);
3821
3822 pool->offset += (align / BITS_PER_UNIT) - 1;
3823 pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);
3824
3825 desc->next = NULL;
3826 desc->constant = copy_rtx (tmp.constant);
3827 desc->offset = pool->offset;
3828 desc->hash = hash;
3829 desc->mode = mode;
3830 desc->align = align;
3831 desc->labelno = const_labelno;
3832 desc->mark = 0;
3833
3834 pool->offset += GET_MODE_SIZE (mode);
3835 if (pool->last)
3836 pool->last->next = desc;
3837 else
3838 pool->first = pool->last = desc;
3839 pool->last = desc;
3840
3841 /* Create a string containing the label name, in LABEL. */
3842 ASM_GENERATE_INTERNAL_LABEL (label, "LC", const_labelno);
3843 ++const_labelno;
3844
3845 /* Construct the SYMBOL_REF. Make sure to mark it as belonging to
3846 the constants pool. */
3847 if (use_object_blocks_p () && targetm.use_blocks_for_constant_p (mode, x))
3848 {
3849 section *sect = targetm.asm_out.select_rtx_section (mode, x, align);
3850 symbol = create_block_symbol (ggc_strdup (label),
3851 get_block_for_section (sect), -1);
3852 }
3853 else
3854 symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
3855 desc->sym = symbol;
3856 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
3857 CONSTANT_POOL_ADDRESS_P (symbol) = 1;
3858 SET_SYMBOL_REF_CONSTANT (symbol, desc);
3859
3860 /* Construct the MEM. */
3861 desc->mem = def = gen_const_mem (mode, symbol);
3862 set_mem_align (def, align);
3863
3864 /* If we're dropping a label to the constant pool, make sure we
3865 don't delete it. */
3866 if (GET_CODE (x) == LABEL_REF)
3867 LABEL_PRESERVE_P (XEXP (x, 0)) = 1;
3868
3869 return copy_rtx (def);
3870 }
3871
3872 /* Given a constant pool SYMBOL_REF, return the corresponding constant. */
3873
3874 rtx
get_pool_constant(const_rtx addr)3875 get_pool_constant (const_rtx addr)
3876 {
3877 return SYMBOL_REF_CONSTANT (addr)->constant;
3878 }
3879
3880 /* Given a constant pool SYMBOL_REF, return the corresponding constant
3881 and whether it has been output or not. */
3882
3883 rtx
get_pool_constant_mark(rtx addr,bool * pmarked)3884 get_pool_constant_mark (rtx addr, bool *pmarked)
3885 {
3886 struct constant_descriptor_rtx *desc;
3887
3888 desc = SYMBOL_REF_CONSTANT (addr);
3889 *pmarked = (desc->mark != 0);
3890 return desc->constant;
3891 }
3892
3893 /* Similar, return the mode. */
3894
3895 fixed_size_mode
get_pool_mode(const_rtx addr)3896 get_pool_mode (const_rtx addr)
3897 {
3898 return SYMBOL_REF_CONSTANT (addr)->mode;
3899 }
3900
3901 /* Return TRUE if and only if the constant pool has no entries. Note
3902 that even entries we might end up choosing not to emit are counted
3903 here, so there is the potential for missed optimizations. */
3904
3905 bool
constant_pool_empty_p(void)3906 constant_pool_empty_p (void)
3907 {
3908 return crtl->varasm.pool->first == NULL;
3909 }
3910
3911 /* Worker function for output_constant_pool_1. Emit assembly for X
3912 in MODE with known alignment ALIGN. */
3913
3914 static void
output_constant_pool_2(fixed_size_mode mode,rtx x,unsigned int align)3915 output_constant_pool_2 (fixed_size_mode mode, rtx x, unsigned int align)
3916 {
3917 switch (GET_MODE_CLASS (mode))
3918 {
3919 case MODE_FLOAT:
3920 case MODE_DECIMAL_FLOAT:
3921 {
3922 gcc_assert (CONST_DOUBLE_AS_FLOAT_P (x));
3923 assemble_real (*CONST_DOUBLE_REAL_VALUE (x),
3924 as_a <scalar_float_mode> (mode), align, false);
3925 break;
3926 }
3927
3928 case MODE_INT:
3929 case MODE_PARTIAL_INT:
3930 case MODE_FRACT:
3931 case MODE_UFRACT:
3932 case MODE_ACCUM:
3933 case MODE_UACCUM:
3934 case MODE_POINTER_BOUNDS:
3935 assemble_integer (x, GET_MODE_SIZE (mode), align, 1);
3936 break;
3937
3938 case MODE_VECTOR_BOOL:
3939 {
3940 gcc_assert (GET_CODE (x) == CONST_VECTOR);
3941
3942 /* Pick the smallest integer mode that contains at least one
3943 whole element. Often this is byte_mode and contains more
3944 than one element. */
3945 unsigned int nelts = GET_MODE_NUNITS (mode);
3946 unsigned int elt_bits = GET_MODE_BITSIZE (mode) / nelts;
3947 unsigned int int_bits = MAX (elt_bits, BITS_PER_UNIT);
3948 scalar_int_mode int_mode = int_mode_for_size (int_bits, 0).require ();
3949
3950 /* Build the constant up one integer at a time. */
3951 unsigned int elts_per_int = int_bits / elt_bits;
3952 for (unsigned int i = 0; i < nelts; i += elts_per_int)
3953 {
3954 unsigned HOST_WIDE_INT value = 0;
3955 unsigned int limit = MIN (nelts - i, elts_per_int);
3956 for (unsigned int j = 0; j < limit; ++j)
3957 if (INTVAL (CONST_VECTOR_ELT (x, i + j)) != 0)
3958 value |= 1 << (j * elt_bits);
3959 output_constant_pool_2 (int_mode, gen_int_mode (value, int_mode),
3960 i != 0 ? MIN (align, int_bits) : align);
3961 }
3962 break;
3963 }
3964 case MODE_VECTOR_FLOAT:
3965 case MODE_VECTOR_INT:
3966 case MODE_VECTOR_FRACT:
3967 case MODE_VECTOR_UFRACT:
3968 case MODE_VECTOR_ACCUM:
3969 case MODE_VECTOR_UACCUM:
3970 {
3971 int i, units;
3972 scalar_mode submode = GET_MODE_INNER (mode);
3973 unsigned int subalign = MIN (align, GET_MODE_BITSIZE (submode));
3974
3975 gcc_assert (GET_CODE (x) == CONST_VECTOR);
3976 units = GET_MODE_NUNITS (mode);
3977
3978 for (i = 0; i < units; i++)
3979 {
3980 rtx elt = CONST_VECTOR_ELT (x, i);
3981 output_constant_pool_2 (submode, elt, i ? subalign : align);
3982 }
3983 }
3984 break;
3985
3986 default:
3987 gcc_unreachable ();
3988 }
3989 }
3990
/* Worker function for output_constant_pool.  Emit constant DESC,
   giving it ALIGN bits of alignment.  Assumes the caller has already
   switched to the appropriate section.  */

static void
output_constant_pool_1 (struct constant_descriptor_rtx *desc,
			unsigned int align)
{
  rtx x, tmp;

  x = desc->constant;

  /* See if X is a LABEL_REF (or a CONST referring to a LABEL_REF)
     whose CODE_LABEL has been deleted.  This can occur if a jump table
     is eliminated by optimization.  If so, write a constant of zero
     instead.  Note that this can also happen by turning the
     CODE_LABEL into a NOTE.  */
  /* ??? This seems completely and utterly wrong.  Certainly it's
     not true for NOTE_INSN_DELETED_LABEL, but I disbelieve proper
     functioning even with rtx_insn::deleted and friends.  */

  tmp = x;
  switch (GET_CODE (tmp))
    {
    case CONST:
      /* Only look through CONST (PLUS (LABEL_REF, ...)); any other
	 CONST shape cannot reference a deleted label.  */
      if (GET_CODE (XEXP (tmp, 0)) != PLUS
	  || GET_CODE (XEXP (XEXP (tmp, 0), 0)) != LABEL_REF)
	break;
      tmp = XEXP (XEXP (tmp, 0), 0);
      /* FALLTHRU  */

    case LABEL_REF:
      {
	/* Sanity-check that the referenced label still exists; see the
	   ??? comment above for doubts about this strategy.  */
	rtx_insn *insn = label_ref_label (tmp);
	gcc_assert (!insn->deleted ());
	gcc_assert (!NOTE_P (insn)
		    || NOTE_KIND (insn) != NOTE_INSN_DELETED);
	break;
      }

    default:
      break;
    }

#ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
  /* Targets that define this macro may emit the entry themselves and
     jump to the DONE label below, skipping the generic emission.  */
  ASM_OUTPUT_SPECIAL_POOL_ENTRY (asm_out_file, x, desc->mode,
				 align, desc->labelno, done);
#endif

  assemble_align (align);

  /* Output the label.  */
  targetm.asm_out.internal_label (asm_out_file, "LC", desc->labelno);

  /* Output the data.
     Pass the entry's recorded alignment (DESC->align) rather than the
     possibly larger ALIGN so that string/constant emission does not
     generate unnecessary fix-up table entries.  */
  output_constant_pool_2 (desc->mode, x, desc->align);

  /* Make sure all constants in SECTION_MERGE and not SECTION_STRINGS
     sections have proper size.  */
  if (align > GET_MODE_BITSIZE (desc->mode)
      && in_section
      && (in_section->common.flags & SECTION_MERGE))
    assemble_align (align);

#ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
 done:
#endif
  return;
}
4063
4064 /* Recompute the offsets of entries in POOL, and the overall size of
4065 POOL. Do this after calling mark_constant_pool to ensure that we
4066 are computing the offset values for the pool which we will actually
4067 emit. */
4068
4069 static void
recompute_pool_offsets(struct rtx_constant_pool * pool)4070 recompute_pool_offsets (struct rtx_constant_pool *pool)
4071 {
4072 struct constant_descriptor_rtx *desc;
4073 pool->offset = 0;
4074
4075 for (desc = pool->first; desc ; desc = desc->next)
4076 if (desc->mark)
4077 {
4078 /* Recalculate offset. */
4079 unsigned int align = desc->align;
4080 pool->offset += (align / BITS_PER_UNIT) - 1;
4081 pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);
4082 desc->offset = pool->offset;
4083 pool->offset += GET_MODE_SIZE (desc->mode);
4084 }
4085 }
4086
4087 /* Mark all constants that are referenced by SYMBOL_REFs in X.
4088 Emit referenced deferred strings. */
4089
4090 static void
mark_constants_in_pattern(rtx insn)4091 mark_constants_in_pattern (rtx insn)
4092 {
4093 subrtx_iterator::array_type array;
4094 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
4095 {
4096 const_rtx x = *iter;
4097 if (GET_CODE (x) == SYMBOL_REF)
4098 {
4099 if (CONSTANT_POOL_ADDRESS_P (x))
4100 {
4101 struct constant_descriptor_rtx *desc = SYMBOL_REF_CONSTANT (x);
4102 if (desc->mark == 0)
4103 {
4104 desc->mark = 1;
4105 iter.substitute (desc->constant);
4106 }
4107 }
4108 else if (TREE_CONSTANT_POOL_ADDRESS_P (x))
4109 {
4110 tree decl = SYMBOL_REF_DECL (x);
4111 if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
4112 {
4113 n_deferred_constants--;
4114 output_constant_def_contents (CONST_CAST_RTX (x));
4115 }
4116 }
4117 }
4118 }
4119 }
4120
4121 /* Look through appropriate parts of INSN, marking all entries in the
4122 constant pool which are actually being used. Entries that are only
4123 referenced by other constants are also marked as used. Emit
4124 deferred strings that are used. */
4125
4126 static void
mark_constants(rtx_insn * insn)4127 mark_constants (rtx_insn *insn)
4128 {
4129 if (!INSN_P (insn))
4130 return;
4131
4132 /* Insns may appear inside a SEQUENCE. Only check the patterns of
4133 insns, not any notes that may be attached. We don't want to mark
4134 a constant just because it happens to appear in a REG_EQUIV note. */
4135 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
4136 {
4137 int i, n = seq->len ();
4138 for (i = 0; i < n; ++i)
4139 {
4140 rtx subinsn = seq->element (i);
4141 if (INSN_P (subinsn))
4142 mark_constants_in_pattern (subinsn);
4143 }
4144 }
4145 else
4146 mark_constants_in_pattern (insn);
4147 }
4148
4149 /* Look through the instructions for this function, and mark all the
4150 entries in POOL which are actually being used. Emit deferred constants
4151 which have indeed been used. */
4152
4153 static void
mark_constant_pool(void)4154 mark_constant_pool (void)
4155 {
4156 rtx_insn *insn;
4157
4158 if (!crtl->uses_const_pool && n_deferred_constants == 0)
4159 return;
4160
4161 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4162 mark_constants (insn);
4163 }
4164
4165 /* Write all the constants in POOL. */
4166
4167 static void
output_constant_pool_contents(struct rtx_constant_pool * pool)4168 output_constant_pool_contents (struct rtx_constant_pool *pool)
4169 {
4170 struct constant_descriptor_rtx *desc;
4171
4172 for (desc = pool->first; desc ; desc = desc->next)
4173 if (desc->mark)
4174 {
4175 /* If the constant is part of an object_block, make sure that
4176 the constant has been positioned within its block, but do not
4177 write out its definition yet. output_object_blocks will do
4178 that later. */
4179 if (SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym)
4180 && SYMBOL_REF_BLOCK (desc->sym))
4181 place_block_symbol (desc->sym);
4182 else
4183 {
4184 switch_to_section (targetm.asm_out.select_rtx_section
4185 (desc->mode, desc->constant, desc->align));
4186 output_constant_pool_1 (desc, desc->align);
4187 }
4188 }
4189 }
4190
/* Mark all constants that are used in the current function, then write
   out the function's private constant pool.  FNNAME and FNDECL are
   only consumed by the optional target macros below, hence
   ATTRIBUTE_UNUSED.  */

static void
output_constant_pool (const char *fnname ATTRIBUTE_UNUSED,
		      tree fndecl ATTRIBUTE_UNUSED)
{
  struct rtx_constant_pool *pool = crtl->varasm.pool;

  /* It is possible for gcc to call force_const_mem and then to later
     discard the instructions which refer to the constant.  In such a
     case we do not need to output the constant.  */
  mark_constant_pool ();

  /* Having marked the constant pool entries we'll actually emit, we
     now need to rebuild the offset information, which may have become
     stale.  */
  recompute_pool_offsets (pool);

#ifdef ASM_OUTPUT_POOL_PROLOGUE
  ASM_OUTPUT_POOL_PROLOGUE (asm_out_file, fnname, fndecl, pool->offset);
#endif

  output_constant_pool_contents (pool);

#ifdef ASM_OUTPUT_POOL_EPILOGUE
  ASM_OUTPUT_POOL_EPILOGUE (asm_out_file, fnname, fndecl, pool->offset);
#endif
}
4220
/* Write the contents of the shared constant pool.  Unlike the
   per-function pools, this one is flushed once for the whole
   translation unit.  */

void
output_shared_constant_pool (void)
{
  output_constant_pool_contents (shared_constant_pool);
}
4228
4229 /* Determine what kind of relocations EXP may need. */
4230
4231 int
compute_reloc_for_constant(tree exp)4232 compute_reloc_for_constant (tree exp)
4233 {
4234 int reloc = 0, reloc2;
4235 tree tem;
4236
4237 switch (TREE_CODE (exp))
4238 {
4239 case ADDR_EXPR:
4240 case FDESC_EXPR:
4241 /* Go inside any operations that get_inner_reference can handle and see
4242 if what's inside is a constant: no need to do anything here for
4243 addresses of variables or functions. */
4244 for (tem = TREE_OPERAND (exp, 0); handled_component_p (tem);
4245 tem = TREE_OPERAND (tem, 0))
4246 ;
4247
4248 if (TREE_CODE (tem) == MEM_REF
4249 && TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR)
4250 {
4251 reloc = compute_reloc_for_constant (TREE_OPERAND (tem, 0));
4252 break;
4253 }
4254
4255 if (!targetm.binds_local_p (tem))
4256 reloc |= 2;
4257 else
4258 reloc |= 1;
4259 break;
4260
4261 case PLUS_EXPR:
4262 case POINTER_PLUS_EXPR:
4263 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
4264 reloc |= compute_reloc_for_constant (TREE_OPERAND (exp, 1));
4265 break;
4266
4267 case MINUS_EXPR:
4268 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
4269 reloc2 = compute_reloc_for_constant (TREE_OPERAND (exp, 1));
4270 /* The difference of two local labels is computable at link time. */
4271 if (reloc == 1 && reloc2 == 1)
4272 reloc = 0;
4273 else
4274 reloc |= reloc2;
4275 break;
4276
4277 CASE_CONVERT:
4278 case VIEW_CONVERT_EXPR:
4279 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
4280 break;
4281
4282 case CONSTRUCTOR:
4283 {
4284 unsigned HOST_WIDE_INT idx;
4285 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem)
4286 if (tem != 0)
4287 reloc |= compute_reloc_for_constant (tem);
4288 }
4289 break;
4290
4291 default:
4292 break;
4293 }
4294 return reloc;
4295 }
4296
4297 /* Find all the constants whose addresses are referenced inside of EXP,
4298 and make sure assembler code with a label has been output for each one.
4299 Indicate whether an ADDR_EXPR has been encountered. */
4300
4301 static void
output_addressed_constants(tree exp)4302 output_addressed_constants (tree exp)
4303 {
4304 tree tem;
4305
4306 switch (TREE_CODE (exp))
4307 {
4308 case ADDR_EXPR:
4309 case FDESC_EXPR:
4310 /* Go inside any operations that get_inner_reference can handle and see
4311 if what's inside is a constant: no need to do anything here for
4312 addresses of variables or functions. */
4313 for (tem = TREE_OPERAND (exp, 0); handled_component_p (tem);
4314 tem = TREE_OPERAND (tem, 0))
4315 ;
4316
4317 /* If we have an initialized CONST_DECL, retrieve the initializer. */
4318 if (TREE_CODE (tem) == CONST_DECL && DECL_INITIAL (tem))
4319 tem = DECL_INITIAL (tem);
4320
4321 if (CONSTANT_CLASS_P (tem) || TREE_CODE (tem) == CONSTRUCTOR)
4322 output_constant_def (tem, 0);
4323
4324 if (TREE_CODE (tem) == MEM_REF)
4325 output_addressed_constants (TREE_OPERAND (tem, 0));
4326 break;
4327
4328 case PLUS_EXPR:
4329 case POINTER_PLUS_EXPR:
4330 case MINUS_EXPR:
4331 output_addressed_constants (TREE_OPERAND (exp, 1));
4332 gcc_fallthrough ();
4333
4334 CASE_CONVERT:
4335 case VIEW_CONVERT_EXPR:
4336 output_addressed_constants (TREE_OPERAND (exp, 0));
4337 break;
4338
4339 case CONSTRUCTOR:
4340 {
4341 unsigned HOST_WIDE_INT idx;
4342 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem)
4343 if (tem != 0)
4344 output_addressed_constants (tem);
4345 }
4346 break;
4347
4348 default:
4349 break;
4350 }
4351 }
4352
4353 /* Whether a constructor CTOR is a valid static constant initializer if all
4354 its elements are. This used to be internal to initializer_constant_valid_p
4355 and has been exposed to let other functions like categorize_ctor_elements
4356 evaluate the property while walking a constructor for other purposes. */
4357
4358 bool
constructor_static_from_elts_p(const_tree ctor)4359 constructor_static_from_elts_p (const_tree ctor)
4360 {
4361 return (TREE_CONSTANT (ctor)
4362 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4363 || TREE_CODE (TREE_TYPE (ctor)) == RECORD_TYPE
4364 || TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE));
4365 }
4366
4367 static tree initializer_constant_valid_p_1 (tree value, tree endtype,
4368 tree *cache);
4369
4370 /* A subroutine of initializer_constant_valid_p. VALUE is a MINUS_EXPR,
4371 PLUS_EXPR or POINTER_PLUS_EXPR. This looks for cases of VALUE
4372 which are valid when ENDTYPE is an integer of any size; in
4373 particular, this does not accept a pointer minus a constant. This
4374 returns null_pointer_node if the VALUE is an absolute constant
4375 which can be used to initialize a static variable. Otherwise it
4376 returns NULL. */
4377
4378 static tree
narrowing_initializer_constant_valid_p(tree value,tree endtype,tree * cache)4379 narrowing_initializer_constant_valid_p (tree value, tree endtype, tree *cache)
4380 {
4381 tree op0, op1;
4382
4383 if (!INTEGRAL_TYPE_P (endtype))
4384 return NULL_TREE;
4385
4386 op0 = TREE_OPERAND (value, 0);
4387 op1 = TREE_OPERAND (value, 1);
4388
4389 /* Like STRIP_NOPS except allow the operand mode to widen. This
4390 works around a feature of fold that simplifies (int)(p1 - p2) to
4391 ((int)p1 - (int)p2) under the theory that the narrower operation
4392 is cheaper. */
4393
4394 while (CONVERT_EXPR_P (op0)
4395 || TREE_CODE (op0) == NON_LVALUE_EXPR)
4396 {
4397 tree inner = TREE_OPERAND (op0, 0);
4398 if (inner == error_mark_node
4399 || ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
4400 || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op0)))
4401 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner)))))
4402 break;
4403 op0 = inner;
4404 }
4405
4406 while (CONVERT_EXPR_P (op1)
4407 || TREE_CODE (op1) == NON_LVALUE_EXPR)
4408 {
4409 tree inner = TREE_OPERAND (op1, 0);
4410 if (inner == error_mark_node
4411 || ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
4412 || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op1)))
4413 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner)))))
4414 break;
4415 op1 = inner;
4416 }
4417
4418 op0 = initializer_constant_valid_p_1 (op0, endtype, cache);
4419 if (!op0)
4420 return NULL_TREE;
4421
4422 op1 = initializer_constant_valid_p_1 (op1, endtype,
4423 cache ? cache + 2 : NULL);
4424 /* Both initializers must be known. */
4425 if (op1)
4426 {
4427 if (op0 == op1
4428 && (op0 == null_pointer_node
4429 || TREE_CODE (value) == MINUS_EXPR))
4430 return null_pointer_node;
4431
4432 /* Support differences between labels. */
4433 if (TREE_CODE (op0) == LABEL_DECL
4434 && TREE_CODE (op1) == LABEL_DECL)
4435 return null_pointer_node;
4436
4437 if (TREE_CODE (op0) == STRING_CST
4438 && TREE_CODE (op1) == STRING_CST
4439 && operand_equal_p (op0, op1, 1))
4440 return null_pointer_node;
4441 }
4442
4443 return NULL_TREE;
4444 }
4445
/* Helper function of initializer_constant_valid_p.
   Return nonzero if VALUE is a valid constant-valued expression
   for use in initializing a static variable; one that can be an
   element of a "constant" initializer.

   Return null_pointer_node if the value is absolute;
   if it is relocatable, return the variable that determines the relocation.
   We assume that VALUE has been folded as much as possible;
   therefore, we do not need to check for such things as
   arithmetic-combinations of integers.

   Use CACHE (pointer to 2 tree values) for caching if non-NULL:
   CACHE[0] holds the last expression queried and CACHE[1] the result
   for it.  Callers that recurse on two operands pass CACHE and
   CACHE + 2 respectively (hence the 4-element ncache arrays below).  */

static tree
initializer_constant_valid_p_1 (tree value, tree endtype, tree *cache)
{
  tree ret;

  switch (TREE_CODE (value))
    {
    case CONSTRUCTOR:
      if (constructor_static_from_elts_p (value))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree elt;
	  bool absolute = true;

	  if (cache && cache[0] == value)
	    return cache[1];
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
	    {
	      tree reloc;
	      reloc = initializer_constant_valid_p_1 (elt, TREE_TYPE (elt),
						      NULL);
	      if (!reloc
		  /* An absolute value is required with reverse SSO.  */
		  || (reloc != null_pointer_node
		      && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (value))
		      && !AGGREGATE_TYPE_P (TREE_TYPE (elt))))
		{
		  if (cache)
		    {
		      cache[0] = value;
		      cache[1] = NULL_TREE;
		    }
		  return NULL_TREE;
		}
	      if (reloc != null_pointer_node)
		absolute = false;
	    }
	  /* For a non-absolute relocation, there is no single
	     variable that can be "the variable that determines the
	     relocation."  */
	  if (cache)
	    {
	      cache[0] = value;
	      cache[1] = absolute ? null_pointer_node : error_mark_node;
	    }
	  return absolute ? null_pointer_node : error_mark_node;
	}

      return TREE_STATIC (value) ? null_pointer_node : NULL_TREE;

    case INTEGER_CST:
    case VECTOR_CST:
    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case COMPLEX_CST:
      /* Literal constants are always absolute.  */
      return null_pointer_node;

    case ADDR_EXPR:
    case FDESC_EXPR:
      {
	tree op0 = staticp (TREE_OPERAND (value, 0));
	if (op0)
	  {
	    /* "&(*a).f" is like unto pointer arithmetic.  If "a" turns out
	       to be a constant, this is old-skool offsetof-like nonsense.  */
	    if (TREE_CODE (op0) == INDIRECT_REF
		&& TREE_CONSTANT (TREE_OPERAND (op0, 0)))
	      return null_pointer_node;
	    /* Taking the address of a nested function involves a trampoline,
	       unless we don't need or want one.  */
	    if (TREE_CODE (op0) == FUNCTION_DECL
		&& DECL_STATIC_CHAIN (op0)
		&& !TREE_NO_TRAMPOLINE (value))
	      return NULL_TREE;
	    /* "&{...}" requires a temporary to hold the constructed
	       object.  */
	    if (TREE_CODE (op0) == CONSTRUCTOR)
	      return NULL_TREE;
	  }
	/* OP0 is the static object whose address determines the
	   relocation, or NULL_TREE if the operand is not static.  */
	return op0;
      }

    case NON_LVALUE_EXPR:
      return initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
					     endtype, cache);

    case VIEW_CONVERT_EXPR:
      {
	tree src = TREE_OPERAND (value, 0);
	tree src_type = TREE_TYPE (src);
	tree dest_type = TREE_TYPE (value);

	/* Allow view-conversions from aggregate to non-aggregate type only
	   if the bit pattern is fully preserved afterwards; otherwise, the
	   RTL expander won't be able to apply a subsequent transformation
	   to the underlying constructor.  */
	if (AGGREGATE_TYPE_P (src_type) && !AGGREGATE_TYPE_P (dest_type))
	  {
	    if (TYPE_MODE (endtype) == TYPE_MODE (dest_type))
	      return initializer_constant_valid_p_1 (src, endtype, cache);
	    else
	      return NULL_TREE;
	  }

	/* Allow all other kinds of view-conversion.  */
	return initializer_constant_valid_p_1 (src, endtype, cache);
      }

    CASE_CONVERT:
      {
	tree src = TREE_OPERAND (value, 0);
	tree src_type = TREE_TYPE (src);
	tree dest_type = TREE_TYPE (value);

	/* Allow conversions between pointer types, floating-point
	   types, and offset types.  */
	if ((POINTER_TYPE_P (dest_type) && POINTER_TYPE_P (src_type))
	    || (FLOAT_TYPE_P (dest_type) && FLOAT_TYPE_P (src_type))
	    || (TREE_CODE (dest_type) == OFFSET_TYPE
		&& TREE_CODE (src_type) == OFFSET_TYPE))
	  return initializer_constant_valid_p_1 (src, endtype, cache);

	/* Allow length-preserving conversions between integer types.  */
	if (INTEGRAL_TYPE_P (dest_type) && INTEGRAL_TYPE_P (src_type)
	    && (TYPE_PRECISION (dest_type) == TYPE_PRECISION (src_type)))
	  return initializer_constant_valid_p_1 (src, endtype, cache);

	/* Allow conversions between other integer types only if
	   explicit value.  Don't allow sign-extension to a type larger
	   than word and pointer, there aren't relocations that would
	   allow to sign extend it to a wider type.  */
	if (INTEGRAL_TYPE_P (dest_type)
	    && INTEGRAL_TYPE_P (src_type)
	    && (TYPE_UNSIGNED (src_type)
		|| TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type)
		|| TYPE_PRECISION (dest_type) <= BITS_PER_WORD
		|| TYPE_PRECISION (dest_type) <= POINTER_SIZE))
	  {
	    tree inner = initializer_constant_valid_p_1 (src, endtype, cache);
	    if (inner == null_pointer_node)
	      return null_pointer_node;
	    /* A relocatable inner value is not acceptable here; fall
	       out to the final return NULL_TREE.  */
	    break;
	  }

	/* Allow (int) &foo provided int is as wide as a pointer.  */
	if (INTEGRAL_TYPE_P (dest_type) && POINTER_TYPE_P (src_type)
	    && (TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type)))
	  return initializer_constant_valid_p_1 (src, endtype, cache);

	/* Likewise conversions from int to pointers, but also allow
	   conversions from 0.  */
	if ((POINTER_TYPE_P (dest_type)
	     || TREE_CODE (dest_type) == OFFSET_TYPE)
	    && INTEGRAL_TYPE_P (src_type))
	  {
	    if (TREE_CODE (src) == INTEGER_CST
		&& TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type))
	      return null_pointer_node;
	    if (integer_zerop (src))
	      return null_pointer_node;
	    else if (TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type))
	      return initializer_constant_valid_p_1 (src, endtype, cache);
	  }

	/* Allow conversions to struct or union types if the value
	   inside is okay.  */
	if (TREE_CODE (dest_type) == RECORD_TYPE
	    || TREE_CODE (dest_type) == UNION_TYPE)
	  return initializer_constant_valid_p_1 (src, endtype, cache);
      }
      break;

    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      /* Any valid floating-point constants will have been folded by now;
	 with -frounding-math we hit this with addition of two constants.  */
      if (TREE_CODE (endtype) == REAL_TYPE)
	return NULL_TREE;
      if (cache && cache[0] == value)
	return cache[1];
      if (! INTEGRAL_TYPE_P (endtype)
	  || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
	{
	  /* NCACHE caches the results for the two operands: slots 0-1
	     for operand 0, slots 2-3 for operand 1.  */
	  tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
	  tree valid0
	    = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
					      endtype, ncache);
	  tree valid1
	    = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
					      endtype, ncache + 2);
	  /* If either term is absolute, use the other term's relocation.  */
	  if (valid0 == null_pointer_node)
	    ret = valid1;
	  else if (valid1 == null_pointer_node)
	    ret = valid0;
	  /* Support narrowing pointer differences.  */
	  else
	    ret = narrowing_initializer_constant_valid_p (value, endtype,
							  ncache);
	}
      else
	/* Support narrowing pointer differences.  */
	ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
      if (cache)
	{
	  cache[0] = value;
	  cache[1] = ret;
	}
      return ret;

    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (endtype) == REAL_TYPE)
	return NULL_TREE;
      if (cache && cache[0] == value)
	return cache[1];
      if (! INTEGRAL_TYPE_P (endtype)
	  || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
	{
	  tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
	  tree valid0
	    = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
					      endtype, ncache);
	  tree valid1
	    = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
					      endtype, ncache + 2);
	  /* Win if second argument is absolute.  */
	  if (valid1 == null_pointer_node)
	    ret = valid0;
	  /* Win if both arguments have the same relocation.
	     Then the value is absolute.  */
	  else if (valid0 == valid1 && valid0 != 0)
	    ret = null_pointer_node;
	  /* Since GCC guarantees that string constants are unique in the
	     generated code, a subtraction between two copies of the same
	     constant string is absolute.  */
	  else if (valid0 && TREE_CODE (valid0) == STRING_CST
		   && valid1 && TREE_CODE (valid1) == STRING_CST
		   && operand_equal_p (valid0, valid1, 1))
	    ret = null_pointer_node;
	  /* Support narrowing differences.  */
	  else
	    ret = narrowing_initializer_constant_valid_p (value, endtype,
							  ncache);
	}
      else
	/* Support narrowing differences.  */
	ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
      if (cache)
	{
	  cache[0] = value;
	  cache[1] = ret;
	}
      return ret;

    default:
      break;
    }

  return NULL_TREE;
}
4721
4722 /* Return nonzero if VALUE is a valid constant-valued expression
4723 for use in initializing a static variable; one that can be an
4724 element of a "constant" initializer.
4725
4726 Return null_pointer_node if the value is absolute;
4727 if it is relocatable, return the variable that determines the relocation.
4728 We assume that VALUE has been folded as much as possible;
4729 therefore, we do not need to check for such things as
4730 arithmetic-combinations of integers. */
4731 tree
initializer_constant_valid_p(tree value,tree endtype,bool reverse)4732 initializer_constant_valid_p (tree value, tree endtype, bool reverse)
4733 {
4734 tree reloc = initializer_constant_valid_p_1 (value, endtype, NULL);
4735
4736 /* An absolute value is required with reverse storage order. */
4737 if (reloc
4738 && reloc != null_pointer_node
4739 && reverse
4740 && !AGGREGATE_TYPE_P (endtype)
4741 && !VECTOR_TYPE_P (endtype))
4742 reloc = NULL_TREE;
4743
4744 return reloc;
4745 }
4746
4747 /* Return true if VALUE is a valid constant-valued expression
4748 for use in initializing a static bit-field; one that can be
4749 an element of a "constant" initializer. */
4750
4751 bool
initializer_constant_valid_for_bitfield_p(tree value)4752 initializer_constant_valid_for_bitfield_p (tree value)
4753 {
4754 /* For bitfields we support integer constants or possibly nested aggregates
4755 of such. */
4756 switch (TREE_CODE (value))
4757 {
4758 case CONSTRUCTOR:
4759 {
4760 unsigned HOST_WIDE_INT idx;
4761 tree elt;
4762
4763 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
4764 if (!initializer_constant_valid_for_bitfield_p (elt))
4765 return false;
4766 return true;
4767 }
4768
4769 case INTEGER_CST:
4770 case REAL_CST:
4771 return true;
4772
4773 case VIEW_CONVERT_EXPR:
4774 case NON_LVALUE_EXPR:
4775 return
4776 initializer_constant_valid_for_bitfield_p (TREE_OPERAND (value, 0));
4777
4778 default:
4779 break;
4780 }
4781
4782 return false;
4783 }
4784
/* output_constructor outer state of relevance in recursive calls, typically
   for nested aggregate bitfields: the recursion accumulates partial bytes
   into the caller's buffer instead of emitting them directly.  */

struct oc_outer_state {
  unsigned int bit_offset; /* current position in ... */
  int byte;                /* ... the outer byte buffer.  */
};
4792
4793 static unsigned HOST_WIDE_INT
4794 output_constructor (tree, unsigned HOST_WIDE_INT, unsigned int, bool,
4795 oc_outer_state *);
4796
/* Output assembler code for constant EXP, with no label.
   This includes the pseudo-op such as ".int" or ".byte", and a newline.
   Assumes output_addressed_constants has been done on EXP already.

   Generate at least SIZE bytes of assembler data, padding at the end
   with zeros if necessary.  SIZE must always be specified.  The returned
   value is the actual number of bytes of assembler data generated, which
   may be bigger than SIZE if the object contains a variable length field.

   SIZE is important for structure constructors,
   since trailing members may have been omitted from the constructor.
   It is also important for initialization of arrays from string constants
   since the full length of the string constant might not be wanted.
   It is also needed for initialization of unions, where the initializer's
   type is just one member, and that may not be as long as the union.

   There is a case in which we would fail to output exactly SIZE bytes:
   for a structure constructor that wants to produce more than SIZE bytes.
   But such constructors will never be generated for any possible input.

   ALIGN is the alignment of the data in bits.

   If REVERSE is true, EXP is output in reverse storage order.  */

static unsigned HOST_WIDE_INT
output_constant (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
		 bool reverse)
{
  enum tree_code code;
  unsigned HOST_WIDE_INT thissize;
  rtx cst;

  /* Nothing to emit for a zero-sized object, or when only checking
     syntax.  */
  if (size == 0 || flag_syntax_only)
    return size;

  /* See if we're trying to initialize a pointer in a non-default mode
     to the address of some declaration somewhere.  If the target says
     the mode is valid for pointers, assume the target has a way of
     resolving it.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && POINTER_TYPE_P (TREE_TYPE (exp))
      && targetm.addr_space.valid_pointer_mode
	   (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)),
	    TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))))
    {
      tree saved_type = TREE_TYPE (exp);

      /* Peel off any intermediate conversions-to-pointer for valid
	 pointer modes.  */
      while (TREE_CODE (exp) == NOP_EXPR
	     && POINTER_TYPE_P (TREE_TYPE (exp))
	     && targetm.addr_space.valid_pointer_mode
		  (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)),
		   TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))))
	exp = TREE_OPERAND (exp, 0);

      /* If what we're left with is the address of something, we can
	 convert the address to the final type and output it that
	 way.  */
      if (TREE_CODE (exp) == ADDR_EXPR)
	exp = build1 (ADDR_EXPR, saved_type, TREE_OPERAND (exp, 0));
      /* Likewise for constant ints.  */
      else if (TREE_CODE (exp) == INTEGER_CST)
	exp = fold_convert (saved_type, exp);

    }

  /* Eliminate any conversions since we'll be outputting the underlying
     constant.  */
  while (CONVERT_EXPR_P (exp)
	 || TREE_CODE (exp) == NON_LVALUE_EXPR
	 || TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      HOST_WIDE_INT type_size = int_size_in_bytes (TREE_TYPE (exp));
      HOST_WIDE_INT op_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0)));

      /* Make sure eliminating the conversion is really a no-op, except with
	 VIEW_CONVERT_EXPRs to allow for wild Ada unchecked conversions and
	 union types to allow for Ada unchecked unions.  */
      if (type_size > op_size
	  && TREE_CODE (exp) != VIEW_CONVERT_EXPR
	  && TREE_CODE (TREE_TYPE (exp)) != UNION_TYPE)
	/* Keep the conversion.  */
	break;
      else
	exp = TREE_OPERAND (exp, 0);
    }

  code = TREE_CODE (TREE_TYPE (exp));
  thissize = int_size_in_bytes (TREE_TYPE (exp));

  /* Allow a constructor with no elements for any data type.
     This means to fill the space with zeros.  */
  if (TREE_CODE (exp) == CONSTRUCTOR
      && vec_safe_is_empty (CONSTRUCTOR_ELTS (exp)))
    {
      assemble_zeros (size);
      return size;
    }

  /* Function descriptors are target-specific; delegate entirely to the
     ASM_OUTPUT_FDESC macro, which must exist if FDESC_EXPRs are created.  */
  if (TREE_CODE (exp) == FDESC_EXPR)
    {
#ifdef ASM_OUTPUT_FDESC
      HOST_WIDE_INT part = tree_to_shwi (TREE_OPERAND (exp, 1));
      tree decl = TREE_OPERAND (exp, 0);
      ASM_OUTPUT_FDESC (asm_out_file, decl, part);
#else
      gcc_unreachable ();
#endif
      return size;
    }

  /* Now output the underlying data.  If we've handled the padding, return.
     Otherwise, break and ensure SIZE is the size written.  */
  switch (code)
    {
    case BOOLEAN_TYPE:
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case OFFSET_TYPE:
    case FIXED_POINT_TYPE:
    case POINTER_BOUNDS_TYPE:
    case NULLPTR_TYPE:
      cst = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
      if (reverse)
	cst = flip_storage_order (TYPE_MODE (TREE_TYPE (exp)), cst);
      if (!assemble_integer (cst, MIN (size, thissize), align, 0))
	error ("initializer for integer/fixed-point value is too complicated");
      break;

    case REAL_TYPE:
      if (TREE_CODE (exp) != REAL_CST)
	error ("initializer for floating value is not a floating constant");
      else
	assemble_real (TREE_REAL_CST (exp),
		       SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (exp)),
		       align, reverse);
      break;

    case COMPLEX_TYPE:
      /* Emit the real part, then the imaginary part; the latter's
	 alignment is limited by its offset of thissize/2 bytes.  */
      output_constant (TREE_REALPART (exp), thissize / 2, align, reverse);
      output_constant (TREE_IMAGPART (exp), thissize / 2,
		       min_align (align, BITS_PER_UNIT * (thissize / 2)),
		       reverse);
      break;

    case ARRAY_TYPE:
    case VECTOR_TYPE:
      switch (TREE_CODE (exp))
	{
	case CONSTRUCTOR:
	  return output_constructor (exp, size, align, reverse, NULL);
	case STRING_CST:
	  thissize
	    = MIN ((unsigned HOST_WIDE_INT)TREE_STRING_LENGTH (exp), size);
	  assemble_string (TREE_STRING_POINTER (exp), thissize);
	  break;
	case VECTOR_CST:
	  {
	    scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	    unsigned int nalign = MIN (align, GET_MODE_ALIGNMENT (inner));
	    int elt_size = GET_MODE_SIZE (inner);
	    /* Element 0 keeps the full alignment; subsequent elements are
	       limited to the inner mode's alignment.  */
	    output_constant (VECTOR_CST_ELT (exp, 0), elt_size, align,
			     reverse);
	    thissize = elt_size;
	    /* Static constants must have a fixed size.  */
	    unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
	    for (unsigned int i = 1; i < nunits; i++)
	      {
		output_constant (VECTOR_CST_ELT (exp, i), elt_size, nalign,
				 reverse);
		thissize += elt_size;
	      }
	    break;
	  }
	default:
	  gcc_unreachable ();
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      gcc_assert (TREE_CODE (exp) == CONSTRUCTOR);
      return output_constructor (exp, size, align, reverse, NULL);

    case ERROR_MARK:
      return 0;

    default:
      gcc_unreachable ();
    }

  /* Pad with zeros up to the requested size.  */
  if (size > thissize)
    assemble_zeros (size - thissize);

  return size;
}
4996
4997 /* Subroutine of output_constructor, used for computing the size of
4998 arrays of unspecified length. VAL must be a CONSTRUCTOR of an array
4999 type with an unspecified upper bound. */
5000
5001 static unsigned HOST_WIDE_INT
array_size_for_constructor(tree val)5002 array_size_for_constructor (tree val)
5003 {
5004 tree max_index;
5005 unsigned HOST_WIDE_INT cnt;
5006 tree index, value, tmp;
5007 offset_int i;
5008
5009 /* This code used to attempt to handle string constants that are not
5010 arrays of single-bytes, but nothing else does, so there's no point in
5011 doing it here. */
5012 if (TREE_CODE (val) == STRING_CST)
5013 return TREE_STRING_LENGTH (val);
5014
5015 max_index = NULL_TREE;
5016 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (val), cnt, index, value)
5017 {
5018 if (TREE_CODE (index) == RANGE_EXPR)
5019 index = TREE_OPERAND (index, 1);
5020 if (max_index == NULL_TREE || tree_int_cst_lt (max_index, index))
5021 max_index = index;
5022 }
5023
5024 if (max_index == NULL_TREE)
5025 return 0;
5026
5027 /* Compute the total number of array elements. */
5028 tmp = TYPE_MIN_VALUE (TYPE_DOMAIN (TREE_TYPE (val)));
5029 i = wi::to_offset (max_index) - wi::to_offset (tmp) + 1;
5030
5031 /* Multiply by the array element unit size to find number of bytes. */
5032 i *= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (val))));
5033
5034 gcc_assert (wi::fits_uhwi_p (i));
5035 return i.to_uhwi ();
5036 }
5037
/* Other datastructures + helpers for output_constructor.  */

/* output_constructor local state to support interaction with helpers.
   One instance lives on the stack per output_constructor invocation and
   is passed by pointer to the per-element helpers below.  */

struct oc_local_state {

  /* Received arguments.  */
  tree exp;                     /* Constructor expression.  */
  tree type;                    /* Type of constructor expression.  */
  unsigned HOST_WIDE_INT size;  /* # bytes to output - pad if necessary.  */
  unsigned int align;           /* Known initial alignment.  */
  tree min_index;               /* Lower bound if specified for an array.  */

  /* Output processing state.  */
  HOST_WIDE_INT total_bytes;  /* # bytes output so far / current position.  */
  int byte;                   /* Part of a bitfield byte yet to be output.  */
  int last_relative_index;    /* Implicit or explicit index of the last
				 array element output within a bitfield.  */
  bool byte_buffer_in_use;    /* Whether BYTE is in use.  */
  bool reverse;               /* Whether reverse storage order is in use.  */

  /* Current element.  */
  tree field;   /* Current field decl in a record.  */
  tree val;     /* Current element value.  */
  tree index;   /* Current element index.  */

};
5065
5066 /* Helper for output_constructor. From the current LOCAL state, output a
5067 RANGE_EXPR element. */
5068
5069 static void
output_constructor_array_range(oc_local_state * local)5070 output_constructor_array_range (oc_local_state *local)
5071 {
5072 unsigned HOST_WIDE_INT fieldsize
5073 = int_size_in_bytes (TREE_TYPE (local->type));
5074
5075 HOST_WIDE_INT lo_index
5076 = tree_to_shwi (TREE_OPERAND (local->index, 0));
5077 HOST_WIDE_INT hi_index
5078 = tree_to_shwi (TREE_OPERAND (local->index, 1));
5079 HOST_WIDE_INT index;
5080
5081 unsigned int align2
5082 = min_align (local->align, fieldsize * BITS_PER_UNIT);
5083
5084 for (index = lo_index; index <= hi_index; index++)
5085 {
5086 /* Output the element's initial value. */
5087 if (local->val == NULL_TREE)
5088 assemble_zeros (fieldsize);
5089 else
5090 fieldsize
5091 = output_constant (local->val, fieldsize, align2, local->reverse);
5092
5093 /* Count its size. */
5094 local->total_bytes += fieldsize;
5095 }
5096 }
5097
/* Helper for output_constructor.  From the current LOCAL state, output a
   field element that is not true bitfield or part of an outer one.  */

static void
output_constructor_regular_field (oc_local_state *local)
{
  /* Field size and position.  Since this structure is static, we know the
     positions are constant.  */
  unsigned HOST_WIDE_INT fieldsize;
  HOST_WIDE_INT fieldpos;

  unsigned int align2;

  /* Output any buffered-up bit-fields preceding this element.  */
  if (local->byte_buffer_in_use)
    {
      assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
      local->total_bytes++;
      local->byte_buffer_in_use = false;
    }

  if (local->index != NULL_TREE)
    {
      /* Perform the index calculation in modulo arithmetic but
	 sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
	 but we are using an unsigned sizetype.  */
      unsigned prec = TYPE_PRECISION (sizetype);
      offset_int idx = wi::sext (wi::to_offset (local->index)
				 - wi::to_offset (local->min_index), prec);
      fieldpos = (idx * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (local->val))))
	.to_short_addr ();
    }
  else if (local->field != NULL_TREE)
    fieldpos = int_byte_position (local->field);
  else
    fieldpos = 0;

  /* Advance to offset of this element.
     Note no alignment needed in an array, since that is guaranteed
     if each element has the proper size.  */
  if (local->field != NULL_TREE || local->index != NULL_TREE)
    {
      if (fieldpos > local->total_bytes)
	{
	  assemble_zeros (fieldpos - local->total_bytes);
	  local->total_bytes = fieldpos;
	}
      else
	/* Must not go backwards.  */
	gcc_assert (fieldpos == local->total_bytes);
    }

  /* Find the alignment of this element.  */
  align2 = min_align (local->align, BITS_PER_UNIT * fieldpos);

  /* Determine size this element should occupy.  */
  if (local->field)
    {
      fieldsize = 0;

      /* If this is an array with an unspecified upper bound,
	 the initializer determines the size.  */
      /* ??? This ought to be checked only if DECL_SIZE_UNIT is NULL,
	 but we cannot do this until the deprecated support for
	 initializing zero-length array members is removed.  */
      if (TREE_CODE (TREE_TYPE (local->field)) == ARRAY_TYPE
	  && (!TYPE_DOMAIN (TREE_TYPE (local->field))
	      || !TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (local->field)))))
	{
	  fieldsize = array_size_for_constructor (local->val);
	  /* Given a non-empty initialization, this field had better
	     be last.  Given a flexible array member, the next field
	     on the chain is a TYPE_DECL of the enclosing struct.  */
	  const_tree next = DECL_CHAIN (local->field);
	  gcc_assert (!fieldsize || !next || TREE_CODE (next) != FIELD_DECL);
	}
      else
	fieldsize = tree_to_uhwi (DECL_SIZE_UNIT (local->field));
    }
  else
    fieldsize = int_size_in_bytes (TREE_TYPE (local->type));

  /* Output the element's initial value, or zeros if it has none.  */
  if (local->val == NULL_TREE)
    assemble_zeros (fieldsize);
  else
    fieldsize
      = output_constant (local->val, fieldsize, align2, local->reverse);

  /* Count its size.  */
  local->total_bytes += fieldsize;
}
5190
/* Helper for output_constructor.  From the LOCAL state, output an element
   that is a true bitfield or part of an outer one.  BIT_OFFSET is the offset
   from the start of a possibly ongoing outer byte buffer.

   Bits are accumulated into LOCAL->byte and flushed one byte at a time via
   assemble_integer; a partially-filled byte is left in the buffer for the
   next element (or the caller) to complete.  */

static void
output_constructor_bitfield (oc_local_state *local, unsigned int bit_offset)
{
  /* Bit size of this element.  */
  HOST_WIDE_INT ebitsize
    = (local->field
       ? tree_to_uhwi (DECL_SIZE (local->field))
       : tree_to_uhwi (TYPE_SIZE (TREE_TYPE (local->type))));

  /* Relative index of this element if this is an array component.  */
  HOST_WIDE_INT relative_index
    = (!local->field
       ? (local->index
	  ? (tree_to_shwi (local->index)
	     - tree_to_shwi (local->min_index))
	  : local->last_relative_index + 1)
       : 0);

  /* Bit position of this element from the start of the containing
     constructor.  */
  HOST_WIDE_INT constructor_relative_ebitpos
      = (local->field
	 ? int_bit_position (local->field)
	 : ebitsize * relative_index);

  /* Bit position of this element from the start of a possibly ongoing
     outer byte buffer.  */
  HOST_WIDE_INT byte_relative_ebitpos
    = bit_offset + constructor_relative_ebitpos;

  /* From the start of a possibly ongoing outer byte buffer, offsets to
     the first bit of this element and to the first bit past the end of
     this element.  */
  HOST_WIDE_INT next_offset = byte_relative_ebitpos;
  HOST_WIDE_INT end_offset = byte_relative_ebitpos + ebitsize;

  local->last_relative_index = relative_index;

  /* A missing initializer means zero bits.  */
  if (local->val == NULL_TREE)
    local->val = integer_zero_node;

  while (TREE_CODE (local->val) == VIEW_CONVERT_EXPR
	 || TREE_CODE (local->val) == NON_LVALUE_EXPR)
    local->val = TREE_OPERAND (local->val, 0);

  /* Only integer constants and nested constructors can be packed into
     bitfields; anything else is a user error.  */
  if (TREE_CODE (local->val) != INTEGER_CST
      && TREE_CODE (local->val) != CONSTRUCTOR)
    {
      error ("invalid initial value for member %qE", DECL_NAME (local->field));
      return;
    }

  /* If this field does not start in this (or next) byte, skip some bytes.  */
  if (next_offset / BITS_PER_UNIT != local->total_bytes)
    {
      /* Output remnant of any bit field in previous bytes.  */
      if (local->byte_buffer_in_use)
	{
	  assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
	  local->total_bytes++;
	  local->byte_buffer_in_use = false;
	}

      /* If still not at proper byte, advance to there.  */
      if (next_offset / BITS_PER_UNIT != local->total_bytes)
	{
	  gcc_assert (next_offset / BITS_PER_UNIT >= local->total_bytes);
	  assemble_zeros (next_offset / BITS_PER_UNIT - local->total_bytes);
	  local->total_bytes = next_offset / BITS_PER_UNIT;
	}
    }

  /* Set up the buffer if necessary.  */
  if (!local->byte_buffer_in_use)
    {
      local->byte = 0;
      if (ebitsize > 0)
	local->byte_buffer_in_use = true;
    }

  /* If this is nested constructor, recurse passing the bit offset and the
     pending data, then retrieve the new pending data afterwards.  */
  if (TREE_CODE (local->val) == CONSTRUCTOR)
    {
      oc_outer_state temp_state;
      temp_state.bit_offset = next_offset % BITS_PER_UNIT;
      temp_state.byte = local->byte;
      local->total_bytes
	+= output_constructor (local->val, 0, 0, local->reverse, &temp_state);
      local->byte = temp_state.byte;
      return;
    }

  /* Otherwise, we must split the element into pieces that fall within
     separate bytes, and combine each byte with previous or following
     bit-fields.  */
  while (next_offset < end_offset)
    {
      int this_time;
      int shift;
      HOST_WIDE_INT value;
      HOST_WIDE_INT next_byte = next_offset / BITS_PER_UNIT;
      HOST_WIDE_INT next_bit = next_offset % BITS_PER_UNIT;

      /* Advance from byte to byte within this element when necessary.  */
      while (next_byte != local->total_bytes)
	{
	  assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
	  local->total_bytes++;
	  local->byte = 0;
	}

      /* Number of bits we can process at once (all part of the same byte).  */
      this_time = MIN (end_offset - next_offset, BITS_PER_UNIT - next_bit);
      if (local->reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
	{
	  /* For big-endian data, take the most significant bits (of the
	     bits that are significant) first and put them into bytes from
	     the most significant end.  */
	  shift = end_offset - next_offset - this_time;

	  /* Don't try to take a bunch of bits that cross
	     the word boundary in the INTEGER_CST.  We can
	     only select bits from one element.  */
	  if ((shift / HOST_BITS_PER_WIDE_INT)
	      != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
	    {
	      const int end = shift + this_time - 1;
	      shift = end & -HOST_BITS_PER_WIDE_INT;
	      this_time = end - shift + 1;
	    }

	  /* Now get the bits from the appropriate constant word.  */
	  value = TREE_INT_CST_ELT (local->val, shift / HOST_BITS_PER_WIDE_INT);
	  shift = shift & (HOST_BITS_PER_WIDE_INT - 1);

	  /* Get the result.  This works only when:
	     1 <= this_time <= HOST_BITS_PER_WIDE_INT.  */
	  local->byte |= (((value >> shift)
			   & (((HOST_WIDE_INT) 2 << (this_time - 1)) - 1))
			  << (BITS_PER_UNIT - this_time - next_bit));
	}
      else
	{
	  /* On little-endian machines, take the least significant bits of
	     the value first and pack them starting at the least significant
	     bits of the bytes.  */
	  shift = next_offset - byte_relative_ebitpos;

	  /* Don't try to take a bunch of bits that cross
	     the word boundary in the INTEGER_CST.  We can
	     only select bits from one element.  */
	  if ((shift / HOST_BITS_PER_WIDE_INT)
	      != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
	    this_time
	      = HOST_BITS_PER_WIDE_INT - (shift & (HOST_BITS_PER_WIDE_INT - 1));

	  /* Now get the bits from the appropriate constant word.  */
	  value = TREE_INT_CST_ELT (local->val, shift / HOST_BITS_PER_WIDE_INT);
	  shift = shift & (HOST_BITS_PER_WIDE_INT - 1);

	  /* Get the result.  This works only when:
	     1 <= this_time <= HOST_BITS_PER_WIDE_INT.  */
	  local->byte |= (((value >> shift)
			   & (((HOST_WIDE_INT) 2 << (this_time - 1)) - 1))
			  << next_bit);
	}

      next_offset += this_time;
      local->byte_buffer_in_use = true;
    }
}
5367
/* Subroutine of output_constant, used for CONSTRUCTORs (aggregate constants).
   Generate at least SIZE bytes, padding if necessary.  OUTER designates the
   caller output state of relevance in recursive invocations.

   Returns the number of bytes actually output.  When OUTER is non-null the
   pending partially-filled byte is handed back to the caller instead of
   being flushed here.  */

static unsigned HOST_WIDE_INT
output_constructor (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
		    bool reverse, oc_outer_state *outer)
{
  unsigned HOST_WIDE_INT cnt;
  constructor_elt *ce;
  oc_local_state local;

  /* Setup our local state to communicate with helpers.  */
  local.exp = exp;
  local.type = TREE_TYPE (exp);
  local.size = size;
  local.align = align;
  if (TREE_CODE (local.type) == ARRAY_TYPE && TYPE_DOMAIN (local.type))
    local.min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (local.type));
  else
    local.min_index = integer_zero_node;

  local.total_bytes = 0;
  local.byte_buffer_in_use = outer != NULL;
  local.byte = outer ? outer->byte : 0;
  local.last_relative_index = -1;
  /* The storage order is specified for every aggregate type.  */
  if (AGGREGATE_TYPE_P (local.type))
    local.reverse = TYPE_REVERSE_STORAGE_ORDER (local.type);
  else
    local.reverse = reverse;

  gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_UNIT);

  /* As CE goes through the elements of the constant, FIELD goes through the
     structure fields if the constant is a structure.  If the constant is a
     union, we override this by getting the field from the TREE_LIST element.
     But the constant could also be an array.  Then FIELD is zero.

     There is always a maximum of one element in the chain LINK for unions
     (even if the initializer in a source program incorrectly contains
     more than one).  */

  if (TREE_CODE (local.type) == RECORD_TYPE)
    local.field = TYPE_FIELDS (local.type);
  else
    local.field = NULL_TREE;

  for (cnt = 0;
       vec_safe_iterate (CONSTRUCTOR_ELTS (exp), cnt, &ce);
       cnt++, local.field = local.field ? DECL_CHAIN (local.field) : 0)
    {
      local.val = ce->value;
      local.index = NULL_TREE;

      /* The element in a union constructor specifies the proper field
	 or index.  */
      if (RECORD_OR_UNION_TYPE_P (local.type) && ce->index != NULL_TREE)
	local.field = ce->index;

      else if (TREE_CODE (local.type) == ARRAY_TYPE)
	local.index = ce->index;

      /* Emit an assembly comment naming the field for -fverbose-asm.  */
      if (local.field && flag_verbose_asm)
	fprintf (asm_out_file, "%s %s:\n",
		 ASM_COMMENT_START,
		 DECL_NAME (local.field)
		 ? IDENTIFIER_POINTER (DECL_NAME (local.field))
		 : "<anonymous>");

      /* Eliminate the marker that makes a cast not be an lvalue.  */
      if (local.val != NULL_TREE)
	STRIP_NOPS (local.val);

      /* Output the current element, using the appropriate helper ...  */

      /* For an array slice not part of an outer bitfield.  */
      if (!outer
	  && local.index != NULL_TREE
	  && TREE_CODE (local.index) == RANGE_EXPR)
	output_constructor_array_range (&local);

      /* For a field that is neither a true bitfield nor part of an outer one,
	 known to be at least byte aligned and multiple-of-bytes long.  */
      else if (!outer
	       && (local.field == NULL_TREE
		   || !CONSTRUCTOR_BITFIELD_P (local.field)))
	output_constructor_regular_field (&local);

      /* For a true bitfield or part of an outer one.  Only INTEGER_CSTs are
	 supported for scalar fields, so we may need to convert first.  */
      else
	{
	  if (TREE_CODE (local.val) == REAL_CST)
	    local.val
	      = fold_unary (VIEW_CONVERT_EXPR,
			    build_nonstandard_integer_type
			    (TYPE_PRECISION (TREE_TYPE (local.val)), 0),
			    local.val);
	  output_constructor_bitfield (&local, outer ? outer->bit_offset : 0);
	}
    }

  /* If we are not at toplevel, save the pending data for our caller.
     Otherwise output the pending data and padding zeros as needed.  */
  if (outer)
    outer->byte = local.byte;
  else
    {
      if (local.byte_buffer_in_use)
	{
	  assemble_integer (GEN_INT (local.byte), 1, BITS_PER_UNIT, 1);
	  local.total_bytes++;
	}

      if ((unsigned HOST_WIDE_INT)local.total_bytes < local.size)
	{
	  assemble_zeros (local.size - local.total_bytes);
	  local.total_bytes = local.size;
	}
    }

  return local.total_bytes;
}
5492
5493 /* Mark DECL as weak. */
5494
5495 static void
mark_weak(tree decl)5496 mark_weak (tree decl)
5497 {
5498 if (DECL_WEAK (decl))
5499 return;
5500
5501 struct symtab_node *n = symtab_node::get (decl);
5502 if (n && n->refuse_visibility_changes)
5503 error ("%+qD declared weak after being used", decl);
5504 DECL_WEAK (decl) = 1;
5505
5506 if (DECL_RTL_SET_P (decl)
5507 && MEM_P (DECL_RTL (decl))
5508 && XEXP (DECL_RTL (decl), 0)
5509 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == SYMBOL_REF)
5510 SYMBOL_REF_WEAK (XEXP (DECL_RTL (decl), 0)) = 1;
5511 }
5512
5513 /* Merge weak status between NEWDECL and OLDDECL. */
5514
5515 void
merge_weak(tree newdecl,tree olddecl)5516 merge_weak (tree newdecl, tree olddecl)
5517 {
5518 if (DECL_WEAK (newdecl) == DECL_WEAK (olddecl))
5519 {
5520 if (DECL_WEAK (newdecl) && TARGET_SUPPORTS_WEAK)
5521 {
5522 tree *pwd;
5523 /* We put the NEWDECL on the weak_decls list at some point
5524 and OLDDECL as well. Keep just OLDDECL on the list. */
5525 for (pwd = &weak_decls; *pwd; pwd = &TREE_CHAIN (*pwd))
5526 if (TREE_VALUE (*pwd) == newdecl)
5527 {
5528 *pwd = TREE_CHAIN (*pwd);
5529 break;
5530 }
5531 }
5532 return;
5533 }
5534
5535 if (DECL_WEAK (newdecl))
5536 {
5537 tree wd;
5538
5539 /* NEWDECL is weak, but OLDDECL is not. */
5540
5541 /* If we already output the OLDDECL, we're in trouble; we can't
5542 go back and make it weak. This should never happen in
5543 unit-at-a-time compilation. */
5544 gcc_assert (!TREE_ASM_WRITTEN (olddecl));
5545
5546 /* If we've already generated rtl referencing OLDDECL, we may
5547 have done so in a way that will not function properly with
5548 a weak symbol. Again in unit-at-a-time this should be
5549 impossible. */
5550 gcc_assert (!TREE_USED (olddecl)
5551 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (olddecl)));
5552
5553 /* PR 49899: You cannot convert a static function into a weak, public function. */
5554 if (! TREE_PUBLIC (olddecl) && TREE_PUBLIC (newdecl))
5555 error ("weak declaration of %q+D being applied to a already "
5556 "existing, static definition", newdecl);
5557
5558 if (TARGET_SUPPORTS_WEAK)
5559 {
5560 /* We put the NEWDECL on the weak_decls list at some point.
5561 Replace it with the OLDDECL. */
5562 for (wd = weak_decls; wd; wd = TREE_CHAIN (wd))
5563 if (TREE_VALUE (wd) == newdecl)
5564 {
5565 TREE_VALUE (wd) = olddecl;
5566 break;
5567 }
5568 /* We may not find the entry on the list. If NEWDECL is a
5569 weak alias, then we will have already called
5570 globalize_decl to remove the entry; in that case, we do
5571 not need to do anything. */
5572 }
5573
5574 /* Make the OLDDECL weak; it's OLDDECL that we'll be keeping. */
5575 mark_weak (olddecl);
5576 }
5577 else
5578 /* OLDDECL was weak, but NEWDECL was not explicitly marked as
5579 weak. Just update NEWDECL to indicate that it's weak too. */
5580 mark_weak (newdecl);
5581 }
5582
5583 /* Declare DECL to be a weak symbol. */
5584
5585 void
declare_weak(tree decl)5586 declare_weak (tree decl)
5587 {
5588 gcc_assert (TREE_CODE (decl) != FUNCTION_DECL || !TREE_ASM_WRITTEN (decl));
5589 if (! TREE_PUBLIC (decl))
5590 {
5591 error ("weak declaration of %q+D must be public", decl);
5592 return;
5593 }
5594 else if (!TARGET_SUPPORTS_WEAK)
5595 warning (0, "weak declaration of %q+D not supported", decl);
5596
5597 mark_weak (decl);
5598 if (!lookup_attribute ("weak", DECL_ATTRIBUTES (decl)))
5599 DECL_ATTRIBUTES (decl)
5600 = tree_cons (get_identifier ("weak"), NULL, DECL_ATTRIBUTES (decl));
5601 }
5602
/* Emit the target-specific directive that actually marks DECL weak in the
   assembly output, if DECL has been used.  Exactly one of ASM_WEAKEN_DECL,
   ASM_WEAKEN_LABEL or ASM_OUTPUT_WEAK_ALIAS is tried, in that order of
   preference; with only ASM_OUTPUT_WEAK_ALIAS available a plain weak
   declaration cannot be emitted, so we warn once and do nothing.  */

static void
weak_finish_1 (tree decl)
{
#if defined (ASM_WEAKEN_DECL) || defined (ASM_WEAKEN_LABEL)
  const char *const name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
#endif

  if (! TREE_USED (decl))
    return;

#ifdef ASM_WEAKEN_DECL
  ASM_WEAKEN_DECL (asm_out_file, decl, name, NULL);
#else
#ifdef ASM_WEAKEN_LABEL
  ASM_WEAKEN_LABEL (asm_out_file, name);
#else
#ifdef ASM_OUTPUT_WEAK_ALIAS
  {
    static bool warn_once = 0;
    if (! warn_once)
      {
	warning (0, "only weak aliases are supported in this configuration");
	warn_once = 1;
      }
    return;
  }
#endif
#endif
#endif
}
5633
5634 /* Fiven an assembly name, find the decl it is associated with. */
5635 static tree
find_decl(tree target)5636 find_decl (tree target)
5637 {
5638 symtab_node *node = symtab_node::get_for_asmname (target);
5639 if (node)
5640 return node->decl;
5641 return NULL_TREE;
5642 }
5643
/* This TREE_LIST contains weakref targets: TREE_PURPOSE is the alias decl
   and TREE_VALUE its target assembler name (see do_assemble_alias, which
   adds entries, and weak_finish, which drains them).  */

static GTY(()) tree weakref_targets;
5647
/* Emit any pending weak declarations: first process the weakref targets
   collected in WEAKREF_TARGETS (pruning WEAK_DECLS entries they subsume),
   then emit a weaken directive for each remaining entry of WEAK_DECLS.  */

void
weak_finish (void)
{
  tree t;

  for (t = weakref_targets; t; t = TREE_CHAIN (t))
    {
      tree alias_decl = TREE_PURPOSE (t);
      tree target = ultimate_transparent_alias_target (&TREE_VALUE (t));

      if (! TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (alias_decl)))
	/* Remove alias_decl from the weak list, but leave entries for
	   the target alone.  */
	target = NULL_TREE;
#ifndef ASM_OUTPUT_WEAKREF
      else if (! TREE_SYMBOL_REFERENCED (target))
	{
	  /* Use ASM_WEAKEN_LABEL only if ASM_WEAKEN_DECL is not
	     defined, otherwise we and weak_finish_1 would use
	     different macros.  */
# if defined ASM_WEAKEN_LABEL && ! defined ASM_WEAKEN_DECL
	  ASM_WEAKEN_LABEL (asm_out_file, IDENTIFIER_POINTER (target));
# else
	  tree decl = find_decl (target);

	  if (! decl)
	    {
	      /* No decl exists for the target; synthesize an external,
		 artificial one so weak_finish_1 has something to emit.  */
	      decl = build_decl (DECL_SOURCE_LOCATION (alias_decl),
				 TREE_CODE (alias_decl), target,
				 TREE_TYPE (alias_decl));

	      DECL_EXTERNAL (decl) = 1;
	      TREE_PUBLIC (decl) = 1;
	      DECL_ARTIFICIAL (decl) = 1;
	      TREE_NOTHROW (decl) = TREE_NOTHROW (alias_decl);
	      TREE_USED (decl) = 1;
	    }

	  weak_finish_1 (decl);
# endif
	}
#endif

      {
	tree *p;
	tree t2;

	/* Remove the alias and the target from the pending weak list
	   so that we do not emit any .weak directives for the former,
	   nor multiple .weak directives for the latter.  */
	for (p = &weak_decls; (t2 = *p) ; )
	  {
	    if (TREE_VALUE (t2) == alias_decl
		|| target == DECL_ASSEMBLER_NAME (TREE_VALUE (t2)))
	      *p = TREE_CHAIN (t2);
	    else
	      p = &TREE_CHAIN (t2);
	  }

	/* Remove other weakrefs to the same target, to speed things up.  */
	for (p = &TREE_CHAIN (t); (t2 = *p) ; )
	  {
	    if (target == ultimate_transparent_alias_target (&TREE_VALUE (t2)))
	      *p = TREE_CHAIN (t2);
	    else
	      p = &TREE_CHAIN (t2);
	  }
      }
    }

  for (t = weak_decls; t; t = TREE_CHAIN (t))
    {
      tree decl = TREE_VALUE (t);

      weak_finish_1 (decl);
    }
}
5727
/* Emit the assembly bits to indicate that DECL is globally visible.
   A weak decl is made visible via a weaken directive instead of a plain
   globalize, and is then pruned from the pending weak lists so no duplicate
   directives are emitted later by weak_finish.  */

static void
globalize_decl (tree decl)
{

#if defined (ASM_WEAKEN_LABEL) || defined (ASM_WEAKEN_DECL)
  if (DECL_WEAK (decl))
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
      tree *p, t;

#ifdef ASM_WEAKEN_DECL
      ASM_WEAKEN_DECL (asm_out_file, decl, name, 0);
#else
      ASM_WEAKEN_LABEL (asm_out_file, name);
#endif

      /* Remove this function from the pending weak list so that
	 we do not emit multiple .weak directives for it.  */
      for (p = &weak_decls; (t = *p) ; )
	{
	  if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t)))
	    *p = TREE_CHAIN (t);
	  else
	    p = &TREE_CHAIN (t);
	}

      /* Remove weakrefs to the same target from the pending weakref
	 list, for the same reason.  */
      for (p = &weakref_targets; (t = *p) ; )
	{
	  if (DECL_ASSEMBLER_NAME (decl)
	      == ultimate_transparent_alias_target (&TREE_VALUE (t)))
	    *p = TREE_CHAIN (t);
	  else
	    p = &TREE_CHAIN (t);
	}

      return;
    }
#endif

  targetm.asm_out.globalize_decl_name (asm_out_file, decl);
}
5773
/* Pending (decl, target) alias pairs.  NOTE(review): populated and drained
   outside this chunk — confirm consumers before relying on ordering.  */
vec<alias_pair, va_gc> *alias_pairs;
5775
5776 /* Output the assembler code for a define (equate) using ASM_OUTPUT_DEF
5777 or ASM_OUTPUT_DEF_FROM_DECLS. The function defines the symbol whose
5778 tree node is DECL to have the value of the tree node TARGET. */
5779
void
do_assemble_alias (tree decl, tree target)
{
  tree id;

  /* Emulated TLS had better not get this var.  */
  gcc_assert (!(!targetm.have_tls
		&& VAR_P (decl)
		&& DECL_THREAD_LOCAL_P (decl)));

  /* Emit each alias at most once.  */
  if (TREE_ASM_WRITTEN (decl))
    return;

  /* Resolve transparent-alias chains for both the alias name and the
     target before emitting anything.  */
  id = DECL_ASSEMBLER_NAME (decl);
  ultimate_transparent_alias_target (&id);
  ultimate_transparent_alias_target (&target);

  /* We must force creation of DECL_RTL for debug info generation, even though
     we don't use it here.  */
  make_decl_rtl (decl);

  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (DECL_ASSEMBLER_NAME (decl)) = 1;
  TREE_ASM_WRITTEN (id) = 1;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    {
      /* Remember the target so weak_finish can emit a .weak for it if
	 nothing else references the symbol.  */
      if (!TREE_SYMBOL_REFERENCED (target))
	weakref_targets = tree_cons (decl, target, weakref_targets);

#ifdef ASM_OUTPUT_WEAKREF
      ASM_OUTPUT_WEAKREF (asm_out_file, decl,
			  IDENTIFIER_POINTER (id),
			  IDENTIFIER_POINTER (target));
#else
      if (!TARGET_SUPPORTS_WEAK)
	{
	  error_at (DECL_SOURCE_LOCATION (decl),
		    "weakref is not supported in this configuration");
	  return;
	}
#endif
      return;
    }

#ifdef ASM_OUTPUT_DEF
  tree orig_decl = decl;

  /* For an instrumentation clone, visibility decisions follow the
     instrumented original.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && cgraph_node::get (decl)->instrumentation_clone
      && cgraph_node::get (decl)->instrumented_version)
    orig_decl = cgraph_node::get (decl)->instrumented_version->decl;

  /* Make name accessible from other files, if appropriate.  */

  if (TREE_PUBLIC (decl) || TREE_PUBLIC (orig_decl))
    {
      globalize_decl (decl);
      maybe_assemble_visibility (decl);
    }
  if (TREE_CODE (decl) == FUNCTION_DECL
      && cgraph_node::get (decl)->ifunc_resolver)
    {
#if defined (ASM_OUTPUT_TYPE_DIRECTIVE)
      if (targetm.has_ifunc_p ())
	ASM_OUTPUT_TYPE_DIRECTIVE
	  (asm_out_file, IDENTIFIER_POINTER (id),
	   IFUNC_ASM_TYPE);
      else
#endif
	error_at (DECL_SOURCE_LOCATION (decl),
		  "ifunc is not supported on this target");
    }

# ifdef ASM_OUTPUT_DEF_FROM_DECLS
  ASM_OUTPUT_DEF_FROM_DECLS (asm_out_file, decl, target);
# else
  ASM_OUTPUT_DEF (asm_out_file,
		  IDENTIFIER_POINTER (id),
		  IDENTIFIER_POINTER (target));
# endif
#elif defined (ASM_OUTPUT_WEAK_ALIAS) || defined (ASM_WEAKEN_DECL)
  /* No plain alias support: fall back to emitting a weak alias.  */
  {
    const char *name;
    tree *p, t;

    name = IDENTIFIER_POINTER (id);
# ifdef ASM_WEAKEN_DECL
    ASM_WEAKEN_DECL (asm_out_file, decl, name, IDENTIFIER_POINTER (target));
# else
    ASM_OUTPUT_WEAK_ALIAS (asm_out_file, name, IDENTIFIER_POINTER (target));
# endif
    /* Remove this function from the pending weak list so that
       we do not emit multiple .weak directives for it.  */
    for (p = &weak_decls; (t = *p) ; )
      if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t))
	  || id == DECL_ASSEMBLER_NAME (TREE_VALUE (t)))
	*p = TREE_CHAIN (t);
      else
	p = &TREE_CHAIN (t);

    /* Remove weakrefs to the same target from the pending weakref
       list, for the same reason.  */
    for (p = &weakref_targets; (t = *p) ; )
      {
	if (id == ultimate_transparent_alias_target (&TREE_VALUE (t)))
	  *p = TREE_CHAIN (t);
	else
	  p = &TREE_CHAIN (t);
      }
  }
#endif
}
5893
5894 /* Emit an assembler directive to make the symbol for DECL an alias to
5895 the symbol for TARGET. */
5896
void
assemble_alias (tree decl, tree target)
{
  tree target_decl;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    {
      /* Diagnose weakref-specific constraints: no self-targeting cycles
	 and no public linkage.  */
      tree alias = DECL_ASSEMBLER_NAME (decl);

      ultimate_transparent_alias_target (&target);

      if (alias == target)
	error ("weakref %q+D ultimately targets itself", decl);
      if (TREE_PUBLIC (decl))
	error ("weakref %q+D must have static linkage", decl);
    }
  else
    {
#if !defined (ASM_OUTPUT_DEF)
# if !defined(ASM_OUTPUT_WEAK_ALIAS) && !defined (ASM_WEAKEN_DECL)
      /* Target supports neither plain nor weak aliases.  */
      error_at (DECL_SOURCE_LOCATION (decl),
		"alias definitions not supported in this configuration");
      TREE_ASM_WRITTEN (decl) = 1;
      return;
# else
      /* Only weak aliases can be emitted here; reject anything else.  */
      if (!DECL_WEAK (decl))
	{
	  /* NB: ifunc_resolver isn't set when an error is detected.  */
	  if (TREE_CODE (decl) == FUNCTION_DECL
	      && lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "ifunc is not supported in this configuration");
	  else
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "only weak aliases are supported in this configuration");
	  TREE_ASM_WRITTEN (decl) = 1;
	  return;
	}
# endif
#endif
    }
  TREE_USED (decl) = 1;

  /* Allow aliases to aliases.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    cgraph_node::get_create (decl)->alias = true;
  else
    varpool_node::get_create (decl)->alias = true;

  /* If the target has already been emitted, we don't have to queue the
     alias.  This saves a tad of memory.  */
  if (symtab->global_info_ready)
    target_decl = find_decl (target);
  else
    target_decl= NULL;
  if ((target_decl && TREE_ASM_WRITTEN (target_decl))
      || symtab->state >= EXPANSION)
    do_assemble_alias (decl, target);
  else
    {
      /* Target not written yet: defer via the alias_pairs queue.  */
      alias_pair p = {decl, target};
      vec_safe_push (alias_pairs, p);
    }
}
5961
5962 /* Record and output a table of translations from original function
5963 to its transaction aware clone. Note that tm_pure functions are
5964 considered to be their own clone. */
5965
5966 struct tm_clone_hasher : ggc_cache_ptr_hash<tree_map>
5967 {
hashtm_clone_hasher5968 static hashval_t hash (tree_map *m) { return tree_map_hash (m); }
equaltm_clone_hasher5969 static bool equal (tree_map *a, tree_map *b) { return tree_map_eq (a, b); }
5970
5971 static int
keep_cache_entrytm_clone_hasher5972 keep_cache_entry (tree_map *&e)
5973 {
5974 return ggc_marked_p (e->base.from);
5975 }
5976 };
5977
/* GC-cached table mapping an original function decl to its transaction
   aware clone; populated by record_tm_clone_pair and queried by
   get_tm_clone_pair.  */
static GTY((cache)) hash_table<tm_clone_hasher> *tm_clone_hash;
5979
5980 void
record_tm_clone_pair(tree o,tree n)5981 record_tm_clone_pair (tree o, tree n)
5982 {
5983 struct tree_map **slot, *h;
5984
5985 if (tm_clone_hash == NULL)
5986 tm_clone_hash = hash_table<tm_clone_hasher>::create_ggc (32);
5987
5988 h = ggc_alloc<tree_map> ();
5989 h->hash = htab_hash_pointer (o);
5990 h->base.from = o;
5991 h->to = n;
5992
5993 slot = tm_clone_hash->find_slot_with_hash (h, h->hash, INSERT);
5994 *slot = h;
5995 }
5996
5997 tree
get_tm_clone_pair(tree o)5998 get_tm_clone_pair (tree o)
5999 {
6000 if (tm_clone_hash)
6001 {
6002 struct tree_map *h, in;
6003
6004 in.base.from = o;
6005 in.hash = htab_hash_pointer (o);
6006 h = tm_clone_hash->find_with_hash (&in, in.hash);
6007 if (h)
6008 return h->to;
6009 }
6010 return NULL_TREE;
6011 }
6012
/* A (from, to) clone pair extracted from tm_clone_hash, paired with
   FROM's DECL_UID so the pairs can be sorted deterministically before
   being dumped to the .tm_clone_table section.  */
struct tm_alias_pair
{
  unsigned int uid;	/* DECL_UID of FROM; the sort key.  */
  tree from;		/* The original function.  */
  tree to;		/* Its transactional clone.  */
};
6019
6020
6021 /* Dump the actual pairs to the .tm_clone_table section. */
6022
/* Write each usable pair in TM_ALIAS_PAIRS to the clone table section
   as two consecutive pointers (original, clone); the section switch is
   deferred until the first pair actually emitted.  */
static void
dump_tm_clone_pairs (vec<tm_alias_pair> tm_alias_pairs)
{
  unsigned i;
  tm_alias_pair *p;
  bool switched = false;

  FOR_EACH_VEC_ELT (tm_alias_pairs, i, p)
    {
      tree src = p->from;
      tree dst = p->to;
      struct cgraph_node *src_n = cgraph_node::get (src);
      struct cgraph_node *dst_n = cgraph_node::get (dst);

      /* The function ipa_tm_create_version() marks the clone as needed if
	 the original function was needed.  But we also mark the clone as
	 needed if we ever called the clone indirectly through
	 TM_GETTMCLONE.  If neither of these are true, we didn't generate
	 a clone, and we didn't call it indirectly... no sense keeping it
	 in the clone table.  */
      if (!dst_n || !dst_n->definition)
	continue;

      /* This covers the case where we have optimized the original
	 function away, and only access the transactional clone.  */
      if (!src_n || !src_n->definition)
	continue;

      if (!switched)
	{
	  switch_to_section (targetm.asm_out.tm_clone_table_section ());
	  assemble_align (POINTER_SIZE);
	  switched = true;
	}

      /* Emit the pair as two adjacent pointer-sized entries.  */
      assemble_integer (XEXP (DECL_RTL (src), 0),
			POINTER_SIZE_UNITS, POINTER_SIZE, 1);
      assemble_integer (XEXP (DECL_RTL (dst), 0),
			POINTER_SIZE_UNITS, POINTER_SIZE, 1);
    }
}
6064
6065 /* Provide a default for the tm_clone_table section. */
6066
section *
default_clone_table_section (void)
{
  /* NOTE(review): the literal 3 is the RELOC argument to
     get_named_section -- presumably marking both local and global
     relocations; confirm against get_named_section's contract.  */
  return get_named_section (NULL, ".tm_clone_table", 3);
}
6072
/* Helper comparison function for qsorting tm_alias_pair structs by the
   DECL_UID stored in their UID field.  */
6075
6076 static int
tm_alias_pair_cmp(const void * x,const void * y)6077 tm_alias_pair_cmp (const void *x, const void *y)
6078 {
6079 const tm_alias_pair *p1 = (const tm_alias_pair *) x;
6080 const tm_alias_pair *p2 = (const tm_alias_pair *) y;
6081 if (p1->uid < p2->uid)
6082 return -1;
6083 if (p1->uid > p2->uid)
6084 return 1;
6085 return 0;
6086 }
6087
/* Flush the recorded TM clone pairs to the .tm_clone_table section in a
   stable order and release the clone table.  */
void
finish_tm_clone_pairs (void)
{
  vec<tm_alias_pair> tm_alias_pairs = vNULL;

  if (tm_clone_hash == NULL)
    return;

  /* We need a deterministic order for the .tm_clone_table, otherwise
     we will get bootstrap comparison failures, so dump the hash table
     to a vector, sort it, and dump the vector.  */

  /* Dump the hashtable to a vector.  */
  tree_map *map;
  hash_table<tm_clone_hasher>::iterator iter;
  FOR_EACH_HASH_TABLE_ELEMENT (*tm_clone_hash, map, tree_map *, iter)
    {
      tm_alias_pair p = {DECL_UID (map->base.from), map->base.from, map->to};
      tm_alias_pairs.safe_push (p);
    }
  /* Sort it.  */
  tm_alias_pairs.qsort (tm_alias_pair_cmp);

  /* Dump it.  */
  dump_tm_clone_pairs (tm_alias_pairs);

  /* The table is no longer needed once emitted.  */
  tm_clone_hash->empty ();
  tm_clone_hash = NULL;
  tm_alias_pairs.release ();
}
6118
6119
6120 /* Emit an assembler directive to set symbol for DECL visibility to
6121 the visibility type VIS, which must not be VISIBILITY_DEFAULT. */
6122
void
default_assemble_visibility (tree decl ATTRIBUTE_UNUSED,
			     int vis ATTRIBUTE_UNUSED)
{
#ifdef HAVE_GAS_HIDDEN
  /* Directive names indexed by symbol_visibility; VISIBILITY_DEFAULT
     (slot 0) must never reach here, hence NULL.  */
  static const char * const visibility_types[] = {
    NULL, "protected", "hidden", "internal"
  };

  const char *name, *type;
  tree id;

  /* Emit the directive against the ultimate alias target's name.  */
  id = DECL_ASSEMBLER_NAME (decl);
  ultimate_transparent_alias_target (&id);
  name = IDENTIFIER_POINTER (id);

  type = visibility_types[vis];

  fprintf (asm_out_file, "\t.%s\t", type);
  assemble_name (asm_out_file, name);
  fprintf (asm_out_file, "\n");
#else
  /* No assembler support: warn, but only for user-written decls.  */
  if (!DECL_ARTIFICIAL (decl))
    warning (OPT_Wattributes, "visibility attribute not supported "
	     "in this configuration; ignored");
#endif
}
6150
6151 /* A helper function to call assemble_visibility when needed for a decl. */
6152
6153 int
maybe_assemble_visibility(tree decl)6154 maybe_assemble_visibility (tree decl)
6155 {
6156 enum symbol_visibility vis = DECL_VISIBILITY (decl);
6157
6158 if (TREE_CODE (decl) == FUNCTION_DECL
6159 && cgraph_node::get (decl)
6160 && cgraph_node::get (decl)->instrumentation_clone
6161 && cgraph_node::get (decl)->instrumented_version)
6162 vis = DECL_VISIBILITY (cgraph_node::get (decl)->instrumented_version->decl);
6163
6164 if (vis != VISIBILITY_DEFAULT)
6165 {
6166 targetm.asm_out.assemble_visibility (decl, vis);
6167 return 1;
6168 }
6169 else
6170 return 0;
6171 }
6172
6173 /* Returns 1 if the target configuration supports defining public symbols
6174 so that one of them will be chosen at link time instead of generating a
6175 multiply-defined symbol error, whether through the use of weak symbols or
6176 a target-specific mechanism for having duplicates discarded. */
6177
6178 int
supports_one_only(void)6179 supports_one_only (void)
6180 {
6181 if (SUPPORTS_ONE_ONLY)
6182 return 1;
6183 return TARGET_SUPPORTS_WEAK;
6184 }
6185
6186 /* Set up DECL as a public symbol that can be defined in multiple
6187 translation units without generating a linker error. */
6188
void
make_decl_one_only (tree decl, tree comdat_group)
{
  struct symtab_node *symbol;
  gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));

  TREE_PUBLIC (decl) = 1;

  if (VAR_P (decl))
    symbol = varpool_node::get_create (decl);
  else
    symbol = cgraph_node::get_create (decl);

  if (SUPPORTS_ONE_ONLY)
    {
      /* Preferred mechanism: put the symbol in a COMDAT group so the
	 linker keeps exactly one copy.  */
#ifdef MAKE_DECL_ONE_ONLY
      MAKE_DECL_ONE_ONLY (decl);
#endif
      symbol->set_comdat_group (comdat_group);
    }
  else if (VAR_P (decl)
	   && (DECL_INITIAL (decl) == 0
	       || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
    /* Uninitialized variables can fall back to common linkage.  */
    DECL_COMMON (decl) = 1;
  else
    {
      /* Last resort: a weak definition; the target must support it.  */
      gcc_assert (TARGET_SUPPORTS_WEAK);
      DECL_WEAK (decl) = 1;
    }
}
6219
/* One-time initialization of varasm's global state: the section and
   constant-descriptor hash tables, the shared constant pool, and every
   standard section object the target configuration provides.  */
void
init_varasm_once (void)
{
  section_htab = hash_table<section_hasher>::create_ggc (31);
  object_block_htab = hash_table<object_block_hasher>::create_ggc (31);
  const_desc_htab = hash_table<tree_descriptor_hasher>::create_ggc (1009);

  shared_constant_pool = create_constant_pool ();

  /* Each named section below exists only if the target defines the
     corresponding *_SECTION_ASM_OP macro.  */
#ifdef TEXT_SECTION_ASM_OP
  text_section = get_unnamed_section (SECTION_CODE, output_section_asm_op,
				      TEXT_SECTION_ASM_OP);
#endif

#ifdef DATA_SECTION_ASM_OP
  data_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
				      DATA_SECTION_ASM_OP);
#endif

#ifdef SDATA_SECTION_ASM_OP
  sdata_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
				       SDATA_SECTION_ASM_OP);
#endif

#ifdef READONLY_DATA_SECTION_ASM_OP
  readonly_data_section = get_unnamed_section (0, output_section_asm_op,
					       READONLY_DATA_SECTION_ASM_OP);
#endif

#ifdef CTORS_SECTION_ASM_OP
  ctors_section = get_unnamed_section (0, output_section_asm_op,
				       CTORS_SECTION_ASM_OP);
#endif

#ifdef DTORS_SECTION_ASM_OP
  dtors_section = get_unnamed_section (0, output_section_asm_op,
				       DTORS_SECTION_ASM_OP);
#endif

#ifdef BSS_SECTION_ASM_OP
  bss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
				     output_section_asm_op,
				     BSS_SECTION_ASM_OP);
#endif

#ifdef SBSS_SECTION_ASM_OP
  sbss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
				      output_section_asm_op,
				      SBSS_SECTION_ASM_OP);
#endif

  /* "No-switch" sections emit their own directives per object rather
     than a section-switch directive.  */
  tls_comm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
					   | SECTION_COMMON, emit_tls_common);
  lcomm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
					| SECTION_COMMON, emit_local);
  comm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
				       | SECTION_COMMON, emit_common);

#if defined ASM_OUTPUT_ALIGNED_BSS
  bss_noswitch_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS,
					       emit_bss);
#endif

  /* Let the target create or override its own sections.  */
  targetm.asm_out.init_sections ();

  /* Without a read-only data section, constants go into text.  */
  if (readonly_data_section == NULL)
    readonly_data_section = text_section;

#ifdef ASM_OUTPUT_EXTERNAL
  pending_assemble_externals_set = new hash_set<tree>;
#endif
}
6292
6293 enum tls_model
decl_default_tls_model(const_tree decl)6294 decl_default_tls_model (const_tree decl)
6295 {
6296 enum tls_model kind;
6297 bool is_local;
6298
6299 is_local = targetm.binds_local_p (decl);
6300 if (!flag_shlib)
6301 {
6302 if (is_local)
6303 kind = TLS_MODEL_LOCAL_EXEC;
6304 else
6305 kind = TLS_MODEL_INITIAL_EXEC;
6306 }
6307
6308 /* Local dynamic is inefficient when we're not combining the
6309 parts of the address. */
6310 else if (optimize && is_local)
6311 kind = TLS_MODEL_LOCAL_DYNAMIC;
6312 else
6313 kind = TLS_MODEL_GLOBAL_DYNAMIC;
6314 if (kind < flag_tls_default)
6315 kind = flag_tls_default;
6316
6317 return kind;
6318 }
6319
6320 /* Select a set of attributes for section NAME based on the properties
6321 of DECL and whether or not RELOC indicates that DECL's initializer
6322 might contain runtime relocations.
6323
6324 We make the section read-only and executable for a function decl,
6325 read-only for a const data decl, and writable for a non-const data decl. */
6326
unsigned int
default_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags;

  /* Base flags come from the decl's category when we have one, else
     from well-known section names.  */
  if (decl && TREE_CODE (decl) == FUNCTION_DECL)
    flags = SECTION_CODE;
  else if (decl)
    {
      enum section_category category
	= categorize_decl_for_section (decl, reloc);
      if (decl_readonly_section_1 (category))
	flags = 0;
      else if (category == SECCAT_DATA_REL_RO
	       || category == SECCAT_DATA_REL_RO_LOCAL)
	flags = SECTION_WRITE | SECTION_RELRO;
      else
	flags = SECTION_WRITE;
    }
  else
    {
      flags = SECTION_WRITE;
      if (strcmp (name, ".data.rel.ro") == 0
	  || strcmp (name, ".data.rel.ro.local") == 0)
	flags |= SECTION_RELRO;
    }

  if (decl && DECL_P (decl) && DECL_COMDAT_GROUP (decl))
    flags |= SECTION_LINKONCE;

  if (strcmp (name, ".vtable_map_vars") == 0)
    flags |= SECTION_LINKONCE;

  if (decl && VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
    flags |= SECTION_TLS | SECTION_WRITE;

  /* Recognize BSS-style sections (and their small/linkonce variants)
     by name.  */
  if (strcmp (name, ".bss") == 0
      || strncmp (name, ".bss.", 5) == 0
      || strncmp (name, ".gnu.linkonce.b.", 16) == 0
      || strcmp (name, ".persistent.bss") == 0
      || strcmp (name, ".sbss") == 0
      || strncmp (name, ".sbss.", 6) == 0
      || strncmp (name, ".gnu.linkonce.sb.", 17) == 0)
    flags |= SECTION_BSS;

  if (strcmp (name, ".tdata") == 0
      || strncmp (name, ".tdata.", 7) == 0
      || strncmp (name, ".gnu.linkonce.td.", 17) == 0)
    flags |= SECTION_TLS;

  if (strcmp (name, ".tbss") == 0
      || strncmp (name, ".tbss.", 6) == 0
      || strncmp (name, ".gnu.linkonce.tb.", 17) == 0)
    flags |= SECTION_TLS | SECTION_BSS;

  /* Various sections have special ELF types that the assembler will
     assign by default based on the name.  They are neither SHT_PROGBITS
     nor SHT_NOBITS, so when changing sections we don't want to print a
     section type (@progbits or @nobits).  Rather than duplicating the
     assembler's knowledge of what those special name patterns are, just
     let the assembler choose the type if we don't know a specific
     reason to set it to something other than the default.  SHT_PROGBITS
     is the default for sections whose name is not specially known to
     the assembler, so it does no harm to leave the choice to the
     assembler when @progbits is the best thing we know to use.  If
     someone is silly enough to emit code or TLS variables to one of
     these sections, then don't handle them specially.

     default_elf_asm_named_section (below) handles the BSS, TLS, ENTSIZE, and
     LINKONCE cases when NOTYPE is not set, so leave those to its logic.  */
  if (!(flags & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE))
      && !(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE)))
    flags |= SECTION_NOTYPE;

  return flags;
}
6403
6404 /* Return true if the target supports some form of global BSS,
6405 either through bss_noswitch_section, or by selecting a BSS
6406 section in TARGET_ASM_SELECT_SECTION. */
6407
6408 bool
have_global_bss_p(void)6409 have_global_bss_p (void)
6410 {
6411 return bss_noswitch_section || targetm.have_switchable_bss_sections;
6412 }
6413
6414 /* Output assembly to switch to section NAME with attribute FLAGS.
6415 Four variants for common object file formats. */
6416
void
default_no_named_section (const char *name ATTRIBUTE_UNUSED,
			  unsigned int flags ATTRIBUTE_UNUSED,
			  tree decl ATTRIBUTE_UNUSED)
{
  /* Some object formats don't support named sections at all.  The
     front-end should already have flagged this as an error.  */
  gcc_unreachable ();
}
6426
/* Flag character placed in a .section flag string to mark a TLS
   section; targets may predefine a different character.  */
#ifndef TLS_SECTION_ASM_FLAG
#define TLS_SECTION_ASM_FLAG 'T'
#endif
6430
void
default_elf_asm_named_section (const char *name, unsigned int flags,
			       tree decl)
{
  /* 11 bytes: enough for every flag character plus NUL, or for the
     "0x%08x" numeric form plus NUL.  */
  char flagchars[11], *f = flagchars;
  unsigned int numeric_value = 0;

  /* If we have already declared this section, we can use an
     abbreviated form to switch back to it -- unless this section is
     part of a COMDAT groups, in which case GAS requires the full
     declaration every time.  */
  if (!(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
      && (flags & SECTION_DECLARED))
    {
      fprintf (asm_out_file, "\t.section\t%s\n", name);
      return;
    }

  /* If we have a machine specific flag, then use the numeric value to pass
     this on to GAS.  */
  if (targetm.asm_out.elf_flags_numeric (flags, &numeric_value))
      snprintf (f, sizeof (flagchars), "0x%08x", numeric_value);
  else
    {
      /* Translate SECTION_* bits into GAS flag characters.  */
      if (!(flags & SECTION_DEBUG))
	*f++ = 'a';
#if defined (HAVE_GAS_SECTION_EXCLUDE) && HAVE_GAS_SECTION_EXCLUDE == 1
      if (flags & SECTION_EXCLUDE)
	*f++ = 'e';
#endif
      if (flags & SECTION_WRITE)
	*f++ = 'w';
      if (flags & SECTION_CODE)
	*f++ = 'x';
      if (flags & SECTION_SMALL)
	*f++ = 's';
      if (flags & SECTION_MERGE)
	*f++ = 'M';
      if (flags & SECTION_STRINGS)
	*f++ = 'S';
      if (flags & SECTION_TLS)
	*f++ = TLS_SECTION_ASM_FLAG;
      if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
	*f++ = 'G';
#ifdef MACH_DEP_SECTION_ASM_FLAG
      if (flags & SECTION_MACH_DEP)
	*f++ = MACH_DEP_SECTION_ASM_FLAG;
#endif
      *f = '\0';
    }

  fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);

  /* default_section_type_flags (above) knows which flags need special
     handling here, and sets NOTYPE when none of these apply so that the
     assembler's logic for default types can apply to user-chosen
     section names.  */
  if (!(flags & SECTION_NOTYPE))
    {
      const char *type;
      const char *format;

      if (flags & SECTION_BSS)
	type = "nobits";
      else
	type = "progbits";

      format = ",@%s";
      /* On platforms that use "@" as the assembly comment character,
	 use "%" instead.  */
      if (strcmp (ASM_COMMENT_START, "@") == 0)
	format = ",%%%s";
      fprintf (asm_out_file, format, type);

      if (flags & SECTION_ENTSIZE)
	fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
      if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
	{
	  /* DECL may be either the decl owning the section or, for a
	     one-only identifier, the group name itself.  */
	  if (TREE_CODE (decl) == IDENTIFIER_NODE)
	    fprintf (asm_out_file, ",%s,comdat", IDENTIFIER_POINTER (decl));
	  else
	    fprintf (asm_out_file, ",%s,comdat",
		     IDENTIFIER_POINTER (DECL_COMDAT_GROUP (decl)));
	}
    }

  putc ('\n', asm_out_file);
}
6519
6520 void
default_coff_asm_named_section(const char * name,unsigned int flags,tree decl ATTRIBUTE_UNUSED)6521 default_coff_asm_named_section (const char *name, unsigned int flags,
6522 tree decl ATTRIBUTE_UNUSED)
6523 {
6524 char flagchars[8], *f = flagchars;
6525
6526 if (flags & SECTION_WRITE)
6527 *f++ = 'w';
6528 if (flags & SECTION_CODE)
6529 *f++ = 'x';
6530 *f = '\0';
6531
6532 fprintf (asm_out_file, "\t.section\t%s,\"%s\"\n", name, flagchars);
6533 }
6534
6535 void
default_pe_asm_named_section(const char * name,unsigned int flags,tree decl)6536 default_pe_asm_named_section (const char *name, unsigned int flags,
6537 tree decl)
6538 {
6539 default_coff_asm_named_section (name, flags, decl);
6540
6541 if (flags & SECTION_LINKONCE)
6542 {
6543 /* Functions may have been compiled at various levels of
6544 optimization so we can't use `same_size' here.
6545 Instead, have the linker pick one. */
6546 fprintf (asm_out_file, "\t.linkonce %s\n",
6547 (flags & SECTION_CODE ? "discard" : "same_size"));
6548 }
6549 }
6550
6551 /* The lame default section selector. */
6552
6553 section *
default_select_section(tree decl,int reloc,unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)6554 default_select_section (tree decl, int reloc,
6555 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
6556 {
6557 if (DECL_P (decl))
6558 {
6559 if (decl_readonly_section (decl, reloc))
6560 return readonly_data_section;
6561 }
6562 else if (TREE_CODE (decl) == CONSTRUCTOR)
6563 {
6564 if (! ((flag_pic && reloc)
6565 || !TREE_READONLY (decl)
6566 || TREE_SIDE_EFFECTS (decl)
6567 || !TREE_CONSTANT (decl)))
6568 return readonly_data_section;
6569 }
6570 else if (TREE_CODE (decl) == STRING_CST)
6571 return readonly_data_section;
6572 else if (! (flag_pic && reloc))
6573 return readonly_data_section;
6574
6575 return data_section;
6576 }
6577
/* Classify DECL into a section category given RELOC, the mask of
   relocation kinds its initializer needs.  */
enum section_category
categorize_decl_for_section (const_tree decl, int reloc)
{
  enum section_category ret;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    return SECCAT_TEXT;
  else if (TREE_CODE (decl) == STRING_CST)
    {
      if ((flag_sanitize & SANITIZE_ADDRESS)
	  && asan_protect_global (CONST_CAST_TREE (decl)))
	/* or !flag_merge_constants */
	return SECCAT_RODATA;
      else
	return SECCAT_RODATA_MERGE_STR;
    }
  else if (VAR_P (decl))
    {
      tree d = CONST_CAST_TREE (decl);
      if (bss_initializer_p (decl))
	ret = SECCAT_BSS;
      else if (! TREE_READONLY (decl)
	       || TREE_SIDE_EFFECTS (decl)
	       || (DECL_INITIAL (decl)
		   && ! TREE_CONSTANT (DECL_INITIAL (decl))))
	{
	  /* Here the reloc_rw_mask is not testing whether the section should
	     be read-only or not, but whether the dynamic link will have to
	     do something.  If so, we wish to segregate the data in order to
	     minimize cache misses inside the dynamic linker.  */
	  if (reloc & targetm.asm_out.reloc_rw_mask ())
	    ret = reloc == 1 ? SECCAT_DATA_REL_LOCAL : SECCAT_DATA_REL;
	  else
	    ret = SECCAT_DATA;
	}
      else if (reloc & targetm.asm_out.reloc_rw_mask ())
	ret = reloc == 1 ? SECCAT_DATA_REL_RO_LOCAL : SECCAT_DATA_REL_RO;
      else if (reloc || flag_merge_constants < 2
	       || ((flag_sanitize & SANITIZE_ADDRESS)
		   /* PR 81697: for architectures that use section anchors we
		      need to ignore DECL_RTL_SET_P (decl) for string constants
		      inside this asan_protect_global call because otherwise
		      we'll wrongly put them into SECCAT_RODATA_MERGE_CONST
		      section, set DECL_RTL (decl) later on and add DECL to
		      protected globals via successive asan_protect_global
		      calls.  In this scenario we'll end up with wrong
		      alignment of these strings at runtime and possible ASan
		      false positives.  */
		   && asan_protect_global (d, use_object_blocks_p ()
					      && use_blocks_for_decl_p (d))))
	/* C and C++ don't allow different variables to share the same
	   location.  -fmerge-all-constants allows even that (at the
	   expense of not conforming).  */
	ret = SECCAT_RODATA;
      else if (DECL_INITIAL (decl)
	       && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST)
	ret = SECCAT_RODATA_MERGE_STR_INIT;
      else
	ret = SECCAT_RODATA_MERGE_CONST;
    }
  else if (TREE_CODE (decl) == CONSTRUCTOR)
    {
      if ((reloc & targetm.asm_out.reloc_rw_mask ())
	  || TREE_SIDE_EFFECTS (decl)
	  || ! TREE_CONSTANT (decl))
	ret = SECCAT_DATA;
      else
	ret = SECCAT_RODATA;
    }
  else
    ret = SECCAT_RODATA;

  /* There are no read-only thread-local sections.  */
  if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
    {
      /* Note that this would be *just* SECCAT_BSS, except that there's
	 no concept of a read-only thread-local-data section.  */
      if (ret == SECCAT_BSS
	  || DECL_INITIAL (decl) == NULL
	  || (flag_zero_initialized_in_bss
	      && initializer_zerop (DECL_INITIAL (decl))))
	ret = SECCAT_TBSS;
      else
	ret = SECCAT_TDATA;
    }

  /* If the target uses small data sections, select it.  */
  else if (targetm.in_small_data_p (decl))
    {
      if (ret == SECCAT_BSS)
	ret = SECCAT_SBSS;
      else if (targetm.have_srodata_section && ret == SECCAT_RODATA)
	ret = SECCAT_SRODATA;
      else
	ret = SECCAT_SDATA;
    }

  return ret;
}
6677
6678 static bool
decl_readonly_section_1(enum section_category category)6679 decl_readonly_section_1 (enum section_category category)
6680 {
6681 switch (category)
6682 {
6683 case SECCAT_RODATA:
6684 case SECCAT_RODATA_MERGE_STR:
6685 case SECCAT_RODATA_MERGE_STR_INIT:
6686 case SECCAT_RODATA_MERGE_CONST:
6687 case SECCAT_SRODATA:
6688 return true;
6689 default:
6690 return false;
6691 }
6692 }
6693
6694 bool
decl_readonly_section(const_tree decl,int reloc)6695 decl_readonly_section (const_tree decl, int reloc)
6696 {
6697 return decl_readonly_section_1 (categorize_decl_for_section (decl, reloc));
6698 }
6699
6700 /* Select a section based on the above categorization. */
6701
section *
default_elf_select_section (tree decl, int reloc,
			    unsigned HOST_WIDE_INT align)
{
  const char *sname;
  /* Map the decl's category either directly to a prebuilt section
     object or to a conventional ELF section name.  */
  switch (categorize_decl_for_section (decl, reloc))
    {
    case SECCAT_TEXT:
      /* We're not supposed to be called on FUNCTION_DECLs.  */
      gcc_unreachable ();
    case SECCAT_RODATA:
      return readonly_data_section;
    case SECCAT_RODATA_MERGE_STR:
      return mergeable_string_section (decl, align, 0);
    case SECCAT_RODATA_MERGE_STR_INIT:
      return mergeable_string_section (DECL_INITIAL (decl), align, 0);
    case SECCAT_RODATA_MERGE_CONST:
      return mergeable_constant_section (DECL_MODE (decl), align, 0);
    case SECCAT_SRODATA:
      sname = ".sdata2";
      break;
    case SECCAT_DATA:
      return data_section;
    case SECCAT_DATA_REL:
      sname = ".data.rel";
      break;
    case SECCAT_DATA_REL_LOCAL:
      sname = ".data.rel.local";
      break;
    case SECCAT_DATA_REL_RO:
      sname = ".data.rel.ro";
      break;
    case SECCAT_DATA_REL_RO_LOCAL:
      sname = ".data.rel.ro.local";
      break;
    case SECCAT_SDATA:
      sname = ".sdata";
      break;
    case SECCAT_TDATA:
      sname = ".tdata";
      break;
    case SECCAT_BSS:
      if (bss_section)
	return bss_section;
      sname = ".bss";
      break;
    case SECCAT_SBSS:
      sname = ".sbss";
      break;
    case SECCAT_TBSS:
      sname = ".tbss";
      break;
    default:
      gcc_unreachable ();
    }

  return get_named_section (decl, sname, reloc);
}
6760
6761 /* Construct a unique section name based on the decl name and the
6762 categorization performed above. */
6763
void
default_unique_section (tree decl, int reloc)
{
  /* We only need to use .gnu.linkonce if we don't have COMDAT groups.  */
  bool one_only = DECL_ONE_ONLY (decl) && !HAVE_COMDAT_GROUP;
  const char *prefix, *name, *linkonce;
  char *string;
  tree id;

  /* One-only sections use abbreviated prefixes (they get a
     .gnu.linkonce prefix below); ordinary sections use the full
     conventional names.  */
  switch (categorize_decl_for_section (decl, reloc))
    {
    case SECCAT_TEXT:
      prefix = one_only ? ".t" : ".text";
      break;
    case SECCAT_RODATA:
    case SECCAT_RODATA_MERGE_STR:
    case SECCAT_RODATA_MERGE_STR_INIT:
    case SECCAT_RODATA_MERGE_CONST:
      prefix = one_only ? ".r" : ".rodata";
      break;
    case SECCAT_SRODATA:
      prefix = one_only ? ".s2" : ".sdata2";
      break;
    case SECCAT_DATA:
      prefix = one_only ? ".d" : ".data";
      break;
    case SECCAT_DATA_REL:
      prefix = one_only ? ".d.rel" : ".data.rel";
      break;
    case SECCAT_DATA_REL_LOCAL:
      prefix = one_only ? ".d.rel.local" : ".data.rel.local";
      break;
    case SECCAT_DATA_REL_RO:
      prefix = one_only ? ".d.rel.ro" : ".data.rel.ro";
      break;
    case SECCAT_DATA_REL_RO_LOCAL:
      prefix = one_only ? ".d.rel.ro.local" : ".data.rel.ro.local";
      break;
    case SECCAT_SDATA:
      prefix = one_only ? ".s" : ".sdata";
      break;
    case SECCAT_BSS:
      prefix = one_only ? ".b" : ".bss";
      break;
    case SECCAT_SBSS:
      prefix = one_only ? ".sb" : ".sbss";
      break;
    case SECCAT_TDATA:
      prefix = one_only ? ".td" : ".tdata";
      break;
    case SECCAT_TBSS:
      prefix = one_only ? ".tb" : ".tbss";
      break;
    default:
      gcc_unreachable ();
    }

  /* Build the section name from the decl's (alias-resolved, encoding-
     stripped) assembler name.  */
  id = DECL_ASSEMBLER_NAME (decl);
  ultimate_transparent_alias_target (&id);
  name = IDENTIFIER_POINTER (id);
  name = targetm.strip_name_encoding (name);

  /* If we're using one_only, then there needs to be a .gnu.linkonce
     prefix to the section name.  */
  linkonce = one_only ? ".gnu.linkonce" : "";

  string = ACONCAT ((linkonce, prefix, ".", name, NULL));

  set_decl_section_name (decl, string);
}
6834
6835 /* Subroutine of compute_reloc_for_rtx for leaf rtxes. */
6836
6837 static int
compute_reloc_for_rtx_1(const_rtx x)6838 compute_reloc_for_rtx_1 (const_rtx x)
6839 {
6840 switch (GET_CODE (x))
6841 {
6842 case SYMBOL_REF:
6843 return SYMBOL_REF_LOCAL_P (x) ? 1 : 2;
6844 case LABEL_REF:
6845 return 1;
6846 default:
6847 return 0;
6848 }
6849 }
6850
6851 /* Like compute_reloc_for_constant, except for an RTX. The return value
6852 is a mask for which bit 1 indicates a global relocation, and bit 0
6853 indicates a local relocation. */
6854
6855 static int
compute_reloc_for_rtx(const_rtx x)6856 compute_reloc_for_rtx (const_rtx x)
6857 {
6858 switch (GET_CODE (x))
6859 {
6860 case SYMBOL_REF:
6861 case LABEL_REF:
6862 return compute_reloc_for_rtx_1 (x);
6863
6864 case CONST:
6865 {
6866 int reloc = 0;
6867 subrtx_iterator::array_type array;
6868 FOR_EACH_SUBRTX (iter, array, x, ALL)
6869 reloc |= compute_reloc_for_rtx_1 (*iter);
6870 return reloc;
6871 }
6872
6873 default:
6874 return 0;
6875 }
6876 }
6877
6878 section *
default_select_rtx_section(machine_mode mode ATTRIBUTE_UNUSED,rtx x,unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)6879 default_select_rtx_section (machine_mode mode ATTRIBUTE_UNUSED,
6880 rtx x,
6881 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
6882 {
6883 if (compute_reloc_for_rtx (x) & targetm.asm_out.reloc_rw_mask ())
6884 return data_section;
6885 else
6886 return readonly_data_section;
6887 }
6888
section *
default_elf_select_rtx_section (machine_mode mode, rtx x,
				unsigned HOST_WIDE_INT align)
{
  /* Select an ELF section for constant-pool entry X of mode MODE and
     alignment ALIGN.  Constants needing runtime relocation go into a
     .data.rel.ro section; others into a mergeable constant section.  */
  int reloc = compute_reloc_for_rtx (x);

  /* ??? Handle small data here somehow.  */

  if (reloc & targetm.asm_out.reloc_rw_mask ())
    {
      /* reloc == 1 means only local relocations are involved; the third
	 argument mirrors that mask (1 = local only, 3 = local and
	 global) when constructing the named section.  */
      if (reloc == 1)
	return get_named_section (NULL, ".data.rel.ro.local", 1);
      else
	return get_named_section (NULL, ".data.rel.ro", 3);
    }

  return mergeable_constant_section (mode, align, 0);
}
6907
6908 /* Set the generally applicable flags on the SYMBOL_REF for EXP. */
6909
void
default_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
{
  rtx symbol;
  int flags;

  /* Careful not to prod global register variables.  */
  if (!MEM_P (rtl))
    return;
  symbol = XEXP (rtl, 0);
  if (GET_CODE (symbol) != SYMBOL_REF)
    return;

  /* Preserve any block-placement info already recorded on the symbol;
     every other flag is recomputed from DECL's current properties.  */
  flags = SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_HAS_BLOCK_INFO;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    flags |= SYMBOL_FLAG_FUNCTION;
  if (targetm.binds_local_p (decl))
    flags |= SYMBOL_FLAG_LOCAL;
  /* TLS and small-data are mutually exclusive: a thread-local variable
     is never put in small data.  */
  if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
    flags |= DECL_TLS_MODEL (decl) << SYMBOL_FLAG_TLS_SHIFT;
  else if (targetm.in_small_data_p (decl))
    flags |= SYMBOL_FLAG_SMALL;
  /* ??? Why is DECL_EXTERNAL ever set for non-PUBLIC names?  Without
     being PUBLIC, the thing *must* be defined in this translation unit.
     Prevent this buglet from being propagated into rtl code as well.  */
  if (DECL_P (decl) && DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
    flags |= SYMBOL_FLAG_EXTERNAL;

  SYMBOL_REF_FLAGS (symbol) = flags;
}
6940
6941 /* By default, we do nothing for encode_section_info, so we need not
6942 do anything but discard the '*' marker. */
6943
const char *
default_strip_name_encoding (const char *str)
{
  /* The only encoding handled by default is a leading '*' marker;
     skip it when present and return the rest of the name unchanged.  */
  if (*str == '*')
    str++;
  return str;
}
6949
#ifdef ASM_OUTPUT_DEF
/* The default implementation of TARGET_ASM_OUTPUT_ANCHOR.  Define the
   anchor relative to ".", the current section position.  */

void
default_asm_output_anchor (rtx symbol)
{
  /* Buffer holds "*. + " plus a decimal HOST_WIDE_INT; 100 bytes is
     far more than the ~30 characters that can ever be produced.  */
  char buffer[100];

  /* The leading '*' tells assemble_name-style output not to mangle
     the expression; the anchor is ". + block offset".  */
  sprintf (buffer, "*. + " HOST_WIDE_INT_PRINT_DEC,
	   SYMBOL_REF_BLOCK_OFFSET (symbol));
  ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
}
#endif
6964
6965 /* The default implementation of TARGET_USE_ANCHORS_FOR_SYMBOL_P. */
6966
bool
default_use_anchors_for_symbol_p (const_rtx symbol)
{
  section *sect;
  tree decl;

  /* Don't use anchors for mergeable sections.  The linker might move
     the objects around.  */
  sect = SYMBOL_REF_BLOCK (symbol)->sect;
  if (sect->common.flags & SECTION_MERGE)
    return false;

  /* Don't use anchors for small data sections.  The small data register
     acts as an anchor for such sections.  */
  if (sect->common.flags & SECTION_SMALL)
    return false;

  decl = SYMBOL_REF_DECL (symbol);
  if (decl && DECL_P (decl))
    {
      /* Don't use section anchors for decls that might be defined or
	 usurped by other modules.  */
      if (TREE_PUBLIC (decl) && !decl_binds_to_current_def_p (decl))
	return false;

      /* Don't use section anchors for decls that will be placed in a
	 small data section.  */
      /* ??? Ideally, this check would be redundant with the SECTION_SMALL
	 one above.  The problem is that we only use SECTION_SMALL for
	 sections that should be marked as small in the section directive.  */
      if (targetm.in_small_data_p (decl))
	return false;

      /* Don't use section anchors for decls that won't fit inside a single
	 anchor range to reduce the amount of instructions required to refer
	 to the entire declaration.  */
      if (DECL_SIZE_UNIT (decl) == NULL_TREE
	  || !tree_fits_uhwi_p (DECL_SIZE_UNIT (decl))
	  || (tree_to_uhwi (DECL_SIZE_UNIT (decl))
	      >= (unsigned HOST_WIDE_INT) targetm.max_anchor_offset))
	return false;

    }
  /* Constant-pool entries and other non-decl block symbols are always
     anchorable once the section checks above have passed.  */
  return true;
}
7012
/* Return true when RESOLUTION indicates that the symbol will be bound to
   the definition provided by the current .o file.  */
7015
7016 static bool
resolution_to_local_definition_p(enum ld_plugin_symbol_resolution resolution)7017 resolution_to_local_definition_p (enum ld_plugin_symbol_resolution resolution)
7018 {
7019 return (resolution == LDPR_PREVAILING_DEF
7020 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP
7021 || resolution == LDPR_PREVAILING_DEF_IRONLY);
7022 }
7023
/* Return true when RESOLUTION indicates that the symbol will be bound
   locally within the current executable or DSO.  */
7026
7027 static bool
resolution_local_p(enum ld_plugin_symbol_resolution resolution)7028 resolution_local_p (enum ld_plugin_symbol_resolution resolution)
7029 {
7030 return (resolution == LDPR_PREVAILING_DEF
7031 || resolution == LDPR_PREVAILING_DEF_IRONLY
7032 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP
7033 || resolution == LDPR_PREEMPTED_REG
7034 || resolution == LDPR_PREEMPTED_IR
7035 || resolution == LDPR_RESOLVED_IR
7036 || resolution == LDPR_RESOLVED_EXEC);
7037 }
7038
/* Worker for the default_binds_local_p* family.  Return true if EXP is
   known to bind within the current module.  SHLIB is true when building
   a shared library; WEAK_DOMINATE is true when a local definition
   dominates remote weak ones; EXTERN_PROTECTED_DATA is true when
   protected-visibility extern data must be treated as non-local.
   COMMON_LOCAL_P is true means that the linker can guarantee that an
   uninitialized common symbol in the executable will still be defined
   (through COPY relocation) in the executable.  */

bool
default_binds_local_p_3 (const_tree exp, bool shlib, bool weak_dominate,
			 bool extern_protected_data, bool common_local_p)
{
  /* A non-decl is an entry in the constant pool.  */
  if (!DECL_P (exp))
    return true;

  /* Weakrefs may not bind locally, even though the weakref itself is always
     static and therefore local.  Similarly, the resolver for ifunc functions
     might resolve to a non-local function.
     FIXME: We can resolve the weakref case more carefully by looking at the
     weakref alias.  */
  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (exp))
      || (TREE_CODE (exp) == FUNCTION_DECL
	  && cgraph_node::get (exp)
	  && cgraph_node::get (exp)->ifunc_resolver))
    return false;

  /* Static variables are always local.  */
  if (! TREE_PUBLIC (exp))
    return true;

  /* With resolution file in hand, take look into resolutions.
     We can't just return true for resolved_locally symbols,
     because dynamic linking might overwrite symbols
     in shared libraries.  */
  bool resolved_locally = false;

  /* An uninitialized COMMON may be merged with a definition from
     another translation unit or shared object at link time.  */
  bool uninited_common = (DECL_COMMON (exp)
			  && (DECL_INITIAL (exp) == NULL
			      || (!in_lto_p
				  && DECL_INITIAL (exp) == error_mark_node)));

  /* A non-external variable is defined locally only if it isn't
     uninitialized COMMON variable or common_local_p is true.  */
  bool defined_locally = (!DECL_EXTERNAL (exp)
			  && (!uninited_common || common_local_p));
  if (symtab_node *node = symtab_node::get (exp))
    {
      /* Symbols in other LTO partitions are still defined in this link.  */
      if (node->in_other_partition)
	defined_locally = true;
      /* Discardable nodes give no reliable resolution information.  */
      if (node->can_be_discarded_p ())
	;
      else if (resolution_to_local_definition_p (node->resolution))
	defined_locally = resolved_locally = true;
      else if (resolution_local_p (node->resolution))
	resolved_locally = true;
    }
  if (defined_locally && weak_dominate && !shlib)
    resolved_locally = true;

  /* Undefined weak symbols are never defined locally.  */
  if (DECL_WEAK (exp) && !defined_locally)
    return false;

  /* A symbol is local if the user has said explicitly that it will be,
     or if we have a definition for the symbol.  We cannot infer visibility
     for undefined symbols.  */
  if (DECL_VISIBILITY (exp) != VISIBILITY_DEFAULT
      && (TREE_CODE (exp) == FUNCTION_DECL
	  || !extern_protected_data
	  || DECL_VISIBILITY (exp) != VISIBILITY_PROTECTED)
      && (DECL_VISIBILITY_SPECIFIED (exp) || defined_locally))
    return true;

  /* If PIC, then assume that any global name can be overridden by
     symbols resolved from other modules.  */
  if (shlib)
    return false;

  /* Variables defined outside this object might not be local.  */
  if (DECL_EXTERNAL (exp) && !resolved_locally)
    return false;

  /* Non-dominant weak symbols are not defined locally.  */
  if (DECL_WEAK (exp) && !resolved_locally)
    return false;

  /* Uninitialized COMMON variable may be unified with symbols
     resolved from other modules.  */
  if (uninited_common && !resolved_locally)
    return false;

  /* Otherwise we're left with initialized (or non-common) global data
     which is of necessity defined locally.  */
  return true;
}
7131
7132 /* Assume ELF-ish defaults, since that's pretty much the most liberal
7133 wrt cross-module name binding. */
7134
bool
default_binds_local_p (const_tree exp)
{
  /* ELF default: weak definitions dominate (weak_dominate = true),
     protected extern data binds locally, commons are not local.  */
  return default_binds_local_p_3 (exp, flag_shlib != 0, true, false, false);
}
7140
7141 /* Similar to default_binds_local_p, but common symbol may be local and
7142 extern protected data is non-local. */
7143
bool
default_binds_local_p_2 (const_tree exp)
{
  /* Like default_binds_local_p but extern protected data is non-local
     and, for non-PIC code, commons may bind locally via COPY relocs.  */
  return default_binds_local_p_3 (exp, flag_shlib != 0, true, true,
				  !flag_pic);
}
7150
bool
default_binds_local_p_1 (const_tree exp, int shlib)
{
  /* Variant with an explicit SHLIB flag and no weak domination.  */
  return default_binds_local_p_3 (exp, shlib != 0, false, false, false);
}
7156
/* Return true when references to DECL must bind to the current definition
   in the final executable.

   The condition is usually equivalent to whether the function binds to the
   current module (shared library or executable), that is to binds_local_p.
   We use this fact to avoid the need for another target hook and implement
   the logic using binds_local_p plus just the special cases where
   decl_binds_to_current_def_p is stronger than binds_local_p.  In particular
   these are weak definitions (which can be overwritten at link time by
   another definition from a different object file) and, when resolution
   info is available, the knowledge passed to us by the linker plugin.  */
bool
decl_binds_to_current_def_p (const_tree decl)
{
  gcc_assert (DECL_P (decl));
  /* Binding locally is a necessary condition.  */
  if (!targetm.binds_local_p (decl))
    return false;
  /* Non-public symbols trivially bind to their own definition.  */
  if (!TREE_PUBLIC (decl))
    return true;

  /* When resolution is available, just use it.  */
  if (symtab_node *node = symtab_node::get (decl))
    {
      if (node->resolution != LDPR_UNKNOWN
	  && !node->can_be_discarded_p ())
	return resolution_to_local_definition_p (node->resolution);
    }

  /* Otherwise we have to assume the worst for DECL_WEAK (hidden weaks
     bind locally but still can be overwritten), DECL_COMMON (can be merged
     with a non-common definition somewhere in the same module) or
     DECL_EXTERNAL.
     This relies on the fact that binds_local_p behaves as decl_replaceable_p
     for all other declaration types.  */
  if (DECL_WEAK (decl))
    return false;
  /* Uninitialized commons may be unified with another definition.  */
  if (DECL_COMMON (decl)
      && (DECL_INITIAL (decl) == NULL
	  || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
    return false;
  if (DECL_EXTERNAL (decl))
    return false;
  return true;
}
7201
7202 /* A replaceable function or variable is one which may be replaced
7203 at link-time with an entirely different definition, provided that the
7204 replacement has the same type. For example, functions declared
7205 with __attribute__((weak)) on most systems are replaceable.
7206
7207 COMDAT functions are not replaceable, since all definitions of the
7208 function must be equivalent. It is important that COMDAT functions
7209 not be treated as replaceable so that use of C++ template
7210 instantiations is not penalized. */
7211
7212 bool
decl_replaceable_p(tree decl)7213 decl_replaceable_p (tree decl)
7214 {
7215 gcc_assert (DECL_P (decl));
7216 if (!TREE_PUBLIC (decl) || DECL_COMDAT (decl))
7217 return false;
7218 if (!flag_semantic_interposition
7219 && !DECL_WEAK (decl))
7220 return false;
7221 return !decl_binds_to_current_def_p (decl);
7222 }
7223
7224 /* Default function to output code that will globalize a label. A
7225 target must define GLOBAL_ASM_OP or provide its own function to
7226 globalize a label. */
#ifdef GLOBAL_ASM_OP
/* Emit the target's GLOBAL_ASM_OP directive (e.g. "\t.globl\t") for
   NAME on STREAM, making the label externally visible.  */
void
default_globalize_label (FILE * stream, const char *name)
{
  fputs (GLOBAL_ASM_OP, stream);
  /* assemble_name handles any target-specific name encoding.  */
  assemble_name (stream, name);
  putc ('\n', stream);
}
#endif /* GLOBAL_ASM_OP */
7236
7237 /* Default function to output code that will globalize a declaration. */
void
default_globalize_decl_name (FILE * stream, tree decl)
{
  /* Take the assembler name from DECL's RTL (a MEM around a SYMBOL_REF)
     and globalize that label.  */
  const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
  targetm.asm_out.globalize_label (stream, name);
}
7244
7245 /* Default function to output a label for unwind information. The
7246 default is to do nothing. A target that needs nonlocal labels for
7247 unwind information must provide its own function to do this. */
void
default_emit_unwind_label (FILE * stream ATTRIBUTE_UNUSED,
			   tree decl ATTRIBUTE_UNUSED,
			   int for_eh ATTRIBUTE_UNUSED,
			   int empty ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: the default target needs no unwind label.  */
}
7255
/* Default function to output a label to divide up the exception table.
   The default is to do nothing.  A target that needs/wants to divide
   up the table must provide its own function to do this.  */
void
default_emit_except_table_label (FILE * stream ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: no divider label by default.  */
}
7263
7264 /* This is how to output an internal numbered label where PREFIX is
7265 the class of label and LABELNO is the number within the class. */
7266
void
default_generate_internal_label (char *buf, const char *prefix,
				 unsigned long labelno)
{
  /* Simply defer to the target macro; BUF must be large enough for
     PREFIX plus the decimal label number.  */
  ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
}
7273
7274 /* This is how to output an internal numbered label where PREFIX is
7275 the class of label and LABELNO is the number within the class. */
7276
void
default_internal_label (FILE *stream, const char *prefix,
			unsigned long labelno)
{
  /* 40 bytes of slack comfortably covers the label-number digits and
     any decoration ASM_GENERATE_INTERNAL_LABEL adds around PREFIX.  */
  char *const buf = (char *) alloca (40 + strlen (prefix));
  ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
  ASM_OUTPUT_INTERNAL_LABEL (stream, buf);
}
7285
7286
7287 /* The default implementation of ASM_DECLARE_CONSTANT_NAME. */
7288
void
default_asm_declare_constant_name (FILE *file, const char *name,
				   const_tree exp ATTRIBUTE_UNUSED,
				   HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  /* By default a constant needs only its label; EXP and SIZE are for
     targets that emit type/size directives.  */
  assemble_label (file, name);
}
7296
7297 /* This is the default behavior at the beginning of a file. It's
7298 controlled by two other target-hook toggles. */
void
default_file_start (void)
{
  /* Suppress app mode (#APP/#NO_APP tracking) unless verbose or debug
     assembly output was requested, where the markers stay meaningful.  */
  if (targetm.asm_file_start_app_off
      && !(flag_verbose_asm || flag_debug_asm || flag_dump_rtl_in_asm))
    fputs (ASM_APP_OFF, asm_out_file);

  if (targetm.asm_file_start_file_directive)
    {
      /* LTO produced units have no meaningful main_input_filename.  */
      if (in_lto_p)
	output_file_directive (asm_out_file, "<artificial>");
      else
	output_file_directive (asm_out_file, main_input_filename);
    }
}
7315
7316 /* This is a generic routine suitable for use as TARGET_ASM_FILE_END
7317 which emits a special section directive used to indicate whether or
7318 not this object file needs an executable stack. This is primarily
7319 a GNU extension to ELF but could be used on other targets. */
7320
/* Nonzero when trampoline code has been emitted for this unit — set
   elsewhere; presumably by trampoline expansion (TODO confirm).  */
int trampolines_created;

void
file_end_indicate_exec_stack (void)
{
  /* Adding SECTION_CODE to the note requests an executable stack,
     which trampolines require.  */
  unsigned int flags = SECTION_DEBUG;
  if (trampolines_created)
    flags |= SECTION_CODE;

  switch_to_section (get_section (".note.GNU-stack", flags, NULL));
}
7332
7333 /* Emit a special section directive to indicate that this object file
7334 was compiled with -fsplit-stack. This is used to let the linker
7335 detect calls between split-stack code and non-split-stack code, so
7336 that it can modify the split-stack code to allocate a sufficiently
7337 large stack. We emit another special section if there are any
7338 functions in this file which have the no_split_stack attribute, to
7339 prevent the linker from warning about being unable to convert the
7340 functions if they call non-split-stack code. */
7341
void
file_end_indicate_split_stack (void)
{
  if (flag_split_stack)
    {
      /* Marker note read by the linker to identify split-stack code.  */
      switch_to_section (get_section (".note.GNU-split-stack", SECTION_DEBUG,
				      NULL));
      /* A second note flags the presence of no_split_stack functions.  */
      if (saw_no_split_stack)
	switch_to_section (get_section (".note.GNU-no-split-stack",
					SECTION_DEBUG, NULL));
    }
}
7354
7355 /* Output DIRECTIVE (a C string) followed by a newline. This is used as
7356 a get_unnamed_section callback. */
7357
7358 void
output_section_asm_op(const void * directive)7359 output_section_asm_op (const void *directive)
7360 {
7361 fprintf (asm_out_file, "%s\n", (const char *) directive);
7362 }
7363
7364 /* Emit assembly code to switch to section NEW_SECTION. Do nothing if
7365 the current section is NEW_SECTION. */
7366
void
switch_to_section (section *new_section)
{
  if (in_section == new_section)
    return;

  /* SECTION_FORGET sections cannot be tracked as "current", so clear
     the cache and force a directive next time as well.  */
  if (new_section->common.flags & SECTION_FORGET)
    in_section = NULL;
  else
    in_section = new_section;

  switch (SECTION_STYLE (new_section))
    {
    case SECTION_NAMED:
      /* Emit the target's section directive for a named section.  */
      targetm.asm_out.named_section (new_section->named.name,
				     new_section->named.common.flags,
				     new_section->named.decl);
      break;

    case SECTION_UNNAMED:
      /* Unnamed sections carry their own output callback.  */
      new_section->unnamed.callback (new_section->unnamed.data);
      break;

    case SECTION_NOSWITCH:
      /* Noswitch sections (e.g. comm-style) are never switched to.  */
      gcc_unreachable ();
      break;
    }

  /* Remember that the section directive has been emitted.  */
  new_section->common.flags |= SECTION_DECLARED;
}
7397
7398 /* If block symbol SYMBOL has not yet been assigned an offset, place
7399 it at the end of its block. */
7400
void
place_block_symbol (rtx symbol)
{
  unsigned HOST_WIDE_INT size, mask, offset;
  struct constant_descriptor_rtx *desc;
  unsigned int alignment;
  struct object_block *block;
  tree decl;

  gcc_assert (SYMBOL_REF_BLOCK (symbol));
  /* A non-negative offset means the symbol is already placed.  */
  if (SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0)
    return;

  /* Work out the symbol's size and alignment.  */
  if (CONSTANT_POOL_ADDRESS_P (symbol))
    {
      /* RTL constant-pool entry: size/alignment come from its mode.  */
      desc = SYMBOL_REF_CONSTANT (symbol);
      alignment = desc->align;
      size = GET_MODE_SIZE (desc->mode);
    }
  else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    {
      /* Tree-level constant-pool decl.  */
      decl = SYMBOL_REF_DECL (symbol);
      gcc_checking_assert (DECL_IN_CONSTANT_POOL (decl));
      alignment = DECL_ALIGN (decl);
      size = get_constant_size (DECL_INITIAL (decl));
      /* ASan-protected string constants get a trailing red zone and
	 matching alignment.  */
      if ((flag_sanitize & SANITIZE_ADDRESS)
	  && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
	  && asan_protect_global (DECL_INITIAL (decl)))
	{
	  size += asan_red_zone_size (size);
	  alignment = MAX (alignment,
			   ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
	}
    }
  else
    {
      /* An ordinary variable.  */
      struct symtab_node *snode;
      decl = SYMBOL_REF_DECL (symbol);

      snode = symtab_node::get (decl);
      if (snode->alias)
	{
	  rtx target = DECL_RTL (snode->ultimate_alias_target ()->decl);

	  /* An alias takes the position of its ultimate target, so
	     place that target and copy its offset.  */
	  gcc_assert (MEM_P (target)
		      && GET_CODE (XEXP (target, 0)) == SYMBOL_REF
		      && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (target, 0)));
	  target = XEXP (target, 0);
	  place_block_symbol (target);
	  SYMBOL_REF_BLOCK_OFFSET (symbol) = SYMBOL_REF_BLOCK_OFFSET (target);
	  return;
	}
      alignment = get_variable_align (decl);
      size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
      if ((flag_sanitize & SANITIZE_ADDRESS)
	  && asan_protect_global (decl))
	{
	  size += asan_red_zone_size (size);
	  alignment = MAX (alignment,
			   ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
	}
    }

  /* Calculate the object's offset from the start of the block:
     round the current block size up to the required alignment.  */
  block = SYMBOL_REF_BLOCK (symbol);
  mask = alignment / BITS_PER_UNIT - 1;
  offset = (block->size + mask) & ~mask;
  SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;

  /* Record the block's new alignment and size.  */
  block->alignment = MAX (block->alignment, alignment);
  block->size = offset + size;

  vec_safe_push (block->objects, symbol);
}
7477
7478 /* Return the anchor that should be used to address byte offset OFFSET
7479 from the first object in BLOCK. MODEL is the TLS model used
7480 to access it. */
7481
rtx
get_section_anchor (struct object_block *block, HOST_WIDE_INT offset,
		    enum tls_model model)
{
  char label[100];
  unsigned int begin, middle, end;
  unsigned HOST_WIDE_INT min_offset, max_offset, range, bias, delta;
  rtx anchor;

  /* Work out the anchor's offset.  Use an offset of 0 for the first
     anchor so that we don't pessimize the case where we take the address
     of a variable at the beginning of the block.  This is particularly
     useful when a block has only one variable assigned to it.

     We try to place anchors RANGE bytes apart, so there can then be
     anchors at +/-RANGE, +/-2 * RANGE, and so on, up to the limits of
     a ptr_mode offset.  With some target settings, the lowest such
     anchor might be out of range for the lowest ptr_mode offset;
     likewise the highest anchor for the highest offset.  Use anchors
     at the extreme ends of the ptr_mode range in such cases.

     All arithmetic uses unsigned integers in order to avoid
     signed overflow.  */
  max_offset = (unsigned HOST_WIDE_INT) targetm.max_anchor_offset;
  min_offset = (unsigned HOST_WIDE_INT) targetm.min_anchor_offset;
  range = max_offset - min_offset + 1;
  if (range == 0)
    offset = 0;
  else
    {
      /* BIAS is half the ptr_mode range, i.e. the magnitude of the most
	 negative representable offset.  */
      bias = HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (ptr_mode) - 1);
      if (offset < 0)
	{
	  /* Round the anchor down to a multiple of RANGE below OFFSET,
	     clamped at the lowest representable anchor.  */
	  delta = -(unsigned HOST_WIDE_INT) offset + max_offset;
	  delta -= delta % range;
	  if (delta > bias)
	    delta = bias;
	  offset = (HOST_WIDE_INT) (-delta);
	}
      else
	{
	  /* Likewise round down for non-negative offsets, clamping at
	     the highest representable anchor (bias - 1).  */
	  delta = (unsigned HOST_WIDE_INT) offset - min_offset;
	  delta -= delta % range;
	  if (delta > bias - 1)
	    delta = bias - 1;
	  offset = (HOST_WIDE_INT) delta;
	}
    }

  /* Do a binary search to see if there's already an anchor we can use.
     Set BEGIN to the new anchor's index if not.  Anchors are ordered
     by block offset, then by TLS model.  */
  begin = 0;
  end = vec_safe_length (block->anchors);
  while (begin != end)
    {
      middle = (end + begin) / 2;
      anchor = (*block->anchors)[middle];
      if (SYMBOL_REF_BLOCK_OFFSET (anchor) > offset)
	end = middle;
      else if (SYMBOL_REF_BLOCK_OFFSET (anchor) < offset)
	begin = middle + 1;
      else if (SYMBOL_REF_TLS_MODEL (anchor) > model)
	end = middle;
      else if (SYMBOL_REF_TLS_MODEL (anchor) < model)
	begin = middle + 1;
      else
	return anchor;
    }

  /* Create a new anchor with a unique label.  */
  ASM_GENERATE_INTERNAL_LABEL (label, "LANCHOR", anchor_labelno++);
  anchor = create_block_symbol (ggc_strdup (label), block, offset);
  SYMBOL_REF_FLAGS (anchor) |= SYMBOL_FLAG_LOCAL | SYMBOL_FLAG_ANCHOR;
  SYMBOL_REF_FLAGS (anchor) |= model << SYMBOL_FLAG_TLS_SHIFT;

  /* Insert it at index BEGIN.  */
  vec_safe_insert (block->anchors, begin, anchor);
  return anchor;
}
7561
7562 /* Output the objects in BLOCK. */
7563
static void
output_object_block (struct object_block *block)
{
  struct constant_descriptor_rtx *desc;
  unsigned int i;
  HOST_WIDE_INT offset;
  tree decl;
  rtx symbol;

  if (!block->objects)
    return;

  /* Switch to the section and make sure that the first byte is
     suitably aligned.  */
  /* Special case VTV comdat sections similar to assemble_variable.  */
  if (SECTION_STYLE (block->sect) == SECTION_NAMED
      && block->sect->named.name
      && (strcmp (block->sect->named.name, ".vtable_map_vars") == 0))
    handle_vtv_comdat_section (block->sect, block->sect->named.decl);
  else
    switch_to_section (block->sect);

  assemble_align (block->alignment);

  /* Define the values of all anchors relative to the current section
     position.  */
  FOR_EACH_VEC_SAFE_ELT (block->anchors, i, symbol)
    targetm.asm_out.output_anchor (symbol);

  /* Output the objects themselves.  OFFSET tracks the current section
     position relative to the start of the block.  */
  offset = 0;
  FOR_EACH_VEC_ELT (*block->objects, i, symbol)
    {
      /* Move to the object's offset, padding with zeros if necessary.  */
      assemble_zeros (SYMBOL_REF_BLOCK_OFFSET (symbol) - offset);
      offset = SYMBOL_REF_BLOCK_OFFSET (symbol);
      if (CONSTANT_POOL_ADDRESS_P (symbol))
	{
	  /* RTL constant-pool entry.  */
	  desc = SYMBOL_REF_CONSTANT (symbol);
	  /* Pass 1 for align as we have already laid out everything in the block.
	     So aligning shouldn't be necessary.  */
	  output_constant_pool_1 (desc, 1);
	  offset += GET_MODE_SIZE (desc->mode);
	}
      else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
	{
	  /* Tree-level constant-pool decl; emit its initializer, plus an
	     ASan red zone for protected string constants (mirroring the
	     sizing done in place_block_symbol).  */
	  HOST_WIDE_INT size;
	  decl = SYMBOL_REF_DECL (symbol);
	  assemble_constant_contents
	       (DECL_INITIAL (decl), XSTR (symbol, 0), DECL_ALIGN (decl));

	  size = get_constant_size (DECL_INITIAL (decl));
	  offset += size;
	  if ((flag_sanitize & SANITIZE_ADDRESS)
	      && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
	      && asan_protect_global (DECL_INITIAL (decl)))
	    {
	      size = asan_red_zone_size (size);
	      assemble_zeros (size);
	      offset += size;
	    }
	}
      else
	{
	  /* An ordinary variable; likewise pad with a red zone when
	     ASan protects it.  */
	  HOST_WIDE_INT size;
	  decl = SYMBOL_REF_DECL (symbol);
	  assemble_variable_contents (decl, XSTR (symbol, 0), false);
	  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
	  offset += size;
	  if ((flag_sanitize & SANITIZE_ADDRESS)
	      && asan_protect_global (decl))
	    {
	      size = asan_red_zone_size (size);
	      assemble_zeros (size);
	      offset += size;
	    }
	}
    }
}
7643
7644 /* A callback for qsort to compare object_blocks. */
7645
7646 static int
output_object_block_compare(const void * x,const void * y)7647 output_object_block_compare (const void *x, const void *y)
7648 {
7649 object_block *p1 = *(object_block * const*)x;
7650 object_block *p2 = *(object_block * const*)y;
7651
7652 if (p1->sect->common.flags & SECTION_NAMED
7653 && !(p2->sect->common.flags & SECTION_NAMED))
7654 return 1;
7655
7656 if (!(p1->sect->common.flags & SECTION_NAMED)
7657 && p2->sect->common.flags & SECTION_NAMED)
7658 return -1;
7659
7660 if (p1->sect->common.flags & SECTION_NAMED
7661 && p2->sect->common.flags & SECTION_NAMED)
7662 return strcmp (p1->sect->named.name, p2->sect->named.name);
7663
7664 unsigned f1 = p1->sect->common.flags;
7665 unsigned f2 = p2->sect->common.flags;
7666 if (f1 == f2)
7667 return 0;
7668 return f1 < f2 ? -1 : 1;
7669 }
7670
7671 /* Output the definitions of all object_blocks. */
7672
void
output_object_blocks (void)
{
  /* Collect every block from the hash table into a heap vector so it
     can be sorted.  */
  vec<object_block *, va_heap> v;
  v.create (object_block_htab->elements ());
  object_block *obj;
  hash_table<object_block_hasher>::iterator hi;

  FOR_EACH_HASH_TABLE_ELEMENT (*object_block_htab, obj, object_block *, hi)
    v.quick_push (obj);

  /* Sort them in order to output them in a deterministic manner,
     otherwise we may get .rodata sections in different orders with
     and without -g.  */
  v.qsort (output_object_block_compare);
  unsigned i;
  FOR_EACH_VEC_ELT (v, i, obj)
    output_object_block (obj);

  v.release ();
}
7694
/* This function provides a possible implementation of the
   TARGET_ASM_RECORD_GCC_SWITCHES target hook for ELF targets.  When triggered
   by -frecord-gcc-switches it creates a new mergeable, string section in the
   assembler output file called TARGET_ASM_RECORD_GCC_SWITCHES_SECTION which
   contains the switches in ASCII format.

   FIXME: This code does not correctly handle double quote characters
   that appear inside strings, (it strips them rather than preserving them).
   FIXME: ASM_OUTPUT_ASCII, as defined in config/elfos.h will not emit NUL
   characters - instead it treats them as sub-string separators.  Since
   we want to emit NUL strings terminators into the object file we have to use
   ASM_OUTPUT_SKIP.  */

int
elf_record_gcc_switches (print_switch_type type, const char * name)
{
  switch (type)
    {
    case SWITCH_TYPE_PASSED:
      /* Emit the switch text followed by one NUL byte, so the section
	 holds a sequence of NUL-terminated strings.  The terminator is
	 emitted with ASM_OUTPUT_SKIP because ASM_OUTPUT_ASCII cannot
	 emit embedded NULs (see the FIXME above).  */
      ASM_OUTPUT_ASCII (asm_out_file, name, strlen (name));
      ASM_OUTPUT_SKIP (asm_out_file, HOST_WIDE_INT_1U);
      break;

    case SWITCH_TYPE_DESCRIPTIVE:
      if (name == NULL)
	{
	  /* Distinguish between invocations where name is NULL.  */
	  static bool started = false;

	  if (!started)
	    {
	      section * sec;

	      /* First NULL-named descriptive call: create (or find) the
		 mergeable string section that will hold the recorded
		 switches, and switch to it.  SECTION_ENTSIZE & 1 encodes
		 an entity size of one byte for the merged strings.  */
	      sec = get_section (targetm.asm_out.record_gcc_switches_section,
				 SECTION_DEBUG
				 | SECTION_MERGE
				 | SECTION_STRINGS
				 | (SECTION_ENTSIZE & 1),
				 NULL);
	      switch_to_section (sec);
	      started = true;
	    }
	}
      /* FALLTHROUGH -- descriptive entries themselves are not emitted.  */

    default:
      break;
    }

  /* The return value is currently ignored by the caller, but must be 0.
     For -fverbose-asm the return value would be the number of characters
     emitted into the assembler file.  */
  return 0;
}
7748
7749 /* Emit text to declare externally defined symbols. It is needed to
7750 properly support non-default visibility. */
7751 void
default_elf_asm_output_external(FILE * file ATTRIBUTE_UNUSED,tree decl,const char * name ATTRIBUTE_UNUSED)7752 default_elf_asm_output_external (FILE *file ATTRIBUTE_UNUSED,
7753 tree decl,
7754 const char *name ATTRIBUTE_UNUSED)
7755 {
7756 /* We output the name if and only if TREE_SYMBOL_REFERENCED is
7757 set in order to avoid putting out names that are never really
7758 used. Always output visibility specified in the source. */
7759 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
7760 && (DECL_VISIBILITY_SPECIFIED (decl)
7761 || targetm.binds_local_p (decl)))
7762 maybe_assemble_visibility (decl);
7763 }
7764
7765 /* The default hook for TARGET_ASM_OUTPUT_SOURCE_FILENAME. */
7766
7767 void
default_asm_output_source_filename(FILE * file,const char * name)7768 default_asm_output_source_filename (FILE *file, const char *name)
7769 {
7770 #ifdef ASM_OUTPUT_SOURCE_FILENAME
7771 ASM_OUTPUT_SOURCE_FILENAME (file, name);
7772 #else
7773 fprintf (file, "\t.file\t");
7774 output_quoted_string (file, name);
7775 putc ('\n', file);
7776 #endif
7777 }
7778
7779 /* Output a file name in the form wanted by System V. */
7780
7781 void
output_file_directive(FILE * asm_file,const char * input_name)7782 output_file_directive (FILE *asm_file, const char *input_name)
7783 {
7784 int len;
7785 const char *na;
7786
7787 if (input_name == NULL)
7788 input_name = "<stdin>";
7789 else
7790 input_name = remap_debug_filename (input_name);
7791
7792 len = strlen (input_name);
7793 na = input_name + len;
7794
7795 /* NA gets INPUT_NAME sans directory names. */
7796 while (na > input_name)
7797 {
7798 if (IS_DIR_SEPARATOR (na[-1]))
7799 break;
7800 na--;
7801 }
7802
7803 targetm.asm_out.output_source_filename (asm_file, na);
7804 }
7805
/* Create a DEBUG_EXPR_DECL / DEBUG_EXPR pair from RTL expression
   EXP.  Returns the DEBUG_EXPR rtx; the two are linked both ways
   (decl -> rtl and rtl -> decl).  */
rtx
make_debug_expr_from_rtl (const_rtx exp)
{
  tree ddecl = make_node (DEBUG_EXPR_DECL), type;
  machine_mode mode = GET_MODE (exp);
  rtx dval;

  DECL_ARTIFICIAL (ddecl) = 1;
  /* Try to recover a source-level type from the tree expression
     attached to the register or memory reference, if any.  */
  if (REG_P (exp) && REG_EXPR (exp))
    type = TREE_TYPE (REG_EXPR (exp));
  else if (MEM_P (exp) && MEM_EXPR (exp))
    type = TREE_TYPE (MEM_EXPR (exp));
  else
    type = NULL_TREE;
  /* Only use that type when its mode agrees with EXP's mode;
     otherwise ask the language for a type of the right mode
     (second argument 1 requests the unsigned variant).  */
  if (type && TYPE_MODE (type) == mode)
    TREE_TYPE (ddecl) = type;
  else
    TREE_TYPE (ddecl) = lang_hooks.types.type_for_mode (mode, 1);
  SET_DECL_MODE (ddecl, mode);
  dval = gen_rtx_DEBUG_EXPR (mode);
  DEBUG_EXPR_TREE_DECL (dval) = ddecl;
  SET_DECL_RTL (ddecl, dval);
  return dval;
}
7832
7833 #ifdef ELF_ASCII_ESCAPES
7834 /* Default ASM_OUTPUT_LIMITED_STRING for ELF targets. */
7835
7836 void
default_elf_asm_output_limited_string(FILE * f,const char * s)7837 default_elf_asm_output_limited_string (FILE *f, const char *s)
7838 {
7839 int escape;
7840 unsigned char c;
7841
7842 fputs (STRING_ASM_OP, f);
7843 putc ('"', f);
7844 while (*s != '\0')
7845 {
7846 c = *s;
7847 escape = ELF_ASCII_ESCAPES[c];
7848 switch (escape)
7849 {
7850 case 0:
7851 putc (c, f);
7852 break;
7853 case 1:
7854 putc ('\\', f);
7855 putc ('0'+((c>>6)&7), f);
7856 putc ('0'+((c>>3)&7), f);
7857 putc ('0'+(c&7), f);
7858 break;
7859 default:
7860 putc ('\\', f);
7861 putc (escape, f);
7862 break;
7863 }
7864 s++;
7865 }
7866 putc ('\"', f);
7867 putc ('\n', f);
7868 }
7869
/* Default ASM_OUTPUT_ASCII for ELF targets.  Emits LEN bytes starting
   at S, using .string directives for short NUL-terminated runs and
   .ascii directives (broken into lines) for everything else.  */

void
default_elf_asm_output_ascii (FILE *f, const char *s, unsigned int len)
{
  const char *limit = s + len;
  /* Position of the next NUL at or after S, cached so the scan below
     is not repeated for every byte.  */
  const char *last_null = NULL;
  /* Number of output characters emitted so far on the current
     .ascii line.  */
  unsigned bytes_in_chunk = 0;
  unsigned char c;
  int escape;

  for (; s < limit; s++)
    {
      const char *p;

      /* Keep .ascii lines to a reasonable length.  */
      if (bytes_in_chunk >= 60)
	{
	  putc ('\"', f);
	  putc ('\n', f);
	  bytes_in_chunk = 0;
	}

      /* Find the next NUL, reusing the cached position when S has not
	 yet moved past it.  */
      if (s > last_null)
	{
	  for (p = s; p < limit && *p != '\0'; p++)
	    continue;
	  last_null = p;
	}
      else
	p = last_null;

      /* A short run ending in a NUL is emitted as a single .string
	 directive, which supplies the terminator itself.  */
      if (p < limit && (p - s) <= (long) ELF_STRING_LIMIT)
	{
	  if (bytes_in_chunk > 0)
	    {
	      /* Close the currently open .ascii line first.  */
	      putc ('\"', f);
	      putc ('\n', f);
	      bytes_in_chunk = 0;
	    }

	  default_elf_asm_output_limited_string (f, s);
	  /* Jump to the NUL; the loop increment steps past it.  */
	  s = p;
	}
      else
	{
	  /* Otherwise append this byte to the current .ascii line,
	     opening one if necessary and escaping as required.  */
	  if (bytes_in_chunk == 0)
	    fputs (ASCII_DATA_ASM_OP "\"", f);

	  c = *s;
	  escape = ELF_ASCII_ESCAPES[c];
	  switch (escape)
	    {
	    case 0:
	      /* Literal character.  */
	      putc (c, f);
	      bytes_in_chunk++;
	      break;
	    case 1:
	      /* Three-digit octal escape.  */
	      putc ('\\', f);
	      putc ('0'+((c>>6)&7), f);
	      putc ('0'+((c>>3)&7), f);
	      putc ('0'+(c&7), f);
	      bytes_in_chunk += 4;
	      break;
	    default:
	      /* Single-character escape such as \n.  */
	      putc ('\\', f);
	      putc (escape, f);
	      bytes_in_chunk += 2;
	      break;
	    }

	}
    }

  /* Close the final .ascii line, if one is open.  */
  if (bytes_in_chunk > 0)
    {
      putc ('\"', f);
      putc ('\n', f);
    }
}
7949 #endif
7950
/* Cached .init_array / .fini_array sections for DEFAULT_INIT_PRIORITY,
   created lazily by get_elf_initfini_array_priority_section.  */
static GTY(()) section *elf_init_array_section;
static GTY(()) section *elf_fini_array_section;
7953
7954 static section *
get_elf_initfini_array_priority_section(int priority,bool constructor_p)7955 get_elf_initfini_array_priority_section (int priority,
7956 bool constructor_p)
7957 {
7958 section *sec;
7959 if (priority != DEFAULT_INIT_PRIORITY)
7960 {
7961 char buf[18];
7962 sprintf (buf, "%s.%.5u",
7963 constructor_p ? ".init_array" : ".fini_array",
7964 priority);
7965 sec = get_section (buf, SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
7966 }
7967 else
7968 {
7969 if (constructor_p)
7970 {
7971 if (elf_init_array_section == NULL)
7972 elf_init_array_section
7973 = get_section (".init_array",
7974 SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
7975 sec = elf_init_array_section;
7976 }
7977 else
7978 {
7979 if (elf_fini_array_section == NULL)
7980 elf_fini_array_section
7981 = get_section (".fini_array",
7982 SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
7983 sec = elf_fini_array_section;
7984 }
7985 }
7986 return sec;
7987 }
7988
7989 /* Use .init_array section for constructors. */
7990
7991 void
default_elf_init_array_asm_out_constructor(rtx symbol,int priority)7992 default_elf_init_array_asm_out_constructor (rtx symbol, int priority)
7993 {
7994 section *sec = get_elf_initfini_array_priority_section (priority,
7995 true);
7996 assemble_addr_to_section (symbol, sec);
7997 }
7998
7999 /* Use .fini_array section for destructors. */
8000
8001 void
default_elf_fini_array_asm_out_destructor(rtx symbol,int priority)8002 default_elf_fini_array_asm_out_destructor (rtx symbol, int priority)
8003 {
8004 section *sec = get_elf_initfini_array_priority_section (priority,
8005 false);
8006 assemble_addr_to_section (symbol, sec);
8007 }
8008
8009 /* Default TARGET_ASM_OUTPUT_IDENT hook.
8010
8011 This is a bit of a cheat. The real default is a no-op, but this
8012 hook is the default for all targets with a .ident directive. */
8013
8014 void
default_asm_output_ident_directive(const char * ident_str)8015 default_asm_output_ident_directive (const char *ident_str)
8016 {
8017 const char *ident_asm_op = "\t.ident\t";
8018
8019 /* If we are still in the front end, do not write out the string
8020 to asm_out_file. Instead, add a fake top-level asm statement.
8021 This allows the front ends to use this hook without actually
8022 writing to asm_out_file, to handle #ident or Pragma Ident. */
8023 if (symtab->state == PARSING)
8024 {
8025 char *buf = ACONCAT ((ident_asm_op, "\"", ident_str, "\"\n", NULL));
8026 symtab->finalize_toplevel_asm (build_string (strlen (buf), buf));
8027 }
8028 else
8029 fprintf (asm_out_file, "%s\"%s\"\n", ident_asm_op, ident_str);
8030 }
8031
8032
/* This function ensures that vtable_map variables are not only
   in the comdat section, but that each variable has its own unique
   comdat name.  Without this the variables end up in the same section
   with a single comdat name.

   FIXME: resolve_unique_section needs to deal better with
   decls with both DECL_SECTION_NAME and DECL_ONE_ONLY.  Once
   that is fixed, this if-else statement can be replaced with
   a single call to "switch_to_section (sect)".  */

static void
handle_vtv_comdat_section (section *sect, const_tree decl ATTRIBUTE_UNUSED)
{
#if defined (OBJECT_FORMAT_ELF)
  /* ELF: emit the named section with SECTION_LINKONCE added, using the
     decl's name as the comdat group, then record SECT as current.  */
  targetm.asm_out.named_section (sect->named.name,
				 sect->named.common.flags
				 | SECTION_LINKONCE,
				 DECL_NAME (decl));
  in_section = sect;
#else
  /* Neither OBJECT_FORMAT_PE, nor OBJECT_FORMAT_COFF is set here.
     Therefore the following check is used.
     In case the target is PE or COFF a comdat group section
     is created, e.g. .vtable_map_vars$foo.  The linker places
     everything in .vtable_map_vars at the end.

     A fix could be made in
     gcc/config/i386/winnt.c: i386_pe_unique_section.  */
  if (TARGET_PECOFF)
    {
      char *name;

      /* Build the "<section>$<decl>" comdat-style name from either the
	 decl's identifier or its comdat group identifier.  */
      if (TREE_CODE (DECL_NAME (decl)) == IDENTIFIER_NODE)
	name = ACONCAT ((sect->named.name, "$",
			 IDENTIFIER_POINTER (DECL_NAME (decl)), NULL));
      else
	name = ACONCAT ((sect->named.name, "$",
			 IDENTIFIER_POINTER (DECL_COMDAT_GROUP (DECL_NAME (decl))),
			 NULL));

      /* NOTE(review): in_section is set to SECT even though the section
	 actually emitted is NAME -- see the FIXME above.  */
      targetm.asm_out.named_section (name,
				     sect->named.common.flags
				     | SECTION_LINKONCE,
				     DECL_NAME (decl));
      in_section = sect;
    }
  else
    switch_to_section (sect);
#endif
}
8083
8084 #include "gt-varasm.h"
8085