1 /* Output variables, constants and external declarations, for GNU compiler.
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* This file handles generation of all the assembler code
22 *except* the instructions of a function.
23 This includes declarations of variables and their initial values.
24
25 We also output the assembler code for constants stored in memory
26 and are responsible for combining constants with the same value. */
27
28 #include "config.h"
29 #include "system.h"
30 #include "coretypes.h"
31 #include "backend.h"
32 #include "target.h"
33 #include "rtl.h"
34 #include "tree.h"
35 #include "predict.h"
36 #include "memmodel.h"
37 #include "tm_p.h"
38 #include "stringpool.h"
39 #include "regs.h"
40 #include "emit-rtl.h"
41 #include "cgraph.h"
42 #include "diagnostic-core.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "varasm.h"
46 #include "flags.h"
47 #include "stmt.h"
48 #include "expr.h"
49 #include "expmed.h"
50 #include "optabs.h"
51 #include "output.h"
52 #include "langhooks.h"
53 #include "debug.h"
54 #include "common/common-target.h"
55 #include "stringpool.h"
56 #include "attribs.h"
57 #include "asan.h"
58 #include "rtl-iter.h"
59 #include "file-prefix-map.h" /* remap_debug_filename() */
60
61 #ifdef XCOFF_DEBUGGING_INFO
62 #include "xcoffout.h" /* Needed for external data declarations. */
63 #endif
64
/* The (assembler) name of the first globally-visible object output.  */
extern GTY(()) const char *first_global_object_name;
extern GTY(()) const char *weak_global_object_name;

const char *first_global_object_name;
const char *weak_global_object_name;

/* Forward declarations; the full types appear later in this file.  */
class addr_const;
class constant_descriptor_rtx;
struct rtx_constant_pool;

/* Per-function count of deferred constants, kept in crtl so it is
   saved and restored across function boundaries.  */
#define n_deferred_constants (crtl->varasm.deferred_constants)

/* Number for making the label on the next
   constant that is stored in memory.  */

static GTY(()) int const_labelno;

/* Carry information from ASM_DECLARE_OBJECT_NAME
   to ASM_FINISH_DECLARE_OBJECT.  */

int size_directive_output;

/* The last decl for which assemble_variable was called,
   if it did ASM_DECLARE_OBJECT_NAME.
   If the last call to assemble_variable didn't do that,
   this holds 0.  */

tree last_assemble_variable_decl;

/* The following global variable indicates if the first basic block
   in a function belongs to the cold partition or not.  */

bool first_function_block_is_cold;

/* Whether we saw any functions with no_split_stack.  */

static bool saw_no_split_stack;

/* Prototypes for static helpers defined later in this file.  */
static const char *strip_reg_name (const char *);
static int contains_pointers_p (tree);
#ifdef ASM_OUTPUT_EXTERNAL
static bool incorporeal_function_p (tree);
#endif
static void decode_addr_const (tree, class addr_const *);
static hashval_t const_hash_1 (const tree);
static int compare_constant (const tree, const tree);
static void output_constant_def_contents (rtx);
static void output_addressed_constants (tree);
static unsigned HOST_WIDE_INT output_constant (tree, unsigned HOST_WIDE_INT,
					       unsigned int, bool, bool);
static void globalize_decl (tree);
static bool decl_readonly_section_1 (enum section_category);
#ifdef BSS_SECTION_ASM_OP
#ifdef ASM_OUTPUT_ALIGNED_BSS
static void asm_output_aligned_bss (FILE *, tree, const char *,
				    unsigned HOST_WIDE_INT, int)
     ATTRIBUTE_UNUSED;
#endif
#endif /* BSS_SECTION_ASM_OP */
static void mark_weak (tree);
static void output_constant_pool (const char *, tree);
static void handle_vtv_comdat_section (section *, const_tree);

/* Well-known sections, each one associated with some sort of *_ASM_OP.  */
section *text_section;
section *data_section;
section *readonly_data_section;
section *sdata_section;
section *ctors_section;
section *dtors_section;
section *bss_section;
section *sbss_section;

/* Various forms of common section.  All are guaranteed to be nonnull.  */
section *tls_comm_section;
section *comm_section;
section *lcomm_section;

/* A SECTION_NOSWITCH section used for declaring global BSS variables.
   May be null.  */
section *bss_noswitch_section;

/* The section that holds the main exception table, when known.  The section
   is set either by the target's init_sections hook or by the first call to
   switch_to_exception_section.  */
section *exception_section;

/* The section that holds the DWARF2 frame unwind information, when known.
   The section is set either by the target's init_sections hook or by the
   first call to switch_to_eh_frame_section.  */
section *eh_frame_section;

/* asm_out_file's current section.  This is NULL if no section has yet
   been selected or if we lose track of what the current section is.  */
section *in_section;

/* True if code for the current function is currently being directed
   at the cold section.  */
bool in_cold_section_p;

/* The following global holds the "function name" for the code in the
   cold section of a function, if hot/cold function splitting is enabled
   and there was actually code that went into the cold section.  A
   pseudo function name is needed for the cold section of code for some
   debugging tools that perform symbolization.  */
tree cold_function_name = NULL_TREE;

/* A linked list of all the unnamed sections.  */
static GTY(()) section *unnamed_sections;

/* Return a nonzero value if DECL has a section attribute.  */
#define IN_NAMED_SECTION(DECL) \
  (VAR_OR_FUNCTION_DECL_P (DECL) && DECL_SECTION_NAME (DECL) != NULL)

/* Hash descriptor for named sections: keyed and compared by name.  */
struct section_hasher : ggc_ptr_hash<section>
{
  typedef const char *compare_type;

  static hashval_t hash (section *);
  static bool equal (section *, const char *);
};

/* Hash table of named sections.  */
static GTY(()) hash_table<section_hasher> *section_htab;

/* Hash descriptor for object_blocks: keyed by their section.  */
struct object_block_hasher : ggc_ptr_hash<object_block>
{
  typedef const section *compare_type;

  static hashval_t hash (object_block *);
  static bool equal (object_block *, const section *);
};

/* A table of object_blocks, indexed by section.  */
static GTY(()) hash_table<object_block_hasher> *object_block_htab;

/* The next number to use for internal anchor labels.  */
static GTY(()) int anchor_labelno;

/* A pool of constants that can be shared between functions.  */
static GTY(()) struct rtx_constant_pool *shared_constant_pool;
207
208 /* Helper routines for maintaining section_htab. */
209
210 bool
equal(section * old,const char * new_name)211 section_hasher::equal (section *old, const char *new_name)
212 {
213 return strcmp (old->named.name, new_name) == 0;
214 }
215
216 hashval_t
hash(section * old)217 section_hasher::hash (section *old)
218 {
219 return htab_hash_string (old->named.name);
220 }
221
222 /* Return a hash value for section SECT. */
223
224 static hashval_t
hash_section(section * sect)225 hash_section (section *sect)
226 {
227 if (sect->common.flags & SECTION_NAMED)
228 return htab_hash_string (sect->named.name);
229 return sect->common.flags & ~SECTION_DECLARED;
230 }
231
232 /* Helper routines for maintaining object_block_htab. */
233
234 inline bool
equal(object_block * old,const section * new_section)235 object_block_hasher::equal (object_block *old, const section *new_section)
236 {
237 return old->sect == new_section;
238 }
239
240 hashval_t
hash(object_block * old)241 object_block_hasher::hash (object_block *old)
242 {
243 return hash_section (old->sect);
244 }
245
246 /* Return a new unnamed section with the given fields. */
247
248 section *
get_unnamed_section(unsigned int flags,void (* callback)(const void *),const void * data)249 get_unnamed_section (unsigned int flags, void (*callback) (const void *),
250 const void *data)
251 {
252 section *sect;
253
254 sect = ggc_alloc<section> ();
255 sect->unnamed.common.flags = flags | SECTION_UNNAMED;
256 sect->unnamed.callback = callback;
257 sect->unnamed.data = data;
258 sect->unnamed.next = unnamed_sections;
259
260 unnamed_sections = sect;
261 return sect;
262 }
263
264 /* Return a SECTION_NOSWITCH section with the given fields. */
265
266 static section *
get_noswitch_section(unsigned int flags,noswitch_section_callback callback)267 get_noswitch_section (unsigned int flags, noswitch_section_callback callback)
268 {
269 section *sect;
270
271 sect = ggc_alloc<section> ();
272 sect->noswitch.common.flags = flags | SECTION_NOSWITCH;
273 sect->noswitch.callback = callback;
274
275 return sect;
276 }
277
/* Return the named section structure associated with NAME.  Create
   a new section with the given fields if no such structure exists.
   DECL is the declaration associated with the section (used for
   diagnostics on flag conflicts) and FLAGS are SECTION_* bits.  */

section *
get_section (const char *name, unsigned int flags, tree decl)
{
  section *sect, **slot;

  slot = section_htab->find_slot_with_hash (name, htab_hash_string (name),
					    INSERT);
  flags |= SECTION_NAMED;
  if (*slot == NULL)
    {
      /* First request for this name: build and register the section.  */
      sect = ggc_alloc<section> ();
      sect->named.common.flags = flags;
      sect->named.name = ggc_strdup (name);
      sect->named.decl = decl;
      *slot = sect;
    }
  else
    {
      /* The section already exists; reconcile the requested flags with
	 the recorded ones, diagnosing genuine conflicts.  */
      sect = *slot;
      /* It is fine if one of the sections has SECTION_NOTYPE as long as
	 the other has none of the contrary flags (see the logic at the end
	 of default_section_type_flags, below).  */
      if (((sect->common.flags ^ flags) & SECTION_NOTYPE)
	  && !((sect->common.flags | flags)
	       & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE
		  | (HAVE_COMDAT_GROUP ? SECTION_LINKONCE : 0))))
	{
	  /* Unify on SECTION_NOTYPE so the equality check below does
	     not report a spurious conflict.  */
	  sect->common.flags |= SECTION_NOTYPE;
	  flags |= SECTION_NOTYPE;
	}
      if ((sect->common.flags & ~SECTION_DECLARED) != flags
	  && ((sect->common.flags | flags) & SECTION_OVERRIDE) == 0)
	{
	  /* It is fine if one of the section flags is
	     SECTION_WRITE | SECTION_RELRO and the other has none of these
	     flags (i.e. read-only) in named sections and either the
	     section hasn't been declared yet or has been declared as writable.
	     In that case just make sure the resulting flags are
	     SECTION_WRITE | SECTION_RELRO, ie. writable only because of
	     relocations.  */
	  if (((sect->common.flags ^ flags) & (SECTION_WRITE | SECTION_RELRO))
	      == (SECTION_WRITE | SECTION_RELRO)
	      && (sect->common.flags
		  & ~(SECTION_DECLARED | SECTION_WRITE | SECTION_RELRO))
	      == (flags & ~(SECTION_WRITE | SECTION_RELRO))
	      && ((sect->common.flags & SECTION_DECLARED) == 0
		  || (sect->common.flags & SECTION_WRITE)))
	    {
	      sect->common.flags |= (SECTION_WRITE | SECTION_RELRO);
	      return sect;
	    }
	  /* Sanity check user variables for flag changes.  */
	  if (sect->named.decl != NULL
	      && DECL_P (sect->named.decl)
	      && decl != sect->named.decl)
	    {
	      /* Report against the conflicting decl when we have one.  */
	      if (decl != NULL && DECL_P (decl))
		error ("%+qD causes a section type conflict with %qD",
		       decl, sect->named.decl);
	      else
		error ("section type conflict with %qD", sect->named.decl);
	      inform (DECL_SOURCE_LOCATION (sect->named.decl),
		      "%qD was declared here", sect->named.decl);
	    }
	  else if (decl != NULL && DECL_P (decl))
	    error ("%+qD causes a section type conflict", decl);
	  else
	    error ("section type conflict");
	  /* Make sure we don't error about one section multiple times.  */
	  sect->common.flags |= SECTION_OVERRIDE;
	}
    }
  return sect;
}
355
356 /* Return true if the current compilation mode benefits from having
357 objects grouped into blocks. */
358
359 static bool
use_object_blocks_p(void)360 use_object_blocks_p (void)
361 {
362 return flag_section_anchors;
363 }
364
365 /* Return the object_block structure for section SECT. Create a new
366 structure if we haven't created one already. Return null if SECT
367 itself is null. Return also null for mergeable sections since
368 section anchors can't be used in mergeable sections anyway,
369 because the linker might move objects around, and using the
370 object blocks infrastructure in that case is both a waste and a
371 maintenance burden. */
372
373 static struct object_block *
get_block_for_section(section * sect)374 get_block_for_section (section *sect)
375 {
376 struct object_block *block;
377
378 if (sect == NULL)
379 return NULL;
380
381 if (sect->common.flags & SECTION_MERGE)
382 return NULL;
383
384 object_block **slot
385 = object_block_htab->find_slot_with_hash (sect, hash_section (sect),
386 INSERT);
387 block = *slot;
388 if (block == NULL)
389 {
390 block = ggc_cleared_alloc<object_block> ();
391 block->sect = sect;
392 *slot = block;
393 }
394 return block;
395 }
396
397 /* Create a symbol with label LABEL and place it at byte offset
398 OFFSET in BLOCK. OFFSET can be negative if the symbol's offset
399 is not yet known. LABEL must be a garbage-collected string. */
400
401 static rtx
create_block_symbol(const char * label,struct object_block * block,HOST_WIDE_INT offset)402 create_block_symbol (const char *label, struct object_block *block,
403 HOST_WIDE_INT offset)
404 {
405 rtx symbol;
406 unsigned int size;
407
408 /* Create the extended SYMBOL_REF. */
409 size = RTX_HDR_SIZE + sizeof (struct block_symbol);
410 symbol = (rtx) ggc_internal_alloc (size);
411
412 /* Initialize the normal SYMBOL_REF fields. */
413 memset (symbol, 0, size);
414 PUT_CODE (symbol, SYMBOL_REF);
415 PUT_MODE (symbol, Pmode);
416 XSTR (symbol, 0) = label;
417 SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_HAS_BLOCK_INFO;
418
419 /* Initialize the block_symbol stuff. */
420 SYMBOL_REF_BLOCK (symbol) = block;
421 SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;
422
423 return symbol;
424 }
425
426 /* Return a section with a particular name and with whatever SECTION_*
427 flags section_type_flags deems appropriate. The name of the section
428 is taken from NAME if nonnull, otherwise it is taken from DECL's
429 DECL_SECTION_NAME. DECL is the decl associated with the section
430 (see the section comment for details) and RELOC is as for
431 section_type_flags. */
432
433 section *
get_named_section(tree decl,const char * name,int reloc)434 get_named_section (tree decl, const char *name, int reloc)
435 {
436 unsigned int flags;
437
438 if (name == NULL)
439 {
440 gcc_assert (decl && DECL_P (decl) && DECL_SECTION_NAME (decl));
441 name = DECL_SECTION_NAME (decl);
442 }
443
444 flags = targetm.section_type_flags (decl, name, reloc);
445 return get_section (name, flags, decl);
446 }
447
448 /* Worker for resolve_unique_section. */
449
450 static bool
set_implicit_section(struct symtab_node * n,void * data ATTRIBUTE_UNUSED)451 set_implicit_section (struct symtab_node *n, void *data ATTRIBUTE_UNUSED)
452 {
453 n->implicit_section = true;
454 return false;
455 }
456
457 /* If required, set DECL_SECTION_NAME to a unique name. */
458
459 void
resolve_unique_section(tree decl,int reloc ATTRIBUTE_UNUSED,int flag_function_or_data_sections)460 resolve_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED,
461 int flag_function_or_data_sections)
462 {
463 if (DECL_SECTION_NAME (decl) == NULL
464 && targetm_common.have_named_sections
465 && (flag_function_or_data_sections
466 || DECL_COMDAT_GROUP (decl)))
467 {
468 targetm.asm_out.unique_section (decl, reloc);
469 if (DECL_SECTION_NAME (decl))
470 symtab_node::get (decl)->call_for_symbol_and_aliases
471 (set_implicit_section, NULL, true);
472 }
473 }
474
#ifdef BSS_SECTION_ASM_OP

#ifdef ASM_OUTPUT_ALIGNED_BSS

/* Utility function for targets to use in implementing
   ASM_OUTPUT_ALIGNED_BSS: emit NAME into the BSS section of FILE,
   reserving SIZE bytes aligned to ALIGN bits.
   ??? It is believed that this function will work in most cases so such
   support is localized here.  */

static void
asm_output_aligned_bss (FILE *file, tree decl ATTRIBUTE_UNUSED,
			const char *name, unsigned HOST_WIDE_INT size,
			int align)
{
  switch_to_section (bss_section);
  /* ALIGN is in bits; the directive takes a log2 byte alignment.  */
  ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
#ifdef ASM_DECLARE_OBJECT_NAME
  last_assemble_variable_decl = decl;
  ASM_DECLARE_OBJECT_NAME (file, name, decl);
#else
  /* Standard thing is just output label for the object.  */
  ASM_OUTPUT_LABEL (file, name);
#endif /* ASM_DECLARE_OBJECT_NAME */
  /* Reserve the space; emit at least one byte even for a zero-sized
     object.  */
  ASM_OUTPUT_SKIP (file, size ? size : 1);
}

#endif

#endif /* BSS_SECTION_ASM_OP */
504
505 #ifndef USE_SELECT_SECTION_FOR_FUNCTIONS
506 /* Return the hot section for function DECL. Return text_section for
507 null DECLs. */
508
509 static section *
hot_function_section(tree decl)510 hot_function_section (tree decl)
511 {
512 if (decl != NULL_TREE
513 && DECL_SECTION_NAME (decl) != NULL
514 && targetm_common.have_named_sections)
515 return get_named_section (decl, NULL, 0);
516 else
517 return text_section;
518 }
519 #endif
520
/* Return a text section for DECL.

   If DECL is null or has no DECL_SECTION_NAME, return the section
   named TEXT_SECTION_NAME.

   When DECL_SECTION_NAME is non-NULL and NAMED_SECTION_SUFFIX is
   non-NULL, produce a section named by appending NAMED_SECTION_SUFFIX
   to the stripped DECL_SECTION_NAME.

   Otherwise, when DECL's section was implicitly assigned, produce
   "TEXT_SECTION_NAME.<assembler name>"; when the section was given
   explicitly by the user, return NULL so the caller leaves it alone.  */

section *
get_named_text_section (tree decl,
			const char *text_section_name,
			const char *named_section_suffix)
{
  if (decl && DECL_SECTION_NAME (decl))
    {
      if (named_section_suffix)
	{
	  const char *dsn = DECL_SECTION_NAME (decl);
	  const char *stripped_name;
	  char *name, *buffer;

	  /* Copy to writable stack storage before stripping the
	     target's name encoding.  */
	  name = (char *) alloca (strlen (dsn) + 1);
	  memcpy (name, dsn,
		  strlen (dsn) + 1);

	  stripped_name = targetm.strip_name_encoding (name);

	  buffer = ACONCAT ((stripped_name, named_section_suffix, NULL));
	  return get_named_section (decl, buffer, 0);
	}
      else if (symtab_node::get (decl)->implicit_section)
	{
	  const char *name;

	  /* Do not try to split gnu_linkonce functions.  This gets somewhat
	     slippery.  */
	  if (DECL_COMDAT_GROUP (decl) && !HAVE_COMDAT_GROUP)
	    return NULL;
	  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
	  name = targetm.strip_name_encoding (name);
	  return get_named_section (decl, ACONCAT ((text_section_name, ".",
						    name, NULL)), 0);
	}
      else
	return NULL;
    }
  return get_named_section (decl, text_section_name, 0);
}
569
/* Choose named function section based on its frequency.  */

section *
default_function_section (tree decl, enum node_frequency freq,
			  bool startup, bool exit)
{
#if defined HAVE_LD_EH_GC_SECTIONS && defined HAVE_LD_EH_GC_SECTIONS_BUG
  /* Old GNU linkers have buggy --gc-section support, which sometimes
     results in .gcc_except_table* sections being garbage collected.  */
  if (decl
      && symtab_node::get (decl)->implicit_section)
    return NULL;
#endif

  /* Without -freorder-functions (or named-section support) there is
     nothing useful to do here.  */
  if (!flag_reorder_functions
      || !targetm_common.have_named_sections)
    return NULL;
  /* Startup code should go to startup subsection unless it is
     unlikely executed (this happens especially with function splitting
     where we can split away unnecessary parts of static constructors.  */
  if (startup && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
    {
      /* During LTO the tp_first_run profiling will naturally place all
	 initialization code first.  Using separate section is
	 counter-productive because startup only code may call functions
	 which are no longer startup only.  */
      if (!in_lto_p
	  || !cgraph_node::get (decl)->tp_first_run
	  || !opt_for_fn (decl, flag_profile_reorder_functions))
	return get_named_text_section (decl, ".text.startup", NULL);
      else
	return NULL;
    }

  /* Similarly for exit.  */
  if (exit && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
    return get_named_text_section (decl, ".text.exit", NULL);

  /* Group cold functions together, similarly for hot code.  */
  switch (freq)
    {
      case NODE_FREQUENCY_UNLIKELY_EXECUTED:
	return get_named_text_section (decl, ".text.unlikely", NULL);
      case NODE_FREQUENCY_HOT:
	return get_named_text_section (decl, ".text.hot", NULL);
      /* All other frequencies stay in the default text section.  */
      default:
	return NULL;
    }
}
620
/* Return the section for function DECL.

   If DECL is NULL_TREE, return the text section.  We can be passed
   NULL_TREE under some circumstances by dbxout.c at least.

   If FORCE_COLD is true, return cold function section ignoring
   the frequency info of cgraph_node.  */

static section *
function_section_1 (tree decl, bool force_cold)
{
  section *section = NULL;
  enum node_frequency freq = NODE_FREQUENCY_NORMAL;
  bool startup = false, exit = false;

  if (decl)
    {
      /* Pull frequency/startup/exit info from the cgraph node when
	 one exists; otherwise keep the NORMAL defaults.  */
      struct cgraph_node *node = cgraph_node::get (decl);

      if (node)
	{
	  freq = node->frequency;
	  startup = node->only_called_at_startup;
	  exit = node->only_called_at_exit;
	}
    }
  if (force_cold)
    freq = NODE_FREQUENCY_UNLIKELY_EXECUTED;

#ifdef USE_SELECT_SECTION_FOR_FUNCTIONS
  if (decl != NULL_TREE
      && DECL_SECTION_NAME (decl) != NULL)
    {
      /* Give the target's function_section hook first refusal, then
	 fall back to the decl's named section.  */
      if (targetm.asm_out.function_section)
	section = targetm.asm_out.function_section (decl, freq,
						    startup, exit);
      if (section)
	return section;
      return get_named_section (decl, NULL, 0);
    }
  else
    /* NOTE(review): this branch dereferences symtab_node::get (decl)
       even though DECL may be NULL_TREE per the function comment;
       presumably targets defining USE_SELECT_SECTION_FOR_FUNCTIONS
       never reach here with a null DECL -- confirm.  */
    return targetm.asm_out.select_section
      (decl, freq == NODE_FREQUENCY_UNLIKELY_EXECUTED,
       symtab_node::get (decl)->definition_alignment ());
#else
  if (targetm.asm_out.function_section)
    section = targetm.asm_out.function_section (decl, freq, startup, exit);
  if (section)
    return section;
  return hot_function_section (decl);
#endif
}
673
674 /* Return the section for function DECL.
675
676 If DECL is NULL_TREE, return the text section. We can be passed
677 NULL_TREE under some circumstances by dbxout.c at least. */
678
679 section *
function_section(tree decl)680 function_section (tree decl)
681 {
682 /* Handle cases where function splitting code decides
683 to put function entry point into unlikely executed section
684 despite the fact that the function itself is not cold
685 (i.e. it is called rarely but contains a hot loop that is
686 better to live in hot subsection for the code locality). */
687 return function_section_1 (decl,
688 first_function_block_is_cold);
689 }
690
691 /* Return the section for the current function, take IN_COLD_SECTION_P
692 into account. */
693
694 section *
current_function_section(void)695 current_function_section (void)
696 {
697 return function_section_1 (current_function_decl, in_cold_section_p);
698 }
699
700 /* Tell assembler to switch to unlikely-to-be-executed text section. */
701
702 section *
unlikely_text_section(void)703 unlikely_text_section (void)
704 {
705 return function_section_1 (current_function_decl, true);
706 }
707
708 /* When called within a function context, return true if the function
709 has been assigned a cold text section and if SECT is that section.
710 When called outside a function context, return true if SECT is the
711 default cold section. */
712
713 bool
unlikely_text_section_p(section * sect)714 unlikely_text_section_p (section *sect)
715 {
716 return sect == function_section_1 (current_function_decl, true);
717 }
718
719 /* Switch to the other function partition (if inside of hot section
720 into cold section, otherwise into the hot section). */
721
722 void
switch_to_other_text_partition(void)723 switch_to_other_text_partition (void)
724 {
725 in_cold_section_p = !in_cold_section_p;
726 switch_to_section (current_function_section ());
727 }
728
/* Return the read-only data section associated with function DECL.  */

section *
default_function_rodata_section (tree decl)
{
  if (decl != NULL_TREE && DECL_SECTION_NAME (decl))
    {
      const char *name = DECL_SECTION_NAME (decl);

      if (DECL_COMDAT_GROUP (decl) && HAVE_COMDAT_GROUP)
	{
	  const char *dot;
	  size_t len;
	  char* rname;

	  /* Take everything from the first '.' after position 0 as the
	     suffix; if there is no such dot, reuse the whole name.  */
	  dot = strchr (name + 1, '.');
	  if (!dot)
	    dot = name;
	  /* 8 = strlen (".rodata") + 1 for the terminating NUL.  */
	  len = strlen (dot) + 8;
	  rname = (char *) alloca (len);

	  strcpy (rname, ".rodata");
	  strcat (rname, dot);
	  return get_section (rname, SECTION_LINKONCE, decl);
	}
      /* For .gnu.linkonce.t.foo we want to use .gnu.linkonce.r.foo.  */
      else if (DECL_COMDAT_GROUP (decl)
	       && strncmp (name, ".gnu.linkonce.t.", 16) == 0)
	{
	  size_t len = strlen (name) + 1;
	  char *rname = (char *) alloca (len);

	  memcpy (rname, name, len);
	  /* Index 14 is the 't' of ".gnu.linkonce.t."; rewrite to 'r'.  */
	  rname[14] = 'r';
	  return get_section (rname, SECTION_LINKONCE, decl);
	}
      /* For .text.foo we want to use .rodata.foo.  */
      else if (flag_function_sections && flag_data_sections
	       && strncmp (name, ".text.", 6) == 0)
	{
	  size_t len = strlen (name) + 1;
	  /* ".rodata" is two bytes longer than ".text", hence LEN + 2.  */
	  char *rname = (char *) alloca (len + 2);

	  memcpy (rname, ".rodata", 7);
	  /* Append the ".foo" suffix, NUL included.  */
	  memcpy (rname + 7, name + 5, len - 5);
	  return get_section (rname, 0, decl);
	}
    }

  return readonly_data_section;
}
780
781 /* Return the read-only data section associated with function DECL
782 for targets where that section should be always the single
783 readonly data section. */
784
785 section *
default_no_function_rodata_section(tree decl ATTRIBUTE_UNUSED)786 default_no_function_rodata_section (tree decl ATTRIBUTE_UNUSED)
787 {
788 return readonly_data_section;
789 }
790
791 /* A subroutine of mergeable_string_section and mergeable_constant_section. */
792
793 static const char *
function_mergeable_rodata_prefix(void)794 function_mergeable_rodata_prefix (void)
795 {
796 section *s = targetm.asm_out.function_rodata_section (current_function_decl);
797 if (SECTION_STYLE (s) == SECTION_NAMED)
798 return s->named.name;
799 else
800 return targetm.asm_out.mergeable_rodata_prefix;
801 }
802
/* Return the section to use for string merging.  */

static section *
mergeable_string_section (tree decl ATTRIBUTE_UNUSED,
			  unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
			  unsigned int flags ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT len;

  /* Mergeable string sections require gas SHF_MERGE support and a
     string constant whose in-memory size matches its literal length.  */
  if (HAVE_GAS_SHF_MERGE && flag_merge_constants
      && TREE_CODE (decl) == STRING_CST
      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
      && align <= 256
      && (len = int_size_in_bytes (TREE_TYPE (decl))) > 0
      && TREE_STRING_LENGTH (decl) == len)
    {
      scalar_int_mode mode;
      unsigned int modesize;
      const char *str;
      HOST_WIDE_INT i;
      int j, unit;
      const char *prefix = function_mergeable_rodata_prefix ();
      char *name = (char *) alloca (strlen (prefix) + 30);

      mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (TREE_TYPE (decl)));
      modesize = GET_MODE_BITSIZE (mode);
      /* Only element sizes that are a power of two between 1 and 32
	 bytes are supported.  */
      if (modesize >= 8 && modesize <= 256
	  && (modesize & (modesize - 1)) == 0)
	{
	  if (align < modesize)
	    align = modesize;

	  if (!HAVE_LD_ALIGNED_SHF_MERGE && align > 8)
	    return readonly_data_section;

	  str = TREE_STRING_POINTER (decl);
	  unit = GET_MODE_SIZE (mode);

	  /* Check for embedded NUL characters.  The outer loop stops at
	     the first all-zero element (the terminator) or at the end
	     of the string.  */
	  for (i = 0; i < len; i += unit)
	    {
	      for (j = 0; j < unit; j++)
		if (str[i + j] != '\0')
		  break;
	      if (j == unit)
		break;
	    }
	  /* Merge only if the terminator is the final element (for
	     UNIT == 1 an implicit terminator at the very end also
	     qualifies).  */
	  if (i == len - unit || (unit == 1 && i == len))
	    {
	      sprintf (name, "%s.str%d.%d", prefix,
		       modesize / 8, (int) (align / 8));
	      flags |= (modesize / 8) | SECTION_MERGE | SECTION_STRINGS;
	      return get_section (name, flags, NULL);
	    }
	}
    }

  return readonly_data_section;
}
862
863 /* Return the section to use for constant merging. */
864
865 section *
mergeable_constant_section(machine_mode mode ATTRIBUTE_UNUSED,unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,unsigned int flags ATTRIBUTE_UNUSED)866 mergeable_constant_section (machine_mode mode ATTRIBUTE_UNUSED,
867 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
868 unsigned int flags ATTRIBUTE_UNUSED)
869 {
870 if (HAVE_GAS_SHF_MERGE && flag_merge_constants
871 && mode != VOIDmode
872 && mode != BLKmode
873 && known_le (GET_MODE_BITSIZE (mode), align)
874 && align >= 8
875 && align <= 256
876 && (align & (align - 1)) == 0
877 && (HAVE_LD_ALIGNED_SHF_MERGE ? 1 : align == 8))
878 {
879 const char *prefix = function_mergeable_rodata_prefix ();
880 char *name = (char *) alloca (strlen (prefix) + 30);
881
882 sprintf (name, "%s.cst%d", prefix, (int) (align / 8));
883 flags |= (align / 8) | SECTION_MERGE;
884 return get_section (name, flags, NULL);
885 }
886 return readonly_data_section;
887 }
888
/* Given NAME, a putative register name, discard any customary prefixes.  */

static const char *
strip_reg_name (const char *name)
{
#ifdef REGISTER_PREFIX
  /* Drop the target's own register prefix if present.  */
  if (strncmp (name, REGISTER_PREFIX, strlen (REGISTER_PREFIX)) == 0)
    name += strlen (REGISTER_PREFIX);
#endif
  /* Also tolerate the common '%' and '#' spellings.  */
  if (*name == '%' || *name == '#')
    ++name;
  return name;
}
902
903 /* The user has asked for a DECL to have a particular name. Set (or
904 change) it in such a way that we don't prefix an underscore to
905 it. */
906 void
set_user_assembler_name(tree decl,const char * name)907 set_user_assembler_name (tree decl, const char *name)
908 {
909 char *starred = (char *) alloca (strlen (name) + 2);
910 starred[0] = '*';
911 strcpy (starred + 1, name);
912 symtab->change_decl_assembler_name (decl, get_identifier (starred));
913 SET_DECL_RTL (decl, NULL_RTX);
914 }
915
/* Decode an `asm' spec for a declaration as a register name.
   Return the register number, or -1 if nothing specified,
   or -2 if the ASMSPEC is not `cc' or `memory' and is not recognized,
   or -3 if ASMSPEC is `cc' and is not recognized,
   or -4 if ASMSPEC is `memory' and is not recognized.
   Accept an exact spelling or a decimal number.
   Prefixes such as % are optional.  */

int
decode_reg_name_and_count (const char *asmspec, int *pnregs)
{
  /* Presume just one register is clobbered.  */
  *pnregs = 1;

  if (asmspec != 0)
    {
      int i;

      /* Get rid of confusing prefixes.  */
      asmspec = strip_reg_name (asmspec);

      /* Allow a decimal number as a "register name".  */
      for (i = strlen (asmspec) - 1; i >= 0; i--)
	if (! ISDIGIT (asmspec[i]))
	  break;
      if (asmspec[0] != 0 && i < 0)
	{
	  /* Every character was a digit: interpret as a hard register
	     number, validating that it names an existing register.  */
	  i = atoi (asmspec);
	  if (i < FIRST_PSEUDO_REGISTER && i >= 0 && reg_names[i][0])
	    return i;
	  else
	    return -2;
	}

      /* Exact match against the target's canonical register names.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (reg_names[i][0]
	    && ! strcmp (asmspec, strip_reg_name (reg_names[i])))
	  return i;

#ifdef OVERLAPPING_REGISTER_NAMES
      {
	/* Names that denote a group of consecutive hard registers;
	   report the group size through *PNREGS.  */
	static const struct
	{
	  const char *const name;
	  const int number;
	  const int nregs;
	} table[] = OVERLAPPING_REGISTER_NAMES;

	for (i = 0; i < (int) ARRAY_SIZE (table); i++)
	  if (table[i].name[0]
	      && ! strcmp (asmspec, table[i].name))
	    {
	      *pnregs = table[i].nregs;
	      return table[i].number;
	    }
      }
#endif /* OVERLAPPING_REGISTER_NAMES */

#ifdef ADDITIONAL_REGISTER_NAMES
      {
	/* Target-specific alias names for single registers.  */
	static const struct { const char *const name; const int number; } table[]
	  = ADDITIONAL_REGISTER_NAMES;

	for (i = 0; i < (int) ARRAY_SIZE (table); i++)
	  if (table[i].name[0]
	      && ! strcmp (asmspec, table[i].name)
	      && reg_names[table[i].number][0])
	    return table[i].number;
      }
#endif /* ADDITIONAL_REGISTER_NAMES */

      /* Distinguish the special "memory" and "cc" clobbers from a
	 genuinely unknown name.  */
      if (!strcmp (asmspec, "memory"))
	return -4;

      if (!strcmp (asmspec, "cc"))
	return -3;

      return -2;
    }

  return -1;
}
998
999 int
decode_reg_name(const char * name)1000 decode_reg_name (const char *name)
1001 {
1002 int count;
1003 return decode_reg_name_and_count (name, &count);
1004 }
1005
1006
1007 /* Return true if DECL's initializer is suitable for a BSS section. */
1008
1009 bool
bss_initializer_p(const_tree decl,bool named)1010 bss_initializer_p (const_tree decl, bool named)
1011 {
1012 /* Do not put non-common constants into the .bss section, they belong in
1013 a readonly section, except when NAMED is true. */
1014 return ((!TREE_READONLY (decl) || DECL_COMMON (decl) || named)
1015 && (DECL_INITIAL (decl) == NULL
1016 /* In LTO we have no errors in program; error_mark_node is used
1017 to mark offlined constructors. */
1018 || (DECL_INITIAL (decl) == error_mark_node
1019 && !in_lto_p)
1020 || (flag_zero_initialized_in_bss
1021 && initializer_zerop (DECL_INITIAL (decl)))));
1022 }
1023
/* Compute the alignment of variable specified by DECL.
   DONT_OUTPUT_DATA is from assemble_variable.  */

void
align_variable (tree decl, bool dont_output_data)
{
  unsigned int align = DECL_ALIGN (decl);

  /* In the case for initializing an array whose length isn't specified,
     where we have not yet been able to do the layout,
     figure out the proper alignment now (from the element type).  */
  if (dont_output_data && DECL_SIZE (decl) == 0
      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    align = MAX (align, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));

  /* Some object file formats have a maximum alignment which they support.
     In particular, a.out format supports a maximum alignment of 4.  */
  if (align > MAX_OFILE_ALIGNMENT)
    {
      error ("alignment of %q+D is greater than maximum object "
             "file alignment %d", decl,
	     MAX_OFILE_ALIGNMENT/BITS_PER_UNIT);
      align = MAX_OFILE_ALIGNMENT;
    }

  if (! DECL_USER_ALIGN (decl))
    {
#ifdef DATA_ABI_ALIGNMENT
      unsigned int data_abi_align
	= DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
      /* For backwards compatibility, don't assume the ABI alignment for
	 TLS variables.  */
      if (! DECL_THREAD_LOCAL_P (decl) || data_abi_align <= BITS_PER_WORD)
	align = data_abi_align;
#endif

      /* On some machines, it is good to increase alignment sometimes.
	 But as DECL_ALIGN is used both for actually emitting the variable
	 and for code accessing the variable as guaranteed alignment, we
	 can only increase the alignment if it is a performance optimization
	 if the references to it must bind to the current definition.  */
      if (decl_binds_to_current_def_p (decl)
	  && !DECL_VIRTUAL_P (decl))
	{
#ifdef DATA_ALIGNMENT
	  unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
	  /* Don't increase alignment too much for TLS variables - TLS space
	     is too precious.  */
	  if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
	    align = data_align;
#endif
	  /* Let the target boost alignment based on the initializer too,
	     when there is a usable one.  */
	  if (DECL_INITIAL (decl) != 0
	      /* In LTO we have no errors in program; error_mark_node is used
		 to mark offlined constructors.  */
	      && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
	    {
	      unsigned int const_align
		= targetm.constant_alignment (DECL_INITIAL (decl), align);
	      /* Don't increase alignment too much for TLS variables - TLS
		 space is too precious.  */
	      if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
		align = const_align;
	    }
	}
    }

  /* Reset the alignment in case we have made it tighter, so we can benefit
     from it in get_pointer_alignment.  */
  SET_DECL_ALIGN (decl, align);
}
1094
/* Return DECL_ALIGN (decl), possibly increased for optimization purposes
   beyond what align_variable returned.  */

static unsigned int
get_variable_align (tree decl)
{
  unsigned int align = DECL_ALIGN (decl);

  /* For user aligned vars or static vars align_variable already did
     everything.  */
  if (DECL_USER_ALIGN (decl) || !TREE_PUBLIC (decl))
    return align;

#ifdef DATA_ABI_ALIGNMENT
  /* align_variable deliberately skipped the ABI alignment for TLS
     variables; apply it here where it is only used as a section-placement
     hint, not stored back into DECL_ALIGN.  */
  if (DECL_THREAD_LOCAL_P (decl))
    align = DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
#endif

  /* For decls that bind to the current definition, align_variable
     did also everything, except for not assuming ABI required alignment
     of TLS variables.  For other vars, increase the alignment here
     as an optimization.  */
  if (!decl_binds_to_current_def_p (decl))
    {
      /* On some machines, it is good to increase alignment sometimes.  */
#ifdef DATA_ALIGNMENT
      unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
      /* Don't increase alignment too much for TLS variables - TLS space
	 is too precious.  */
      if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
	align = data_align;
#endif
      if (DECL_INITIAL (decl) != 0
	  /* In LTO we have no errors in program; error_mark_node is used
	     to mark offlined constructors.  */
	  && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
	{
	  unsigned int const_align
	    = targetm.constant_alignment (DECL_INITIAL (decl), align);
	  /* Don't increase alignment too much for TLS variables - TLS space
	     is too precious.  */
	  if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
	    align = const_align;
	}
    }

  return align;
}
1143
/* Return the section into which the given VAR_DECL or CONST_DECL
   should be placed.  PREFER_NOSWITCH_P is true if a noswitch
   section should be used wherever possible.  */

section *
get_variable_section (tree decl, bool prefer_noswitch_p)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  int reloc;
  /* Work on the ultimate alias target so aliases land in the same
     section as the variable they alias.  */
  varpool_node *vnode = varpool_node::get (decl);
  if (vnode)
    {
      vnode = vnode->ultimate_alias_target ();
      decl = vnode->decl;
    }

  if (TREE_TYPE (decl) != error_mark_node)
    as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

  /* We need the constructor to figure out reloc flag.  */
  if (vnode)
    vnode->get_constructor ();

  if (DECL_COMMON (decl))
    {
      /* If the decl has been given an explicit section name, or it resides
	 in a non-generic address space, then it isn't common, and shouldn't
	 be handled as such.  */
      gcc_assert (DECL_SECTION_NAME (decl) == NULL
		  && ADDR_SPACE_GENERIC_P (as));
      if (DECL_THREAD_LOCAL_P (decl))
	return tls_comm_section;
      else if (TREE_PUBLIC (decl) && bss_initializer_p (decl))
	return comm_section;
    }

  /* Compute which kinds of relocations the initializer needs; for a
     broken initializer, conservatively assume relocations whenever the
     type can contain pointers.  */
  if (DECL_INITIAL (decl) == error_mark_node)
    reloc = contains_pointers_p (TREE_TYPE (decl)) ? 3 : 0;
  else if (DECL_INITIAL (decl))
    reloc = compute_reloc_for_constant (DECL_INITIAL (decl));
  else
    reloc = 0;

  resolve_unique_section (decl, reloc, flag_data_sections);
  if (IN_NAMED_SECTION (decl))
    {
      section *sect = get_named_section (decl, NULL, reloc);

      /* A user-named BSS-style section can only hold zero initializers;
	 diagnose and drop a non-conforming initializer.  */
      if ((sect->common.flags & SECTION_BSS)
	  && !bss_initializer_p (decl, true))
	{
	  error_at (DECL_SOURCE_LOCATION (decl),
		    "only zero initializers are allowed in section %qs",
		    sect->named.name);
	  DECL_INITIAL (decl) = error_mark_node;
	}
      return sect;
    }

  if (ADDR_SPACE_GENERIC_P (as)
      && !DECL_THREAD_LOCAL_P (decl)
      && !(prefer_noswitch_p && targetm.have_switchable_bss_sections)
      && bss_initializer_p (decl))
    {
      /* ASan-protected globals need a switchable section even when
	 local, so only truly private unprotected decls use lcomm.  */
      if (!TREE_PUBLIC (decl)
	  && !((flag_sanitize & SANITIZE_ADDRESS)
	       && asan_protect_global (decl)))
	return lcomm_section;
      if (bss_noswitch_section)
	return bss_noswitch_section;
    }

  return targetm.asm_out.select_section (decl, reloc,
					 get_variable_align (decl));
}
1219
1220 /* Return the block into which object_block DECL should be placed. */
1221
1222 static struct object_block *
get_block_for_decl(tree decl)1223 get_block_for_decl (tree decl)
1224 {
1225 section *sect;
1226
1227 if (VAR_P (decl))
1228 {
1229 /* The object must be defined in this translation unit. */
1230 if (DECL_EXTERNAL (decl))
1231 return NULL;
1232
1233 /* There's no point using object blocks for something that is
1234 isolated by definition. */
1235 if (DECL_COMDAT_GROUP (decl))
1236 return NULL;
1237 }
1238
1239 /* We can only calculate block offsets if the decl has a known
1240 constant size. */
1241 if (DECL_SIZE_UNIT (decl) == NULL)
1242 return NULL;
1243 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (decl)))
1244 return NULL;
1245
1246 /* Find out which section should contain DECL. We cannot put it into
1247 an object block if it requires a standalone definition. */
1248 if (VAR_P (decl))
1249 align_variable (decl, 0);
1250 sect = get_variable_section (decl, true);
1251 if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
1252 return NULL;
1253
1254 return get_block_for_section (sect);
1255 }
1256
1257 /* Make sure block symbol SYMBOL is in block BLOCK. */
1258
1259 static void
change_symbol_block(rtx symbol,struct object_block * block)1260 change_symbol_block (rtx symbol, struct object_block *block)
1261 {
1262 if (block != SYMBOL_REF_BLOCK (symbol))
1263 {
1264 gcc_assert (SYMBOL_REF_BLOCK_OFFSET (symbol) < 0);
1265 SYMBOL_REF_BLOCK (symbol) = block;
1266 }
1267 }
1268
1269 /* Return true if it is possible to put DECL in an object_block. */
1270
1271 static bool
use_blocks_for_decl_p(tree decl)1272 use_blocks_for_decl_p (tree decl)
1273 {
1274 struct symtab_node *snode;
1275
1276 /* Only data DECLs can be placed into object blocks. */
1277 if (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
1278 return false;
1279
1280 /* DECL_INITIAL (decl) set to decl is a hack used for some decls that
1281 are never used from code directly and we never want object block handling
1282 for those. */
1283 if (DECL_INITIAL (decl) == decl)
1284 return false;
1285
1286 /* If this decl is an alias, then we don't want to emit a
1287 definition. */
1288 if (VAR_P (decl)
1289 && (snode = symtab_node::get (decl)) != NULL
1290 && snode->alias)
1291 return false;
1292
1293 return targetm.use_blocks_for_decl_p (decl);
1294 }
1295
/* Follow the IDENTIFIER_TRANSPARENT_ALIAS chain starting at *ALIAS
   until we find an identifier that is not itself a transparent alias.
   Modify the alias passed to it by reference (and all aliases on the
   way to the ultimate target), such that they do not have to be
   followed again, and return the ultimate target of the alias
   chain.  */

static inline tree
ultimate_transparent_alias_target (tree *alias)
{
  tree target = *alias;

  if (IDENTIFIER_TRANSPARENT_ALIAS (target))
    {
      /* A transparent alias must point at something via TREE_CHAIN.  */
      gcc_assert (TREE_CHAIN (target));
      /* Recurse; each level's TREE_CHAIN slot is overwritten with the
	 final target (path compression), so subsequent lookups are
	 one step.  */
      target = ultimate_transparent_alias_target (&TREE_CHAIN (target));
      /* The result must itself be a chain terminator.  */
      gcc_assert (! IDENTIFIER_TRANSPARENT_ALIAS (target)
		  && ! TREE_CHAIN (target));
      *alias = target;
    }

  return target;
}
1319
/* Create the DECL_RTL for a VAR_DECL or FUNCTION_DECL.  DECL should
   have static storage duration.  In other words, it should not be an
   automatic variable, including PARM_DECLs.

   There is, however, one exception: this function handles variables
   explicitly placed in a particular register by the user.

   This is never called for PARM_DECL nodes.  */

void
make_decl_rtl (tree decl)
{
  const char *name = 0;
  int reg_number;
  tree id;
  rtx x;

  /* Check that we are not being given an automatic variable.  */
  gcc_assert (TREE_CODE (decl) != PARM_DECL
	      && TREE_CODE (decl) != RESULT_DECL);

  /* A weak alias has TREE_PUBLIC set but not the other bits.  */
  gcc_assert (!VAR_P (decl)
	      || TREE_STATIC (decl)
	      || TREE_PUBLIC (decl)
	      || DECL_EXTERNAL (decl)
	      || DECL_REGISTER (decl));

  /* And that we were not given a type or a label.  */
  gcc_assert (TREE_CODE (decl) != TYPE_DECL
	      && TREE_CODE (decl) != LABEL_DECL);

  /* For a duplicate declaration, we can be called twice on the
     same DECL node.  Don't discard the RTL already made.  */
  if (DECL_RTL_SET_P (decl))
    {
      /* If the old RTL had the wrong mode, fix the mode.  */
      x = DECL_RTL (decl);
      if (GET_MODE (x) != DECL_MODE (decl))
	SET_DECL_RTL (decl, adjust_address_nv (x, DECL_MODE (decl), 0));

      if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
	return;

      /* ??? Another way to do this would be to maintain a hashed
	 table of such critters.  Instead of adding stuff to a DECL
	 to give certain attributes to it, we could use an external
	 hash map from DECL to set of attributes.  */

      /* Let the target reassign the RTL if it wants.
	 This is necessary, for example, when one machine specific
	 decl attribute overrides another.  */
      targetm.encode_section_info (decl, DECL_RTL (decl), false);

      /* If the symbol has a SYMBOL_REF_BLOCK field, update it based
	 on the new decl information.  */
      if (MEM_P (x)
	  && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (x, 0)))
	change_symbol_block (XEXP (x, 0), get_block_for_decl (decl));

      return;
    }

  /* If this variable belongs to the global constant pool, retrieve the
     pre-computed RTL or recompute it in LTO mode.  */
  if (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
    {
      SET_DECL_RTL (decl, output_constant_def (DECL_INITIAL (decl), 1));
      return;
    }

  id = DECL_ASSEMBLER_NAME (decl);
  name = IDENTIFIER_POINTER (id);

  /* A register variable whose assembler name does not start with '*'
     never had an actual register name attached.  */
  if (name[0] != '*' && TREE_CODE (decl) != FUNCTION_DECL
      && DECL_REGISTER (decl))
    {
      error ("register name not specified for %q+D", decl);
    }
  else if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
    {
      /* Skip the '*' to get the user-written register name.  */
      const char *asmspec = name+1;
      machine_mode mode = DECL_MODE (decl);
      reg_number = decode_reg_name (asmspec);
      /* First detect errors in declaring global registers.  */
      if (reg_number == -1)
	error ("register name not specified for %q+D", decl);
      else if (reg_number < 0)
	error ("invalid register name for %q+D", decl);
      else if (mode == BLKmode)
	error ("data type of %q+D isn%'t suitable for a register",
	       decl);
      else if (!in_hard_reg_set_p (accessible_reg_set, mode, reg_number))
	error ("the register specified for %q+D cannot be accessed"
	       " by the current target", decl);
      else if (!in_hard_reg_set_p (operand_reg_set, mode, reg_number))
	error ("the register specified for %q+D is not general enough"
	       " to be used as a register variable", decl);
      else if (!targetm.hard_regno_mode_ok (reg_number, mode))
	error ("register specified for %q+D isn%'t suitable for data type",
	       decl);
      /* Now handle properly declared static register variables.  */
      else
	{
	  int nregs;

	  if (DECL_INITIAL (decl) != 0 && TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = 0;
	      error ("global register variable has initial value");
	    }
	  if (TREE_THIS_VOLATILE (decl))
	    warning (OPT_Wvolatile_register_var,
		     "optimization may eliminate reads and/or "
		     "writes to register variables");

	  /* If the user specified one of the eliminables registers here,
	     e.g., FRAME_POINTER_REGNUM, we don't want to get this variable
	     confused with that register and be eliminated.  This usage is
	     somewhat suspect...  */

	  SET_DECL_RTL (decl, gen_raw_REG (mode, reg_number));
	  ORIGINAL_REGNO (DECL_RTL (decl)) = reg_number;
	  REG_USERVAR_P (DECL_RTL (decl)) = 1;

	  if (TREE_STATIC (decl))
	    {
	      /* Make this register global, so not usable for anything
		 else.  */
#ifdef ASM_DECLARE_REGISTER_GLOBAL
	      name = IDENTIFIER_POINTER (DECL_NAME (decl));
	      ASM_DECLARE_REGISTER_GLOBAL (asm_out_file, decl, reg_number, name);
#endif
	      /* Globalize every hard register the value occupies.  */
	      nregs = hard_regno_nregs (reg_number, mode);
	      while (nregs > 0)
		globalize_reg (decl, reg_number + --nregs);
	    }

	  /* As a register variable, it has no section.  */
	  return;
	}
      /* Avoid internal errors from invalid register
	 specifications.  */
      SET_DECL_ASSEMBLER_NAME (decl, NULL_TREE);
      DECL_HARD_REGISTER (decl) = 0;
      /* Also avoid SSA inconsistencies by pretending this is an external
	 decl now.  */
      DECL_EXTERNAL (decl) = 1;
      return;
    }
  /* Now handle ordinary static variables and functions (in memory).
     Also handle vars declared register invalidly.  */
  else if (name[0] == '*')
    {
#ifdef REGISTER_PREFIX
      if (strlen (REGISTER_PREFIX) != 0)
	{
	  reg_number = decode_reg_name (name);
	  if (reg_number >= 0 || reg_number == -3)
	    error ("register name given for non-register variable %q+D", decl);
	}
#endif
    }

  /* Specifying a section attribute on a variable forces it into a
     non-.bss section, and thus it cannot be common.  */
  /* FIXME: In general this code should not be necessary because
     visibility pass is doing the same work.  But notice_global_symbol
     is called early and it needs to make DECL_RTL to get the name.
     we take care of recomputing the DECL_RTL after visibility is changed.  */
  if (VAR_P (decl)
      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
      && DECL_SECTION_NAME (decl) != NULL
      && DECL_INITIAL (decl) == NULL_TREE
      && DECL_COMMON (decl))
    DECL_COMMON (decl) = 0;

  /* Variables can't be both common and weak.  */
  if (VAR_P (decl) && DECL_WEAK (decl))
    DECL_COMMON (decl) = 0;

  if (use_object_blocks_p () && use_blocks_for_decl_p (decl))
    x = create_block_symbol (name, get_block_for_decl (decl), -1);
  else
    {
      /* Use the address mode of the decl's address space, if known.  */
      machine_mode address_mode = Pmode;
      if (TREE_TYPE (decl) != error_mark_node)
	{
	  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
	  address_mode = targetm.addr_space.address_mode (as);
	}
      x = gen_rtx_SYMBOL_REF (address_mode, name);
    }
  SYMBOL_REF_WEAK (x) = DECL_WEAK (decl);
  SET_SYMBOL_REF_DECL (x, decl);

  /* Wrap the symbol in a MEM of the decl's mode and attach memory
     attributes (except for functions).  */
  x = gen_rtx_MEM (DECL_MODE (decl), x);
  if (TREE_CODE (decl) != FUNCTION_DECL)
    set_mem_attributes (x, decl, 1);
  SET_DECL_RTL (decl, x);

  /* Optionally set flags or add text to the name to record information
     such as that it is a function name.
     If the name is changed, the macro ASM_OUTPUT_LABELREF
     will have to know how to strip this information.  */
  targetm.encode_section_info (decl, DECL_RTL (decl), true);
}
1528
/* Like make_decl_rtl, but inhibit creation of new alias sets when
   calling make_decl_rtl.  Also, reset DECL_RTL before returning the
   rtl.  */

rtx
make_decl_rtl_for_debug (tree decl)
{
  unsigned int save_aliasing_flag;
  rtx rtl;

  /* Already computed: nothing to inhibit, return it as-is.  */
  if (DECL_RTL_SET_P (decl))
    return DECL_RTL (decl);

  /* Kludge alert!  Somewhere down the call chain, make_decl_rtl will
     call new_alias_set.  If running with -fcompare-debug, sometimes
     we do not want to create alias sets that will throw the alias
     numbers off in the comparison dumps.  So... clearing
     flag_strict_aliasing will keep new_alias_set() from creating a
     new set.  */
  save_aliasing_flag = flag_strict_aliasing;
  flag_strict_aliasing = 0;

  /* NOTE: reading DECL_RTL here is what triggers the (lazy) call into
     make_decl_rtl mentioned above.  */
  rtl = DECL_RTL (decl);
  /* Reset DECL_RTL back, as various parts of the compiler expects
     DECL_RTL set meaning it is actually going to be output.  */
  SET_DECL_RTL (decl, NULL);

  flag_strict_aliasing = save_aliasing_flag;
  return rtl;
}
1559
1560 /* Output a string of literal assembler code
1561 for an `asm' keyword used between functions. */
1562
1563 void
assemble_asm(tree string)1564 assemble_asm (tree string)
1565 {
1566 const char *p;
1567 app_enable ();
1568
1569 if (TREE_CODE (string) == ADDR_EXPR)
1570 string = TREE_OPERAND (string, 0);
1571
1572 p = TREE_STRING_POINTER (string);
1573 fprintf (asm_out_file, "%s%s\n", p[0] == '\t' ? "" : "\t", p);
1574 }
1575
/* Write the address of the entity given by SYMBOL to SEC.  */
void
assemble_addr_to_section (rtx symbol, section *sec)
{
  /* Emit a pointer-aligned, pointer-sized reference to SYMBOL in SEC.  */
  switch_to_section (sec);
  assemble_align (POINTER_SIZE);
  assemble_integer (symbol, POINTER_SIZE_UNITS, POINTER_SIZE, 1);
}
1584
1585 /* Return the numbered .ctors.N (if CONSTRUCTOR_P) or .dtors.N (if
1586 not) section for PRIORITY. */
1587 section *
get_cdtor_priority_section(int priority,bool constructor_p)1588 get_cdtor_priority_section (int priority, bool constructor_p)
1589 {
1590 /* Buffer conservatively large enough for the full range of a 32-bit
1591 int plus the text below. */
1592 char buf[18];
1593
1594 /* ??? This only works reliably with the GNU linker. */
1595 sprintf (buf, "%s.%.5u",
1596 constructor_p ? ".ctors" : ".dtors",
1597 /* Invert the numbering so the linker puts us in the proper
1598 order; constructors are run from right to left, and the
1599 linker sorts in increasing order. */
1600 MAX_INIT_PRIORITY - priority);
1601 return get_section (buf, SECTION_WRITE, NULL);
1602 }
1603
1604 void
default_named_section_asm_out_destructor(rtx symbol,int priority)1605 default_named_section_asm_out_destructor (rtx symbol, int priority)
1606 {
1607 section *sec;
1608
1609 if (priority != DEFAULT_INIT_PRIORITY)
1610 sec = get_cdtor_priority_section (priority,
1611 /*constructor_p=*/false);
1612 else
1613 sec = get_section (".dtors", SECTION_WRITE, NULL);
1614
1615 assemble_addr_to_section (symbol, sec);
1616 }
1617
#ifdef DTORS_SECTION_ASM_OP
/* Emit SYMBOL into the target's single .dtors section; PRIORITY is
   ignored because this scheme has no per-priority sections.  */
void
default_dtor_section_asm_out_destructor (rtx symbol,
					 int priority ATTRIBUTE_UNUSED)
{
  assemble_addr_to_section (symbol, dtors_section);
}
#endif
1626
1627 void
default_named_section_asm_out_constructor(rtx symbol,int priority)1628 default_named_section_asm_out_constructor (rtx symbol, int priority)
1629 {
1630 section *sec;
1631
1632 if (priority != DEFAULT_INIT_PRIORITY)
1633 sec = get_cdtor_priority_section (priority,
1634 /*constructor_p=*/true);
1635 else
1636 sec = get_section (".ctors", SECTION_WRITE, NULL);
1637
1638 assemble_addr_to_section (symbol, sec);
1639 }
1640
#ifdef CTORS_SECTION_ASM_OP
/* Emit SYMBOL into the target's single .ctors section; PRIORITY is
   ignored because this scheme has no per-priority sections.  */
void
default_ctor_section_asm_out_constructor (rtx symbol,
					  int priority ATTRIBUTE_UNUSED)
{
  assemble_addr_to_section (symbol, ctors_section);
}
#endif
1649
1650 /* CONSTANT_POOL_BEFORE_FUNCTION may be defined as an expression with
1651 a nonzero value if the constant pool should be output before the
1652 start of the function, or a zero value if the pool should output
1653 after the end of the function. The default is to put it before the
1654 start. */
1655
1656 #ifndef CONSTANT_POOL_BEFORE_FUNCTION
1657 #define CONSTANT_POOL_BEFORE_FUNCTION 1
1658 #endif
1659
1660 /* DECL is an object (either VAR_DECL or FUNCTION_DECL) which is going
1661 to be output to assembler.
1662 Set first_global_object_name and weak_global_object_name as appropriate. */
1663
1664 void
notice_global_symbol(tree decl)1665 notice_global_symbol (tree decl)
1666 {
1667 const char **t = &first_global_object_name;
1668
1669 if (first_global_object_name
1670 || !TREE_PUBLIC (decl)
1671 || DECL_EXTERNAL (decl)
1672 || !DECL_NAME (decl)
1673 || (VAR_P (decl) && DECL_HARD_REGISTER (decl))
1674 || (TREE_CODE (decl) != FUNCTION_DECL
1675 && (!VAR_P (decl)
1676 || (DECL_COMMON (decl)
1677 && (DECL_INITIAL (decl) == 0
1678 || DECL_INITIAL (decl) == error_mark_node)))))
1679 return;
1680
1681 /* We win when global object is found, but it is useful to know about weak
1682 symbol as well so we can produce nicer unique names. */
1683 if (DECL_WEAK (decl) || DECL_ONE_ONLY (decl) || flag_shlib)
1684 t = &weak_global_object_name;
1685
1686 if (!*t)
1687 {
1688 tree id = DECL_ASSEMBLER_NAME (decl);
1689 ultimate_transparent_alias_target (&id);
1690 *t = ggc_strdup (targetm.strip_name_encoding (IDENTIFIER_POINTER (id)));
1691 }
1692 }
1693
1694 /* If not using flag_reorder_blocks_and_partition, decide early whether the
1695 current function goes into the cold section, so that targets can use
1696 current_function_section during RTL expansion. DECL describes the
1697 function. */
1698
1699 void
decide_function_section(tree decl)1700 decide_function_section (tree decl)
1701 {
1702 first_function_block_is_cold = false;
1703
1704 if (DECL_SECTION_NAME (decl))
1705 {
1706 struct cgraph_node *node = cgraph_node::get (current_function_decl);
1707 /* Calls to function_section rely on first_function_block_is_cold
1708 being accurate. */
1709 first_function_block_is_cold = (node
1710 && node->frequency
1711 == NODE_FREQUENCY_UNLIKELY_EXECUTED);
1712 }
1713
1714 in_cold_section_p = first_function_block_is_cold;
1715 }
1716
1717 /* Get the function's name, as described by its RTL. This may be
1718 different from the DECL_NAME name used in the source file. */
1719 const char *
get_fnname_from_decl(tree decl)1720 get_fnname_from_decl (tree decl)
1721 {
1722 rtx x = DECL_RTL (decl);
1723 gcc_assert (MEM_P (x));
1724 x = XEXP (x, 0);
1725 gcc_assert (GET_CODE (x) == SYMBOL_REF);
1726 return XSTR (x, 0);
1727 }
1728
/* Output assembler code for the constant pool of a function and associated
   with defining the name of the function.  DECL describes the function.
   NAME is the function's name.  For the constant pool, we use the current
   constant pool data.  */

void
assemble_start_function (tree decl, const char *fnname)
{
  int align;
  char tmp_label[100];
  bool hot_label_written = false;

  /* For hot/cold partitioned functions, generate the four internal
     labels that bracket the hot and cold subsections.  */
  if (crtl->has_bb_partition)
    {
      ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTB", const_labelno);
      crtl->subsections.hot_section_label = ggc_strdup (tmp_label);
      ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDB", const_labelno);
      crtl->subsections.cold_section_label = ggc_strdup (tmp_label);
      ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTE", const_labelno);
      crtl->subsections.hot_section_end_label = ggc_strdup (tmp_label);
      ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDE", const_labelno);
      crtl->subsections.cold_section_end_label = ggc_strdup (tmp_label);
      const_labelno++;
      cold_function_name = NULL_TREE;
    }
  else
    {
      crtl->subsections.hot_section_label = NULL;
      crtl->subsections.cold_section_label = NULL;
      crtl->subsections.hot_section_end_label = NULL;
      crtl->subsections.cold_section_end_label = NULL;
    }

  /* The following code does not need preprocessing in the assembler.  */

  app_disable ();

  if (CONSTANT_POOL_BEFORE_FUNCTION)
    output_constant_pool (fnname, decl);

  align = symtab_node::get (decl)->definition_alignment ();

  /* Make sure the hot and cold text (code) sections are properly
     aligned.  This is necessary here in the case where the function
     has both hot and cold sections, because we don't want to re-set
     the alignment when the section switch happens mid-function.  */

  if (crtl->has_bb_partition)
    {
      first_function_block_is_cold = false;

      switch_to_section (unlikely_text_section ());
      assemble_align (align);
      ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_label);

      /* When the function starts with a cold section, we need to explicitly
	 align the hot section and write out the hot section label.
	 But if the current function is a thunk, we do not have a CFG.  */
      if (!cfun->is_thunk
	  && BB_PARTITION (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) == BB_COLD_PARTITION)
	{
	  switch_to_section (text_section);
	  assemble_align (align);
	  ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
	  hot_label_written = true;
	  first_function_block_is_cold = true;
	}
      in_cold_section_p = first_function_block_is_cold;
    }


  /* Switch to the correct text section for the start of the function.  */

  switch_to_section (function_section (decl));
  if (crtl->has_bb_partition && !hot_label_written)
    ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);

  /* Tell assembler to move to target machine's alignment for functions.  */
  align = floor_log2 (align / BITS_PER_UNIT);
  if (align > 0)
    {
      ASM_OUTPUT_ALIGN (asm_out_file, align);
    }

  /* Handle a user-specified function alignment.
     Note that we still need to align to DECL_ALIGN, as above,
     because ASM_OUTPUT_MAX_SKIP_ALIGN might not do any alignment at all.  */
  if (! DECL_USER_ALIGN (decl)
      && align_functions.levels[0].log > align
      && optimize_function_for_speed_p (cfun))
    {
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
      int align_log = align_functions.levels[0].log;
#endif
      int max_skip = align_functions.levels[0].maxskip;
      /* Cap the skip so a tiny function is not pushed past the
	 padding it would need to reach the next boundary.  */
      if (flag_limit_function_alignment && crtl->max_insn_address > 0
	  && max_skip >= crtl->max_insn_address)
	max_skip = crtl->max_insn_address - 1;

#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
      ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file, align_log, max_skip);
      if (max_skip == align_functions.levels[0].maxskip)
	ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file,
				   align_functions.levels[1].log,
				   align_functions.levels[1].maxskip);
#else
      ASM_OUTPUT_ALIGN (asm_out_file, align_functions.levels[0].log);
#endif
    }

#ifdef ASM_OUTPUT_FUNCTION_PREFIX
  ASM_OUTPUT_FUNCTION_PREFIX (asm_out_file, fnname);
#endif

  if (!DECL_IGNORED_P (decl))
    (*debug_hooks->begin_function) (decl);

  /* Make function name accessible from other files, if appropriate.  */

  if (TREE_PUBLIC (decl))
    {
      notice_global_symbol (decl);

      globalize_decl (decl);

      maybe_assemble_visibility (decl);
    }

  if (DECL_PRESERVE_P (decl))
    targetm.asm_out.mark_decl_preserved (fnname);

  /* Determine the patchable-function-entry parameters: command-line
     defaults, possibly overridden by a per-function attribute.  */
  unsigned HOST_WIDE_INT patch_area_size = function_entry_patch_area_size;
  unsigned HOST_WIDE_INT patch_area_entry = function_entry_patch_area_start;

  tree patchable_function_entry_attr
    = lookup_attribute ("patchable_function_entry", DECL_ATTRIBUTES (decl));
  if (patchable_function_entry_attr)
    {
      tree pp_val = TREE_VALUE (patchable_function_entry_attr);
      tree patchable_function_entry_value1 = TREE_VALUE (pp_val);

      patch_area_size = tree_to_uhwi (patchable_function_entry_value1);
      patch_area_entry = 0;
      if (TREE_CHAIN (pp_val) != NULL_TREE)
	{
	  tree patchable_function_entry_value2
	    = TREE_VALUE (TREE_CHAIN (pp_val));
	  patch_area_entry = tree_to_uhwi (patchable_function_entry_value2);
	}
    }

  if (patch_area_entry > patch_area_size)
    {
      if (patch_area_size > 0)
	warning (OPT_Wattributes,
		 "patchable function entry %wu exceeds size %wu",
		 patch_area_entry, patch_area_size);
      patch_area_entry = 0;
    }

  /* Emit the patching area before the entry label, if any.  */
  if (patch_area_entry > 0)
    targetm.asm_out.print_patchable_function_entry (asm_out_file,
						    patch_area_entry, true);

  /* Do any machine/system dependent processing of the function name.  */
#ifdef ASM_DECLARE_FUNCTION_NAME
  ASM_DECLARE_FUNCTION_NAME (asm_out_file, fnname, current_function_decl);
#else
  /* Standard thing is just output label for the function.  */
  ASM_OUTPUT_FUNCTION_LABEL (asm_out_file, fnname, current_function_decl);
#endif /* ASM_DECLARE_FUNCTION_NAME */

  /* And the area after the label.  Record it if we haven't done so yet.  */
  if (patch_area_size > patch_area_entry)
    targetm.asm_out.print_patchable_function_entry (asm_out_file,
						    patch_area_size
						    - patch_area_entry,
						    patch_area_entry == 0);

  if (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (decl)))
    saw_no_split_stack = true;
}
1912
/* Output assembler code associated with defining the size of the
   function.  DECL describes the function.  NAME is the function's name.  */

void
assemble_end_function (tree decl, const char *fnname ATTRIBUTE_UNUSED)
{
#ifdef ASM_DECLARE_FUNCTION_SIZE
  /* We could have switched section in the middle of the function.  */
  if (crtl->has_bb_partition)
    switch_to_section (function_section (decl));
  ASM_DECLARE_FUNCTION_SIZE (asm_out_file, fnname, decl);
#endif
  if (! CONSTANT_POOL_BEFORE_FUNCTION)
    {
      output_constant_pool (fnname, decl);
      switch_to_section (function_section (decl)); /* need to switch back */
    }
  /* Output labels for end of hot/cold text sections (to be used by
     debug info.)  */
  if (crtl->has_bb_partition)
    {
      section *save_text_section;

      /* Remember the current section so we can restore it afterwards.  */
      save_text_section = in_section;
      switch_to_section (unlikely_text_section ());
#ifdef ASM_DECLARE_COLD_FUNCTION_SIZE
      /* Emit a size directive for the cold partition, if it has a name.  */
      if (cold_function_name != NULL_TREE)
	ASM_DECLARE_COLD_FUNCTION_SIZE (asm_out_file,
					IDENTIFIER_POINTER (cold_function_name),
					decl);
#endif
      ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_end_label);
      /* The hot section end label goes at the end of whichever section
	 holds the hot partition.  */
      if (first_function_block_is_cold)
	switch_to_section (text_section);
      else
	switch_to_section (function_section (decl));
      ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_end_label);
      switch_to_section (save_text_section);
    }
}
1953
/* Assemble code to leave SIZE bytes of zeros.  */

void
assemble_zeros (unsigned HOST_WIDE_INT size)
{
  /* Do no output if -fsyntax-only.  */
  if (flag_syntax_only)
    return;

#ifdef ASM_NO_SKIP_IN_TEXT
  /* The `space' pseudo in the text section outputs nop insns rather than 0s,
     so we must output 0s explicitly in the text section.  */
  if (ASM_NO_SKIP_IN_TEXT && (in_section->common.flags & SECTION_CODE) != 0)
    {
      unsigned HOST_WIDE_INT i;
      /* Emit the zeros one byte at a time.  */
      for (i = 0; i < size; i++)
	assemble_integer (const0_rtx, 1, BITS_PER_UNIT, 1);
    }
  else
#endif
    /* NOTE: the `else' above binds to this `if' when the #ifdef branch
       is compiled in; otherwise this is the whole body.  */
    if (size > 0)
      ASM_OUTPUT_SKIP (asm_out_file, size);
}
1977
1978 /* Assemble an alignment pseudo op for an ALIGN-bit boundary. */
1979
1980 void
assemble_align(unsigned int align)1981 assemble_align (unsigned int align)
1982 {
1983 if (align > BITS_PER_UNIT)
1984 {
1985 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
1986 }
1987 }
1988
1989 /* Assemble a string constant with the specified C string as contents. */
1990
1991 void
assemble_string(const char * p,int size)1992 assemble_string (const char *p, int size)
1993 {
1994 int pos = 0;
1995 int maximum = 2000;
1996
1997 /* If the string is very long, split it up. */
1998
1999 while (pos < size)
2000 {
2001 int thissize = size - pos;
2002 if (thissize > maximum)
2003 thissize = maximum;
2004
2005 ASM_OUTPUT_ASCII (asm_out_file, p, thissize);
2006
2007 pos += thissize;
2008 p += thissize;
2009 }
2010 }
2011
2012
/* A noswitch_section_callback for lcomm_section.  Emit a local-common
   definition of NAME (for DECL), SIZE bytes long; ROUNDED is SIZE rounded
   up to a BIGGEST_ALIGNMENT multiple.  Return true when the directive
   used expresses the alignment itself; false means the caller must check
   the requested alignment against ROUNDED.  */

static bool
emit_local (tree decl ATTRIBUTE_UNUSED,
	    const char *name ATTRIBUTE_UNUSED,
	    unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
	    unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
#if defined ASM_OUTPUT_ALIGNED_DECL_LOCAL
  /* Preferred form: the target directive takes the decl and alignment.  */
  unsigned int align = symtab_node::get (decl)->definition_alignment ();
  ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, decl, name,
				 size, align);
  return true;
#elif defined ASM_OUTPUT_ALIGNED_LOCAL
  unsigned int align = symtab_node::get (decl)->definition_alignment ();
  ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, align);
  return true;
#else
  /* No aligned form; alignment is only implied via ROUNDED.  */
  ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
  return false;
#endif
}
2035
/* A noswitch_section_callback for bss_noswitch_section.  Emit an aligned
   .bss definition for DECL; only compiled in when the target provides
   ASM_OUTPUT_ALIGNED_BSS.  Always returns true since the directive
   carries the alignment.  */

#if defined ASM_OUTPUT_ALIGNED_BSS
static bool
emit_bss (tree decl ATTRIBUTE_UNUSED,
	  const char *name ATTRIBUTE_UNUSED,
	  unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
	  unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
  ASM_OUTPUT_ALIGNED_BSS (asm_out_file, decl, name, size,
			  get_variable_align (decl));
  return true;
}
#endif
2050
/* A noswitch_section_callback for comm_section.  Emit a common-symbol
   definition for DECL.  Return true when the directive used expresses
   the alignment itself; false means the caller must check the requested
   alignment against ROUNDED.  */

static bool
emit_common (tree decl ATTRIBUTE_UNUSED,
	     const char *name ATTRIBUTE_UNUSED,
	     unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
	     unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
#if defined ASM_OUTPUT_ALIGNED_DECL_COMMON
  /* Preferred form: directive takes the decl and explicit alignment.  */
  ASM_OUTPUT_ALIGNED_DECL_COMMON (asm_out_file, decl, name,
				  size, get_variable_align (decl));
  return true;
#elif defined ASM_OUTPUT_ALIGNED_COMMON
  ASM_OUTPUT_ALIGNED_COMMON (asm_out_file, name, size,
			     get_variable_align (decl));
  return true;
#else
  /* No aligned form; alignment is only implied via ROUNDED.  */
  ASM_OUTPUT_COMMON (asm_out_file, name, size, rounded);
  return false;
#endif
}
2072
/* A noswitch_section_callback for tls_comm_section.  Emit a thread-local
   common definition for DECL, or report that the target cannot do so.
   Always returns true: no fallback alignment check is meaningful here.  */

static bool
emit_tls_common (tree decl ATTRIBUTE_UNUSED,
		 const char *name ATTRIBUTE_UNUSED,
		 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
		 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_TLS_COMMON
  ASM_OUTPUT_TLS_COMMON (asm_out_file, decl, name, size);
  return true;
#else
  /* Graceful "not implemented" diagnostic rather than bad assembly.  */
  sorry ("thread-local COMMON data not implemented");
  return true;
#endif
}
2089
/* Assemble DECL given that it belongs in SECTION_NOSWITCH section SECT.
   NAME is the name of DECL's SYMBOL_REF.  */

static void
assemble_noswitch_variable (tree decl, const char *name, section *sect,
			    unsigned int align)
{
  unsigned HOST_WIDE_INT size, rounded;

  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
  /* ROUNDED starts from the pre-red-zone size; the asan padding below
     deliberately does not feed into the rounding.  */
  rounded = size;

  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_protect_global (decl))
    size += asan_red_zone_size (size);

  /* Don't allocate zero bytes of common,
     since that means "undefined external" in the linker.  */
  if (size == 0)
    rounded = 1;

  /* Round size up to multiple of BIGGEST_ALIGNMENT bits
     so that each uninitialized object starts on such a boundary.  */
  rounded += (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1;
  rounded = (rounded / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)
	     * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));

  /* When the callback could not express the alignment itself (returns
     false), diagnose requests beyond what the rounding guarantees.  */
  if (!sect->noswitch.callback (decl, name, size, rounded)
      && (unsigned HOST_WIDE_INT) (align / BITS_PER_UNIT) > rounded)
    error ("requested alignment for %q+D is greater than "
	   "implemented alignment of %wu", decl, rounded);
}
2121
/* A subroutine of assemble_variable.  Output the label and contents of
   DECL, whose address is a SYMBOL_REF with name NAME.  DONT_OUTPUT_DATA
   is as for assemble_variable.  MERGE_STRINGS is true when the data is
   destined for a mergeable-strings section.  */

static void
assemble_variable_contents (tree decl, const char *name,
			    bool dont_output_data, bool merge_strings)
{
  /* Do any machine/system dependent processing of the object.  */
#ifdef ASM_DECLARE_OBJECT_NAME
  last_assemble_variable_decl = decl;
  ASM_DECLARE_OBJECT_NAME (asm_out_file, name, decl);
#else
  /* Standard thing is just output label for the object.  */
  ASM_OUTPUT_LABEL (asm_out_file, name);
#endif /* ASM_DECLARE_OBJECT_NAME */

  if (!dont_output_data)
    {
      /* Caller is supposed to use varpool_get_constructor when it wants
	 to output the body.  */
      gcc_assert (!in_lto_p || DECL_INITIAL (decl) != error_mark_node);
      if (DECL_INITIAL (decl)
	  && DECL_INITIAL (decl) != error_mark_node
	  && !initializer_zerop (DECL_INITIAL (decl)))
	/* Output the actual data.  */
	output_constant (DECL_INITIAL (decl),
			 tree_to_uhwi (DECL_SIZE_UNIT (decl)),
			 get_variable_align (decl),
			 false, merge_strings);
      else
	/* Leave space for it.  All-zero initializers take this path too.  */
	assemble_zeros (tree_to_uhwi (DECL_SIZE_UNIT (decl)));
      targetm.asm_out.decl_end ();
    }
}
2158
2159 /* Write out assembly for the variable DECL, which is not defined in
2160 the current translation unit. */
2161 void
assemble_undefined_decl(tree decl)2162 assemble_undefined_decl (tree decl)
2163 {
2164 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2165 targetm.asm_out.assemble_undefined_decl (asm_out_file, name, decl);
2166 }
2167
/* Assemble everything that is needed for a variable or function declaration.
   Not used for automatic variables, and not used for function definitions.
   Should not be called for variables of incomplete structure type.

   TOP_LEVEL is nonzero if this variable has file scope.
   AT_END is nonzero if this is the special handling, at end of compilation,
   to define things that have had only tentative definitions.
   DONT_OUTPUT_DATA if nonzero means don't actually output the
   initial value (that will be done by the caller).  */

void
assemble_variable (tree decl, int top_level ATTRIBUTE_UNUSED,
		   int at_end ATTRIBUTE_UNUSED, int dont_output_data)
{
  const char *name;
  rtx decl_rtl, symbol;
  section *sect;
  unsigned int align;
  bool asan_protected = false;

  /* This function is supposed to handle VARIABLES.  Ensure we have one.  */
  gcc_assert (VAR_P (decl));

  /* Emulated TLS had better not get this far.  */
  gcc_checking_assert (targetm.have_tls || !DECL_THREAD_LOCAL_P (decl));

  last_assemble_variable_decl = 0;

  /* Normally no need to say anything here for external references,
     since assemble_external is called by the language-specific code
     when a declaration is first seen.  */

  if (DECL_EXTERNAL (decl))
    return;

  /* Do nothing for global register variables.  */
  if (DECL_RTL_SET_P (decl) && REG_P (DECL_RTL (decl)))
    {
      TREE_ASM_WRITTEN (decl) = 1;
      return;
    }

  /* If type was incomplete when the variable was declared,
     see if it is complete now.  */

  if (DECL_SIZE (decl) == 0)
    layout_decl (decl, 0);

  /* Still incomplete => don't allocate it; treat the tentative defn
     (which is what it must have been) as an `extern' reference.  */

  if (!dont_output_data && DECL_SIZE (decl) == 0)
    {
      error ("storage size of %q+D isn%'t known", decl);
      TREE_ASM_WRITTEN (decl) = 1;
      return;
    }

  /* The first declaration of a variable that comes through this function
     decides whether it is global (in C, has external linkage)
     or local (in C, has internal linkage).  So do nothing more
     if this function has already run.  */

  if (TREE_ASM_WRITTEN (decl))
    return;

  /* Make sure targetm.encode_section_info is invoked before we set
     ASM_WRITTEN.  */
  decl_rtl = DECL_RTL (decl);

  TREE_ASM_WRITTEN (decl) = 1;

  /* Do no output if -fsyntax-only.  */
  if (flag_syntax_only)
    return;

  if (! dont_output_data
      && ! valid_constant_size_p (DECL_SIZE_UNIT (decl)))
    {
      error ("size of variable %q+D is too large", decl);
      return;
    }

  gcc_assert (MEM_P (decl_rtl));
  gcc_assert (GET_CODE (XEXP (decl_rtl, 0)) == SYMBOL_REF);
  symbol = XEXP (decl_rtl, 0);

  /* If this symbol belongs to the tree constant pool, output the constant
     if it hasn't already been written.  */
  if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    {
      tree decl = SYMBOL_REF_DECL (symbol);
      if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
	output_constant_def_contents (symbol);
      return;
    }

  app_disable ();

  name = XSTR (symbol, 0);
  if (TREE_PUBLIC (decl) && DECL_NAME (decl))
    notice_global_symbol (decl);

  /* Compute the alignment of this data.  */

  align_variable (decl, dont_output_data);

  /* ASan-protected globals get extra alignment so a red zone can be
     placed after the object's data (emitted below).  */
  if ((flag_sanitize & SANITIZE_ADDRESS)
      && asan_protect_global (decl))
    {
      asan_protected = true;
      SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
				 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
    }

  set_mem_align (decl_rtl, DECL_ALIGN (decl));

  align = get_variable_align (decl);

  if (TREE_PUBLIC (decl))
    maybe_assemble_visibility (decl);

  if (DECL_PRESERVE_P (decl))
    targetm.asm_out.mark_decl_preserved (name);

  /* First make the assembler name(s) global if appropriate.  */
  sect = get_variable_section (decl, false);
  if (TREE_PUBLIC (decl)
      && (sect->common.flags & SECTION_COMMON) == 0)
    globalize_decl (decl);

  /* Output any data that we will need to use the address of.  */
  if (DECL_INITIAL (decl) && DECL_INITIAL (decl) != error_mark_node)
    output_addressed_constants (DECL_INITIAL (decl));

  /* dbxout.c needs to know this.  */
  if (sect && (sect->common.flags & SECTION_CODE) != 0)
    DECL_IN_TEXT_SECTION (decl) = 1;

  /* If the decl is part of an object_block, make sure that the decl
     has been positioned within its block, but do not write out its
     definition yet.  output_object_blocks will do that later.  */
  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
    {
      gcc_assert (!dont_output_data);
      place_block_symbol (symbol);
    }
  else if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
    assemble_noswitch_variable (decl, name, sect, align);
  else
    {
      /* Special-case handling of vtv comdat sections.  */
      if (sect->named.name
	  && (strcmp (sect->named.name, ".vtable_map_vars") == 0))
	handle_vtv_comdat_section (sect, decl);
      else
	switch_to_section (sect);
      if (align > BITS_PER_UNIT)
	ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
      assemble_variable_contents (decl, name, dont_output_data,
				  (sect->common.flags & SECTION_MERGE)
				  && (sect->common.flags & SECTION_STRINGS));
      /* Emit the trailing red-zone padding the sanitizer runtime uses.  */
      if (asan_protected)
	{
	  unsigned HOST_WIDE_INT int size
	    = tree_to_uhwi (DECL_SIZE_UNIT (decl));
	  assemble_zeros (asan_red_zone_size (size));
	}
    }
}
2338
2339
2340 /* Given a function declaration (FN_DECL), this function assembles the
2341 function into the .preinit_array section. */
2342
2343 void
assemble_vtv_preinit_initializer(tree fn_decl)2344 assemble_vtv_preinit_initializer (tree fn_decl)
2345 {
2346 section *sect;
2347 unsigned flags = SECTION_WRITE;
2348 rtx symbol = XEXP (DECL_RTL (fn_decl), 0);
2349
2350 flags |= SECTION_NOTYPE;
2351 sect = get_section (".preinit_array", flags, fn_decl);
2352 switch_to_section (sect);
2353 assemble_addr_to_section (symbol, sect);
2354 }
2355
2356 /* Return 1 if type TYPE contains any pointers. */
2357
2358 static int
contains_pointers_p(tree type)2359 contains_pointers_p (tree type)
2360 {
2361 switch (TREE_CODE (type))
2362 {
2363 case POINTER_TYPE:
2364 case REFERENCE_TYPE:
2365 /* I'm not sure whether OFFSET_TYPE needs this treatment,
2366 so I'll play safe and return 1. */
2367 case OFFSET_TYPE:
2368 return 1;
2369
2370 case RECORD_TYPE:
2371 case UNION_TYPE:
2372 case QUAL_UNION_TYPE:
2373 {
2374 tree fields;
2375 /* For a type that has fields, see if the fields have pointers. */
2376 for (fields = TYPE_FIELDS (type); fields; fields = DECL_CHAIN (fields))
2377 if (TREE_CODE (fields) == FIELD_DECL
2378 && contains_pointers_p (TREE_TYPE (fields)))
2379 return 1;
2380 return 0;
2381 }
2382
2383 case ARRAY_TYPE:
2384 /* An array type contains pointers if its element type does. */
2385 return contains_pointers_p (TREE_TYPE (type));
2386
2387 default:
2388 return 0;
2389 }
2390 }
2391
2392 /* We delay assemble_external processing until
2393 the compilation unit is finalized. This is the best we can do for
2394 right now (i.e. stage 3 of GCC 4.0) - the right thing is to delay
2395 it all the way to final. See PR 17982 for further discussion. */
2396 static GTY(()) tree pending_assemble_externals;
2397
2398 #ifdef ASM_OUTPUT_EXTERNAL
2399 /* Some targets delay some output to final using TARGET_ASM_FILE_END.
2400 As a result, assemble_external can be called after the list of externals
2401 is processed and the pointer set destroyed. */
2402 static bool pending_assemble_externals_processed;
2403
2404 /* Avoid O(external_decls**2) lookups in the pending_assemble_externals
2405 TREE_LIST in assemble_external. */
2406 static hash_set<tree> *pending_assemble_externals_set;
2407
2408 /* True if DECL is a function decl for which no out-of-line copy exists.
2409 It is assumed that DECL's assembler name has been set. */
2410
2411 static bool
incorporeal_function_p(tree decl)2412 incorporeal_function_p (tree decl)
2413 {
2414 if (TREE_CODE (decl) == FUNCTION_DECL && fndecl_built_in_p (decl))
2415 {
2416 const char *name;
2417
2418 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2419 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2420 return true;
2421
2422 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2423 /* Atomic or sync builtins which have survived this far will be
2424 resolved externally and therefore are not incorporeal. */
2425 if (strncmp (name, "__builtin_", 10) == 0)
2426 return true;
2427 }
2428 return false;
2429 }
2430
2431 /* Actually do the tests to determine if this is necessary, and invoke
2432 ASM_OUTPUT_EXTERNAL. */
2433 static void
assemble_external_real(tree decl)2434 assemble_external_real (tree decl)
2435 {
2436 rtx rtl = DECL_RTL (decl);
2437
2438 if (MEM_P (rtl) && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
2439 && !SYMBOL_REF_USED (XEXP (rtl, 0))
2440 && !incorporeal_function_p (decl))
2441 {
2442 /* Some systems do require some output. */
2443 SYMBOL_REF_USED (XEXP (rtl, 0)) = 1;
2444 ASM_OUTPUT_EXTERNAL (asm_out_file, decl, XSTR (XEXP (rtl, 0), 0));
2445 }
2446 }
2447 #endif
2448
/* Emit ASM_OUTPUT_EXTERNAL for every decl queued by assemble_external,
   then tear down the queue.  After this runs, assemble_external emits
   directly (see pending_assemble_externals_processed).  */
void
process_pending_assemble_externals (void)
{
#ifdef ASM_OUTPUT_EXTERNAL
  tree list;
  for (list = pending_assemble_externals; list; list = TREE_CHAIN (list))
    assemble_external_real (TREE_VALUE (list));

  pending_assemble_externals = 0;
  pending_assemble_externals_processed = true;
  delete pending_assemble_externals_set;
  /* Null the freed pointer so any stray later use faults cleanly instead
     of dereferencing a dangling hash_set.  */
  pending_assemble_externals_set = NULL;
#endif
}
2462
2463 /* This TREE_LIST contains any weak symbol declarations waiting
2464 to be emitted. */
2465 static GTY(()) tree weak_decls;
2466
/* Output something to declare an external symbol to the assembler,
   and qualifiers such as weakness.  (Most assemblers don't need
   extern declaration, so we normally output nothing.)  Do nothing if
   DECL is not external.  */

void
assemble_external (tree decl ATTRIBUTE_UNUSED)
{
  /* Make sure that the ASM_OUT_FILE is open.
     If it's not, we should not be calling this function.  */
  gcc_assert (asm_out_file);

  /* In a perfect world, the following condition would be true.
     Sadly, the Go front end emits assembly *from the front end*,
     bypassing the call graph.  See PR52739.  Fix before GCC 4.8.  */
#if 0
  /* This function should only be called if we are expanding, or have
     expanded, to RTL.
     Ideally, only final.c would be calling this function, but it is
     not clear whether that would break things somehow.  See PR 17982
     for further discussion.  */
  gcc_assert (state == EXPANSION
	      || state == FINISHED);
#endif

  if (!DECL_P (decl) || !DECL_EXTERNAL (decl) || !TREE_PUBLIC (decl))
    return;

  /* We want to output annotation for weak and external symbols at
     very last to check if they are references or not.  */

  if (TARGET_SUPPORTS_WEAK
      && DECL_WEAK (decl)
      /* TREE_STATIC is a weird and abused creature which is not
	 generally the right test for whether an entity has been
	 locally emitted, inlined or otherwise not-really-extern, but
	 for declarations that can be weak, it happens to match.  */
      && !TREE_STATIC (decl)
      && lookup_attribute ("weak", DECL_ATTRIBUTES (decl))
      && value_member (decl, weak_decls) == NULL_TREE)
    weak_decls = tree_cons (NULL, decl, weak_decls);

#ifdef ASM_OUTPUT_EXTERNAL
  /* Once the pending list has been flushed, emit directly.  */
  if (pending_assemble_externals_processed)
    {
      assemble_external_real (decl);
      return;
    }

  /* Otherwise queue the decl, using the hash set to dedupe in O(1).  */
  if (! pending_assemble_externals_set->add (decl))
    pending_assemble_externals = tree_cons (NULL, decl,
					    pending_assemble_externals);
#endif
}
2522
2523 /* Similar, for calling a library function FUN. */
2524
2525 void
assemble_external_libcall(rtx fun)2526 assemble_external_libcall (rtx fun)
2527 {
2528 /* Declare library function name external when first used, if nec. */
2529 if (! SYMBOL_REF_USED (fun))
2530 {
2531 SYMBOL_REF_USED (fun) = 1;
2532 targetm.asm_out.external_libcall (fun);
2533 }
2534 }
2535
/* Assemble a label named NAME into FILE.  Thin wrapper over the
   target's label-output macro.  */

void
assemble_label (FILE *file, const char *name)
{
  ASM_OUTPUT_LABEL (file, name);
}
2543
/* Set the symbol_referenced flag for identifier node ID.  */
void
mark_referenced (tree id)
{
  TREE_SYMBOL_REFERENCED (id) = 1;
}
2550
2551 /* Set the symbol_referenced flag for DECL and notify callgraph. */
2552 void
mark_decl_referenced(tree decl)2553 mark_decl_referenced (tree decl)
2554 {
2555 if (TREE_CODE (decl) == FUNCTION_DECL)
2556 {
2557 /* Extern inline functions don't become needed when referenced.
2558 If we know a method will be emitted in other TU and no new
2559 functions can be marked reachable, just use the external
2560 definition. */
2561 struct cgraph_node *node = cgraph_node::get_create (decl);
2562 if (!DECL_EXTERNAL (decl)
2563 && !node->definition)
2564 node->mark_force_output ();
2565 }
2566 else if (VAR_P (decl))
2567 {
2568 varpool_node *node = varpool_node::get_create (decl);
2569 /* C++ frontend use mark_decl_references to force COMDAT variables
2570 to be output that might appear dead otherwise. */
2571 node->force_output = true;
2572 }
2573 /* else do nothing - we can get various sorts of CST nodes here,
2574 which do not need to be marked. */
2575 }
2576
2577
2578 /* Output to FILE (an assembly file) a reference to NAME. If NAME
2579 starts with a *, the rest of NAME is output verbatim. Otherwise
2580 NAME is transformed in a target-specific way (usually by the
2581 addition of an underscore). */
2582
2583 void
assemble_name_raw(FILE * file,const char * name)2584 assemble_name_raw (FILE *file, const char *name)
2585 {
2586 if (name[0] == '*')
2587 fputs (&name[1], file);
2588 else
2589 ASM_OUTPUT_LABELREF (file, name);
2590 }
2591
2592 /* Return NAME that should actually be emitted, looking through
2593 transparent aliases. If NAME refers to an entity that is also
2594 represented as a tree (like a function or variable), mark the entity
2595 as referenced. */
2596 const char *
assemble_name_resolve(const char * name)2597 assemble_name_resolve (const char *name)
2598 {
2599 const char *real_name = targetm.strip_name_encoding (name);
2600 tree id = maybe_get_identifier (real_name);
2601
2602 if (id)
2603 {
2604 tree id_orig = id;
2605
2606 mark_referenced (id);
2607 ultimate_transparent_alias_target (&id);
2608 if (id != id_orig)
2609 name = IDENTIFIER_POINTER (id);
2610 gcc_assert (! TREE_CHAIN (id));
2611 }
2612
2613 return name;
2614 }
2615
2616 /* Like assemble_name_raw, but should be used when NAME might refer to
2617 an entity that is also represented as a tree (like a function or
2618 variable). If NAME does refer to such an entity, that entity will
2619 be marked as referenced. */
2620
2621 void
assemble_name(FILE * file,const char * name)2622 assemble_name (FILE *file, const char *name)
2623 {
2624 assemble_name_raw (file, assemble_name_resolve (name));
2625 }
2626
/* Allocate SIZE bytes writable static space with a gensym name
   and return an RTX to refer to its address.  */

rtx
assemble_static_space (unsigned HOST_WIDE_INT size)
{
  char name[17];
  const char *namestring;
  rtx x;

  /* Generate a fresh local label ("LF<n>") for this blob.  */
  ASM_GENERATE_INTERNAL_LABEL (name, "LF", const_labelno);
  ++const_labelno;
  namestring = ggc_strdup (name);

  x = gen_rtx_SYMBOL_REF (Pmode, namestring);
  SYMBOL_REF_FLAGS (x) = SYMBOL_FLAG_LOCAL;

#ifdef ASM_OUTPUT_ALIGNED_DECL_LOCAL
  ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, NULL_TREE, name, size,
				 BIGGEST_ALIGNMENT);
#else
#ifdef ASM_OUTPUT_ALIGNED_LOCAL
  ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, BIGGEST_ALIGNMENT);
#else
  {
    /* Round size up to multiple of BIGGEST_ALIGNMENT bits
       so that each uninitialized object starts on such a boundary.  */
    /* Variable `rounded' might or might not be used in ASM_OUTPUT_LOCAL.  */
    unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED
      = ((size + (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1)
	 / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)
	 * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));
    ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
  }
#endif
#endif
  return x;
}
2665
/* Assemble the static constant template for function entry trampolines.
   This is done at most once per compilation.
   Returns an RTX for the address of the template.  */

/* Cached BLKmode MEM for the emitted template; non-null once emitted.  */
static GTY(()) rtx initial_trampoline;

rtx
assemble_trampoline_template (void)
{
  char label[256];
  const char *name;
  int align;
  rtx symbol;

  gcc_assert (targetm.asm_out.trampoline_template != NULL);

  /* Emit at most once per compilation; reuse the cached rtx after.  */
  if (initial_trampoline)
    return initial_trampoline;

  /* By default, put trampoline templates in read-only data section.  */

#ifdef TRAMPOLINE_SECTION
  switch_to_section (TRAMPOLINE_SECTION);
#else
  switch_to_section (readonly_data_section);
#endif

  /* Write the assembler code to define one.  */
  align = floor_log2 (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
  if (align > 0)
    ASM_OUTPUT_ALIGN (asm_out_file, align);

  targetm.asm_out.internal_label (asm_out_file, "LTRAMP", 0);
  targetm.asm_out.trampoline_template (asm_out_file);

  /* Record the rtl to refer to it.  */
  ASM_GENERATE_INTERNAL_LABEL (label, "LTRAMP", 0);
  name = ggc_strdup (label);
  symbol = gen_rtx_SYMBOL_REF (Pmode, name);
  SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_LOCAL;

  initial_trampoline = gen_const_mem (BLKmode, symbol);
  set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
  set_mem_size (initial_trampoline, TRAMPOLINE_SIZE);

  return initial_trampoline;
}
2713
2714 /* A and B are either alignments or offsets. Return the minimum alignment
2715 that may be assumed after adding the two together. */
2716
2717 static inline unsigned
min_align(unsigned int a,unsigned int b)2718 min_align (unsigned int a, unsigned int b)
2719 {
2720 return least_bit_hwi (a | b);
2721 }
2722
2723 /* Return the assembler directive for creating a given kind of integer
2724 object. SIZE is the number of bytes in the object and ALIGNED_P
2725 indicates whether it is known to be aligned. Return NULL if the
2726 assembly dialect has no such directive.
2727
2728 The returned string should be printed at the start of a new line and
2729 be followed immediately by the object's initial value. */
2730
2731 const char *
integer_asm_op(int size,int aligned_p)2732 integer_asm_op (int size, int aligned_p)
2733 {
2734 struct asm_int_op *ops;
2735
2736 if (aligned_p)
2737 ops = &targetm.asm_out.aligned_op;
2738 else
2739 ops = &targetm.asm_out.unaligned_op;
2740
2741 switch (size)
2742 {
2743 case 1:
2744 return targetm.asm_out.byte_op;
2745 case 2:
2746 return ops->hi;
2747 case 3:
2748 return ops->psi;
2749 case 4:
2750 return ops->si;
2751 case 5:
2752 case 6:
2753 case 7:
2754 return ops->pdi;
2755 case 8:
2756 return ops->di;
2757 case 9:
2758 case 10:
2759 case 11:
2760 case 12:
2761 case 13:
2762 case 14:
2763 case 15:
2764 return ops->pti;
2765 case 16:
2766 return ops->ti;
2767 default:
2768 return NULL;
2769 }
2770 }
2771
2772 /* Use directive OP to assemble an integer object X. Print OP at the
2773 start of the line, followed immediately by the value of X. */
2774
2775 void
assemble_integer_with_op(const char * op,rtx x)2776 assemble_integer_with_op (const char *op, rtx x)
2777 {
2778 fputs (op, asm_out_file);
2779 output_addr_const (asm_out_file, x);
2780 fputc ('\n', asm_out_file);
2781 }
2782
2783 /* The default implementation of the asm_out.integer target hook. */
2784
2785 bool
default_assemble_integer(rtx x ATTRIBUTE_UNUSED,unsigned int size ATTRIBUTE_UNUSED,int aligned_p ATTRIBUTE_UNUSED)2786 default_assemble_integer (rtx x ATTRIBUTE_UNUSED,
2787 unsigned int size ATTRIBUTE_UNUSED,
2788 int aligned_p ATTRIBUTE_UNUSED)
2789 {
2790 const char *op = integer_asm_op (size, aligned_p);
2791 /* Avoid GAS bugs for large values. Specifically negative values whose
2792 absolute value fits in a bfd_vma, but not in a bfd_signed_vma. */
2793 if (size > UNITS_PER_WORD && size > POINTER_SIZE_UNITS)
2794 return false;
2795 return op && (assemble_integer_with_op (op, x), true);
2796 }
2797
/* Assemble the integer constant X into an object of SIZE bytes.  ALIGN is
   the alignment of the integer in bits.  Return 1 if we were able to output
   the constant, otherwise 0.  We must be able to output the constant,
   if FORCE is nonzero.  */

bool
assemble_integer (rtx x, unsigned int size, unsigned int align, int force)
{
  int aligned_p;

  aligned_p = (align >= MIN (size * BITS_PER_UNIT, BIGGEST_ALIGNMENT));

  /* See if the target hook can handle this kind of object.  */
  if (targetm.asm_out.integer (x, size, aligned_p))
    return true;

  /* If the object is a multi-byte one, try splitting it up.  Split
     it into words it if is multi-word, otherwise split it into bytes.  */
  if (size > 1)
    {
      machine_mode omode, imode;
      unsigned int subalign;
      unsigned int subsize, i;
      enum mode_class mclass;

      subsize = size > UNITS_PER_WORD? UNITS_PER_WORD : 1;
      subalign = MIN (align, subsize * BITS_PER_UNIT);
      /* CONST_FIXED must keep its own mode class when subreg-splitting;
	 everything else is treated as a plain integer.  */
      if (GET_CODE (x) == CONST_FIXED)
	mclass = GET_MODE_CLASS (GET_MODE (x));
      else
	mclass = MODE_INT;

      omode = mode_for_size (subsize * BITS_PER_UNIT, mclass, 0).require ();
      imode = mode_for_size (size * BITS_PER_UNIT, mclass, 0).require ();

      /* Emit each piece recursively (with FORCE clear so failure of a
	 piece can be detected here).  */
      for (i = 0; i < size; i += subsize)
	{
	  rtx partial = simplify_subreg (omode, x, imode, i);
	  if (!partial || !assemble_integer (partial, subsize, subalign, 0))
	    break;
	}
      if (i == size)
	return true;

      /* If we've printed some of it, but not all of it, there's no going
	 back now.  */
      gcc_assert (!i);
    }

  gcc_assert (!force);

  return false;
}
2851
/* Assemble the floating-point constant D into an object of size MODE.  ALIGN
   is the alignment of the constant in bits.  If REVERSE is true, D is output
   in reverse storage order.  */

void
assemble_real (REAL_VALUE_TYPE d, scalar_float_mode mode, unsigned int align,
	       bool reverse)
{
  long data[4] = {0, 0, 0, 0};
  int bitsize, nelts, nunits, units_per;
  rtx elt;

  /* This is hairy.  We have a quantity of known size.  real_to_target
     will put it into an array of *host* longs, 32 bits per element
     (even if long is more than 32 bits).  We need to determine the
     number of array elements that are occupied (nelts) and the number
     of *target* min-addressable units that will be occupied in the
     object file (nunits).  We cannot assume that 32 divides the
     mode's bitsize (size * BITS_PER_UNIT) evenly.

     size * BITS_PER_UNIT is used here to make sure that padding bits
     (which might appear at either end of the value; real_to_target
     will include the padding bits in its output array) are included.  */

  nunits = GET_MODE_SIZE (mode);
  bitsize = nunits * BITS_PER_UNIT;
  nelts = CEIL (bitsize, 32);
  units_per = 32 / BITS_PER_UNIT;

  real_to_target (data, &d, mode);

  /* Put out the first word with the specified alignment.  */
  unsigned int chunk_nunits = MIN (nunits, units_per);
  if (reverse)
    /* In reverse storage order the last host word comes out first.  */
    elt = flip_storage_order (SImode, gen_int_mode (data[nelts - 1], SImode));
  else
    /* Sign-extend so that the CONST_INT is canonical for its width.  */
    elt = GEN_INT (sext_hwi (data[0], chunk_nunits * BITS_PER_UNIT));
  assemble_integer (elt, chunk_nunits, align, 1);
  nunits -= chunk_nunits;

  /* Subsequent words need only 32-bit alignment.  */
  align = min_align (align, 32);

  for (int i = 1; i < nelts; i++)
    {
      /* The final chunk may cover fewer target units than a full
	 32-bit host word if 32 does not divide the mode's bitsize.  */
      chunk_nunits = MIN (nunits, units_per);
      if (reverse)
	elt = flip_storage_order (SImode,
				  gen_int_mode (data[nelts - 1 - i], SImode));
      else
	elt = GEN_INT (sext_hwi (data[i], chunk_nunits * BITS_PER_UNIT));
      assemble_integer (elt, chunk_nunits, align, 1);
      nunits -= chunk_nunits;
    }
}
2907
/* Given an expression EXP with a constant value,
   reduce it to the sum of an assembler symbol and an integer.
   Store them both in the structure *VALUE.
   EXP must be reducible.  */

class addr_const {
public:
  /* The base address: decode_addr_const stores a SYMBOL_REF or
     LABEL_REF here (the address operand of the decoded MEM).  */
  rtx base;
  /* Constant byte offset to add to BASE.  */
  poly_int64 offset;
};
2918
/* Decompose the address constant EXP into a base rtx and a byte
   offset, storing both into *VALUE.  EXP is expected to be an
   ADDR_EXPR-like node whose operand 0 is the addressed object.  */

static void
decode_addr_const (tree exp, class addr_const *value)
{
  tree target = TREE_OPERAND (exp, 0);
  poly_int64 offset = 0;
  rtx x;

  /* Peel component, array and memory references off TARGET,
     accumulating their byte offsets, until we reach the underlying
     declaration or constant.  */
  while (1)
    {
      poly_int64 bytepos;
      if (TREE_CODE (target) == COMPONENT_REF
	  && poly_int_tree_p (byte_position (TREE_OPERAND (target, 1)),
			      &bytepos))
	{
	  /* Field access: add the field's byte position.  */
	  offset += bytepos;
	  target = TREE_OPERAND (target, 0);
	}
      else if (TREE_CODE (target) == ARRAY_REF
	       || TREE_CODE (target) == ARRAY_RANGE_REF)
	{
	  /* Truncate big offset.  */
	  offset
	    += (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (target)))
		* wi::to_poly_widest (TREE_OPERAND (target, 1)).force_shwi ());
	  target = TREE_OPERAND (target, 0);
	}
      else if (TREE_CODE (target) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (target, 0)) == ADDR_EXPR)
	{
	  /* MEM_REF of an address: fold its constant offset in and
	     continue with the addressed object.  */
	  offset += mem_ref_offset (target).force_shwi ();
	  target = TREE_OPERAND (TREE_OPERAND (target, 0), 0);
	}
      else if (TREE_CODE (target) == INDIRECT_REF
	       && TREE_CODE (TREE_OPERAND (target, 0)) == NOP_EXPR
	       && TREE_CODE (TREE_OPERAND (TREE_OPERAND (target, 0), 0))
		  == ADDR_EXPR)
	/* *(T *)&object collapses to the object itself.  */
	target = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (target, 0), 0), 0);
      else
	break;
    }

  /* Map the stripped object to its memory rtx.  */
  switch (TREE_CODE (target))
    {
    case VAR_DECL:
    case FUNCTION_DECL:
      x = DECL_RTL (target);
      break;

    case LABEL_DECL:
      x = gen_rtx_MEM (FUNCTION_MODE,
		       gen_rtx_LABEL_REF (Pmode, force_label_rtx (target)));
      break;

    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case COMPLEX_CST:
    case CONSTRUCTOR:
    case INTEGER_CST:
      x = lookup_constant_def (target);
      /* Should have been added by output_addressed_constants.  */
      gcc_assert (x);
      break;

    case INDIRECT_REF:
      /* This deals with absolute addresses.  */
      offset += tree_to_shwi (TREE_OPERAND (target, 0));
      x = gen_rtx_MEM (QImode,
		       gen_rtx_SYMBOL_REF (Pmode, "origin of addresses"));
      break;

    case COMPOUND_LITERAL_EXPR:
      gcc_assert (COMPOUND_LITERAL_EXPR_DECL (target));
      x = DECL_RTL (COMPOUND_LITERAL_EXPR_DECL (target));
      break;

    default:
      gcc_unreachable ();
    }

  /* All of the cases above produce a MEM; its address is the base.  */
  gcc_assert (MEM_P (x));
  x = XEXP (x, 0);

  value->base = x;
  value->offset = offset;
}
3005
/* Hash table mapping tree constants to their descriptors, keyed by the
   structural hash computed by const_hash_1.  Lives in GC memory.  */
static GTY(()) hash_table<tree_descriptor_hasher> *const_desc_htab;

static void maybe_output_constant_def_contents (struct constant_descriptor_tree *, int);
3009
/* Constant pool accessor function.  Exposes the file-scope table of
   tree constant descriptors to other modules.  */

hash_table<tree_descriptor_hasher> *
constant_pool_htab (void)
{
  return const_desc_htab;
}
3017
/* Compute a hash code for a constant expression.  The hash was
   precomputed (by const_hash_1) and cached in the descriptor, so just
   return it.  */

hashval_t
tree_descriptor_hasher::hash (constant_descriptor_tree *ptr)
{
  return ptr->hash;
}
3025
/* Compute a structural hash for constant expression EXP, such that two
   constants that compare_constant considers equal hash alike.  */

static hashval_t
const_hash_1 (const tree exp)
{
  const char *p;
  hashval_t hi;
  int len, i;
  enum tree_code code = TREE_CODE (exp);

  /* Either set P and LEN to the address and len of something to hash and
     exit the switch or return a value.  */

  switch (code)
    {
    case INTEGER_CST:
      /* Hash the raw HOST_WIDE_INT elements of the value.  */
      p = (char *) &TREE_INT_CST_ELT (exp, 0);
      len = TREE_INT_CST_NUNITS (exp) * sizeof (HOST_WIDE_INT);
      break;

    case REAL_CST:
      return real_hash (TREE_REAL_CST_PTR (exp));

    case FIXED_CST:
      return fixed_hash (TREE_FIXED_CST_PTR (exp));

    case STRING_CST:
      p = TREE_STRING_POINTER (exp);
      len = TREE_STRING_LENGTH (exp);
      break;

    case COMPLEX_CST:
      return (const_hash_1 (TREE_REALPART (exp)) * 5
	      + const_hash_1 (TREE_IMAGPART (exp)));

    case VECTOR_CST:
      {
	/* Hash the pattern encoding rather than the expanded elements.  */
	hi = 7 + VECTOR_CST_NPATTERNS (exp);
	hi = hi * 563 + VECTOR_CST_NELTS_PER_PATTERN (exp);
	unsigned int count = vector_cst_encoded_nelts (exp);
	for (unsigned int i = 0; i < count; ++i)
	  hi = hi * 563 + const_hash_1 (VECTOR_CST_ENCODED_ELT (exp, i));
	return hi;
      }

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	tree value;

	/* Mix in the object size so differently-sized aggregates with
	   the same elements hash differently.  */
	hi = 5 + int_size_in_bytes (TREE_TYPE (exp));

	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	  if (value)
	    hi = hi * 603 + const_hash_1 (value);

	return hi;
      }

    case ADDR_EXPR:
      /* The address of a literal constant hashes like the constant.  */
      if (CONSTANT_CLASS_P (TREE_OPERAND (exp, 0)))
	return const_hash_1 (TREE_OPERAND (exp, 0));

      /* Fallthru.  */
    case FDESC_EXPR:
      {
	class addr_const value;

	decode_addr_const (exp, &value);
	switch (GET_CODE (value.base))
	  {
	  case SYMBOL_REF:
	    /* Don't hash the address of the SYMBOL_REF;
	       only use the offset and the symbol name.  */
	    hi = value.offset.coeffs[0];
	    p = XSTR (value.base, 0);
	    for (i = 0; p[i] != 0; i++)
	      hi = ((hi * 613) + (unsigned) (p[i]));
	    break;

	  case LABEL_REF:
	    hi = (value.offset.coeffs[0]
		  + CODE_LABEL_NUMBER (label_ref_label (value.base)) * 13);
	    break;

	  default:
	    gcc_unreachable ();
	  }
      }
      return hi;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
      return (const_hash_1 (TREE_OPERAND (exp, 0)) * 9
	      + const_hash_1 (TREE_OPERAND (exp, 1)));

    CASE_CONVERT:
      return const_hash_1 (TREE_OPERAND (exp, 0)) * 7 + 2;

    default:
      /* A language specific constant. Just hash the code.  */
      return code;
    }

  /* Compute hashing function.  */
  hi = len;
  for (i = 0; i < len; i++)
    hi = ((hi * 613) + (unsigned) (p[i]));

  return hi;
}
3136
3137 /* Wrapper of compare_constant, for the htab interface. */
3138 bool
equal(constant_descriptor_tree * c1,constant_descriptor_tree * c2)3139 tree_descriptor_hasher::equal (constant_descriptor_tree *c1,
3140 constant_descriptor_tree *c2)
3141 {
3142 if (c1->hash != c2->hash)
3143 return 0;
3144 return compare_constant (c1->value, c2->value);
3145 }
3146
/* Compare t1 and t2, and return 1 only if they are known to result in
   the same bit pattern on output.  */

static int
compare_constant (const tree t1, const tree t2)
{
  enum tree_code typecode;

  /* Two nulls are equal; a null and a non-null are not.  */
  if (t1 == NULL_TREE)
    return t2 == NULL_TREE;
  if (t2 == NULL_TREE)
    return 0;

  if (TREE_CODE (t1) != TREE_CODE (t2))
    return 0;

  switch (TREE_CODE (t1))
    {
    case INTEGER_CST:
      /* Integer constants are the same only if the same width of type.  */
      if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
	return 0;
      if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
	return 0;
      return tree_int_cst_equal (t1, t2);

    case REAL_CST:
      /* Real constants are the same only if the same width of type.  In
	 addition to the same width, we need to check whether the modes are the
	 same.  There might be two floating point modes that are the same size
	 but have different representations, such as the PowerPC that has 2
	 different 128-bit floating point types (IBM extended double and IEEE
	 128-bit floating point).  */
      if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
	return 0;
      if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
	return 0;
      return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));

    case FIXED_CST:
      /* Fixed constants are the same only if the same width of type.  */
      if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
	return 0;

      return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));

    case STRING_CST:
      /* Strings must match in mode and total object size (which may
	 include zero padding beyond the string's own length).  */
      if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
	  || int_size_in_bytes (TREE_TYPE (t1))
	     != int_size_in_bytes (TREE_TYPE (t2)))
	return 0;

      return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
	      && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
			   TREE_STRING_LENGTH (t1)));

    case COMPLEX_CST:
      return (compare_constant (TREE_REALPART (t1), TREE_REALPART (t2))
	      && compare_constant (TREE_IMAGPART (t1), TREE_IMAGPART (t2)));

    case VECTOR_CST:
      {
	/* Compare the pattern encodings element by element.  */
	if (VECTOR_CST_NPATTERNS (t1)
	    != VECTOR_CST_NPATTERNS (t2))
	  return 0;

	if (VECTOR_CST_NELTS_PER_PATTERN (t1)
	    != VECTOR_CST_NELTS_PER_PATTERN (t2))
	  return 0;

	unsigned int count = vector_cst_encoded_nelts (t1);
	for (unsigned int i = 0; i < count; ++i)
	  if (!compare_constant (VECTOR_CST_ENCODED_ELT (t1, i),
				 VECTOR_CST_ENCODED_ELT (t2, i)))
	    return 0;

	return 1;
      }

    case CONSTRUCTOR:
      {
	vec<constructor_elt, va_gc> *v1, *v2;
	unsigned HOST_WIDE_INT idx;

	typecode = TREE_CODE (TREE_TYPE (t1));
	if (typecode != TREE_CODE (TREE_TYPE (t2)))
	  return 0;

	if (typecode == ARRAY_TYPE)
	  {
	    HOST_WIDE_INT size_1 = int_size_in_bytes (TREE_TYPE (t1));
	    /* For arrays, check that mode, size and storage order match.  */
	    if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
		|| size_1 == -1
		|| size_1 != int_size_in_bytes (TREE_TYPE (t2))
		|| TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t1))
		   != TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t2)))
	      return 0;
	  }
	else
	  {
	    /* For record and union constructors, require exact type
	       equality.  */
	    if (TREE_TYPE (t1) != TREE_TYPE (t2))
	      return 0;
	  }

	v1 = CONSTRUCTOR_ELTS (t1);
	v2 = CONSTRUCTOR_ELTS (t2);
	if (vec_safe_length (v1) != vec_safe_length (v2))
	  return 0;

	for (idx = 0; idx < vec_safe_length (v1); ++idx)
	  {
	    constructor_elt *c1 = &(*v1)[idx];
	    constructor_elt *c2 = &(*v2)[idx];

	    /* Check that each value is the same...  */
	    if (!compare_constant (c1->value, c2->value))
	      return 0;
	    /* ... and that they apply to the same fields!  */
	    if (typecode == ARRAY_TYPE)
	      {
		/* Array indices are constants; compare structurally.  */
		if (!compare_constant (c1->index, c2->index))
		  return 0;
	      }
	    else
	      {
		/* Record fields are FIELD_DECLs; pointer identity is
		   required, matching the exact-type test above.  */
		if (c1->index != c2->index)
		  return 0;
	      }
	  }

	return 1;
      }

    case ADDR_EXPR:
    case FDESC_EXPR:
      {
	class addr_const value1, value2;
	enum rtx_code code;
	int ret;

	/* Addresses compare by (base, offset) after decomposition.  */
	decode_addr_const (t1, &value1);
	decode_addr_const (t2, &value2);

	if (maybe_ne (value1.offset, value2.offset))
	  return 0;

	code = GET_CODE (value1.base);
	if (code != GET_CODE (value2.base))
	  return 0;

	switch (code)
	  {
	  case SYMBOL_REF:
	    ret = (strcmp (XSTR (value1.base, 0), XSTR (value2.base, 0)) == 0);
	    break;

	  case LABEL_REF:
	    ret = (CODE_LABEL_NUMBER (label_ref_label (value1.base))
		   == CODE_LABEL_NUMBER (label_ref_label (value2.base)));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	return ret;
      }

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
    case RANGE_EXPR:
      return (compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0))
	      && compare_constant (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)));

    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      return compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

    default:
      /* Language-specific or otherwise unknown codes never match.  */
      return 0;
    }

  gcc_unreachable ();
}
3334
3335 /* Return the section into which constant EXP should be placed. */
3336
3337 static section *
get_constant_section(tree exp,unsigned int align)3338 get_constant_section (tree exp, unsigned int align)
3339 {
3340 return targetm.asm_out.select_section (exp,
3341 compute_reloc_for_constant (exp),
3342 align);
3343 }
3344
3345 /* Return the size of constant EXP in bytes. */
3346
3347 static HOST_WIDE_INT
get_constant_size(tree exp)3348 get_constant_size (tree exp)
3349 {
3350 HOST_WIDE_INT size;
3351
3352 size = int_size_in_bytes (TREE_TYPE (exp));
3353 gcc_checking_assert (size >= 0);
3354 gcc_checking_assert (TREE_CODE (exp) != STRING_CST
3355 || size >= TREE_STRING_LENGTH (exp));
3356 return size;
3357 }
3358
/* Subroutine of output_constant_def:
   No constant equal to EXP is known to have been output.
   Make a constant descriptor to enter EXP in the hash table.
   Assign the label number and construct RTL to refer to the
   constant's location in memory.
   Caller is responsible for updating the hash table.  */

static struct constant_descriptor_tree *
build_constant_desc (tree exp)
{
  struct constant_descriptor_tree *desc;
  rtx symbol, rtl;
  char label[256];
  int labelno;
  tree decl;

  desc = ggc_alloc<constant_descriptor_tree> ();
  desc->value = exp;

  /* Create a string containing the label name, in LABEL.  */
  labelno = const_labelno++;
  ASM_GENERATE_INTERNAL_LABEL (label, "LC", labelno);

  /* Construct the VAR_DECL associated with the constant.  */
  decl = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (label),
		     TREE_TYPE (exp));
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_READONLY (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_ADDRESSABLE (decl) = 1;
  /* We don't set the RTL yet as this would cause varpool to assume that the
     variable is referenced.  Moreover, it would just be dropped in LTO mode.
     Instead we set the flag that will be recognized in make_decl_rtl.  */
  DECL_IN_CONSTANT_POOL (decl) = 1;
  DECL_INITIAL (decl) = desc->value;
  /* ??? targetm.constant_alignment hasn't been updated for vector types on
     most architectures so use DATA_ALIGNMENT as well, except for strings.  */
  if (TREE_CODE (exp) == STRING_CST)
    SET_DECL_ALIGN (decl, targetm.constant_alignment (exp, DECL_ALIGN (decl)));
  else
    {
      align_variable (decl, 0);
      /* Raise the alignment to the mode's natural alignment when the
	 object would otherwise be under-aligned and either misaligned
	 moves are available or unaligned access is slow.  */
      if (DECL_ALIGN (decl) < GET_MODE_ALIGNMENT (DECL_MODE (decl))
	  && ((optab_handler (movmisalign_optab, DECL_MODE (decl))
	       != CODE_FOR_nothing)
	      || targetm.slow_unaligned_access (DECL_MODE (decl),
						DECL_ALIGN (decl))))
	SET_DECL_ALIGN (decl, GET_MODE_ALIGNMENT (DECL_MODE (decl)));
    }

  /* Now construct the SYMBOL_REF and the MEM.  */
  if (use_object_blocks_p ())
    {
      /* Block placement needs the final alignment of the definition.  */
      int align = (TREE_CODE (decl) == CONST_DECL
		   || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
		   ? DECL_ALIGN (decl)
		   : symtab_node::get (decl)->definition_alignment ());
      section *sect = get_constant_section (exp, align);
      symbol = create_block_symbol (ggc_strdup (label),
				    get_block_for_section (sect), -1);
    }
  else
    symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
  SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
  SET_SYMBOL_REF_DECL (symbol, decl);
  TREE_CONSTANT_POOL_ADDRESS_P (symbol) = 1;

  rtl = gen_const_mem (TYPE_MODE (TREE_TYPE (exp)), symbol);
  set_mem_alias_set (rtl, 0);

  /* Putting EXP into the literal pool might have imposed a different
     alignment which should be visible in the RTX as well.  */
  set_mem_align (rtl, DECL_ALIGN (decl));

  /* We cannot share RTX'es in pool entries.
     Mark this piece of RTL as required for unsharing.  */
  RTX_FLAG (rtl, used) = 1;

  /* Set flags or add text to the name to record information, such as
     that it is a local symbol.  If the name is changed, the macro
     ASM_OUTPUT_LABELREF will have to know how to strip this
     information.  This call might invalidate our local variable
     SYMBOL; we can't use it afterward.  */
  targetm.encode_section_info (exp, rtl, true);

  desc->rtl = rtl;

  return desc;
}
3449
/* Subroutine of output_constant_def and tree_output_constant_def:
   Add a constant to the hash table that tracks which constants
   already have labels.  */

static constant_descriptor_tree *
add_constant_to_table (tree exp)
{
  /* The hash table methods may call output_constant_def for addressed
     constants, so handle them first.  */
  output_addressed_constants (exp);

  /* Sanity check to catch recursive insertion.  The find_slot call
     below must not re-enter this function; the static flag trips the
     assert if it ever does.  */
  static bool inserting;
  gcc_assert (!inserting);
  inserting = true;

  /* Look up EXP in the table of constant descriptors.  If we didn't
     find it, create a new one.  */
  struct constant_descriptor_tree key;
  key.value = exp;
  key.hash = const_hash_1 (exp);
  constant_descriptor_tree **loc
    = const_desc_htab->find_slot_with_hash (&key, key.hash, INSERT);

  inserting = false;

  struct constant_descriptor_tree *desc = *loc;
  if (!desc)
    {
      /* First time we see this constant: build its descriptor and
	 record it in the slot reserved above.  */
      desc = build_constant_desc (exp);
      desc->hash = key.hash;
      *loc = desc;
    }

  return desc;
}
3486
3487 /* Return an rtx representing a reference to constant data in memory
3488 for the constant expression EXP.
3489
3490 If assembler code for such a constant has already been output,
3491 return an rtx to refer to it.
3492 Otherwise, output such a constant in memory
3493 and generate an rtx for it.
3494
3495 If DEFER is nonzero, this constant can be deferred and output only
3496 if referenced in the function after all optimizations.
3497
3498 `const_desc_table' records which constants already have label strings. */
3499
3500 rtx
output_constant_def(tree exp,int defer)3501 output_constant_def (tree exp, int defer)
3502 {
3503 struct constant_descriptor_tree *desc = add_constant_to_table (exp);
3504 maybe_output_constant_def_contents (desc, defer);
3505 return desc->rtl;
3506 }
3507
3508 /* Subroutine of output_constant_def: Decide whether or not we need to
3509 output the constant DESC now, and if so, do it. */
3510 static void
maybe_output_constant_def_contents(struct constant_descriptor_tree * desc,int defer)3511 maybe_output_constant_def_contents (struct constant_descriptor_tree *desc,
3512 int defer)
3513 {
3514 rtx symbol = XEXP (desc->rtl, 0);
3515 tree exp = desc->value;
3516
3517 if (flag_syntax_only)
3518 return;
3519
3520 if (TREE_ASM_WRITTEN (exp))
3521 /* Already output; don't do it again. */
3522 return;
3523
3524 /* We can always defer constants as long as the context allows
3525 doing so. */
3526 if (defer)
3527 {
3528 /* Increment n_deferred_constants if it exists. It needs to be at
3529 least as large as the number of constants actually referred to
3530 by the function. If it's too small we'll stop looking too early
3531 and fail to emit constants; if it's too large we'll only look
3532 through the entire function when we could have stopped earlier. */
3533 if (cfun)
3534 n_deferred_constants++;
3535 return;
3536 }
3537
3538 output_constant_def_contents (symbol);
3539 }
3540
3541 /* Subroutine of output_constant_def_contents. Output the definition
3542 of constant EXP, which is pointed to by label LABEL. ALIGN is the
3543 constant's alignment in bits. */
3544
3545 static void
assemble_constant_contents(tree exp,const char * label,unsigned int align,bool merge_strings)3546 assemble_constant_contents (tree exp, const char *label, unsigned int align,
3547 bool merge_strings)
3548 {
3549 HOST_WIDE_INT size;
3550
3551 size = get_constant_size (exp);
3552
3553 /* Do any machine/system dependent processing of the constant. */
3554 targetm.asm_out.declare_constant_name (asm_out_file, label, exp, size);
3555
3556 /* Output the value of EXP. */
3557 output_constant (exp, size, align, false, merge_strings);
3558
3559 targetm.asm_out.decl_end ();
3560 }
3561
/* We must output the constant data referred to by SYMBOL; do so.  */

static void
output_constant_def_contents (rtx symbol)
{
  tree decl = SYMBOL_REF_DECL (symbol);
  tree exp = DECL_INITIAL (decl);
  bool asan_protected = false;

  /* Make sure any other constants whose addresses appear in EXP
     are assigned label numbers.  */
  output_addressed_constants (exp);

  /* We are no longer deferring this constant.  */
  TREE_ASM_WRITTEN (decl) = TREE_ASM_WRITTEN (exp) = 1;

  /* String constants protected by AddressSanitizer get a trailing red
     zone; make the decl at least red-zone aligned.  */
  if ((flag_sanitize & SANITIZE_ADDRESS)
      && TREE_CODE (exp) == STRING_CST
      && asan_protect_global (exp))
    {
      asan_protected = true;
      SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
				 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
    }

  /* If the constant is part of an object block, make sure that the
     decl has been positioned within its block, but do not write out
     its definition yet.  output_object_blocks will do that later.  */
  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
    place_block_symbol (symbol);
  else
    {
      int align = (TREE_CODE (decl) == CONST_DECL
		   || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
		   ? DECL_ALIGN (decl)
		   : symtab_node::get (decl)->definition_alignment ());
      section *sect = get_constant_section (exp, align);
      switch_to_section (sect);
      if (align > BITS_PER_UNIT)
	ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
      /* String merging is only valid in mergeable string sections.  */
      assemble_constant_contents (exp, XSTR (symbol, 0), align,
				  (sect->common.flags & SECTION_MERGE)
				  && (sect->common.flags & SECTION_STRINGS));
      if (asan_protected)
	{
	  /* Pad with zeros up to the red-zone boundary.  */
	  HOST_WIDE_INT size = get_constant_size (exp);
	  assemble_zeros (asan_red_zone_size (size));
	}
    }
}
3612
3613 /* Look up EXP in the table of constant descriptors. Return the rtl
3614 if it has been emitted, else null. */
3615
3616 rtx
lookup_constant_def(tree exp)3617 lookup_constant_def (tree exp)
3618 {
3619 struct constant_descriptor_tree key;
3620
3621 key.value = exp;
3622 key.hash = const_hash_1 (exp);
3623 constant_descriptor_tree *desc
3624 = const_desc_htab->find_with_hash (&key, key.hash);
3625
3626 return (desc ? desc->rtl : NULL_RTX);
3627 }
3628
3629 /* Return a tree representing a reference to constant data in memory
3630 for the constant expression EXP.
3631
3632 This is the counterpart of output_constant_def at the Tree level. */
3633
3634 tree
tree_output_constant_def(tree exp)3635 tree_output_constant_def (tree exp)
3636 {
3637 struct constant_descriptor_tree *desc = add_constant_to_table (exp);
3638 tree decl = SYMBOL_REF_DECL (XEXP (desc->rtl, 0));
3639 varpool_node::finalize_decl (decl);
3640 return decl;
3641 }
3642
/* Descriptor for one RTL constant placed in a constant pool.  Chained
   in pool-offset order via NEXT.  */
class GTY((chain_next ("%h.next"), for_user)) constant_descriptor_rtx {
public:
  /* Next constant in the pool, in increasing offset order.  */
  class constant_descriptor_rtx *next;
  /* MEM referring to the constant's location in the pool.  */
  rtx mem;
  /* SYMBOL_REF naming the pool entry.  */
  rtx sym;
  /* The constant rtx itself.  */
  rtx constant;
  /* Byte offset of this entry within the pool.  */
  HOST_WIDE_INT offset;
  /* Cached value of const_rtx_hash (CONSTANT).  */
  hashval_t hash;
  /* Machine mode of the constant.  */
  fixed_size_mode mode;
  /* Required alignment in bits.  */
  unsigned int align;
  /* Internal label number ("LC" label) of the entry.  */
  int labelno;
  /* Nonzero once the entry is known to be referenced/output.  */
  int mark;
};

/* Hasher for the per-pool hash table of RTL constant descriptors.  */
struct const_rtx_desc_hasher : ggc_ptr_hash<constant_descriptor_rtx>
{
  static hashval_t hash (constant_descriptor_rtx *);
  static bool equal (constant_descriptor_rtx *, constant_descriptor_rtx *);
};
3662
/* Used in the hash tables to avoid outputting the same constant
   twice.  Unlike 'struct constant_descriptor_tree', RTX constants
   are output once per function, not once per file.  */
/* ??? Only a few targets need per-function constant pools.  Most
   can use one per-file pool.  Should add a targetm bit to tell the
   difference.  */

struct GTY(()) rtx_constant_pool {
  /* Pointers to first and last constant in pool, as ordered by offset.  */
  class constant_descriptor_rtx *first;
  class constant_descriptor_rtx *last;

  /* Hash facility for making memory-constants from constant rtl-expressions.
     It is used on RISC machines where immediate integer arguments and
     constant addresses are restricted so that such constants must be stored
     in memory.  */
  hash_table<const_rtx_desc_hasher> *const_rtx_htab;

  /* Current offset in constant pool (does not include any
     machine-specific header).  */
  HOST_WIDE_INT offset;
};
3685
/* Hash and compare functions for const_rtx_htab.  */

/* Return the hash cached in DESC (computed by const_rtx_hash at
   insertion time).  */
hashval_t
const_rtx_desc_hasher::hash (constant_descriptor_rtx *desc)
{
  return desc->hash;
}
3693
3694 bool
equal(constant_descriptor_rtx * x,constant_descriptor_rtx * y)3695 const_rtx_desc_hasher::equal (constant_descriptor_rtx *x,
3696 constant_descriptor_rtx *y)
3697 {
3698 if (x->mode != y->mode)
3699 return 0;
3700 return rtx_equal_p (x->constant, y->constant);
3701 }
3702
/* Hash one component of a constant.  */

static hashval_t
const_rtx_hash_1 (const_rtx x)
{
  unsigned HOST_WIDE_INT hwi;
  machine_mode mode;
  enum rtx_code code;
  hashval_t h;
  int i;

  /* Seed the hash with the rtx code and mode.  */
  code = GET_CODE (x);
  mode = GET_MODE (x);
  h = (hashval_t) code * 1048573 + mode;

  switch (code)
    {
    case CONST_INT:
      hwi = INTVAL (x);

    fold_hwi:
      /* Fold a HOST_WIDE_INT down to a hashval_t by XOR-ing its
	 hashval_t-sized slices together.  */
      {
	int shift = sizeof (hashval_t) * CHAR_BIT;
	const int n = sizeof (HOST_WIDE_INT) / sizeof (hashval_t);

	h ^= (hashval_t) hwi;
	for (i = 1; i < n; ++i)
	  {
	    hwi >>= shift;
	    h ^= (hashval_t) hwi;
	  }
      }
      break;

    case CONST_WIDE_INT:
      /* XOR all the elements together, then fold as above.  */
      hwi = 0;
      {
	for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
	  hwi ^= CONST_WIDE_INT_ELT (x, i);
	goto fold_hwi;
      }

    case CONST_DOUBLE:
      /* VOIDmode CONST_DOUBLEs hold integers on targets without wide
	 ints; otherwise the value is a real.  */
      if (TARGET_SUPPORTS_WIDE_INT == 0 && mode == VOIDmode)
	{
	  hwi = CONST_DOUBLE_LOW (x) ^ CONST_DOUBLE_HIGH (x);
	  goto fold_hwi;
	}
      else
	h ^= real_hash (CONST_DOUBLE_REAL_VALUE (x));
      break;

    case CONST_FIXED:
      h ^= fixed_hash (CONST_FIXED_VALUE (x));
      break;

    case SYMBOL_REF:
      h ^= htab_hash_string (XSTR (x, 0));
      break;

    case LABEL_REF:
      h = h * 251 + CODE_LABEL_NUMBER (label_ref_label (x));
      break;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      /* Mix in the unspec number.  */
      h = h * 251 + XINT (x, 1);
      break;

    default:
      break;
    }

  return h;
}
3778
3779 /* Compute a hash value for X, which should be a constant. */
3780
3781 static hashval_t
const_rtx_hash(rtx x)3782 const_rtx_hash (rtx x)
3783 {
3784 hashval_t h = 0;
3785 subrtx_iterator::array_type array;
3786 FOR_EACH_SUBRTX (iter, array, x, ALL)
3787 h = h * 509 + const_rtx_hash_1 (*iter);
3788 return h;
3789 }
3790
3791
3792 /* Create and return a new rtx constant pool. */
3793
3794 static struct rtx_constant_pool *
create_constant_pool(void)3795 create_constant_pool (void)
3796 {
3797 struct rtx_constant_pool *pool;
3798
3799 pool = ggc_alloc<rtx_constant_pool> ();
3800 pool->const_rtx_htab = hash_table<const_rtx_desc_hasher>::create_ggc (31);
3801 pool->first = NULL;
3802 pool->last = NULL;
3803 pool->offset = 0;
3804 return pool;
3805 }
3806
3807 /* Initialize constant pool hashing for a new function. */
3808
3809 void
init_varasm_status(void)3810 init_varasm_status (void)
3811 {
3812 crtl->varasm.pool = create_constant_pool ();
3813 crtl->varasm.deferred_constants = 0;
3814 }
3815
3816 /* Given a MINUS expression, simplify it if both sides
3817 include the same symbol. */
3818
3819 rtx
simplify_subtraction(rtx x)3820 simplify_subtraction (rtx x)
3821 {
3822 rtx r = simplify_rtx (x);
3823 return r ? r : x;
3824 }
3825
3826 /* Given a constant rtx X, make (or find) a memory constant for its value
3827 and return a MEM rtx to refer to it in memory. IN_MODE is the mode
3828 of X. */
3829
rtx
force_const_mem (machine_mode in_mode, rtx x)
{
  class constant_descriptor_rtx *desc, tmp;
  struct rtx_constant_pool *pool;
  char label[256];
  rtx def, symbol;
  hashval_t hash;
  unsigned int align;
  constant_descriptor_rtx **slot;
  fixed_size_mode mode;

  /* We can't force variable-sized objects to memory.  */
  if (!is_a <fixed_size_mode> (in_mode, &mode))
    return NULL_RTX;

  /* If we're not allowed to drop X into the constant pool, don't.  */
  if (targetm.cannot_force_const_mem (mode, x))
    return NULL_RTX;

  /* Record that this function has used a constant pool entry.  */
  crtl->uses_const_pool = 1;

  /* Decide which pool to use: constants the target wants placed in
     object blocks go in the pool shared between functions, everything
     else in the current function's private pool.  */
  pool = (targetm.use_blocks_for_constant_p (mode, x)
	  ? shared_constant_pool
	  : crtl->varasm.pool);

  /* Lookup the value in the hashtable.  */
  tmp.constant = x;
  tmp.mode = mode;
  hash = const_rtx_hash (x);
  slot = pool->const_rtx_htab->find_slot_with_hash (&tmp, hash, INSERT);
  desc = *slot;

  /* If the constant was already present, return its memory.  */
  if (desc)
    return copy_rtx (desc->mem);

  /* Otherwise, create a new descriptor.  */
  desc = ggc_alloc<constant_descriptor_rtx> ();
  *slot = desc;

  /* Align the location counter as required by EXP's data type.  */
  machine_mode align_mode = (mode == VOIDmode ? word_mode : mode);
  align = targetm.static_rtx_alignment (align_mode);

  /* Round the pool offset up to an ALIGN-bit (power of two) boundary.  */
  pool->offset += (align / BITS_PER_UNIT) - 1;
  pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);

  desc->next = NULL;
  desc->constant = copy_rtx (tmp.constant);
  desc->offset = pool->offset;
  desc->hash = hash;
  desc->mode = mode;
  desc->align = align;
  desc->labelno = const_labelno;
  desc->mark = 0;

  /* Reserve space for the constant and append DESC to the pool's
     singly-linked list of entries.  */
  pool->offset += GET_MODE_SIZE (mode);
  if (pool->last)
    pool->last->next = desc;
  else
    pool->first = pool->last = desc;
  pool->last = desc;

  /* Create a string containing the label name, in LABEL.  */
  ASM_GENERATE_INTERNAL_LABEL (label, "LC", const_labelno);
  ++const_labelno;

  /* Construct the SYMBOL_REF.  Make sure to mark it as belonging to
     the constants pool.  */
  if (use_object_blocks_p () && targetm.use_blocks_for_constant_p (mode, x))
    {
      section *sect = targetm.asm_out.select_rtx_section (mode, x, align);
      symbol = create_block_symbol (ggc_strdup (label),
				    get_block_for_section (sect), -1);
    }
  else
    symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
  desc->sym = symbol;
  SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
  CONSTANT_POOL_ADDRESS_P (symbol) = 1;
  SET_SYMBOL_REF_CONSTANT (symbol, desc);

  /* Construct the MEM.  */
  desc->mem = def = gen_const_mem (mode, symbol);
  set_mem_align (def, align);

  /* If we're dropping a label to the constant pool, make sure we
     don't delete it.  */
  if (GET_CODE (x) == LABEL_REF)
    LABEL_PRESERVE_P (XEXP (x, 0)) = 1;

  return copy_rtx (def);
}
3926
3927 /* Given a constant pool SYMBOL_REF, return the corresponding constant. */
3928
3929 rtx
get_pool_constant(const_rtx addr)3930 get_pool_constant (const_rtx addr)
3931 {
3932 return SYMBOL_REF_CONSTANT (addr)->constant;
3933 }
3934
3935 /* Given a constant pool SYMBOL_REF, return the corresponding constant
3936 and whether it has been output or not. */
3937
3938 rtx
get_pool_constant_mark(rtx addr,bool * pmarked)3939 get_pool_constant_mark (rtx addr, bool *pmarked)
3940 {
3941 class constant_descriptor_rtx *desc;
3942
3943 desc = SYMBOL_REF_CONSTANT (addr);
3944 *pmarked = (desc->mark != 0);
3945 return desc->constant;
3946 }
3947
3948 /* Similar, return the mode. */
3949
3950 fixed_size_mode
get_pool_mode(const_rtx addr)3951 get_pool_mode (const_rtx addr)
3952 {
3953 return SYMBOL_REF_CONSTANT (addr)->mode;
3954 }
3955
3956 /* Return TRUE if and only if the constant pool has no entries. Note
3957 that even entries we might end up choosing not to emit are counted
3958 here, so there is the potential for missed optimizations. */
3959
3960 bool
constant_pool_empty_p(void)3961 constant_pool_empty_p (void)
3962 {
3963 return crtl->varasm.pool->first == NULL;
3964 }
3965
3966 /* Worker function for output_constant_pool_1. Emit assembly for X
3967 in MODE with known alignment ALIGN. */
3968
static void
output_constant_pool_2 (fixed_size_mode mode, rtx x, unsigned int align)
{
  switch (GET_MODE_CLASS (mode))
    {
    case MODE_FLOAT:
    case MODE_DECIMAL_FLOAT:
      {
	gcc_assert (CONST_DOUBLE_AS_FLOAT_P (x));
	assemble_real (*CONST_DOUBLE_REAL_VALUE (x),
		       as_a <scalar_float_mode> (mode), align, false);
	break;
      }

    case MODE_INT:
    case MODE_PARTIAL_INT:
    case MODE_FRACT:
    case MODE_UFRACT:
    case MODE_ACCUM:
    case MODE_UACCUM:
      assemble_integer (x, GET_MODE_SIZE (mode), align, 1);
      break;

    case MODE_VECTOR_BOOL:
      {
	gcc_assert (GET_CODE (x) == CONST_VECTOR);

	/* Pick the smallest integer mode that contains at least one
	   whole element.  Often this is byte_mode and contains more
	   than one element.  */
	unsigned int nelts = GET_MODE_NUNITS (mode);
	unsigned int elt_bits = GET_MODE_BITSIZE (mode) / nelts;
	unsigned int int_bits = MAX (elt_bits, BITS_PER_UNIT);
	scalar_int_mode int_mode = int_mode_for_size (int_bits, 0).require ();

	/* Build the constant up one integer at a time, packing
	   ELTS_PER_INT boolean elements into each integer and
	   emitting that integer recursively through the scalar
	   case above.  */
	unsigned int elts_per_int = int_bits / elt_bits;
	for (unsigned int i = 0; i < nelts; i += elts_per_int)
	  {
	    unsigned HOST_WIDE_INT value = 0;
	    /* The final integer may cover fewer than ELTS_PER_INT
	       elements; clamp to the elements that remain.  */
	    unsigned int limit = MIN (nelts - i, elts_per_int);
	    for (unsigned int j = 0; j < limit; ++j)
	      if (INTVAL (CONST_VECTOR_ELT (x, i + j)) != 0)
		value |= 1 << (j * elt_bits);
	    /* Only the first integer gets the full alignment; the
	       rest are naturally aligned to INT_BITS at most.  */
	    output_constant_pool_2 (int_mode, gen_int_mode (value, int_mode),
				    i != 0 ? MIN (align, int_bits) : align);
	  }
	break;
      }
    case MODE_VECTOR_FLOAT:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FRACT:
    case MODE_VECTOR_UFRACT:
    case MODE_VECTOR_ACCUM:
    case MODE_VECTOR_UACCUM:
      {
	int i, units;
	scalar_mode submode = GET_MODE_INNER (mode);
	/* Elements after the first are aligned to the element size
	   at most.  */
	unsigned int subalign = MIN (align, GET_MODE_BITSIZE (submode));

	gcc_assert (GET_CODE (x) == CONST_VECTOR);
	units = GET_MODE_NUNITS (mode);

	/* Emit each vector element recursively in the element mode.  */
	for (i = 0; i < units; i++)
	  {
	    rtx elt = CONST_VECTOR_ELT (x, i);
	    output_constant_pool_2 (submode, elt, i ? subalign : align);
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }
}
4044
4045 /* Worker function for output_constant_pool. Emit constant DESC,
4046 giving it ALIGN bits of alignment. */
4047
static void
output_constant_pool_1 (class constant_descriptor_rtx *desc,
			unsigned int align)
{
  rtx x, tmp;

  x = desc->constant;

  /* See if X is a LABEL_REF (or a CONST referring to a LABEL_REF)
     whose CODE_LABEL has been deleted.  This can occur if a jump table
     is eliminated by optimization.  If so, write a constant of zero
     instead.  Note that this can also happen by turning the
     CODE_LABEL into a NOTE.  */
  /* ??? This seems completely and utterly wrong.  Certainly it's
     not true for NOTE_INSN_DELETED_LABEL, but I disbelieve proper
     functioning even with rtx_insn::deleted and friends.  */

  tmp = x;
  switch (GET_CODE (tmp))
    {
    case CONST:
      /* Only a (const (plus (label_ref ...) ...)) shape is checked;
	 anything else falls through to the default case.  */
      if (GET_CODE (XEXP (tmp, 0)) != PLUS
	  || GET_CODE (XEXP (XEXP (tmp, 0), 0)) != LABEL_REF)
	break;
      tmp = XEXP (XEXP (tmp, 0), 0);
      /* FALLTHRU  */

    case LABEL_REF:
      {
	rtx_insn *insn = label_ref_label (tmp);
	/* Referenced labels must still be live at this point.  */
	gcc_assert (!insn->deleted ());
	gcc_assert (!NOTE_P (insn)
		    || NOTE_KIND (insn) != NOTE_INSN_DELETED);
	break;
      }

    default:
      break;
    }

#ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
  /* Targets defining this macro may emit the entry themselves and
     jump to DONE below, skipping the generic emission path.  */
  ASM_OUTPUT_SPECIAL_POOL_ENTRY (asm_out_file, x, desc->mode,
				 align, desc->labelno, done);
#endif

  assemble_align (align);

  /* Output the label.  */
  targetm.asm_out.internal_label (asm_out_file, "LC", desc->labelno);

  /* Output the data.
     Pass actual alignment value while emitting string constant to asm code
     as function 'output_constant_pool_1' explicitly passes the alignment as 1
     assuming that the data is already aligned which prevents the generation
     of fix-up table entries.  */
  output_constant_pool_2 (desc->mode, x, desc->align);

  /* Make sure all constants in SECTION_MERGE and not SECTION_STRINGS
     sections have proper size.  */
  if (align > GET_MODE_BITSIZE (desc->mode)
      && in_section
      && (in_section->common.flags & SECTION_MERGE))
    assemble_align (align);

#ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
 done:
#endif
  return;
}
4117
4118 /* Recompute the offsets of entries in POOL, and the overall size of
4119 POOL. Do this after calling mark_constant_pool to ensure that we
4120 are computing the offset values for the pool which we will actually
4121 emit. */
4122
4123 static void
recompute_pool_offsets(struct rtx_constant_pool * pool)4124 recompute_pool_offsets (struct rtx_constant_pool *pool)
4125 {
4126 class constant_descriptor_rtx *desc;
4127 pool->offset = 0;
4128
4129 for (desc = pool->first; desc ; desc = desc->next)
4130 if (desc->mark)
4131 {
4132 /* Recalculate offset. */
4133 unsigned int align = desc->align;
4134 pool->offset += (align / BITS_PER_UNIT) - 1;
4135 pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);
4136 desc->offset = pool->offset;
4137 pool->offset += GET_MODE_SIZE (desc->mode);
4138 }
4139 }
4140
4141 /* Mark all constants that are referenced by SYMBOL_REFs in X.
4142 Emit referenced deferred strings. */
4143
static void
mark_constants_in_pattern (rtx insn)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
    {
      const_rtx x = *iter;
      if (GET_CODE (x) == SYMBOL_REF)
	{
	  if (CONSTANT_POOL_ADDRESS_P (x))
	    {
	      class constant_descriptor_rtx *desc = SYMBOL_REF_CONSTANT (x);
	      if (desc->mark == 0)
		{
		  desc->mark = 1;
		  /* Continue the walk inside the constant itself, so
		     that pool entries referenced only by other pool
		     entries are marked too.  */
		  iter.substitute (desc->constant);
		}
	    }
	  else if (TREE_CONSTANT_POOL_ADDRESS_P (x))
	    {
	      tree decl = SYMBOL_REF_DECL (x);
	      /* A deferred string constant that is actually used:
		 emit it now and drop it from the deferred count.  */
	      if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
		{
		  n_deferred_constants--;
		  output_constant_def_contents (CONST_CAST_RTX (x));
		}
	    }
	}
    }
}
4174
4175 /* Look through appropriate parts of INSN, marking all entries in the
4176 constant pool which are actually being used. Entries that are only
4177 referenced by other constants are also marked as used. Emit
4178 deferred strings that are used. */
4179
4180 static void
mark_constants(rtx_insn * insn)4181 mark_constants (rtx_insn *insn)
4182 {
4183 if (!INSN_P (insn))
4184 return;
4185
4186 /* Insns may appear inside a SEQUENCE. Only check the patterns of
4187 insns, not any notes that may be attached. We don't want to mark
4188 a constant just because it happens to appear in a REG_EQUIV note. */
4189 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
4190 {
4191 int i, n = seq->len ();
4192 for (i = 0; i < n; ++i)
4193 {
4194 rtx subinsn = seq->element (i);
4195 if (INSN_P (subinsn))
4196 mark_constants_in_pattern (subinsn);
4197 }
4198 }
4199 else
4200 mark_constants_in_pattern (insn);
4201 }
4202
4203 /* Look through the instructions for this function, and mark all the
4204 entries in POOL which are actually being used. Emit deferred constants
4205 which have indeed been used. */
4206
4207 static void
mark_constant_pool(void)4208 mark_constant_pool (void)
4209 {
4210 rtx_insn *insn;
4211
4212 if (!crtl->uses_const_pool && n_deferred_constants == 0)
4213 return;
4214
4215 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4216 mark_constants (insn);
4217 }
4218
4219 /* Write all the constants in POOL. */
4220
static void
output_constant_pool_contents (struct rtx_constant_pool *pool)
{
  class constant_descriptor_rtx *desc;

  /* Emit only the entries marked as used by mark_constant_pool.  */
  for (desc = pool->first; desc ; desc = desc->next)
    if (desc->mark)
      {
	/* If the constant is part of an object_block, make sure that
	   the constant has been positioned within its block, but do not
	   write out its definition yet.  output_object_blocks will do
	   that later.  */
	if (SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym)
	    && SYMBOL_REF_BLOCK (desc->sym))
	  place_block_symbol (desc->sym);
	else
	  {
	    /* Otherwise emit it directly into the section the target
	       selects for this constant.  */
	    switch_to_section (targetm.asm_out.select_rtx_section
			       (desc->mode, desc->constant, desc->align));
	    output_constant_pool_1 (desc, desc->align);
	  }
      }
}
4244
4245 /* Mark all constants that are used in the current function, then write
4246 out the function's private constant pool. */
4247
static void
output_constant_pool (const char *fnname ATTRIBUTE_UNUSED,
		      tree fndecl ATTRIBUTE_UNUSED)
{
  struct rtx_constant_pool *pool = crtl->varasm.pool;

  /* It is possible for gcc to call force_const_mem and then to later
     discard the instructions which refer to the constant.  In such a
     case we do not need to output the constant.  */
  mark_constant_pool ();

  /* Having marked the constant pool entries we'll actually emit, we
     now need to rebuild the offset information, which may have become
     stale.  */
  recompute_pool_offsets (pool);

  /* FNNAME and FNDECL are only used by targets that define the
     pool prologue/epilogue macros below.  */
#ifdef ASM_OUTPUT_POOL_PROLOGUE
  ASM_OUTPUT_POOL_PROLOGUE (asm_out_file, fnname, fndecl, pool->offset);
#endif

  output_constant_pool_contents (pool);

#ifdef ASM_OUTPUT_POOL_EPILOGUE
  ASM_OUTPUT_POOL_EPILOGUE (asm_out_file, fnname, fndecl, pool->offset);
#endif
}
4274
4275 /* Write the contents of the shared constant pool. */
4276
4277 void
output_shared_constant_pool(void)4278 output_shared_constant_pool (void)
4279 {
4280 output_constant_pool_contents (shared_constant_pool);
4281 }
4282
4283 /* Determine what kind of relocations EXP may need. */
4284
int
compute_reloc_for_constant (tree exp)
{
  /* The result is a bitmask: bit 0 (value 1) means a relocation
     against a locally-binding symbol, bit 1 (value 2) a relocation
     against a global one; 0 means no relocation needed.  */
  int reloc = 0, reloc2;
  tree tem;

  switch (TREE_CODE (exp))
    {
    case ADDR_EXPR:
    case FDESC_EXPR:
      /* Go inside any operations that get_inner_reference can handle and see
	 if what's inside is a constant: no need to do anything here for
	 addresses of variables or functions.  */
      for (tem = TREE_OPERAND (exp, 0); handled_component_p (tem);
	   tem = TREE_OPERAND (tem, 0))
	;

      if (TREE_CODE (tem) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR)
	{
	  reloc = compute_reloc_for_constant (TREE_OPERAND (tem, 0));
	  break;
	}

      if (!targetm.binds_local_p (tem))
	reloc |= 2;
      else
	reloc |= 1;
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
      reloc |= compute_reloc_for_constant (TREE_OPERAND (exp, 1));
      break;

    case MINUS_EXPR:
      reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
      reloc2 = compute_reloc_for_constant (TREE_OPERAND (exp, 1));
      /* The difference of two local labels is computable at link time.  */
      if (reloc == 1 && reloc2 == 1)
	reloc = 0;
      else
	reloc |= reloc2;
      break;

    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      /* Conversions don't change the relocation requirements of the
	 underlying value.  */
      reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
      break;

    case CONSTRUCTOR:
      {
	/* An aggregate needs whatever relocations its elements need.  */
	unsigned HOST_WIDE_INT idx;
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem)
	  if (tem != 0)
	    reloc |= compute_reloc_for_constant (tem);
      }
      break;

    default:
      break;
    }
  return reloc;
}
4350
4351 /* Find all the constants whose addresses are referenced inside of EXP,
4352 and make sure assembler code with a label has been output for each one.
4353 Indicate whether an ADDR_EXPR has been encountered. */
4354
static void
output_addressed_constants (tree exp)
{
  tree tem;

  switch (TREE_CODE (exp))
    {
    case ADDR_EXPR:
    case FDESC_EXPR:
      /* Go inside any operations that get_inner_reference can handle and see
	 if what's inside is a constant: no need to do anything here for
	 addresses of variables or functions.  */
      for (tem = TREE_OPERAND (exp, 0); handled_component_p (tem);
	   tem = TREE_OPERAND (tem, 0))
	;

      /* If we have an initialized CONST_DECL, retrieve the initializer.  */
      if (TREE_CODE (tem) == CONST_DECL && DECL_INITIAL (tem))
	tem = DECL_INITIAL (tem);

      /* The address of a constant needs that constant to live in
	 memory: force a labeled definition for it.  */
      if (CONSTANT_CLASS_P (tem) || TREE_CODE (tem) == CONSTRUCTOR)
	output_constant_def (tem, 0);

      if (TREE_CODE (tem) == MEM_REF)
	output_addressed_constants (TREE_OPERAND (tem, 0));
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
      /* Handle the second operand here, then fall through to process
	 the first operand with the conversions below.  */
      output_addressed_constants (TREE_OPERAND (exp, 1));
      gcc_fallthrough ();

    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      output_addressed_constants (TREE_OPERAND (exp, 0));
      break;

    case CONSTRUCTOR:
      {
	/* Recurse into every element of the aggregate.  */
	unsigned HOST_WIDE_INT idx;
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem)
	  if (tem != 0)
	    output_addressed_constants (tem);
      }
      break;

    default:
      break;
    }
}
4406
4407 /* Whether a constructor CTOR is a valid static constant initializer if all
4408 its elements are. This used to be internal to initializer_constant_valid_p
4409 and has been exposed to let other functions like categorize_ctor_elements
4410 evaluate the property while walking a constructor for other purposes. */
4411
4412 bool
constructor_static_from_elts_p(const_tree ctor)4413 constructor_static_from_elts_p (const_tree ctor)
4414 {
4415 return (TREE_CONSTANT (ctor)
4416 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4417 || TREE_CODE (TREE_TYPE (ctor)) == RECORD_TYPE
4418 || TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE));
4419 }
4420
4421 static tree initializer_constant_valid_p_1 (tree value, tree endtype,
4422 tree *cache);
4423
4424 /* A subroutine of initializer_constant_valid_p. VALUE is a MINUS_EXPR,
4425 PLUS_EXPR or POINTER_PLUS_EXPR. This looks for cases of VALUE
4426 which are valid when ENDTYPE is an integer of any size; in
4427 particular, this does not accept a pointer minus a constant. This
4428 returns null_pointer_node if the VALUE is an absolute constant
4429 which can be used to initialize a static variable. Otherwise it
4430 returns NULL. */
4431
static tree
narrowing_initializer_constant_valid_p (tree value, tree endtype, tree *cache)
{
  tree op0, op1;

  /* Only integer destinations can absorb a narrowed difference.  */
  if (!INTEGRAL_TYPE_P (endtype))
    return NULL_TREE;

  op0 = TREE_OPERAND (value, 0);
  op1 = TREE_OPERAND (value, 1);

  /* Like STRIP_NOPS except allow the operand mode to widen.  This
     works around a feature of fold that simplifies (int)(p1 - p2) to
     ((int)p1 - (int)p2) under the theory that the narrower operation
     is cheaper.  */

  while (CONVERT_EXPR_P (op0)
	 || TREE_CODE (op0) == NON_LVALUE_EXPR)
    {
      tree inner = TREE_OPERAND (op0, 0);
      /* Stop if the conversion would actually narrow the operand.  */
      if (inner == error_mark_node
	  || ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
	  || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op0)))
	      > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner)))))
	break;
      op0 = inner;
    }

  /* Same stripping for the second operand.  */
  while (CONVERT_EXPR_P (op1)
	 || TREE_CODE (op1) == NON_LVALUE_EXPR)
    {
      tree inner = TREE_OPERAND (op1, 0);
      if (inner == error_mark_node
	  || ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
	  || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op1)))
	      > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner)))))
	break;
      op1 = inner;
    }

  op0 = initializer_constant_valid_p_1 (op0, endtype, cache);
  if (!op0)
    return NULL_TREE;

  /* CACHE holds two (value, result) pairs; the second pair is for the
     second operand.  */
  op1 = initializer_constant_valid_p_1 (op1, endtype,
					cache ? cache + 2 : NULL);
  /* Both initializers must be known.  */
  if (op1)
    {
      if (op0 == op1
	  && (op0 == null_pointer_node
	      || TREE_CODE (value) == MINUS_EXPR))
	return null_pointer_node;

      /* Support differences between labels.  */
      if (TREE_CODE (op0) == LABEL_DECL
	  && TREE_CODE (op1) == LABEL_DECL)
	return null_pointer_node;

      /* Identical string constants also cancel in a difference.  */
      if (TREE_CODE (op0) == STRING_CST
	  && TREE_CODE (op1) == STRING_CST
	  && operand_equal_p (op0, op1, 1))
	return null_pointer_node;
    }

  return NULL_TREE;
}
4499
4500 /* Helper function of initializer_constant_valid_p.
4501 Return nonzero if VALUE is a valid constant-valued expression
4502 for use in initializing a static variable; one that can be an
4503 element of a "constant" initializer.
4504
4505 Return null_pointer_node if the value is absolute;
4506 if it is relocatable, return the variable that determines the relocation.
4507 We assume that VALUE has been folded as much as possible;
4508 therefore, we do not need to check for such things as
4509 arithmetic-combinations of integers.
4510
4511 Use CACHE (pointer to 2 tree values) for caching if non-NULL. */
4512
static tree
initializer_constant_valid_p_1 (tree value, tree endtype, tree *cache)
{
  tree ret;

  switch (TREE_CODE (value))
    {
    case CONSTRUCTOR:
      if (constructor_static_from_elts_p (value))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree elt;
	  bool absolute = true;

	  /* CACHE[0] is the last value checked, CACHE[1] its result.  */
	  if (cache && cache[0] == value)
	    return cache[1];
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
	    {
	      tree reloc;
	      reloc = initializer_constant_valid_p_1 (elt, TREE_TYPE (elt),
						      NULL);
	      if (!reloc
		  /* An absolute value is required with reverse SSO.  */
		  || (reloc != null_pointer_node
		      && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (value))
		      && !AGGREGATE_TYPE_P (TREE_TYPE (elt))))
		{
		  if (cache)
		    {
		      cache[0] = value;
		      cache[1] = NULL_TREE;
		    }
		  return NULL_TREE;
		}
	      if (reloc != null_pointer_node)
		absolute = false;
	    }
	  /* For a non-absolute relocation, there is no single
	     variable that can be "the variable that determines the
	     relocation."  */
	  if (cache)
	    {
	      cache[0] = value;
	      cache[1] = absolute ? null_pointer_node : error_mark_node;
	    }
	  return absolute ? null_pointer_node : error_mark_node;
	}

      return TREE_STATIC (value) ? null_pointer_node : NULL_TREE;

    case INTEGER_CST:
    case VECTOR_CST:
    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case COMPLEX_CST:
      /* Simple constants are always absolute.  */
      return null_pointer_node;

    case ADDR_EXPR:
    case FDESC_EXPR:
      {
	tree op0 = staticp (TREE_OPERAND (value, 0));
	if (op0)
	  {
	    /* "&(*a).f" is like unto pointer arithmetic.  If "a" turns out
	       to be a constant, this is old-skool offsetof-like nonsense.  */
	    if (TREE_CODE (op0) == INDIRECT_REF
		&& TREE_CONSTANT (TREE_OPERAND (op0, 0)))
	      return null_pointer_node;
	    /* Taking the address of a nested function involves a trampoline,
	       unless we don't need or want one.  */
	    if (TREE_CODE (op0) == FUNCTION_DECL
		&& DECL_STATIC_CHAIN (op0)
		&& !TREE_NO_TRAMPOLINE (value))
	      return NULL_TREE;
	    /* "&{...}" requires a temporary to hold the constructed
	       object.  */
	    if (TREE_CODE (op0) == CONSTRUCTOR)
	      return NULL_TREE;
	  }
	/* Relocatable: OP0 is the object the relocation is against
	   (or NULL_TREE if the operand is not static).  */
	return op0;
      }

    case NON_LVALUE_EXPR:
      return initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
					     endtype, cache);

    case VIEW_CONVERT_EXPR:
      {
	tree src = TREE_OPERAND (value, 0);
	tree src_type = TREE_TYPE (src);
	tree dest_type = TREE_TYPE (value);

	/* Allow view-conversions from aggregate to non-aggregate type only
	   if the bit pattern is fully preserved afterwards; otherwise, the
	   RTL expander won't be able to apply a subsequent transformation
	   to the underlying constructor.  */
	if (AGGREGATE_TYPE_P (src_type) && !AGGREGATE_TYPE_P (dest_type))
	  {
	    if (TYPE_MODE (endtype) == TYPE_MODE (dest_type))
	      return initializer_constant_valid_p_1 (src, endtype, cache);
	    else
	      return NULL_TREE;
	  }

	/* Allow all other kinds of view-conversion.  */
	return initializer_constant_valid_p_1 (src, endtype, cache);
      }

    CASE_CONVERT:
      {
	tree src = TREE_OPERAND (value, 0);
	tree src_type = TREE_TYPE (src);
	tree dest_type = TREE_TYPE (value);

	/* Allow conversions between pointer types, floating-point
	   types, and offset types.  */
	if ((POINTER_TYPE_P (dest_type) && POINTER_TYPE_P (src_type))
	    || (FLOAT_TYPE_P (dest_type) && FLOAT_TYPE_P (src_type))
	    || (TREE_CODE (dest_type) == OFFSET_TYPE
		&& TREE_CODE (src_type) == OFFSET_TYPE))
	  return initializer_constant_valid_p_1 (src, endtype, cache);

	/* Allow length-preserving conversions between integer types.  */
	if (INTEGRAL_TYPE_P (dest_type) && INTEGRAL_TYPE_P (src_type)
	    && (TYPE_PRECISION (dest_type) == TYPE_PRECISION (src_type)))
	  return initializer_constant_valid_p_1 (src, endtype, cache);

	/* Allow conversions between other integer types only if
	   explicit value.  Don't allow sign-extension to a type larger
	   than word and pointer, there aren't relocations that would
	   allow to sign extend it to a wider type.  */
	if (INTEGRAL_TYPE_P (dest_type)
	    && INTEGRAL_TYPE_P (src_type)
	    && (TYPE_UNSIGNED (src_type)
		|| TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type)
		|| TYPE_PRECISION (dest_type) <= BITS_PER_WORD
		|| TYPE_PRECISION (dest_type) <= POINTER_SIZE))
	  {
	    /* Only an absolute inner value survives; a relocatable
	       one falls through to the final NULL_TREE return.  */
	    tree inner = initializer_constant_valid_p_1 (src, endtype, cache);
	    if (inner == null_pointer_node)
	      return null_pointer_node;
	    break;
	  }

	/* Allow (int) &foo provided int is as wide as a pointer.  */
	if (INTEGRAL_TYPE_P (dest_type) && POINTER_TYPE_P (src_type)
	    && (TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type)))
	  return initializer_constant_valid_p_1 (src, endtype, cache);

	/* Likewise conversions from int to pointers, but also allow
	   conversions from 0.  */
	if ((POINTER_TYPE_P (dest_type)
	     || TREE_CODE (dest_type) == OFFSET_TYPE)
	    && INTEGRAL_TYPE_P (src_type))
	  {
	    if (TREE_CODE (src) == INTEGER_CST
		&& TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type))
	      return null_pointer_node;
	    if (integer_zerop (src))
	      return null_pointer_node;
	    else if (TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type))
	      return initializer_constant_valid_p_1 (src, endtype, cache);
	  }

	/* Allow conversions to struct or union types if the value
	   inside is okay.  */
	if (TREE_CODE (dest_type) == RECORD_TYPE
	    || TREE_CODE (dest_type) == UNION_TYPE)
	  return initializer_constant_valid_p_1 (src, endtype, cache);
      }
      break;

    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      /* Any valid floating-point constants will have been folded by now;
	 with -frounding-math we hit this with addition of two constants.  */
      if (TREE_CODE (endtype) == REAL_TYPE)
	return NULL_TREE;
      if (cache && cache[0] == value)
	return cache[1];
      if (! INTEGRAL_TYPE_P (endtype)
	  || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
	{
	  /* A four-slot cache: one (value, result) pair per operand,
	     reused by the narrowing helper below.  */
	  tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
	  tree valid0
	    = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
					      endtype, ncache);
	  tree valid1
	    = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
					      endtype, ncache + 2);
	  /* If either term is absolute, use the other term's relocation.  */
	  if (valid0 == null_pointer_node)
	    ret = valid1;
	  else if (valid1 == null_pointer_node)
	    ret = valid0;
	  /* Support narrowing pointer differences.  */
	  else
	    ret = narrowing_initializer_constant_valid_p (value, endtype,
							  ncache);
	}
      else
	/* Support narrowing pointer differences.  */
	ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
      if (cache)
	{
	  cache[0] = value;
	  cache[1] = ret;
	}
      return ret;

    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (endtype) == REAL_TYPE)
	return NULL_TREE;
      if (cache && cache[0] == value)
	return cache[1];
      if (! INTEGRAL_TYPE_P (endtype)
	  || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
	{
	  tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
	  tree valid0
	    = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
					      endtype, ncache);
	  tree valid1
	    = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
					      endtype, ncache + 2);
	  /* Win if second argument is absolute.  */
	  if (valid1 == null_pointer_node)
	    ret = valid0;
	  /* Win if both arguments have the same relocation.
	     Then the value is absolute.  */
	  else if (valid0 == valid1 && valid0 != 0)
	    ret = null_pointer_node;
	  /* Since GCC guarantees that string constants are unique in the
	     generated code, a subtraction between two copies of the same
	     constant string is absolute.  */
	  else if (valid0 && TREE_CODE (valid0) == STRING_CST
		   && valid1 && TREE_CODE (valid1) == STRING_CST
		   && operand_equal_p (valid0, valid1, 1))
	    ret = null_pointer_node;
	  /* Support narrowing differences.  */
	  else
	    ret = narrowing_initializer_constant_valid_p (value, endtype,
							  ncache);
	}
      else
	/* Support narrowing differences.  */
	ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
      if (cache)
	{
	  cache[0] = value;
	  cache[1] = ret;
	}
      return ret;

    default:
      break;
    }

  return NULL_TREE;
}
4775
4776 /* Return nonzero if VALUE is a valid constant-valued expression
4777 for use in initializing a static variable; one that can be an
4778 element of a "constant" initializer.
4779
4780 Return null_pointer_node if the value is absolute;
4781 if it is relocatable, return the variable that determines the relocation.
4782 We assume that VALUE has been folded as much as possible;
4783 therefore, we do not need to check for such things as
4784 arithmetic-combinations of integers. */
4785 tree
initializer_constant_valid_p(tree value,tree endtype,bool reverse)4786 initializer_constant_valid_p (tree value, tree endtype, bool reverse)
4787 {
4788 tree reloc = initializer_constant_valid_p_1 (value, endtype, NULL);
4789
4790 /* An absolute value is required with reverse storage order. */
4791 if (reloc
4792 && reloc != null_pointer_node
4793 && reverse
4794 && !AGGREGATE_TYPE_P (endtype)
4795 && !VECTOR_TYPE_P (endtype))
4796 reloc = NULL_TREE;
4797
4798 return reloc;
4799 }
4800
4801 /* Return true if VALUE is a valid constant-valued expression
4802 for use in initializing a static bit-field; one that can be
4803 an element of a "constant" initializer. */
4804
4805 bool
initializer_constant_valid_for_bitfield_p(tree value)4806 initializer_constant_valid_for_bitfield_p (tree value)
4807 {
4808 /* For bitfields we support integer constants or possibly nested aggregates
4809 of such. */
4810 switch (TREE_CODE (value))
4811 {
4812 case CONSTRUCTOR:
4813 {
4814 unsigned HOST_WIDE_INT idx;
4815 tree elt;
4816
4817 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
4818 if (!initializer_constant_valid_for_bitfield_p (elt))
4819 return false;
4820 return true;
4821 }
4822
4823 case INTEGER_CST:
4824 case REAL_CST:
4825 return true;
4826
4827 case VIEW_CONVERT_EXPR:
4828 case NON_LVALUE_EXPR:
4829 return
4830 initializer_constant_valid_for_bitfield_p (TREE_OPERAND (value, 0));
4831
4832 default:
4833 break;
4834 }
4835
4836 return false;
4837 }
4838
4839 /* Check if a STRING_CST fits into the field.
4840 Tolerate only the case when the NUL termination
4841 does not fit into the field. */
4842
4843 static bool
check_string_literal(tree string,unsigned HOST_WIDE_INT size)4844 check_string_literal (tree string, unsigned HOST_WIDE_INT size)
4845 {
4846 tree type = TREE_TYPE (string);
4847 tree eltype = TREE_TYPE (type);
4848 unsigned HOST_WIDE_INT elts = tree_to_uhwi (TYPE_SIZE_UNIT (eltype));
4849 unsigned HOST_WIDE_INT mem_size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
4850 int len = TREE_STRING_LENGTH (string);
4851
4852 if (elts != 1 && elts != 2 && elts != 4)
4853 return false;
4854 if (len < 0 || len % elts != 0)
4855 return false;
4856 if (size < (unsigned)len)
4857 return false;
4858 if (mem_size != size)
4859 return false;
4860 return true;
4861 }
4862
/* output_constructor outer state of relevance in recursive calls, typically
   for nested aggregate bitfields.  */

struct oc_outer_state {
  unsigned int bit_offset;	/* current position in ...  */
  int byte;			/* ... the outer byte buffer.  */
};

/* Forward declaration: output_constant and output_constructor recurse
   into each other for nested aggregates.  */
static unsigned HOST_WIDE_INT
output_constructor (tree, unsigned HOST_WIDE_INT, unsigned int, bool,
		    oc_outer_state *);
4874
/* Output assembler code for constant EXP, with no label.
   This includes the pseudo-op such as ".int" or ".byte", and a newline.
   Assumes output_addressed_constants has been done on EXP already.

   Generate at least SIZE bytes of assembler data, padding at the end
   with zeros if necessary.  SIZE must always be specified.  The returned
   value is the actual number of bytes of assembler data generated, which
   may be bigger than SIZE if the object contains a variable length field.

   SIZE is important for structure constructors,
   since trailing members may have been omitted from the constructor.
   It is also important for initialization of arrays from string constants
   since the full length of the string constant might not be wanted.
   It is also needed for initialization of unions, where the initializer's
   type is just one member, and that may not be as long as the union.

   There is a case in which we would fail to output exactly SIZE bytes:
   for a structure constructor that wants to produce more than SIZE bytes.
   But such constructors will never be generated for any possible input.

   ALIGN is the alignment of the data in bits.

   If REVERSE is true, EXP is output in reverse storage order.

   If MERGE_STRINGS is true, a STRING_CST is emitted with its NUL
   terminator included, for use in a mergeable string section.  */

static unsigned HOST_WIDE_INT
output_constant (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
		 bool reverse, bool merge_strings)
{
  enum tree_code code;
  unsigned HOST_WIDE_INT thissize;
  rtx cst;

  /* Nothing to emit for a zero-sized object or when only checking
     syntax.  */
  if (size == 0 || flag_syntax_only)
    return size;

  /* See if we're trying to initialize a pointer in a non-default mode
     to the address of some declaration somewhere.  If the target says
     the mode is valid for pointers, assume the target has a way of
     resolving it.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && POINTER_TYPE_P (TREE_TYPE (exp))
      && targetm.addr_space.valid_pointer_mode
	   (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)),
	    TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))))
    {
      tree saved_type = TREE_TYPE (exp);

      /* Peel off any intermediate conversions-to-pointer for valid
	 pointer modes.  */
      while (TREE_CODE (exp) == NOP_EXPR
	     && POINTER_TYPE_P (TREE_TYPE (exp))
	     && targetm.addr_space.valid_pointer_mode
		  (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)),
		   TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))))
	exp = TREE_OPERAND (exp, 0);

      /* If what we're left with is the address of something, we can
	 convert the address to the final type and output it that
	 way.  */
      if (TREE_CODE (exp) == ADDR_EXPR)
	exp = build1 (ADDR_EXPR, saved_type, TREE_OPERAND (exp, 0));
      /* Likewise for constant ints.  */
      else if (TREE_CODE (exp) == INTEGER_CST)
	exp = fold_convert (saved_type, exp);

    }

  /* Eliminate any conversions since we'll be outputting the underlying
     constant.  */
  while (CONVERT_EXPR_P (exp)
	 || TREE_CODE (exp) == NON_LVALUE_EXPR
	 || TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      HOST_WIDE_INT type_size = int_size_in_bytes (TREE_TYPE (exp));
      HOST_WIDE_INT op_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0)));

      /* Make sure eliminating the conversion is really a no-op, except with
	 VIEW_CONVERT_EXPRs to allow for wild Ada unchecked conversions and
	 union types to allow for Ada unchecked unions.  */
      if (type_size > op_size
	  && TREE_CODE (exp) != VIEW_CONVERT_EXPR
	  && TREE_CODE (TREE_TYPE (exp)) != UNION_TYPE)
	/* Keep the conversion.  */
	break;
      else
	exp = TREE_OPERAND (exp, 0);
    }

  code = TREE_CODE (TREE_TYPE (exp));
  thissize = int_size_in_bytes (TREE_TYPE (exp));

  /* Allow a constructor with no elements for any data type.
     This means to fill the space with zeros.  */
  if (TREE_CODE (exp) == CONSTRUCTOR
      && vec_safe_is_empty (CONSTRUCTOR_ELTS (exp)))
    {
      assemble_zeros (size);
      return size;
    }

  /* Function descriptors are emitted via a target-specific macro.  */
  if (TREE_CODE (exp) == FDESC_EXPR)
    {
#ifdef ASM_OUTPUT_FDESC
      HOST_WIDE_INT part = tree_to_shwi (TREE_OPERAND (exp, 1));
      tree decl = TREE_OPERAND (exp, 0);
      ASM_OUTPUT_FDESC (asm_out_file, decl, part);
#else
      gcc_unreachable ();
#endif
      return size;
    }

  /* Now output the underlying data.  If we've handled the padding, return.
     Otherwise, break and ensure SIZE is the size written.  */
  switch (code)
    {
    case BOOLEAN_TYPE:
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case OFFSET_TYPE:
    case FIXED_POINT_TYPE:
    case NULLPTR_TYPE:
      cst = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
      if (reverse)
	cst = flip_storage_order (TYPE_MODE (TREE_TYPE (exp)), cst);
      if (!assemble_integer (cst, MIN (size, thissize), align, 0))
	error ("initializer for integer/fixed-point value is too complicated");
      break;

    case REAL_TYPE:
      if (TREE_CODE (exp) != REAL_CST)
	error ("initializer for floating value is not a floating constant");
      else
	assemble_real (TREE_REAL_CST (exp),
		       SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (exp)),
		       align, reverse);
      break;

    case COMPLEX_TYPE:
      /* Real part first, then the imaginary part at half-size alignment.  */
      output_constant (TREE_REALPART (exp), thissize / 2, align,
		       reverse, false);
      output_constant (TREE_IMAGPART (exp), thissize / 2,
		       min_align (align, BITS_PER_UNIT * (thissize / 2)),
		       reverse, false);
      break;

    case ARRAY_TYPE:
    case VECTOR_TYPE:
      switch (TREE_CODE (exp))
	{
	case CONSTRUCTOR:
	  return output_constructor (exp, size, align, reverse, NULL);
	case STRING_CST:
	  thissize = (unsigned HOST_WIDE_INT)TREE_STRING_LENGTH (exp);
	  /* For mergeable sections, also emit the terminating NUL when
	     the literal does not already end in one.  */
	  if (merge_strings
	      && (thissize == 0
		  || TREE_STRING_POINTER (exp) [thissize - 1] != '\0'))
	    thissize++;
	  gcc_checking_assert (check_string_literal (exp, size));
	  assemble_string (TREE_STRING_POINTER (exp), thissize);
	  break;
	case VECTOR_CST:
	  {
	    scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	    unsigned int nalign = MIN (align, GET_MODE_ALIGNMENT (inner));
	    int elt_size = GET_MODE_SIZE (inner);
	    /* The first element gets the vector's full alignment, the
	       rest only the element alignment.  */
	    output_constant (VECTOR_CST_ELT (exp, 0), elt_size, align,
			     reverse, false);
	    thissize = elt_size;
	    /* Static constants must have a fixed size.  */
	    unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
	    for (unsigned int i = 1; i < nunits; i++)
	      {
		output_constant (VECTOR_CST_ELT (exp, i), elt_size, nalign,
				 reverse, false);
		thissize += elt_size;
	      }
	    break;
	  }
	default:
	  gcc_unreachable ();
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      gcc_assert (TREE_CODE (exp) == CONSTRUCTOR);
      return output_constructor (exp, size, align, reverse, NULL);

    case ERROR_MARK:
      return 0;

    default:
      gcc_unreachable ();
    }

  /* Pad with zeros up to SIZE when the data written was smaller.  */
  if (size > thissize)
    assemble_zeros (size - thissize);

  return size;
}
5078
5079 /* Subroutine of output_constructor, used for computing the size of
5080 arrays of unspecified length. VAL must be a CONSTRUCTOR of an array
5081 type with an unspecified upper bound. */
5082
5083 static unsigned HOST_WIDE_INT
array_size_for_constructor(tree val)5084 array_size_for_constructor (tree val)
5085 {
5086 tree max_index;
5087 unsigned HOST_WIDE_INT cnt;
5088 tree index, value, tmp;
5089 offset_int i;
5090
5091 /* This code used to attempt to handle string constants that are not
5092 arrays of single-bytes, but nothing else does, so there's no point in
5093 doing it here. */
5094 if (TREE_CODE (val) == STRING_CST)
5095 return TREE_STRING_LENGTH (val);
5096
5097 max_index = NULL_TREE;
5098 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (val), cnt, index, value)
5099 {
5100 if (TREE_CODE (index) == RANGE_EXPR)
5101 index = TREE_OPERAND (index, 1);
5102 if (max_index == NULL_TREE || tree_int_cst_lt (max_index, index))
5103 max_index = index;
5104 }
5105
5106 if (max_index == NULL_TREE)
5107 return 0;
5108
5109 /* Compute the total number of array elements. */
5110 tmp = TYPE_MIN_VALUE (TYPE_DOMAIN (TREE_TYPE (val)));
5111 i = wi::to_offset (max_index) - wi::to_offset (tmp) + 1;
5112
5113 /* Multiply by the array element unit size to find number of bytes. */
5114 i *= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (val))));
5115
5116 gcc_assert (wi::fits_uhwi_p (i));
5117 return i.to_uhwi ();
5118 }
5119
/* Other datastructures + helpers for output_constructor.  */

/* output_constructor local state to support interaction with helpers.  */

struct oc_local_state {

  /* Received arguments.  */
  tree exp;                     /* Constructor expression.  */
  tree type;                    /* Type of constructor expression.  */
  unsigned HOST_WIDE_INT size;  /* # bytes to output - pad if necessary.  */
  unsigned int align;           /* Known initial alignment.  */
  tree min_index;               /* Lower bound if specified for an array.  */

  /* Output processing state.  */
  HOST_WIDE_INT total_bytes;  /* # bytes output so far / current position.  */
  int byte;                   /* Part of a bitfield byte yet to be output.  */
  int last_relative_index;    /* Implicit or explicit index of the last
				 array element output within a bitfield.  */
  bool byte_buffer_in_use;    /* Whether BYTE is in use.  */
  bool reverse;               /* Whether reverse storage order is in use.  */

  /* Current element.  */
  tree field;      /* Current field decl in a record.  */
  tree val;        /* Current element value.  */
  tree index;      /* Current element index.  */

};
5147
/* Helper for output_constructor.  From the current LOCAL state, output a
   RANGE_EXPR element: the value LOCAL->val replicated for every index in
   the range [TREE_OPERAND (LOCAL->index, 0), TREE_OPERAND (LOCAL->index, 1)].  */

static void
output_constructor_array_range (oc_local_state *local)
{
  /* Perform the index calculation in modulo arithmetic but
     sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
     but we are using an unsigned sizetype.  */
  unsigned prec = TYPE_PRECISION (sizetype);
  offset_int idx = wi::sext (wi::to_offset (TREE_OPERAND (local->index, 0))
			     - wi::to_offset (local->min_index), prec);
  tree valtype = TREE_TYPE (local->val);
  /* Byte position of the first element of the range.  */
  HOST_WIDE_INT fieldpos
    = (idx * wi::to_offset (TYPE_SIZE_UNIT (valtype))).to_short_addr ();

  /* Advance to offset of this element.  */
  if (fieldpos > local->total_bytes)
    {
      assemble_zeros (fieldpos - local->total_bytes);
      local->total_bytes = fieldpos;
    }
  else
    /* Must not go backwards.  */
    gcc_assert (fieldpos == local->total_bytes);

  unsigned HOST_WIDE_INT fieldsize
    = int_size_in_bytes (TREE_TYPE (local->type));

  HOST_WIDE_INT lo_index
    = tree_to_shwi (TREE_OPERAND (local->index, 0));
  HOST_WIDE_INT hi_index
    = tree_to_shwi (TREE_OPERAND (local->index, 1));
  HOST_WIDE_INT index;

  /* After the first element, alignment is limited by the element size.  */
  unsigned int align2
    = min_align (local->align, fieldsize * BITS_PER_UNIT);

  for (index = lo_index; index <= hi_index; index++)
    {
      /* Output the element's initial value; a null value means zeros.  */
      if (local->val == NULL_TREE)
	assemble_zeros (fieldsize);
      else
	fieldsize = output_constant (local->val, fieldsize, align2,
				     local->reverse, false);

      /* Count its size.  */
      local->total_bytes += fieldsize;
    }
}
5199
/* Helper for output_constructor.  From the current LOCAL state, output a
   field element that is not true bitfield or part of an outer one.  */

static void
output_constructor_regular_field (oc_local_state *local)
{
  /* Field size and position.  Since this structure is static, we know the
     positions are constant.  */
  unsigned HOST_WIDE_INT fieldsize;
  HOST_WIDE_INT fieldpos;

  unsigned int align2;

  /* Output any buffered-up bit-fields preceding this element.  */
  if (local->byte_buffer_in_use)
    {
      assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
      local->total_bytes++;
      local->byte_buffer_in_use = false;
    }

  /* Compute the byte position of this element: from its index for an
     array, from the field decl for a record, 0 otherwise (union).  */
  if (local->index != NULL_TREE)
    {
      /* Perform the index calculation in modulo arithmetic but
	 sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
	 but we are using an unsigned sizetype.  */
      unsigned prec = TYPE_PRECISION (sizetype);
      offset_int idx = wi::sext (wi::to_offset (local->index)
				 - wi::to_offset (local->min_index), prec);
      fieldpos = (idx * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (local->val))))
	.to_short_addr ();
    }
  else if (local->field != NULL_TREE)
    fieldpos = int_byte_position (local->field);
  else
    fieldpos = 0;

  /* Advance to offset of this element.
     Note no alignment needed in an array, since that is guaranteed
     if each element has the proper size.  */
  if (local->field != NULL_TREE || local->index != NULL_TREE)
    {
      if (fieldpos > local->total_bytes)
	{
	  assemble_zeros (fieldpos - local->total_bytes);
	  local->total_bytes = fieldpos;
	}
      else
	/* Must not go backwards.  */
	gcc_assert (fieldpos == local->total_bytes);
    }

  /* Find the alignment of this element.  */
  align2 = min_align (local->align, BITS_PER_UNIT * fieldpos);

  /* Determine size this element should occupy.  */
  if (local->field)
    {
      fieldsize = 0;

      /* If this is an array with an unspecified upper bound,
	 the initializer determines the size.  */
      /* ??? This ought to only checked if DECL_SIZE_UNIT is NULL,
	 but we cannot do this until the deprecated support for
	 initializing zero-length array members is removed.  */
      if (TREE_CODE (TREE_TYPE (local->field)) == ARRAY_TYPE
	  && (!TYPE_DOMAIN (TREE_TYPE (local->field))
	      || !TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (local->field)))))
	{
	  fieldsize = array_size_for_constructor (local->val);
	  /* Given a non-empty initialization, this field had better
	     be last.  Given a flexible array member, the next field
	     on the chain is a TYPE_DECL of the enclosing struct.  */
	  const_tree next = DECL_CHAIN (local->field);
	  gcc_assert (!fieldsize || !next || TREE_CODE (next) != FIELD_DECL);
	  tree size = TYPE_SIZE_UNIT (TREE_TYPE (local->val));
	  gcc_checking_assert (compare_tree_int (size, fieldsize) == 0);
	}
      else
	fieldsize = tree_to_uhwi (DECL_SIZE_UNIT (local->field));
    }
  else
    fieldsize = int_size_in_bytes (TREE_TYPE (local->type));

  /* Output the element's initial value; a null value means zeros.  */
  if (local->val == NULL_TREE)
    assemble_zeros (fieldsize);
  else
    fieldsize = output_constant (local->val, fieldsize, align2,
				 local->reverse, false);

  /* Count its size.  */
  local->total_bytes += fieldsize;
}
5294
/* Helper for output_constructor.  From the LOCAL state, output an element
   that is a true bitfield or part of an outer one.  BIT_OFFSET is the offset
   from the start of a possibly ongoing outer byte buffer.  */

static void
output_constructor_bitfield (oc_local_state *local, unsigned int bit_offset)
{
  /* Bit size of this element.  */
  HOST_WIDE_INT ebitsize
    = (local->field
       ? tree_to_uhwi (DECL_SIZE (local->field))
       : tree_to_uhwi (TYPE_SIZE (TREE_TYPE (local->type))));

  /* Relative index of this element if this is an array component.  */
  HOST_WIDE_INT relative_index
    = (!local->field
       ? (local->index
	  ? (tree_to_shwi (local->index)
	     - tree_to_shwi (local->min_index))
	  : local->last_relative_index + 1)
       : 0);

  /* Bit position of this element from the start of the containing
     constructor.  */
  HOST_WIDE_INT constructor_relative_ebitpos
      = (local->field
	 ? int_bit_position (local->field)
	 : ebitsize * relative_index);

  /* Bit position of this element from the start of a possibly ongoing
     outer byte buffer.  */
  HOST_WIDE_INT byte_relative_ebitpos
    = bit_offset + constructor_relative_ebitpos;

  /* From the start of a possibly ongoing outer byte buffer, offsets to
     the first bit of this element and to the first bit past the end of
     this element.  */
  HOST_WIDE_INT next_offset = byte_relative_ebitpos;
  HOST_WIDE_INT end_offset = byte_relative_ebitpos + ebitsize;

  local->last_relative_index = relative_index;

  /* A missing value initializes the bitfield to zero.  */
  if (local->val == NULL_TREE)
    local->val = integer_zero_node;

  while (TREE_CODE (local->val) == VIEW_CONVERT_EXPR
	 || TREE_CODE (local->val) == NON_LVALUE_EXPR)
    local->val = TREE_OPERAND (local->val, 0);

  if (TREE_CODE (local->val) != INTEGER_CST
      && TREE_CODE (local->val) != CONSTRUCTOR)
    {
      error ("invalid initial value for member %qE", DECL_NAME (local->field));
      return;
    }

  /* If this field does not start in this (or next) byte, skip some bytes.  */
  if (next_offset / BITS_PER_UNIT != local->total_bytes)
    {
      /* Output remnant of any bit field in previous bytes.  */
      if (local->byte_buffer_in_use)
	{
	  assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
	  local->total_bytes++;
	  local->byte_buffer_in_use = false;
	}

      /* If still not at proper byte, advance to there.  */
      if (next_offset / BITS_PER_UNIT != local->total_bytes)
	{
	  gcc_assert (next_offset / BITS_PER_UNIT >= local->total_bytes);
	  assemble_zeros (next_offset / BITS_PER_UNIT - local->total_bytes);
	  local->total_bytes = next_offset / BITS_PER_UNIT;
	}
    }

  /* Set up the buffer if necessary.  */
  if (!local->byte_buffer_in_use)
    {
      local->byte = 0;
      if (ebitsize > 0)
	local->byte_buffer_in_use = true;
    }

  /* If this is nested constructor, recurse passing the bit offset and the
     pending data, then retrieve the new pending data afterwards.  */
  if (TREE_CODE (local->val) == CONSTRUCTOR)
    {
      oc_outer_state temp_state;
      temp_state.bit_offset = next_offset % BITS_PER_UNIT;
      temp_state.byte = local->byte;
      local->total_bytes
	+= output_constructor (local->val, 0, 0, local->reverse, &temp_state);
      local->byte = temp_state.byte;
      return;
    }

  /* Otherwise, we must split the element into pieces that fall within
     separate bytes, and combine each byte with previous or following
     bit-fields.  */
  while (next_offset < end_offset)
    {
      int this_time;
      int shift;
      unsigned HOST_WIDE_INT value;
      HOST_WIDE_INT next_byte = next_offset / BITS_PER_UNIT;
      HOST_WIDE_INT next_bit = next_offset % BITS_PER_UNIT;

      /* Advance from byte to byte within this element when necessary.  */
      while (next_byte != local->total_bytes)
	{
	  assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
	  local->total_bytes++;
	  local->byte = 0;
	}

      /* Number of bits we can process at once (all part of the same byte).  */
      this_time = MIN (end_offset - next_offset, BITS_PER_UNIT - next_bit);
      if (local->reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
	{
	  /* For big-endian data, take the most significant bits (of the
	     bits that are significant) first and put them into bytes from
	     the most significant end.  */
	  shift = end_offset - next_offset - this_time;

	  /* Don't try to take a bunch of bits that cross
	     the word boundary in the INTEGER_CST.  We can
	     only select bits from one element.  */
	  if ((shift / HOST_BITS_PER_WIDE_INT)
	      != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
	    {
	      const int end = shift + this_time - 1;
	      shift = end & -HOST_BITS_PER_WIDE_INT;
	      this_time = end - shift + 1;
	    }

	  /* Now get the bits we want to insert.  */
	  value = wi::extract_uhwi (wi::to_widest (local->val),
				    shift, this_time);

	  /* Get the result.  This works only when:
	     1 <= this_time <= HOST_BITS_PER_WIDE_INT.  */
	  local->byte |= value << (BITS_PER_UNIT - this_time - next_bit);
	}
      else
	{
	  /* On little-endian machines, take the least significant bits of
	     the value first and pack them starting at the least significant
	     bits of the bytes.  */
	  shift = next_offset - byte_relative_ebitpos;

	  /* Don't try to take a bunch of bits that cross
	     the word boundary in the INTEGER_CST.  We can
	     only select bits from one element.  */
	  if ((shift / HOST_BITS_PER_WIDE_INT)
	      != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
	    this_time
	      = HOST_BITS_PER_WIDE_INT - (shift & (HOST_BITS_PER_WIDE_INT - 1));

	  /* Now get the bits we want to insert.  */
	  value = wi::extract_uhwi (wi::to_widest (local->val),
				    shift, this_time);

	  /* Get the result.  This works only when:
	     1 <= this_time <= HOST_BITS_PER_WIDE_INT.  */
	  local->byte |= value << next_bit;
	}

      next_offset += this_time;
      local->byte_buffer_in_use = true;
    }
}
5467
/* Subroutine of output_constant, used for CONSTRUCTORs (aggregate constants).
   Generate at least SIZE bytes, padding if necessary.  OUTER designates the
   caller output state of relevance in recursive invocations.  Returns the
   number of bytes emitted.  */

static unsigned HOST_WIDE_INT
output_constructor (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
		    bool reverse, oc_outer_state *outer)
{
  unsigned HOST_WIDE_INT cnt;
  constructor_elt *ce;
  oc_local_state local;

  /* Setup our local state to communicate with helpers.  */
  local.exp = exp;
  local.type = TREE_TYPE (exp);
  local.size = size;
  local.align = align;
  if (TREE_CODE (local.type) == ARRAY_TYPE && TYPE_DOMAIN (local.type))
    local.min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (local.type));
  else
    local.min_index = integer_zero_node;

  local.total_bytes = 0;
  /* When invoked recursively for an outer bitfield, continue filling the
     caller's pending byte.  */
  local.byte_buffer_in_use = outer != NULL;
  local.byte = outer ? outer->byte : 0;
  local.last_relative_index = -1;
  /* The storage order is specified for every aggregate type.  */
  if (AGGREGATE_TYPE_P (local.type))
    local.reverse = TYPE_REVERSE_STORAGE_ORDER (local.type);
  else
    local.reverse = reverse;

  gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_UNIT);

  /* As CE goes through the elements of the constant, FIELD goes through the
     structure fields if the constant is a structure.  If the constant is a
     union, we override this by getting the field from the TREE_LIST element.
     But the constant could also be an array.  Then FIELD is zero.

     There is always a maximum of one element in the chain LINK for unions
     (even if the initializer in a source program incorrectly contains
     more than one).  */

  if (TREE_CODE (local.type) == RECORD_TYPE)
    local.field = TYPE_FIELDS (local.type);
  else
    local.field = NULL_TREE;

  for (cnt = 0;
       vec_safe_iterate (CONSTRUCTOR_ELTS (exp), cnt, &ce);
       cnt++, local.field = local.field ? DECL_CHAIN (local.field) : 0)
    {
      local.val = ce->value;
      local.index = NULL_TREE;

      /* The element in a union constructor specifies the proper field
	 or index.  */
      if (RECORD_OR_UNION_TYPE_P (local.type) && ce->index != NULL_TREE)
	local.field = ce->index;

      else if (TREE_CODE (local.type) == ARRAY_TYPE)
	local.index = ce->index;

      /* Annotate the assembly output with the field name if requested.  */
      if (local.field && flag_verbose_asm)
	fprintf (asm_out_file, "%s %s:\n",
		 ASM_COMMENT_START,
		 DECL_NAME (local.field)
		 ? IDENTIFIER_POINTER (DECL_NAME (local.field))
		 : "<anonymous>");

      /* Eliminate the marker that makes a cast not be an lvalue.  */
      if (local.val != NULL_TREE)
	STRIP_NOPS (local.val);

      /* Output the current element, using the appropriate helper ...  */

      /* For an array slice not part of an outer bitfield.  */
      if (!outer
	  && local.index != NULL_TREE
	  && TREE_CODE (local.index) == RANGE_EXPR)
	output_constructor_array_range (&local);

      /* For a field that is neither a true bitfield nor part of an outer one,
	 known to be at least byte aligned and multiple-of-bytes long.  */
      else if (!outer
	       && (local.field == NULL_TREE
		   || !CONSTRUCTOR_BITFIELD_P (local.field)))
	output_constructor_regular_field (&local);

      /* For a true bitfield or part of an outer one.  Only INTEGER_CSTs are
	 supported for scalar fields, so we may need to convert first.  */
      else
	{
	  if (TREE_CODE (local.val) == REAL_CST)
	    local.val
	      = fold_unary (VIEW_CONVERT_EXPR,
			    build_nonstandard_integer_type
			    (TYPE_PRECISION (TREE_TYPE (local.val)), 0),
			    local.val);
	  output_constructor_bitfield (&local, outer ? outer->bit_offset : 0);
	}
    }

  /* If we are not at toplevel, save the pending data for our caller.
     Otherwise output the pending data and padding zeros as needed.  */
  if (outer)
    outer->byte = local.byte;
  else
    {
      if (local.byte_buffer_in_use)
	{
	  assemble_integer (GEN_INT (local.byte), 1, BITS_PER_UNIT, 1);
	  local.total_bytes++;
	}

      if ((unsigned HOST_WIDE_INT)local.total_bytes < local.size)
	{
	  assemble_zeros (local.size - local.total_bytes);
	  local.total_bytes = local.size;
	}
    }

  return local.total_bytes;
}
5592
5593 /* Mark DECL as weak. */
5594
5595 static void
mark_weak(tree decl)5596 mark_weak (tree decl)
5597 {
5598 if (DECL_WEAK (decl))
5599 return;
5600
5601 struct symtab_node *n = symtab_node::get (decl);
5602 if (n && n->refuse_visibility_changes)
5603 error ("%+qD declared weak after being used", decl);
5604 DECL_WEAK (decl) = 1;
5605
5606 if (DECL_RTL_SET_P (decl)
5607 && MEM_P (DECL_RTL (decl))
5608 && XEXP (DECL_RTL (decl), 0)
5609 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == SYMBOL_REF)
5610 SYMBOL_REF_WEAK (XEXP (DECL_RTL (decl), 0)) = 1;
5611 }
5612
5613 /* Merge weak status between NEWDECL and OLDDECL. */
5614
5615 void
merge_weak(tree newdecl,tree olddecl)5616 merge_weak (tree newdecl, tree olddecl)
5617 {
5618 if (DECL_WEAK (newdecl) == DECL_WEAK (olddecl))
5619 {
5620 if (DECL_WEAK (newdecl) && TARGET_SUPPORTS_WEAK)
5621 {
5622 tree *pwd;
5623 /* We put the NEWDECL on the weak_decls list at some point
5624 and OLDDECL as well. Keep just OLDDECL on the list. */
5625 for (pwd = &weak_decls; *pwd; pwd = &TREE_CHAIN (*pwd))
5626 if (TREE_VALUE (*pwd) == newdecl)
5627 {
5628 *pwd = TREE_CHAIN (*pwd);
5629 break;
5630 }
5631 }
5632 return;
5633 }
5634
5635 if (DECL_WEAK (newdecl))
5636 {
5637 tree wd;
5638
5639 /* NEWDECL is weak, but OLDDECL is not. */
5640
5641 /* If we already output the OLDDECL, we're in trouble; we can't
5642 go back and make it weak. This should never happen in
5643 unit-at-a-time compilation. */
5644 gcc_assert (!TREE_ASM_WRITTEN (olddecl));
5645
5646 /* If we've already generated rtl referencing OLDDECL, we may
5647 have done so in a way that will not function properly with
5648 a weak symbol. Again in unit-at-a-time this should be
5649 impossible. */
5650 gcc_assert (!TREE_USED (olddecl)
5651 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (olddecl)));
5652
5653 /* PR 49899: You cannot convert a static function into a weak, public function. */
5654 if (! TREE_PUBLIC (olddecl) && TREE_PUBLIC (newdecl))
5655 error ("weak declaration of %q+D being applied to a already "
5656 "existing, static definition", newdecl);
5657
5658 if (TARGET_SUPPORTS_WEAK)
5659 {
5660 /* We put the NEWDECL on the weak_decls list at some point.
5661 Replace it with the OLDDECL. */
5662 for (wd = weak_decls; wd; wd = TREE_CHAIN (wd))
5663 if (TREE_VALUE (wd) == newdecl)
5664 {
5665 TREE_VALUE (wd) = olddecl;
5666 break;
5667 }
5668 /* We may not find the entry on the list. If NEWDECL is a
5669 weak alias, then we will have already called
5670 globalize_decl to remove the entry; in that case, we do
5671 not need to do anything. */
5672 }
5673
5674 /* Make the OLDDECL weak; it's OLDDECL that we'll be keeping. */
5675 mark_weak (olddecl);
5676 }
5677 else
5678 /* OLDDECL was weak, but NEWDECL was not explicitly marked as
5679 weak. Just update NEWDECL to indicate that it's weak too. */
5680 mark_weak (newdecl);
5681 }
5682
5683 /* Declare DECL to be a weak symbol. */
5684
5685 void
declare_weak(tree decl)5686 declare_weak (tree decl)
5687 {
5688 gcc_assert (TREE_CODE (decl) != FUNCTION_DECL || !TREE_ASM_WRITTEN (decl));
5689 if (! TREE_PUBLIC (decl))
5690 {
5691 error ("weak declaration of %q+D must be public", decl);
5692 return;
5693 }
5694 else if (!TARGET_SUPPORTS_WEAK)
5695 warning (0, "weak declaration of %q+D not supported", decl);
5696
5697 mark_weak (decl);
5698 if (!lookup_attribute ("weak", DECL_ATTRIBUTES (decl)))
5699 DECL_ATTRIBUTES (decl)
5700 = tree_cons (get_identifier ("weak"), NULL, DECL_ATTRIBUTES (decl));
5701 }
5702
/* Emit the assembly that marks DECL weak, using whichever of the
   ASM_WEAKEN_DECL / ASM_WEAKEN_LABEL / ASM_OUTPUT_WEAK_ALIAS target
   macros is available.  Does nothing for an unused DECL.  */

static void
weak_finish_1 (tree decl)
{
#if defined (ASM_WEAKEN_DECL) || defined (ASM_WEAKEN_LABEL)
  const char *const name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
#endif

  if (! TREE_USED (decl))
    return;

#ifdef ASM_WEAKEN_DECL
  ASM_WEAKEN_DECL (asm_out_file, decl, name, NULL);
#else
#ifdef ASM_WEAKEN_LABEL
  ASM_WEAKEN_LABEL (asm_out_file, name);
#else
#ifdef ASM_OUTPUT_WEAK_ALIAS
  {
    /* A target that only supports weak aliases cannot emit a plain weak
       definition; warn about it once and bail out.  */
    static bool warn_once = 0;
    if (! warn_once)
      {
	warning (0, "only weak aliases are supported in this configuration");
	warn_once = 1;
      }
    return;
  }
#endif
#endif
#endif
}
5733
5734 /* Fiven an assembly name, find the decl it is associated with. */
5735 static tree
find_decl(tree target)5736 find_decl (tree target)
5737 {
5738 symtab_node *node = symtab_node::get_for_asmname (target);
5739 if (node)
5740 return node->decl;
5741 return NULL_TREE;
5742 }
5743
/* This TREE_LIST contains weakref targets.  TREE_PURPOSE is the alias
   decl and TREE_VALUE the target's assembler name; entries are queued
   by do_assemble_alias and processed by weak_finish.  */

static GTY(()) tree weakref_targets;
5747
/* Emit any pending weak declarations.  First resolve the queued weakref
   targets (emitting .weak for targets that are otherwise unreferenced),
   pruning the weak_decls / weakref_targets lists as we go so nothing is
   emitted twice, then emit a weak directive for each remaining pending
   weak decl.  */

void
weak_finish (void)
{
  tree t;

  for (t = weakref_targets; t; t = TREE_CHAIN (t))
    {
      tree alias_decl = TREE_PURPOSE (t);
      tree target = ultimate_transparent_alias_target (&TREE_VALUE (t));

      if (! TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (alias_decl))
          || TREE_SYMBOL_REFERENCED (target))
        /* Remove alias_decl from the weak list, but leave entries for
           the target alone.  */
        target = NULL_TREE;
#ifndef ASM_OUTPUT_WEAKREF
      else if (! TREE_SYMBOL_REFERENCED (target))
        {
          /* Use ASM_WEAKEN_LABEL only if ASM_WEAKEN_DECL is not
             defined, otherwise we and weak_finish_1 would use
             different macros.  */
# if defined ASM_WEAKEN_LABEL && ! defined ASM_WEAKEN_DECL
          ASM_WEAKEN_LABEL (asm_out_file, IDENTIFIER_POINTER (target));
# else
          tree decl = find_decl (target);

          if (! decl)
            {
              /* No decl exists for the target; fabricate a public
                 external one modeled on the alias so weak_finish_1
                 has something to emit.  */
              decl = build_decl (DECL_SOURCE_LOCATION (alias_decl),
                                 TREE_CODE (alias_decl), target,
                                 TREE_TYPE (alias_decl));

              DECL_EXTERNAL (decl) = 1;
              TREE_PUBLIC (decl) = 1;
              DECL_ARTIFICIAL (decl) = 1;
              TREE_NOTHROW (decl) = TREE_NOTHROW (alias_decl);
              TREE_USED (decl) = 1;
            }

          weak_finish_1 (decl);
# endif
        }
#endif

      {
        tree *p;
        tree t2;

        /* Remove the alias and the target from the pending weak list
           so that we do not emit any .weak directives for the former,
           nor multiple .weak directives for the latter.  */
        for (p = &weak_decls; (t2 = *p) ; )
          {
            if (TREE_VALUE (t2) == alias_decl
                || target == DECL_ASSEMBLER_NAME (TREE_VALUE (t2)))
              *p = TREE_CHAIN (t2);
            else
              p = &TREE_CHAIN (t2);
          }

        /* Remove other weakrefs to the same target, to speed things up.  */
        for (p = &TREE_CHAIN (t); (t2 = *p) ; )
          {
            if (target == ultimate_transparent_alias_target (&TREE_VALUE (t2)))
              *p = TREE_CHAIN (t2);
            else
              p = &TREE_CHAIN (t2);
          }
      }
    }

  /* Emit a weak directive for each remaining pending weak decl.  */
  for (t = weak_decls; t; t = TREE_CHAIN (t))
    {
      tree decl = TREE_VALUE (t);

      weak_finish_1 (decl);
    }
}
5828
/* Emit the assembly bits to indicate that DECL is globally visible.
   For weak decls on targets with weaken support, the weaken directive
   itself suffices (we return without calling the globalize hook), and
   the pending weak/weakref lists are pruned so no duplicate directives
   are emitted later by weak_finish.  */

static void
globalize_decl (tree decl)
{

#if defined (ASM_WEAKEN_LABEL) || defined (ASM_WEAKEN_DECL)
  if (DECL_WEAK (decl))
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
      tree *p, t;

#ifdef ASM_WEAKEN_DECL
      ASM_WEAKEN_DECL (asm_out_file, decl, name, 0);
#else
      ASM_WEAKEN_LABEL (asm_out_file, name);
#endif

      /* Remove this function from the pending weak list so that
         we do not emit multiple .weak directives for it.  */
      for (p = &weak_decls; (t = *p) ; )
        {
          if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t)))
            *p = TREE_CHAIN (t);
          else
            p = &TREE_CHAIN (t);
        }

      /* Remove weakrefs to the same target from the pending weakref
         list, for the same reason.  */
      for (p = &weakref_targets; (t = *p) ; )
        {
          if (DECL_ASSEMBLER_NAME (decl)
              == ultimate_transparent_alias_target (&TREE_VALUE (t)))
            *p = TREE_CHAIN (t);
          else
            p = &TREE_CHAIN (t);
        }

      return;
    }
#endif

  /* Non-weak (or no weaken support): let the target emit .globl etc.  */
  targetm.asm_out.globalize_decl_name (asm_out_file, decl);
}
5874
/* (decl, target) alias pairs queued by assemble_alias when the target
   has not been emitted yet; emission is deferred via do_assemble_alias.  */
vec<alias_pair, va_gc> *alias_pairs;
5876
/* Output the assembler code for a define (equate) using ASM_OUTPUT_DEF
   or ASM_OUTPUT_DEF_FROM_DECLS.  The function defines the symbol whose
   tree node is DECL to have the value of the tree node TARGET.
   Weakrefs are handled separately (ASM_OUTPUT_WEAKREF or an error on
   targets without weak support); targets without ASM_OUTPUT_DEF fall
   back to emitting a weak alias.  */

void
do_assemble_alias (tree decl, tree target)
{
  tree id;

  /* Emulated TLS had better not get this var.  */
  gcc_assert (!(!targetm.have_tls
                && VAR_P (decl)
                && DECL_THREAD_LOCAL_P (decl)));

  /* Emit each alias at most once.  */
  if (TREE_ASM_WRITTEN (decl))
    return;

  id = DECL_ASSEMBLER_NAME (decl);
  ultimate_transparent_alias_target (&id);
  ultimate_transparent_alias_target (&target);

  /* We must force creation of DECL_RTL for debug info generation, even though
     we don't use it here.  */
  make_decl_rtl (decl);

  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (DECL_ASSEMBLER_NAME (decl)) = 1;
  TREE_ASM_WRITTEN (id) = 1;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    {
      /* Remember the target so weak_finish can deal with it if it is
         never otherwise referenced.  */
      if (!TREE_SYMBOL_REFERENCED (target))
        weakref_targets = tree_cons (decl, target, weakref_targets);

#ifdef ASM_OUTPUT_WEAKREF
      ASM_OUTPUT_WEAKREF (asm_out_file, decl,
                          IDENTIFIER_POINTER (id),
                          IDENTIFIER_POINTER (target));
#else
      if (!TARGET_SUPPORTS_WEAK)
        {
          error_at (DECL_SOURCE_LOCATION (decl),
                    "weakref is not supported in this configuration");
          return;
        }
#endif
      return;
    }

#ifdef ASM_OUTPUT_DEF
  tree orig_decl = decl;

  /* Make name accessible from other files, if appropriate.  */

  if (TREE_PUBLIC (decl) || TREE_PUBLIC (orig_decl))
    {
      globalize_decl (decl);
      maybe_assemble_visibility (decl);
    }
  if (TREE_CODE (decl) == FUNCTION_DECL
      && cgraph_node::get (decl)->ifunc_resolver)
    {
      /* Emit the ifunc type directive when the target supports ifunc,
         otherwise diagnose.  */
#if defined (ASM_OUTPUT_TYPE_DIRECTIVE)
      if (targetm.has_ifunc_p ())
        ASM_OUTPUT_TYPE_DIRECTIVE
          (asm_out_file, IDENTIFIER_POINTER (id),
           IFUNC_ASM_TYPE);
      else
#endif
        error_at (DECL_SOURCE_LOCATION (decl),
                  "%qs is not supported on this target", "ifunc");
    }

# ifdef ASM_OUTPUT_DEF_FROM_DECLS
  ASM_OUTPUT_DEF_FROM_DECLS (asm_out_file, decl, target);
# else
  ASM_OUTPUT_DEF (asm_out_file,
                  IDENTIFIER_POINTER (id),
                  IDENTIFIER_POINTER (target));
# endif
#elif defined (ASM_OUTPUT_WEAK_ALIAS) || defined (ASM_WEAKEN_DECL)
  {
    /* No general equate support: fall back to a weak alias.  */
    const char *name;
    tree *p, t;

    name = IDENTIFIER_POINTER (id);
# ifdef ASM_WEAKEN_DECL
    ASM_WEAKEN_DECL (asm_out_file, decl, name, IDENTIFIER_POINTER (target));
# else
    ASM_OUTPUT_WEAK_ALIAS (asm_out_file, name, IDENTIFIER_POINTER (target));
# endif
    /* Remove this function from the pending weak list so that
       we do not emit multiple .weak directives for it.  */
    for (p = &weak_decls; (t = *p) ; )
      if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t))
          || id == DECL_ASSEMBLER_NAME (TREE_VALUE (t)))
        *p = TREE_CHAIN (t);
      else
        p = &TREE_CHAIN (t);

    /* Remove weakrefs to the same target from the pending weakref
       list, for the same reason.  */
    for (p = &weakref_targets; (t = *p) ; )
      {
        if (id == ultimate_transparent_alias_target (&TREE_VALUE (t)))
          *p = TREE_CHAIN (t);
        else
          p = &TREE_CHAIN (t);
      }
  }
#endif
}
5989
/* Output a .symver directive binding DECL's assembler name to the
   versioned symbol name TARGET.  Errors out on non-ELF platforms
   (those without ASM_OUTPUT_SYMVER_DIRECTIVE).  */

void
do_assemble_symver (tree decl, tree target)
{
  tree id = DECL_ASSEMBLER_NAME (decl);
  /* Resolve transparent aliases on both sides before emitting.  */
  ultimate_transparent_alias_target (&id);
  ultimate_transparent_alias_target (&target);
#ifdef ASM_OUTPUT_SYMVER_DIRECTIVE
  ASM_OUTPUT_SYMVER_DIRECTIVE (asm_out_file,
                               IDENTIFIER_POINTER (target),
                               IDENTIFIER_POINTER (id));
#else
  error ("symver is only supported on ELF platforms");
#endif
}
6006
/* Emit an assembler directive to make the symbol for DECL an alias to
   the symbol for TARGET.  Validates weakrefs (must be static, must not
   target themselves) and diagnoses configurations that cannot emit the
   requested kind of alias; emission itself may be deferred via
   alias_pairs until the target has been written.  */

void
assemble_alias (tree decl, tree target)
{
  tree target_decl;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    {
      /* A weakref must have static linkage and must not (transitively)
         resolve to itself.  */
      tree alias = DECL_ASSEMBLER_NAME (decl);

      ultimate_transparent_alias_target (&target);

      if (alias == target)
        error ("%qs symbol %q+D ultimately targets itself", "weakref", decl);
      if (TREE_PUBLIC (decl))
        error ("%qs symbol %q+D must have static linkage", "weakref", decl);
    }
  else
    {
      /* Diagnose configurations with no (or only weak) alias support.  */
#if !defined (ASM_OUTPUT_DEF)
# if !defined(ASM_OUTPUT_WEAK_ALIAS) && !defined (ASM_WEAKEN_DECL)
      error_at (DECL_SOURCE_LOCATION (decl),
                "alias definitions not supported in this configuration");
      TREE_ASM_WRITTEN (decl) = 1;
      return;
# else
      if (!DECL_WEAK (decl))
        {
          /* NB: ifunc_resolver isn't set when an error is detected.  */
          if (TREE_CODE (decl) == FUNCTION_DECL
              && lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
            error_at (DECL_SOURCE_LOCATION (decl),
                      "%qs is not supported in this configuration", "ifunc");
          else
            error_at (DECL_SOURCE_LOCATION (decl),
                      "only weak aliases are supported in this configuration");
          TREE_ASM_WRITTEN (decl) = 1;
          return;
        }
# endif
#endif
    }
  TREE_USED (decl) = 1;

  /* Allow aliases to aliases.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    cgraph_node::get_create (decl)->alias = true;
  else
    varpool_node::get_create (decl)->alias = true;

  /* If the target has already been emitted, we don't have to queue the
     alias.  This saves a tad of memory.  */
  if (symtab->global_info_ready)
    target_decl = find_decl (target);
  else
    target_decl= NULL;
  if ((target_decl && TREE_ASM_WRITTEN (target_decl))
      || symtab->state >= EXPANSION)
    do_assemble_alias (decl, target);
  else
    {
      /* Defer until the target is written; see alias_pairs.  */
      alias_pair p = {decl, target};
      vec_safe_push (alias_pairs, p);
    }
}
6074
/* Record and output a table of translations from original function
   to its transaction aware clone.  Note that tm_pure functions are
   considered to be their own clone.  */

struct tm_clone_hasher : ggc_cache_ptr_hash<tree_map>
{
  /* Delegate hashing and equality to the generic tree_map helpers.  */
  static hashval_t hash (tree_map *m) { return tree_map_hash (m); }
  static bool equal (tree_map *a, tree_map *b) { return tree_map_eq (a, b); }

  /* Keep a cache entry only while its key tree is still GC-marked.  */
  static int
  keep_cache_entry (tree_map *&e)
  {
    return ggc_marked_p (e->base.from);
  }
};
6090
/* Map from original function to its transactional clone; created lazily
   by record_tm_clone_pair and drained by finish_tm_clone_pairs.  */
static GTY((cache)) hash_table<tm_clone_hasher> *tm_clone_hash;
6092
6093 void
record_tm_clone_pair(tree o,tree n)6094 record_tm_clone_pair (tree o, tree n)
6095 {
6096 struct tree_map **slot, *h;
6097
6098 if (tm_clone_hash == NULL)
6099 tm_clone_hash = hash_table<tm_clone_hasher>::create_ggc (32);
6100
6101 h = ggc_alloc<tree_map> ();
6102 h->hash = htab_hash_pointer (o);
6103 h->base.from = o;
6104 h->to = n;
6105
6106 slot = tm_clone_hash->find_slot_with_hash (h, h->hash, INSERT);
6107 *slot = h;
6108 }
6109
6110 tree
get_tm_clone_pair(tree o)6111 get_tm_clone_pair (tree o)
6112 {
6113 if (tm_clone_hash)
6114 {
6115 struct tree_map *h, in;
6116
6117 in.base.from = o;
6118 in.hash = htab_hash_pointer (o);
6119 h = tm_clone_hash->find_with_hash (&in, in.hash);
6120 if (h)
6121 return h->to;
6122 }
6123 return NULL_TREE;
6124 }
6125
/* One (original, clone) entry for the .tm_clone_table; UID is the
   DECL_UID of FROM, used for deterministic sorting.  */
struct tm_alias_pair
{
  unsigned int uid;   /* DECL_UID of FROM (sort key).  */
  tree from;          /* Original function.  */
  tree to;            /* Its transactional clone.  */
};
6132
6133
/* Dump the actual pairs to the .tm_clone_table section.  Pairs whose
   original or clone has no definition are skipped; the section switch
   happens lazily, only when at least one pair is emitted.  */

static void
dump_tm_clone_pairs (vec<tm_alias_pair> tm_alias_pairs)
{
  unsigned i;
  tm_alias_pair *p;
  bool switched = false;

  FOR_EACH_VEC_ELT (tm_alias_pairs, i, p)
    {
      tree src = p->from;
      tree dst = p->to;
      struct cgraph_node *src_n = cgraph_node::get (src);
      struct cgraph_node *dst_n = cgraph_node::get (dst);

      /* The function ipa_tm_create_version() marks the clone as needed if
         the original function was needed.  But we also mark the clone as
         needed if we ever called the clone indirectly through
         TM_GETTMCLONE.  If neither of these are true, we didn't generate
         a clone, and we didn't call it indirectly... no sense keeping it
         in the clone table.  */
      if (!dst_n || !dst_n->definition)
        continue;

      /* This covers the case where we have optimized the original
         function away, and only access the transactional clone.  */
      if (!src_n || !src_n->definition)
        continue;

      if (!switched)
        {
          switch_to_section (targetm.asm_out.tm_clone_table_section ());
          assemble_align (POINTER_SIZE);
          switched = true;
        }

      /* Each table entry is a pointer pair: original, then clone.  */
      assemble_integer (XEXP (DECL_RTL (src), 0),
                        POINTER_SIZE_UNITS, POINTER_SIZE, 1);
      assemble_integer (XEXP (DECL_RTL (dst), 0),
                        POINTER_SIZE_UNITS, POINTER_SIZE, 1);
    }
}
6177
/* Provide a default for the tm_clone_table section.  This is the
   default implementation of targetm.asm_out.tm_clone_table_section
   used by dump_tm_clone_pairs.  */

section *
default_clone_table_section (void)
{
  return get_named_section (NULL, ".tm_clone_table", 3);
}
6185
/* Helper comparison function for qsorting tm_alias_pair entries by the
   DECL_UID stored in their uid field.  */
6188
6189 static int
tm_alias_pair_cmp(const void * x,const void * y)6190 tm_alias_pair_cmp (const void *x, const void *y)
6191 {
6192 const tm_alias_pair *p1 = (const tm_alias_pair *) x;
6193 const tm_alias_pair *p2 = (const tm_alias_pair *) y;
6194 if (p1->uid < p2->uid)
6195 return -1;
6196 if (p1->uid > p2->uid)
6197 return 1;
6198 return 0;
6199 }
6200
/* Emit the accumulated (original, clone) pairs to the .tm_clone_table
   section and release the clone table.  No-op if nothing was recorded.  */

void
finish_tm_clone_pairs (void)
{
  vec<tm_alias_pair> tm_alias_pairs = vNULL;

  if (tm_clone_hash == NULL)
    return;

  /* We need a deterministic order for the .tm_clone_table, otherwise
     we will get bootstrap comparison failures, so dump the hash table
     to a vector, sort it, and dump the vector.  */

  /* Dump the hashtable to a vector.  */
  tree_map *map;
  hash_table<tm_clone_hasher>::iterator iter;
  FOR_EACH_HASH_TABLE_ELEMENT (*tm_clone_hash, map, tree_map *, iter)
    {
      tm_alias_pair p = {DECL_UID (map->base.from), map->base.from, map->to};
      tm_alias_pairs.safe_push (p);
    }
  /* Sort it.  */
  tm_alias_pairs.qsort (tm_alias_pair_cmp);

  /* Dump it.  */
  dump_tm_clone_pairs (tm_alias_pairs);

  /* The table is only needed once per compilation; drop it.  */
  tm_clone_hash->empty ();
  tm_clone_hash = NULL;
  tm_alias_pairs.release ();
}
6231
6232
/* Emit an assembler directive to set symbol for DECL visibility to
   the visibility type VIS, which must not be VISIBILITY_DEFAULT.
   Without GAS .hidden support, warns (once per decl) for user decls.  */

void
default_assemble_visibility (tree decl ATTRIBUTE_UNUSED,
                             int vis ATTRIBUTE_UNUSED)
{
#ifdef HAVE_GAS_HIDDEN
  /* Indexed by VIS; slot 0 (default visibility) takes no directive.  */
  static const char * const visibility_types[] = {
    NULL, "protected", "hidden", "internal"
  };

  const char *name, *type;
  tree id;

  /* Emit the directive for the ultimate transparent-alias target.  */
  id = DECL_ASSEMBLER_NAME (decl);
  ultimate_transparent_alias_target (&id);
  name = IDENTIFIER_POINTER (id);

  type = visibility_types[vis];

  fprintf (asm_out_file, "\t.%s\t", type);
  assemble_name (asm_out_file, name);
  fprintf (asm_out_file, "\n");
#else
  if (!DECL_ARTIFICIAL (decl))
    warning (OPT_Wattributes, "visibility attribute not supported "
             "in this configuration; ignored");
#endif
}
6263
6264 /* A helper function to call assemble_visibility when needed for a decl. */
6265
6266 int
maybe_assemble_visibility(tree decl)6267 maybe_assemble_visibility (tree decl)
6268 {
6269 enum symbol_visibility vis = DECL_VISIBILITY (decl);
6270 if (vis != VISIBILITY_DEFAULT)
6271 {
6272 targetm.asm_out.assemble_visibility (decl, vis);
6273 return 1;
6274 }
6275 else
6276 return 0;
6277 }
6278
6279 /* Returns 1 if the target configuration supports defining public symbols
6280 so that one of them will be chosen at link time instead of generating a
6281 multiply-defined symbol error, whether through the use of weak symbols or
6282 a target-specific mechanism for having duplicates discarded. */
6283
6284 int
supports_one_only(void)6285 supports_one_only (void)
6286 {
6287 if (SUPPORTS_ONE_ONLY)
6288 return 1;
6289 return TARGET_SUPPORTS_WEAK;
6290 }
6291
/* Set up DECL as a public symbol that can be defined in multiple
   translation units without generating a linker error.  Prefers a real
   one-only (COMDAT group) when supported, then a common symbol for
   uninitialized variables, and finally a weak definition.  */

void
make_decl_one_only (tree decl, tree comdat_group)
{
  struct symtab_node *symbol;
  gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));

  TREE_PUBLIC (decl) = 1;

  if (VAR_P (decl))
    symbol = varpool_node::get_create (decl);
  else
    symbol = cgraph_node::get_create (decl);

  if (SUPPORTS_ONE_ONLY)
    {
#ifdef MAKE_DECL_ONE_ONLY
      MAKE_DECL_ONE_ONLY (decl);
#endif
      symbol->set_comdat_group (comdat_group);
    }
  else if (VAR_P (decl)
           && (DECL_INITIAL (decl) == 0
               || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
    /* Variables without a (valid) initializer can use a common symbol.  */
    DECL_COMMON (decl) = 1;
  else
    {
      /* Last resort: a weak definition.  */
      gcc_assert (TARGET_SUPPORTS_WEAK);
      DECL_WEAK (decl) = 1;
    }
}
6325
/* One-time initialization of varasm state: hash tables, the shared
   constant pool, and the standard sections whose asm ops the target
   defines.  Called once per compilation.  */

void
init_varasm_once (void)
{
  /* Hash tables for sections, object blocks and constant descriptors.  */
  section_htab = hash_table<section_hasher>::create_ggc (31);
  object_block_htab = hash_table<object_block_hasher>::create_ggc (31);
  const_desc_htab = hash_table<tree_descriptor_hasher>::create_ggc (1009);

  shared_constant_pool = create_constant_pool ();

  /* Create each standard unnamed section the target provides an asm op
     for; sections whose macro is undefined simply do not exist here.  */
#ifdef TEXT_SECTION_ASM_OP
  text_section = get_unnamed_section (SECTION_CODE, output_section_asm_op,
                                      TEXT_SECTION_ASM_OP);
#endif

#ifdef DATA_SECTION_ASM_OP
  data_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
                                      DATA_SECTION_ASM_OP);
#endif

#ifdef SDATA_SECTION_ASM_OP
  sdata_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
                                       SDATA_SECTION_ASM_OP);
#endif

#ifdef READONLY_DATA_SECTION_ASM_OP
  readonly_data_section = get_unnamed_section (0, output_section_asm_op,
                                               READONLY_DATA_SECTION_ASM_OP);
#endif

#ifdef CTORS_SECTION_ASM_OP
  ctors_section = get_unnamed_section (0, output_section_asm_op,
                                       CTORS_SECTION_ASM_OP);
#endif

#ifdef DTORS_SECTION_ASM_OP
  dtors_section = get_unnamed_section (0, output_section_asm_op,
                                       DTORS_SECTION_ASM_OP);
#endif

#ifdef BSS_SECTION_ASM_OP
  bss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
                                     output_section_asm_op,
                                     BSS_SECTION_ASM_OP);
#endif

#ifdef SBSS_SECTION_ASM_OP
  sbss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
                                      output_section_asm_op,
                                      SBSS_SECTION_ASM_OP);
#endif

  /* Noswitch sections emit their decls directly instead of switching.  */
  tls_comm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
                                           | SECTION_COMMON, emit_tls_common);
  lcomm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
                                        | SECTION_COMMON, emit_local);
  comm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
                                       | SECTION_COMMON, emit_common);

#if defined ASM_OUTPUT_ALIGNED_BSS
  bss_noswitch_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS,
                                               emit_bss);
#endif

  /* Give the target a chance to create/override sections.  */
  targetm.asm_out.init_sections ();

  /* With no read-only data section, constants go in text.  */
  if (readonly_data_section == NULL)
    readonly_data_section = text_section;

#ifdef ASM_OUTPUT_EXTERNAL
  pending_assemble_externals_set = new hash_set<tree>;
#endif
}
6398
6399 enum tls_model
decl_default_tls_model(const_tree decl)6400 decl_default_tls_model (const_tree decl)
6401 {
6402 enum tls_model kind;
6403 bool is_local;
6404
6405 is_local = targetm.binds_local_p (decl);
6406 if (!flag_shlib)
6407 {
6408 if (is_local)
6409 kind = TLS_MODEL_LOCAL_EXEC;
6410 else
6411 kind = TLS_MODEL_INITIAL_EXEC;
6412 }
6413
6414 /* Local dynamic is inefficient when we're not combining the
6415 parts of the address. */
6416 else if (optimize && is_local)
6417 kind = TLS_MODEL_LOCAL_DYNAMIC;
6418 else
6419 kind = TLS_MODEL_GLOBAL_DYNAMIC;
6420 if (kind < flag_tls_default)
6421 kind = flag_tls_default;
6422
6423 return kind;
6424 }
6425
/* Select a set of attributes for section NAME based on the properties
   of DECL and whether or not RELOC indicates that DECL's initializer
   might contain runtime relocations.

   We make the section read-only and executable for a function decl,
   read-only for a const data decl, and writable for a non-const data decl.
   When DECL is NULL, writability is inferred from NAME alone.  */

unsigned int
default_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags;

  if (decl && TREE_CODE (decl) == FUNCTION_DECL)
    flags = SECTION_CODE;
  else if (decl)
    {
      /* Derive writability/RELRO from the decl's section category.  */
      enum section_category category
        = categorize_decl_for_section (decl, reloc);
      if (decl_readonly_section_1 (category))
        flags = 0;
      else if (category == SECCAT_DATA_REL_RO
               || category == SECCAT_DATA_REL_RO_LOCAL)
        flags = SECTION_WRITE | SECTION_RELRO;
      else
        flags = SECTION_WRITE;
    }
  else
    {
      flags = SECTION_WRITE;
      if (strcmp (name, ".data.rel.ro") == 0
          || strcmp (name, ".data.rel.ro.local") == 0)
        flags |= SECTION_RELRO;
    }

  if (decl && DECL_P (decl) && DECL_COMDAT_GROUP (decl))
    flags |= SECTION_LINKONCE;

  if (strcmp (name, ".vtable_map_vars") == 0)
    flags |= SECTION_LINKONCE;

  if (decl && VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
    flags |= SECTION_TLS | SECTION_WRITE;

  /* Recognize the well-known BSS/TLS section names (including their
     linkonce and suffixed variants) by name.  */
  if (strcmp (name, ".bss") == 0
      || strncmp (name, ".bss.", 5) == 0
      || strncmp (name, ".gnu.linkonce.b.", 16) == 0
      || strcmp (name, ".persistent.bss") == 0
      || strcmp (name, ".sbss") == 0
      || strncmp (name, ".sbss.", 6) == 0
      || strncmp (name, ".gnu.linkonce.sb.", 17) == 0)
    flags |= SECTION_BSS;

  if (strcmp (name, ".tdata") == 0
      || strncmp (name, ".tdata.", 7) == 0
      || strncmp (name, ".gnu.linkonce.td.", 17) == 0)
    flags |= SECTION_TLS;

  if (strcmp (name, ".tbss") == 0
      || strncmp (name, ".tbss.", 6) == 0
      || strncmp (name, ".gnu.linkonce.tb.", 17) == 0)
    flags |= SECTION_TLS | SECTION_BSS;

  if (strcmp (name, ".noinit") == 0)
    flags |= SECTION_WRITE | SECTION_BSS | SECTION_NOTYPE;

  /* Various sections have special ELF types that the assembler will
     assign by default based on the name.  They are neither SHT_PROGBITS
     nor SHT_NOBITS, so when changing sections we don't want to print a
     section type (@progbits or @nobits).  Rather than duplicating the
     assembler's knowledge of what those special name patterns are, just
     let the assembler choose the type if we don't know a specific
     reason to set it to something other than the default.  SHT_PROGBITS
     is the default for sections whose name is not specially known to
     the assembler, so it does no harm to leave the choice to the
     assembler when @progbits is the best thing we know to use.  If
     someone is silly enough to emit code or TLS variables to one of
     these sections, then don't handle them specially.

     default_elf_asm_named_section (below) handles the BSS, TLS, ENTSIZE, and
     LINKONCE cases when NOTYPE is not set, so leave those to its logic.  */
  if (!(flags & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE))
      && !(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE)))
    flags |= SECTION_NOTYPE;

  return flags;
}
6512
6513 /* Return true if the target supports some form of global BSS,
6514 either through bss_noswitch_section, or by selecting a BSS
6515 section in TARGET_ASM_SELECT_SECTION. */
6516
6517 bool
have_global_bss_p(void)6518 have_global_bss_p (void)
6519 {
6520 return bss_noswitch_section || targetm.have_switchable_bss_sections;
6521 }
6522
/* Output assembly to switch to section NAME with attribute FLAGS.
   Four variants for common object file formats.  */

void
default_no_named_section (const char *name ATTRIBUTE_UNUSED,
                          unsigned int flags ATTRIBUTE_UNUSED,
                          tree decl ATTRIBUTE_UNUSED)
{
  /* Some object formats don't support named sections at all.  The
     front-end should already have flagged this as an error, so reaching
     this hook is an internal compiler error.  */
  gcc_unreachable ();
}
6535
/* Section-flag character GAS uses for TLS sections; targets may override.  */
#ifndef TLS_SECTION_ASM_FLAG
#define TLS_SECTION_ASM_FLAG 'T'
#endif

/* ELF implementation of the named-section hook: emit a .section
   directive for NAME with the flag string, type (@progbits/@nobits)
   and COMDAT group derived from FLAGS and DECL.  */

void
default_elf_asm_named_section (const char *name, unsigned int flags,
                               tree decl)
{
  /* 11 bytes: worst case is "0x%08x" (10 chars) plus the NUL.  */
  char flagchars[11], *f = flagchars;
  unsigned int numeric_value = 0;

  /* If we have already declared this section, we can use an
     abbreviated form to switch back to it -- unless this section is
     part of a COMDAT groups, in which case GAS requires the full
     declaration every time.  */
  if (!(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
      && (flags & SECTION_DECLARED))
    {
      fprintf (asm_out_file, "\t.section\t%s\n", name);
      return;
    }

  /* If we have a machine specific flag, then use the numeric value to pass
     this on to GAS.  */
  if (targetm.asm_out.elf_flags_numeric (flags, &numeric_value))
    snprintf (f, sizeof (flagchars), "0x%08x", numeric_value);
  else
    {
      /* Build the GAS flag string, one character per section attribute.  */
      if (!(flags & SECTION_DEBUG))
        *f++ = 'a';
#if HAVE_GAS_SECTION_EXCLUDE
      if (flags & SECTION_EXCLUDE)
        *f++ = 'e';
#endif
      if (flags & SECTION_WRITE)
        *f++ = 'w';
      if (flags & SECTION_CODE)
        *f++ = 'x';
      if (flags & SECTION_SMALL)
        *f++ = 's';
      if (flags & SECTION_MERGE)
        *f++ = 'M';
      if (flags & SECTION_STRINGS)
        *f++ = 'S';
      if (flags & SECTION_TLS)
        *f++ = TLS_SECTION_ASM_FLAG;
      if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
        *f++ = 'G';
#ifdef MACH_DEP_SECTION_ASM_FLAG
      if (flags & SECTION_MACH_DEP)
        *f++ = MACH_DEP_SECTION_ASM_FLAG;
#endif
      *f = '\0';
    }

  fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);

  /* default_section_type_flags (above) knows which flags need special
     handling here, and sets NOTYPE when none of these apply so that the
     assembler's logic for default types can apply to user-chosen
     section names.  */
  if (!(flags & SECTION_NOTYPE))
    {
      const char *type;
      const char *format;

      if (flags & SECTION_BSS)
        type = "nobits";
      else
        type = "progbits";

      format = ",@%s";
      /* On platforms that use "@" as the assembly comment character,
         use "%" instead.  */
      if (strcmp (ASM_COMMENT_START, "@") == 0)
        format = ",%%%s";
      fprintf (asm_out_file, format, type);

      /* Mergeable sections carry their entity size after the type.  */
      if (flags & SECTION_ENTSIZE)
        fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
      if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
        {
          if (TREE_CODE (decl) == IDENTIFIER_NODE)
            fprintf (asm_out_file, ",%s,comdat", IDENTIFIER_POINTER (decl));
          else
            fprintf (asm_out_file, ",%s,comdat",
                     IDENTIFIER_POINTER (DECL_COMDAT_GROUP (decl)));
        }
    }

  putc ('\n', asm_out_file);
}
6628
6629 void
default_coff_asm_named_section(const char * name,unsigned int flags,tree decl ATTRIBUTE_UNUSED)6630 default_coff_asm_named_section (const char *name, unsigned int flags,
6631 tree decl ATTRIBUTE_UNUSED)
6632 {
6633 char flagchars[8], *f = flagchars;
6634
6635 if (flags & SECTION_WRITE)
6636 *f++ = 'w';
6637 if (flags & SECTION_CODE)
6638 *f++ = 'x';
6639 *f = '\0';
6640
6641 fprintf (asm_out_file, "\t.section\t%s,\"%s\"\n", name, flagchars);
6642 }
6643
6644 void
default_pe_asm_named_section(const char * name,unsigned int flags,tree decl)6645 default_pe_asm_named_section (const char *name, unsigned int flags,
6646 tree decl)
6647 {
6648 default_coff_asm_named_section (name, flags, decl);
6649
6650 if (flags & SECTION_LINKONCE)
6651 {
6652 /* Functions may have been compiled at various levels of
6653 optimization so we can't use `same_size' here.
6654 Instead, have the linker pick one. */
6655 fprintf (asm_out_file, "\t.linkonce %s\n",
6656 (flags & SECTION_CODE ? "discard" : "same_size"));
6657 }
6658 }
6659
6660 /* The lame default section selector. */
6661
6662 section *
default_select_section(tree decl,int reloc,unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)6663 default_select_section (tree decl, int reloc,
6664 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
6665 {
6666 if (DECL_P (decl))
6667 {
6668 if (decl_readonly_section (decl, reloc))
6669 return readonly_data_section;
6670 }
6671 else if (TREE_CODE (decl) == CONSTRUCTOR)
6672 {
6673 if (! ((flag_pic && reloc)
6674 || !TREE_READONLY (decl)
6675 || TREE_SIDE_EFFECTS (decl)
6676 || !TREE_CONSTANT (decl)))
6677 return readonly_data_section;
6678 }
6679 else if (TREE_CODE (decl) == STRING_CST)
6680 return readonly_data_section;
6681 else if (! (flag_pic && reloc))
6682 return readonly_data_section;
6683
6684 return data_section;
6685 }
6686
/* Classify DECL (a decl or constant) into a section category based on
   writability, constancy, relocations indicated by RELOC, TLS-ness and
   small-data placement.  */

enum section_category
categorize_decl_for_section (const_tree decl, int reloc)
{
  enum section_category ret;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    return SECCAT_TEXT;
  else if (TREE_CODE (decl) == STRING_CST)
    {
      /* ASan-protected strings must not be merged (padding/redzones).  */
      if ((flag_sanitize & SANITIZE_ADDRESS)
          && asan_protect_global (CONST_CAST_TREE (decl)))
        /* or !flag_merge_constants */
        return SECCAT_RODATA;
      else
        return SECCAT_RODATA_MERGE_STR;
    }
  else if (VAR_P (decl))
    {
      tree d = CONST_CAST_TREE (decl);
      if (bss_initializer_p (decl))
        ret = SECCAT_BSS;
      else if (! TREE_READONLY (decl)
               || TREE_SIDE_EFFECTS (decl)
               || (DECL_INITIAL (decl)
                   && ! TREE_CONSTANT (DECL_INITIAL (decl))))
        {
          /* Here the reloc_rw_mask is not testing whether the section should
             be read-only or not, but whether the dynamic link will have to
             do something.  If so, we wish to segregate the data in order to
             minimize cache misses inside the dynamic linker.  */
          if (reloc & targetm.asm_out.reloc_rw_mask ())
            ret = reloc == 1 ? SECCAT_DATA_REL_LOCAL : SECCAT_DATA_REL;
          else
            ret = SECCAT_DATA;
        }
      else if (reloc & targetm.asm_out.reloc_rw_mask ())
        ret = reloc == 1 ? SECCAT_DATA_REL_RO_LOCAL : SECCAT_DATA_REL_RO;
      else if (reloc || flag_merge_constants < 2
               || ((flag_sanitize & SANITIZE_ADDRESS)
                   /* PR 81697: for architectures that use section anchors we
                      need to ignore DECL_RTL_SET_P (decl) for string constants
                      inside this asan_protect_global call because otherwise
                      we'll wrongly put them into SECCAT_RODATA_MERGE_CONST
                      section, set DECL_RTL (decl) later on and add DECL to
                      protected globals via successive asan_protect_global
                      calls.  In this scenario we'll end up with wrong
                      alignment of these strings at runtime and possible ASan
                      false positives.  */
                   && asan_protect_global (d, use_object_blocks_p ()
                                              && use_blocks_for_decl_p (d))))
        /* C and C++ don't allow different variables to share the same
           location.  -fmerge-all-constants allows even that (at the
           expense of not conforming).  */
        ret = SECCAT_RODATA;
      else if (DECL_INITIAL (decl)
               && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST)
        ret = SECCAT_RODATA_MERGE_STR_INIT;
      else
        ret = SECCAT_RODATA_MERGE_CONST;
    }
  else if (TREE_CODE (decl) == CONSTRUCTOR)
    {
      if ((reloc & targetm.asm_out.reloc_rw_mask ())
          || TREE_SIDE_EFFECTS (decl)
          || ! TREE_CONSTANT (decl))
        ret = SECCAT_DATA;
      else
        ret = SECCAT_RODATA;
    }
  else
    ret = SECCAT_RODATA;

  /* There are no read-only thread-local sections.  */
  if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
    {
      /* Note that this would be *just* SECCAT_BSS, except that there's
         no concept of a read-only thread-local-data section.  */
      if (ret == SECCAT_BSS
          || DECL_INITIAL (decl) == NULL
          || (flag_zero_initialized_in_bss
              && initializer_zerop (DECL_INITIAL (decl))))
        ret = SECCAT_TBSS;
      else
        ret = SECCAT_TDATA;
    }

  /* If the target uses small data sections, select it.  */
  else if (targetm.in_small_data_p (decl))
    {
      if (ret == SECCAT_BSS)
        ret = SECCAT_SBSS;
      else if (targetm.have_srodata_section && ret == SECCAT_RODATA)
        ret = SECCAT_SRODATA;
      else
        ret = SECCAT_SDATA;
    }

  return ret;
}
6786
6787 static bool
decl_readonly_section_1(enum section_category category)6788 decl_readonly_section_1 (enum section_category category)
6789 {
6790 switch (category)
6791 {
6792 case SECCAT_RODATA:
6793 case SECCAT_RODATA_MERGE_STR:
6794 case SECCAT_RODATA_MERGE_STR_INIT:
6795 case SECCAT_RODATA_MERGE_CONST:
6796 case SECCAT_SRODATA:
6797 return true;
6798 default:
6799 return false;
6800 }
6801 }
6802
6803 bool
decl_readonly_section(const_tree decl,int reloc)6804 decl_readonly_section (const_tree decl, int reloc)
6805 {
6806 return decl_readonly_section_1 (categorize_decl_for_section (decl, reloc));
6807 }
6808
/* Select a section based on the above categorization.  The default
   implementation of TARGET_ASM_SELECT_SECTION for ELF targets: DECL is
   the decl (or constant) being placed, RELOC the relocation mask
   computed by the caller, and ALIGN the required alignment in bits
   (only used for the mergeable-section cases).  Returns either one of
   the preallocated standard sections or a named section looked up via
   get_named_section.  */

section *
default_elf_select_section (tree decl, int reloc,
			    unsigned HOST_WIDE_INT align)
{
  const char *sname;

  switch (categorize_decl_for_section (decl, reloc))
    {
    case SECCAT_TEXT:
      /* We're not supposed to be called on FUNCTION_DECLs.  */
      gcc_unreachable ();
    case SECCAT_RODATA:
      return readonly_data_section;
    case SECCAT_RODATA_MERGE_STR:
      return mergeable_string_section (decl, align, 0);
    case SECCAT_RODATA_MERGE_STR_INIT:
      return mergeable_string_section (DECL_INITIAL (decl), align, 0);
    case SECCAT_RODATA_MERGE_CONST:
      return mergeable_constant_section (DECL_MODE (decl), align, 0);
    case SECCAT_SRODATA:
      sname = ".sdata2";
      break;
    case SECCAT_DATA:
      return data_section;
    case SECCAT_DATA_REL:
      sname = ".data.rel";
      break;
    case SECCAT_DATA_REL_LOCAL:
      sname = ".data.rel.local";
      break;
    case SECCAT_DATA_REL_RO:
      sname = ".data.rel.ro";
      break;
    case SECCAT_DATA_REL_RO_LOCAL:
      sname = ".data.rel.ro.local";
      break;
    case SECCAT_SDATA:
      sname = ".sdata";
      break;
    case SECCAT_TDATA:
      sname = ".tdata";
      break;
    case SECCAT_BSS:
      /* Decls carrying the "noinit" attribute go to .noinit rather
	 than .bss so they are left untouched at program startup.  */
      if (DECL_P (decl)
	  && lookup_attribute ("noinit", DECL_ATTRIBUTES (decl)) != NULL_TREE)
	{
	  sname = ".noinit";
	  break;
	}

      /* Prefer the preallocated bss_section when the target has one.  */
      if (bss_section)
	return bss_section;
      sname = ".bss";
      break;
    case SECCAT_SBSS:
      sname = ".sbss";
      break;
    case SECCAT_TBSS:
      sname = ".tbss";
      break;
    default:
      gcc_unreachable ();
    }

  return get_named_section (decl, sname, reloc);
}
6877
/* Construct a unique section name based on the decl name and the
   categorization performed above, and record it as DECL's section
   name.  The default implementation of TARGET_ASM_UNIQUE_SECTION.  */

void
default_unique_section (tree decl, int reloc)
{
  /* We only need to use .gnu.linkonce if we don't have COMDAT groups.  */
  bool one_only = DECL_ONE_ONLY (decl) && !HAVE_COMDAT_GROUP;
  const char *prefix, *name, *linkonce;
  char *string;
  tree id;

  /* Pick the section-name prefix for DECL's category; the one_only
     variants are the abbreviated forms used under .gnu.linkonce.  */
  switch (categorize_decl_for_section (decl, reloc))
    {
    case SECCAT_TEXT:
      prefix = one_only ? ".t" : ".text";
      break;
    case SECCAT_RODATA:
    case SECCAT_RODATA_MERGE_STR:
    case SECCAT_RODATA_MERGE_STR_INIT:
    case SECCAT_RODATA_MERGE_CONST:
      prefix = one_only ? ".r" : ".rodata";
      break;
    case SECCAT_SRODATA:
      prefix = one_only ? ".s2" : ".sdata2";
      break;
    case SECCAT_DATA:
      prefix = one_only ? ".d" : ".data";
      break;
    case SECCAT_DATA_REL:
      prefix = one_only ? ".d.rel" : ".data.rel";
      break;
    case SECCAT_DATA_REL_LOCAL:
      prefix = one_only ? ".d.rel.local" : ".data.rel.local";
      break;
    case SECCAT_DATA_REL_RO:
      prefix = one_only ? ".d.rel.ro" : ".data.rel.ro";
      break;
    case SECCAT_DATA_REL_RO_LOCAL:
      prefix = one_only ? ".d.rel.ro.local" : ".data.rel.ro.local";
      break;
    case SECCAT_SDATA:
      prefix = one_only ? ".s" : ".sdata";
      break;
    case SECCAT_BSS:
      prefix = one_only ? ".b" : ".bss";
      break;
    case SECCAT_SBSS:
      prefix = one_only ? ".sb" : ".sbss";
      break;
    case SECCAT_TDATA:
      prefix = one_only ? ".td" : ".tdata";
      break;
    case SECCAT_TBSS:
      prefix = one_only ? ".tb" : ".tbss";
      break;
    default:
      gcc_unreachable ();
    }

  /* Use the assembler name of the (transparent-alias-resolved) decl,
     with any target-specific encoding stripped off.  */
  id = DECL_ASSEMBLER_NAME (decl);
  ultimate_transparent_alias_target (&id);
  name = IDENTIFIER_POINTER (id);
  name = targetm.strip_name_encoding (name);

  /* If we're using one_only, then there needs to be a .gnu.linkonce
     prefix to the section name.  */
  linkonce = one_only ? ".gnu.linkonce" : "";

  string = ACONCAT ((linkonce, prefix, ".", name, NULL));

  set_decl_section_name (decl, string);
}
6951
6952 /* Subroutine of compute_reloc_for_rtx for leaf rtxes. */
6953
6954 static int
compute_reloc_for_rtx_1(const_rtx x)6955 compute_reloc_for_rtx_1 (const_rtx x)
6956 {
6957 switch (GET_CODE (x))
6958 {
6959 case SYMBOL_REF:
6960 return SYMBOL_REF_LOCAL_P (x) ? 1 : 2;
6961 case LABEL_REF:
6962 return 1;
6963 default:
6964 return 0;
6965 }
6966 }
6967
/* Like compute_reloc_for_constant, except for an RTX.  The return value
   is a mask for which bit 1 indicates a global relocation, and bit 0
   indicates a local relocation.  */

static int
compute_reloc_for_rtx (const_rtx x)
{
  switch (GET_CODE (x))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return compute_reloc_for_rtx_1 (x);

    case CONST:
      {
	/* A CONST wraps an expression; OR together the relocation
	   bits of every leaf sub-rtx it contains.  */
	int reloc = 0;
	subrtx_iterator::array_type array;
	FOR_EACH_SUBRTX (iter, array, x, ALL)
	  reloc |= compute_reloc_for_rtx_1 (*iter);
	return reloc;
      }

    default:
      /* Other codes (constants etc.) need no relocation.  */
      return 0;
    }
}
6994
6995 section *
default_select_rtx_section(machine_mode mode ATTRIBUTE_UNUSED,rtx x,unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)6996 default_select_rtx_section (machine_mode mode ATTRIBUTE_UNUSED,
6997 rtx x,
6998 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
6999 {
7000 if (compute_reloc_for_rtx (x) & targetm.asm_out.reloc_rw_mask ())
7001 return data_section;
7002 else
7003 return readonly_data_section;
7004 }
7005
7006 section *
default_elf_select_rtx_section(machine_mode mode,rtx x,unsigned HOST_WIDE_INT align)7007 default_elf_select_rtx_section (machine_mode mode, rtx x,
7008 unsigned HOST_WIDE_INT align)
7009 {
7010 int reloc = compute_reloc_for_rtx (x);
7011
7012 /* ??? Handle small data here somehow. */
7013
7014 if (reloc & targetm.asm_out.reloc_rw_mask ())
7015 {
7016 if (reloc == 1)
7017 return get_named_section (NULL, ".data.rel.ro.local", 1);
7018 else
7019 return get_named_section (NULL, ".data.rel.ro", 3);
7020 }
7021
7022 return mergeable_constant_section (mode, align, 0);
7023 }
7024
/* Set the generally applicable flags on the SYMBOL_REF for EXP.
   Default implementation of TARGET_ENCODE_SECTION_INFO: DECL is the
   declaration, RTL its DECL_RTL (expected to be a MEM wrapping a
   SYMBOL_REF), and FIRST is unused here.  */

void
default_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
{
  rtx symbol;
  int flags;

  /* Careful not to prod global register variables.  */
  if (!MEM_P (rtl))
    return;
  symbol = XEXP (rtl, 0);
  if (GET_CODE (symbol) != SYMBOL_REF)
    return;

  /* Recompute all flags from scratch, preserving only the
     block-info bit.  */
  flags = SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_HAS_BLOCK_INFO;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    flags |= SYMBOL_FLAG_FUNCTION;
  if (targetm.binds_local_p (decl))
    flags |= SYMBOL_FLAG_LOCAL;
  if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
    flags |= DECL_TLS_MODEL (decl) << SYMBOL_FLAG_TLS_SHIFT;
  else if (targetm.in_small_data_p (decl))
    flags |= SYMBOL_FLAG_SMALL;
  /* ??? Why is DECL_EXTERNAL ever set for non-PUBLIC names?  Without
     being PUBLIC, the thing *must* be defined in this translation unit.
     Prevent this buglet from being propagated into rtl code as well.  */
  if (DECL_P (decl) && DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
    flags |= SYMBOL_FLAG_EXTERNAL;

  SYMBOL_REF_FLAGS (symbol) = flags;
}
7057
/* By default, we do nothing for encode_section_info, so we need not
   do anything but discard the '*' marker.  */

const char *
default_strip_name_encoding (const char *str)
{
  if (str[0] == '*')
    return str + 1;
  return str;
}
7066
#ifdef ASM_OUTPUT_DEF
/* The default implementation of TARGET_ASM_OUTPUT_ANCHOR.  Define the
   anchor relative to ".", the current section position.  */

void
default_asm_output_anchor (rtx symbol)
{
  char buffer[100];

  /* The leading '*' marks the value as already encoded so it is
     emitted verbatim (see default_strip_name_encoding); the anchor's
     value is ". + block-offset".  */
  sprintf (buffer, "*. + " HOST_WIDE_INT_PRINT_DEC,
	   SYMBOL_REF_BLOCK_OFFSET (symbol));
  ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
}
#endif
7081
/* The default implementation of TARGET_USE_ANCHORS_FOR_SYMBOL_P.
   Return true if references to SYMBOL (which must carry block info)
   may be addressed through a section anchor.  */

bool
default_use_anchors_for_symbol_p (const_rtx symbol)
{
  tree decl;
  section *sect = SYMBOL_REF_BLOCK (symbol)->sect;

  /* This function should only be called with non-zero SYMBOL_REF_BLOCK,
     furthermore get_block_for_section should not create object blocks
     for mergeable sections.  */
  gcc_checking_assert (sect && !(sect->common.flags & SECTION_MERGE));

  /* Don't use anchors for small data sections.  The small data register
     acts as an anchor for such sections.  */
  if (sect->common.flags & SECTION_SMALL)
    return false;

  decl = SYMBOL_REF_DECL (symbol);
  if (decl && DECL_P (decl))
    {
      /* Don't use section anchors for decls that might be defined or
	 usurped by other modules.  */
      if (TREE_PUBLIC (decl) && !decl_binds_to_current_def_p (decl))
	return false;

      /* Don't use section anchors for decls that will be placed in a
	 small data section.  */
      /* ??? Ideally, this check would be redundant with the SECTION_SMALL
	 one above.  The problem is that we only use SECTION_SMALL for
	 sections that should be marked as small in the section directive.  */
      if (targetm.in_small_data_p (decl))
	return false;

      /* Don't use section anchors for decls that won't fit inside a single
	 anchor range to reduce the amount of instructions required to refer
	 to the entire declaration.  */
      if (DECL_SIZE_UNIT (decl) == NULL_TREE
	  || !tree_fits_uhwi_p (DECL_SIZE_UNIT (decl))
	  || (tree_to_uhwi (DECL_SIZE_UNIT (decl))
	      >= (unsigned HOST_WIDE_INT) targetm.max_anchor_offset))
	return false;

    }
  return true;
}
7128
7129 /* Return true when RESOLUTION indicate that symbol will be bound to the
7130 definition provided by current .o file. */
7131
7132 static bool
resolution_to_local_definition_p(enum ld_plugin_symbol_resolution resolution)7133 resolution_to_local_definition_p (enum ld_plugin_symbol_resolution resolution)
7134 {
7135 return (resolution == LDPR_PREVAILING_DEF
7136 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP
7137 || resolution == LDPR_PREVAILING_DEF_IRONLY);
7138 }
7139
7140 /* Return true when RESOLUTION indicate that symbol will be bound locally
7141 within current executable or DSO. */
7142
7143 static bool
resolution_local_p(enum ld_plugin_symbol_resolution resolution)7144 resolution_local_p (enum ld_plugin_symbol_resolution resolution)
7145 {
7146 return (resolution == LDPR_PREVAILING_DEF
7147 || resolution == LDPR_PREVAILING_DEF_IRONLY
7148 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP
7149 || resolution == LDPR_PREEMPTED_REG
7150 || resolution == LDPR_PREEMPTED_IR
7151 || resolution == LDPR_RESOLVED_IR
7152 || resolution == LDPR_RESOLVED_EXEC);
7153 }
7154
/* Worker for the TARGET_BINDS_LOCAL_P variants below.  EXP is the
   symbol's decl (a non-decl means a constant-pool entry).  SHLIB is
   true when compiling code that may end up in a shared library, so
   global names can be overridden at dynamic-link time.
   WEAK_DOMINATE, when true, lets a local definition count as resolved
   locally even without linker-plugin resolution info (unless SHLIB).
   EXTERN_PROTECTED_DATA, when true, prevents protected-visibility
   data from being treated as local.  COMMON_LOCAL_P is true means
   that the linker can guarantee that an uninitialized common symbol
   in the executable will still be defined (through COPY relocation)
   in the executable.  */

bool
default_binds_local_p_3 (const_tree exp, bool shlib, bool weak_dominate,
			 bool extern_protected_data, bool common_local_p)
{
  /* A non-decl is an entry in the constant pool.  */
  if (!DECL_P (exp))
    return true;

  /* Weakrefs may not bind locally, even though the weakref itself is always
     static and therefore local.  Similarly, the resolver for ifunc functions
     might resolve to a non-local function.
     FIXME: We can resolve the weakref case more carefully by looking at the
     weakref alias.  */
  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (exp))
      || (TREE_CODE (exp) == FUNCTION_DECL
	  && cgraph_node::get (exp)
	  && cgraph_node::get (exp)->ifunc_resolver))
    return false;

  /* Static variables are always local.  */
  if (! TREE_PUBLIC (exp))
    return true;

  /* With resolution file in hand, take look into resolutions.
     We can't just return true for resolved_locally symbols,
     because dynamic linking might overwrite symbols
     in shared libraries.  */
  bool resolved_locally = false;

  bool uninited_common = (DECL_COMMON (exp)
			  && (DECL_INITIAL (exp) == NULL
			      || (!in_lto_p
				  && DECL_INITIAL (exp) == error_mark_node)));

  /* A non-external variable is defined locally only if it isn't
     uninitialized COMMON variable or common_local_p is true.  */
  bool defined_locally = (!DECL_EXTERNAL (exp)
			  && (!uninited_common || common_local_p));
  if (symtab_node *node = symtab_node::get (exp))
    {
      /* Consult the symbol table and any linker-plugin resolution
	 info recorded there.  */
      if (node->in_other_partition)
	defined_locally = true;
      if (node->can_be_discarded_p ())
	;
      else if (resolution_to_local_definition_p (node->resolution))
	defined_locally = resolved_locally = true;
      else if (resolution_local_p (node->resolution))
	resolved_locally = true;
    }
  if (defined_locally && weak_dominate && !shlib)
    resolved_locally = true;

  /* Undefined weak symbols are never defined locally.  */
  if (DECL_WEAK (exp) && !defined_locally)
    return false;

  /* A symbol is local if the user has said explicitly that it will be,
     or if we have a definition for the symbol.  We cannot infer visibility
     for undefined symbols.  */
  if (DECL_VISIBILITY (exp) != VISIBILITY_DEFAULT
      && (TREE_CODE (exp) == FUNCTION_DECL
	  || !extern_protected_data
	  || DECL_VISIBILITY (exp) != VISIBILITY_PROTECTED)
      && (DECL_VISIBILITY_SPECIFIED (exp) || defined_locally))
    return true;

  /* If PIC, then assume that any global name can be overridden by
     symbols resolved from other modules.  */
  if (shlib)
    return false;

  /* Variables defined outside this object might not be local.  */
  if (DECL_EXTERNAL (exp) && !resolved_locally)
    return false;

  /* Non-dominant weak symbols are not defined locally.  */
  if (DECL_WEAK (exp) && !resolved_locally)
    return false;

  /* Uninitialized COMMON variable may be unified with symbols
     resolved from other modules.  */
  if (uninited_common && !resolved_locally)
    return false;

  /* Otherwise we're left with initialized (or non-common) global data
     which is of necessity defined locally.  */
  return true;
}
7247
7248 /* Assume ELF-ish defaults, since that's pretty much the most liberal
7249 wrt cross-module name binding. */
7250
7251 bool
default_binds_local_p(const_tree exp)7252 default_binds_local_p (const_tree exp)
7253 {
7254 return default_binds_local_p_3 (exp, flag_shlib != 0, true, false, false);
7255 }
7256
7257 /* Similar to default_binds_local_p, but common symbol may be local and
7258 extern protected data is non-local. */
7259
7260 bool
default_binds_local_p_2(const_tree exp)7261 default_binds_local_p_2 (const_tree exp)
7262 {
7263 return default_binds_local_p_3 (exp, flag_shlib != 0, true, true,
7264 !flag_pic);
7265 }
7266
7267 bool
default_binds_local_p_1(const_tree exp,int shlib)7268 default_binds_local_p_1 (const_tree exp, int shlib)
7269 {
7270 return default_binds_local_p_3 (exp, shlib != 0, false, false, false);
7271 }
7272
/* Return true when references to DECL must bind to current definition in
   final executable.

   The condition is usually equivalent to whether the function binds to the
   current module (shared library or executable), that is to binds_local_p.
   We use this fact to avoid need for another target hook and implement
   the logic using binds_local_p and just special cases where
   decl_binds_to_current_def_p is stronger than binds_local_p.  In particular
   the weak definitions (that can be overwritten at linktime by other
   definition from different object file) and when resolution info is available
   we simply use the knowledge passed to us by linker plugin.  */
bool
decl_binds_to_current_def_p (const_tree decl)
{
  gcc_assert (DECL_P (decl));
  if (!targetm.binds_local_p (decl))
    return false;
  if (!TREE_PUBLIC (decl))
    return true;

  /* When resolution is available, just use it.  */
  if (symtab_node *node = symtab_node::get (decl))
    {
      if (node->resolution != LDPR_UNKNOWN
	  && !node->can_be_discarded_p ())
	return resolution_to_local_definition_p (node->resolution);
    }

  /* Otherwise we have to assume the worst for DECL_WEAK (hidden weaks
     binds locally but still can be overwritten), DECL_COMMON (can be merged
     with a non-common definition somewhere in the same module) or
     DECL_EXTERNAL.
     This relies on the fact that binds_local_p behaves as decl_replaceable_p
     for all other declaration types.  */
  if (DECL_WEAK (decl))
    return false;
  if (DECL_COMMON (decl)
      && (DECL_INITIAL (decl) == NULL
	  || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
    return false;
  if (DECL_EXTERNAL (decl))
    return false;
  return true;
}
7317
7318 /* A replaceable function or variable is one which may be replaced
7319 at link-time with an entirely different definition, provided that the
7320 replacement has the same type. For example, functions declared
7321 with __attribute__((weak)) on most systems are replaceable.
7322
7323 COMDAT functions are not replaceable, since all definitions of the
7324 function must be equivalent. It is important that COMDAT functions
7325 not be treated as replaceable so that use of C++ template
7326 instantiations is not penalized. */
7327
7328 bool
decl_replaceable_p(tree decl)7329 decl_replaceable_p (tree decl)
7330 {
7331 gcc_assert (DECL_P (decl));
7332 if (!TREE_PUBLIC (decl) || DECL_COMDAT (decl))
7333 return false;
7334 if (!flag_semantic_interposition
7335 && !DECL_WEAK (decl))
7336 return false;
7337 return !decl_binds_to_current_def_p (decl);
7338 }
7339
/* Default function to output code that will globalize a label.  A
   target must define GLOBAL_ASM_OP or provide its own function to
   globalize a label.  */
#ifdef GLOBAL_ASM_OP
void
default_globalize_label (FILE * stream, const char *name)
{
  /* Emit "<GLOBAL_ASM_OP><name>\n".  */
  fputs (GLOBAL_ASM_OP, stream);
  assemble_name (stream, name);
  fputc ('\n', stream);
}
#endif /* GLOBAL_ASM_OP */
7352
7353 /* Default function to output code that will globalize a declaration. */
7354 void
default_globalize_decl_name(FILE * stream,tree decl)7355 default_globalize_decl_name (FILE * stream, tree decl)
7356 {
7357 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
7358 targetm.asm_out.globalize_label (stream, name);
7359 }
7360
/* Default function to output a label for unwind information.  The
   default is to do nothing.  A target that needs nonlocal labels for
   unwind information must provide its own function to do this.  */
void
default_emit_unwind_label (FILE * stream ATTRIBUTE_UNUSED,
			   tree decl ATTRIBUTE_UNUSED,
			   int for_eh ATTRIBUTE_UNUSED,
			   int empty ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: the default target emits no unwind labels.  */
}
7371
/* Default function to output a label to divide up the exception table.
   The default is to do nothing.  A target that needs/wants to divide
   up the table must provide its own function to do this.  */
void
default_emit_except_table_label (FILE * stream ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: the default target emits no such label.  */
}
7379
/* This is how to output an internal numbered label where PREFIX is
   the class of label and LABELNO is the number within the class.
   The generated label is written into BUF, which the caller must size
   appropriately for ASM_GENERATE_INTERNAL_LABEL.  */

void
default_generate_internal_label (char *buf, const char *prefix,
				 unsigned long labelno)
{
  ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
}
7389
/* This is how to output an internal numbered label where PREFIX is
   the class of label and LABELNO is the number within the class.
   Unlike default_generate_internal_label, this also emits the label
   definition to STREAM.  */

void
default_internal_label (FILE *stream, const char *prefix,
			unsigned long labelno)
{
  /* 40 bytes of slack covers the label number and separators added
     around PREFIX by ASM_GENERATE_INTERNAL_LABEL.  */
  char *const buf = (char *) alloca (40 + strlen (prefix));
  ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
  ASM_OUTPUT_INTERNAL_LABEL (stream, buf);
}
7401
7402
/* The default implementation of ASM_DECLARE_CONSTANT_NAME: just emit
   NAME as a label; EXP and SIZE are ignored.  */

void
default_asm_declare_constant_name (FILE *file, const char *name,
				   const_tree exp ATTRIBUTE_UNUSED,
				   HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  assemble_label (file, name);
}
7412
7413 /* This is the default behavior at the beginning of a file. It's
7414 controlled by two other target-hook toggles. */
7415 void
default_file_start(void)7416 default_file_start (void)
7417 {
7418 if (targetm.asm_file_start_app_off
7419 && !(flag_verbose_asm || flag_debug_asm || flag_dump_rtl_in_asm))
7420 fputs (ASM_APP_OFF, asm_out_file);
7421
7422 if (targetm.asm_file_start_file_directive)
7423 {
7424 /* LTO produced units have no meaningful main_input_filename. */
7425 if (in_lto_p)
7426 output_file_directive (asm_out_file, "<artificial>");
7427 else
7428 output_file_directive (asm_out_file, main_input_filename);
7429 }
7430 }
7431
7432 /* This is a generic routine suitable for use as TARGET_ASM_FILE_END
7433 which emits a special section directive used to indicate whether or
7434 not this object file needs an executable stack. This is primarily
7435 a GNU extension to ELF but could be used on other targets. */
7436
7437 int trampolines_created;
7438
7439 void
file_end_indicate_exec_stack(void)7440 file_end_indicate_exec_stack (void)
7441 {
7442 unsigned int flags = SECTION_DEBUG;
7443 if (trampolines_created)
7444 flags |= SECTION_CODE;
7445
7446 switch_to_section (get_section (".note.GNU-stack", flags, NULL));
7447 }
7448
7449 /* Emit a special section directive to indicate that this object file
7450 was compiled with -fsplit-stack. This is used to let the linker
7451 detect calls between split-stack code and non-split-stack code, so
7452 that it can modify the split-stack code to allocate a sufficiently
7453 large stack. We emit another special section if there are any
7454 functions in this file which have the no_split_stack attribute, to
7455 prevent the linker from warning about being unable to convert the
7456 functions if they call non-split-stack code. */
7457
7458 void
file_end_indicate_split_stack(void)7459 file_end_indicate_split_stack (void)
7460 {
7461 if (flag_split_stack)
7462 {
7463 switch_to_section (get_section (".note.GNU-split-stack", SECTION_DEBUG,
7464 NULL));
7465 if (saw_no_split_stack)
7466 switch_to_section (get_section (".note.GNU-no-split-stack",
7467 SECTION_DEBUG, NULL));
7468 }
7469 }
7470
7471 /* Output DIRECTIVE (a C string) followed by a newline. This is used as
7472 a get_unnamed_section callback. */
7473
7474 void
output_section_asm_op(const void * directive)7475 output_section_asm_op (const void *directive)
7476 {
7477 fprintf (asm_out_file, "%s\n", (const char *) directive);
7478 }
7479
/* Emit assembly code to switch to section NEW_SECTION.  Do nothing if
   the current section is NEW_SECTION.  */

void
switch_to_section (section *new_section)
{
  if (in_section == new_section)
    return;

  /* For SECTION_FORGET sections, clear in_section so the next switch
     to NEW_SECTION is not short-circuited by the check above and the
     directive is emitted again.  */
  if (new_section->common.flags & SECTION_FORGET)
    in_section = NULL;
  else
    in_section = new_section;

  /* Emit the actual switch directive according to the section style.  */
  switch (SECTION_STYLE (new_section))
    {
    case SECTION_NAMED:
      targetm.asm_out.named_section (new_section->named.name,
				     new_section->named.common.flags,
				     new_section->named.decl);
      break;

    case SECTION_UNNAMED:
      new_section->unnamed.callback (new_section->unnamed.data);
      break;

    case SECTION_NOSWITCH:
      /* No-switch sections must never be switched to.  */
      gcc_unreachable ();
      break;
    }

  new_section->common.flags |= SECTION_DECLARED;
}
7513
/* If block symbol SYMBOL has not yet been assigned an offset, place
   it at the end of its block.  Updates the block's total size and
   alignment and records SYMBOL in the block's object list.  */

void
place_block_symbol (rtx symbol)
{
  unsigned HOST_WIDE_INT size, mask, offset;
  class constant_descriptor_rtx *desc;
  unsigned int alignment;
  struct object_block *block;
  tree decl;

  gcc_assert (SYMBOL_REF_BLOCK (symbol));
  /* A non-negative block offset means the symbol is already placed.  */
  if (SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0)
    return;

  /* Work out the symbol's size and alignment.  */
  if (CONSTANT_POOL_ADDRESS_P (symbol))
    {
      /* An RTL constant-pool entry: size and alignment come from its
	 constant descriptor.  */
      desc = SYMBOL_REF_CONSTANT (symbol);
      alignment = desc->align;
      size = GET_MODE_SIZE (desc->mode);
    }
  else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    {
      /* A tree-level constant-pool entry: size and alignment come from
	 the decl; grow both when ASan protects the constant so a red
	 zone follows it.  */
      decl = SYMBOL_REF_DECL (symbol);
      gcc_checking_assert (DECL_IN_CONSTANT_POOL (decl));
      alignment = DECL_ALIGN (decl);
      size = get_constant_size (DECL_INITIAL (decl));
      if ((flag_sanitize & SANITIZE_ADDRESS)
	  && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
	  && asan_protect_global (DECL_INITIAL (decl)))
	{
	  size += asan_red_zone_size (size);
	  alignment = MAX (alignment,
			   ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
	}
    }
  else
    {
      /* An ordinary variable.  */
      struct symtab_node *snode;
      decl = SYMBOL_REF_DECL (symbol);

      snode = symtab_node::get (decl);
      if (snode->alias)
	{
	  /* For an alias, place the ultimate target instead and share
	     its block offset.  */
	  rtx target = DECL_RTL (snode->ultimate_alias_target ()->decl);

	  gcc_assert (MEM_P (target)
		      && GET_CODE (XEXP (target, 0)) == SYMBOL_REF
		      && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (target, 0)));
	  target = XEXP (target, 0);
	  place_block_symbol (target);
	  SYMBOL_REF_BLOCK_OFFSET (symbol) = SYMBOL_REF_BLOCK_OFFSET (target);
	  return;
	}
      alignment = get_variable_align (decl);
      size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
      if ((flag_sanitize & SANITIZE_ADDRESS)
	  && asan_protect_global (decl))
	{
	  size += asan_red_zone_size (size);
	  alignment = MAX (alignment,
			   ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
	}
    }

  /* Calculate the object's offset from the start of the block.  */
  block = SYMBOL_REF_BLOCK (symbol);
  mask = alignment / BITS_PER_UNIT - 1;
  offset = (block->size + mask) & ~mask;
  SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;

  /* Record the block's new alignment and size.  */
  block->alignment = MAX (block->alignment, alignment);
  block->size = offset + size;

  vec_safe_push (block->objects, symbol);
}
7593
/* Return the anchor that should be used to address byte offset OFFSET
   from the first object in BLOCK.  MODEL is the TLS model used
   to access it.  Reuses an existing anchor if one with the chosen
   offset and TLS model already exists; otherwise creates and records
   a new LANCHOR symbol.  */

rtx
get_section_anchor (struct object_block *block, HOST_WIDE_INT offset,
		    enum tls_model model)
{
  char label[100];
  unsigned int begin, middle, end;
  unsigned HOST_WIDE_INT min_offset, max_offset, range, bias, delta;
  rtx anchor;

  /* Work out the anchor's offset.  Use an offset of 0 for the first
     anchor so that we don't pessimize the case where we take the address
     of a variable at the beginning of the block.  This is particularly
     useful when a block has only one variable assigned to it.

     We try to place anchors RANGE bytes apart, so there can then be
     anchors at +/-RANGE, +/-2 * RANGE, and so on, up to the limits of
     a ptr_mode offset.  With some target settings, the lowest such
     anchor might be out of range for the lowest ptr_mode offset;
     likewise the highest anchor for the highest offset.  Use anchors
     at the extreme ends of the ptr_mode range in such cases.

     All arithmetic uses unsigned integers in order to avoid
     signed overflow.  */
  max_offset = (unsigned HOST_WIDE_INT) targetm.max_anchor_offset;
  min_offset = (unsigned HOST_WIDE_INT) targetm.min_anchor_offset;
  range = max_offset - min_offset + 1;
  if (range == 0)
    offset = 0;
  else
    {
      /* BIAS is half the ptr_mode range; anchors are clamped to
	 [-BIAS, BIAS - 1].  */
      bias = HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (ptr_mode) - 1);
      if (offset < 0)
	{
	  delta = -(unsigned HOST_WIDE_INT) offset + max_offset;
	  delta -= delta % range;
	  if (delta > bias)
	    delta = bias;
	  offset = (HOST_WIDE_INT) (-delta);
	}
      else
	{
	  delta = (unsigned HOST_WIDE_INT) offset - min_offset;
	  delta -= delta % range;
	  if (delta > bias - 1)
	    delta = bias - 1;
	  offset = (HOST_WIDE_INT) delta;
	}
    }

  /* Do a binary search to see if there's already an anchor we can use.
     Set BEGIN to the new anchor's index if not.  The anchor vector is
     kept sorted by block offset, then by TLS model.  */
  begin = 0;
  end = vec_safe_length (block->anchors);
  while (begin != end)
    {
      middle = (end + begin) / 2;
      anchor = (*block->anchors)[middle];
      if (SYMBOL_REF_BLOCK_OFFSET (anchor) > offset)
	end = middle;
      else if (SYMBOL_REF_BLOCK_OFFSET (anchor) < offset)
	begin = middle + 1;
      else if (SYMBOL_REF_TLS_MODEL (anchor) > model)
	end = middle;
      else if (SYMBOL_REF_TLS_MODEL (anchor) < model)
	begin = middle + 1;
      else
	return anchor;
    }

  /* Create a new anchor with a unique label.  */
  ASM_GENERATE_INTERNAL_LABEL (label, "LANCHOR", anchor_labelno++);
  anchor = create_block_symbol (ggc_strdup (label), block, offset);
  SYMBOL_REF_FLAGS (anchor) |= SYMBOL_FLAG_LOCAL | SYMBOL_FLAG_ANCHOR;
  SYMBOL_REF_FLAGS (anchor) |= model << SYMBOL_FLAG_TLS_SHIFT;

  /* Insert it at index BEGIN.  */
  vec_safe_insert (block->anchors, begin, anchor);
  return anchor;
}
7677
7678 /* Output the objects in BLOCK. */
7679
static void
output_object_block (struct object_block *block)
{
  class constant_descriptor_rtx *desc;
  unsigned int i;
  HOST_WIDE_INT offset;		/* Running position within the section.  */
  tree decl;
  rtx symbol;

  if (!block->objects)
    return;

  /* Switch to the section and make sure that the first byte is
     suitably aligned.  */
  /* Special case VTV comdat sections similar to assemble_variable.  */
  if (SECTION_STYLE (block->sect) == SECTION_NAMED
      && block->sect->named.name
      && (strcmp (block->sect->named.name, ".vtable_map_vars") == 0))
    handle_vtv_comdat_section (block->sect, block->sect->named.decl);
  else
    switch_to_section (block->sect);

  gcc_checking_assert (!(block->sect->common.flags & SECTION_MERGE));
  assemble_align (block->alignment);

  /* Define the values of all anchors relative to the current section
     position.  */
  FOR_EACH_VEC_SAFE_ELT (block->anchors, i, symbol)
    targetm.asm_out.output_anchor (symbol);

  /* Output the objects themselves.  */
  offset = 0;
  FOR_EACH_VEC_ELT (*block->objects, i, symbol)
    {
      /* Move to the object's offset, padding with zeros if necessary.  */
      assemble_zeros (SYMBOL_REF_BLOCK_OFFSET (symbol) - offset);
      offset = SYMBOL_REF_BLOCK_OFFSET (symbol);
      if (CONSTANT_POOL_ADDRESS_P (symbol))
	{
	  /* An RTL constant-pool entry.  */
	  desc = SYMBOL_REF_CONSTANT (symbol);
	  /* Pass 1 for align as we have already laid out everything in the block.
	     So aligning shouldn't be necessary.  */
	  output_constant_pool_1 (desc, 1);
	  offset += GET_MODE_SIZE (desc->mode);
	}
      else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
	{
	  /* A tree-level constant; its value is DECL_INITIAL of the decl.  */
	  HOST_WIDE_INT size;
	  decl = SYMBOL_REF_DECL (symbol);
	  assemble_constant_contents (DECL_INITIAL (decl), XSTR (symbol, 0),
				      DECL_ALIGN (decl), false);

	  size = get_constant_size (DECL_INITIAL (decl));
	  offset += size;
	  /* AddressSanitizer: follow protected string constants with a
	     red zone of padding.  */
	  if ((flag_sanitize & SANITIZE_ADDRESS)
	      && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
	      && asan_protect_global (DECL_INITIAL (decl)))
	    {
	      size = asan_red_zone_size (size);
	      assemble_zeros (size);
	      offset += size;
	    }
	}
      else
	{
	  /* An ordinary variable.  */
	  HOST_WIDE_INT size;
	  decl = SYMBOL_REF_DECL (symbol);
	  assemble_variable_contents (decl, XSTR (symbol, 0), false, false);
	  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
	  offset += size;
	  /* AddressSanitizer: follow protected variables with a red zone.  */
	  if ((flag_sanitize & SANITIZE_ADDRESS)
	      && asan_protect_global (decl))
	    {
	      size = asan_red_zone_size (size);
	      assemble_zeros (size);
	      offset += size;
	    }
	}
    }
}
7760
7761 /* A callback for qsort to compare object_blocks. */
7762
7763 static int
output_object_block_compare(const void * x,const void * y)7764 output_object_block_compare (const void *x, const void *y)
7765 {
7766 object_block *p1 = *(object_block * const*)x;
7767 object_block *p2 = *(object_block * const*)y;
7768
7769 if (p1->sect->common.flags & SECTION_NAMED
7770 && !(p2->sect->common.flags & SECTION_NAMED))
7771 return 1;
7772
7773 if (!(p1->sect->common.flags & SECTION_NAMED)
7774 && p2->sect->common.flags & SECTION_NAMED)
7775 return -1;
7776
7777 if (p1->sect->common.flags & SECTION_NAMED
7778 && p2->sect->common.flags & SECTION_NAMED)
7779 return strcmp (p1->sect->named.name, p2->sect->named.name);
7780
7781 unsigned f1 = p1->sect->common.flags;
7782 unsigned f2 = p2->sect->common.flags;
7783 if (f1 == f2)
7784 return 0;
7785 return f1 < f2 ? -1 : 1;
7786 }
7787
7788 /* Output the definitions of all object_blocks. */
7789
7790 void
output_object_blocks(void)7791 output_object_blocks (void)
7792 {
7793 vec<object_block *, va_heap> v;
7794 v.create (object_block_htab->elements ());
7795 object_block *obj;
7796 hash_table<object_block_hasher>::iterator hi;
7797
7798 FOR_EACH_HASH_TABLE_ELEMENT (*object_block_htab, obj, object_block *, hi)
7799 v.quick_push (obj);
7800
7801 /* Sort them in order to output them in a deterministic manner,
7802 otherwise we may get .rodata sections in different orders with
7803 and without -g. */
7804 v.qsort (output_object_block_compare);
7805 unsigned i;
7806 FOR_EACH_VEC_ELT (v, i, obj)
7807 output_object_block (obj);
7808
7809 v.release ();
7810 }
7811
7812 /* This function provides a possible implementation of the
7813 TARGET_ASM_RECORD_GCC_SWITCHES target hook for ELF targets. When triggered
7814 by -frecord-gcc-switches it creates a new mergeable, string section in the
7815 assembler output file called TARGET_ASM_RECORD_GCC_SWITCHES_SECTION which
7816 contains the switches in ASCII format.
7817
7818 FIXME: This code does not correctly handle double quote characters
7819 that appear inside strings, (it strips them rather than preserving them).
7820 FIXME: ASM_OUTPUT_ASCII, as defined in config/elfos.h will not emit NUL
7821 characters - instead it treats them as sub-string separators. Since
7822 we want to emit NUL strings terminators into the object file we have to use
7823 ASM_OUTPUT_SKIP. */
7824
int
elf_record_gcc_switches (print_switch_type type, const char * name)
{
  switch (type)
    {
    case SWITCH_TYPE_PASSED:
      /* Emit the switch text, then a single skipped (zero) byte as the
	 NUL terminator -- ASM_OUTPUT_ASCII cannot emit NULs itself (see
	 the FIXME in the comment above this function).  */
      ASM_OUTPUT_ASCII (asm_out_file, name, strlen (name));
      ASM_OUTPUT_SKIP (asm_out_file, HOST_WIDE_INT_1U);
      break;

    case SWITCH_TYPE_DESCRIPTIVE:
      if (name == NULL)
	{
	  /* Distinguish between invocations where name is NULL.  */
	  static bool started = false;

	  if (!started)
	    {
	      section * sec;

	      /* First NULL-named descriptive call: create and switch to
		 the mergeable string section that will hold the switch
		 strings.  NOTE(review): "SECTION_ENTSIZE & 1" presumably
		 encodes an entity size of 1 byte within the entsize
		 field -- confirm against the SECTION_ENTSIZE definition.  */
	      sec = get_section (targetm.asm_out.record_gcc_switches_section,
				 SECTION_DEBUG
				 | SECTION_MERGE
				 | SECTION_STRINGS
				 | (SECTION_ENTSIZE & 1),
				 NULL);
	      switch_to_section (sec);
	      started = true;
	    }
	}
      /* FALLTHROUGH */

    default:
      break;
    }

  /* The return value is currently ignored by the caller, but must be 0.
     For -fverbose-asm the return value would be the number of characters
     emitted into the assembler file.  */
  return 0;
}
7865
7866 /* Emit text to declare externally defined symbols. It is needed to
7867 properly support non-default visibility. */
void
default_elf_asm_output_external (FILE *file ATTRIBUTE_UNUSED,
				 tree decl,
				 const char *name ATTRIBUTE_UNUSED)
{
  /* We output the name if and only if TREE_SYMBOL_REFERENCED is
     set in order to avoid putting out names that are never really
     used.  Always output visibility specified in the source.
     Note that on ELF nothing else needs to be emitted for an external
     declaration -- only the optional visibility directive.  */
  if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
      && (DECL_VISIBILITY_SPECIFIED (decl)
	  || targetm.binds_local_p (decl)))
    maybe_assemble_visibility (decl);
}
7881
7882 /* The default hook for TARGET_ASM_OUTPUT_SOURCE_FILENAME. */
7883
void
default_asm_output_source_filename (FILE *file, const char *name)
{
#ifdef ASM_OUTPUT_SOURCE_FILENAME
  /* The target macro, when defined, controls the exact output.  */
  ASM_OUTPUT_SOURCE_FILENAME (file, name);
#else
  /* Default: a .file directive with the name quoted and escaped.  */
  fprintf (file, "\t.file\t");
  output_quoted_string (file, name);
  putc ('\n', file);
#endif
}
7895
7896 /* Output a file name in the form wanted by System V. */
7897
7898 void
output_file_directive(FILE * asm_file,const char * input_name)7899 output_file_directive (FILE *asm_file, const char *input_name)
7900 {
7901 int len;
7902 const char *na;
7903
7904 if (input_name == NULL)
7905 input_name = "<stdin>";
7906 else
7907 input_name = remap_debug_filename (input_name);
7908
7909 len = strlen (input_name);
7910 na = input_name + len;
7911
7912 /* NA gets INPUT_NAME sans directory names. */
7913 while (na > input_name)
7914 {
7915 if (IS_DIR_SEPARATOR (na[-1]))
7916 break;
7917 na--;
7918 }
7919
7920 targetm.asm_out.output_source_filename (asm_file, na);
7921 }
7922
7923 /* Create a DEBUG_EXPR_DECL / DEBUG_EXPR pair from RTL expression
7924 EXP. */
rtx
make_debug_expr_from_rtl (const_rtx exp)
{
  tree ddecl = make_node (DEBUG_EXPR_DECL), type;
  machine_mode mode = GET_MODE (exp);
  rtx dval;

  DECL_ARTIFICIAL (ddecl) = 1;
  /* Try to recover a source-level type from the REG_EXPR or MEM_EXPR
     attached to EXP, if any.  */
  if (REG_P (exp) && REG_EXPR (exp))
    type = TREE_TYPE (REG_EXPR (exp));
  else if (MEM_P (exp) && MEM_EXPR (exp))
    type = TREE_TYPE (MEM_EXPR (exp));
  else
    type = NULL_TREE;
  /* Use the recovered type only if its mode matches EXP's mode;
     otherwise fall back to a language-provided type for MODE.  */
  if (type && TYPE_MODE (type) == mode)
    TREE_TYPE (ddecl) = type;
  else
    TREE_TYPE (ddecl) = lang_hooks.types.type_for_mode (mode, 1);
  SET_DECL_MODE (ddecl, mode);
  /* Create the DEBUG_EXPR rtx and link it with the decl in both
     directions.  */
  dval = gen_rtx_DEBUG_EXPR (mode);
  DEBUG_EXPR_TREE_DECL (dval) = ddecl;
  SET_DECL_RTL (ddecl, dval);
  return dval;
}
7949
7950 #ifdef ELF_ASCII_ESCAPES
7951 /* Default ASM_OUTPUT_LIMITED_STRING for ELF targets. */
7952
7953 void
default_elf_asm_output_limited_string(FILE * f,const char * s)7954 default_elf_asm_output_limited_string (FILE *f, const char *s)
7955 {
7956 int escape;
7957 unsigned char c;
7958
7959 fputs (STRING_ASM_OP, f);
7960 putc ('"', f);
7961 while (*s != '\0')
7962 {
7963 c = *s;
7964 escape = ELF_ASCII_ESCAPES[c];
7965 switch (escape)
7966 {
7967 case 0:
7968 putc (c, f);
7969 break;
7970 case 1:
7971 putc ('\\', f);
7972 putc ('0'+((c>>6)&7), f);
7973 putc ('0'+((c>>3)&7), f);
7974 putc ('0'+(c&7), f);
7975 break;
7976 default:
7977 putc ('\\', f);
7978 putc (escape, f);
7979 break;
7980 }
7981 s++;
7982 }
7983 putc ('\"', f);
7984 putc ('\n', f);
7985 }
7986
7987 /* Default ASM_OUTPUT_ASCII for ELF targets. */
7988
void
default_elf_asm_output_ascii (FILE *f, const char *s, unsigned int len)
{
  const char *limit = s + len;
  const char *last_null = NULL;	/* Cached position of the next NUL.  */
  unsigned bytes_in_chunk = 0;	/* Bytes emitted in the open .ascii run.  */
  unsigned char c;
  int escape;

  for (; s < limit; s++)
    {
      const char *p;

      /* Keep each .ascii source line to a reasonable length.  */
      if (bytes_in_chunk >= 60)
	{
	  putc ('\"', f);
	  putc ('\n', f);
	  bytes_in_chunk = 0;
	}

      /* Locate the next NUL at or after S, caching the result so we do
	 not rescan the same bytes on every iteration.  */
      if (s > last_null)
	{
	  for (p = s; p < limit && *p != '\0'; p++)
	    continue;
	  last_null = p;
	}
      else
	p = last_null;

      /* If a NUL terminator is close enough, emit the whole run through
	 default_elf_asm_output_limited_string (which uses STRING_ASM_OP);
	 otherwise emit this single byte into a .ascii chunk.  */
      if (p < limit && (p - s) <= (long) ELF_STRING_LIMIT)
	{
	  /* Close any open .ascii chunk first.  */
	  if (bytes_in_chunk > 0)
	    {
	      putc ('\"', f);
	      putc ('\n', f);
	      bytes_in_chunk = 0;
	    }

	  default_elf_asm_output_limited_string (f, s);
	  /* S lands on the NUL; the loop increment steps past it.  */
	  s = p;
	}
      else
	{
	  /* Open a new .ascii chunk if needed.  */
	  if (bytes_in_chunk == 0)
	    fputs (ASCII_DATA_ASM_OP "\"", f);

	  c = *s;
	  escape = ELF_ASCII_ESCAPES[c];
	  switch (escape)
	    {
	    case 0:
	      /* Safe to emit verbatim.  */
	      putc (c, f);
	      bytes_in_chunk++;
	      break;
	    case 1:
	      /* Needs a 3-digit octal escape.  */
	      putc ('\\', f);
	      putc ('0'+((c>>6)&7), f);
	      putc ('0'+((c>>3)&7), f);
	      putc ('0'+(c&7), f);
	      bytes_in_chunk += 4;
	      break;
	    default:
	      /* ESCAPE is the character to put after the backslash.  */
	      putc ('\\', f);
	      putc (escape, f);
	      bytes_in_chunk += 2;
	      break;
	    }

	}
    }

  /* Close the final chunk, if one is still open.  */
  if (bytes_in_chunk > 0)
    {
      putc ('\"', f);
      putc ('\n', f);
    }
}
8066 #endif
8067
8068 static GTY(()) section *elf_init_array_section;
8069 static GTY(()) section *elf_fini_array_section;
8070
8071 static section *
get_elf_initfini_array_priority_section(int priority,bool constructor_p)8072 get_elf_initfini_array_priority_section (int priority,
8073 bool constructor_p)
8074 {
8075 section *sec;
8076 if (priority != DEFAULT_INIT_PRIORITY)
8077 {
8078 char buf[18];
8079 sprintf (buf, "%s.%.5u",
8080 constructor_p ? ".init_array" : ".fini_array",
8081 priority);
8082 sec = get_section (buf, SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
8083 }
8084 else
8085 {
8086 if (constructor_p)
8087 {
8088 if (elf_init_array_section == NULL)
8089 elf_init_array_section
8090 = get_section (".init_array",
8091 SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
8092 sec = elf_init_array_section;
8093 }
8094 else
8095 {
8096 if (elf_fini_array_section == NULL)
8097 elf_fini_array_section
8098 = get_section (".fini_array",
8099 SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
8100 sec = elf_fini_array_section;
8101 }
8102 }
8103 return sec;
8104 }
8105
8106 /* Use .init_array section for constructors. */
8107
8108 void
default_elf_init_array_asm_out_constructor(rtx symbol,int priority)8109 default_elf_init_array_asm_out_constructor (rtx symbol, int priority)
8110 {
8111 section *sec = get_elf_initfini_array_priority_section (priority,
8112 true);
8113 assemble_addr_to_section (symbol, sec);
8114 }
8115
8116 /* Use .fini_array section for destructors. */
8117
8118 void
default_elf_fini_array_asm_out_destructor(rtx symbol,int priority)8119 default_elf_fini_array_asm_out_destructor (rtx symbol, int priority)
8120 {
8121 section *sec = get_elf_initfini_array_priority_section (priority,
8122 false);
8123 assemble_addr_to_section (symbol, sec);
8124 }
8125
8126 /* Default TARGET_ASM_OUTPUT_IDENT hook.
8127
8128 This is a bit of a cheat. The real default is a no-op, but this
8129 hook is the default for all targets with a .ident directive. */
8130
8131 void
default_asm_output_ident_directive(const char * ident_str)8132 default_asm_output_ident_directive (const char *ident_str)
8133 {
8134 const char *ident_asm_op = "\t.ident\t";
8135
8136 /* If we are still in the front end, do not write out the string
8137 to asm_out_file. Instead, add a fake top-level asm statement.
8138 This allows the front ends to use this hook without actually
8139 writing to asm_out_file, to handle #ident or Pragma Ident. */
8140 if (symtab->state == PARSING)
8141 {
8142 char *buf = ACONCAT ((ident_asm_op, "\"", ident_str, "\"\n", NULL));
8143 symtab->finalize_toplevel_asm (build_string (strlen (buf), buf));
8144 }
8145 else
8146 fprintf (asm_out_file, "%s\"%s\"\n", ident_asm_op, ident_str);
8147 }
8148
8149
8150 /* This function ensures that vtable_map variables are not only
8151 in the comdat section, but that each variable has its own unique
8152 comdat name. Without this the variables end up in the same section
8153 with a single comdat name.
8154
8155 FIXME: resolve_unique_section needs to deal better with
8156 decls with both DECL_SECTION_NAME and DECL_ONE_ONLY. Once
8157 that is fixed, this if-else statement can be replaced with
8158 a single call to "switch_to_section (sect)". */
8159
static void
handle_vtv_comdat_section (section *sect, const_tree decl ATTRIBUTE_UNUSED)
{
#if defined (OBJECT_FORMAT_ELF)
  /* Re-emit the named section with SECTION_LINKONCE set, passing the
     decl's DECL_NAME so the target hook can give each vtable_map
     variable its own comdat.  */
  targetm.asm_out.named_section (sect->named.name,
				 sect->named.common.flags
				 | SECTION_LINKONCE,
				 DECL_NAME (decl));
  in_section = sect;
#else
  /* Neither OBJECT_FORMAT_PE, nor OBJECT_FORMAT_COFF is set here.
     Therefore the following check is used.
     In case the target is PE or COFF a comdat group section
     is created, e.g. .vtable_map_vars$foo.  The linker places
     everything in .vtable_map_vars at the end.

     A fix could be made in
     gcc/config/i386/winnt.c: i386_pe_unique_section.  */
  if (TARGET_PECOFF)
    {
      char *name;

      if (TREE_CODE (DECL_NAME (decl)) == IDENTIFIER_NODE)
	name = ACONCAT ((sect->named.name, "$",
			 IDENTIFIER_POINTER (DECL_NAME (decl)), NULL));
      else
	/* NOTE(review): DECL_COMDAT_GROUP is applied to DECL_NAME (decl)
	   here rather than to DECL itself -- confirm which node actually
	   carries the comdat group in this case.  */
	name = ACONCAT ((sect->named.name, "$",
			 IDENTIFIER_POINTER (DECL_COMDAT_GROUP (DECL_NAME (decl))),
			 NULL));

      targetm.asm_out.named_section (name,
				     sect->named.common.flags
				     | SECTION_LINKONCE,
				     DECL_NAME (decl));
      in_section = sect;
    }
  else
    switch_to_section (sect);
#endif
}
8200
8201 #include "gt-varasm.h"
8202