1 /* Output variables, constants and external declarations, for GNU compiler.
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* This file handles generation of all the assembler code
22 *except* the instructions of a function.
23 This includes declarations of variables and their initial values.
24
25 We also output the assembler code for constants stored in memory
26 and are responsible for combining constants with the same value. */
27
28 #include "config.h"
29 #include "system.h"
30 #include "coretypes.h"
31 #include "backend.h"
32 #include "target.h"
33 #include "rtl.h"
34 #include "tree.h"
35 #include "predict.h"
36 #include "memmodel.h"
37 #include "tm_p.h"
38 #include "stringpool.h"
39 #include "regs.h"
40 #include "emit-rtl.h"
41 #include "cgraph.h"
42 #include "diagnostic-core.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "varasm.h"
46 #include "version.h"
47 #include "flags.h"
48 #include "stmt.h"
49 #include "expr.h"
50 #include "expmed.h"
51 #include "optabs.h"
52 #include "output.h"
53 #include "langhooks.h"
54 #include "debug.h"
55 #include "common/common-target.h"
56 #include "stringpool.h"
57 #include "attribs.h"
58 #include "asan.h"
59 #include "rtl-iter.h"
60 #include "file-prefix-map.h" /* remap_debug_filename() */
61 #include "alloc-pool.h"
62 #include "toplev.h"
63 #include "opts.h"
64
65 #ifdef XCOFF_DEBUGGING_INFO
66 #include "xcoffout.h" /* Needed for external data declarations. */
67 #endif
68
/* The (assembler) name of the first globally-visible object output.  */
extern GTY(()) const char *first_global_object_name;
extern GTY(()) const char *weak_global_object_name;

const char *first_global_object_name;
const char *weak_global_object_name;

/* Forward declarations for types used by the prototypes below.  */
class addr_const;
class constant_descriptor_rtx;
struct rtx_constant_pool;

/* Number of deferred constants, kept in the current function's RTL state.  */
#define n_deferred_constants (crtl->varasm.deferred_constants)
81
/* Number for making the label on the next
   constant that is stored in memory.  */

static GTY(()) int const_labelno;

/* Carry information from ASM_DECLARE_OBJECT_NAME
   to ASM_FINISH_DECLARE_OBJECT.  */

int size_directive_output;

/* The last decl for which assemble_variable was called,
   if it did ASM_DECLARE_OBJECT_NAME.
   If the last call to assemble_variable didn't do that,
   this holds 0.  */

tree last_assemble_variable_decl;

/* The following global variable indicates if the first basic block
   in a function belongs to the cold partition or not.  */

bool first_function_block_is_cold;

/* Whether we saw any functions with no_split_stack.  */

static bool saw_no_split_stack;

/* Forward declarations of the static helpers defined later in this file.  */
static const char *strip_reg_name (const char *);
static int contains_pointers_p (tree);
#ifdef ASM_OUTPUT_EXTERNAL
static bool incorporeal_function_p (tree);
#endif
static void decode_addr_const (tree, class addr_const *);
static hashval_t const_hash_1 (const tree);
static int compare_constant (const tree, const tree);
static void output_constant_def_contents (rtx);
static void output_addressed_constants (tree, int);
static unsigned HOST_WIDE_INT output_constant (tree, unsigned HOST_WIDE_INT,
					       unsigned int, bool, bool);
static void globalize_decl (tree);
static bool decl_readonly_section_1 (enum section_category);
#ifdef BSS_SECTION_ASM_OP
#ifdef ASM_OUTPUT_ALIGNED_BSS
static void asm_output_aligned_bss (FILE *, tree, const char *,
				    unsigned HOST_WIDE_INT, int)
     ATTRIBUTE_UNUSED;
#endif
#endif /* BSS_SECTION_ASM_OP */
static void mark_weak (tree);
static void output_constant_pool (const char *, tree);
static void handle_vtv_comdat_section (section *, const_tree);
132
/* Well-known sections, each one associated with some sort of *_ASM_OP.  */
section *text_section;
section *data_section;
section *readonly_data_section;
section *sdata_section;
section *ctors_section;
section *dtors_section;
section *bss_section;
section *sbss_section;

/* Various forms of common section.  All are guaranteed to be nonnull.  */
section *tls_comm_section;
section *comm_section;
section *lcomm_section;

/* A SECTION_NOSWITCH section used for declaring global BSS variables.
   May be null.  */
section *bss_noswitch_section;

/* The section that holds the main exception table, when known.  The section
   is set either by the target's init_sections hook or by the first call to
   switch_to_exception_section.  */
section *exception_section;

/* The section that holds the DWARF2 frame unwind information, when known.
   The section is set either by the target's init_sections hook or by the
   first call to switch_to_eh_frame_section.  */
section *eh_frame_section;

/* asm_out_file's current section.  This is NULL if no section has yet
   been selected or if we lose track of what the current section is.  */
section *in_section;

/* True if code for the current function is currently being directed
   at the cold section.  */
bool in_cold_section_p;

/* The following global holds the "function name" for the code in the
   cold section of a function, if hot/cold function splitting is enabled
   and there was actually code that went into the cold section.  A
   pseudo function name is needed for the cold section of code for some
   debugging tools that perform symbolization.  */
tree cold_function_name = NULL_TREE;

/* A linked list of all the unnamed sections.  */
static GTY(()) section *unnamed_sections;

/* Return a nonzero value if DECL has a section attribute.  */
#define IN_NAMED_SECTION(DECL) \
  (VAR_OR_FUNCTION_DECL_P (DECL) && DECL_SECTION_NAME (DECL) != NULL)
183
/* Hash-table traits for named sections, keyed by section name.  */
struct section_hasher : ggc_ptr_hash<section>
{
  typedef const char *compare_type;

  static hashval_t hash (section *);
  static bool equal (section *, const char *);
};

/* Hash table of named sections.  */
static GTY(()) hash_table<section_hasher> *section_htab;

/* Hash-table traits for object_blocks, keyed by their section.  */
struct object_block_hasher : ggc_ptr_hash<object_block>
{
  typedef const section *compare_type;

  static hashval_t hash (object_block *);
  static bool equal (object_block *, const section *);
};

/* A table of object_blocks, indexed by section.  */
static GTY(()) hash_table<object_block_hasher> *object_block_htab;

/* The next number to use for internal anchor labels.  */
static GTY(()) int anchor_labelno;

/* A pool of constants that can be shared between functions.  */
static GTY(()) struct rtx_constant_pool *shared_constant_pool;
211
212 /* Helper routines for maintaining section_htab. */
213
214 bool
equal(section * old,const char * new_name)215 section_hasher::equal (section *old, const char *new_name)
216 {
217 return strcmp (old->named.name, new_name) == 0;
218 }
219
/* Hash a named section by its name.  This agrees with hash_section's
   treatment of SECTION_NAMED sections below.  */
hashval_t
section_hasher::hash (section *old)
{
  return htab_hash_string (old->named.name);
}
225
226 /* Return a hash value for section SECT. */
227
228 static hashval_t
hash_section(section * sect)229 hash_section (section *sect)
230 {
231 if (sect->common.flags & SECTION_NAMED)
232 return htab_hash_string (sect->named.name);
233 return sect->common.flags & ~SECTION_DECLARED;
234 }
235
236 /* Helper routines for maintaining object_block_htab. */
237
/* Object blocks are keyed by section identity (pointer equality).  */
inline bool
object_block_hasher::equal (object_block *old, const section *new_section)
{
  return old->sect == new_section;
}
243
/* Hash an object_block by hashing the section it belongs to.  */
hashval_t
object_block_hasher::hash (object_block *old)
{
  return hash_section (old->sect);
}
249
250 /* Return a new unnamed section with the given fields. */
251
252 section *
get_unnamed_section(unsigned int flags,void (* callback)(const void *),const void * data)253 get_unnamed_section (unsigned int flags, void (*callback) (const void *),
254 const void *data)
255 {
256 section *sect;
257
258 sect = ggc_alloc<section> ();
259 sect->unnamed.common.flags = flags | SECTION_UNNAMED;
260 sect->unnamed.callback = callback;
261 sect->unnamed.data = data;
262 sect->unnamed.next = unnamed_sections;
263
264 unnamed_sections = sect;
265 return sect;
266 }
267
268 /* Return a SECTION_NOSWITCH section with the given fields. */
269
270 static section *
get_noswitch_section(unsigned int flags,noswitch_section_callback callback)271 get_noswitch_section (unsigned int flags, noswitch_section_callback callback)
272 {
273 section *sect;
274
275 sect = ggc_alloc<section> ();
276 sect->noswitch.common.flags = flags | SECTION_NOSWITCH;
277 sect->noswitch.callback = callback;
278
279 return sect;
280 }
281
282 /* Return the named section structure associated with NAME. Create
283 a new section with the given fields if no such structure exists.
284 When NOT_EXISTING, then fail if the section already exists. Return
285 the existing section if the SECTION_RETAIN bit doesn't match. Set
   the SECTION_WRITE | SECTION_RELRO bits on the existing section
287 if one of the section flags is SECTION_WRITE | SECTION_RELRO and the
288 other has none of these flags in named sections and either the section
289 hasn't been declared yet or has been declared as writable. */
290
section *
get_section (const char *name, unsigned int flags, tree decl,
	     bool not_existing)
{
  section *sect, **slot;

  /* Look NAME up in the named-section table, inserting a slot if it
     isn't there yet.  */
  slot = section_htab->find_slot_with_hash (name, htab_hash_string (name),
					    INSERT);
  flags |= SECTION_NAMED;
  /* A decl carrying the "retain" attribute asks the assembler/linker to
     keep the section even if nothing references it.  */
  if (decl != nullptr
      && DECL_P (decl)
      && lookup_attribute ("retain", DECL_ATTRIBUTES (decl)))
    flags |= SECTION_RETAIN;
  if (*slot == NULL)
    {
      /* First time this name is seen: create the section record.  */
      sect = ggc_alloc<section> ();
      sect->named.common.flags = flags;
      sect->named.name = ggc_strdup (name);
      sect->named.decl = decl;
      *slot = sect;
    }
  else
    {
      if (not_existing)
	internal_error ("Section already exists: %qs", name);

      sect = *slot;
      /* It is fine if one of the sections has SECTION_NOTYPE as long as
	 the other has none of the contrary flags (see the logic at the end
	 of default_section_type_flags, below).  Unify both views onto
	 SECTION_NOTYPE so the flag comparison below doesn't trip.  */
      if (((sect->common.flags ^ flags) & SECTION_NOTYPE)
	  && !((sect->common.flags | flags)
	       & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE
		  | (HAVE_COMDAT_GROUP ? SECTION_LINKONCE : 0))))
	{
	  sect->common.flags |= SECTION_NOTYPE;
	  flags |= SECTION_NOTYPE;
	}
      /* Flags disagree and we haven't already reported a conflict for
	 this section (SECTION_OVERRIDE).  */
      if ((sect->common.flags & ~SECTION_DECLARED) != flags
	  && ((sect->common.flags | flags) & SECTION_OVERRIDE) == 0)
	{
	  /* It is fine if one of the section flags is
	     SECTION_WRITE | SECTION_RELRO and the other has none of these
	     flags (i.e. read-only) in named sections and either the
	     section hasn't been declared yet or has been declared as writable.
	     In that case just make sure the resulting flags are
	     SECTION_WRITE | SECTION_RELRO, ie. writable only because of
	     relocations.  */
	  if (((sect->common.flags ^ flags) & (SECTION_WRITE | SECTION_RELRO))
	      == (SECTION_WRITE | SECTION_RELRO)
	      && (sect->common.flags
		  & ~(SECTION_DECLARED | SECTION_WRITE | SECTION_RELRO))
	      == (flags & ~(SECTION_WRITE | SECTION_RELRO))
	      && ((sect->common.flags & SECTION_DECLARED) == 0
		  || (sect->common.flags & SECTION_WRITE)))
	    {
	      sect->common.flags |= (SECTION_WRITE | SECTION_RELRO);
	      return sect;
	    }
	  /* If the SECTION_RETAIN bit doesn't match, return and switch
	     to a new section later.  */
	  if ((sect->common.flags & SECTION_RETAIN)
	      != (flags & SECTION_RETAIN))
	    return sect;
	  /* Sanity check user variables for flag changes.  Report the
	     conflicting decl when we know it.  */
	  if (sect->named.decl != NULL
	      && DECL_P (sect->named.decl)
	      && decl != sect->named.decl)
	    {
	      if (decl != NULL && DECL_P (decl))
		error ("%+qD causes a section type conflict with %qD",
		       decl, sect->named.decl);
	      else
		error ("section type conflict with %qD", sect->named.decl);
	      inform (DECL_SOURCE_LOCATION (sect->named.decl),
		      "%qD was declared here", sect->named.decl);
	    }
	  else if (decl != NULL && DECL_P (decl))
	    error ("%+qD causes a section type conflict", decl);
	  else
	    error ("section type conflict");
	  /* Make sure we don't error about one section multiple times.  */
	  sect->common.flags |= SECTION_OVERRIDE;
	}
    }
  return sect;
}
378
379 /* Return true if the current compilation mode benefits from having
380 objects grouped into blocks. */
381
/* Object blocks are only useful when section anchors are enabled;
   -fsection-anchors is the sole trigger.  */
static bool
use_object_blocks_p (void)
{
  return flag_section_anchors;
}
387
388 /* Return the object_block structure for section SECT. Create a new
389 structure if we haven't created one already. Return null if SECT
390 itself is null. Return also null for mergeable sections since
391 section anchors can't be used in mergeable sections anyway,
392 because the linker might move objects around, and using the
393 object blocks infrastructure in that case is both a waste and a
394 maintenance burden. */
395
396 static struct object_block *
get_block_for_section(section * sect)397 get_block_for_section (section *sect)
398 {
399 struct object_block *block;
400
401 if (sect == NULL)
402 return NULL;
403
404 if (sect->common.flags & SECTION_MERGE)
405 return NULL;
406
407 object_block **slot
408 = object_block_htab->find_slot_with_hash (sect, hash_section (sect),
409 INSERT);
410 block = *slot;
411 if (block == NULL)
412 {
413 block = ggc_cleared_alloc<object_block> ();
414 block->sect = sect;
415 *slot = block;
416 }
417 return block;
418 }
419
420 /* Create a symbol with label LABEL and place it at byte offset
421 OFFSET in BLOCK. OFFSET can be negative if the symbol's offset
422 is not yet known. LABEL must be a garbage-collected string. */
423
static rtx
create_block_symbol (const char *label, struct object_block *block,
		     HOST_WIDE_INT offset)
{
  rtx symbol;
  unsigned int size;

  /* Create the extended SYMBOL_REF: a plain rtx header followed by the
     block_symbol trailer, allocated as one raw GC object.  */
  size = RTX_HDR_SIZE + sizeof (struct block_symbol);
  symbol = (rtx) ggc_internal_alloc (size);

  /* Initialize the normal SYMBOL_REF fields.  Zeroing first keeps any
     padding and unset operands in a defined state.  */
  memset (symbol, 0, size);
  PUT_CODE (symbol, SYMBOL_REF);
  PUT_MODE (symbol, Pmode);
  XSTR (symbol, 0) = label;
  /* Mark the symbol so SYMBOL_REF_BLOCK/SYMBOL_REF_BLOCK_OFFSET are valid.  */
  SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_HAS_BLOCK_INFO;

  /* Initialize the block_symbol stuff.  OFFSET may be negative when the
     final position within BLOCK is not yet known.  */
  SYMBOL_REF_BLOCK (symbol) = block;
  SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;

  return symbol;
}
448
449 /* Return a section with a particular name and with whatever SECTION_*
450 flags section_type_flags deems appropriate. The name of the section
451 is taken from NAME if nonnull, otherwise it is taken from DECL's
452 DECL_SECTION_NAME. DECL is the decl associated with the section
453 (see the section comment for details) and RELOC is as for
454 section_type_flags. */
455
456 section *
get_named_section(tree decl,const char * name,int reloc)457 get_named_section (tree decl, const char *name, int reloc)
458 {
459 unsigned int flags;
460
461 if (name == NULL)
462 {
463 gcc_assert (decl && DECL_P (decl) && DECL_SECTION_NAME (decl));
464 name = DECL_SECTION_NAME (decl);
465 }
466
467 flags = targetm.section_type_flags (decl, name, reloc);
468 return get_section (name, flags, decl);
469 }
470
471 /* Worker for resolve_unique_section. */
472
/* Worker for resolve_unique_section: flag N as having an implicitly
   chosen section.  Always returns false so the symbol-table walk
   continues over all aliases.  */
static bool
set_implicit_section (struct symtab_node *n, void *data ATTRIBUTE_UNUSED)
{
  n->implicit_section = true;
  return false;
}
479
480 /* If required, set DECL_SECTION_NAME to a unique name. */
481
void
resolve_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED,
			int flag_function_or_data_sections)
{
  /* Only act when DECL has no section yet, the target supports named
     sections, and something actually calls for a unique one:
     -ffunction-sections/-fdata-sections, a "retain" attribute, or
     COMDAT membership.  */
  if (DECL_SECTION_NAME (decl) == NULL
      && targetm_common.have_named_sections
      && (flag_function_or_data_sections
	  || lookup_attribute ("retain", DECL_ATTRIBUTES (decl))
	  || DECL_COMDAT_GROUP (decl)))
    {
      targetm.asm_out.unique_section (decl, reloc);
      /* Record on DECL and all its aliases that the section was chosen
	 implicitly rather than by the user.  */
      if (DECL_SECTION_NAME (decl))
	symtab_node::get (decl)->call_for_symbol_and_aliases
	  (set_implicit_section, NULL, true);
    }
}
498
499 #ifdef BSS_SECTION_ASM_OP
500
501 #ifdef ASM_OUTPUT_ALIGNED_BSS
502
503 /* Utility function for targets to use in implementing
504 ASM_OUTPUT_ALIGNED_BSS.
505 ??? It is believed that this function will work in most cases so such
506 support is localized here. */
507
static void
asm_output_aligned_bss (FILE *file, tree decl ATTRIBUTE_UNUSED,
			const char *name, unsigned HOST_WIDE_INT size,
			int align)
{
  switch_to_section (bss_section);
  /* ALIGN arrives in bits; the directive wants a log2 byte count.  */
  ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
#ifdef ASM_DECLARE_OBJECT_NAME
  last_assemble_variable_decl = decl;
  ASM_DECLARE_OBJECT_NAME (file, name, decl);
#else
  /* Standard thing is just output label for the object.  */
  ASM_OUTPUT_LABEL (file, name);
#endif /* ASM_DECLARE_OBJECT_NAME */
  /* Reserve at least one byte so zero-sized objects get distinct
     addresses.  */
  ASM_OUTPUT_SKIP (file, size ? size : 1);
}
524
525 #endif
526
527 #endif /* BSS_SECTION_ASM_OP */
528
529 #ifndef USE_SELECT_SECTION_FOR_FUNCTIONS
530 /* Return the hot section for function DECL. Return text_section for
531 null DECLs. */
532
533 static section *
hot_function_section(tree decl)534 hot_function_section (tree decl)
535 {
536 if (decl != NULL_TREE
537 && DECL_SECTION_NAME (decl) != NULL
538 && targetm_common.have_named_sections)
539 return get_named_section (decl, NULL, 0);
540 else
541 return text_section;
542 }
543 #endif
544
545 /* Return section for TEXT_SECTION_NAME if DECL or DECL_SECTION_NAME (DECL)
546 is NULL.
547
   When DECL_SECTION_NAME is non-NULL, it is an implicit section, and
   NAMED_SECTION_SUFFIX is non-NULL, produce a section whose name is the
   concatenation of the decl's section name with NAMED_SECTION_SUFFIX.
   Otherwise produce "TEXT_SECTION_NAME.IMPLICIT_NAME".  */
552
section *
get_named_text_section (tree decl,
			const char *text_section_name,
			const char *named_section_suffix)
{
  if (decl && DECL_SECTION_NAME (decl))
    {
      if (named_section_suffix)
	{
	  /* Build "<stripped decl section name><suffix>".  The name is
	     copied to a writable buffer before stripping the target's
	     encoding.  */
	  const char *dsn = DECL_SECTION_NAME (decl);
	  const char *stripped_name;
	  char *name, *buffer;

	  name = (char *) alloca (strlen (dsn) + 1);
	  memcpy (name, dsn,
		  strlen (dsn) + 1);

	  stripped_name = targetm.strip_name_encoding (name);

	  buffer = ACONCAT ((stripped_name, named_section_suffix, NULL));
	  return get_named_section (decl, buffer, 0);
	}
      else if (symtab_node::get (decl)->implicit_section)
	{
	  const char *name;

	  /* Do not try to split gnu_linkonce functions.  This gets somewhat
	     slippery.  */
	  if (DECL_COMDAT_GROUP (decl) && !HAVE_COMDAT_GROUP)
	    return NULL;
	  /* Produce "<text_section_name>.<assembler name>".  */
	  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
	  name = targetm.strip_name_encoding (name);
	  return get_named_section (decl, ACONCAT ((text_section_name, ".",
						    name, NULL)), 0);
	}
      else
	/* User-specified section name with no suffix requested: the
	   caller must respect the user's choice, so no section here.  */
	return NULL;
    }
  return get_named_section (decl, text_section_name, 0);
}
593
594 /* Choose named function section based on its frequency. */
595
section *
default_function_section (tree decl, enum node_frequency freq,
			  bool startup, bool exit)
{
#if defined HAVE_LD_EH_GC_SECTIONS && defined HAVE_LD_EH_GC_SECTIONS_BUG
  /* Old GNU linkers have buggy --gc-section support, which sometimes
     results in .gcc_except_table* sections being garbage collected.  */
  if (decl
      && symtab_node::get (decl)->implicit_section)
    return NULL;
#endif

  if (!flag_reorder_functions
      || !targetm_common.have_named_sections)
    return NULL;
  /* Startup code should go to startup subsection unless it is
     unlikely executed (this happens especially with function splitting
     where we can split away unnecessary parts of static constructors).  */
  if (startup && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
    {
      /* During LTO the tp_first_run profiling will naturally place all
	 initialization code first.  Using separate section is counter-productive
	 because startup only code may call functions which are no longer
	 startup only.  */
      if (!in_lto_p
	  || !cgraph_node::get (decl)->tp_first_run
	  || !opt_for_fn (decl, flag_profile_reorder_functions))
	return get_named_text_section (decl, ".text.startup", NULL);
      else
	return NULL;
    }

  /* Similarly for exit.  */
  if (exit && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
    return get_named_text_section (decl, ".text.exit", NULL);

  /* Group cold functions together, similarly for hot code.
     Normal-frequency code stays in the default text section.  */
  switch (freq)
    {
    case NODE_FREQUENCY_UNLIKELY_EXECUTED:
      return get_named_text_section (decl, ".text.unlikely", NULL);
    case NODE_FREQUENCY_HOT:
      return get_named_text_section (decl, ".text.hot", NULL);
    default:
      return NULL;
    }
}
644
645 /* Return the section for function DECL.
646
647 If DECL is NULL_TREE, return the text section. We can be passed
648 NULL_TREE under some circumstances by dbxout.c at least.
649
650 If FORCE_COLD is true, return cold function section ignoring
651 the frequency info of cgraph_node. */
652
static section *
function_section_1 (tree decl, bool force_cold)
{
  section *section = NULL;
  enum node_frequency freq = NODE_FREQUENCY_NORMAL;
  bool startup = false, exit = false;

  /* Pull frequency and startup/exit hints from the cgraph node, when
     one exists for DECL.  */
  if (decl)
    {
      struct cgraph_node *node = cgraph_node::get (decl);

      if (node)
	{
	  freq = node->frequency;
	  startup = node->only_called_at_startup;
	  exit = node->only_called_at_exit;
	}
    }
  /* FORCE_COLD overrides whatever the profile says.  */
  if (force_cold)
    freq = NODE_FREQUENCY_UNLIKELY_EXECUTED;

#ifdef USE_SELECT_SECTION_FOR_FUNCTIONS
  if (decl != NULL_TREE
      && DECL_SECTION_NAME (decl) != NULL)
    {
      /* Let the target's function_section hook refine the choice; fall
	 back to the decl's own named section.  */
      if (targetm.asm_out.function_section)
	section = targetm.asm_out.function_section (decl, freq,
						    startup, exit);
      if (section)
	return section;
      return get_named_section (decl, NULL, 0);
    }
  else
    /* NOTE(review): this branch is also reached when DECL is NULL_TREE,
       in which case symtab_node::get (decl) would be passed a null tree --
       confirm that targets defining USE_SELECT_SECTION_FOR_FUNCTIONS never
       reach here with a null DECL.  */
    return targetm.asm_out.select_section
      (decl, freq == NODE_FREQUENCY_UNLIKELY_EXECUTED,
       symtab_node::get (decl)->definition_alignment ());
#else
  if (targetm.asm_out.function_section)
    section = targetm.asm_out.function_section (decl, freq, startup, exit);
  if (section)
    return section;
  return hot_function_section (decl);
#endif
}
697
698 /* Return the section for function DECL.
699
700 If DECL is NULL_TREE, return the text section. We can be passed
701 NULL_TREE under some circumstances by dbxout.c at least. */
702
703 section *
function_section(tree decl)704 function_section (tree decl)
705 {
706 /* Handle cases where function splitting code decides
707 to put function entry point into unlikely executed section
708 despite the fact that the function itself is not cold
709 (i.e. it is called rarely but contains a hot loop that is
710 better to live in hot subsection for the code locality). */
711 return function_section_1 (decl,
712 first_function_block_is_cold);
713 }
714
715 /* Return the section for the current function, take IN_COLD_SECTION_P
716 into account. */
717
/* Return the section for the current function, taking IN_COLD_SECTION_P
   into account so the cold partition is honored.  */
section *
current_function_section (void)
{
  return function_section_1 (current_function_decl, in_cold_section_p);
}
723
724 /* Tell assembler to switch to unlikely-to-be-executed text section. */
725
/* Tell assembler to switch to unlikely-to-be-executed text section:
   the current function's section with the cold partition forced.  */
section *
unlikely_text_section (void)
{
  return function_section_1 (current_function_decl, true);
}
731
732 /* When called within a function context, return true if the function
733 has been assigned a cold text section and if SECT is that section.
734 When called outside a function context, return true if SECT is the
735 default cold section. */
736
737 bool
unlikely_text_section_p(section * sect)738 unlikely_text_section_p (section *sect)
739 {
740 return sect == function_section_1 (current_function_decl, true);
741 }
742
743 /* Switch to the other function partition (if inside of hot section
744 into cold section, otherwise into the hot section). */
745
/* Switch to the other function partition (if inside of hot section
   into cold section, otherwise into the hot section).  Flips the
   partition flag, then re-selects the section for the new partition.  */
void
switch_to_other_text_partition (void)
{
  in_cold_section_p = !in_cold_section_p;
  switch_to_section (current_function_section ());
}
752
753 /* Return the read-only or relocated read-only data section
754 associated with function DECL. */
755
section *
default_function_rodata_section (tree decl, bool relocatable)
{
  const char* sname;
  unsigned int flags;

  flags = 0;

  /* Relocatable read-only data lives in relro: writable until the
     dynamic loader applies relocations, read-only afterwards.  */
  if (relocatable)
    {
      sname = ".data.rel.ro.local";
      flags = (SECTION_WRITE | SECTION_RELRO);
    }
  else
    sname = ".rodata";

  if (decl && DECL_SECTION_NAME (decl))
    {
      const char *name = DECL_SECTION_NAME (decl);

      if (DECL_COMDAT_GROUP (decl) && HAVE_COMDAT_GROUP)
	{
	  /* Append the function section's suffix (everything from its
	     first interior '.') to SNAME, e.g. ".text.foo" ->
	     ".rodata.foo"; with no interior dot, append the whole name.  */
	  const char *dot;
	  size_t len;
	  char* rname;

	  dot = strchr (name + 1, '.');
	  if (!dot)
	    dot = name;
	  len = strlen (dot) + strlen (sname) + 1;
	  rname = (char *) alloca (len);

	  strcpy (rname, sname);
	  strcat (rname, dot);
	  return get_section (rname, (SECTION_LINKONCE | flags), decl);
	}
      /* For .gnu.linkonce.t.foo we want to use .gnu.linkonce.r.foo or
	 .gnu.linkonce.d.rel.ro.local.foo if the jump table is relocatable.  */
      else if (DECL_COMDAT_GROUP (decl)
	       && strncmp (name, ".gnu.linkonce.t.", 16) == 0)
	{
	  size_t len;
	  char *rname;

	  if (relocatable)
	    {
	      /* NAME + 15 points at the trailing ".foo" part.  */
	      len = strlen (name) + strlen (".rel.ro.local") + 1;
	      rname = (char *) alloca (len);

	      strcpy (rname, ".gnu.linkonce.d.rel.ro.local");
	      strcat (rname, name + 15);
	    }
	  else
	    {
	      /* Copy the name and overwrite the 't' (index 14) with 'r',
		 giving ".gnu.linkonce.r.foo".  */
	      len = strlen (name) + 1;
	      rname = (char *) alloca (len);

	      memcpy (rname, name, len);
	      rname[14] = 'r';
	    }
	  return get_section (rname, (SECTION_LINKONCE | flags), decl);
	}
      /* For .text.foo we want to use .rodata.foo.  */
      else if (flag_function_sections && flag_data_sections
	       && strncmp (name, ".text.", 6) == 0)
	{
	  /* Splice SNAME onto the ".foo" tail (NAME + 5 keeps the dot).  */
	  size_t len = strlen (name) + 1;
	  char *rname = (char *) alloca (len + strlen (sname) - 5);

	  memcpy (rname, sname, strlen (sname));
	  memcpy (rname + strlen (sname), name + 5, len - 5);
	  return get_section (rname, flags, decl);
	}
    }

  /* No per-function section: use the generic relro section or the
     shared read-only data section.  */
  if (relocatable)
    return get_section (sname, flags, decl);
  else
    return readonly_data_section;
}
836
837 /* Return the read-only data section associated with function DECL
838 for targets where that section should be always the single
839 readonly data section. */
840
/* Return the read-only data section associated with function DECL
   for targets where that section should always be the single
   readonly data section.  Both parameters are intentionally unused.  */
section *
default_no_function_rodata_section (tree, bool)
{
  return readonly_data_section;
}
846
847 /* A subroutine of mergeable_string_section and mergeable_constant_section. */
848
849 static const char *
function_mergeable_rodata_prefix(void)850 function_mergeable_rodata_prefix (void)
851 {
852 section *s = targetm.asm_out.function_rodata_section (current_function_decl,
853 false);
854 if (SECTION_STYLE (s) == SECTION_NAMED)
855 return s->named.name;
856 else
857 return targetm.asm_out.mergeable_rodata_prefix;
858 }
859
860 /* Return the section to use for string merging. */
861
static section *
mergeable_string_section (tree decl ATTRIBUTE_UNUSED,
			  unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
			  unsigned int flags ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT len;

  /* Only STRING_CSTs of array type, with modest alignment and a size
     matching their declared length, are candidates for SHF_MERGE.  */
  if (HAVE_GAS_SHF_MERGE && flag_merge_constants
      && TREE_CODE (decl) == STRING_CST
      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
      && align <= 256
      && (len = int_size_in_bytes (TREE_TYPE (decl))) > 0
      && TREE_STRING_LENGTH (decl) == len)
    {
      scalar_int_mode mode;
      unsigned int modesize;
      const char *str;
      HOST_WIDE_INT i;
      int j, unit;
      const char *prefix = function_mergeable_rodata_prefix ();
      char *name = (char *) alloca (strlen (prefix) + 30);

      mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (TREE_TYPE (decl)));
      modesize = GET_MODE_BITSIZE (mode);
      /* The element size must be a power of two between 8 and 256 bits.  */
      if (modesize >= 8 && modesize <= 256
	  && (modesize & (modesize - 1)) == 0)
	{
	  if (align < modesize)
	    align = modesize;

	  /* Without aligned-SHF_MERGE assembler support, only byte
	     alignment can be expressed; larger alignments punt.  */
	  if (!HAVE_LD_ALIGNED_SHF_MERGE && align > 8)
	    return readonly_data_section;

	  str = TREE_STRING_POINTER (decl);
	  unit = GET_MODE_SIZE (mode);

	  /* Check for embedded NUL characters.  */
	  for (i = 0; i < len; i += unit)
	    {
	      for (j = 0; j < unit; j++)
		if (str[i + j] != '\0')
		  break;
	      if (j == unit)
		break;
	    }
	  /* Mergeable strings must contain exactly one terminating NUL
	     element, at the very end.  */
	  if (i == len - unit || (unit == 1 && i == len))
	    {
	      sprintf (name, "%s.str%d.%d", prefix,
		       modesize / 8, (int) (align / 8));
	      /* The entity size is encoded in the low SECTION_ENTSIZE
		 bits of the flags.  */
	      flags |= (modesize / 8) | SECTION_MERGE | SECTION_STRINGS;
	      return get_section (name, flags, NULL);
	    }
	}
    }

  return readonly_data_section;
}
919
920 /* Return the section to use for constant merging. */
921
section *
mergeable_constant_section (machine_mode mode ATTRIBUTE_UNUSED,
			    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
			    unsigned int flags ATTRIBUTE_UNUSED)
{
  /* A constant is mergeable when the assembler supports SHF_MERGE, the
     mode is a fixed-size one no wider than its alignment, and the
     alignment is a power of two between 8 and 256 bits (byte alignment
     only, unless the linker handles aligned SHF_MERGE sections).  */
  if (HAVE_GAS_SHF_MERGE && flag_merge_constants
      && mode != VOIDmode
      && mode != BLKmode
      && known_le (GET_MODE_BITSIZE (mode), align)
      && align >= 8
      && align <= 256
      && (align & (align - 1)) == 0
      && (HAVE_LD_ALIGNED_SHF_MERGE ? 1 : align == 8))
    {
      const char *prefix = function_mergeable_rodata_prefix ();
      char *name = (char *) alloca (strlen (prefix) + 30);

      /* E.g. ".rodata.cst8" for 8-byte constants; the entity size is
	 also encoded in the low SECTION_ENTSIZE bits of the flags.  */
      sprintf (name, "%s.cst%d", prefix, (int) (align / 8));
      flags |= (align / 8) | SECTION_MERGE;
      return get_section (name, flags, NULL);
    }
  return readonly_data_section;
}
945
946 /* Given NAME, a putative register name, discard any customary prefixes. */
947
static const char *
strip_reg_name (const char *name)
{
  /* Skip the target's register prefix, when one is defined.  */
#ifdef REGISTER_PREFIX
  size_t pfx_len = strlen (REGISTER_PREFIX);
  if (strncmp (name, REGISTER_PREFIX, pfx_len) == 0)
    name += pfx_len;
#endif
  /* Also accept the customary '%' and '#' sigils.  */
  if (*name == '%' || *name == '#')
    name++;
  return name;
}
959
960 /* The user has asked for a DECL to have a particular name. Set (or
961 change) it in such a way that we don't prefix an underscore to
962 it. */
963 void
set_user_assembler_name(tree decl,const char * name)964 set_user_assembler_name (tree decl, const char *name)
965 {
966 char *starred = (char *) alloca (strlen (name) + 2);
967 starred[0] = '*';
968 strcpy (starred + 1, name);
969 symtab->change_decl_assembler_name (decl, get_identifier (starred));
970 SET_DECL_RTL (decl, NULL_RTX);
971 }
972
973 /* Decode an `asm' spec for a declaration as a register name.
974 Return the register number, or -1 if nothing specified,
975 or -2 if the ASMSPEC is not `cc' or `memory' and is not recognized,
976 or -3 if ASMSPEC is `cc' and is not recognized,
977 or -4 if ASMSPEC is `memory' and is not recognized.
978 Accept an exact spelling or a decimal number.
979 Prefixes such as % are optional. */
980
int
decode_reg_name_and_count (const char *asmspec, int *pnregs)
{
  /* Presume just one register is clobbered.  */
  *pnregs = 1;

  if (asmspec != 0)
    {
      int i;

      /* Get rid of confusing prefixes.  */
      asmspec = strip_reg_name (asmspec);

      /* Allow a decimal number as a "register name".  After the loop,
	 I < 0 exactly when every character was a digit.  */
      for (i = strlen (asmspec) - 1; i >= 0; i--)
	if (! ISDIGIT (asmspec[i]))
	  break;
      if (asmspec[0] != 0 && i < 0)
	{
	  /* A valid hard register number must be in range and have a
	     name on this target; otherwise it is unrecognized (-2).  */
	  i = atoi (asmspec);
	  if (i < FIRST_PSEUDO_REGISTER && i >= 0 && reg_names[i][0])
	    return i;
	  else
	    return -2;
	}

      /* Next try the canonical register names, comparing with the
	 same prefix-stripping applied to both sides.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (reg_names[i][0]
	    && ! strcmp (asmspec, strip_reg_name (reg_names[i])))
	  return i;

#ifdef OVERLAPPING_REGISTER_NAMES
      /* Some targets define alternate names that denote a group of
	 registers; these also set the clobber count via *PNREGS.  */
      {
	static const struct
	{
	  const char *const name;
	  const int number;
	  const int nregs;
	} table[] = OVERLAPPING_REGISTER_NAMES;

	for (i = 0; i < (int) ARRAY_SIZE (table); i++)
	  if (table[i].name[0]
	      && ! strcmp (asmspec, table[i].name))
	    {
	      *pnregs = table[i].nregs;
	      return table[i].number;
	    }
      }
#endif /* OVERLAPPING_REGISTER_NAMES */

#ifdef ADDITIONAL_REGISTER_NAMES
      /* Plain aliases for single registers; only accepted when the
	 aliased register actually has a name on this target.  */
      {
	static const struct { const char *const name; const int number; } table[]
	  = ADDITIONAL_REGISTER_NAMES;

	for (i = 0; i < (int) ARRAY_SIZE (table); i++)
	  if (table[i].name[0]
	      && ! strcmp (asmspec, table[i].name)
	      && reg_names[table[i].number][0])
	    return table[i].number;
      }
#endif /* ADDITIONAL_REGISTER_NAMES */

      /* Unrecognized: distinguish "memory" (-4) and "cc" (-3) from
	 everything else (-2), per the function comment above.  */
      if (!strcmp (asmspec, "memory"))
	return -4;

      if (!strcmp (asmspec, "cc"))
	return -3;

      return -2;
    }

  /* No spec given at all.  */
  return -1;
}
1055
1056 int
decode_reg_name(const char * name)1057 decode_reg_name (const char *name)
1058 {
1059 int count;
1060 return decode_reg_name_and_count (name, &count);
1061 }
1062
1063
1064 /* Return true if DECL's initializer is suitable for a BSS section. */
1065
1066 bool
bss_initializer_p(const_tree decl,bool named)1067 bss_initializer_p (const_tree decl, bool named)
1068 {
1069 /* Do not put non-common constants into the .bss section, they belong in
1070 a readonly section, except when NAMED is true. */
1071 return ((!TREE_READONLY (decl) || DECL_COMMON (decl) || named)
1072 && (DECL_INITIAL (decl) == NULL
1073 /* In LTO we have no errors in program; error_mark_node is used
1074 to mark offlined constructors. */
1075 || (DECL_INITIAL (decl) == error_mark_node
1076 && !in_lto_p)
1077 || (flag_zero_initialized_in_bss
1078 && initializer_zerop (DECL_INITIAL (decl))
1079 /* A decl with the "persistent" attribute applied and
1080 explicitly initialized to 0 should not be treated as a BSS
1081 variable. */
1082 && !DECL_PERSISTENT_P (decl))));
1083 }
1084
1085 /* Compute the alignment of variable specified by DECL.
1086 DONT_OUTPUT_DATA is from assemble_variable. */
1087
void
align_variable (tree decl, bool dont_output_data)
{
  unsigned int align = DECL_ALIGN (decl);

  /* In the case for initialing an array whose length isn't specified,
     where we have not yet been able to do the layout,
     figure out the proper alignment now.  */
  if (dont_output_data && DECL_SIZE (decl) == 0
      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    align = MAX (align, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));

  /* Some object file formats have a maximum alignment which they support.
     In particular, a.out format supports a maximum alignment of 4.  */
  if (align > MAX_OFILE_ALIGNMENT)
    {
      error ("alignment of %q+D is greater than maximum object "
	     "file alignment %d", decl,
	     MAX_OFILE_ALIGNMENT/BITS_PER_UNIT);
      align = MAX_OFILE_ALIGNMENT;
    }

  /* A user-specified alignment (via the aligned attribute) is taken
     as-is; otherwise the target may adjust it below.  */
  if (! DECL_USER_ALIGN (decl))
    {
#ifdef DATA_ABI_ALIGNMENT
      unsigned int data_abi_align
	= DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
      /* For backwards compatibility, don't assume the ABI alignment for
	 TLS variables.  */
      if (! DECL_THREAD_LOCAL_P (decl) || data_abi_align <= BITS_PER_WORD)
	align = data_abi_align;
#endif

      /* On some machines, it is good to increase alignment sometimes.
	 But as DECL_ALIGN is used both for actually emitting the variable
	 and for code accessing the variable as guaranteed alignment, we
	 can only increase the alignment if it is a performance optimization
	 if the references to it must bind to the current definition.  */
      if (decl_binds_to_current_def_p (decl)
	  && !DECL_VIRTUAL_P (decl))
	{
#ifdef DATA_ALIGNMENT
	  unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
	  /* Don't increase alignment too much for TLS variables - TLS space
	     is too precious.  */
	  if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
	    align = data_align;
#endif
	  if (DECL_INITIAL (decl) != 0
	      /* In LTO we have no errors in program; error_mark_node is used
		 to mark offlined constructors.  */
	      && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
	    {
	      /* Let the target raise alignment based on the initializer,
		 e.g. to enable wider stores.  */
	      unsigned int const_align
		= targetm.constant_alignment (DECL_INITIAL (decl), align);
	      /* Don't increase alignment too much for TLS variables - TLS
		 space is too precious.  */
	      if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
		align = const_align;
	    }
	}
    }

  /* Reset the alignment in case we have made it tighter, so we can benefit
     from it in get_pointer_alignment.  */
  SET_DECL_ALIGN (decl, align);
}
1155
1156 /* Return DECL_ALIGN (decl), possibly increased for optimization purposes
1157 beyond what align_variable returned. */
1158
static unsigned int
get_variable_align (tree decl)
{
  unsigned int align = DECL_ALIGN (decl);

  /* For user aligned vars or static vars align_variable already did
     everything.  */
  if (DECL_USER_ALIGN (decl) || !TREE_PUBLIC (decl))
    return align;

#ifdef DATA_ABI_ALIGNMENT
  /* align_variable skipped the ABI alignment for TLS variables (for
     backwards compatibility); it is safe to apply here because this
     value is not stored back into DECL_ALIGN.  */
  if (DECL_THREAD_LOCAL_P (decl))
    align = DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
#endif

  /* For decls that bind to the current definition, align_variable
     did also everything, except for not assuming ABI required alignment
     of TLS variables.  For other vars, increase the alignment here
     as an optimization.  */
  if (!decl_binds_to_current_def_p (decl))
    {
      /* On some machines, it is good to increase alignment sometimes.  */
#ifdef DATA_ALIGNMENT
      unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
      /* Don't increase alignment too much for TLS variables - TLS space
	 is too precious.  */
      if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
	align = data_align;
#endif
      if (DECL_INITIAL (decl) != 0
	  /* In LTO we have no errors in program; error_mark_node is used
	     to mark offlined constructors.  */
	  && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
	{
	  unsigned int const_align
	    = targetm.constant_alignment (DECL_INITIAL (decl), align);
	  /* Don't increase alignment too much for TLS variables - TLS space
	     is too precious.  */
	  if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
	    align = const_align;
	}
    }

  return align;
}
1204
1205 /* Return the section into which the given VAR_DECL or CONST_DECL
1206 should be placed. PREFER_NOSWITCH_P is true if a noswitch
1207 section should be used wherever possible. */
1208
section *
get_variable_section (tree decl, bool prefer_noswitch_p)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  int reloc;
  /* Sections are chosen for the ultimate alias target, not the alias
     itself.  */
  varpool_node *vnode = varpool_node::get (decl);
  if (vnode)
    {
      vnode = vnode->ultimate_alias_target ();
      decl = vnode->decl;
    }

  if (TREE_TYPE (decl) != error_mark_node)
    as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

  /* We need the constructor to figure out reloc flag.  */
  if (vnode)
    vnode->get_constructor ();

  /* "retain"ed decls cannot use the common section, since each needs
     its own named section for SHF_GNU_RETAIN.  */
  if (DECL_COMMON (decl)
      && !lookup_attribute ("retain", DECL_ATTRIBUTES (decl)))
    {
      /* If the decl has been given an explicit section name, or it resides
	 in a non-generic address space, then it isn't common, and shouldn't
	 be handled as such.  */
      gcc_assert (DECL_SECTION_NAME (decl) == NULL
		  && ADDR_SPACE_GENERIC_P (as));
      if (DECL_THREAD_LOCAL_P (decl))
	return tls_comm_section;
      else if (TREE_PUBLIC (decl) && bss_initializer_p (decl))
	return comm_section;
    }

  /* Classify the relocations present in the initializer: 0 = none,
     otherwise flags describing local/global relocs (see
     compute_reloc_for_constant); an errored initializer is treated
     conservatively when the type contains pointers.  */
  if (DECL_INITIAL (decl) == error_mark_node)
    reloc = contains_pointers_p (TREE_TYPE (decl)) ? 3 : 0;
  else if (DECL_INITIAL (decl))
    reloc = compute_reloc_for_constant (DECL_INITIAL (decl));
  else
    reloc = 0;

  resolve_unique_section (decl, reloc, flag_data_sections);
  if (IN_NAMED_SECTION (decl))
    {
      section *sect = get_named_section (decl, NULL, reloc);

      /* A non-zero initializer placed in an explicit BSS-style section
	 is an error; neutralize the initializer so output can go on.  */
      if ((sect->common.flags & SECTION_BSS)
	  && !bss_initializer_p (decl, true))
	{
	  error_at (DECL_SOURCE_LOCATION (decl),
		    "only zero initializers are allowed in section %qs",
		    sect->named.name);
	  DECL_INITIAL (decl) = error_mark_node;
	}
      return sect;
    }

  /* Prefer the noswitch .lcomm/bss-style sections when permitted and
     the decl qualifies for BSS.  */
  if (ADDR_SPACE_GENERIC_P (as)
      && !DECL_THREAD_LOCAL_P (decl)
      && !DECL_NOINIT_P (decl)
      && !(prefer_noswitch_p && targetm.have_switchable_bss_sections)
      && bss_initializer_p (decl))
    {
      if (!TREE_PUBLIC (decl)
	  && !((flag_sanitize & SANITIZE_ADDRESS)
	       && asan_protect_global (decl)))
	return lcomm_section;
      if (bss_noswitch_section)
	return bss_noswitch_section;
    }

  /* Otherwise let the target pick the section.  */
  return targetm.asm_out.select_section (decl, reloc,
					 get_variable_align (decl));
}
1282
1283 /* Return the block into which object_block DECL should be placed. */
1284
1285 static struct object_block *
get_block_for_decl(tree decl)1286 get_block_for_decl (tree decl)
1287 {
1288 section *sect;
1289
1290 if (VAR_P (decl))
1291 {
1292 /* The object must be defined in this translation unit. */
1293 if (DECL_EXTERNAL (decl))
1294 return NULL;
1295
1296 /* There's no point using object blocks for something that is
1297 isolated by definition. */
1298 if (DECL_COMDAT_GROUP (decl))
1299 return NULL;
1300 }
1301
1302 /* We can only calculate block offsets if the decl has a known
1303 constant size. */
1304 if (DECL_SIZE_UNIT (decl) == NULL)
1305 return NULL;
1306 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (decl)))
1307 return NULL;
1308
1309 /* Find out which section should contain DECL. We cannot put it into
1310 an object block if it requires a standalone definition. */
1311 if (VAR_P (decl))
1312 align_variable (decl, 0);
1313 sect = get_variable_section (decl, true);
1314 if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
1315 return NULL;
1316
1317 return get_block_for_section (sect);
1318 }
1319
1320 /* Make sure block symbol SYMBOL is in block BLOCK. */
1321
1322 static void
change_symbol_block(rtx symbol,struct object_block * block)1323 change_symbol_block (rtx symbol, struct object_block *block)
1324 {
1325 if (block != SYMBOL_REF_BLOCK (symbol))
1326 {
1327 gcc_assert (SYMBOL_REF_BLOCK_OFFSET (symbol) < 0);
1328 SYMBOL_REF_BLOCK (symbol) = block;
1329 }
1330 }
1331
1332 /* Return true if it is possible to put DECL in an object_block. */
1333
1334 static bool
use_blocks_for_decl_p(tree decl)1335 use_blocks_for_decl_p (tree decl)
1336 {
1337 struct symtab_node *snode;
1338
1339 /* Only data DECLs can be placed into object blocks. */
1340 if (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
1341 return false;
1342
1343 /* DECL_INITIAL (decl) set to decl is a hack used for some decls that
1344 are never used from code directly and we never want object block handling
1345 for those. */
1346 if (DECL_INITIAL (decl) == decl)
1347 return false;
1348
1349 /* If this decl is an alias, then we don't want to emit a
1350 definition. */
1351 if (VAR_P (decl)
1352 && (snode = symtab_node::get (decl)) != NULL
1353 && snode->alias)
1354 return false;
1355
1356 return targetm.use_blocks_for_decl_p (decl);
1357 }
1358
1359 /* Follow the IDENTIFIER_TRANSPARENT_ALIAS chain starting at *ALIAS
1360 until we find an identifier that is not itself a transparent alias.
1361 Modify the alias passed to it by reference (and all aliases on the
1362 way to the ultimate target), such that they do not have to be
1363 followed again, and return the ultimate target of the alias
1364 chain. */
1365
static inline tree
ultimate_transparent_alias_target (tree *alias)
{
  tree target = *alias;

  if (IDENTIFIER_TRANSPARENT_ALIAS (target))
    {
      /* A transparent alias must point at its target through
	 TREE_CHAIN.  Recurse to flatten multi-step chains; each level
	 overwrites its link with the final target so subsequent walks
	 terminate in one step.  */
      gcc_assert (TREE_CHAIN (target));
      target = ultimate_transparent_alias_target (&TREE_CHAIN (target));
      /* The end of the chain must be a plain identifier: not itself an
	 alias, and with no further link.  */
      gcc_assert (! IDENTIFIER_TRANSPARENT_ALIAS (target)
		  && ! TREE_CHAIN (target));
      *alias = target;
    }

  return target;
}
1382
1383 /* Return true if REGNUM is mentioned in ELIMINABLE_REGS as a from
1384 register number. */
1385
1386 static bool
eliminable_regno_p(int regnum)1387 eliminable_regno_p (int regnum)
1388 {
1389 static const struct
1390 {
1391 const int from;
1392 const int to;
1393 } eliminables[] = ELIMINABLE_REGS;
1394 for (size_t i = 0; i < ARRAY_SIZE (eliminables); i++)
1395 if (regnum == eliminables[i].from)
1396 return true;
1397 return false;
1398 }
1399
1400 /* Create the DECL_RTL for a VAR_DECL or FUNCTION_DECL. DECL should
1401 have static storage duration. In other words, it should not be an
1402 automatic variable, including PARM_DECLs.
1403
1404 There is, however, one exception: this function handles variables
1405 explicitly placed in a particular register by the user.
1406
1407 This is never called for PARM_DECL nodes. */
1408
void
make_decl_rtl (tree decl)
{
  const char *name = 0;
  int reg_number;
  tree id;
  rtx x;

  /* Check that we are not being given an automatic variable.  */
  gcc_assert (TREE_CODE (decl) != PARM_DECL
	      && TREE_CODE (decl) != RESULT_DECL);

  /* A weak alias has TREE_PUBLIC set but not the other bits.  */
  gcc_assert (!VAR_P (decl)
	      || TREE_STATIC (decl)
	      || TREE_PUBLIC (decl)
	      || DECL_EXTERNAL (decl)
	      || DECL_REGISTER (decl));

  /* And that we were not given a type or a label.  */
  gcc_assert (TREE_CODE (decl) != TYPE_DECL
	      && TREE_CODE (decl) != LABEL_DECL);

  /* For a duplicate declaration, we can be called twice on the
     same DECL node.  Don't discard the RTL already made.  */
  if (DECL_RTL_SET_P (decl))
    {
      /* If the old RTL had the wrong mode, fix the mode.  */
      x = DECL_RTL (decl);
      if (GET_MODE (x) != DECL_MODE (decl))
	SET_DECL_RTL (decl, adjust_address_nv (x, DECL_MODE (decl), 0));

      if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
	return;

      /* ??? Another way to do this would be to maintain a hashed
	 table of such critters.  Instead of adding stuff to a DECL
	 to give certain attributes to it, we could use an external
	 hash map from DECL to set of attributes.  */

      /* Let the target reassign the RTL if it wants.
	 This is necessary, for example, when one machine specific
	 decl attribute overrides another.  */
      targetm.encode_section_info (decl, DECL_RTL (decl), false);

      /* If the symbol has a SYMBOL_REF_BLOCK field, update it based
	 on the new decl information.  */
      if (MEM_P (x)
	  && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (x, 0)))
	change_symbol_block (XEXP (x, 0), get_block_for_decl (decl));

      return;
    }

  /* If this variable belongs to the global constant pool, retrieve the
     pre-computed RTL or recompute it in LTO mode.  */
  if (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
    {
      SET_DECL_RTL (decl, output_constant_def (DECL_INITIAL (decl), 1));
      return;
    }

  /* From here on we are creating the RTL for the first time.  */
  id = DECL_ASSEMBLER_NAME (decl);
  name = IDENTIFIER_POINTER (id);

  /* A register variable whose name does not start with '*' never got
     an actual register name from the user.  */
  if (name[0] != '*' && TREE_CODE (decl) != FUNCTION_DECL
      && DECL_REGISTER (decl))
    {
      error ("register name not specified for %q+D", decl);
    }
  else if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
    {
      /* Skip the leading '*' to get the register spelling.  */
      const char *asmspec = name+1;
      machine_mode mode = DECL_MODE (decl);
      reg_number = decode_reg_name (asmspec);
      /* First detect errors in declaring global registers.  */
      if (reg_number == -1)
	error ("register name not specified for %q+D", decl);
      else if (reg_number < 0)
	error ("invalid register name for %q+D", decl);
      else if (mode == BLKmode)
	error ("data type of %q+D isn%'t suitable for a register",
	       decl);
      else if (!in_hard_reg_set_p (accessible_reg_set, mode, reg_number))
	error ("the register specified for %q+D cannot be accessed"
	       " by the current target", decl);
      else if (!in_hard_reg_set_p (operand_reg_set, mode, reg_number))
	error ("the register specified for %q+D is not general enough"
	       " to be used as a register variable", decl);
      else if (!targetm.hard_regno_mode_ok (reg_number, mode))
	error ("register specified for %q+D isn%'t suitable for data type",
	       decl);
      /* Registers that take part in frame/arg-pointer elimination
	 cannot be user register variables.  */
      else if (reg_number != HARD_FRAME_POINTER_REGNUM
	       && (reg_number == FRAME_POINTER_REGNUM
#ifdef RETURN_ADDRESS_POINTER_REGNUM
		   || reg_number == RETURN_ADDRESS_POINTER_REGNUM
#endif
		   || reg_number == ARG_POINTER_REGNUM)
	       && eliminable_regno_p (reg_number))
	error ("register specified for %q+D is an internal GCC "
	       "implementation detail", decl);
      /* Now handle properly declared static register variables.  */
      else
	{
	  int nregs;

	  if (DECL_INITIAL (decl) != 0 && TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = 0;
	      error ("global register variable has initial value");
	    }
	  if (TREE_THIS_VOLATILE (decl))
	    warning (OPT_Wvolatile_register_var,
		     "optimization may eliminate reads and/or "
		     "writes to register variables");

	  /* If the user specified one of the eliminables registers here,
	     e.g., FRAME_POINTER_REGNUM, we don't want to get this variable
	     confused with that register and be eliminated.  This usage is
	     somewhat suspect...  */

	  SET_DECL_RTL (decl, gen_raw_REG (mode, reg_number));
	  ORIGINAL_REGNO (DECL_RTL (decl)) = reg_number;
	  REG_USERVAR_P (DECL_RTL (decl)) = 1;

	  if (TREE_STATIC (decl))
	    {
	      /* Make this register global, so not usable for anything
		 else.  */
#ifdef ASM_DECLARE_REGISTER_GLOBAL
	      name = IDENTIFIER_POINTER (DECL_NAME (decl));
	      ASM_DECLARE_REGISTER_GLOBAL (asm_out_file, decl, reg_number, name);
#endif
	      nregs = hard_regno_nregs (reg_number, mode);
	      while (nregs > 0)
		globalize_reg (decl, reg_number + --nregs);
	    }

	  /* As a register variable, it has no section.  */
	  return;
	}
      /* Avoid internal errors from invalid register
	 specifications.  */
      SET_DECL_ASSEMBLER_NAME (decl, NULL_TREE);
      DECL_HARD_REGISTER (decl) = 0;
      /* Also avoid SSA inconsistencies by pretending this is an external
	 decl now.  */
      DECL_EXTERNAL (decl) = 1;
      return;
    }
  /* Now handle ordinary static variables and functions (in memory).
     Also handle vars declared register invalidly.  */
  else if (name[0] == '*')
    {
#ifdef REGISTER_PREFIX
      if (strlen (REGISTER_PREFIX) != 0)
	{
	  reg_number = decode_reg_name (name);
	  if (reg_number >= 0 || reg_number == -3)
	    error ("register name given for non-register variable %q+D", decl);
	}
#endif
    }

  /* Specifying a section attribute on a variable forces it into a
     non-.bss section, and thus it cannot be common.  */
  /* FIXME: In general this code should not be necessary because
     visibility pass is doing the same work.  But notice_global_symbol
     is called early and it needs to make DECL_RTL to get the name.
     we take care of recomputing the DECL_RTL after visibility is changed.  */
  if (VAR_P (decl)
      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
      && DECL_SECTION_NAME (decl) != NULL
      && DECL_INITIAL (decl) == NULL_TREE
      && DECL_COMMON (decl))
    DECL_COMMON (decl) = 0;

  /* Variables can't be both common and weak.  */
  if (VAR_P (decl) && DECL_WEAK (decl))
    DECL_COMMON (decl) = 0;

  /* Create the SYMBOL_REF, either as a block symbol within an
     object_block or as a free-standing symbol in the address space's
     pointer mode.  */
  if (use_object_blocks_p () && use_blocks_for_decl_p (decl))
    x = create_block_symbol (name, get_block_for_decl (decl), -1);
  else
    {
      machine_mode address_mode = Pmode;
      if (TREE_TYPE (decl) != error_mark_node)
	{
	  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
	  address_mode = targetm.addr_space.address_mode (as);
	}
      x = gen_rtx_SYMBOL_REF (address_mode, name);
    }
  SYMBOL_REF_WEAK (x) = DECL_WEAK (decl);
  SET_SYMBOL_REF_DECL (x, decl);

  /* Wrap the symbol in a MEM with the decl's mode; functions carry no
     memory attributes.  */
  x = gen_rtx_MEM (DECL_MODE (decl), x);
  if (TREE_CODE (decl) != FUNCTION_DECL)
    set_mem_attributes (x, decl, 1);
  SET_DECL_RTL (decl, x);

  /* Optionally set flags or add text to the name to record information
     such as that it is a function name.
     If the name is changed, the macro ASM_OUTPUT_LABELREF
     will have to know how to strip this information.  */
  targetm.encode_section_info (decl, DECL_RTL (decl), true);
}
1617
1618 /* Like make_decl_rtl, but inhibit creation of new alias sets when
1619 calling make_decl_rtl. Also, reset DECL_RTL before returning the
1620 rtl. */
1621
rtx
make_decl_rtl_for_debug (tree decl)
{
  unsigned int save_aliasing_flag;
  rtx rtl;

  if (DECL_RTL_SET_P (decl))
    return DECL_RTL (decl);

  /* Kludge alert!  Somewhere down the call chain, make_decl_rtl will
     call new_alias_set.  If running with -fcompare-debug, sometimes
     we do not want to create alias sets that will throw the alias
     numbers off in the comparison dumps.  So... clearing
     flag_strict_aliasing will keep new_alias_set() from creating a
     new set.  */
  save_aliasing_flag = flag_strict_aliasing;
  flag_strict_aliasing = 0;

  /* NOTE: this read is not redundant -- the DECL_RTL accessor lazily
     invokes make_decl_rtl when the RTL is not yet set (see the kludge
     comment above; we checked DECL_RTL_SET_P earlier).  */
  rtl = DECL_RTL (decl);
  /* Reset DECL_RTL back, as various parts of the compiler expects
     DECL_RTL set meaning it is actually going to be output.  */
  SET_DECL_RTL (decl, NULL);

  flag_strict_aliasing = save_aliasing_flag;
  return rtl;
}
1648
1649 /* Output a string of literal assembler code
1650 for an `asm' keyword used between functions. */
1651
1652 void
assemble_asm(tree string)1653 assemble_asm (tree string)
1654 {
1655 const char *p;
1656 app_enable ();
1657
1658 if (TREE_CODE (string) == ADDR_EXPR)
1659 string = TREE_OPERAND (string, 0);
1660
1661 p = TREE_STRING_POINTER (string);
1662 fprintf (asm_out_file, "%s%s\n", p[0] == '\t' ? "" : "\t", p);
1663 }
1664
1665 /* Write the address of the entity given by SYMBOL to SEC. */
void
assemble_addr_to_section (rtx symbol, section *sec)
{
  /* Emit SYMBOL's address as a pointer-sized, pointer-aligned datum
     in SEC.  */
  switch_to_section (sec);
  assemble_align (POINTER_SIZE);
  assemble_integer (symbol, POINTER_SIZE_UNITS, POINTER_SIZE, 1);
}
1673
1674 /* Return the numbered .ctors.N (if CONSTRUCTOR_P) or .dtors.N (if
1675 not) section for PRIORITY. */
1676 section *
get_cdtor_priority_section(int priority,bool constructor_p)1677 get_cdtor_priority_section (int priority, bool constructor_p)
1678 {
1679 /* Buffer conservatively large enough for the full range of a 32-bit
1680 int plus the text below. */
1681 char buf[18];
1682
1683 /* ??? This only works reliably with the GNU linker. */
1684 sprintf (buf, "%s.%.5u",
1685 constructor_p ? ".ctors" : ".dtors",
1686 /* Invert the numbering so the linker puts us in the proper
1687 order; constructors are run from right to left, and the
1688 linker sorts in increasing order. */
1689 MAX_INIT_PRIORITY - priority);
1690 return get_section (buf, SECTION_WRITE, NULL);
1691 }
1692
1693 void
default_named_section_asm_out_destructor(rtx symbol,int priority)1694 default_named_section_asm_out_destructor (rtx symbol, int priority)
1695 {
1696 section *sec;
1697
1698 if (priority != DEFAULT_INIT_PRIORITY)
1699 sec = get_cdtor_priority_section (priority,
1700 /*constructor_p=*/false);
1701 else
1702 sec = get_section (".dtors", SECTION_WRITE, NULL);
1703
1704 assemble_addr_to_section (symbol, sec);
1705 }
1706
1707 #ifdef DTORS_SECTION_ASM_OP
/* Record SYMBOL in the target's single .dtors section; PRIORITY is
   ignored because that section is unordered.  */
void
default_dtor_section_asm_out_destructor (rtx symbol,
					 int priority ATTRIBUTE_UNUSED)
{
  assemble_addr_to_section (symbol, dtors_section);
}
1714 #endif
1715
1716 void
default_named_section_asm_out_constructor(rtx symbol,int priority)1717 default_named_section_asm_out_constructor (rtx symbol, int priority)
1718 {
1719 section *sec;
1720
1721 if (priority != DEFAULT_INIT_PRIORITY)
1722 sec = get_cdtor_priority_section (priority,
1723 /*constructor_p=*/true);
1724 else
1725 sec = get_section (".ctors", SECTION_WRITE, NULL);
1726
1727 assemble_addr_to_section (symbol, sec);
1728 }
1729
1730 #ifdef CTORS_SECTION_ASM_OP
/* Record SYMBOL in the target's single .ctors section; PRIORITY is
   ignored because that section is unordered.  */
void
default_ctor_section_asm_out_constructor (rtx symbol,
					  int priority ATTRIBUTE_UNUSED)
{
  assemble_addr_to_section (symbol, ctors_section);
}
1737 #endif
1738
1739 /* CONSTANT_POOL_BEFORE_FUNCTION may be defined as an expression with
1740 a nonzero value if the constant pool should be output before the
1741 start of the function, or a zero value if the pool should output
1742 after the end of the function. The default is to put it before the
1743 start. */
1744
1745 #ifndef CONSTANT_POOL_BEFORE_FUNCTION
1746 #define CONSTANT_POOL_BEFORE_FUNCTION 1
1747 #endif
1748
1749 /* DECL is an object (either VAR_DECL or FUNCTION_DECL) which is going
1750 to be output to assembler.
1751 Set first_global_object_name and weak_global_object_name as appropriate. */
1752
void
notice_global_symbol (tree decl)
{
  const char **t = &first_global_object_name;

  /* Nothing to do once a name is recorded, or when DECL is not a
     trackable definition: it must be public, defined in this unit,
     named, not a hard-register variable, and either a function or a
     variable that is not an uninitialized common.  */
  if (first_global_object_name
      || !TREE_PUBLIC (decl)
      || DECL_EXTERNAL (decl)
      || !DECL_NAME (decl)
      || (VAR_P (decl) && DECL_HARD_REGISTER (decl))
      || (TREE_CODE (decl) != FUNCTION_DECL
	  && (!VAR_P (decl)
	      || (DECL_COMMON (decl)
		  && (DECL_INITIAL (decl) == 0
		      || DECL_INITIAL (decl) == error_mark_node)))))
    return;

  /* We win when global object is found, but it is useful to know about weak
     symbol as well so we can produce nicer unique names.  */
  if (DECL_WEAK (decl) || DECL_ONE_ONLY (decl) || flag_shlib)
    t = &weak_global_object_name;

  if (!*t)
    {
      /* Record the target-stripped assembler name of the ultimate
	 transparent-alias target.  */
      tree id = DECL_ASSEMBLER_NAME (decl);
      ultimate_transparent_alias_target (&id);
      *t = ggc_strdup (targetm.strip_name_encoding (IDENTIFIER_POINTER (id)));
    }
}
1782
1783 /* If not using flag_reorder_blocks_and_partition, decide early whether the
1784 current function goes into the cold section, so that targets can use
1785 current_function_section during RTL expansion. DECL describes the
1786 function. */
1787
1788 void
decide_function_section(tree decl)1789 decide_function_section (tree decl)
1790 {
1791 first_function_block_is_cold = false;
1792
1793 if (DECL_SECTION_NAME (decl))
1794 {
1795 struct cgraph_node *node = cgraph_node::get (current_function_decl);
1796 /* Calls to function_section rely on first_function_block_is_cold
1797 being accurate. */
1798 first_function_block_is_cold = (node
1799 && node->frequency
1800 == NODE_FREQUENCY_UNLIKELY_EXECUTED);
1801 }
1802
1803 in_cold_section_p = first_function_block_is_cold;
1804 }
1805
1806 /* Get the function's name, as described by its RTL. This may be
1807 different from the DECL_NAME name used in the source file. */
1808 const char *
get_fnname_from_decl(tree decl)1809 get_fnname_from_decl (tree decl)
1810 {
1811 rtx x = DECL_RTL (decl);
1812 gcc_assert (MEM_P (x));
1813 x = XEXP (x, 0);
1814 gcc_assert (GET_CODE (x) == SYMBOL_REF);
1815 return XSTR (x, 0);
1816 }
1817
1818 /* Output assembler code for the constant pool of a function and associated
1819 with defining the name of the function. DECL describes the function.
1820 NAME is the function's name. For the constant pool, we use the current
1821 constant pool data. */
1822
1823 void
assemble_start_function(tree decl,const char * fnname)1824 assemble_start_function (tree decl, const char *fnname)
1825 {
1826 int align;
1827 char tmp_label[100];
1828 bool hot_label_written = false;
1829
1830 if (crtl->has_bb_partition)
1831 {
1832 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTB", const_labelno);
1833 crtl->subsections.hot_section_label = ggc_strdup (tmp_label);
1834 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDB", const_labelno);
1835 crtl->subsections.cold_section_label = ggc_strdup (tmp_label);
1836 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTE", const_labelno);
1837 crtl->subsections.hot_section_end_label = ggc_strdup (tmp_label);
1838 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDE", const_labelno);
1839 crtl->subsections.cold_section_end_label = ggc_strdup (tmp_label);
1840 const_labelno++;
1841 cold_function_name = NULL_TREE;
1842 }
1843 else
1844 {
1845 crtl->subsections.hot_section_label = NULL;
1846 crtl->subsections.cold_section_label = NULL;
1847 crtl->subsections.hot_section_end_label = NULL;
1848 crtl->subsections.cold_section_end_label = NULL;
1849 }
1850
1851 /* The following code does not need preprocessing in the assembler. */
1852
1853 app_disable ();
1854
1855 if (CONSTANT_POOL_BEFORE_FUNCTION)
1856 output_constant_pool (fnname, decl);
1857
1858 align = symtab_node::get (decl)->definition_alignment ();
1859
1860 /* Make sure the not and cold text (code) sections are properly
1861 aligned. This is necessary here in the case where the function
1862 has both hot and cold sections, because we don't want to re-set
1863 the alignment when the section switch happens mid-function. */
1864
1865 if (crtl->has_bb_partition)
1866 {
1867 first_function_block_is_cold = false;
1868
1869 switch_to_section (unlikely_text_section ());
1870 assemble_align (align);
1871 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_label);
1872
1873 /* When the function starts with a cold section, we need to explicitly
1874 align the hot section and write out the hot section label.
1875 But if the current function is a thunk, we do not have a CFG. */
1876 if (!cfun->is_thunk
1877 && BB_PARTITION (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) == BB_COLD_PARTITION)
1878 {
1879 switch_to_section (text_section);
1880 assemble_align (align);
1881 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
1882 hot_label_written = true;
1883 first_function_block_is_cold = true;
1884 }
1885 in_cold_section_p = first_function_block_is_cold;
1886 }
1887
1888
1889 /* Switch to the correct text section for the start of the function. */
1890
1891 switch_to_section (function_section (decl), decl);
1892 if (crtl->has_bb_partition && !hot_label_written)
1893 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
1894
1895 /* Tell assembler to move to target machine's alignment for functions. */
1896 align = floor_log2 (align / BITS_PER_UNIT);
1897 if (align > 0)
1898 {
1899 ASM_OUTPUT_ALIGN (asm_out_file, align);
1900 }
1901
1902 /* Handle a user-specified function alignment.
1903 Note that we still need to align to DECL_ALIGN, as above,
1904 because ASM_OUTPUT_MAX_SKIP_ALIGN might not do any alignment at all. */
1905 if (! DECL_USER_ALIGN (decl)
1906 && align_functions.levels[0].log > align
1907 && optimize_function_for_speed_p (cfun))
1908 {
1909 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1910 int align_log = align_functions.levels[0].log;
1911 #endif
1912 int max_skip = align_functions.levels[0].maxskip;
1913 if (flag_limit_function_alignment && crtl->max_insn_address > 0
1914 && max_skip >= crtl->max_insn_address)
1915 max_skip = crtl->max_insn_address - 1;
1916
1917 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1918 ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file, align_log, max_skip);
1919 if (max_skip == align_functions.levels[0].maxskip)
1920 ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file,
1921 align_functions.levels[1].log,
1922 align_functions.levels[1].maxskip);
1923 #else
1924 ASM_OUTPUT_ALIGN (asm_out_file, align_functions.levels[0].log);
1925 #endif
1926 }
1927
1928 #ifdef ASM_OUTPUT_FUNCTION_PREFIX
1929 ASM_OUTPUT_FUNCTION_PREFIX (asm_out_file, fnname);
1930 #endif
1931
1932 if (!DECL_IGNORED_P (decl))
1933 (*debug_hooks->begin_function) (decl);
1934
1935 /* Make function name accessible from other files, if appropriate. */
1936
1937 if (TREE_PUBLIC (decl))
1938 {
1939 notice_global_symbol (decl);
1940
1941 globalize_decl (decl);
1942
1943 maybe_assemble_visibility (decl);
1944 }
1945
1946 if (DECL_PRESERVE_P (decl))
1947 targetm.asm_out.mark_decl_preserved (fnname);
1948
1949 unsigned short patch_area_size = crtl->patch_area_size;
1950 unsigned short patch_area_entry = crtl->patch_area_entry;
1951
1952 /* Emit the patching area before the entry label, if any. */
1953 if (patch_area_entry > 0)
1954 targetm.asm_out.print_patchable_function_entry (asm_out_file,
1955 patch_area_entry, true);
1956
1957 /* Do any machine/system dependent processing of the function name. */
1958 #ifdef ASM_DECLARE_FUNCTION_NAME
1959 ASM_DECLARE_FUNCTION_NAME (asm_out_file, fnname, current_function_decl);
1960 #else
1961 /* Standard thing is just output label for the function. */
1962 ASM_OUTPUT_FUNCTION_LABEL (asm_out_file, fnname, current_function_decl);
1963 #endif /* ASM_DECLARE_FUNCTION_NAME */
1964
1965 /* And the area after the label. Record it if we haven't done so yet. */
1966 if (patch_area_size > patch_area_entry)
1967 targetm.asm_out.print_patchable_function_entry (asm_out_file,
1968 patch_area_size
1969 - patch_area_entry,
1970 patch_area_entry == 0);
1971
1972 if (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (decl)))
1973 saw_no_split_stack = true;
1974 }
1975
/* Output assembler code associated with defining the size of the
   function.  DECL describes the function.  NAME is the function's name.
   Also closes out the hot/cold subsections: emits the cold-part size
   directive (if the target has one) and the end-of-section labels
   recorded by assemble_start_function.  */

void
assemble_end_function (tree decl, const char *fnname ATTRIBUTE_UNUSED)
{
#ifdef ASM_DECLARE_FUNCTION_SIZE
  /* We could have switched section in the middle of the function.  */
  if (crtl->has_bb_partition)
    switch_to_section (function_section (decl));
  ASM_DECLARE_FUNCTION_SIZE (asm_out_file, fnname, decl);
#endif
  if (! CONSTANT_POOL_BEFORE_FUNCTION)
    {
      output_constant_pool (fnname, decl);
      switch_to_section (function_section (decl)); /* need to switch back */
    }
  /* Output labels for end of hot/cold text sections (to be used by
     debug info.)  */
  if (crtl->has_bb_partition)
    {
      section *save_text_section;

      /* Remember where we were so the caller's notion of the current
	 section is preserved.  */
      save_text_section = in_section;
      switch_to_section (unlikely_text_section ());
#ifdef ASM_DECLARE_COLD_FUNCTION_SIZE
      /* cold_function_name is only set when a cold part was emitted.  */
      if (cold_function_name != NULL_TREE)
	ASM_DECLARE_COLD_FUNCTION_SIZE (asm_out_file,
					IDENTIFIER_POINTER (cold_function_name),
					decl);
#endif
      ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_end_label);
      /* The hot-section end label belongs in whichever section held the
	 hot code; when the function *started* cold, that is text_section.  */
      if (first_function_block_is_cold)
	switch_to_section (text_section);
      else
	switch_to_section (function_section (decl));
      ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_end_label);
      switch_to_section (save_text_section);
    }
}
2016
/* Assemble code to leave SIZE bytes of zeros.  SIZE of zero emits
   nothing.  */

void
assemble_zeros (unsigned HOST_WIDE_INT size)
{
  /* Do no output if -fsyntax-only.  */
  if (flag_syntax_only)
    return;

#ifdef ASM_NO_SKIP_IN_TEXT
  /* The `space' pseudo in the text section outputs nop insns rather than 0s,
     so we must output 0s explicitly in the text section.  */
  if (ASM_NO_SKIP_IN_TEXT && (in_section->common.flags & SECTION_CODE) != 0)
    {
      unsigned HOST_WIDE_INT i;
      /* Emit each zero byte individually via the integer path.  */
      for (i = 0; i < size; i++)
	assemble_integer (const0_rtx, 1, BITS_PER_UNIT, 1);
    }
  else
#endif
    /* Normal case: one skip directive covers the whole range.  */
    if (size > 0)
      ASM_OUTPUT_SKIP (asm_out_file, size);
}
2040
2041 /* Assemble an alignment pseudo op for an ALIGN-bit boundary. */
2042
2043 void
assemble_align(unsigned int align)2044 assemble_align (unsigned int align)
2045 {
2046 if (align > BITS_PER_UNIT)
2047 {
2048 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
2049 }
2050 }
2051
2052 /* Assemble a string constant with the specified C string as contents. */
2053
2054 void
assemble_string(const char * p,int size)2055 assemble_string (const char *p, int size)
2056 {
2057 int pos = 0;
2058 int maximum = 2000;
2059
2060 /* If the string is very long, split it up. */
2061
2062 while (pos < size)
2063 {
2064 int thissize = size - pos;
2065 if (thissize > maximum)
2066 thissize = maximum;
2067
2068 ASM_OUTPUT_ASCII (asm_out_file, p, thissize);
2069
2070 pos += thissize;
2071 p += thissize;
2072 }
2073 }
2074
2075
/* A noswitch_section_callback for lcomm_section.  Emit local-common
   storage for DECL, named NAME and SIZE bytes long; ROUNDED is SIZE
   rounded up to a BIGGEST_ALIGNMENT multiple.  Return true when the
   chosen target directive honors the decl's computed alignment, false
   when only the rounding of ROUNDED conveys alignment.  */

static bool
emit_local (tree decl ATTRIBUTE_UNUSED,
	    const char *name ATTRIBUTE_UNUSED,
	    unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
	    unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
#if defined ASM_OUTPUT_ALIGNED_DECL_LOCAL
  /* Best case: target takes an explicit alignment and sees the decl.  */
  unsigned int align = symtab_node::get (decl)->definition_alignment ();
  ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, decl, name,
				 size, align);
  return true;
#elif defined ASM_OUTPUT_ALIGNED_LOCAL
  /* Target takes an explicit alignment but not the decl.  */
  unsigned int align = symtab_node::get (decl)->definition_alignment ();
  ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, align);
  return true;
#else
  /* Fallback: alignment can only be implied by the rounded size.  */
  ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
  return false;
#endif
}
2098
/* A noswitch_section_callback for bss_noswitch_section.  Emit DECL
   (named NAME, SIZE bytes) into bss with its variable alignment.
   Only compiled on targets providing ASM_OUTPUT_ALIGNED_BSS; always
   reports that the requested alignment was honored.  */

#if defined ASM_OUTPUT_ALIGNED_BSS
static bool
emit_bss (tree decl ATTRIBUTE_UNUSED,
	  const char *name ATTRIBUTE_UNUSED,
	  unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
	  unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
  ASM_OUTPUT_ALIGNED_BSS (asm_out_file, decl, name, size,
			  get_variable_align (decl));
  return true;
}
#endif
2113
/* A noswitch_section_callback for comm_section.  Emit common storage
   for DECL (named NAME, SIZE bytes; ROUNDED is the aligned size).
   Return true when the target directive carries the alignment
   explicitly, false when only ROUNDED implies it.  */

static bool
emit_common (tree decl ATTRIBUTE_UNUSED,
	     const char *name ATTRIBUTE_UNUSED,
	     unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
	     unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
#if defined ASM_OUTPUT_ALIGNED_DECL_COMMON
  /* Best case: explicit alignment and the decl itself.  */
  ASM_OUTPUT_ALIGNED_DECL_COMMON (asm_out_file, decl, name,
				  size, get_variable_align (decl));
  return true;
#elif defined ASM_OUTPUT_ALIGNED_COMMON
  /* Explicit alignment without the decl.  */
  ASM_OUTPUT_ALIGNED_COMMON (asm_out_file, name, size,
			     get_variable_align (decl));
  return true;
#else
  /* Fallback: alignment is implied only by the rounded size.  */
  ASM_OUTPUT_COMMON (asm_out_file, name, size, rounded);
  return false;
#endif
}
2135
/* A noswitch_section_callback for tls_comm_section.  Emit thread-local
   common storage for DECL, or report the feature as unimplemented on
   targets without ASM_OUTPUT_TLS_COMMON.  Always returns true so the
   caller does not additionally diagnose an alignment failure.  */

static bool
emit_tls_common (tree decl ATTRIBUTE_UNUSED,
		 const char *name ATTRIBUTE_UNUSED,
		 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
		 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_TLS_COMMON
  ASM_OUTPUT_TLS_COMMON (asm_out_file, decl, name, size);
  return true;
#else
  sorry ("thread-local COMMON data not implemented");
  return true;
#endif
}
2152
/* Assemble DECL given that it belongs in SECTION_NOSWITCH section SECT.
   NAME is the name of DECL's SYMBOL_REF.  ALIGN is the required
   alignment in bits.  Diagnoses a requested alignment the target
   directive cannot express.  */

static void
assemble_noswitch_variable (tree decl, const char *name, section *sect,
			    unsigned int align)
{
  unsigned HOST_WIDE_INT size, rounded;

  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
  rounded = size;

  /* ASan-protected globals are padded with a trailing red zone.  */
  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_protect_global (decl))
    size += asan_red_zone_size (size);

  /* Don't allocate zero bytes of common,
     since that means "undefined external" in the linker.  */
  if (size == 0)
    rounded = 1;

  /* Round size up to multiple of BIGGEST_ALIGNMENT bits
     so that each uninitialized object starts on such a boundary.  */
  rounded += (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1;
  rounded = (rounded / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)
	     * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));

  /* The callback returns false when the emitted directive could not
     express the alignment explicitly; in that case the rounding of
     ROUNDED is the only guarantee, so check it is enough.  */
  if (!sect->noswitch.callback (decl, name, size, rounded)
      && (unsigned HOST_WIDE_INT) (align / BITS_PER_UNIT) > rounded)
    error ("requested alignment for %q+D is greater than "
	   "implemented alignment of %wu", decl, rounded);
}
2184
/* A subroutine of assemble_variable.  Output the label and contents of
   DECL, whose address is a SYMBOL_REF with name NAME.  DONT_OUTPUT_DATA
   is as for assemble_variable.  MERGE_STRINGS is true when the decl
   lives in a mergeable-strings section, which affects how the constant
   is formatted.  */

static void
assemble_variable_contents (tree decl, const char *name,
			    bool dont_output_data, bool merge_strings)
{
  /* Do any machine/system dependent processing of the object.  */
#ifdef ASM_DECLARE_OBJECT_NAME
  last_assemble_variable_decl = decl;
  ASM_DECLARE_OBJECT_NAME (asm_out_file, name, decl);
#else
  /* Standard thing is just output label for the object.  */
  ASM_OUTPUT_LABEL (asm_out_file, name);
#endif /* ASM_DECLARE_OBJECT_NAME */

  if (!dont_output_data)
    {
      /* Caller is supposed to use varpool_get_constructor when it wants
	 to output the body.  */
      gcc_assert (!in_lto_p || DECL_INITIAL (decl) != error_mark_node);
      if (DECL_INITIAL (decl)
	  && DECL_INITIAL (decl) != error_mark_node
	  && !initializer_zerop (DECL_INITIAL (decl)))
	/* Output the actual data.  */
	output_constant (DECL_INITIAL (decl),
			 tree_to_uhwi (DECL_SIZE_UNIT (decl)),
			 get_variable_align (decl),
			 false, merge_strings);
      else
	/* Leave space for it.  All-zero initializers are emitted as a
	   skip, not as literal zero bytes.  */
	assemble_zeros (tree_to_uhwi (DECL_SIZE_UNIT (decl)));
      targetm.asm_out.decl_end ();
    }
}
2221
2222 /* Write out assembly for the variable DECL, which is not defined in
2223 the current translation unit. */
2224 void
assemble_undefined_decl(tree decl)2225 assemble_undefined_decl (tree decl)
2226 {
2227 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2228 targetm.asm_out.assemble_undefined_decl (asm_out_file, name, decl);
2229 }
2230
2231 /* Assemble everything that is needed for a variable or function declaration.
2232 Not used for automatic variables, and not used for function definitions.
2233 Should not be called for variables of incomplete structure type.
2234
2235 TOP_LEVEL is nonzero if this variable has file scope.
2236 AT_END is nonzero if this is the special handling, at end of compilation,
2237 to define things that have had only tentative definitions.
2238 DONT_OUTPUT_DATA if nonzero means don't actually output the
2239 initial value (that will be done by the caller). */
2240
2241 void
assemble_variable(tree decl,int top_level ATTRIBUTE_UNUSED,int at_end ATTRIBUTE_UNUSED,int dont_output_data)2242 assemble_variable (tree decl, int top_level ATTRIBUTE_UNUSED,
2243 int at_end ATTRIBUTE_UNUSED, int dont_output_data)
2244 {
2245 const char *name;
2246 rtx decl_rtl, symbol;
2247 section *sect;
2248 unsigned int align;
2249 bool asan_protected = false;
2250
2251 /* This function is supposed to handle VARIABLES. Ensure we have one. */
2252 gcc_assert (VAR_P (decl));
2253
2254 /* Emulated TLS had better not get this far. */
2255 gcc_checking_assert (targetm.have_tls || !DECL_THREAD_LOCAL_P (decl));
2256
2257 last_assemble_variable_decl = 0;
2258
2259 /* Normally no need to say anything here for external references,
2260 since assemble_external is called by the language-specific code
2261 when a declaration is first seen. */
2262
2263 if (DECL_EXTERNAL (decl))
2264 return;
2265
2266 /* Do nothing for global register variables. */
2267 if (DECL_RTL_SET_P (decl) && REG_P (DECL_RTL (decl)))
2268 {
2269 TREE_ASM_WRITTEN (decl) = 1;
2270 return;
2271 }
2272
2273 /* If type was incomplete when the variable was declared,
2274 see if it is complete now. */
2275
2276 if (DECL_SIZE (decl) == 0)
2277 layout_decl (decl, 0);
2278
2279 /* Still incomplete => don't allocate it; treat the tentative defn
2280 (which is what it must have been) as an `extern' reference. */
2281
2282 if (!dont_output_data && DECL_SIZE (decl) == 0)
2283 {
2284 error ("storage size of %q+D isn%'t known", decl);
2285 TREE_ASM_WRITTEN (decl) = 1;
2286 return;
2287 }
2288
2289 /* The first declaration of a variable that comes through this function
2290 decides whether it is global (in C, has external linkage)
2291 or local (in C, has internal linkage). So do nothing more
2292 if this function has already run. */
2293
2294 if (TREE_ASM_WRITTEN (decl))
2295 return;
2296
2297 /* Make sure targetm.encode_section_info is invoked before we set
2298 ASM_WRITTEN. */
2299 decl_rtl = DECL_RTL (decl);
2300
2301 TREE_ASM_WRITTEN (decl) = 1;
2302
2303 /* Do no output if -fsyntax-only. */
2304 if (flag_syntax_only)
2305 return;
2306
2307 if (! dont_output_data
2308 && ! valid_constant_size_p (DECL_SIZE_UNIT (decl)))
2309 {
2310 error ("size of variable %q+D is too large", decl);
2311 return;
2312 }
2313
2314 gcc_assert (MEM_P (decl_rtl));
2315 gcc_assert (GET_CODE (XEXP (decl_rtl, 0)) == SYMBOL_REF);
2316 symbol = XEXP (decl_rtl, 0);
2317
2318 /* If this symbol belongs to the tree constant pool, output the constant
2319 if it hasn't already been written. */
2320 if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
2321 {
2322 tree decl = SYMBOL_REF_DECL (symbol);
2323 if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
2324 output_constant_def_contents (symbol);
2325 return;
2326 }
2327
2328 app_disable ();
2329
2330 name = XSTR (symbol, 0);
2331 if (TREE_PUBLIC (decl) && DECL_NAME (decl))
2332 notice_global_symbol (decl);
2333
2334 /* Compute the alignment of this data. */
2335
2336 align_variable (decl, dont_output_data);
2337
2338 if ((flag_sanitize & SANITIZE_ADDRESS)
2339 && asan_protect_global (decl))
2340 {
2341 asan_protected = true;
2342 SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
2343 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
2344 }
2345
2346 set_mem_align (decl_rtl, DECL_ALIGN (decl));
2347
2348 align = get_variable_align (decl);
2349
2350 if (TREE_PUBLIC (decl))
2351 maybe_assemble_visibility (decl);
2352
2353 if (DECL_PRESERVE_P (decl))
2354 targetm.asm_out.mark_decl_preserved (name);
2355
2356 /* First make the assembler name(s) global if appropriate. */
2357 sect = get_variable_section (decl, false);
2358 if (TREE_PUBLIC (decl)
2359 && (sect->common.flags & SECTION_COMMON) == 0)
2360 globalize_decl (decl);
2361
2362 /* Output any data that we will need to use the address of. */
2363 if (DECL_INITIAL (decl) && DECL_INITIAL (decl) != error_mark_node)
2364 output_addressed_constants (DECL_INITIAL (decl), 0);
2365
2366 /* dbxout.c needs to know this. */
2367 if (sect && (sect->common.flags & SECTION_CODE) != 0)
2368 DECL_IN_TEXT_SECTION (decl) = 1;
2369
2370 /* If the decl is part of an object_block, make sure that the decl
2371 has been positioned within its block, but do not write out its
2372 definition yet. output_object_blocks will do that later. */
2373 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
2374 {
2375 gcc_assert (!dont_output_data);
2376 place_block_symbol (symbol);
2377 }
2378 else if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
2379 assemble_noswitch_variable (decl, name, sect, align);
2380 else
2381 {
2382 /* Special-case handling of vtv comdat sections. */
2383 if (sect->named.name
2384 && (strcmp (sect->named.name, ".vtable_map_vars") == 0))
2385 handle_vtv_comdat_section (sect, decl);
2386 else
2387 switch_to_section (sect, decl);
2388 if (align > BITS_PER_UNIT)
2389 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
2390 assemble_variable_contents (decl, name, dont_output_data,
2391 (sect->common.flags & SECTION_MERGE)
2392 && (sect->common.flags & SECTION_STRINGS));
2393 if (asan_protected)
2394 {
2395 unsigned HOST_WIDE_INT int size
2396 = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2397 assemble_zeros (asan_red_zone_size (size));
2398 }
2399 }
2400 }
2401
2402
2403 /* Given a function declaration (FN_DECL), this function assembles the
2404 function into the .preinit_array section. */
2405
2406 void
assemble_vtv_preinit_initializer(tree fn_decl)2407 assemble_vtv_preinit_initializer (tree fn_decl)
2408 {
2409 section *sect;
2410 unsigned flags = SECTION_WRITE;
2411 rtx symbol = XEXP (DECL_RTL (fn_decl), 0);
2412
2413 flags |= SECTION_NOTYPE;
2414 sect = get_section (".preinit_array", flags, fn_decl);
2415 switch_to_section (sect);
2416 assemble_addr_to_section (symbol, sect);
2417 }
2418
2419 /* Return 1 if type TYPE contains any pointers. */
2420
2421 static int
contains_pointers_p(tree type)2422 contains_pointers_p (tree type)
2423 {
2424 switch (TREE_CODE (type))
2425 {
2426 case POINTER_TYPE:
2427 case REFERENCE_TYPE:
2428 /* I'm not sure whether OFFSET_TYPE needs this treatment,
2429 so I'll play safe and return 1. */
2430 case OFFSET_TYPE:
2431 return 1;
2432
2433 case RECORD_TYPE:
2434 case UNION_TYPE:
2435 case QUAL_UNION_TYPE:
2436 {
2437 tree fields;
2438 /* For a type that has fields, see if the fields have pointers. */
2439 for (fields = TYPE_FIELDS (type); fields; fields = DECL_CHAIN (fields))
2440 if (TREE_CODE (fields) == FIELD_DECL
2441 && contains_pointers_p (TREE_TYPE (fields)))
2442 return 1;
2443 return 0;
2444 }
2445
2446 case ARRAY_TYPE:
2447 /* An array type contains pointers if its element type does. */
2448 return contains_pointers_p (TREE_TYPE (type));
2449
2450 default:
2451 return 0;
2452 }
2453 }
2454
/* We delay assemble_external processing until
   the compilation unit is finalized.  This is the best we can do for
   right now (i.e. stage 3 of GCC 4.0) - the right thing is to delay
   it all the way to final.  See PR 17982 for further discussion.
   This is a TREE_LIST of the decls awaiting ASM_OUTPUT_EXTERNAL.  */
static GTY(()) tree pending_assemble_externals;

#ifdef ASM_OUTPUT_EXTERNAL
/* Some targets delay some output to final using TARGET_ASM_FILE_END.
   As a result, assemble_external can be called after the list of externals
   is processed and the pointer set destroyed.  When this flag is set,
   assemble_external emits directly instead of queueing.  */
static bool pending_assemble_externals_processed;

/* Avoid O(external_decls**2) lookups in the pending_assemble_externals
   TREE_LIST in assemble_external.  Mirrors the list's membership; not
   GC-rooted, so it is deleted once processing finishes.  */
static hash_set<tree> *pending_assemble_externals_set;
2470
2471 /* True if DECL is a function decl for which no out-of-line copy exists.
2472 It is assumed that DECL's assembler name has been set. */
2473
2474 static bool
incorporeal_function_p(tree decl)2475 incorporeal_function_p (tree decl)
2476 {
2477 if (TREE_CODE (decl) == FUNCTION_DECL && fndecl_built_in_p (decl))
2478 {
2479 const char *name;
2480
2481 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2482 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2483 return true;
2484
2485 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2486 /* Atomic or sync builtins which have survived this far will be
2487 resolved externally and therefore are not incorporeal. */
2488 if (strncmp (name, "__builtin_", 10) == 0)
2489 return true;
2490 }
2491 return false;
2492 }
2493
/* Actually do the tests to determine if this is necessary, and invoke
   ASM_OUTPUT_EXTERNAL.  DECL is emitted at most once: SYMBOL_REF_USED
   on its symbol records that the declaration has been written.  */
static void
assemble_external_real (tree decl)
{
  rtx rtl = DECL_RTL (decl);

  if (MEM_P (rtl) && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
      && !SYMBOL_REF_USED (XEXP (rtl, 0))
      && !incorporeal_function_p (decl))
    {
      /* Some systems do require some output.  */
      SYMBOL_REF_USED (XEXP (rtl, 0)) = 1;
      ASM_OUTPUT_EXTERNAL (asm_out_file, decl, XSTR (XEXP (rtl, 0), 0));
    }
}
#endif
2511
/* Emit ASM_OUTPUT_EXTERNAL for every decl queued by assemble_external,
   then tear down the queue.  After this runs, assemble_external emits
   directly (see pending_assemble_externals_processed).  */
void
process_pending_assemble_externals (void)
{
#ifdef ASM_OUTPUT_EXTERNAL
  tree list;
  for (list = pending_assemble_externals; list; list = TREE_CHAIN (list))
    assemble_external_real (TREE_VALUE (list));

  pending_assemble_externals = 0;
  pending_assemble_externals_processed = true;
  delete pending_assemble_externals_set;
  /* Null the freed pointer: assemble_external checks the _processed
     flag before touching the set, but leaving a dangling non-null
     pointer here invites use-after-free if that ever changes.  */
  pending_assemble_externals_set = NULL;
#endif
}
2525
/* This TREE_LIST contains any weak symbol declarations waiting
   to be emitted.  Populated by assemble_external for weak, non-static
   externals.  */
static GTY(()) tree weak_decls;
2529
/* Output something to declare an external symbol to the assembler,
   and qualifiers such as weakness.  (Most assemblers don't need
   extern declaration, so we normally output nothing.)  Do nothing if
   DECL is not external.  Weak decls and (on targets that need it)
   plain externals are queued rather than emitted immediately.  */

void
assemble_external (tree decl ATTRIBUTE_UNUSED)
{
  /* Make sure that the ASM_OUT_FILE is open.
     If it's not, we should not be calling this function.  */
  gcc_assert (asm_out_file);

  /* In a perfect world, the following condition would be true.
     Sadly, the Go front end emits assembly *from the front end*,
     bypassing the call graph.  See PR52739.  Fix before GCC 4.8.  */
#if 0
  /* This function should only be called if we are expanding, or have
     expanded, to RTL.
     Ideally, only final.c would be calling this function, but it is
     not clear whether that would break things somehow.  See PR 17982
     for further discussion.  */
  gcc_assert (state == EXPANSION
	      || state == FINISHED);
#endif

  if (!DECL_P (decl) || !DECL_EXTERNAL (decl) || !TREE_PUBLIC (decl))
    return;

  /* We want to output annotation for weak and external symbols at
     very last to check if they are references or not.  */

  if (TARGET_SUPPORTS_WEAK
      && DECL_WEAK (decl)
      /* TREE_STATIC is a weird and abused creature which is not
	 generally the right test for whether an entity has been
	 locally emitted, inlined or otherwise not-really-extern, but
	 for declarations that can be weak, it happens to be
	 match.  */
      && !TREE_STATIC (decl)
      && lookup_attribute ("weak", DECL_ATTRIBUTES (decl))
      && value_member (decl, weak_decls) == NULL_TREE)
    weak_decls = tree_cons (NULL, decl, weak_decls);

#ifdef ASM_OUTPUT_EXTERNAL
  /* Once the queue has been flushed (end of compilation unit), emit
     directly instead of re-queueing.  */
  if (pending_assemble_externals_processed)
    {
      assemble_external_real (decl);
      return;
    }

  /* Queue DECL once; the hash set deduplicates in O(1).  */
  if (! pending_assemble_externals_set->add (decl))
    pending_assemble_externals = tree_cons (NULL, decl,
					    pending_assemble_externals);
#endif
}
2585
2586 /* Similar, for calling a library function FUN. */
2587
2588 void
assemble_external_libcall(rtx fun)2589 assemble_external_libcall (rtx fun)
2590 {
2591 /* Declare library function name external when first used, if nec. */
2592 if (! SYMBOL_REF_USED (fun))
2593 {
2594 SYMBOL_REF_USED (fun) = 1;
2595 targetm.asm_out.external_libcall (fun);
2596 }
2597 }
2598
/* Assemble a label named NAME.  NAME is passed straight to the target's
   label directive; no transparent-alias resolution or target name
   mangling is applied here (use assemble_name for that).  */

void
assemble_label (FILE *file, const char *name)
{
  ASM_OUTPUT_LABEL (file, name);
}
2606
/* Set the symbol_referenced flag for identifier node ID, recording that
   the name has been used in the assembler output.  */
void
mark_referenced (tree id)
{
  TREE_SYMBOL_REFERENCED (id) = 1;
}
2613
/* Set the symbol_referenced flag for DECL and notify callgraph.  Forces
   functions and variables to be output even if they would otherwise
   look dead.  */
void
mark_decl_referenced (tree decl)
{
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Extern inline functions don't become needed when referenced.
	 If we know a method will be emitted in other TU and no new
	 functions can be marked reachable, just use the external
	 definition.  */
      struct cgraph_node *node = cgraph_node::get_create (decl);
      if (!DECL_EXTERNAL (decl)
	  && !node->definition)
	node->mark_force_output ();
    }
  else if (VAR_P (decl))
    {
      varpool_node *node = varpool_node::get_create (decl);
      /* The C++ front end uses mark_decl_referenced to force COMDAT
	 variables to be output that might appear dead otherwise.  */
      node->force_output = true;
    }
  /* else do nothing - we can get various sorts of CST nodes here,
     which do not need to be marked.  */
}
2639
2640
/* Output to FILE (an assembly file) a reference to NAME.  If NAME
   starts with a *, the rest of NAME is output verbatim.  Otherwise
   NAME is transformed in a target-specific way (usually by the
   addition of an underscore).  */

void
assemble_name_raw (FILE *file, const char *name)
{
  /* A leading '*' means "emit the remainder verbatim".  */
  if (*name == '*')
    {
      fputs (name + 1, file);
      return;
    }

  ASM_OUTPUT_LABELREF (file, name);
}
2654
/* Return NAME that should actually be emitted, looking through
   transparent aliases.  If NAME refers to an entity that is also
   represented as a tree (like a function or variable), mark the entity
   as referenced.  */
const char *
assemble_name_resolve (const char *name)
{
  /* Strip target encoding (e.g. a '*' or flag prefix) before looking
     the name up in the identifier table.  */
  const char *real_name = targetm.strip_name_encoding (name);
  tree id = maybe_get_identifier (real_name);

  if (id)
    {
      tree id_orig = id;

      mark_referenced (id);
      /* Follow transparent-alias chains to the name that will actually
	 be defined; only substitute if a chain was followed.  */
      ultimate_transparent_alias_target (&id);
      if (id != id_orig)
	name = IDENTIFIER_POINTER (id);
      gcc_assert (! TREE_CHAIN (id));
    }

  return name;
}
2678
/* Like assemble_name_raw, but should be used when NAME might refer to
   an entity that is also represented as a tree (like a function or
   variable).  If NAME does refer to such an entity, that entity will
   be marked as referenced.  */

void
assemble_name (FILE *file, const char *name)
{
  const char *resolved = assemble_name_resolve (name);
  assemble_name_raw (file, resolved);
}
2689
/* Allocate SIZE bytes writable static space with a gensym name
   and return an RTX to refer to its address.  The storage is emitted
   immediately as a local symbol aligned to BIGGEST_ALIGNMENT.  */

rtx
assemble_static_space (unsigned HOST_WIDE_INT size)
{
  char name[17];
  const char *namestring;
  rtx x;

  /* Generate a unique internal label of the form *LF<n>.  */
  ASM_GENERATE_INTERNAL_LABEL (name, "LF", const_labelno);
  ++const_labelno;
  namestring = ggc_strdup (name);

  x = gen_rtx_SYMBOL_REF (Pmode, namestring);
  SYMBOL_REF_FLAGS (x) = SYMBOL_FLAG_LOCAL;

#ifdef ASM_OUTPUT_ALIGNED_DECL_LOCAL
  ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, NULL_TREE, name, size,
				 BIGGEST_ALIGNMENT);
#else
#ifdef ASM_OUTPUT_ALIGNED_LOCAL
  ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, BIGGEST_ALIGNMENT);
#else
  {
    /* Round size up to multiple of BIGGEST_ALIGNMENT bits
       so that each uninitialized object starts on such a boundary.  */
    /* Variable `rounded' might or might not be used in ASM_OUTPUT_LOCAL.  */
    unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED
      = ((size + (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1)
	 / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)
	 * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));
    ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
  }
#endif
#endif
  return x;
}
2728
/* Assemble the static constant template for function entry trampolines.
   This is done at most once per compilation.
   Returns an RTX for the address of the template.  */

/* Cached MEM for the template; non-null once it has been emitted.  */
static GTY(()) rtx initial_trampoline;

rtx
assemble_trampoline_template (void)
{
  char label[256];
  const char *name;
  int align;
  rtx symbol;

  gcc_assert (targetm.asm_out.trampoline_template != NULL);

  /* Emit at most once; afterwards return the cached rtx.  */
  if (initial_trampoline)
    return initial_trampoline;

  /* By default, put trampoline templates in read-only data section.  */

#ifdef TRAMPOLINE_SECTION
  switch_to_section (TRAMPOLINE_SECTION);
#else
  switch_to_section (readonly_data_section);
#endif

  /* Write the assembler code to define one.  */
  align = floor_log2 (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
  if (align > 0)
    ASM_OUTPUT_ALIGN (asm_out_file, align);

  targetm.asm_out.internal_label (asm_out_file, "LTRAMP", 0);
  targetm.asm_out.trampoline_template (asm_out_file);

  /* Record the rtl to refer to it.  */
  ASM_GENERATE_INTERNAL_LABEL (label, "LTRAMP", 0);
  name = ggc_strdup (label);
  symbol = gen_rtx_SYMBOL_REF (Pmode, name);
  SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_LOCAL;

  initial_trampoline = gen_const_mem (BLKmode, symbol);
  set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
  set_mem_size (initial_trampoline, TRAMPOLINE_SIZE);

  return initial_trampoline;
}
2776
/* A and B are either alignments or offsets.  Return the minimum alignment
   that may be assumed after adding the two together.  */

static inline unsigned
min_align (unsigned int a, unsigned int b)
{
  /* The sum is aligned to the lowest bit set in either operand, which
     least_bit_hwi extracts from a | b.  */
  return least_bit_hwi (a | b);
}
2785
2786 /* Return the assembler directive for creating a given kind of integer
2787 object. SIZE is the number of bytes in the object and ALIGNED_P
2788 indicates whether it is known to be aligned. Return NULL if the
2789 assembly dialect has no such directive.
2790
2791 The returned string should be printed at the start of a new line and
2792 be followed immediately by the object's initial value. */
2793
2794 const char *
integer_asm_op(int size,int aligned_p)2795 integer_asm_op (int size, int aligned_p)
2796 {
2797 struct asm_int_op *ops;
2798
2799 if (aligned_p)
2800 ops = &targetm.asm_out.aligned_op;
2801 else
2802 ops = &targetm.asm_out.unaligned_op;
2803
2804 switch (size)
2805 {
2806 case 1:
2807 return targetm.asm_out.byte_op;
2808 case 2:
2809 return ops->hi;
2810 case 3:
2811 return ops->psi;
2812 case 4:
2813 return ops->si;
2814 case 5:
2815 case 6:
2816 case 7:
2817 return ops->pdi;
2818 case 8:
2819 return ops->di;
2820 case 9:
2821 case 10:
2822 case 11:
2823 case 12:
2824 case 13:
2825 case 14:
2826 case 15:
2827 return ops->pti;
2828 case 16:
2829 return ops->ti;
2830 default:
2831 return NULL;
2832 }
2833 }
2834
2835 /* Use directive OP to assemble an integer object X. Print OP at the
2836 start of the line, followed immediately by the value of X. */
2837
2838 void
assemble_integer_with_op(const char * op,rtx x)2839 assemble_integer_with_op (const char *op, rtx x)
2840 {
2841 fputs (op, asm_out_file);
2842 output_addr_const (asm_out_file, x);
2843 fputc ('\n', asm_out_file);
2844 }
2845
2846 /* The default implementation of the asm_out.integer target hook. */
2847
2848 bool
default_assemble_integer(rtx x ATTRIBUTE_UNUSED,unsigned int size ATTRIBUTE_UNUSED,int aligned_p ATTRIBUTE_UNUSED)2849 default_assemble_integer (rtx x ATTRIBUTE_UNUSED,
2850 unsigned int size ATTRIBUTE_UNUSED,
2851 int aligned_p ATTRIBUTE_UNUSED)
2852 {
2853 const char *op = integer_asm_op (size, aligned_p);
2854 /* Avoid GAS bugs for large values. Specifically negative values whose
2855 absolute value fits in a bfd_vma, but not in a bfd_signed_vma. */
2856 if (size > UNITS_PER_WORD && size > POINTER_SIZE_UNITS)
2857 return false;
2858 return op && (assemble_integer_with_op (op, x), true);
2859 }
2860
/* Assemble the integer constant X into an object of SIZE bytes.  ALIGN is
   the alignment of the integer in bits.  Return 1 if we were able to output
   the constant, otherwise 0.  We must be able to output the constant,
   if FORCE is nonzero.  */

bool
assemble_integer (rtx x, unsigned int size, unsigned int align, int force)
{
  int aligned_p;

  /* The object counts as aligned if ALIGN covers its full size (capped
     at BIGGEST_ALIGNMENT).  */
  aligned_p = (align >= MIN (size * BITS_PER_UNIT, BIGGEST_ALIGNMENT));

  /* See if the target hook can handle this kind of object.  */
  if (targetm.asm_out.integer (x, size, aligned_p))
    return true;

  /* If the object is a multi-byte one, try splitting it up.  Split
     it into words it if is multi-word, otherwise split it into bytes.  */
  if (size > 1)
    {
      machine_mode omode, imode;
      unsigned int subalign;
      unsigned int subsize, i;
      enum mode_class mclass;

      subsize = size > UNITS_PER_WORD? UNITS_PER_WORD : 1;
      subalign = MIN (align, subsize * BITS_PER_UNIT);
      /* CONST_FIXED pieces keep their fixed-point mode class; everything
	 else is split as plain integers.  */
      if (GET_CODE (x) == CONST_FIXED)
	mclass = GET_MODE_CLASS (GET_MODE (x));
      else
	mclass = MODE_INT;

      omode = mode_for_size (subsize * BITS_PER_UNIT, mclass, 0).require ();
      imode = mode_for_size (size * BITS_PER_UNIT, mclass, 0).require ();

      /* Emit each sub-piece recursively; stop if a piece cannot be
	 extracted or output.  */
      for (i = 0; i < size; i += subsize)
	{
	  rtx partial = simplify_subreg (omode, x, imode, i);
	  if (!partial || !assemble_integer (partial, subsize, subalign, 0))
	    break;
	}
      if (i == size)
	return true;

      /* If we've printed some of it, but not all of it, there's no going
	 back now.  */
      gcc_assert (!i);
    }

  gcc_assert (!force);

  return false;
}
2914
/* Assemble the floating-point constant D into an object of size MODE.  ALIGN
   is the alignment of the constant in bits.  If REVERSE is true, D is output
   in reverse storage order.  */

void
assemble_real (REAL_VALUE_TYPE d, scalar_float_mode mode, unsigned int align,
	       bool reverse)
{
  long data[4] = {0, 0, 0, 0};
  int bitsize, nelts, nunits, units_per;
  rtx elt;

  /* This is hairy.  We have a quantity of known size.  real_to_target
     will put it into an array of *host* longs, 32 bits per element
     (even if long is more than 32 bits).  We need to determine the
     number of array elements that are occupied (nelts) and the number
     of *target* min-addressable units that will be occupied in the
     object file (nunits).  We cannot assume that 32 divides the
     mode's bitsize (size * BITS_PER_UNIT) evenly.

     size * BITS_PER_UNIT is used here to make sure that padding bits
     (which might appear at either end of the value; real_to_target
     will include the padding bits in its output array) are included.  */

  nunits = GET_MODE_SIZE (mode);
  bitsize = nunits * BITS_PER_UNIT;
  nelts = CEIL (bitsize, 32);
  units_per = 32 / BITS_PER_UNIT;

  real_to_target (data, &d, mode);

  /* Put out the first word with the specified alignment.  */
  unsigned int chunk_nunits = MIN (nunits, units_per);
  if (reverse)
    /* Reverse storage order: emit the host words back to front, each
       with its bytes flipped.  */
    elt = flip_storage_order (SImode, gen_int_mode (data[nelts - 1], SImode));
  else
    elt = GEN_INT (sext_hwi (data[0], chunk_nunits * BITS_PER_UNIT));
  assemble_integer (elt, chunk_nunits, align, 1);
  nunits -= chunk_nunits;

  /* Subsequent words need only 32-bit alignment.  */
  align = min_align (align, 32);

  for (int i = 1; i < nelts; i++)
    {
      /* The last chunk may cover fewer than units_per target units.  */
      chunk_nunits = MIN (nunits, units_per);
      if (reverse)
	elt = flip_storage_order (SImode,
				  gen_int_mode (data[nelts - 1 - i], SImode));
      else
	elt = GEN_INT (sext_hwi (data[i], chunk_nunits * BITS_PER_UNIT));
      assemble_integer (elt, chunk_nunits, align, 1);
      nunits -= chunk_nunits;
    }
}
2970
/* Given an expression EXP with a constant value,
   reduce it to the sum of an assembler symbol and an integer.
   Store them both in the structure *VALUE.
   EXP must be reducible.  */

class addr_const {
public:
  /* The symbolic part: a SYMBOL_REF or LABEL_REF.  */
  rtx base;
  /* The constant byte offset added to BASE.  */
  poly_int64 offset;
};
2981
/* Decompose the address constant EXP into a base rtx and a byte offset,
   stored in *VALUE.  Strips COMPONENT_REFs, ARRAY_REFs, MEM_REFs and
   INDIRECT_REFs, accumulating their offsets, until a base object is
   reached.  */

static void
decode_addr_const (tree exp, class addr_const *value)
{
  tree target = TREE_OPERAND (exp, 0);
  poly_int64 offset = 0;
  rtx x;

  /* Peel reference nodes off TARGET, folding each one's displacement
     into OFFSET, until the underlying object remains.  */
  while (1)
    {
      poly_int64 bytepos;
      if (TREE_CODE (target) == COMPONENT_REF
	  && poly_int_tree_p (byte_position (TREE_OPERAND (target, 1)),
			      &bytepos))
	{
	  offset += bytepos;
	  target = TREE_OPERAND (target, 0);
	}
      else if (TREE_CODE (target) == ARRAY_REF
	       || TREE_CODE (target) == ARRAY_RANGE_REF)
	{
	  /* Truncate big offset.  */
	  offset
	    += (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (target)))
		* wi::to_poly_widest (TREE_OPERAND (target, 1)).force_shwi ());
	  target = TREE_OPERAND (target, 0);
	}
      else if (TREE_CODE (target) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (target, 0)) == ADDR_EXPR)
	{
	  offset += mem_ref_offset (target).force_shwi ();
	  target = TREE_OPERAND (TREE_OPERAND (target, 0), 0);
	}
      else if (TREE_CODE (target) == INDIRECT_REF
	       && TREE_CODE (TREE_OPERAND (target, 0)) == NOP_EXPR
	       && TREE_CODE (TREE_OPERAND (TREE_OPERAND (target, 0), 0))
		  == ADDR_EXPR)
	/* *(T *)&obj collapses back to obj; no offset involved.  */
	target = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (target, 0), 0), 0);
      else
	break;
    }

  /* Map the base object to an rtx; it must be (or wrap) a MEM.  */
  switch (TREE_CODE (target))
    {
    case VAR_DECL:
    case FUNCTION_DECL:
      x = DECL_RTL (target);
      break;

    case LABEL_DECL:
      x = gen_rtx_MEM (FUNCTION_MODE,
		       gen_rtx_LABEL_REF (Pmode, force_label_rtx (target)));
      break;

    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case COMPLEX_CST:
    case CONSTRUCTOR:
    case INTEGER_CST:
      x = lookup_constant_def (target);
      /* Should have been added by output_addressed_constants.  */
      gcc_assert (x);
      break;

    case INDIRECT_REF:
      /* This deals with absolute addresses.  */
      offset += tree_to_shwi (TREE_OPERAND (target, 0));
      x = gen_rtx_MEM (QImode,
		       gen_rtx_SYMBOL_REF (Pmode, "origin of addresses"));
      break;

    case COMPOUND_LITERAL_EXPR:
      gcc_assert (COMPOUND_LITERAL_EXPR_DECL (target));
      x = DECL_RTL (COMPOUND_LITERAL_EXPR_DECL (target));
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x));
  /* Return the address inside the MEM, not the MEM itself.  */
  x = XEXP (x, 0);

  value->base = x;
  value->offset = offset;
}
3068
/* Hash table mapping tree constants to their constant_descriptor_tree,
   so that each distinct constant is only given one label.  */
static GTY(()) hash_table<tree_descriptor_hasher> *const_desc_htab;

static void maybe_output_constant_def_contents (struct constant_descriptor_tree *, int);
3072
/* Constant pool accessor function.  Returns the file-wide table of
   tree constant descriptors.  */

hash_table<tree_descriptor_hasher> *
constant_pool_htab (void)
{
  return const_desc_htab;
}
3080
/* Compute a hash code for a constant expression.  The hash was
   precomputed by const_hash_1 and cached in the descriptor.  */

hashval_t
tree_descriptor_hasher::hash (constant_descriptor_tree *ptr)
{
  return ptr->hash;
}
3088
/* Compute a hash code for the constant expression EXP, such that
   constants that compare equal under compare_constant hash equal.  */

static hashval_t
const_hash_1 (const tree exp)
{
  const char *p;
  hashval_t hi;
  int len, i;
  enum tree_code code = TREE_CODE (exp);

  /* Either set P and LEN to the address and len of something to hash and
     exit the switch or return a value.  */

  switch (code)
    {
    case INTEGER_CST:
      /* Hash the raw wide-int element storage.  */
      p = (char *) &TREE_INT_CST_ELT (exp, 0);
      len = TREE_INT_CST_NUNITS (exp) * sizeof (HOST_WIDE_INT);
      break;

    case REAL_CST:
      return real_hash (TREE_REAL_CST_PTR (exp));

    case FIXED_CST:
      return fixed_hash (TREE_FIXED_CST_PTR (exp));

    case STRING_CST:
      p = TREE_STRING_POINTER (exp);
      len = TREE_STRING_LENGTH (exp);
      break;

    case COMPLEX_CST:
      return (const_hash_1 (TREE_REALPART (exp)) * 5
	      + const_hash_1 (TREE_IMAGPART (exp)));

    case VECTOR_CST:
      {
	/* Hash the encoding, not the expanded elements; equal vectors
	   have equal encodings.  */
	hi = 7 + VECTOR_CST_NPATTERNS (exp);
	hi = hi * 563 + VECTOR_CST_NELTS_PER_PATTERN (exp);
	unsigned int count = vector_cst_encoded_nelts (exp);
	for (unsigned int i = 0; i < count; ++i)
	  hi = hi * 563 + const_hash_1 (VECTOR_CST_ENCODED_ELT (exp, i));
	return hi;
      }

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	tree value;

	hi = 5 + int_size_in_bytes (TREE_TYPE (exp));

	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	  if (value)
	    hi = hi * 603 + const_hash_1 (value);

	return hi;
      }

    case ADDR_EXPR:
      /* The address of a constant is hashed as the constant itself.  */
      if (CONSTANT_CLASS_P (TREE_OPERAND (exp, 0)))
	return const_hash_1 (TREE_OPERAND (exp, 0));

      /* Fallthru. */
    case FDESC_EXPR:
      {
	class addr_const value;

	decode_addr_const (exp, &value);
	switch (GET_CODE (value.base))
	  {
	  case SYMBOL_REF:
	    /* Don't hash the address of the SYMBOL_REF;
	       only use the offset and the symbol name.  */
	    hi = value.offset.coeffs[0];
	    p = XSTR (value.base, 0);
	    for (i = 0; p[i] != 0; i++)
	      hi = ((hi * 613) + (unsigned) (p[i]));
	    break;

	  case LABEL_REF:
	    hi = (value.offset.coeffs[0]
		  + CODE_LABEL_NUMBER (label_ref_label (value.base)) * 13);
	    break;

	  default:
	    gcc_unreachable ();
	  }
      }
      return hi;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
      return (const_hash_1 (TREE_OPERAND (exp, 0)) * 9
	      + const_hash_1 (TREE_OPERAND (exp, 1)));

    CASE_CONVERT:
      return const_hash_1 (TREE_OPERAND (exp, 0)) * 7 + 2;

    default:
      /* A language specific constant. Just hash the code. */
      return code;
    }

  /* Compute hashing function.  */
  hi = len;
  for (i = 0; i < len; i++)
    hi = ((hi * 613) + (unsigned) (p[i]));

  return hi;
}
3199
3200 /* Wrapper of compare_constant, for the htab interface. */
3201 bool
equal(constant_descriptor_tree * c1,constant_descriptor_tree * c2)3202 tree_descriptor_hasher::equal (constant_descriptor_tree *c1,
3203 constant_descriptor_tree *c2)
3204 {
3205 if (c1->hash != c2->hash)
3206 return 0;
3207 return compare_constant (c1->value, c2->value);
3208 }
3209
/* Compare t1 and t2, and return 1 only if they are known to result in
   the same bit pattern on output.  */

static int
compare_constant (const tree t1, const tree t2)
{
  enum tree_code typecode;

  /* Two NULLs compare equal; NULL vs. non-NULL does not.  */
  if (t1 == NULL_TREE)
    return t2 == NULL_TREE;
  if (t2 == NULL_TREE)
    return 0;

  if (TREE_CODE (t1) != TREE_CODE (t2))
    return 0;

  switch (TREE_CODE (t1))
    {
    case INTEGER_CST:
      /* Integer constants are the same only if the same width of type.  */
      if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
	return 0;
      if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
	return 0;
      return tree_int_cst_equal (t1, t2);

    case REAL_CST:
      /* Real constants are the same only if the same width of type.  In
	 addition to the same width, we need to check whether the modes are the
	 same.  There might be two floating point modes that are the same size
	 but have different representations, such as the PowerPC that has 2
	 different 128-bit floating point types (IBM extended double and IEEE
	 128-bit floating point).  */
      if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
	return 0;
      if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
	return 0;
      return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));

    case FIXED_CST:
      /* Fixed constants are the same only if the same width of type.  */
      if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
	return 0;

      return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));

    case STRING_CST:
      /* The object size (including padding) must match as well as the
	 string contents themselves.  */
      if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
	  || int_size_in_bytes (TREE_TYPE (t1))
	     != int_size_in_bytes (TREE_TYPE (t2)))
	return 0;

      return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
	      && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
			   TREE_STRING_LENGTH (t1)));

    case COMPLEX_CST:
      return (compare_constant (TREE_REALPART (t1), TREE_REALPART (t2))
	      && compare_constant (TREE_IMAGPART (t1), TREE_IMAGPART (t2)));

    case VECTOR_CST:
      {
	/* Compare the stepped-pattern encodings element by element.  */
	if (VECTOR_CST_NPATTERNS (t1)
	    != VECTOR_CST_NPATTERNS (t2))
	  return 0;

	if (VECTOR_CST_NELTS_PER_PATTERN (t1)
	    != VECTOR_CST_NELTS_PER_PATTERN (t2))
	  return 0;

	unsigned int count = vector_cst_encoded_nelts (t1);
	for (unsigned int i = 0; i < count; ++i)
	  if (!compare_constant (VECTOR_CST_ENCODED_ELT (t1, i),
				 VECTOR_CST_ENCODED_ELT (t2, i)))
	    return 0;

	return 1;
      }

    case CONSTRUCTOR:
      {
	vec<constructor_elt, va_gc> *v1, *v2;
	unsigned HOST_WIDE_INT idx;

	typecode = TREE_CODE (TREE_TYPE (t1));
	if (typecode != TREE_CODE (TREE_TYPE (t2)))
	  return 0;

	if (typecode == ARRAY_TYPE)
	  {
	    HOST_WIDE_INT size_1 = int_size_in_bytes (TREE_TYPE (t1));
	    /* For arrays, check that mode, size and storage order match.  */
	    if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
		|| size_1 == -1
		|| size_1 != int_size_in_bytes (TREE_TYPE (t2))
		|| TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t1))
		   != TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t2)))
	      return 0;
	  }
	else
	  {
	    /* For record and union constructors, require exact type
	       equality.  */
	    if (TREE_TYPE (t1) != TREE_TYPE (t2))
	      return 0;
	  }

	v1 = CONSTRUCTOR_ELTS (t1);
	v2 = CONSTRUCTOR_ELTS (t2);
	if (vec_safe_length (v1) != vec_safe_length (v2))
	  return 0;

	for (idx = 0; idx < vec_safe_length (v1); ++idx)
	  {
	    constructor_elt *c1 = &(*v1)[idx];
	    constructor_elt *c2 = &(*v2)[idx];

	    /* Check that each value is the same... */
	    if (!compare_constant (c1->value, c2->value))
	      return 0;
	    /* ... and that they apply to the same fields! */
	    if (typecode == ARRAY_TYPE)
	      {
		if (!compare_constant (c1->index, c2->index))
		  return 0;
	      }
	    else
	      {
		if (c1->index != c2->index)
		  return 0;
	      }
	  }

	return 1;
      }

    case ADDR_EXPR:
    case FDESC_EXPR:
      {
	class addr_const value1, value2;
	enum rtx_code code;
	int ret;

	decode_addr_const (t1, &value1);
	decode_addr_const (t2, &value2);

	if (maybe_ne (value1.offset, value2.offset))
	  return 0;

	code = GET_CODE (value1.base);
	if (code != GET_CODE (value2.base))
	  return 0;

	switch (code)
	  {
	  case SYMBOL_REF:
	    /* Symbols are compared by name, not by address, so that
	       equivalent references to the same symbol unify.  */
	    ret = (strcmp (XSTR (value1.base, 0), XSTR (value2.base, 0)) == 0);
	    break;

	  case LABEL_REF:
	    ret = (CODE_LABEL_NUMBER (label_ref_label (value1.base))
		   == CODE_LABEL_NUMBER (label_ref_label (value2.base)));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	return ret;
      }

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
    case RANGE_EXPR:
      return (compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0))
	      && compare_constant (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)));

    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      return compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

    default:
      /* Language-specific or otherwise unhandled codes never unify.  */
      return 0;
    }

  gcc_unreachable ();
}
3397
3398 /* Return the section into which constant EXP should be placed. */
3399
3400 static section *
get_constant_section(tree exp,unsigned int align)3401 get_constant_section (tree exp, unsigned int align)
3402 {
3403 return targetm.asm_out.select_section (exp,
3404 compute_reloc_for_constant (exp),
3405 align);
3406 }
3407
3408 /* Return the size of constant EXP in bytes. */
3409
3410 static HOST_WIDE_INT
get_constant_size(tree exp)3411 get_constant_size (tree exp)
3412 {
3413 HOST_WIDE_INT size;
3414
3415 size = int_size_in_bytes (TREE_TYPE (exp));
3416 gcc_checking_assert (size >= 0);
3417 gcc_checking_assert (TREE_CODE (exp) != STRING_CST
3418 || size >= TREE_STRING_LENGTH (exp));
3419 return size;
3420 }
3421
/* Subroutine of output_constant_def:
   No constant equal to EXP is known to have been output.
   Make a constant descriptor to enter EXP in the hash table.
   Assign the label number and construct RTL to refer to the
   constant's location in memory.
   Caller is responsible for updating the hash table.  */

static struct constant_descriptor_tree *
build_constant_desc (tree exp)
{
  struct constant_descriptor_tree *desc;
  rtx symbol, rtl;
  char label[256];
  int labelno;
  tree decl;

  desc = ggc_alloc<constant_descriptor_tree> ();
  desc->value = exp;

  /* Create a string containing the label name, in LABEL.  */
  labelno = const_labelno++;
  ASM_GENERATE_INTERNAL_LABEL (label, "LC", labelno);

  /* Construct the VAR_DECL associated with the constant.  */
  decl = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (label),
		     TREE_TYPE (exp));
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_READONLY (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_ADDRESSABLE (decl) = 1;
  /* We don't set the RTL yet as this would cause varpool to assume that the
     variable is referenced.  Moreover, it would just be dropped in LTO mode.
     Instead we set the flag that will be recognized in make_decl_rtl.  */
  DECL_IN_CONSTANT_POOL (decl) = 1;
  DECL_INITIAL (decl) = desc->value;
  /* ??? targetm.constant_alignment hasn't been updated for vector types on
     most architectures so use DATA_ALIGNMENT as well, except for strings.  */
  if (TREE_CODE (exp) == STRING_CST)
    SET_DECL_ALIGN (decl, targetm.constant_alignment (exp, DECL_ALIGN (decl)));
  else
    {
      align_variable (decl, 0);
      /* Bump the alignment up to the mode's natural alignment when
	 unaligned access would either be slow or need a special
	 misaligned-move pattern.  */
      if (DECL_ALIGN (decl) < GET_MODE_ALIGNMENT (DECL_MODE (decl))
	  && ((optab_handler (movmisalign_optab, DECL_MODE (decl))
	       != CODE_FOR_nothing)
	      || targetm.slow_unaligned_access (DECL_MODE (decl),
						DECL_ALIGN (decl))))
	SET_DECL_ALIGN (decl, GET_MODE_ALIGNMENT (DECL_MODE (decl)));
    }

  /* Now construct the SYMBOL_REF and the MEM.  */
  if (use_object_blocks_p ())
    {
      int align = (TREE_CODE (decl) == CONST_DECL
		   || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
		   ? DECL_ALIGN (decl)
		   : symtab_node::get (decl)->definition_alignment ());
      section *sect = get_constant_section (exp, align);
      symbol = create_block_symbol (ggc_strdup (label),
				    get_block_for_section (sect), -1);
    }
  else
    symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
  SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
  SET_SYMBOL_REF_DECL (symbol, decl);
  TREE_CONSTANT_POOL_ADDRESS_P (symbol) = 1;

  rtl = gen_const_mem (TYPE_MODE (TREE_TYPE (exp)), symbol);
  set_mem_alias_set (rtl, 0);

  /* Putting EXP into the literal pool might have imposed a different
     alignment which should be visible in the RTX as well.  */
  set_mem_align (rtl, DECL_ALIGN (decl));

  /* We cannot share RTX'es in pool entries.
     Mark this piece of RTL as required for unsharing.  */
  RTX_FLAG (rtl, used) = 1;

  /* Set flags or add text to the name to record information, such as
     that it is a local symbol.  If the name is changed, the macro
     ASM_OUTPUT_LABELREF will have to know how to strip this
     information.  This call might invalidate our local variable
     SYMBOL; we can't use it afterward.  */
  targetm.encode_section_info (exp, rtl, true);

  desc->rtl = rtl;

  return desc;
}
3512
/* Subroutine of output_constant_def and tree_output_constant_def:
   Add a constant to the hash table that tracks which constants
   already have labels.  */

static constant_descriptor_tree *
add_constant_to_table (tree exp, int defer)
{
  /* The hash table methods may call output_constant_def for addressed
     constants, so handle them first.  */
  output_addressed_constants (exp, defer);

  /* Sanity check to catch recursive insertion.  */
  static bool inserting;
  gcc_assert (!inserting);
  inserting = true;

  /* Look up EXP in the table of constant descriptors.  If we didn't
     find it, create a new one.  */
  struct constant_descriptor_tree key;
  key.value = exp;
  key.hash = const_hash_1 (exp);
  constant_descriptor_tree **loc
    = const_desc_htab->find_slot_with_hash (&key, key.hash, INSERT);

  inserting = false;

  /* An empty slot means EXP is new; build its descriptor and fill the
     slot so later lookups find it.  */
  struct constant_descriptor_tree *desc = *loc;
  if (!desc)
    {
      desc = build_constant_desc (exp);
      desc->hash = key.hash;
      *loc = desc;
    }

  return desc;
}
3549
3550 /* Return an rtx representing a reference to constant data in memory
3551 for the constant expression EXP.
3552
3553 If assembler code for such a constant has already been output,
3554 return an rtx to refer to it.
3555 Otherwise, output such a constant in memory
3556 and generate an rtx for it.
3557
3558 If DEFER is nonzero, this constant can be deferred and output only
3559 if referenced in the function after all optimizations.
3560
3561 `const_desc_table' records which constants already have label strings. */
3562
3563 rtx
output_constant_def(tree exp,int defer)3564 output_constant_def (tree exp, int defer)
3565 {
3566 struct constant_descriptor_tree *desc = add_constant_to_table (exp, defer);
3567 maybe_output_constant_def_contents (desc, defer);
3568 return desc->rtl;
3569 }
3570
3571 /* Subroutine of output_constant_def: Decide whether or not we need to
3572 output the constant DESC now, and if so, do it. */
3573 static void
maybe_output_constant_def_contents(struct constant_descriptor_tree * desc,int defer)3574 maybe_output_constant_def_contents (struct constant_descriptor_tree *desc,
3575 int defer)
3576 {
3577 rtx symbol = XEXP (desc->rtl, 0);
3578 tree exp = desc->value;
3579
3580 if (flag_syntax_only)
3581 return;
3582
3583 if (TREE_ASM_WRITTEN (exp))
3584 /* Already output; don't do it again. */
3585 return;
3586
3587 /* We can always defer constants as long as the context allows
3588 doing so. */
3589 if (defer)
3590 {
3591 /* Increment n_deferred_constants if it exists. It needs to be at
3592 least as large as the number of constants actually referred to
3593 by the function. If it's too small we'll stop looking too early
3594 and fail to emit constants; if it's too large we'll only look
3595 through the entire function when we could have stopped earlier. */
3596 if (cfun)
3597 n_deferred_constants++;
3598 return;
3599 }
3600
3601 output_constant_def_contents (symbol);
3602 }
3603
3604 /* Subroutine of output_constant_def_contents. Output the definition
3605 of constant EXP, which is pointed to by label LABEL. ALIGN is the
3606 constant's alignment in bits. */
3607
3608 static void
assemble_constant_contents(tree exp,const char * label,unsigned int align,bool merge_strings)3609 assemble_constant_contents (tree exp, const char *label, unsigned int align,
3610 bool merge_strings)
3611 {
3612 HOST_WIDE_INT size;
3613
3614 size = get_constant_size (exp);
3615
3616 /* Do any machine/system dependent processing of the constant. */
3617 targetm.asm_out.declare_constant_name (asm_out_file, label, exp, size);
3618
3619 /* Output the value of EXP. */
3620 output_constant (exp, size, align, false, merge_strings);
3621
3622 targetm.asm_out.decl_end ();
3623 }
3624
/* We must output the constant data referred to by SYMBOL; do so.  */

static void
output_constant_def_contents (rtx symbol)
{
  tree decl = SYMBOL_REF_DECL (symbol);
  tree exp = DECL_INITIAL (decl);
  bool asan_protected = false;

  /* Make sure any other constants whose addresses appear in EXP
     are assigned label numbers.  */
  output_addressed_constants (exp, 0);

  /* We are no longer deferring this constant.  */
  TREE_ASM_WRITTEN (decl) = TREE_ASM_WRITTEN (exp) = 1;

  /* ASan-protected string constants get extra alignment so a red zone
     can be appended after the data.  */
  if ((flag_sanitize & SANITIZE_ADDRESS)
      && TREE_CODE (exp) == STRING_CST
      && asan_protect_global (exp))
    {
      asan_protected = true;
      SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
				 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
    }

  /* If the constant is part of an object block, make sure that the
     decl has been positioned within its block, but do not write out
     its definition yet.  output_object_blocks will do that later.  */
  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
    place_block_symbol (symbol);
  else
    {
      int align = (TREE_CODE (decl) == CONST_DECL
		   || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
		   ? DECL_ALIGN (decl)
		   : symtab_node::get (decl)->definition_alignment ());
      /* Pick the section, align within it, and emit label plus data.
	 String merging is only valid in mergeable string sections.  */
      section *sect = get_constant_section (exp, align);
      switch_to_section (sect);
      if (align > BITS_PER_UNIT)
	ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
      assemble_constant_contents (exp, XSTR (symbol, 0), align,
				  (sect->common.flags & SECTION_MERGE)
				  && (sect->common.flags & SECTION_STRINGS));
      if (asan_protected)
	{
	  /* Pad with zeros to form the ASan red zone after the data.  */
	  HOST_WIDE_INT size = get_constant_size (exp);
	  assemble_zeros (asan_red_zone_size (size));
	}
    }
}
3675
3676 /* Look up EXP in the table of constant descriptors. Return the rtl
3677 if it has been emitted, else null. */
3678
3679 rtx
lookup_constant_def(tree exp)3680 lookup_constant_def (tree exp)
3681 {
3682 struct constant_descriptor_tree key;
3683
3684 key.value = exp;
3685 key.hash = const_hash_1 (exp);
3686 constant_descriptor_tree *desc
3687 = const_desc_htab->find_with_hash (&key, key.hash);
3688
3689 return (desc ? desc->rtl : NULL_RTX);
3690 }
3691
3692 /* Return a tree representing a reference to constant data in memory
3693 for the constant expression EXP.
3694
3695 This is the counterpart of output_constant_def at the Tree level. */
3696
3697 tree
tree_output_constant_def(tree exp)3698 tree_output_constant_def (tree exp)
3699 {
3700 struct constant_descriptor_tree *desc = add_constant_to_table (exp, 1);
3701 tree decl = SYMBOL_REF_DECL (XEXP (desc->rtl, 0));
3702 varpool_node::finalize_decl (decl);
3703 return decl;
3704 }
3705
/* Descriptor for one RTX constant in the per-function constant pool.  */
class GTY((chain_next ("%h.next"), for_user)) constant_descriptor_rtx {
public:
  /* Next constant in the pool, in offset order.  */
  class constant_descriptor_rtx *next;
  /* MEM referring to this pool entry.  */
  rtx mem;
  /* SYMBOL_REF naming this pool entry.  */
  rtx sym;
  /* The constant rtx itself.  */
  rtx constant;
  /* Byte offset of this entry within the pool.  */
  HOST_WIDE_INT offset;
  /* Cached hash of CONSTANT, from const_rtx_hash.  */
  hashval_t hash;
  /* Mode the constant is emitted in.  */
  fixed_size_mode mode;
  /* Required alignment, in bits.  */
  unsigned int align;
  /* Internal label number of the entry.  */
  int labelno;
  /* Nonzero once the entry is known to be referenced.  */
  int mark;
};
3719
/* Hasher for the per-function pool of RTX constant descriptors.  */
struct const_rtx_desc_hasher : ggc_ptr_hash<constant_descriptor_rtx>
{
  static hashval_t hash (constant_descriptor_rtx *);
  static bool equal (constant_descriptor_rtx *, constant_descriptor_rtx *);
};
3725
/* Used in the hash tables to avoid outputting the same constant
   twice.  Unlike 'struct constant_descriptor_tree', RTX constants
   are output once per function, not once per file.  */
/* ??? Only a few targets need per-function constant pools.  Most
   can use one per-file pool.  Should add a targetm bit to tell the
   difference.  */

struct GTY(()) rtx_constant_pool {
  /* Pointers to first and last constant in pool, as ordered by offset.  */
  class constant_descriptor_rtx *first;
  class constant_descriptor_rtx *last;

  /* Hash facility for making memory-constants from constant rtl-expressions.
     It is used on RISC machines where immediate integer arguments and
     constant addresses are restricted so that such constants must be stored
     in memory.  */
  hash_table<const_rtx_desc_hasher> *const_rtx_htab;

  /* Current offset in constant pool (does not include any
     machine-specific header).  */
  HOST_WIDE_INT offset;
};
3748
/* Hash and compare functions for const_rtx_htab.  The hash was
   precomputed by const_rtx_hash and cached in the descriptor.  */

hashval_t
const_rtx_desc_hasher::hash (constant_descriptor_rtx *desc)
{
  return desc->hash;
}
3756
3757 bool
equal(constant_descriptor_rtx * x,constant_descriptor_rtx * y)3758 const_rtx_desc_hasher::equal (constant_descriptor_rtx *x,
3759 constant_descriptor_rtx *y)
3760 {
3761 if (x->mode != y->mode)
3762 return 0;
3763 return rtx_equal_p (x->constant, y->constant);
3764 }
3765
/* Hash one component of a constant.  */

static hashval_t
const_rtx_hash_1 (const_rtx x)
{
  unsigned HOST_WIDE_INT hwi;
  machine_mode mode;
  enum rtx_code code;
  hashval_t h;
  int i;

  code = GET_CODE (x);
  mode = GET_MODE (x);
  /* Seed the hash with the rtx code and mode.  */
  h = (hashval_t) code * 1048573 + mode;

  switch (code)
    {
    case CONST_INT:
      hwi = INTVAL (x);

    fold_hwi:
      /* Fold a HOST_WIDE_INT into the (possibly narrower) hashval_t by
	 xoring successive hashval_t-sized chunks.  */
      {
	int shift = sizeof (hashval_t) * CHAR_BIT;
	const int n = sizeof (HOST_WIDE_INT) / sizeof (hashval_t);

	h ^= (hashval_t) hwi;
	for (i = 1; i < n; ++i)
	  {
	    hwi >>= shift;
	    h ^= (hashval_t) hwi;
	  }
      }
      break;

    case CONST_WIDE_INT:
      /* Collapse all elements into one HOST_WIDE_INT, then fold it.  */
      hwi = 0;
      {
	for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
	  hwi ^= CONST_WIDE_INT_ELT (x, i);
	goto fold_hwi;
      }

    case CONST_DOUBLE:
      /* VOIDmode CONST_DOUBLE is an integer pair on !TARGET_SUPPORTS_WIDE_INT
	 targets; otherwise it holds a real value.  */
      if (TARGET_SUPPORTS_WIDE_INT == 0 && mode == VOIDmode)
	{
	  hwi = CONST_DOUBLE_LOW (x) ^ CONST_DOUBLE_HIGH (x);
	  goto fold_hwi;
	}
      else
	h ^= real_hash (CONST_DOUBLE_REAL_VALUE (x));
      break;

    case CONST_FIXED:
      h ^= fixed_hash (CONST_FIXED_VALUE (x));
      break;

    case SYMBOL_REF:
      /* Hash by name, not address, so equivalent references unify.  */
      h ^= htab_hash_string (XSTR (x, 0));
      break;

    case LABEL_REF:
      h = h * 251 + CODE_LABEL_NUMBER (label_ref_label (x));
      break;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      h = h * 251 + XINT (x, 1);
      break;

    default:
      break;
    }

  return h;
}
3841
3842 /* Compute a hash value for X, which should be a constant. */
3843
3844 static hashval_t
const_rtx_hash(rtx x)3845 const_rtx_hash (rtx x)
3846 {
3847 hashval_t h = 0;
3848 subrtx_iterator::array_type array;
3849 FOR_EACH_SUBRTX (iter, array, x, ALL)
3850 h = h * 509 + const_rtx_hash_1 (*iter);
3851 return h;
3852 }
3853
3854
3855 /* Create and return a new rtx constant pool. */
3856
3857 static struct rtx_constant_pool *
create_constant_pool(void)3858 create_constant_pool (void)
3859 {
3860 struct rtx_constant_pool *pool;
3861
3862 pool = ggc_alloc<rtx_constant_pool> ();
3863 pool->const_rtx_htab = hash_table<const_rtx_desc_hasher>::create_ggc (31);
3864 pool->first = NULL;
3865 pool->last = NULL;
3866 pool->offset = 0;
3867 return pool;
3868 }
3869
3870 /* Initialize constant pool hashing for a new function. */
3871
3872 void
init_varasm_status(void)3873 init_varasm_status (void)
3874 {
3875 crtl->varasm.pool = create_constant_pool ();
3876 crtl->varasm.deferred_constants = 0;
3877 }
3878
3879 /* Given a MINUS expression, simplify it if both sides
3880 include the same symbol. */
3881
3882 rtx
simplify_subtraction(rtx x)3883 simplify_subtraction (rtx x)
3884 {
3885 rtx r = simplify_rtx (x);
3886 return r ? r : x;
3887 }
3888
3889 /* Given a constant rtx X, make (or find) a memory constant for its value
3890 and return a MEM rtx to refer to it in memory. IN_MODE is the mode
3891 of X. */
3892
3893 rtx
force_const_mem(machine_mode in_mode,rtx x)3894 force_const_mem (machine_mode in_mode, rtx x)
3895 {
3896 class constant_descriptor_rtx *desc, tmp;
3897 struct rtx_constant_pool *pool;
3898 char label[256];
3899 rtx def, symbol;
3900 hashval_t hash;
3901 unsigned int align;
3902 constant_descriptor_rtx **slot;
3903 fixed_size_mode mode;
3904
3905 /* We can't force variable-sized objects to memory. */
3906 if (!is_a <fixed_size_mode> (in_mode, &mode))
3907 return NULL_RTX;
3908
3909 /* If we're not allowed to drop X into the constant pool, don't. */
3910 if (targetm.cannot_force_const_mem (mode, x))
3911 return NULL_RTX;
3912
3913 /* Record that this function has used a constant pool entry. */
3914 crtl->uses_const_pool = 1;
3915
3916 /* Decide which pool to use. */
3917 pool = (targetm.use_blocks_for_constant_p (mode, x)
3918 ? shared_constant_pool
3919 : crtl->varasm.pool);
3920
3921 /* Lookup the value in the hashtable. */
3922 tmp.constant = x;
3923 tmp.mode = mode;
3924 hash = const_rtx_hash (x);
3925 slot = pool->const_rtx_htab->find_slot_with_hash (&tmp, hash, INSERT);
3926 desc = *slot;
3927
3928 /* If the constant was already present, return its memory. */
3929 if (desc)
3930 return copy_rtx (desc->mem);
3931
3932 /* Otherwise, create a new descriptor. */
3933 desc = ggc_alloc<constant_descriptor_rtx> ();
3934 *slot = desc;
3935
3936 /* Align the location counter as required by EXP's data type. */
3937 machine_mode align_mode = (mode == VOIDmode ? word_mode : mode);
3938 align = targetm.static_rtx_alignment (align_mode);
3939
3940 pool->offset += (align / BITS_PER_UNIT) - 1;
3941 pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);
3942
3943 desc->next = NULL;
3944 desc->constant = copy_rtx (tmp.constant);
3945 desc->offset = pool->offset;
3946 desc->hash = hash;
3947 desc->mode = mode;
3948 desc->align = align;
3949 desc->labelno = const_labelno;
3950 desc->mark = 0;
3951
3952 pool->offset += GET_MODE_SIZE (mode);
3953 if (pool->last)
3954 pool->last->next = desc;
3955 else
3956 pool->first = pool->last = desc;
3957 pool->last = desc;
3958
3959 /* Create a string containing the label name, in LABEL. */
3960 ASM_GENERATE_INTERNAL_LABEL (label, "LC", const_labelno);
3961 ++const_labelno;
3962
3963 /* Construct the SYMBOL_REF. Make sure to mark it as belonging to
3964 the constants pool. */
3965 if (use_object_blocks_p () && targetm.use_blocks_for_constant_p (mode, x))
3966 {
3967 section *sect = targetm.asm_out.select_rtx_section (mode, x, align);
3968 symbol = create_block_symbol (ggc_strdup (label),
3969 get_block_for_section (sect), -1);
3970 }
3971 else
3972 symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
3973 desc->sym = symbol;
3974 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
3975 CONSTANT_POOL_ADDRESS_P (symbol) = 1;
3976 SET_SYMBOL_REF_CONSTANT (symbol, desc);
3977
3978 /* Construct the MEM. */
3979 desc->mem = def = gen_const_mem (mode, symbol);
3980 set_mem_align (def, align);
3981
3982 /* If we're dropping a label to the constant pool, make sure we
3983 don't delete it. */
3984 if (GET_CODE (x) == LABEL_REF)
3985 LABEL_PRESERVE_P (XEXP (x, 0)) = 1;
3986
3987 return copy_rtx (def);
3988 }
3989
3990 /* Given a constant pool SYMBOL_REF, return the corresponding constant. */
3991
3992 rtx
get_pool_constant(const_rtx addr)3993 get_pool_constant (const_rtx addr)
3994 {
3995 return SYMBOL_REF_CONSTANT (addr)->constant;
3996 }
3997
3998 /* Given a constant pool SYMBOL_REF, return the corresponding constant
3999 and whether it has been output or not. */
4000
4001 rtx
get_pool_constant_mark(rtx addr,bool * pmarked)4002 get_pool_constant_mark (rtx addr, bool *pmarked)
4003 {
4004 class constant_descriptor_rtx *desc;
4005
4006 desc = SYMBOL_REF_CONSTANT (addr);
4007 *pmarked = (desc->mark != 0);
4008 return desc->constant;
4009 }
4010
4011 /* Similar, return the mode. */
4012
4013 fixed_size_mode
get_pool_mode(const_rtx addr)4014 get_pool_mode (const_rtx addr)
4015 {
4016 return SYMBOL_REF_CONSTANT (addr)->mode;
4017 }
4018
4019 /* Return TRUE if and only if the constant pool has no entries. Note
4020 that even entries we might end up choosing not to emit are counted
4021 here, so there is the potential for missed optimizations. */
4022
4023 bool
constant_pool_empty_p(void)4024 constant_pool_empty_p (void)
4025 {
4026 return crtl->varasm.pool->first == NULL;
4027 }
4028
4029 /* Worker function for output_constant_pool_1. Emit assembly for X
4030 in MODE with known alignment ALIGN. */
4031
4032 static void
output_constant_pool_2(fixed_size_mode mode,rtx x,unsigned int align)4033 output_constant_pool_2 (fixed_size_mode mode, rtx x, unsigned int align)
4034 {
4035 switch (GET_MODE_CLASS (mode))
4036 {
4037 case MODE_FLOAT:
4038 case MODE_DECIMAL_FLOAT:
4039 {
4040 gcc_assert (CONST_DOUBLE_AS_FLOAT_P (x));
4041 assemble_real (*CONST_DOUBLE_REAL_VALUE (x),
4042 as_a <scalar_float_mode> (mode), align, false);
4043 break;
4044 }
4045
4046 case MODE_INT:
4047 case MODE_PARTIAL_INT:
4048 case MODE_FRACT:
4049 case MODE_UFRACT:
4050 case MODE_ACCUM:
4051 case MODE_UACCUM:
4052 assemble_integer (x, GET_MODE_SIZE (mode), align, 1);
4053 break;
4054
4055 case MODE_VECTOR_BOOL:
4056 {
4057 gcc_assert (GET_CODE (x) == CONST_VECTOR);
4058
4059 /* Pick the smallest integer mode that contains at least one
4060 whole element. Often this is byte_mode and contains more
4061 than one element. */
4062 unsigned int nelts = GET_MODE_NUNITS (mode);
4063 unsigned int elt_bits = GET_MODE_BITSIZE (mode) / nelts;
4064 unsigned int int_bits = MAX (elt_bits, BITS_PER_UNIT);
4065 scalar_int_mode int_mode = int_mode_for_size (int_bits, 0).require ();
4066
4067 /* Build the constant up one integer at a time. */
4068 unsigned int elts_per_int = int_bits / elt_bits;
4069 for (unsigned int i = 0; i < nelts; i += elts_per_int)
4070 {
4071 unsigned HOST_WIDE_INT value = 0;
4072 unsigned int limit = MIN (nelts - i, elts_per_int);
4073 for (unsigned int j = 0; j < limit; ++j)
4074 if (INTVAL (CONST_VECTOR_ELT (x, i + j)) != 0)
4075 value |= 1 << (j * elt_bits);
4076 output_constant_pool_2 (int_mode, gen_int_mode (value, int_mode),
4077 i != 0 ? MIN (align, int_bits) : align);
4078 }
4079 break;
4080 }
4081 case MODE_VECTOR_FLOAT:
4082 case MODE_VECTOR_INT:
4083 case MODE_VECTOR_FRACT:
4084 case MODE_VECTOR_UFRACT:
4085 case MODE_VECTOR_ACCUM:
4086 case MODE_VECTOR_UACCUM:
4087 {
4088 int i, units;
4089 scalar_mode submode = GET_MODE_INNER (mode);
4090 unsigned int subalign = MIN (align, GET_MODE_BITSIZE (submode));
4091
4092 gcc_assert (GET_CODE (x) == CONST_VECTOR);
4093 units = GET_MODE_NUNITS (mode);
4094
4095 for (i = 0; i < units; i++)
4096 {
4097 rtx elt = CONST_VECTOR_ELT (x, i);
4098 output_constant_pool_2 (submode, elt, i ? subalign : align);
4099 }
4100 }
4101 break;
4102
4103 default:
4104 gcc_unreachable ();
4105 }
4106 }
4107
4108 /* Worker function for output_constant_pool. Emit constant DESC,
4109 giving it ALIGN bits of alignment. */
4110
static void
output_constant_pool_1 (class constant_descriptor_rtx *desc,
			unsigned int align)
{
  rtx x, tmp;

  x = desc->constant;

  /* See if X is a LABEL_REF (or a CONST referring to a LABEL_REF)
     whose CODE_LABEL has been deleted.  This can occur if a jump table
     is eliminated by optimization.  If so, write a constant of zero
     instead.  Note that this can also happen by turning the
     CODE_LABEL into a NOTE.  */
  /* ??? This seems completely and utterly wrong.  Certainly it's
     not true for NOTE_INSN_DELETED_LABEL, but I disbelieve proper
     functioning even with rtx_insn::deleted and friends.  */

  tmp = x;
  switch (GET_CODE (tmp))
    {
    case CONST:
      /* Only look through (const (plus (label_ref ...) ...)).  */
      if (GET_CODE (XEXP (tmp, 0)) != PLUS
	  || GET_CODE (XEXP (XEXP (tmp, 0), 0)) != LABEL_REF)
	break;
      tmp = XEXP (XEXP (tmp, 0), 0);
      /* FALLTHRU */

    case LABEL_REF:
      {
	/* Assert the referenced label is still live.  */
	rtx_insn *insn = label_ref_label (tmp);
	gcc_assert (!insn->deleted ());
	gcc_assert (!NOTE_P (insn)
		    || NOTE_KIND (insn) != NOTE_INSN_DELETED);
	break;
      }

    default:
      break;
    }

  /* Targets that define this macro may emit the entry themselves and
     jump straight to DONE below, skipping the generic emission.  */
#ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
  ASM_OUTPUT_SPECIAL_POOL_ENTRY (asm_out_file, x, desc->mode,
				 align, desc->labelno, done);
#endif

  assemble_align (align);

  /* Output the label.  */
  targetm.asm_out.internal_label (asm_out_file, "LC", desc->labelno);

  /* Output the data.
     Pass actual alignment value while emitting string constant to asm code
     as function 'output_constant_pool_1' explicitly passes the alignment as 1
     assuming that the data is already aligned which prevents the generation
     of fix-up table entries.  */
  output_constant_pool_2 (desc->mode, x, desc->align);

  /* Make sure all constants in SECTION_MERGE and not SECTION_STRINGS
     sections have proper size.  */
  if (align > GET_MODE_BITSIZE (desc->mode)
      && in_section
      && (in_section->common.flags & SECTION_MERGE))
    assemble_align (align);

#ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
 done:
#endif
  return;
}
4180
4181 /* Recompute the offsets of entries in POOL, and the overall size of
4182 POOL. Do this after calling mark_constant_pool to ensure that we
4183 are computing the offset values for the pool which we will actually
4184 emit. */
4185
4186 static void
recompute_pool_offsets(struct rtx_constant_pool * pool)4187 recompute_pool_offsets (struct rtx_constant_pool *pool)
4188 {
4189 class constant_descriptor_rtx *desc;
4190 pool->offset = 0;
4191
4192 for (desc = pool->first; desc ; desc = desc->next)
4193 if (desc->mark)
4194 {
4195 /* Recalculate offset. */
4196 unsigned int align = desc->align;
4197 pool->offset += (align / BITS_PER_UNIT) - 1;
4198 pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);
4199 desc->offset = pool->offset;
4200 pool->offset += GET_MODE_SIZE (desc->mode);
4201 }
4202 }
4203
4204 /* Mark all constants that are referenced by SYMBOL_REFs in X.
4205 Emit referenced deferred strings. */
4206
static void
mark_constants_in_pattern (rtx insn)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
    {
      const_rtx x = *iter;
      if (GET_CODE (x) == SYMBOL_REF)
	{
	  if (CONSTANT_POOL_ADDRESS_P (x))
	    {
	      /* A reference into an RTL constant pool.  */
	      class constant_descriptor_rtx *desc = SYMBOL_REF_CONSTANT (x);
	      if (desc->mark == 0)
		{
		  desc->mark = 1;
		  /* Continue the walk inside the constant itself, so
		     pool entries referenced only by other pool entries
		     also get marked.  */
		  iter.substitute (desc->constant);
		}
	    }
	  else if (TREE_CONSTANT_POOL_ADDRESS_P (x))
	    {
	      /* A reference to a deferred tree constant: emit its
		 contents now if they have not been written yet.  */
	      tree decl = SYMBOL_REF_DECL (x);
	      if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
		{
		  n_deferred_constants--;
		  output_constant_def_contents (CONST_CAST_RTX (x));
		}
	    }
	}
    }
}
4237
4238 /* Look through appropriate parts of INSN, marking all entries in the
4239 constant pool which are actually being used. Entries that are only
4240 referenced by other constants are also marked as used. Emit
4241 deferred strings that are used. */
4242
4243 static void
mark_constants(rtx_insn * insn)4244 mark_constants (rtx_insn *insn)
4245 {
4246 if (!INSN_P (insn))
4247 return;
4248
4249 /* Insns may appear inside a SEQUENCE. Only check the patterns of
4250 insns, not any notes that may be attached. We don't want to mark
4251 a constant just because it happens to appear in a REG_EQUIV note. */
4252 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
4253 {
4254 int i, n = seq->len ();
4255 for (i = 0; i < n; ++i)
4256 {
4257 rtx subinsn = seq->element (i);
4258 if (INSN_P (subinsn))
4259 mark_constants_in_pattern (subinsn);
4260 }
4261 }
4262 else
4263 mark_constants_in_pattern (insn);
4264 }
4265
4266 /* Look through the instructions for this function, and mark all the
4267 entries in POOL which are actually being used. Emit deferred constants
4268 which have indeed been used. */
4269
4270 static void
mark_constant_pool(void)4271 mark_constant_pool (void)
4272 {
4273 rtx_insn *insn;
4274
4275 if (!crtl->uses_const_pool && n_deferred_constants == 0)
4276 return;
4277
4278 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4279 mark_constants (insn);
4280 }
4281
4282 /* Write all the constants in POOL. */
4283
static void
output_constant_pool_contents (struct rtx_constant_pool *pool)
{
  class constant_descriptor_rtx *desc;

  for (desc = pool->first; desc ; desc = desc->next)
    /* A negative mark (set by optimize_constant_pool) means this entry
       duplicates bytes of another entry: MARK is the bitwise complement
       of that entry's label number and OFFSET the position within it.
       Emit an alias instead of duplicating the data.  */
    if (desc->mark < 0)
      {
#ifdef ASM_OUTPUT_DEF
	const char *name = XSTR (desc->sym, 0);
	char label[256];
	char buffer[256 + 32];
	const char *p;

	ASM_GENERATE_INTERNAL_LABEL (label, "LC", ~desc->mark);
	p = label;
	if (desc->offset)
	  {
	    sprintf (buffer, "%s+%ld", p, (long) (desc->offset));
	    p = buffer;
	  }
	ASM_OUTPUT_DEF (asm_out_file, name, p);
#else
	/* Aliases are only created when TARGET_SUPPORTS_ALIASES, which
	   implies ASM_OUTPUT_DEF is available.  */
	gcc_unreachable ();
#endif
      }
    else if (desc->mark)
      {
	/* If the constant is part of an object_block, make sure that
	   the constant has been positioned within its block, but do not
	   write out its definition yet.  output_object_blocks will do
	   that later.  */
	if (SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym)
	    && SYMBOL_REF_BLOCK (desc->sym))
	  place_block_symbol (desc->sym);
	else
	  {
	    switch_to_section (targetm.asm_out.select_rtx_section
			       (desc->mode, desc->constant, desc->align));
	    output_constant_pool_1 (desc, desc->align);
	  }
      }
}
4327
/* Helper record for optimize_constant_pool: the encoded byte image of
   (part of) one pool constant, used to detect duplicated contents.  */
struct constant_descriptor_rtx_data {
  /* The pool entry this byte image belongs to.  */
  constant_descriptor_rtx *desc;
  /* The encoded bytes (pointing into a shared buffer).  */
  target_unit *bytes;
  /* Number of target_units in BYTES.  */
  unsigned short size;
  /* Offset of this span within the containing constant's image.  */
  unsigned short offset;
  /* Hash of the byte image; used as a unique insertion index until the
     real content hash is computed.  */
  unsigned int hash;
};
4335
4336 /* qsort callback to sort constant_descriptor_rtx_data * vector by
4337 decreasing size. */
4338
4339 static int
constant_descriptor_rtx_data_cmp(const void * p1,const void * p2)4340 constant_descriptor_rtx_data_cmp (const void *p1, const void *p2)
4341 {
4342 constant_descriptor_rtx_data *const data1
4343 = *(constant_descriptor_rtx_data * const *) p1;
4344 constant_descriptor_rtx_data *const data2
4345 = *(constant_descriptor_rtx_data * const *) p2;
4346 if (data1->size > data2->size)
4347 return -1;
4348 if (data1->size < data2->size)
4349 return 1;
4350 if (data1->hash < data2->hash)
4351 return -1;
4352 gcc_assert (data1->hash > data2->hash);
4353 return 1;
4354 }
4355
/* Hash-table traits for looking up constant_descriptor_rtx_data
   entries by the contents of their byte images.  */
struct const_rtx_data_hasher : nofree_ptr_hash<constant_descriptor_rtx_data>
{
  static hashval_t hash (constant_descriptor_rtx_data *);
  static bool equal (constant_descriptor_rtx_data *,
		     constant_descriptor_rtx_data *);
};
4362
4363 /* Hash and compare functions for const_rtx_data_htab. */
4364
4365 hashval_t
hash(constant_descriptor_rtx_data * data)4366 const_rtx_data_hasher::hash (constant_descriptor_rtx_data *data)
4367 {
4368 return data->hash;
4369 }
4370
bool
const_rtx_data_hasher::equal (constant_descriptor_rtx_data *x,
			      constant_descriptor_rtx_data *y)
{
  if (x->hash != y->hash || x->size != y->size)
    return 0;
  /* Compute the alignment each byte image effectively provides: the
     declared alignment of its descriptor, reduced to the lowest set
     bit of any nonzero bit-offset into the containing constant.  Y may
     only be served by X's bytes if X is at least as aligned as Y.  */
  unsigned int align1 = x->desc->align;
  unsigned int align2 = y->desc->align;
  unsigned int offset1 = (x->offset * BITS_PER_UNIT) & (align1 - 1);
  unsigned int offset2 = (y->offset * BITS_PER_UNIT) & (align2 - 1);
  if (offset1)
    align1 = least_bit_hwi (offset1);
  if (offset2)
    align2 = least_bit_hwi (offset2);
  if (align2 > align1)
    return 0;
  /* Finally, the byte images themselves must match exactly.  */
  if (memcmp (x->bytes, y->bytes, x->size * sizeof (target_unit)) != 0)
    return 0;
  return 1;
}
4391
4392 /* Attempt to optimize constant pool POOL. If it contains both CONST_VECTOR
4393 constants and scalar constants with the values of CONST_VECTOR elements,
4394 try to alias the scalar constants with the CONST_VECTOR elements. */
4395
static void
optimize_constant_pool (struct rtx_constant_pool *pool)
{
  auto_vec<target_unit, 128> buffer;
  auto_vec<constant_descriptor_rtx_data *, 128> vec;
  object_allocator<constant_descriptor_rtx_data>
    data_pool ("constant_descriptor_rtx_data_pool");
  int idx = 0;
  size_t size = 0;
  /* Phase 1: collect a record for every marked entry that is not
     placed in an object block and whose value can be byte-encoded.  */
  for (constant_descriptor_rtx *desc = pool->first; desc; desc = desc->next)
    if (desc->mark > 0
	&& ! (SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym)
	      && SYMBOL_REF_BLOCK (desc->sym)))
      {
	buffer.truncate (0);
	buffer.reserve (GET_MODE_SIZE (desc->mode));
	if (native_encode_rtx (desc->mode, desc->constant, buffer, 0,
			       GET_MODE_SIZE (desc->mode)))
	  {
	    constant_descriptor_rtx_data *data = data_pool.allocate ();
	    data->desc = desc;
	    data->bytes = NULL;
	    data->size = GET_MODE_SIZE (desc->mode);
	    data->offset = 0;
	    /* HASH temporarily holds a unique insertion index, used
	       only as the qsort tie-breaker below; the real content
	       hash is computed in phase 2.  */
	    data->hash = idx++;
	    size += data->size;
	    vec.safe_push (data);
	  }
      }
  if (idx)
    {
      /* Phase 2: process larger constants first so smaller ones can be
	 aliased to (sub-spans of) already-entered larger ones.  */
      vec.qsort (constant_descriptor_rtx_data_cmp);
      unsigned min_size = vec.last ()->size;
      target_unit *bytes = XNEWVEC (target_unit, size);
      unsigned int i;
      constant_descriptor_rtx_data *data;
      hash_table<const_rtx_data_hasher> * htab
	= new hash_table<const_rtx_data_hasher> (31);
      size = 0;
      FOR_EACH_VEC_ELT (vec, i, data)
	{
	  buffer.truncate (0);
	  native_encode_rtx (data->desc->mode, data->desc->constant,
			     buffer, 0, data->size);
	  memcpy (bytes + size, buffer.address (), data->size);
	  data->bytes = bytes + size;
	  data->hash = iterative_hash (data->bytes,
				       data->size * sizeof (target_unit), 0);
	  size += data->size;
	  constant_descriptor_rtx_data **slot
	    = htab->find_slot_with_hash (data, data->hash, INSERT);
	  if (*slot)
	    {
	      /* The byte image duplicates one already seen: emit this
		 entry as an alias.  A negative MARK encodes the bitwise
		 complement of the label number to alias to, OFFSET the
		 position within that entry.  */
	      data->desc->mark = ~(*slot)->desc->labelno;
	      data->desc->offset = (*slot)->offset;
	    }
	  else
	    {
	      unsigned int sz = 1 << floor_log2 (data->size);

	      *slot = data;
	      /* Also enter every smaller power-of-two aligned sub-span
		 of this constant, so later (smaller) constants can be
		 aliased into the middle of it.  */
	      for (sz >>= 1; sz >= min_size; sz >>= 1)
		for (unsigned off = 0; off + sz <= data->size; off += sz)
		  {
		    constant_descriptor_rtx_data tmp;
		    tmp.desc = data->desc;
		    tmp.bytes = data->bytes + off;
		    tmp.size = sz;
		    tmp.offset = off;
		    tmp.hash = iterative_hash (tmp.bytes,
					       sz * sizeof (target_unit), 0);
		    slot = htab->find_slot_with_hash (&tmp, tmp.hash, INSERT);
		    if (*slot == NULL)
		      {
			*slot = data_pool.allocate ();
			**slot = tmp;
		      }
		  }
	    }
	}
      delete htab;
      XDELETE (bytes);
    }
  data_pool.release ();
}
4481
4482 /* Mark all constants that are used in the current function, then write
4483 out the function's private constant pool. */
4484
static void
output_constant_pool (const char *fnname ATTRIBUTE_UNUSED,
		      tree fndecl ATTRIBUTE_UNUSED)
{
  struct rtx_constant_pool *pool = crtl->varasm.pool;

  /* It is possible for gcc to call force_const_mem and then to later
     discard the instructions which refer to the constant.  In such a
     case we do not need to output the constant.  */
  mark_constant_pool ();

  /* Having marked the constant pool entries we'll actually emit, we
     now need to rebuild the offset information, which may have become
     stale.  */
  recompute_pool_offsets (pool);

  /* FNNAME and FNDECL are only used by targets that define the pool
     prologue/epilogue macros.  */
#ifdef ASM_OUTPUT_POOL_PROLOGUE
  ASM_OUTPUT_POOL_PROLOGUE (asm_out_file, fnname, fndecl, pool->offset);
#endif

  output_constant_pool_contents (pool);

#ifdef ASM_OUTPUT_POOL_EPILOGUE
  ASM_OUTPUT_POOL_EPILOGUE (asm_out_file, fnname, fndecl, pool->offset);
#endif
}
4511
4512 /* Write the contents of the shared constant pool. */
4513
4514 void
output_shared_constant_pool(void)4515 output_shared_constant_pool (void)
4516 {
4517 if (optimize
4518 && TARGET_SUPPORTS_ALIASES)
4519 optimize_constant_pool (shared_constant_pool);
4520
4521 output_constant_pool_contents (shared_constant_pool);
4522 }
4523
/* Determine what kind of relocations EXP may need.  The return value
   is a mask: bit 0 is set when EXP references a symbol that binds
   locally, bit 1 when it references one that does not.  0 means no
   relocation is required (e.g. the difference of two local labels,
   which the linker can compute).  */

int
compute_reloc_for_constant (tree exp)
{
  int reloc = 0, reloc2;
  tree tem;

  switch (TREE_CODE (exp))
    {
    case ADDR_EXPR:
    case FDESC_EXPR:
      /* Go inside any operations that get_inner_reference can handle and see
	 if what's inside is a constant: no need to do anything here for
	 addresses of variables or functions.  */
      for (tem = TREE_OPERAND (exp, 0); handled_component_p (tem);
	   tem = TREE_OPERAND (tem, 0))
	;

      /* &MEM_REF(&base, off) references whatever the inner address
	 references.  */
      if (TREE_CODE (tem) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR)
	{
	  reloc = compute_reloc_for_constant (TREE_OPERAND (tem, 0));
	  break;
	}

      if (!targetm.binds_local_p (tem))
	reloc |= 2;
      else
	reloc |= 1;
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
      reloc |= compute_reloc_for_constant (TREE_OPERAND (exp, 1));
      break;

    case MINUS_EXPR:
      reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
      reloc2 = compute_reloc_for_constant (TREE_OPERAND (exp, 1));
      /* The difference of two local labels is computable at link time.  */
      if (reloc == 1 && reloc2 == 1)
	reloc = 0;
      else
	reloc |= reloc2;
      break;

    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      /* Conversions do not change what is referenced.  */
      reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
      break;

    case CONSTRUCTOR:
      {
	/* Aggregate initializers need whatever relocations any of
	   their elements need.  */
	unsigned HOST_WIDE_INT idx;
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem)
	  if (tem != 0)
	    reloc |= compute_reloc_for_constant (tem);
      }
      break;

    default:
      break;
    }
  return reloc;
}
4591
4592 /* Find all the constants whose addresses are referenced inside of EXP,
4593 and make sure assembler code with a label has been output for each one.
4594 Indicate whether an ADDR_EXPR has been encountered. */
4595
static void
output_addressed_constants (tree exp, int defer)
{
  tree tem;

  switch (TREE_CODE (exp))
    {
    case ADDR_EXPR:
    case FDESC_EXPR:
      /* Go inside any operations that get_inner_reference can handle and see
	 if what's inside is a constant: no need to do anything here for
	 addresses of variables or functions.  */
      for (tem = TREE_OPERAND (exp, 0); handled_component_p (tem);
	   tem = TREE_OPERAND (tem, 0))
	;

      /* If we have an initialized CONST_DECL, retrieve the initializer.  */
      if (TREE_CODE (tem) == CONST_DECL && DECL_INITIAL (tem))
	tem = DECL_INITIAL (tem);

      /* The address of a constant requires that constant to have a
	 labeled location in memory; emit (or defer) it.  */
      if (CONSTANT_CLASS_P (tem) || TREE_CODE (tem) == CONSTRUCTOR)
	output_constant_def (tem, defer);

      if (TREE_CODE (tem) == MEM_REF)
	output_addressed_constants (TREE_OPERAND (tem, 0), defer);
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
      /* Handle operand 1 here, then fall through to share the
	 operand-0 recursion with the conversion cases.  */
      output_addressed_constants (TREE_OPERAND (exp, 1), defer);
      gcc_fallthrough ();

    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      output_addressed_constants (TREE_OPERAND (exp, 0), defer);
      break;

    case CONSTRUCTOR:
      {
	/* Recurse into every element of an aggregate initializer.  */
	unsigned HOST_WIDE_INT idx;
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem)
	  if (tem != 0)
	    output_addressed_constants (tem, defer);
      }
      break;

    default:
      break;
    }
}
4647
4648 /* Whether a constructor CTOR is a valid static constant initializer if all
4649 its elements are. This used to be internal to initializer_constant_valid_p
4650 and has been exposed to let other functions like categorize_ctor_elements
4651 evaluate the property while walking a constructor for other purposes. */
4652
4653 bool
constructor_static_from_elts_p(const_tree ctor)4654 constructor_static_from_elts_p (const_tree ctor)
4655 {
4656 return (TREE_CONSTANT (ctor)
4657 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4658 || TREE_CODE (TREE_TYPE (ctor)) == RECORD_TYPE
4659 || TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE));
4660 }
4661
4662 static tree initializer_constant_valid_p_1 (tree value, tree endtype,
4663 tree *cache);
4664
4665 /* A subroutine of initializer_constant_valid_p. VALUE is a MINUS_EXPR,
4666 PLUS_EXPR or POINTER_PLUS_EXPR. This looks for cases of VALUE
4667 which are valid when ENDTYPE is an integer of any size; in
4668 particular, this does not accept a pointer minus a constant. This
4669 returns null_pointer_node if the VALUE is an absolute constant
4670 which can be used to initialize a static variable. Otherwise it
4671 returns NULL. */
4672
static tree
narrowing_initializer_constant_valid_p (tree value, tree endtype, tree *cache)
{
  tree op0, op1;

  if (!INTEGRAL_TYPE_P (endtype))
    return NULL_TREE;

  op0 = TREE_OPERAND (value, 0);
  op1 = TREE_OPERAND (value, 1);

  /* Like STRIP_NOPS except allow the operand mode to widen.  This
     works around a feature of fold that simplifies (int)(p1 - p2) to
     ((int)p1 - (int)p2) under the theory that the narrower operation
     is cheaper.  */

  while (CONVERT_EXPR_P (op0)
	 || TREE_CODE (op0) == NON_LVALUE_EXPR)
    {
      tree inner = TREE_OPERAND (op0, 0);
      /* Stop at non-integral operands or at conversions that narrow
	 (the outer type is wider than the inner).  */
      if (inner == error_mark_node
	  || ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
	  || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op0)))
	      > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner)))))
	break;
      op0 = inner;
    }

  /* Same stripping for the second operand.  */
  while (CONVERT_EXPR_P (op1)
	 || TREE_CODE (op1) == NON_LVALUE_EXPR)
    {
      tree inner = TREE_OPERAND (op1, 0);
      if (inner == error_mark_node
	  || ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
	  || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op1)))
	      > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner)))))
	break;
      op1 = inner;
    }

  op0 = initializer_constant_valid_p_1 (op0, endtype, cache);
  if (!op0)
    return NULL_TREE;

  /* CACHE, if given, is a pair of slots per operand; operand 1 uses
     the second pair.  */
  op1 = initializer_constant_valid_p_1 (op1, endtype,
					cache ? cache + 2 : NULL);
  /* Both initializers must be known.  */
  if (op1)
    {
      if (op0 == op1
	  && (op0 == null_pointer_node
	      || TREE_CODE (value) == MINUS_EXPR))
	return null_pointer_node;

      /* Support differences between labels.  */
      if (TREE_CODE (op0) == LABEL_DECL
	  && TREE_CODE (op1) == LABEL_DECL)
	return null_pointer_node;

      if (TREE_CODE (op0) == STRING_CST
	  && TREE_CODE (op1) == STRING_CST
	  && operand_equal_p (op0, op1, 1))
	return null_pointer_node;
    }

  return NULL_TREE;
}
4740
4741 /* Helper function of initializer_constant_valid_p.
4742 Return nonzero if VALUE is a valid constant-valued expression
4743 for use in initializing a static variable; one that can be an
4744 element of a "constant" initializer.
4745
4746 Return null_pointer_node if the value is absolute;
4747 if it is relocatable, return the variable that determines the relocation.
4748 We assume that VALUE has been folded as much as possible;
4749 therefore, we do not need to check for such things as
4750 arithmetic-combinations of integers.
4751
4752 Use CACHE (pointer to 2 tree values) for caching if non-NULL. */
4753
4754 static tree
initializer_constant_valid_p_1(tree value,tree endtype,tree * cache)4755 initializer_constant_valid_p_1 (tree value, tree endtype, tree *cache)
4756 {
4757 tree ret;
4758
4759 switch (TREE_CODE (value))
4760 {
4761 case CONSTRUCTOR:
4762 if (constructor_static_from_elts_p (value))
4763 {
4764 unsigned HOST_WIDE_INT idx;
4765 tree elt;
4766 bool absolute = true;
4767
4768 if (cache && cache[0] == value)
4769 return cache[1];
4770 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
4771 {
4772 tree reloc;
4773 reloc = initializer_constant_valid_p_1 (elt, TREE_TYPE (elt),
4774 NULL);
4775 if (!reloc
4776 /* An absolute value is required with reverse SSO. */
4777 || (reloc != null_pointer_node
4778 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (value))
4779 && !AGGREGATE_TYPE_P (TREE_TYPE (elt))))
4780 {
4781 if (cache)
4782 {
4783 cache[0] = value;
4784 cache[1] = NULL_TREE;
4785 }
4786 return NULL_TREE;
4787 }
4788 if (reloc != null_pointer_node)
4789 absolute = false;
4790 }
4791 /* For a non-absolute relocation, there is no single
4792 variable that can be "the variable that determines the
4793 relocation." */
4794 if (cache)
4795 {
4796 cache[0] = value;
4797 cache[1] = absolute ? null_pointer_node : error_mark_node;
4798 }
4799 return absolute ? null_pointer_node : error_mark_node;
4800 }
4801
4802 return TREE_STATIC (value) ? null_pointer_node : NULL_TREE;
4803
4804 case INTEGER_CST:
4805 case VECTOR_CST:
4806 case REAL_CST:
4807 case FIXED_CST:
4808 case STRING_CST:
4809 case COMPLEX_CST:
4810 return null_pointer_node;
4811
4812 case ADDR_EXPR:
4813 case FDESC_EXPR:
4814 {
4815 tree op0 = staticp (TREE_OPERAND (value, 0));
4816 if (op0)
4817 {
4818 /* "&(*a).f" is like unto pointer arithmetic. If "a" turns out
4819 to be a constant, this is old-skool offsetof-like nonsense. */
4820 if (TREE_CODE (op0) == INDIRECT_REF
4821 && TREE_CONSTANT (TREE_OPERAND (op0, 0)))
4822 return null_pointer_node;
4823 /* Taking the address of a nested function involves a trampoline,
4824 unless we don't need or want one. */
4825 if (TREE_CODE (op0) == FUNCTION_DECL
4826 && DECL_STATIC_CHAIN (op0)
4827 && !TREE_NO_TRAMPOLINE (value))
4828 return NULL_TREE;
4829 /* "&{...}" requires a temporary to hold the constructed
4830 object. */
4831 if (TREE_CODE (op0) == CONSTRUCTOR)
4832 return NULL_TREE;
4833 }
4834 return op0;
4835 }
4836
4837 case NON_LVALUE_EXPR:
4838 return initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
4839 endtype, cache);
4840
4841 case VIEW_CONVERT_EXPR:
4842 {
4843 tree src = TREE_OPERAND (value, 0);
4844 tree src_type = TREE_TYPE (src);
4845 tree dest_type = TREE_TYPE (value);
4846
4847 /* Allow view-conversions from aggregate to non-aggregate type only
4848 if the bit pattern is fully preserved afterwards; otherwise, the
4849 RTL expander won't be able to apply a subsequent transformation
4850 to the underlying constructor. */
4851 if (AGGREGATE_TYPE_P (src_type) && !AGGREGATE_TYPE_P (dest_type))
4852 {
4853 if (TYPE_MODE (endtype) == TYPE_MODE (dest_type))
4854 return initializer_constant_valid_p_1 (src, endtype, cache);
4855 else
4856 return NULL_TREE;
4857 }
4858
4859 /* Allow all other kinds of view-conversion. */
4860 return initializer_constant_valid_p_1 (src, endtype, cache);
4861 }
4862
4863 CASE_CONVERT:
4864 {
4865 tree src = TREE_OPERAND (value, 0);
4866 tree src_type = TREE_TYPE (src);
4867 tree dest_type = TREE_TYPE (value);
4868
4869 /* Allow conversions between pointer types, floating-point
4870 types, and offset types. */
4871 if ((POINTER_TYPE_P (dest_type) && POINTER_TYPE_P (src_type))
4872 || (FLOAT_TYPE_P (dest_type) && FLOAT_TYPE_P (src_type))
4873 || (TREE_CODE (dest_type) == OFFSET_TYPE
4874 && TREE_CODE (src_type) == OFFSET_TYPE))
4875 return initializer_constant_valid_p_1 (src, endtype, cache);
4876
4877 /* Allow length-preserving conversions between integer types. */
4878 if (INTEGRAL_TYPE_P (dest_type) && INTEGRAL_TYPE_P (src_type)
4879 && (TYPE_PRECISION (dest_type) == TYPE_PRECISION (src_type)))
4880 return initializer_constant_valid_p_1 (src, endtype, cache);
4881
4882 /* Allow conversions between other integer types only if
4883 explicit value. Don't allow sign-extension to a type larger
4884 than word and pointer, there aren't relocations that would
4885 allow to sign extend it to a wider type. */
4886 if (INTEGRAL_TYPE_P (dest_type)
4887 && INTEGRAL_TYPE_P (src_type)
4888 && (TYPE_UNSIGNED (src_type)
4889 || TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type)
4890 || TYPE_PRECISION (dest_type) <= BITS_PER_WORD
4891 || TYPE_PRECISION (dest_type) <= POINTER_SIZE))
4892 {
4893 tree inner = initializer_constant_valid_p_1 (src, endtype, cache);
4894 if (inner == null_pointer_node)
4895 return null_pointer_node;
4896 break;
4897 }
4898
4899 /* Allow (int) &foo provided int is as wide as a pointer. */
4900 if (INTEGRAL_TYPE_P (dest_type) && POINTER_TYPE_P (src_type)
4901 && (TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type)))
4902 return initializer_constant_valid_p_1 (src, endtype, cache);
4903
4904 /* Likewise conversions from int to pointers, but also allow
4905 conversions from 0. */
4906 if ((POINTER_TYPE_P (dest_type)
4907 || TREE_CODE (dest_type) == OFFSET_TYPE)
4908 && INTEGRAL_TYPE_P (src_type))
4909 {
4910 if (TREE_CODE (src) == INTEGER_CST
4911 && TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type))
4912 return null_pointer_node;
4913 if (integer_zerop (src))
4914 return null_pointer_node;
4915 else if (TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type))
4916 return initializer_constant_valid_p_1 (src, endtype, cache);
4917 }
4918
4919 /* Allow conversions to struct or union types if the value
4920 inside is okay. */
4921 if (TREE_CODE (dest_type) == RECORD_TYPE
4922 || TREE_CODE (dest_type) == UNION_TYPE)
4923 return initializer_constant_valid_p_1 (src, endtype, cache);
4924 }
4925 break;
4926
4927 case POINTER_PLUS_EXPR:
4928 case PLUS_EXPR:
4929 /* Any valid floating-point constants will have been folded by now;
4930 with -frounding-math we hit this with addition of two constants. */
4931 if (TREE_CODE (endtype) == REAL_TYPE)
4932 return NULL_TREE;
4933 if (cache && cache[0] == value)
4934 return cache[1];
4935 if (! INTEGRAL_TYPE_P (endtype)
4936 || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
4937 {
4938 tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
4939 tree valid0
4940 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
4941 endtype, ncache);
4942 tree valid1
4943 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
4944 endtype, ncache + 2);
4945 /* If either term is absolute, use the other term's relocation. */
4946 if (valid0 == null_pointer_node)
4947 ret = valid1;
4948 else if (valid1 == null_pointer_node)
4949 ret = valid0;
4950 /* Support narrowing pointer differences. */
4951 else
4952 ret = narrowing_initializer_constant_valid_p (value, endtype,
4953 ncache);
4954 }
4955 else
4956 /* Support narrowing pointer differences. */
4957 ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
4958 if (cache)
4959 {
4960 cache[0] = value;
4961 cache[1] = ret;
4962 }
4963 return ret;
4964
4965 case POINTER_DIFF_EXPR:
4966 case MINUS_EXPR:
4967 if (TREE_CODE (endtype) == REAL_TYPE)
4968 return NULL_TREE;
4969 if (cache && cache[0] == value)
4970 return cache[1];
4971 if (! INTEGRAL_TYPE_P (endtype)
4972 || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
4973 {
4974 tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
4975 tree valid0
4976 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
4977 endtype, ncache);
4978 tree valid1
4979 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
4980 endtype, ncache + 2);
4981 /* Win if second argument is absolute. */
4982 if (valid1 == null_pointer_node)
4983 ret = valid0;
4984 /* Win if both arguments have the same relocation.
4985 Then the value is absolute. */
4986 else if (valid0 == valid1 && valid0 != 0)
4987 ret = null_pointer_node;
4988 /* Since GCC guarantees that string constants are unique in the
4989 generated code, a subtraction between two copies of the same
4990 constant string is absolute. */
4991 else if (valid0 && TREE_CODE (valid0) == STRING_CST
4992 && valid1 && TREE_CODE (valid1) == STRING_CST
4993 && operand_equal_p (valid0, valid1, 1))
4994 ret = null_pointer_node;
4995 /* Support narrowing differences. */
4996 else
4997 ret = narrowing_initializer_constant_valid_p (value, endtype,
4998 ncache);
4999 }
5000 else
5001 /* Support narrowing differences. */
5002 ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
5003 if (cache)
5004 {
5005 cache[0] = value;
5006 cache[1] = ret;
5007 }
5008 return ret;
5009
5010 default:
5011 break;
5012 }
5013
5014 return NULL_TREE;
5015 }
5016
5017 /* Return nonzero if VALUE is a valid constant-valued expression
5018 for use in initializing a static variable; one that can be an
5019 element of a "constant" initializer.
5020
5021 Return null_pointer_node if the value is absolute;
5022 if it is relocatable, return the variable that determines the relocation.
5023 We assume that VALUE has been folded as much as possible;
5024 therefore, we do not need to check for such things as
5025 arithmetic-combinations of integers. */
5026 tree
initializer_constant_valid_p(tree value,tree endtype,bool reverse)5027 initializer_constant_valid_p (tree value, tree endtype, bool reverse)
5028 {
5029 tree reloc = initializer_constant_valid_p_1 (value, endtype, NULL);
5030
5031 /* An absolute value is required with reverse storage order. */
5032 if (reloc
5033 && reloc != null_pointer_node
5034 && reverse
5035 && !AGGREGATE_TYPE_P (endtype)
5036 && !VECTOR_TYPE_P (endtype))
5037 reloc = NULL_TREE;
5038
5039 return reloc;
5040 }
5041
5042 /* Return true if VALUE is a valid constant-valued expression
5043 for use in initializing a static bit-field; one that can be
5044 an element of a "constant" initializer. */
5045
5046 bool
initializer_constant_valid_for_bitfield_p(tree value)5047 initializer_constant_valid_for_bitfield_p (tree value)
5048 {
5049 /* For bitfields we support integer constants or possibly nested aggregates
5050 of such. */
5051 switch (TREE_CODE (value))
5052 {
5053 case CONSTRUCTOR:
5054 {
5055 unsigned HOST_WIDE_INT idx;
5056 tree elt;
5057
5058 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
5059 if (!initializer_constant_valid_for_bitfield_p (elt))
5060 return false;
5061 return true;
5062 }
5063
5064 case INTEGER_CST:
5065 case REAL_CST:
5066 return true;
5067
5068 case VIEW_CONVERT_EXPR:
5069 case NON_LVALUE_EXPR:
5070 return
5071 initializer_constant_valid_for_bitfield_p (TREE_OPERAND (value, 0));
5072
5073 default:
5074 break;
5075 }
5076
5077 return false;
5078 }
5079
5080 /* Check if a STRING_CST fits into the field.
5081 Tolerate only the case when the NUL termination
5082 does not fit into the field. */
5083
5084 static bool
check_string_literal(tree string,unsigned HOST_WIDE_INT size)5085 check_string_literal (tree string, unsigned HOST_WIDE_INT size)
5086 {
5087 tree type = TREE_TYPE (string);
5088 tree eltype = TREE_TYPE (type);
5089 unsigned HOST_WIDE_INT elts = tree_to_uhwi (TYPE_SIZE_UNIT (eltype));
5090 unsigned HOST_WIDE_INT mem_size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
5091 int len = TREE_STRING_LENGTH (string);
5092
5093 if (elts != 1 && elts != 2 && elts != 4)
5094 return false;
5095 if (len < 0 || len % elts != 0)
5096 return false;
5097 if (size < (unsigned)len)
5098 return false;
5099 if (mem_size != size)
5100 return false;
5101 return true;
5102 }
5103
/* output_constructor outer state of relevance in recursive calls, typically
   for nested aggregate bitfields: the bit position within, and the partial
   contents of, the byte the caller is currently assembling.  */

struct oc_outer_state {
  unsigned int bit_offset; /* current position in ... */
  int byte; /* ... the outer byte buffer. */
};
5111
5112 static unsigned HOST_WIDE_INT
5113 output_constructor (tree, unsigned HOST_WIDE_INT, unsigned int, bool,
5114 oc_outer_state *);
5115
/* Output assembler code for constant EXP, with no label.
   This includes the pseudo-op such as ".int" or ".byte", and a newline.
   Assumes output_addressed_constants has been done on EXP already.

   Generate at least SIZE bytes of assembler data, padding at the end
   with zeros if necessary.  SIZE must always be specified.  The returned
   value is the actual number of bytes of assembler data generated, which
   may be bigger than SIZE if the object contains a variable length field.

   SIZE is important for structure constructors,
   since trailing members may have been omitted from the constructor.
   It is also important for initialization of arrays from string constants
   since the full length of the string constant might not be wanted.
   It is also needed for initialization of unions, where the initializer's
   type is just one member, and that may not be as long as the union.

   There is a case in which we would fail to output exactly SIZE bytes:
   for a structure constructor that wants to produce more than SIZE bytes.
   But such constructors will never be generated for any possible input.

   ALIGN is the alignment of the data in bits.

   If REVERSE is true, EXP is output in reverse storage order.

   If MERGE_STRINGS is true, a STRING_CST whose last byte is not already
   a NUL is output with one extra (terminating) byte — see the STRING_CST
   case below.  */

static unsigned HOST_WIDE_INT
output_constant (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
		 bool reverse, bool merge_strings)
{
  enum tree_code code;
  unsigned HOST_WIDE_INT thissize;
  rtx cst;

  /* Nothing to emit for an empty object or when only checking syntax.  */
  if (size == 0 || flag_syntax_only)
    return size;

  /* See if we're trying to initialize a pointer in a non-default mode
     to the address of some declaration somewhere.  If the target says
     the mode is valid for pointers, assume the target has a way of
     resolving it.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && POINTER_TYPE_P (TREE_TYPE (exp))
      && targetm.addr_space.valid_pointer_mode
	   (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)),
	    TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))))
    {
      tree saved_type = TREE_TYPE (exp);

      /* Peel off any intermediate conversions-to-pointer for valid
	 pointer modes.  */
      while (TREE_CODE (exp) == NOP_EXPR
	     && POINTER_TYPE_P (TREE_TYPE (exp))
	     && targetm.addr_space.valid_pointer_mode
		  (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)),
		   TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))))
	exp = TREE_OPERAND (exp, 0);

      /* If what we're left with is the address of something, we can
	 convert the address to the final type and output it that
	 way.  */
      if (TREE_CODE (exp) == ADDR_EXPR)
	exp = build1 (ADDR_EXPR, saved_type, TREE_OPERAND (exp, 0));
      /* Likewise for constant ints.  */
      else if (TREE_CODE (exp) == INTEGER_CST)
	exp = fold_convert (saved_type, exp);

    }

  /* Eliminate any conversions since we'll be outputting the underlying
     constant.  */
  while (CONVERT_EXPR_P (exp)
	 || TREE_CODE (exp) == NON_LVALUE_EXPR
	 || TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      HOST_WIDE_INT type_size = int_size_in_bytes (TREE_TYPE (exp));
      HOST_WIDE_INT op_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0)));

      /* Make sure eliminating the conversion is really a no-op, except with
	 VIEW_CONVERT_EXPRs to allow for wild Ada unchecked conversions and
	 union types to allow for Ada unchecked unions.  */
      if (type_size > op_size
	  && TREE_CODE (exp) != VIEW_CONVERT_EXPR
	  && TREE_CODE (TREE_TYPE (exp)) != UNION_TYPE)
	/* Keep the conversion. */
	break;
      else
	exp = TREE_OPERAND (exp, 0);
    }

  code = TREE_CODE (TREE_TYPE (exp));
  thissize = int_size_in_bytes (TREE_TYPE (exp));

  /* Allow a constructor with no elements for any data type.
     This means to fill the space with zeros.  */
  if (TREE_CODE (exp) == CONSTRUCTOR
      && vec_safe_is_empty (CONSTRUCTOR_ELTS (exp)))
    {
      assemble_zeros (size);
      return size;
    }

  /* Function descriptors are emitted via a target-provided macro.  */
  if (TREE_CODE (exp) == FDESC_EXPR)
    {
#ifdef ASM_OUTPUT_FDESC
      HOST_WIDE_INT part = tree_to_shwi (TREE_OPERAND (exp, 1));
      tree decl = TREE_OPERAND (exp, 0);
      ASM_OUTPUT_FDESC (asm_out_file, decl, part);
#else
      gcc_unreachable ();
#endif
      return size;
    }

  /* Now output the underlying data.  If we've handled the padding, return.
     Otherwise, break and ensure SIZE is the size written.  */
  switch (code)
    {
    case BOOLEAN_TYPE:
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case OFFSET_TYPE:
    case FIXED_POINT_TYPE:
    case NULLPTR_TYPE:
      cst = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
      if (reverse)
	cst = flip_storage_order (TYPE_MODE (TREE_TYPE (exp)), cst);
      if (!assemble_integer (cst, MIN (size, thissize), align, 0))
	error ("initializer for integer/fixed-point value is too complicated");
      break;

    case REAL_TYPE:
      if (TREE_CODE (exp) != REAL_CST)
	error ("initializer for floating value is not a floating constant");
      else
	assemble_real (TREE_REAL_CST (exp),
		       SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (exp)),
		       align, reverse);
      break;

    case COMPLEX_TYPE:
      /* Real part first, then imaginary part at half alignment.  */
      output_constant (TREE_REALPART (exp), thissize / 2, align,
		       reverse, false);
      output_constant (TREE_IMAGPART (exp), thissize / 2,
		       min_align (align, BITS_PER_UNIT * (thissize / 2)),
		       reverse, false);
      break;

    case ARRAY_TYPE:
    case VECTOR_TYPE:
      switch (TREE_CODE (exp))
	{
	case CONSTRUCTOR:
	  return output_constructor (exp, size, align, reverse, NULL);
	case STRING_CST:
	  thissize = (unsigned HOST_WIDE_INT)TREE_STRING_LENGTH (exp);
	  /* When merging strings, make sure a NUL terminator is output.  */
	  if (merge_strings
	      && (thissize == 0
		  || TREE_STRING_POINTER (exp) [thissize - 1] != '\0'))
	    thissize++;
	  gcc_checking_assert (check_string_literal (exp, size));
	  assemble_string (TREE_STRING_POINTER (exp), thissize);
	  break;
	case VECTOR_CST:
	  {
	    scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	    unsigned int nalign = MIN (align, GET_MODE_ALIGNMENT (inner));
	    int elt_size = GET_MODE_SIZE (inner);
	    output_constant (VECTOR_CST_ELT (exp, 0), elt_size, align,
			     reverse, false);
	    thissize = elt_size;
	    /* Static constants must have a fixed size.  */
	    unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
	    for (unsigned int i = 1; i < nunits; i++)
	      {
		output_constant (VECTOR_CST_ELT (exp, i), elt_size, nalign,
				 reverse, false);
		thissize += elt_size;
	      }
	    break;
	  }
	default:
	  gcc_unreachable ();
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      gcc_assert (TREE_CODE (exp) == CONSTRUCTOR);
      return output_constructor (exp, size, align, reverse, NULL);

    case ERROR_MARK:
      return 0;

    default:
      gcc_unreachable ();
    }

  /* Pad out to SIZE bytes with zeros if the data was smaller.  */
  if (size > thissize)
    assemble_zeros (size - thissize);

  return size;
}
5319
5320 /* Subroutine of output_constructor, used for computing the size of
5321 arrays of unspecified length. VAL must be a CONSTRUCTOR of an array
5322 type with an unspecified upper bound. */
5323
5324 static unsigned HOST_WIDE_INT
array_size_for_constructor(tree val)5325 array_size_for_constructor (tree val)
5326 {
5327 tree max_index;
5328 unsigned HOST_WIDE_INT cnt;
5329 tree index, value, tmp;
5330 offset_int i;
5331
5332 /* This code used to attempt to handle string constants that are not
5333 arrays of single-bytes, but nothing else does, so there's no point in
5334 doing it here. */
5335 if (TREE_CODE (val) == STRING_CST)
5336 return TREE_STRING_LENGTH (val);
5337
5338 max_index = NULL_TREE;
5339 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (val), cnt, index, value)
5340 {
5341 if (TREE_CODE (index) == RANGE_EXPR)
5342 index = TREE_OPERAND (index, 1);
5343 if (max_index == NULL_TREE || tree_int_cst_lt (max_index, index))
5344 max_index = index;
5345 }
5346
5347 if (max_index == NULL_TREE)
5348 return 0;
5349
5350 /* Compute the total number of array elements. */
5351 tmp = TYPE_MIN_VALUE (TYPE_DOMAIN (TREE_TYPE (val)));
5352 i = wi::to_offset (max_index) - wi::to_offset (tmp) + 1;
5353
5354 /* Multiply by the array element unit size to find number of bytes. */
5355 i *= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (val))));
5356
5357 gcc_assert (wi::fits_uhwi_p (i));
5358 return i.to_uhwi ();
5359 }
5360
/* Other datastructures + helpers for output_constructor.  */

/* output_constructor local state to support interaction with helpers:
   the received arguments plus all the emission state threaded through
   the output_constructor_* helper routines.  */

struct oc_local_state {

  /* Received arguments.  */
  tree exp; /* Constructor expression.  */
  tree type; /* Type of constructor expression.  */
  unsigned HOST_WIDE_INT size; /* # bytes to output - pad if necessary.  */
  unsigned int align; /* Known initial alignment.  */
  tree min_index; /* Lower bound if specified for an array.  */

  /* Output processing state.  */
  HOST_WIDE_INT total_bytes; /* # bytes output so far / current position.  */
  int byte; /* Part of a bitfield byte yet to be output.  */
  int last_relative_index; /* Implicit or explicit index of the last
			      array element output within a bitfield.  */
  bool byte_buffer_in_use; /* Whether BYTE is in use.  */
  bool reverse; /* Whether reverse storage order is in use.  */

  /* Current element.  */
  tree field; /* Current field decl in a record.  */
  tree val; /* Current element value.  */
  tree index; /* Current element index.  */

};
5388
/* Helper for output_constructor.  From the current LOCAL state, output a
   RANGE_EXPR element: one copy of the (shared) initial value for every
   index in the range, after padding up to the range's start offset.  */

static void
output_constructor_array_range (oc_local_state *local)
{
  /* Perform the index calculation in modulo arithmetic but
     sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
     but we are using an unsigned sizetype.  */
  unsigned prec = TYPE_PRECISION (sizetype);
  offset_int idx = wi::sext (wi::to_offset (TREE_OPERAND (local->index, 0))
			     - wi::to_offset (local->min_index), prec);
  tree valtype = TREE_TYPE (local->val);
  /* Byte offset of the first element of the range within the array.  */
  HOST_WIDE_INT fieldpos
    = (idx * wi::to_offset (TYPE_SIZE_UNIT (valtype))).to_short_addr ();

  /* Advance to offset of this element.  */
  if (fieldpos > local->total_bytes)
    {
      assemble_zeros (fieldpos - local->total_bytes);
      local->total_bytes = fieldpos;
    }
  else
    /* Must not go backwards.  */
    gcc_assert (fieldpos == local->total_bytes);

  unsigned HOST_WIDE_INT fieldsize
    = int_size_in_bytes (TREE_TYPE (local->type));

  /* Inclusive bounds of the range being initialized.  */
  HOST_WIDE_INT lo_index
    = tree_to_shwi (TREE_OPERAND (local->index, 0));
  HOST_WIDE_INT hi_index
    = tree_to_shwi (TREE_OPERAND (local->index, 1));
  HOST_WIDE_INT index;

  unsigned int align2
    = min_align (local->align, fieldsize * BITS_PER_UNIT);

  for (index = lo_index; index <= hi_index; index++)
    {
      /* Output the element's initial value.  */
      if (local->val == NULL_TREE)
	assemble_zeros (fieldsize);
      else
	fieldsize = output_constant (local->val, fieldsize, align2,
				     local->reverse, false);

      /* Count its size.  */
      local->total_bytes += fieldsize;
    }
}
5440
/* Helper for output_constructor.  From the current LOCAL state, output a
   field element that is not true bitfield or part of an outer one:
   flush any pending bit-field byte, pad up to the element's position,
   then emit its value (or zeros when there is no value).  */

static void
output_constructor_regular_field (oc_local_state *local)
{
  /* Field size and position.  Since this structure is static, we know the
     positions are constant.  */
  unsigned HOST_WIDE_INT fieldsize;
  HOST_WIDE_INT fieldpos;

  unsigned int align2;

  /* Output any buffered-up bit-fields preceding this element.  */
  if (local->byte_buffer_in_use)
    {
      assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
      local->total_bytes++;
      local->byte_buffer_in_use = false;
    }

  if (local->index != NULL_TREE)
    {
      /* Perform the index calculation in modulo arithmetic but
	 sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
	 but we are using an unsigned sizetype.  */
      unsigned prec = TYPE_PRECISION (sizetype);
      offset_int idx = wi::sext (wi::to_offset (local->index)
				 - wi::to_offset (local->min_index), prec);
      fieldpos = (idx * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (local->val))))
	.to_short_addr ();
    }
  else if (local->field != NULL_TREE)
    fieldpos = int_byte_position (local->field);
  else
    fieldpos = 0;

  /* Advance to offset of this element.
     Note no alignment needed in an array, since that is guaranteed
     if each element has the proper size.  */
  if (local->field != NULL_TREE || local->index != NULL_TREE)
    {
      if (fieldpos > local->total_bytes)
	{
	  assemble_zeros (fieldpos - local->total_bytes);
	  local->total_bytes = fieldpos;
	}
      else
	/* Must not go backwards.  */
	gcc_assert (fieldpos == local->total_bytes);
    }

  /* Find the alignment of this element.  */
  align2 = min_align (local->align, BITS_PER_UNIT * fieldpos);

  /* Determine size this element should occupy.  */
  if (local->field)
    {
      fieldsize = 0;

      /* If this is an array with an unspecified upper bound,
	 the initializer determines the size.  */
      /* ??? This ought to only be checked if DECL_SIZE_UNIT is NULL,
	 but we cannot do this until the deprecated support for
	 initializing zero-length array members is removed.  */
      if (TREE_CODE (TREE_TYPE (local->field)) == ARRAY_TYPE
	  && (!TYPE_DOMAIN (TREE_TYPE (local->field))
	      || !TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (local->field)))))
	{
	  fieldsize = array_size_for_constructor (local->val);
	  /* Given a non-empty initialization, this field had better
	     be last.  Given a flexible array member, the next field
	     on the chain is a TYPE_DECL of the enclosing struct.  */
	  const_tree next = DECL_CHAIN (local->field);
	  gcc_assert (!fieldsize || !next || TREE_CODE (next) != FIELD_DECL);
	  tree size = TYPE_SIZE_UNIT (TREE_TYPE (local->val));
	  gcc_checking_assert (compare_tree_int (size, fieldsize) == 0);
	}
      else
	fieldsize = tree_to_uhwi (DECL_SIZE_UNIT (local->field));
    }
  else
    fieldsize = int_size_in_bytes (TREE_TYPE (local->type));

  /* Output the element's initial value.  */
  if (local->val == NULL_TREE)
    assemble_zeros (fieldsize);
  else
    fieldsize = output_constant (local->val, fieldsize, align2,
				 local->reverse, false);

  /* Count its size.  */
  local->total_bytes += fieldsize;
}
5535
/* Helper for output_constructor.  From the LOCAL state, output an element
   that is a true bitfield or part of an outer one.  BIT_OFFSET is the offset
   from the start of a possibly ongoing outer byte buffer.  Bits are
   accumulated into LOCAL->byte and the byte is flushed to the assembler
   stream each time it fills up.  */

static void
output_constructor_bitfield (oc_local_state *local, unsigned int bit_offset)
{
  /* Bit size of this element.  */
  HOST_WIDE_INT ebitsize
    = (local->field
       ? tree_to_uhwi (DECL_SIZE (local->field))
       : tree_to_uhwi (TYPE_SIZE (TREE_TYPE (local->type))));

  /* Relative index of this element if this is an array component.  */
  HOST_WIDE_INT relative_index
    = (!local->field
       ? (local->index
	  ? (tree_to_shwi (local->index)
	     - tree_to_shwi (local->min_index))
	  : local->last_relative_index + 1)
       : 0);

  /* Bit position of this element from the start of the containing
     constructor.  */
  HOST_WIDE_INT constructor_relative_ebitpos
    = (local->field
       ? int_bit_position (local->field)
       : ebitsize * relative_index);

  /* Bit position of this element from the start of a possibly ongoing
     outer byte buffer.  */
  HOST_WIDE_INT byte_relative_ebitpos
    = bit_offset + constructor_relative_ebitpos;

  /* From the start of a possibly ongoing outer byte buffer, offsets to
     the first bit of this element and to the first bit past the end of
     this element.  */
  HOST_WIDE_INT next_offset = byte_relative_ebitpos;
  HOST_WIDE_INT end_offset = byte_relative_ebitpos + ebitsize;

  local->last_relative_index = relative_index;

  /* A missing value is emitted as all-zero bits.  */
  if (local->val == NULL_TREE)
    local->val = integer_zero_node;

  /* Look through wrappers that do not change the underlying constant.  */
  while (TREE_CODE (local->val) == VIEW_CONVERT_EXPR
	 || TREE_CODE (local->val) == NON_LVALUE_EXPR)
    local->val = TREE_OPERAND (local->val, 0);

  if (TREE_CODE (local->val) != INTEGER_CST
      && TREE_CODE (local->val) != CONSTRUCTOR)
    {
      error ("invalid initial value for member %qE", DECL_NAME (local->field));
      return;
    }

  /* If this field does not start in this (or next) byte, skip some bytes.  */
  if (next_offset / BITS_PER_UNIT != local->total_bytes)
    {
      /* Output remnant of any bit field in previous bytes.  */
      if (local->byte_buffer_in_use)
	{
	  assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
	  local->total_bytes++;
	  local->byte_buffer_in_use = false;
	}

      /* If still not at proper byte, advance to there.  */
      if (next_offset / BITS_PER_UNIT != local->total_bytes)
	{
	  gcc_assert (next_offset / BITS_PER_UNIT >= local->total_bytes);
	  assemble_zeros (next_offset / BITS_PER_UNIT - local->total_bytes);
	  local->total_bytes = next_offset / BITS_PER_UNIT;
	}
    }

  /* Set up the buffer if necessary.  */
  if (!local->byte_buffer_in_use)
    {
      local->byte = 0;
      if (ebitsize > 0)
	local->byte_buffer_in_use = true;
    }

  /* If this is nested constructor, recurse passing the bit offset and the
     pending data, then retrieve the new pending data afterwards.  */
  if (TREE_CODE (local->val) == CONSTRUCTOR)
    {
      oc_outer_state temp_state;
      temp_state.bit_offset = next_offset % BITS_PER_UNIT;
      temp_state.byte = local->byte;
      local->total_bytes
	+= output_constructor (local->val, 0, 0, local->reverse, &temp_state);
      local->byte = temp_state.byte;
      return;
    }

  /* Otherwise, we must split the element into pieces that fall within
     separate bytes, and combine each byte with previous or following
     bit-fields.  */
  while (next_offset < end_offset)
    {
      int this_time;
      int shift;
      unsigned HOST_WIDE_INT value;
      HOST_WIDE_INT next_byte = next_offset / BITS_PER_UNIT;
      HOST_WIDE_INT next_bit = next_offset % BITS_PER_UNIT;

      /* Advance from byte to byte within this element when necessary.  */
      while (next_byte != local->total_bytes)
	{
	  assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
	  local->total_bytes++;
	  local->byte = 0;
	}

      /* Number of bits we can process at once (all part of the same byte).  */
      this_time = MIN (end_offset - next_offset, BITS_PER_UNIT - next_bit);
      if (local->reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
	{
	  /* For big-endian data, take the most significant bits (of the
	     bits that are significant) first and put them into bytes from
	     the most significant end.  */
	  shift = end_offset - next_offset - this_time;

	  /* Don't try to take a bunch of bits that cross
	     the word boundary in the INTEGER_CST.  We can
	     only select bits from one element.  */
	  if ((shift / HOST_BITS_PER_WIDE_INT)
	      != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
	    {
	      /* Clip the chunk at the word boundary below the top bit.  */
	      const int end = shift + this_time - 1;
	      shift = end & -HOST_BITS_PER_WIDE_INT;
	      this_time = end - shift + 1;
	    }

	  /* Now get the bits we want to insert.  */
	  value = wi::extract_uhwi (wi::to_widest (local->val),
				    shift, this_time);

	  /* Get the result.  This works only when:
	     1 <= this_time <= HOST_BITS_PER_WIDE_INT.  */
	  local->byte |= value << (BITS_PER_UNIT - this_time - next_bit);
	}
      else
	{
	  /* On little-endian machines, take the least significant bits of
	     the value first and pack them starting at the least significant
	     bits of the bytes.  */
	  shift = next_offset - byte_relative_ebitpos;

	  /* Don't try to take a bunch of bits that cross
	     the word boundary in the INTEGER_CST.  We can
	     only select bits from one element.  */
	  if ((shift / HOST_BITS_PER_WIDE_INT)
	      != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
	    this_time
	      = HOST_BITS_PER_WIDE_INT - (shift & (HOST_BITS_PER_WIDE_INT - 1));

	  /* Now get the bits we want to insert.  */
	  value = wi::extract_uhwi (wi::to_widest (local->val),
				    shift, this_time);

	  /* Get the result.  This works only when:
	     1 <= this_time <= HOST_BITS_PER_WIDE_INT.  */
	  local->byte |= value << next_bit;
	}

      next_offset += this_time;
      local->byte_buffer_in_use = true;
    }
}
5708
/* Subroutine of output_constant, used for CONSTRUCTORs (aggregate constants).
   Generate at least SIZE bytes, padding if necessary.  OUTER designates the
   caller output state of relevance in recursive invocations.  */

static unsigned HOST_WIDE_INT
output_constructor (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
		    bool reverse, oc_outer_state *outer)
{
  unsigned HOST_WIDE_INT cnt;
  constructor_elt *ce;
  oc_local_state local;

  /* Setup our local state to communicate with helpers.  */
  local.exp = exp;
  local.type = TREE_TYPE (exp);
  local.size = size;
  local.align = align;
  if (TREE_CODE (local.type) == ARRAY_TYPE && TYPE_DOMAIN (local.type))
    local.min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (local.type));
  else
    local.min_index = integer_zero_node;

  local.total_bytes = 0;
  /* In a recursive invocation for an outer bitfield, continue accumulating
     into the partially-filled byte handed down by the caller.  */
  local.byte_buffer_in_use = outer != NULL;
  local.byte = outer ? outer->byte : 0;
  local.last_relative_index = -1;
  /* The storage order is specified for every aggregate type.  */
  if (AGGREGATE_TYPE_P (local.type))
    local.reverse = TYPE_REVERSE_STORAGE_ORDER (local.type);
  else
    local.reverse = reverse;

  gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_UNIT);

  /* As CE goes through the elements of the constant, FIELD goes through the
     structure fields if the constant is a structure.  If the constant is a
     union, we override this by getting the field from the TREE_LIST element.
     But the constant could also be an array.  Then FIELD is zero.

     There is always a maximum of one element in the chain LINK for unions
     (even if the initializer in a source program incorrectly contains
     more than one).  */

  if (TREE_CODE (local.type) == RECORD_TYPE)
    local.field = TYPE_FIELDS (local.type);
  else
    local.field = NULL_TREE;

  for (cnt = 0;
       vec_safe_iterate (CONSTRUCTOR_ELTS (exp), cnt, &ce);
       cnt++, local.field = local.field ? DECL_CHAIN (local.field) : 0)
    {
      local.val = ce->value;
      local.index = NULL_TREE;

      /* The element in a union constructor specifies the proper field
	 or index.  */
      if (RECORD_OR_UNION_TYPE_P (local.type) && ce->index != NULL_TREE)
	local.field = ce->index;

      else if (TREE_CODE (local.type) == ARRAY_TYPE)
	local.index = ce->index;

      /* Under -fverbose-asm, annotate the output with the field name.  */
      if (local.field && flag_verbose_asm)
	fprintf (asm_out_file, "%s %s:\n",
		 ASM_COMMENT_START,
		 DECL_NAME (local.field)
		 ? IDENTIFIER_POINTER (DECL_NAME (local.field))
		 : "<anonymous>");

      /* Eliminate the marker that makes a cast not be an lvalue.  */
      if (local.val != NULL_TREE)
	STRIP_NOPS (local.val);

      /* Output the current element, using the appropriate helper ...  */

      /* For an array slice not part of an outer bitfield.  */
      if (!outer
	  && local.index != NULL_TREE
	  && TREE_CODE (local.index) == RANGE_EXPR)
	output_constructor_array_range (&local);

      /* For a field that is neither a true bitfield nor part of an outer one,
	 known to be at least byte aligned and multiple-of-bytes long.  */
      else if (!outer
	       && (local.field == NULL_TREE
		   || !CONSTRUCTOR_BITFIELD_P (local.field)))
	output_constructor_regular_field (&local);

      /* For a true bitfield or part of an outer one.  Only INTEGER_CSTs are
	 supported for scalar fields, so we may need to convert first.  */
      else
	{
	  if (TREE_CODE (local.val) == REAL_CST)
	    local.val
	      = fold_unary (VIEW_CONVERT_EXPR,
			    build_nonstandard_integer_type
			    (TYPE_PRECISION (TREE_TYPE (local.val)), 0),
			    local.val);
	  output_constructor_bitfield (&local, outer ? outer->bit_offset : 0);
	}
    }

  /* If we are not at toplevel, save the pending data for our caller.
     Otherwise output the pending data and padding zeros as needed.  */
  if (outer)
    outer->byte = local.byte;
  else
    {
      if (local.byte_buffer_in_use)
	{
	  assemble_integer (GEN_INT (local.byte), 1, BITS_PER_UNIT, 1);
	  local.total_bytes++;
	}

      /* Pad with zeros up to the requested size.  */
      if ((unsigned HOST_WIDE_INT)local.total_bytes < local.size)
	{
	  assemble_zeros (local.size - local.total_bytes);
	  local.total_bytes = local.size;
	}
    }

  return local.total_bytes;
}
5833
5834 /* Mark DECL as weak. */
5835
5836 static void
mark_weak(tree decl)5837 mark_weak (tree decl)
5838 {
5839 if (DECL_WEAK (decl))
5840 return;
5841
5842 struct symtab_node *n = symtab_node::get (decl);
5843 if (n && n->refuse_visibility_changes)
5844 error ("%+qD declared weak after being used", decl);
5845 DECL_WEAK (decl) = 1;
5846
5847 if (DECL_RTL_SET_P (decl)
5848 && MEM_P (DECL_RTL (decl))
5849 && XEXP (DECL_RTL (decl), 0)
5850 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == SYMBOL_REF)
5851 SYMBOL_REF_WEAK (XEXP (DECL_RTL (decl), 0)) = 1;
5852 }
5853
/* Merge weak status between NEWDECL and OLDDECL.  */

void
merge_weak (tree newdecl, tree olddecl)
{
  if (DECL_WEAK (newdecl) == DECL_WEAK (olddecl))
    {
      if (DECL_WEAK (newdecl) && TARGET_SUPPORTS_WEAK)
	{
	  tree *pwd;
	  /* We put the NEWDECL on the weak_decls list at some point
	     and OLDDECL as well.  Keep just OLDDECL on the list.  */
	  for (pwd = &weak_decls; *pwd; pwd = &TREE_CHAIN (*pwd))
	    if (TREE_VALUE (*pwd) == newdecl)
	      {
		*pwd = TREE_CHAIN (*pwd);
		break;
	      }
	}
      return;
    }

  if (DECL_WEAK (newdecl))
    {
      tree wd;

      /* NEWDECL is weak, but OLDDECL is not.  */

      /* If we already output the OLDDECL, we're in trouble; we can't
	 go back and make it weak.  This should never happen in
	 unit-at-a-time compilation.  */
      gcc_assert (!TREE_ASM_WRITTEN (olddecl));

      /* If we've already generated rtl referencing OLDDECL, we may
	 have done so in a way that will not function properly with
	 a weak symbol.  Again in unit-at-a-time this should be
	 impossible.  */
      gcc_assert (!TREE_USED (olddecl)
	          || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (olddecl)));

      /* PR 49899: You cannot convert a static function into a weak, public function.  */
      if (! TREE_PUBLIC (olddecl) && TREE_PUBLIC (newdecl))
	error ("weak declaration of %q+D being applied to a already "
	       "existing, static definition", newdecl);

      if (TARGET_SUPPORTS_WEAK)
	{
	  /* We put the NEWDECL on the weak_decls list at some point.
	     Replace it with the OLDDECL.  */
	  for (wd = weak_decls; wd; wd = TREE_CHAIN (wd))
	    if (TREE_VALUE (wd) == newdecl)
	      {
		TREE_VALUE (wd) = olddecl;
		break;
	      }
	  /* We may not find the entry on the list.  If NEWDECL is a
	     weak alias, then we will have already called
	     globalize_decl to remove the entry; in that case, we do
	     not need to do anything.  */
	}

      /* Make the OLDDECL weak; it's OLDDECL that we'll be keeping.  */
      mark_weak (olddecl);
    }
  else
    /* OLDDECL was weak, but NEWDECL was not explicitly marked as
       weak.  Just update NEWDECL to indicate that it's weak too.  */
    mark_weak (newdecl);
}
5923
5924 /* Declare DECL to be a weak symbol. */
5925
5926 void
declare_weak(tree decl)5927 declare_weak (tree decl)
5928 {
5929 /* With -fsyntax-only, TREE_ASM_WRITTEN might be set on certain function
5930 decls earlier than normally, but as with -fsyntax-only nothing is really
5931 emitted, there is no harm in marking it weak later. */
5932 gcc_assert (TREE_CODE (decl) != FUNCTION_DECL
5933 || !TREE_ASM_WRITTEN (decl)
5934 || flag_syntax_only);
5935 if (! TREE_PUBLIC (decl))
5936 {
5937 error ("weak declaration of %q+D must be public", decl);
5938 return;
5939 }
5940 else if (!TARGET_SUPPORTS_WEAK)
5941 warning (0, "weak declaration of %q+D not supported", decl);
5942
5943 mark_weak (decl);
5944 if (!lookup_attribute ("weak", DECL_ATTRIBUTES (decl)))
5945 DECL_ATTRIBUTES (decl)
5946 = tree_cons (get_identifier ("weak"), NULL, DECL_ATTRIBUTES (decl));
5947 }
5948
/* Emit the assembler directive that marks DECL weak, using whichever of the
   target macros ASM_WEAKEN_DECL / ASM_WEAKEN_LABEL is available; on targets
   with only ASM_OUTPUT_WEAK_ALIAS, warn once instead.  */

static void
weak_finish_1 (tree decl)
{
#if defined (ASM_WEAKEN_DECL) || defined (ASM_WEAKEN_LABEL)
  const char *const name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
#endif

  /* Only emit directives for symbols that were actually used.  */
  if (! TREE_USED (decl))
    return;

#ifdef ASM_WEAKEN_DECL
  ASM_WEAKEN_DECL (asm_out_file, decl, name, NULL);
#else
#ifdef ASM_WEAKEN_LABEL
  ASM_WEAKEN_LABEL (asm_out_file, name);
#else
#ifdef ASM_OUTPUT_WEAK_ALIAS
  {
    /* This target only supports weak aliases, not plain weak symbols;
       warn the first time and emit nothing.  */
    static bool warn_once = 0;
    if (! warn_once)
      {
	warning (0, "only weak aliases are supported in this configuration");
	warn_once = 1;
      }
    return;
  }
#endif
#endif
#endif
}
5979
5980 /* Fiven an assembly name, find the decl it is associated with. */
5981 static tree
find_decl(tree target)5982 find_decl (tree target)
5983 {
5984 symtab_node *node = symtab_node::get_for_asmname (target);
5985 if (node)
5986 return node->decl;
5987 return NULL_TREE;
5988 }
5989
/* This TREE_LIST contains weakref targets: TREE_PURPOSE is the alias decl
   and TREE_VALUE is its target's assembler name (see do_assemble_alias and
   weak_finish).  */

static GTY(()) tree weakref_targets;
5993
/* Emit any pending weak declarations.  */

void
weak_finish (void)
{
  tree t;

  /* First process queued weakrefs, pruning both lists as we go so that no
     symbol receives more than one weakening directive.  */
  for (t = weakref_targets; t; t = TREE_CHAIN (t))
    {
      tree alias_decl = TREE_PURPOSE (t);
      tree target = ultimate_transparent_alias_target (&TREE_VALUE (t));

      if (! TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (alias_decl))
	  || TREE_SYMBOL_REFERENCED (target))
	/* Remove alias_decl from the weak list, but leave entries for
	   the target alone.  */
	target = NULL_TREE;
#ifndef ASM_OUTPUT_WEAKREF
      else if (! TREE_SYMBOL_REFERENCED (target))
	{
	  /* Use ASM_WEAKEN_LABEL only if ASM_WEAKEN_DECL is not
	     defined, otherwise we and weak_finish_1 would use
	     different macros.  */
# if defined ASM_WEAKEN_LABEL && ! defined ASM_WEAKEN_DECL
	  ASM_WEAKEN_LABEL (asm_out_file, IDENTIFIER_POINTER (target));
# else
	  tree decl = find_decl (target);

	  /* No decl is known for the target: synthesize an artificial
	     external decl of the same kind as the alias so weak_finish_1
	     has something to emit a directive for.  */
	  if (! decl)
	    {
	      decl = build_decl (DECL_SOURCE_LOCATION (alias_decl),
				 TREE_CODE (alias_decl), target,
				 TREE_TYPE (alias_decl));

	      DECL_EXTERNAL (decl) = 1;
	      TREE_PUBLIC (decl) = 1;
	      DECL_ARTIFICIAL (decl) = 1;
	      TREE_NOTHROW (decl) = TREE_NOTHROW (alias_decl);
	      TREE_USED (decl) = 1;
	    }

	  weak_finish_1 (decl);
# endif
	}
#endif

      {
	tree *p;
	tree t2;

	/* Remove the alias and the target from the pending weak list
	   so that we do not emit any .weak directives for the former,
	   nor multiple .weak directives for the latter.  */
	for (p = &weak_decls; (t2 = *p) ; )
	  {
	    if (TREE_VALUE (t2) == alias_decl
		|| target == DECL_ASSEMBLER_NAME (TREE_VALUE (t2)))
	      *p = TREE_CHAIN (t2);
	    else
	      p = &TREE_CHAIN (t2);
	  }

	/* Remove other weakrefs to the same target, to speed things up.  */
	for (p = &TREE_CHAIN (t); (t2 = *p) ; )
	  {
	    if (target == ultimate_transparent_alias_target (&TREE_VALUE (t2)))
	      *p = TREE_CHAIN (t2);
	    else
	      p = &TREE_CHAIN (t2);
	  }
      }
    }

  /* Finally weaken whatever remains on the plain weak-decl list.  */
  for (t = weak_decls; t; t = TREE_CHAIN (t))
    {
      tree decl = TREE_VALUE (t);

      weak_finish_1 (decl);
    }
}
6074
/* Emit the assembly bits to indicate that DECL is globally visible.  */

static void
globalize_decl (tree decl)
{

#if defined (ASM_WEAKEN_LABEL) || defined (ASM_WEAKEN_DECL)
  /* On targets with weakening directives, a weak decl is globalized by the
     weakening directive itself, so emit that and drop any queued entries.  */
  if (DECL_WEAK (decl))
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
      tree *p, t;

#ifdef ASM_WEAKEN_DECL
      ASM_WEAKEN_DECL (asm_out_file, decl, name, 0);
#else
      ASM_WEAKEN_LABEL (asm_out_file, name);
#endif

      /* Remove this function from the pending weak list so that
	 we do not emit multiple .weak directives for it.  */
      for (p = &weak_decls; (t = *p) ; )
	{
	  if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t)))
	    *p = TREE_CHAIN (t);
	  else
	    p = &TREE_CHAIN (t);
	}

      /* Remove weakrefs to the same target from the pending weakref
	 list, for the same reason.  */
      for (p = &weakref_targets; (t = *p) ; )
	{
	  if (DECL_ASSEMBLER_NAME (decl)
	      == ultimate_transparent_alias_target (&TREE_VALUE (t)))
	    *p = TREE_CHAIN (t);
	  else
	    p = &TREE_CHAIN (t);
	}

      return;
    }
#endif

  targetm.asm_out.globalize_decl_name (asm_out_file, decl);
}
6120
/* (decl, target) alias pairs queued by assemble_alias when the alias cannot
   be emitted immediately because the target symbol is not yet written.  */
vec<alias_pair, va_gc> *alias_pairs;
6122
/* Output the assembler code for a define (equate) using ASM_OUTPUT_DEF
   or ASM_OUTPUT_DEF_FROM_DECLS.  The function defines the symbol whose
   tree node is DECL to have the value of the tree node TARGET.  */

void
do_assemble_alias (tree decl, tree target)
{
  tree id;

  /* Emulated TLS had better not get this var.  */
  gcc_assert (!(!targetm.have_tls
		&& VAR_P (decl)
		&& DECL_THREAD_LOCAL_P (decl)));

  /* Emit the alias at most once.  */
  if (TREE_ASM_WRITTEN (decl))
    return;

  /* Resolve transparent aliases on both sides before emitting names.  */
  id = DECL_ASSEMBLER_NAME (decl);
  ultimate_transparent_alias_target (&id);
  ultimate_transparent_alias_target (&target);

  /* We must force creation of DECL_RTL for debug info generation, even though
     we don't use it here.  */
  make_decl_rtl (decl);

  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (DECL_ASSEMBLER_NAME (decl)) = 1;
  TREE_ASM_WRITTEN (id) = 1;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    {
      /* Queue the target so weak_finish can weaken it if it is never
	 otherwise referenced.  */
      if (!TREE_SYMBOL_REFERENCED (target))
	weakref_targets = tree_cons (decl, target, weakref_targets);

#ifdef ASM_OUTPUT_WEAKREF
      ASM_OUTPUT_WEAKREF (asm_out_file, decl,
			  IDENTIFIER_POINTER (id),
			  IDENTIFIER_POINTER (target));
#else
      if (!TARGET_SUPPORTS_WEAK)
	{
	  error_at (DECL_SOURCE_LOCATION (decl),
		    "weakref is not supported in this configuration");
	  return;
	}
#endif
      return;
    }

#ifdef ASM_OUTPUT_DEF
  /* NOTE(review): orig_decl is never reassigned here, so the
     "|| TREE_PUBLIC (orig_decl)" test below is redundant with the first
     operand — possibly a leftover from an earlier refactoring.  */
  tree orig_decl = decl;

  /* Make name accessible from other files, if appropriate.  */

  if (TREE_PUBLIC (decl) || TREE_PUBLIC (orig_decl))
    {
      globalize_decl (decl);
      maybe_assemble_visibility (decl);
    }
  if (TREE_CODE (decl) == FUNCTION_DECL
      && cgraph_node::get (decl)->ifunc_resolver)
    {
      /* An ifunc resolver needs a gnu_indirect_function type directive;
	 diagnose targets without ifunc support.  */
#if defined (ASM_OUTPUT_TYPE_DIRECTIVE)
      if (targetm.has_ifunc_p ())
	ASM_OUTPUT_TYPE_DIRECTIVE
	  (asm_out_file, IDENTIFIER_POINTER (id),
	   IFUNC_ASM_TYPE);
      else
#endif
	error_at (DECL_SOURCE_LOCATION (decl),
		  "%qs is not supported on this target", "ifunc");
    }

# ifdef ASM_OUTPUT_DEF_FROM_DECLS
  ASM_OUTPUT_DEF_FROM_DECLS (asm_out_file, decl, target);
# else
  ASM_OUTPUT_DEF (asm_out_file,
		  IDENTIFIER_POINTER (id),
		  IDENTIFIER_POINTER (target));
# endif
#elif defined (ASM_OUTPUT_WEAK_ALIAS) || defined (ASM_WEAKEN_DECL)
  {
    const char *name;
    tree *p, t;

    name = IDENTIFIER_POINTER (id);
# ifdef ASM_WEAKEN_DECL
    ASM_WEAKEN_DECL (asm_out_file, decl, name, IDENTIFIER_POINTER (target));
# else
    ASM_OUTPUT_WEAK_ALIAS (asm_out_file, name, IDENTIFIER_POINTER (target));
# endif
    /* Remove this function from the pending weak list so that
       we do not emit multiple .weak directives for it.  */
    for (p = &weak_decls; (t = *p) ; )
      if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t))
	  || id == DECL_ASSEMBLER_NAME (TREE_VALUE (t)))
	*p = TREE_CHAIN (t);
      else
	p = &TREE_CHAIN (t);

    /* Remove weakrefs to the same target from the pending weakref
       list, for the same reason.  */
    for (p = &weakref_targets; (t = *p) ; )
      {
	if (id == ultimate_transparent_alias_target (&TREE_VALUE (t)))
	  *p = TREE_CHAIN (t);
	else
	  p = &TREE_CHAIN (t);
      }
  }
#endif
}
6235
6236 /* Output .symver directive. */
6237
6238 void
do_assemble_symver(tree decl,tree target)6239 do_assemble_symver (tree decl, tree target)
6240 {
6241 tree id = DECL_ASSEMBLER_NAME (decl);
6242 ultimate_transparent_alias_target (&id);
6243 ultimate_transparent_alias_target (&target);
6244 #ifdef ASM_OUTPUT_SYMVER_DIRECTIVE
6245 ASM_OUTPUT_SYMVER_DIRECTIVE (asm_out_file,
6246 IDENTIFIER_POINTER (target),
6247 IDENTIFIER_POINTER (id));
6248 #else
6249 error ("symver is only supported on ELF platforms");
6250 #endif
6251 }
6252
/* Emit an assembler directive to make the symbol for DECL an alias to
   the symbol for TARGET.  */

void
assemble_alias (tree decl, tree target)
{
  tree target_decl;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    {
      tree alias = DECL_ASSEMBLER_NAME (decl);

      ultimate_transparent_alias_target (&target);

      /* A weakref must not resolve to itself and must have static
	 linkage; diagnose both, then carry on.  */
      if (alias == target)
	error ("%qs symbol %q+D ultimately targets itself", "weakref", decl);
      if (TREE_PUBLIC (decl))
	error ("%qs symbol %q+D must have static linkage", "weakref", decl);
    }
  else
    {
      /* Non-weakref aliases require target support; without ASM_OUTPUT_DEF
	 only weak aliases (if even those) can be emitted.  */
#if !defined (ASM_OUTPUT_DEF)
# if !defined(ASM_OUTPUT_WEAK_ALIAS) && !defined (ASM_WEAKEN_DECL)
      error_at (DECL_SOURCE_LOCATION (decl),
		"alias definitions not supported in this configuration");
      TREE_ASM_WRITTEN (decl) = 1;
      return;
# else
      if (!DECL_WEAK (decl))
	{
	  /* NB: ifunc_resolver isn't set when an error is detected.  */
	  if (TREE_CODE (decl) == FUNCTION_DECL
	      && lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "%qs is not supported in this configuration", "ifunc");
	  else
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "only weak aliases are supported in this configuration");
	  TREE_ASM_WRITTEN (decl) = 1;
	  return;
	}
# endif
#endif
    }
  TREE_USED (decl) = 1;

  /* Allow aliases to aliases.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    cgraph_node::get_create (decl)->alias = true;
  else
    varpool_node::get_create (decl)->alias = true;

  /* If the target has already been emitted, we don't have to queue the
     alias.  This saves a tad of memory.  */
  if (symtab->global_info_ready)
    target_decl = find_decl (target);
  else
    target_decl= NULL;
  if ((target_decl && TREE_ASM_WRITTEN (target_decl))
      || symtab->state >= EXPANSION)
    do_assemble_alias (decl, target);
  else
    {
      /* Target not yet emitted: queue the pair on alias_pairs for later.  */
      alias_pair p = {decl, target};
      vec_safe_push (alias_pairs, p);
    }
}
6320
/* Record and output a table of translations from original function
   to its transaction aware clone.  Note that tm_pure functions are
   considered to be their own clone.  */

/* Hash traits for the original->clone map below; entries survive garbage
   collection only while their source decl is still marked.  */
struct tm_clone_hasher : ggc_cache_ptr_hash<tree_map>
{
  static hashval_t hash (tree_map *m) { return tree_map_hash (m); }
  static bool equal (tree_map *a, tree_map *b) { return tree_map_eq (a, b); }

  static int
  keep_cache_entry (tree_map *&e)
  {
    return ggc_marked_p (e->base.from);
  }
};

/* Map from original function decl to its transactional clone decl.  */
static GTY((cache)) hash_table<tm_clone_hasher> *tm_clone_hash;
6338
6339 void
record_tm_clone_pair(tree o,tree n)6340 record_tm_clone_pair (tree o, tree n)
6341 {
6342 struct tree_map **slot, *h;
6343
6344 if (tm_clone_hash == NULL)
6345 tm_clone_hash = hash_table<tm_clone_hasher>::create_ggc (32);
6346
6347 h = ggc_alloc<tree_map> ();
6348 h->hash = htab_hash_pointer (o);
6349 h->base.from = o;
6350 h->to = n;
6351
6352 slot = tm_clone_hash->find_slot_with_hash (h, h->hash, INSERT);
6353 *slot = h;
6354 }
6355
6356 tree
get_tm_clone_pair(tree o)6357 get_tm_clone_pair (tree o)
6358 {
6359 if (tm_clone_hash)
6360 {
6361 struct tree_map *h, in;
6362
6363 in.base.from = o;
6364 in.hash = htab_hash_pointer (o);
6365 h = tm_clone_hash->find_with_hash (&in, in.hash);
6366 if (h)
6367 return h->to;
6368 }
6369 return NULL_TREE;
6370 }
6371
/* An (original, clone) decl pair plus the DECL_UID of the original, used
   as a stable sort key when emitting the .tm_clone_table.  */
struct tm_alias_pair
{
  unsigned int uid;  /* DECL_UID of FROM; the sort key.  */
  tree from;
  tree to;
};
6378
6379
/* Dump the actual pairs to the .tm_clone_table section.  */

static void
dump_tm_clone_pairs (vec<tm_alias_pair> tm_alias_pairs)
{
  unsigned i;
  tm_alias_pair *p;
  /* Switch to the clone-table section lazily, only if at least one pair
     is actually emitted.  */
  bool switched = false;

  FOR_EACH_VEC_ELT (tm_alias_pairs, i, p)
    {
      tree src = p->from;
      tree dst = p->to;
      struct cgraph_node *src_n = cgraph_node::get (src);
      struct cgraph_node *dst_n = cgraph_node::get (dst);

      /* The function ipa_tm_create_version() marks the clone as needed if
	 the original function was needed.  But we also mark the clone as
	 needed if we ever called the clone indirectly through
	 TM_GETTMCLONE.  If neither of these are true, we didn't generate
	 a clone, and we didn't call it indirectly... no sense keeping it
	 in the clone table.  */
      if (!dst_n || !dst_n->definition)
	continue;

      /* This covers the case where we have optimized the original
	 function away, and only access the transactional clone.  */
      if (!src_n || !src_n->definition)
	continue;

      if (!switched)
	{
	  switch_to_section (targetm.asm_out.tm_clone_table_section ());
	  assemble_align (POINTER_SIZE);
	  switched = true;
	}

      /* Each table entry is a pair of pointers: original, then clone.  */
      assemble_integer (XEXP (DECL_RTL (src), 0),
			POINTER_SIZE_UNITS, POINTER_SIZE, 1);
      assemble_integer (XEXP (DECL_RTL (dst), 0),
			POINTER_SIZE_UNITS, POINTER_SIZE, 1);
    }
}
6423
/* Provide a default for the tm_clone_table section.  */

section *
default_clone_table_section (void)
{
  /* NOTE(review): the third argument is the RELOC value passed through to
     get_named_section; confirm the literal 3 is the intended value.  */
  return get_named_section (NULL, ".tm_clone_table", 3);
}
6431
6432 /* Helper comparison function for qsorting by the DECL_UID stored in
6433 alias_pair->emitted_diags. */
6434
6435 static int
tm_alias_pair_cmp(const void * x,const void * y)6436 tm_alias_pair_cmp (const void *x, const void *y)
6437 {
6438 const tm_alias_pair *p1 = (const tm_alias_pair *) x;
6439 const tm_alias_pair *p2 = (const tm_alias_pair *) y;
6440 if (p1->uid < p2->uid)
6441 return -1;
6442 if (p1->uid > p2->uid)
6443 return 1;
6444 return 0;
6445 }
6446
/* Flush all recorded transactional clone pairs to the .tm_clone_table
   section and discard the hash table.  */

void
finish_tm_clone_pairs (void)
{
  vec<tm_alias_pair> tm_alias_pairs = vNULL;

  if (tm_clone_hash == NULL)
    return;

  /* We need a deterministic order for the .tm_clone_table, otherwise
     we will get bootstrap comparison failures, so dump the hash table
     to a vector, sort it, and dump the vector.  */

  /* Dump the hashtable to a vector.  */
  tree_map *map;
  hash_table<tm_clone_hasher>::iterator iter;
  FOR_EACH_HASH_TABLE_ELEMENT (*tm_clone_hash, map, tree_map *, iter)
    {
      tm_alias_pair p = {DECL_UID (map->base.from), map->base.from, map->to};
      tm_alias_pairs.safe_push (p);
    }
  /* Sort it.  */
  tm_alias_pairs.qsort (tm_alias_pair_cmp);

  /* Dump it.  */
  dump_tm_clone_pairs (tm_alias_pairs);

  /* The table is only needed once per compilation; release everything.  */
  tm_clone_hash->empty ();
  tm_clone_hash = NULL;
  tm_alias_pairs.release ();
}
6477
6478
6479 /* Emit an assembler directive to set symbol for DECL visibility to
6480 the visibility type VIS, which must not be VISIBILITY_DEFAULT. */
6481
6482 void
default_assemble_visibility(tree decl ATTRIBUTE_UNUSED,int vis ATTRIBUTE_UNUSED)6483 default_assemble_visibility (tree decl ATTRIBUTE_UNUSED,
6484 int vis ATTRIBUTE_UNUSED)
6485 {
6486 #ifdef HAVE_GAS_HIDDEN
6487 static const char * const visibility_types[] = {
6488 NULL, "protected", "hidden", "internal"
6489 };
6490
6491 const char *name, *type;
6492 tree id;
6493
6494 id = DECL_ASSEMBLER_NAME (decl);
6495 ultimate_transparent_alias_target (&id);
6496 name = IDENTIFIER_POINTER (id);
6497
6498 type = visibility_types[vis];
6499
6500 fprintf (asm_out_file, "\t.%s\t", type);
6501 assemble_name (asm_out_file, name);
6502 fprintf (asm_out_file, "\n");
6503 #else
6504 if (!DECL_ARTIFICIAL (decl))
6505 warning (OPT_Wattributes, "visibility attribute not supported "
6506 "in this configuration; ignored");
6507 #endif
6508 }
6509
6510 /* A helper function to call assemble_visibility when needed for a decl. */
6511
6512 int
maybe_assemble_visibility(tree decl)6513 maybe_assemble_visibility (tree decl)
6514 {
6515 enum symbol_visibility vis = DECL_VISIBILITY (decl);
6516 if (vis != VISIBILITY_DEFAULT)
6517 {
6518 targetm.asm_out.assemble_visibility (decl, vis);
6519 return 1;
6520 }
6521 else
6522 return 0;
6523 }
6524
6525 /* Returns 1 if the target configuration supports defining public symbols
6526 so that one of them will be chosen at link time instead of generating a
6527 multiply-defined symbol error, whether through the use of weak symbols or
6528 a target-specific mechanism for having duplicates discarded. */
6529
6530 int
supports_one_only(void)6531 supports_one_only (void)
6532 {
6533 if (SUPPORTS_ONE_ONLY)
6534 return 1;
6535 return TARGET_SUPPORTS_WEAK;
6536 }
6537
6538 /* Set up DECL as a public symbol that can be defined in multiple
6539 translation units without generating a linker error. */
6540
6541 void
make_decl_one_only(tree decl,tree comdat_group)6542 make_decl_one_only (tree decl, tree comdat_group)
6543 {
6544 struct symtab_node *symbol;
6545 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
6546
6547 TREE_PUBLIC (decl) = 1;
6548
6549 if (VAR_P (decl))
6550 symbol = varpool_node::get_create (decl);
6551 else
6552 symbol = cgraph_node::get_create (decl);
6553
6554 if (SUPPORTS_ONE_ONLY)
6555 {
6556 #ifdef MAKE_DECL_ONE_ONLY
6557 MAKE_DECL_ONE_ONLY (decl);
6558 #endif
6559 symbol->set_comdat_group (comdat_group);
6560 }
6561 else if (VAR_P (decl)
6562 && (DECL_INITIAL (decl) == 0
6563 || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
6564 DECL_COMMON (decl) = 1;
6565 else
6566 {
6567 gcc_assert (TARGET_SUPPORTS_WEAK);
6568 DECL_WEAK (decl) = 1;
6569 }
6570 }
6571
/* One-time initialization of the varasm machinery: create the section and
   constant-pool hash tables and instantiate the standard sections the
   target configuration provides.  */

void
init_varasm_once (void)
{
  section_htab = hash_table<section_hasher>::create_ggc (31);
  object_block_htab = hash_table<object_block_hasher>::create_ggc (31);
  const_desc_htab = hash_table<tree_descriptor_hasher>::create_ggc (1009);

  shared_constant_pool = create_constant_pool ();

  /* Create each standard switchable section the target defines an
     assembler op for.  */
#ifdef TEXT_SECTION_ASM_OP
  text_section = get_unnamed_section (SECTION_CODE, output_section_asm_op,
				      TEXT_SECTION_ASM_OP);
#endif

#ifdef DATA_SECTION_ASM_OP
  data_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
				      DATA_SECTION_ASM_OP);
#endif

#ifdef SDATA_SECTION_ASM_OP
  sdata_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
				       SDATA_SECTION_ASM_OP);
#endif

#ifdef READONLY_DATA_SECTION_ASM_OP
  readonly_data_section = get_unnamed_section (0, output_section_asm_op,
					       READONLY_DATA_SECTION_ASM_OP);
#endif

#ifdef CTORS_SECTION_ASM_OP
  ctors_section = get_unnamed_section (0, output_section_asm_op,
				       CTORS_SECTION_ASM_OP);
#endif

#ifdef DTORS_SECTION_ASM_OP
  dtors_section = get_unnamed_section (0, output_section_asm_op,
				       DTORS_SECTION_ASM_OP);
#endif

#ifdef BSS_SECTION_ASM_OP
  bss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
				     output_section_asm_op,
				     BSS_SECTION_ASM_OP);
#endif

#ifdef SBSS_SECTION_ASM_OP
  sbss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
				      output_section_asm_op,
				      SBSS_SECTION_ASM_OP);
#endif

  /* No-switch sections emit their contents via a callback instead of a
     section-switch directive.  */
  tls_comm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
					   | SECTION_COMMON, emit_tls_common);
  lcomm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
					| SECTION_COMMON, emit_local);
  comm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
				       | SECTION_COMMON, emit_common);

#if defined ASM_OUTPUT_ALIGNED_BSS
  bss_noswitch_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS,
					       emit_bss);
#endif

  /* Let the target create/override its own sections.  */
  targetm.asm_out.init_sections ();

  /* Fall back to the text section when no read-only data section exists.  */
  if (readonly_data_section == NULL)
    readonly_data_section = text_section;

#ifdef ASM_OUTPUT_EXTERNAL
  pending_assemble_externals_set = new hash_set<tree>;
#endif
}
6644
6645 enum tls_model
decl_default_tls_model(const_tree decl)6646 decl_default_tls_model (const_tree decl)
6647 {
6648 enum tls_model kind;
6649 bool is_local;
6650
6651 is_local = targetm.binds_local_p (decl);
6652 if (!flag_shlib)
6653 {
6654 if (is_local)
6655 kind = TLS_MODEL_LOCAL_EXEC;
6656 else
6657 kind = TLS_MODEL_INITIAL_EXEC;
6658 }
6659
6660 /* Local dynamic is inefficient when we're not combining the
6661 parts of the address. */
6662 else if (optimize && is_local)
6663 kind = TLS_MODEL_LOCAL_DYNAMIC;
6664 else
6665 kind = TLS_MODEL_GLOBAL_DYNAMIC;
6666 if (kind < flag_tls_default)
6667 kind = flag_tls_default;
6668
6669 return kind;
6670 }
6671
/* Select a set of attributes for section NAME based on the properties
   of DECL and whether or not RELOC indicates that DECL's initializer
   might contain runtime relocations.

   We make the section read-only and executable for a function decl,
   read-only for a const data decl, and writable for a non-const data decl.  */

unsigned int
default_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags;

  if (decl && TREE_CODE (decl) == FUNCTION_DECL)
    flags = SECTION_CODE;
  else if (decl)
    {
      /* Derive writability/RELRO from the decl's section category.  */
      enum section_category category
	= categorize_decl_for_section (decl, reloc);
      if (decl_readonly_section_1 (category))
	flags = 0;
      else if (category == SECCAT_DATA_REL_RO
	       || category == SECCAT_DATA_REL_RO_LOCAL)
	flags = SECTION_WRITE | SECTION_RELRO;
      else
	flags = SECTION_WRITE;
    }
  else
    {
      /* No decl: fall back to recognizing well-known section names.  */
      flags = SECTION_WRITE;
      if (strcmp (name, ".data.rel.ro") == 0
	  || strcmp (name, ".data.rel.ro.local") == 0)
	flags |= SECTION_RELRO;
    }

  if (decl && DECL_P (decl) && DECL_COMDAT_GROUP (decl))
    flags |= SECTION_LINKONCE;

  if (strcmp (name, ".vtable_map_vars") == 0)
    flags |= SECTION_LINKONCE;

  if (decl && VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
    flags |= SECTION_TLS | SECTION_WRITE;

  /* BSS-style (zero-initialized, no file contents) section names.  */
  if (strcmp (name, ".bss") == 0
      || strncmp (name, ".bss.", 5) == 0
      || strncmp (name, ".gnu.linkonce.b.", 16) == 0
      || strcmp (name, ".persistent.bss") == 0
      || strcmp (name, ".sbss") == 0
      || strncmp (name, ".sbss.", 6) == 0
      || strncmp (name, ".gnu.linkonce.sb.", 17) == 0)
    flags |= SECTION_BSS;

  /* Thread-local data section names.  */
  if (strcmp (name, ".tdata") == 0
      || strncmp (name, ".tdata.", 7) == 0
      || strncmp (name, ".gnu.linkonce.td.", 17) == 0)
    flags |= SECTION_TLS;

  if (strcmp (name, ".tbss") == 0
      || strncmp (name, ".tbss.", 6) == 0
      || strncmp (name, ".gnu.linkonce.tb.", 17) == 0)
    flags |= SECTION_TLS | SECTION_BSS;

  if (strcmp (name, ".noinit") == 0)
    flags |= SECTION_WRITE | SECTION_BSS | SECTION_NOTYPE;

  if (strcmp (name, ".persistent") == 0)
    flags |= SECTION_WRITE | SECTION_NOTYPE;

  /* Various sections have special ELF types that the assembler will
     assign by default based on the name.  They are neither SHT_PROGBITS
     nor SHT_NOBITS, so when changing sections we don't want to print a
     section type (@progbits or @nobits).  Rather than duplicating the
     assembler's knowledge of what those special name patterns are, just
     let the assembler choose the type if we don't know a specific
     reason to set it to something other than the default.  SHT_PROGBITS
     is the default for sections whose name is not specially known to
     the assembler, so it does no harm to leave the choice to the
     assembler when @progbits is the best thing we know to use.  If
     someone is silly enough to emit code or TLS variables to one of
     these sections, then don't handle them specially.

     default_elf_asm_named_section (below) handles the BSS, TLS, ENTSIZE, and
     LINKONCE cases when NOTYPE is not set, so leave those to its logic.  */
  if (!(flags & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE))
      && !(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE)))
    flags |= SECTION_NOTYPE;

  return flags;
}
6761
6762 /* Return true if the target supports some form of global BSS,
6763 either through bss_noswitch_section, or by selecting a BSS
6764 section in TARGET_ASM_SELECT_SECTION. */
6765
6766 bool
have_global_bss_p(void)6767 have_global_bss_p (void)
6768 {
6769 return bss_noswitch_section || targetm.have_switchable_bss_sections;
6770 }
6771
6772 /* Output assembly to switch to section NAME with attribute FLAGS.
6773 Four variants for common object file formats. */
6774
/* Variant for object formats that have no named-section support at
   all.  Reaching this hook at run time is a compiler bug: the
   front-end is expected to have rejected any attempt to place a
   symbol in a named section before we get here.  */
void
default_no_named_section (const char *name ATTRIBUTE_UNUSED,
			  unsigned int flags ATTRIBUTE_UNUSED,
			  tree decl ATTRIBUTE_UNUSED)
{
  /* Some object formats don't support named sections at all.  The
     front-end should already have flagged this as an error.  */
  gcc_unreachable ();
}
6784
6785 #ifndef TLS_SECTION_ASM_FLAG
6786 #define TLS_SECTION_ASM_FLAG 'T'
6787 #endif
6788
6789 void
default_elf_asm_named_section(const char * name,unsigned int flags,tree decl)6790 default_elf_asm_named_section (const char *name, unsigned int flags,
6791 tree decl)
6792 {
6793 char flagchars[11], *f = flagchars;
6794 unsigned int numeric_value = 0;
6795
6796 /* If we have already declared this section, we can use an
6797 abbreviated form to switch back to it -- unless this section is
6798 part of a COMDAT groups or with SHF_GNU_RETAIN or with SHF_LINK_ORDER,
6799 in which case GAS requires the full declaration every time. */
6800 if (!(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
6801 && !(flags & (SECTION_RETAIN | SECTION_LINK_ORDER))
6802 && (flags & SECTION_DECLARED))
6803 {
6804 fprintf (asm_out_file, "\t.section\t%s\n", name);
6805 return;
6806 }
6807
6808 /* If we have a machine specific flag, then use the numeric value to pass
6809 this on to GAS. */
6810 if (targetm.asm_out.elf_flags_numeric (flags, &numeric_value))
6811 snprintf (f, sizeof (flagchars), "0x%08x", numeric_value);
6812 else
6813 {
6814 if (!(flags & SECTION_DEBUG))
6815 *f++ = 'a';
6816 #if HAVE_GAS_SECTION_EXCLUDE
6817 if (flags & SECTION_EXCLUDE)
6818 *f++ = 'e';
6819 #endif
6820 if (flags & SECTION_WRITE)
6821 *f++ = 'w';
6822 if (flags & SECTION_CODE)
6823 *f++ = 'x';
6824 if (flags & SECTION_SMALL)
6825 *f++ = 's';
6826 if (flags & SECTION_MERGE)
6827 *f++ = 'M';
6828 if (flags & SECTION_STRINGS)
6829 *f++ = 'S';
6830 if (flags & SECTION_TLS)
6831 *f++ = TLS_SECTION_ASM_FLAG;
6832 if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
6833 *f++ = 'G';
6834 if (flags & SECTION_RETAIN)
6835 *f++ = 'R';
6836 if (flags & SECTION_LINK_ORDER)
6837 *f++ = 'o';
6838 #ifdef MACH_DEP_SECTION_ASM_FLAG
6839 if (flags & SECTION_MACH_DEP)
6840 *f++ = MACH_DEP_SECTION_ASM_FLAG;
6841 #endif
6842 *f = '\0';
6843 }
6844
6845 fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);
6846
6847 /* default_section_type_flags (above) knows which flags need special
6848 handling here, and sets NOTYPE when none of these apply so that the
6849 assembler's logic for default types can apply to user-chosen
6850 section names. */
6851 if (!(flags & SECTION_NOTYPE))
6852 {
6853 const char *type;
6854 const char *format;
6855
6856 if (flags & SECTION_BSS)
6857 type = "nobits";
6858 else
6859 type = "progbits";
6860
6861 format = ",@%s";
6862 /* On platforms that use "@" as the assembly comment character,
6863 use "%" instead. */
6864 if (strcmp (ASM_COMMENT_START, "@") == 0)
6865 format = ",%%%s";
6866 fprintf (asm_out_file, format, type);
6867
6868 if (flags & SECTION_ENTSIZE)
6869 fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
6870 if (flags & SECTION_LINK_ORDER)
6871 {
6872 tree id = DECL_ASSEMBLER_NAME (decl);
6873 ultimate_transparent_alias_target (&id);
6874 const char *name = IDENTIFIER_POINTER (id);
6875 name = targetm.strip_name_encoding (name);
6876 fprintf (asm_out_file, ",%s", name);
6877 }
6878 if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
6879 {
6880 if (TREE_CODE (decl) == IDENTIFIER_NODE)
6881 fprintf (asm_out_file, ",%s,comdat", IDENTIFIER_POINTER (decl));
6882 else
6883 fprintf (asm_out_file, ",%s,comdat",
6884 IDENTIFIER_POINTER (DECL_COMDAT_GROUP (decl)));
6885 }
6886 }
6887
6888 putc ('\n', asm_out_file);
6889 }
6890
6891 void
default_coff_asm_named_section(const char * name,unsigned int flags,tree decl ATTRIBUTE_UNUSED)6892 default_coff_asm_named_section (const char *name, unsigned int flags,
6893 tree decl ATTRIBUTE_UNUSED)
6894 {
6895 char flagchars[8], *f = flagchars;
6896
6897 if (flags & SECTION_WRITE)
6898 *f++ = 'w';
6899 if (flags & SECTION_CODE)
6900 *f++ = 'x';
6901 *f = '\0';
6902
6903 fprintf (asm_out_file, "\t.section\t%s,\"%s\"\n", name, flagchars);
6904 }
6905
6906 void
default_pe_asm_named_section(const char * name,unsigned int flags,tree decl)6907 default_pe_asm_named_section (const char *name, unsigned int flags,
6908 tree decl)
6909 {
6910 default_coff_asm_named_section (name, flags, decl);
6911
6912 if (flags & SECTION_LINKONCE)
6913 {
6914 /* Functions may have been compiled at various levels of
6915 optimization so we can't use `same_size' here.
6916 Instead, have the linker pick one. */
6917 fprintf (asm_out_file, "\t.linkonce %s\n",
6918 (flags & SECTION_CODE ? "discard" : "same_size"));
6919 }
6920 }
6921
6922 /* The lame default section selector. */
6923
6924 section *
default_select_section(tree decl,int reloc,unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)6925 default_select_section (tree decl, int reloc,
6926 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
6927 {
6928 if (DECL_P (decl))
6929 {
6930 if (decl_readonly_section (decl, reloc))
6931 return readonly_data_section;
6932 }
6933 else if (TREE_CODE (decl) == CONSTRUCTOR)
6934 {
6935 if (! ((flag_pic && reloc)
6936 || !TREE_READONLY (decl)
6937 || TREE_SIDE_EFFECTS (decl)
6938 || !TREE_CONSTANT (decl)))
6939 return readonly_data_section;
6940 }
6941 else if (TREE_CODE (decl) == STRING_CST)
6942 return readonly_data_section;
6943 else if (! (flag_pic && reloc))
6944 return readonly_data_section;
6945
6946 return data_section;
6947 }
6948
/* Classify DECL (a declaration or a bare constant) into one of the
   SECCAT_* section categories.  RELOC is a relocation bitmask for the
   initializer: bit 0 indicates a local relocation, bit 1 a global one
   (see compute_reloc_for_rtx below).  */

enum section_category
categorize_decl_for_section (const_tree decl, int reloc)
{
  enum section_category ret;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    return SECCAT_TEXT;
  else if (TREE_CODE (decl) == STRING_CST)
    {
      if ((flag_sanitize & SANITIZE_ADDRESS)
	  && asan_protect_global (CONST_CAST_TREE (decl)))
      /* or !flag_merge_constants */
	return SECCAT_RODATA;
      else
	return SECCAT_RODATA_MERGE_STR;
    }
  else if (VAR_P (decl))
    {
      /* Non-const handle for the asan_protect_global interface.  */
      tree d = CONST_CAST_TREE (decl);
      if (bss_initializer_p (decl))
	ret = SECCAT_BSS;
      else if (! TREE_READONLY (decl)
	       || TREE_SIDE_EFFECTS (decl)
	       || (DECL_INITIAL (decl)
		   && ! TREE_CONSTANT (DECL_INITIAL (decl))))
	{
	  /* Here the reloc_rw_mask is not testing whether the section should
	     be read-only or not, but whether the dynamic link will have to
	     do something.  If so, we wish to segregate the data in order to
	     minimize cache misses inside the dynamic linker.  */
	  if (reloc & targetm.asm_out.reloc_rw_mask ())
	    ret = reloc == 1 ? SECCAT_DATA_REL_LOCAL : SECCAT_DATA_REL;
	  else
	    ret = SECCAT_DATA;
	}
      else if (reloc & targetm.asm_out.reloc_rw_mask ())
	ret = reloc == 1 ? SECCAT_DATA_REL_RO_LOCAL : SECCAT_DATA_REL_RO;
      else if (reloc || flag_merge_constants < 2
	       || ((flag_sanitize & SANITIZE_ADDRESS)
		   /* PR 81697: for architectures that use section anchors we
		      need to ignore DECL_RTL_SET_P (decl) for string constants
		      inside this asan_protect_global call because otherwise
		      we'll wrongly put them into SECCAT_RODATA_MERGE_CONST
		      section, set DECL_RTL (decl) later on and add DECL to
		      protected globals via successive asan_protect_global
		      calls.  In this scenario we'll end up with wrong
		      alignment of these strings at runtime and possible ASan
		      false positives.  */
		   && asan_protect_global (d, use_object_blocks_p ()
					      && use_blocks_for_decl_p (d))))
	/* C and C++ don't allow different variables to share the same
	   location.  -fmerge-all-constants allows even that (at the
	   expense of not conforming).  */
	ret = SECCAT_RODATA;
      else if (DECL_INITIAL (decl)
	       && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST)
	ret = SECCAT_RODATA_MERGE_STR_INIT;
      else
	ret = SECCAT_RODATA_MERGE_CONST;
    }
  else if (TREE_CODE (decl) == CONSTRUCTOR)
    {
      if ((reloc & targetm.asm_out.reloc_rw_mask ())
	  || TREE_SIDE_EFFECTS (decl)
	  || ! TREE_CONSTANT (decl))
	ret = SECCAT_DATA;
      else
	ret = SECCAT_RODATA;
    }
  else
    ret = SECCAT_RODATA;

  /* There are no read-only thread-local sections.  */
  if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
    {
      /* Note that this would be *just* SECCAT_BSS, except that there's
	 no concept of a read-only thread-local-data section.  */
      if (ret == SECCAT_BSS
	  || DECL_INITIAL (decl) == NULL
	  || (flag_zero_initialized_in_bss
	      && initializer_zerop (DECL_INITIAL (decl))))
	ret = SECCAT_TBSS;
      else
	ret = SECCAT_TDATA;
    }

  /* If the target uses small data sections, select it.  */
  else if (targetm.in_small_data_p (decl))
    {
      if (ret == SECCAT_BSS)
	ret = SECCAT_SBSS;
      else if (targetm.have_srodata_section && ret == SECCAT_RODATA)
	ret = SECCAT_SRODATA;
      else
	ret = SECCAT_SDATA;
    }

  return ret;
}
7048
7049 static bool
decl_readonly_section_1(enum section_category category)7050 decl_readonly_section_1 (enum section_category category)
7051 {
7052 switch (category)
7053 {
7054 case SECCAT_RODATA:
7055 case SECCAT_RODATA_MERGE_STR:
7056 case SECCAT_RODATA_MERGE_STR_INIT:
7057 case SECCAT_RODATA_MERGE_CONST:
7058 case SECCAT_SRODATA:
7059 return true;
7060 default:
7061 return false;
7062 }
7063 }
7064
7065 bool
decl_readonly_section(const_tree decl,int reloc)7066 decl_readonly_section (const_tree decl, int reloc)
7067 {
7068 return decl_readonly_section_1 (categorize_decl_for_section (decl, reloc));
7069 }
7070
/* Select a section based on the above categorization.  Returns one of
   the preallocated section objects where possible, otherwise a named
   section looked up (and created on demand) via get_named_section.
   ALIGN is the required alignment, used only for mergeable
   sections.  */

section *
default_elf_select_section (tree decl, int reloc,
			    unsigned HOST_WIDE_INT align)
{
  const char *sname;

  switch (categorize_decl_for_section (decl, reloc))
    {
    case SECCAT_TEXT:
      /* We're not supposed to be called on FUNCTION_DECLs.  */
      gcc_unreachable ();
    case SECCAT_RODATA:
      return readonly_data_section;
    case SECCAT_RODATA_MERGE_STR:
      return mergeable_string_section (decl, align, 0);
    case SECCAT_RODATA_MERGE_STR_INIT:
      /* The mergeable string is DECL's initializer, not DECL itself.  */
      return mergeable_string_section (DECL_INITIAL (decl), align, 0);
    case SECCAT_RODATA_MERGE_CONST:
      return mergeable_constant_section (DECL_MODE (decl), align, 0);
    case SECCAT_SRODATA:
      sname = ".sdata2";
      break;
    case SECCAT_DATA:
      /* Variables with the "persistent" attribute get their own
	 section rather than plain .data.  */
      if (DECL_P (decl) && DECL_PERSISTENT_P (decl))
	{
	  sname = ".persistent";
	  break;
	}
      return data_section;
    case SECCAT_DATA_REL:
      sname = ".data.rel";
      break;
    case SECCAT_DATA_REL_LOCAL:
      sname = ".data.rel.local";
      break;
    case SECCAT_DATA_REL_RO:
      sname = ".data.rel.ro";
      break;
    case SECCAT_DATA_REL_RO_LOCAL:
      sname = ".data.rel.ro.local";
      break;
    case SECCAT_SDATA:
      sname = ".sdata";
      break;
    case SECCAT_TDATA:
      sname = ".tdata";
      break;
    case SECCAT_BSS:
      /* Variables with the "noinit" attribute get their own section
	 rather than .bss.  */
      if (DECL_P (decl) && DECL_NOINIT_P (decl))
	{
	  sname = ".noinit";
	  break;
	}
      if (bss_section)
	return bss_section;
      sname = ".bss";
      break;
    case SECCAT_SBSS:
      sname = ".sbss";
      break;
    case SECCAT_TBSS:
      sname = ".tbss";
      break;
    default:
      gcc_unreachable ();
    }

  return get_named_section (decl, sname, reloc);
}
7142
/* Construct a unique section name based on the decl name and the
   categorization performed above, and record it on DECL via
   set_decl_section_name.  The name is "<prefix>.<decl-name>", with a
   ".gnu.linkonce" prefix and abbreviated category prefixes when
   one-only semantics must be emulated without COMDAT groups.  */

void
default_unique_section (tree decl, int reloc)
{
  /* We only need to use .gnu.linkonce if we don't have COMDAT groups.  */
  bool one_only = DECL_ONE_ONLY (decl) && !HAVE_COMDAT_GROUP;
  const char *prefix, *name, *linkonce;
  char *string;
  tree id;

  switch (categorize_decl_for_section (decl, reloc))
    {
    case SECCAT_TEXT:
      prefix = one_only ? ".t" : ".text";
      break;
    case SECCAT_RODATA:
    case SECCAT_RODATA_MERGE_STR:
    case SECCAT_RODATA_MERGE_STR_INIT:
    case SECCAT_RODATA_MERGE_CONST:
      prefix = one_only ? ".r" : ".rodata";
      break;
    case SECCAT_SRODATA:
      prefix = one_only ? ".s2" : ".sdata2";
      break;
    case SECCAT_DATA:
      prefix = one_only ? ".d" : ".data";
      /* "persistent" variables override the plain data prefix.  */
      if (DECL_P (decl) && DECL_PERSISTENT_P (decl))
	{
	  prefix = one_only ? ".p" : ".persistent";
	  break;
	}
      break;
    case SECCAT_DATA_REL:
      prefix = one_only ? ".d.rel" : ".data.rel";
      break;
    case SECCAT_DATA_REL_LOCAL:
      prefix = one_only ? ".d.rel.local" : ".data.rel.local";
      break;
    case SECCAT_DATA_REL_RO:
      prefix = one_only ? ".d.rel.ro" : ".data.rel.ro";
      break;
    case SECCAT_DATA_REL_RO_LOCAL:
      prefix = one_only ? ".d.rel.ro.local" : ".data.rel.ro.local";
      break;
    case SECCAT_SDATA:
      prefix = one_only ? ".s" : ".sdata";
      break;
    case SECCAT_BSS:
      /* "noinit" variables override the plain BSS prefix.  */
      if (DECL_P (decl) && DECL_NOINIT_P (decl))
	{
	  prefix = one_only ? ".n" : ".noinit";
	  break;
	}
      prefix = one_only ? ".b" : ".bss";
      break;
    case SECCAT_SBSS:
      prefix = one_only ? ".sb" : ".sbss";
      break;
    case SECCAT_TDATA:
      prefix = one_only ? ".td" : ".tdata";
      break;
    case SECCAT_TBSS:
      prefix = one_only ? ".tb" : ".tbss";
      break;
    default:
      gcc_unreachable ();
    }

  /* Use the ultimate alias target's assembler name, without any
     target-specific encoding prefix.  */
  id = DECL_ASSEMBLER_NAME (decl);
  ultimate_transparent_alias_target (&id);
  name = IDENTIFIER_POINTER (id);
  name = targetm.strip_name_encoding (name);

  /* If we're using one_only, then there needs to be a .gnu.linkonce
     prefix to the section name.  */
  linkonce = one_only ? ".gnu.linkonce" : "";

  string = ACONCAT ((linkonce, prefix, ".", name, NULL));

  set_decl_section_name (decl, string);
}
7226
7227 /* Subroutine of compute_reloc_for_rtx for leaf rtxes. */
7228
7229 static int
compute_reloc_for_rtx_1(const_rtx x)7230 compute_reloc_for_rtx_1 (const_rtx x)
7231 {
7232 switch (GET_CODE (x))
7233 {
7234 case SYMBOL_REF:
7235 return SYMBOL_REF_LOCAL_P (x) ? 1 : 2;
7236 case LABEL_REF:
7237 return 1;
7238 default:
7239 return 0;
7240 }
7241 }
7242
7243 /* Like compute_reloc_for_constant, except for an RTX. The return value
7244 is a mask for which bit 1 indicates a global relocation, and bit 0
7245 indicates a local relocation. */
7246
7247 static int
compute_reloc_for_rtx(const_rtx x)7248 compute_reloc_for_rtx (const_rtx x)
7249 {
7250 switch (GET_CODE (x))
7251 {
7252 case SYMBOL_REF:
7253 case LABEL_REF:
7254 return compute_reloc_for_rtx_1 (x);
7255
7256 case CONST:
7257 {
7258 int reloc = 0;
7259 subrtx_iterator::array_type array;
7260 FOR_EACH_SUBRTX (iter, array, x, ALL)
7261 reloc |= compute_reloc_for_rtx_1 (*iter);
7262 return reloc;
7263 }
7264
7265 default:
7266 return 0;
7267 }
7268 }
7269
7270 section *
default_select_rtx_section(machine_mode mode ATTRIBUTE_UNUSED,rtx x,unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)7271 default_select_rtx_section (machine_mode mode ATTRIBUTE_UNUSED,
7272 rtx x,
7273 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
7274 {
7275 if (compute_reloc_for_rtx (x) & targetm.asm_out.reloc_rw_mask ())
7276 return data_section;
7277 else
7278 return readonly_data_section;
7279 }
7280
7281 section *
default_elf_select_rtx_section(machine_mode mode,rtx x,unsigned HOST_WIDE_INT align)7282 default_elf_select_rtx_section (machine_mode mode, rtx x,
7283 unsigned HOST_WIDE_INT align)
7284 {
7285 int reloc = compute_reloc_for_rtx (x);
7286
7287 /* ??? Handle small data here somehow. */
7288
7289 if (reloc & targetm.asm_out.reloc_rw_mask ())
7290 {
7291 if (reloc == 1)
7292 return get_named_section (NULL, ".data.rel.ro.local", 1);
7293 else
7294 return get_named_section (NULL, ".data.rel.ro", 3);
7295 }
7296
7297 return mergeable_constant_section (mode, align, 0);
7298 }
7299
/* Set the generally applicable flags on the SYMBOL_REF for EXP.
   RTL is DECL's rtl, expected to be a MEM whose address is a
   SYMBOL_REF; anything else (e.g. a global register variable) is left
   untouched.  All flags except SYMBOL_FLAG_HAS_BLOCK_INFO are
   recomputed from scratch.  FIRST is unused here.  */

void
default_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
{
  rtx symbol;
  int flags;

  /* Careful not to prod global register variables.  */
  if (!MEM_P (rtl))
    return;
  symbol = XEXP (rtl, 0);
  if (GET_CODE (symbol) != SYMBOL_REF)
    return;

  /* Preserve only the block-info bit; everything else is derived
     below.  */
  flags = SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_HAS_BLOCK_INFO;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    flags |= SYMBOL_FLAG_FUNCTION;
  if (targetm.binds_local_p (decl))
    flags |= SYMBOL_FLAG_LOCAL;
  /* TLS takes precedence over small-data classification.  */
  if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
    flags |= DECL_TLS_MODEL (decl) << SYMBOL_FLAG_TLS_SHIFT;
  else if (targetm.in_small_data_p (decl))
    flags |= SYMBOL_FLAG_SMALL;
  /* ??? Why is DECL_EXTERNAL ever set for non-PUBLIC names?  Without
     being PUBLIC, the thing *must* be defined in this translation unit.
     Prevent this buglet from being propagated into rtl code as well.  */
  if (DECL_P (decl) && DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
    flags |= SYMBOL_FLAG_EXTERNAL;

  SYMBOL_REF_FLAGS (symbol) = flags;
}
7332
/* By default, we do nothing for encode_section_info, so we need not
   do anything but discard the '*' marker.  */

const char *
default_strip_name_encoding (const char *str)
{
  if (str[0] == '*')
    str++;
  return str;
}
7341
7342 #ifdef ASM_OUTPUT_DEF
/* The default implementation of TARGET_ASM_OUTPUT_ANCHOR.  Define the
   anchor relative to ".", the current section position.  */

void
default_asm_output_anchor (rtx symbol)
{
  /* 100 bytes comfortably holds "*. + " plus the decimal printout of
     any HOST_WIDE_INT offset.  The leading '*' tells the assembler
     output machinery to use the string verbatim.  */
  char buffer[100];

  sprintf (buffer, "*. + " HOST_WIDE_INT_PRINT_DEC,
	   SYMBOL_REF_BLOCK_OFFSET (symbol));
  ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
}
7355 #endif
7356
7357 /* The default implementation of TARGET_USE_ANCHORS_FOR_SYMBOL_P. */
7358
7359 bool
default_use_anchors_for_symbol_p(const_rtx symbol)7360 default_use_anchors_for_symbol_p (const_rtx symbol)
7361 {
7362 tree decl;
7363 section *sect = SYMBOL_REF_BLOCK (symbol)->sect;
7364
7365 /* This function should only be called with non-zero SYMBOL_REF_BLOCK,
7366 furthermore get_block_for_section should not create object blocks
7367 for mergeable sections. */
7368 gcc_checking_assert (sect && !(sect->common.flags & SECTION_MERGE));
7369
7370 /* Don't use anchors for small data sections. The small data register
7371 acts as an anchor for such sections. */
7372 if (sect->common.flags & SECTION_SMALL)
7373 return false;
7374
7375 decl = SYMBOL_REF_DECL (symbol);
7376 if (decl && DECL_P (decl))
7377 {
7378 /* Don't use section anchors for decls that might be defined or
7379 usurped by other modules. */
7380 if (TREE_PUBLIC (decl) && !decl_binds_to_current_def_p (decl))
7381 return false;
7382
7383 /* Don't use section anchors for decls that will be placed in a
7384 small data section. */
7385 /* ??? Ideally, this check would be redundant with the SECTION_SMALL
7386 one above. The problem is that we only use SECTION_SMALL for
7387 sections that should be marked as small in the section directive. */
7388 if (targetm.in_small_data_p (decl))
7389 return false;
7390
7391 /* Don't use section anchors for decls that won't fit inside a single
7392 anchor range to reduce the amount of instructions required to refer
7393 to the entire declaration. */
7394 if (DECL_SIZE_UNIT (decl) == NULL_TREE
7395 || !tree_fits_uhwi_p (DECL_SIZE_UNIT (decl))
7396 || (tree_to_uhwi (DECL_SIZE_UNIT (decl))
7397 >= (unsigned HOST_WIDE_INT) targetm.max_anchor_offset))
7398 return false;
7399
7400 }
7401 return true;
7402 }
7403
7404 /* Return true when RESOLUTION indicate that symbol will be bound to the
7405 definition provided by current .o file. */
7406
7407 static bool
resolution_to_local_definition_p(enum ld_plugin_symbol_resolution resolution)7408 resolution_to_local_definition_p (enum ld_plugin_symbol_resolution resolution)
7409 {
7410 return (resolution == LDPR_PREVAILING_DEF
7411 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP
7412 || resolution == LDPR_PREVAILING_DEF_IRONLY);
7413 }
7414
7415 /* Return true when RESOLUTION indicate that symbol will be bound locally
7416 within current executable or DSO. */
7417
7418 static bool
resolution_local_p(enum ld_plugin_symbol_resolution resolution)7419 resolution_local_p (enum ld_plugin_symbol_resolution resolution)
7420 {
7421 return (resolution == LDPR_PREVAILING_DEF
7422 || resolution == LDPR_PREVAILING_DEF_IRONLY
7423 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP
7424 || resolution == LDPR_PREEMPTED_REG
7425 || resolution == LDPR_PREEMPTED_IR
7426 || resolution == LDPR_RESOLVED_IR
7427 || resolution == LDPR_RESOLVED_EXEC);
7428 }
7429
/* Return true if references to EXP bind within the current module.

   EXP is the declaration (or constant-pool entry) being queried.
   SHLIB is true when compiling for a shared library, where any global
   may be interposed.  When WEAK_DOMINATE is true and we are not
   building a shared library, a local definition is assumed to prevail
   at link time.  When EXTERN_PROTECTED_DATA is true, protected
   visibility on a data symbol does not by itself imply local binding.
   COMMON_LOCAL_P is true means that the linker can guarantee that an
   uninitialized common symbol in the executable will still be defined
   (through COPY relocation) in the executable.  */

bool
default_binds_local_p_3 (const_tree exp, bool shlib, bool weak_dominate,
			 bool extern_protected_data, bool common_local_p)
{
  /* A non-decl is an entry in the constant pool.  */
  if (!DECL_P (exp))
    return true;

  /* Weakrefs may not bind locally, even though the weakref itself is always
     static and therefore local.  Similarly, the resolver for ifunc functions
     might resolve to a non-local function.
     FIXME: We can resolve the weakref case more carefully by looking at the
     weakref alias.  */
  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (exp))
      || (TREE_CODE (exp) == FUNCTION_DECL
	  && cgraph_node::get (exp)
	  && cgraph_node::get (exp)->ifunc_resolver))
    return false;

  /* Static variables are always local.  */
  if (! TREE_PUBLIC (exp))
    return true;

  /* With a resolution file in hand, take a look into the resolutions.
     We can't just return true for resolved_locally symbols,
     because dynamic linking might overwrite symbols
     in shared libraries.  */
  bool resolved_locally = false;

  /* An uninitialized COMMON variable may be merged with a definition
     from another object file.  */
  bool uninited_common = (DECL_COMMON (exp)
			  && (DECL_INITIAL (exp) == NULL
			      || (!in_lto_p
				  && DECL_INITIAL (exp) == error_mark_node)));

  /* A non-external variable is defined locally only if it isn't
     uninitialized COMMON variable or common_local_p is true.  */
  bool defined_locally = (!DECL_EXTERNAL (exp)
			  && (!uninited_common || common_local_p));
  if (symtab_node *node = symtab_node::get (exp))
    {
      /* A symbol placed in another LTO partition is still defined
	 within this link.  */
      if (node->in_other_partition)
	defined_locally = true;
      if (node->can_be_discarded_p ())
	;
      else if (resolution_to_local_definition_p (node->resolution))
	defined_locally = resolved_locally = true;
      else if (resolution_local_p (node->resolution))
	resolved_locally = true;
    }
  if (defined_locally && weak_dominate && !shlib)
    resolved_locally = true;

  /* Undefined weak symbols are never defined locally.  */
  if (DECL_WEAK (exp) && !defined_locally)
    return false;

  /* A symbol is local if the user has said explicitly that it will be,
     or if we have a definition for the symbol.  We cannot infer visibility
     for undefined symbols.  */
  if (DECL_VISIBILITY (exp) != VISIBILITY_DEFAULT
      && (TREE_CODE (exp) == FUNCTION_DECL
	  || !extern_protected_data
	  || DECL_VISIBILITY (exp) != VISIBILITY_PROTECTED)
      && (DECL_VISIBILITY_SPECIFIED (exp) || defined_locally))
    return true;

  /* If PIC, then assume that any global name can be overridden by
     symbols resolved from other modules.  */
  if (shlib)
    return false;

  /* Variables defined outside this object might not be local.  */
  if (DECL_EXTERNAL (exp) && !resolved_locally)
    return false;

  /* Non-dominant weak symbols are not defined locally.  */
  if (DECL_WEAK (exp) && !resolved_locally)
    return false;

  /* Uninitialized COMMON variable may be unified with symbols
     resolved from other modules.  */
  if (uninited_common && !resolved_locally)
    return false;

  /* Otherwise we're left with initialized (or non-common) global data
     which is of necessity defined locally.  */
  return true;
}
7522
/* Assume ELF-ish defaults, since that's pretty much the most liberal
   wrt cross-module name binding: weak definitions dominate, protected
   data binds locally, and commons are not assumed local.  */

bool
default_binds_local_p (const_tree exp)
{
  return default_binds_local_p_3 (exp, flag_shlib != 0, true, false, false);
}
7531
/* Similar to default_binds_local_p, but common symbols may be local
   (when not compiling PIC) and extern protected data is non-local.  */

bool
default_binds_local_p_2 (const_tree exp)
{
  return default_binds_local_p_3 (exp, flag_shlib != 0, true, true,
				  !flag_pic);
}
7541
/* Variant taking an explicit SHLIB flag instead of consulting
   flag_shlib, and without the weak-dominate assumption.  */
bool
default_binds_local_p_1 (const_tree exp, int shlib)
{
  return default_binds_local_p_3 (exp, shlib != 0, false, false, false);
}
7547
/* Return true when references to DECL must bind to the current definition
   in the final executable.

   The condition is usually equivalent to whether the function binds to the
   current module (shared library or executable), that is to binds_local_p.
   We use this fact to avoid the need for another target hook and implement
   the logic using binds_local_p and just special-case the situations where
   decl_binds_to_current_def_p is stronger than binds_local_p.  In particular
   these are the weak definitions (which can be overwritten at link time by
   another definition from a different object file) and, when resolution info
   is available, we simply use the knowledge passed to us by the linker
   plugin.  */
bool
decl_binds_to_current_def_p (const_tree decl)
{
  gcc_assert (DECL_P (decl));
  if (!targetm.binds_local_p (decl))
    return false;
  if (!TREE_PUBLIC (decl))
    return true;

  /* When resolution is available, just use it.  */
  if (symtab_node *node = symtab_node::get (decl))
    {
      if (node->resolution != LDPR_UNKNOWN
	  && !node->can_be_discarded_p ())
	return resolution_to_local_definition_p (node->resolution);
    }

  /* Otherwise we have to assume the worst for DECL_WEAK (hidden weaks
     bind locally but still can be overwritten), DECL_COMMON (can be merged
     with a non-common definition somewhere in the same module) or
     DECL_EXTERNAL.
     This relies on the fact that binds_local_p behaves as decl_replaceable_p
     for all other declaration types.  */
  if (DECL_WEAK (decl))
    return false;
  if (DECL_COMMON (decl)
      && (DECL_INITIAL (decl) == NULL
	  || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
    return false;
  if (DECL_EXTERNAL (decl))
    return false;
  return true;
}
7592
7593 /* A replaceable function or variable is one which may be replaced
7594 at link-time with an entirely different definition, provided that the
7595 replacement has the same type. For example, functions declared
7596 with __attribute__((weak)) on most systems are replaceable.
7597
7598 COMDAT functions are not replaceable, since all definitions of the
7599 function must be equivalent. It is important that COMDAT functions
7600 not be treated as replaceable so that use of C++ template
7601 instantiations is not penalized. */
7602
7603 bool
decl_replaceable_p(tree decl)7604 decl_replaceable_p (tree decl)
7605 {
7606 gcc_assert (DECL_P (decl));
7607 if (!TREE_PUBLIC (decl) || DECL_COMDAT (decl))
7608 return false;
7609 if (!flag_semantic_interposition
7610 && !DECL_WEAK (decl))
7611 return false;
7612 return !decl_binds_to_current_def_p (decl);
7613 }
7614
7615 /* Default function to output code that will globalize a label. A
7616 target must define GLOBAL_ASM_OP or provide its own function to
7617 globalize a label. */
7618 #ifdef GLOBAL_ASM_OP
/* Emit GLOBAL_ASM_OP followed by NAME to STREAM, marking the label
   NAME as globally visible.  */
void
default_globalize_label (FILE * stream, const char *name)
{
  fputs (GLOBAL_ASM_OP, stream);
  assemble_name (stream, name);
  putc ('\n', stream);
}
7626 #endif /* GLOBAL_ASM_OP */
7627
/* Default function to output code that will globalize a declaration:
   extract DECL's assembler name from its rtl and globalize that
   label via the globalize_label hook.  */
void
default_globalize_decl_name (FILE * stream, tree decl)
{
  const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
  targetm.asm_out.globalize_label (stream, name);
}
7635
/* Default function to output a label for unwind information.  The
   default is to do nothing.  A target that needs nonlocal labels for
   unwind information must provide its own function to do this.  */
void
default_emit_unwind_label (FILE * stream ATTRIBUTE_UNUSED,
			   tree decl ATTRIBUTE_UNUSED,
			   int for_eh ATTRIBUTE_UNUSED,
			   int empty ATTRIBUTE_UNUSED)
{
  /* Intentionally empty.  */
}
7646
/* Default function to output a label to divide up the exception table.
   The default is to do nothing.  A target that needs/wants to divide
   up the table must provide its own function to do this.  */
void
default_emit_except_table_label (FILE * stream ATTRIBUTE_UNUSED)
{
  /* Intentionally empty.  */
}
7654
/* Generate (into BUF) an internal numbered label where PREFIX is
   the class of label and LABELNO is the number within the class.
   BUF must be large enough for the target's label format.  */

void
default_generate_internal_label (char *buf, const char *prefix,
				 unsigned long labelno)
{
  ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
}
7664
/* Like default_generate_internal_label, but output the label directly
   to STREAM rather than into a caller-supplied buffer.  */

void
default_internal_label (FILE *stream, const char *prefix,
			unsigned long labelno)
{
  /* 40 bytes beyond the prefix is ample for the numeric suffix and
     target decoration added by ASM_GENERATE_INTERNAL_LABEL.  */
  char *const buf = (char *) alloca (40 + strlen (prefix));
  ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
  ASM_OUTPUT_INTERNAL_LABEL (stream, buf);
}
7676
7677
/* The default implementation of ASM_DECLARE_CONSTANT_NAME: simply
   emit NAME as a label, ignoring the constant's value and size.  */

void
default_asm_declare_constant_name (FILE *file, const char *name,
				   const_tree exp ATTRIBUTE_UNUSED,
				   HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  assemble_label (file, name);
}
7687
7688 /* This is the default behavior at the beginning of a file. It's
7689 controlled by two other target-hook toggles. */
7690 void
default_file_start(void)7691 default_file_start (void)
7692 {
7693 if (targetm.asm_file_start_app_off
7694 && !(flag_verbose_asm || flag_debug_asm || flag_dump_rtl_in_asm))
7695 fputs (ASM_APP_OFF, asm_out_file);
7696
7697 if (targetm.asm_file_start_file_directive)
7698 {
7699 /* LTO produced units have no meaningful main_input_filename. */
7700 if (in_lto_p)
7701 output_file_directive (asm_out_file, "<artificial>");
7702 else
7703 output_file_directive (asm_out_file, main_input_filename);
7704 }
7705 }
7706
7707 /* This is a generic routine suitable for use as TARGET_ASM_FILE_END
7708 which emits a special section directive used to indicate whether or
7709 not this object file needs an executable stack. This is primarily
7710 a GNU extension to ELF but could be used on other targets. */
7711
7712 int trampolines_created;
7713
7714 void
file_end_indicate_exec_stack(void)7715 file_end_indicate_exec_stack (void)
7716 {
7717 unsigned int flags = SECTION_DEBUG;
7718 if (trampolines_created)
7719 flags |= SECTION_CODE;
7720
7721 switch_to_section (get_section (".note.GNU-stack", flags, NULL));
7722 }
7723
7724 /* Emit a special section directive to indicate that this object file
7725 was compiled with -fsplit-stack. This is used to let the linker
7726 detect calls between split-stack code and non-split-stack code, so
7727 that it can modify the split-stack code to allocate a sufficiently
7728 large stack. We emit another special section if there are any
7729 functions in this file which have the no_split_stack attribute, to
7730 prevent the linker from warning about being unable to convert the
7731 functions if they call non-split-stack code. */
7732
7733 void
file_end_indicate_split_stack(void)7734 file_end_indicate_split_stack (void)
7735 {
7736 if (flag_split_stack)
7737 {
7738 switch_to_section (get_section (".note.GNU-split-stack", SECTION_DEBUG,
7739 NULL));
7740 if (saw_no_split_stack)
7741 switch_to_section (get_section (".note.GNU-no-split-stack",
7742 SECTION_DEBUG, NULL));
7743 }
7744 }
7745
7746 /* Output DIRECTIVE (a C string) followed by a newline. This is used as
7747 a get_unnamed_section callback. */
7748
7749 void
output_section_asm_op(const void * directive)7750 output_section_asm_op (const void *directive)
7751 {
7752 fprintf (asm_out_file, "%s\n", (const char *) directive);
7753 }
7754
7755 /* Emit assembly code to switch to section NEW_SECTION. Do nothing if
7756 the current section is NEW_SECTION. */
7757
void
switch_to_section (section *new_section, tree decl)
{
  if (in_section == new_section)
    {
      /* Even when no switch is needed, check that DECL's "retain"
	 attribute agrees with the SECTION_RETAIN flag recorded for this
	 named section, and warn on a mismatch.  */
      bool retain_p;
      if ((new_section->common.flags & SECTION_NAMED)
	  && decl != nullptr
	  && DECL_P (decl)
	  && ((retain_p = !!lookup_attribute ("retain",
					      DECL_ATTRIBUTES (decl)))
	      != !!(new_section->common.flags & SECTION_RETAIN)))
	{
	  /* If the SECTION_RETAIN bit doesn't match, switch to a new
	     section.  */
	  tree used_decl, no_used_decl;

	  if (retain_p)
	    {
	      /* DECL wants retention: set the flag and warn about the
		 earlier decl recorded for the section.  */
	      new_section->common.flags |= SECTION_RETAIN;
	      used_decl = decl;
	      no_used_decl = new_section->named.decl;
	    }
	  else
	    {
	      /* DECL lacks retention: clear SECTION_RETAIN and also
		 SECTION_DECLARED so the section directive is re-emitted
		 below.  */
	      new_section->common.flags &= ~(SECTION_RETAIN
					     | SECTION_DECLARED);
	      used_decl = new_section->named.decl;
	      no_used_decl = decl;
	    }
	  warning (OPT_Wattributes,
		   "%+qD without %<retain%> attribute and %qD with "
		   "%<retain%> attribute are placed in a section with "
		   "the same name", no_used_decl, used_decl);
	  inform (DECL_SOURCE_LOCATION (used_decl),
		  "%qD was declared here", used_decl);
	}
      else
	return;
    }

  /* SECTION_FORGET sections do not track the current section state.  */
  if (new_section->common.flags & SECTION_FORGET)
    in_section = NULL;
  else
    in_section = new_section;

  /* Emit the actual section-switching directive.  */
  switch (SECTION_STYLE (new_section))
    {
    case SECTION_NAMED:
      targetm.asm_out.named_section (new_section->named.name,
				     new_section->named.common.flags,
				     new_section->named.decl);
      break;

    case SECTION_UNNAMED:
      new_section->unnamed.callback (new_section->unnamed.data);
      break;

    case SECTION_NOSWITCH:
      /* NOSWITCH sections are emitted by other means; reaching here is a
	 caller bug.  */
      gcc_unreachable ();
      break;
    }

  new_section->common.flags |= SECTION_DECLARED;
}
7823
7824 /* If block symbol SYMBOL has not yet been assigned an offset, place
7825 it at the end of its block. */
7826
void
place_block_symbol (rtx symbol)
{
  unsigned HOST_WIDE_INT size, mask, offset;
  class constant_descriptor_rtx *desc;
  unsigned int alignment;
  struct object_block *block;
  tree decl;

  gcc_assert (SYMBOL_REF_BLOCK (symbol));
  /* Nothing to do if the symbol already has an offset in its block.  */
  if (SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0)
    return;

  /* Work out the symbol's size and alignment.  */
  if (CONSTANT_POOL_ADDRESS_P (symbol))
    {
      /* RTL constant-pool entry.  */
      desc = SYMBOL_REF_CONSTANT (symbol);
      alignment = desc->align;
      size = GET_MODE_SIZE (desc->mode);
    }
  else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    {
      /* Tree constant-pool entry.  */
      decl = SYMBOL_REF_DECL (symbol);
      gcc_checking_assert (DECL_IN_CONSTANT_POOL (decl));
      alignment = DECL_ALIGN (decl);
      size = get_constant_size (DECL_INITIAL (decl));
      /* ASan-protected string constants get a trailing red zone and may
	 need a stricter alignment; output_object_block must size them
	 the same way.  */
      if ((flag_sanitize & SANITIZE_ADDRESS)
	  && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
	  && asan_protect_global (DECL_INITIAL (decl)))
	{
	  size += asan_red_zone_size (size);
	  alignment = MAX (alignment,
			   ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
	}
    }
  else
    {
      struct symtab_node *snode;
      decl = SYMBOL_REF_DECL (symbol);

      snode = symtab_node::get (decl);
      if (snode->alias)
	{
	  /* An alias shares the offset of its ultimate target: place the
	     target first (recursively), then copy its offset.  */
	  rtx target = DECL_RTL (snode->ultimate_alias_target ()->decl);

	  gcc_assert (MEM_P (target)
		      && GET_CODE (XEXP (target, 0)) == SYMBOL_REF
		      && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (target, 0)));
	  target = XEXP (target, 0);
	  place_block_symbol (target);
	  SYMBOL_REF_BLOCK_OFFSET (symbol) = SYMBOL_REF_BLOCK_OFFSET (target);
	  return;
	}
      /* An ordinary variable.  */
      alignment = get_variable_align (decl);
      size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
      /* As above, add an ASan red zone for protected globals.  */
      if ((flag_sanitize & SANITIZE_ADDRESS)
	  && asan_protect_global (decl))
	{
	  size += asan_red_zone_size (size);
	  alignment = MAX (alignment,
			   ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
	}
    }

  /* Calculate the object's offset from the start of the block: round the
     current block size up to the required alignment.  */
  block = SYMBOL_REF_BLOCK (symbol);
  mask = alignment / BITS_PER_UNIT - 1;
  offset = (block->size + mask) & ~mask;
  SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;

  /* Record the block's new alignment and size.  */
  block->alignment = MAX (block->alignment, alignment);
  block->size = offset + size;

  vec_safe_push (block->objects, symbol);
}
7903
7904 /* Return the anchor that should be used to address byte offset OFFSET
7905 from the first object in BLOCK. MODEL is the TLS model used
7906 to access it. */
7907
rtx
get_section_anchor (struct object_block *block, HOST_WIDE_INT offset,
		    enum tls_model model)
{
  char label[100];
  unsigned int begin, middle, end;
  unsigned HOST_WIDE_INT min_offset, max_offset, range, bias, delta;
  rtx anchor;

  /* Work out the anchor's offset.  Use an offset of 0 for the first
     anchor so that we don't pessimize the case where we take the address
     of a variable at the beginning of the block.  This is particularly
     useful when a block has only one variable assigned to it.

     We try to place anchors RANGE bytes apart, so there can then be
     anchors at +/-RANGE, +/-2 * RANGE, and so on, up to the limits of
     a ptr_mode offset.  With some target settings, the lowest such
     anchor might be out of range for the lowest ptr_mode offset;
     likewise the highest anchor for the highest offset.  Use anchors
     at the extreme ends of the ptr_mode range in such cases.

     All arithmetic uses unsigned integers in order to avoid
     signed overflow.  */
  max_offset = (unsigned HOST_WIDE_INT) targetm.max_anchor_offset;
  min_offset = (unsigned HOST_WIDE_INT) targetm.min_anchor_offset;
  range = max_offset - min_offset + 1;
  if (range == 0)
    offset = 0;
  else
    {
      /* BIAS is the magnitude of the most negative ptr_mode value,
	 i.e. 2^(bits-1).  */
      bias = HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (ptr_mode) - 1);
      if (offset < 0)
	{
	  /* Round towards zero to a multiple of RANGE measured from
	     MAX_OFFSET, clamping at the most negative representable
	     ptr_mode offset.  */
	  delta = -(unsigned HOST_WIDE_INT) offset + max_offset;
	  delta -= delta % range;
	  if (delta > bias)
	    delta = bias;
	  offset = (HOST_WIDE_INT) (-delta);
	}
      else
	{
	  /* Likewise from MIN_OFFSET, clamping at the most positive
	     representable ptr_mode offset.  */
	  delta = (unsigned HOST_WIDE_INT) offset - min_offset;
	  delta -= delta % range;
	  if (delta > bias - 1)
	    delta = bias - 1;
	  offset = (HOST_WIDE_INT) delta;
	}
    }

  /* Do a binary search to see if there's already an anchor we can use.
     Set BEGIN to the new anchor's index if not.  The anchors vector is
     kept sorted by block offset, then by TLS model.  */
  begin = 0;
  end = vec_safe_length (block->anchors);
  while (begin != end)
    {
      middle = (end + begin) / 2;
      anchor = (*block->anchors)[middle];
      if (SYMBOL_REF_BLOCK_OFFSET (anchor) > offset)
	end = middle;
      else if (SYMBOL_REF_BLOCK_OFFSET (anchor) < offset)
	begin = middle + 1;
      else if (SYMBOL_REF_TLS_MODEL (anchor) > model)
	end = middle;
      else if (SYMBOL_REF_TLS_MODEL (anchor) < model)
	begin = middle + 1;
      else
	return anchor;
    }

  /* Create a new anchor with a unique label.  */
  ASM_GENERATE_INTERNAL_LABEL (label, "LANCHOR", anchor_labelno++);
  anchor = create_block_symbol (ggc_strdup (label), block, offset);
  SYMBOL_REF_FLAGS (anchor) |= SYMBOL_FLAG_LOCAL | SYMBOL_FLAG_ANCHOR;
  SYMBOL_REF_FLAGS (anchor) |= model << SYMBOL_FLAG_TLS_SHIFT;

  /* Insert it at index BEGIN, preserving the sort order.  */
  vec_safe_insert (block->anchors, begin, anchor);
  return anchor;
}
7987
7988 /* Output the objects in BLOCK. */
7989
static void
output_object_block (struct object_block *block)
{
  class constant_descriptor_rtx *desc;
  unsigned int i;
  HOST_WIDE_INT offset;
  tree decl;
  rtx symbol;

  /* An empty block needs no output.  */
  if (!block->objects)
    return;

  /* Switch to the section and make sure that the first byte is
     suitably aligned.  */
  /* Special case VTV comdat sections similar to assemble_variable.  */
  if (SECTION_STYLE (block->sect) == SECTION_NAMED
      && block->sect->named.name
      && (strcmp (block->sect->named.name, ".vtable_map_vars") == 0))
    handle_vtv_comdat_section (block->sect, block->sect->named.decl);
  else
    switch_to_section (block->sect);

  gcc_checking_assert (!(block->sect->common.flags & SECTION_MERGE));
  assemble_align (block->alignment);

  /* Define the values of all anchors relative to the current section
     position.  */
  FOR_EACH_VEC_SAFE_ELT (block->anchors, i, symbol)
    targetm.asm_out.output_anchor (symbol);

  /* Output the objects themselves.  OFFSET tracks how many bytes have
     been emitted so far; it must mirror the sizing done when the
     offsets were assigned in place_block_symbol.  */
  offset = 0;
  FOR_EACH_VEC_ELT (*block->objects, i, symbol)
    {
      /* Move to the object's offset, padding with zeros if necessary.  */
      assemble_zeros (SYMBOL_REF_BLOCK_OFFSET (symbol) - offset);
      offset = SYMBOL_REF_BLOCK_OFFSET (symbol);
      if (CONSTANT_POOL_ADDRESS_P (symbol))
	{
	  /* RTL constant-pool entry.  */
	  desc = SYMBOL_REF_CONSTANT (symbol);
	  /* Pass 1 for align as we have already laid out everything in the block.
	     So aligning shouldn't be necessary.  */
	  output_constant_pool_1 (desc, 1);
	  offset += GET_MODE_SIZE (desc->mode);
	}
      else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
	{
	  /* Tree constant-pool entry; ASan-protected string constants
	     are followed by a zero-filled red zone.  */
	  HOST_WIDE_INT size;
	  decl = SYMBOL_REF_DECL (symbol);
	  assemble_constant_contents (DECL_INITIAL (decl), XSTR (symbol, 0),
				      DECL_ALIGN (decl), false);

	  size = get_constant_size (DECL_INITIAL (decl));
	  offset += size;
	  if ((flag_sanitize & SANITIZE_ADDRESS)
	      && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
	      && asan_protect_global (DECL_INITIAL (decl)))
	    {
	      size = asan_red_zone_size (size);
	      assemble_zeros (size);
	      offset += size;
	    }
	}
      else
	{
	  /* An ordinary variable, likewise followed by a red zone when
	     ASan protects it.  */
	  HOST_WIDE_INT size;
	  decl = SYMBOL_REF_DECL (symbol);
	  assemble_variable_contents (decl, XSTR (symbol, 0), false, false);
	  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
	  offset += size;
	  if ((flag_sanitize & SANITIZE_ADDRESS)
	      && asan_protect_global (decl))
	    {
	      size = asan_red_zone_size (size);
	      assemble_zeros (size);
	      offset += size;
	    }
	}
    }
}
8070
8071 /* A callback for qsort to compare object_blocks. */
8072
8073 static int
output_object_block_compare(const void * x,const void * y)8074 output_object_block_compare (const void *x, const void *y)
8075 {
8076 object_block *p1 = *(object_block * const*)x;
8077 object_block *p2 = *(object_block * const*)y;
8078
8079 if (p1->sect->common.flags & SECTION_NAMED
8080 && !(p2->sect->common.flags & SECTION_NAMED))
8081 return 1;
8082
8083 if (!(p1->sect->common.flags & SECTION_NAMED)
8084 && p2->sect->common.flags & SECTION_NAMED)
8085 return -1;
8086
8087 if (p1->sect->common.flags & SECTION_NAMED
8088 && p2->sect->common.flags & SECTION_NAMED)
8089 return strcmp (p1->sect->named.name, p2->sect->named.name);
8090
8091 unsigned f1 = p1->sect->common.flags;
8092 unsigned f2 = p2->sect->common.flags;
8093 if (f1 == f2)
8094 return 0;
8095 return f1 < f2 ? -1 : 1;
8096 }
8097
8098 /* Output the definitions of all object_blocks. */
8099
void
output_object_blocks (void)
{
  /* Gather every object_block from the hash table into a vector so they
     can be sorted before output.  */
  vec<object_block *, va_heap> v;
  v.create (object_block_htab->elements ());
  object_block *obj;
  hash_table<object_block_hasher>::iterator hi;

  FOR_EACH_HASH_TABLE_ELEMENT (*object_block_htab, obj, object_block *, hi)
    v.quick_push (obj);

  /* Sort them in order to output them in a deterministic manner,
     otherwise we may get .rodata sections in different orders with
     and without -g.  */
  v.qsort (output_object_block_compare);
  unsigned i;
  FOR_EACH_VEC_ELT (v, i, obj)
    output_object_block (obj);

  v.release ();
}
8121
8122 /* This function provides a possible implementation of the
8123 TARGET_ASM_RECORD_GCC_SWITCHES target hook for ELF targets. When triggered
8124 by -frecord-gcc-switches it creates a new mergeable, string section in the
8125 assembler output file called TARGET_ASM_RECORD_GCC_SWITCHES_SECTION which
8126 contains the switches in ASCII format.
8127
8128 FIXME: This code does not correctly handle double quote characters
8129 that appear inside strings, (it strips them rather than preserving them).
8130 FIXME: ASM_OUTPUT_ASCII, as defined in config/elfos.h will not emit NUL
8131 characters - instead it treats them as sub-string separators. Since
8132 we want to emit NUL strings terminators into the object file we have to use
8133 ASM_OUTPUT_SKIP. */
8134
void
elf_record_gcc_switches (const char *options)
{
  /* A mergeable string section; (SECTION_ENTSIZE & 1) encodes an entity
     size of one byte in the entsize bits.  */
  section *sec = get_section (targetm.asm_out.record_gcc_switches_section,
			      SECTION_DEBUG | SECTION_MERGE
			      | SECTION_STRINGS | (SECTION_ENTSIZE & 1), NULL);
  switch_to_section (sec);
  /* Emit the trailing NUL too, so the recorded string is terminated in
     the object file (see the FIXME above about ASM_OUTPUT_ASCII).  */
  ASM_OUTPUT_ASCII (asm_out_file, options, strlen (options) + 1);
}
8144
8145 /* Emit text to declare externally defined symbols. It is needed to
8146 properly support non-default visibility. */
8147 void
default_elf_asm_output_external(FILE * file ATTRIBUTE_UNUSED,tree decl,const char * name ATTRIBUTE_UNUSED)8148 default_elf_asm_output_external (FILE *file ATTRIBUTE_UNUSED,
8149 tree decl,
8150 const char *name ATTRIBUTE_UNUSED)
8151 {
8152 /* We output the name if and only if TREE_SYMBOL_REFERENCED is
8153 set in order to avoid putting out names that are never really
8154 used. Always output visibility specified in the source. */
8155 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
8156 && (DECL_VISIBILITY_SPECIFIED (decl)
8157 || targetm.binds_local_p (decl)))
8158 maybe_assemble_visibility (decl);
8159 }
8160
8161 /* The default hook for TARGET_ASM_OUTPUT_SOURCE_FILENAME. */
8162
void
default_asm_output_source_filename (FILE *file, const char *name)
{
#ifdef ASM_OUTPUT_SOURCE_FILENAME
  /* Let the target emit the filename its own way.  */
  ASM_OUTPUT_SOURCE_FILENAME (file, name);
#else
  /* Default: a quoted .file directive.  */
  fprintf (file, "\t.file\t");
  output_quoted_string (file, name);
  putc ('\n', file);
#endif
}
8174
8175 /* Output a file name in the form wanted by System V. */
8176
8177 void
output_file_directive(FILE * asm_file,const char * input_name)8178 output_file_directive (FILE *asm_file, const char *input_name)
8179 {
8180 int len;
8181 const char *na;
8182
8183 if (input_name == NULL)
8184 input_name = "<stdin>";
8185 else
8186 input_name = remap_debug_filename (input_name);
8187
8188 len = strlen (input_name);
8189 na = input_name + len;
8190
8191 /* NA gets INPUT_NAME sans directory names. */
8192 while (na > input_name)
8193 {
8194 if (IS_DIR_SEPARATOR (na[-1]))
8195 break;
8196 na--;
8197 }
8198
8199 targetm.asm_out.output_source_filename (asm_file, na);
8200 }
8201
8202 /* Create a DEBUG_EXPR_DECL / DEBUG_EXPR pair from RTL expression
8203 EXP. */
rtx
make_debug_expr_from_rtl (const_rtx exp)
{
  tree ddecl = make_node (DEBUG_EXPR_DECL), type;
  machine_mode mode = GET_MODE (exp);
  rtx dval;

  DECL_ARTIFICIAL (ddecl) = 1;
  /* Try to recover a source-level type from the REG or MEM expression
     attached to EXP.  */
  if (REG_P (exp) && REG_EXPR (exp))
    type = TREE_TYPE (REG_EXPR (exp));
  else if (MEM_P (exp) && MEM_EXPR (exp))
    type = TREE_TYPE (MEM_EXPR (exp));
  else
    type = NULL_TREE;
  /* Use that type only if its mode matches EXP's mode; otherwise ask the
     language for some type with MODE (unsignedp = 1).  */
  if (type && TYPE_MODE (type) == mode)
    TREE_TYPE (ddecl) = type;
  else
    TREE_TYPE (ddecl) = lang_hooks.types.type_for_mode (mode, 1);
  SET_DECL_MODE (ddecl, mode);
  /* Link the DEBUG_EXPR rtx and the DEBUG_EXPR_DECL both ways.  */
  dval = gen_rtx_DEBUG_EXPR (mode);
  DEBUG_EXPR_TREE_DECL (dval) = ddecl;
  SET_DECL_RTL (ddecl, dval);
  return dval;
}
8228
8229 #ifdef ELF_ASCII_ESCAPES
8230 /* Default ASM_OUTPUT_LIMITED_STRING for ELF targets. */
8231
8232 void
default_elf_asm_output_limited_string(FILE * f,const char * s)8233 default_elf_asm_output_limited_string (FILE *f, const char *s)
8234 {
8235 int escape;
8236 unsigned char c;
8237
8238 fputs (STRING_ASM_OP, f);
8239 putc ('"', f);
8240 while (*s != '\0')
8241 {
8242 c = *s;
8243 escape = ELF_ASCII_ESCAPES[c];
8244 switch (escape)
8245 {
8246 case 0:
8247 putc (c, f);
8248 break;
8249 case 1:
8250 putc ('\\', f);
8251 putc ('0'+((c>>6)&7), f);
8252 putc ('0'+((c>>3)&7), f);
8253 putc ('0'+(c&7), f);
8254 break;
8255 default:
8256 putc ('\\', f);
8257 putc (escape, f);
8258 break;
8259 }
8260 s++;
8261 }
8262 putc ('\"', f);
8263 putc ('\n', f);
8264 }
8265
8266 /* Default ASM_OUTPUT_ASCII for ELF targets. */
8267
void
default_elf_asm_output_ascii (FILE *f, const char *s, unsigned int len)
{
  const char *limit = s + len;
  /* Position of the next NUL at or after the last scanned point, cached
     across iterations.  */
  const char *last_null = NULL;
  /* Number of bytes emitted into the currently open .ascii string.  */
  unsigned bytes_in_chunk = 0;
  unsigned char c;
  int escape;

  for (; s < limit; s++)
    {
      const char *p;

      /* Close an over-long .ascii chunk and start a fresh one.  */
      if (bytes_in_chunk >= 60)
	{
	  putc ('\"', f);
	  putc ('\n', f);
	  bytes_in_chunk = 0;
	}

      /* Find the next NUL, reusing the cached position when valid.  */
      if (s > last_null)
	{
	  for (p = s; p < limit && *p != '\0'; p++)
	    continue;
	  last_null = p;
	}
      else
	p = last_null;

      /* A short-enough NUL-terminated run is emitted via the .string
	 path (default_elf_asm_output_limited_string); otherwise fall
	 through to .ascii with explicit escapes.  */
      if (p < limit && (p - s) <= (long) ELF_STRING_LIMIT)
	{
	  /* Close any open .ascii chunk first.  */
	  if (bytes_in_chunk > 0)
	    {
	      putc ('\"', f);
	      putc ('\n', f);
	      bytes_in_chunk = 0;
	    }

	  default_elf_asm_output_limited_string (f, s);
	  /* Skip to the NUL; the loop increment steps past it.  */
	  s = p;
	}
      else
	{
	  if (bytes_in_chunk == 0)
	    fputs (ASCII_DATA_ASM_OP "\"", f);

	  c = *s;
	  escape = ELF_ASCII_ESCAPES[c];
	  switch (escape)
	    {
	    case 0:
	      /* Plain printable character.  */
	      putc (c, f);
	      bytes_in_chunk++;
	      break;
	    case 1:
	      /* Three-digit octal escape.  */
	      putc ('\\', f);
	      putc ('0'+((c>>6)&7), f);
	      putc ('0'+((c>>3)&7), f);
	      putc ('0'+(c&7), f);
	      bytes_in_chunk += 4;
	      break;
	    default:
	      /* Single-character escape such as \n.  */
	      putc ('\\', f);
	      putc (escape, f);
	      bytes_in_chunk += 2;
	      break;
	    }

	}
    }

  /* Close the final chunk, if any.  */
  if (bytes_in_chunk > 0)
    {
      putc ('\"', f);
      putc ('\n', f);
    }
}
8345 #endif
8346
/* Cached default-priority sections, so repeated constructors/destructors
   reuse the same section object.  */
static GTY(()) section *elf_init_array_section;
static GTY(()) section *elf_fini_array_section;

/* Return the .init_array or .fini_array section (CONSTRUCTOR_P selects
   which) for PRIORITY, creating a ".init_array.NNNNN"-style section for
   non-default priorities.  */

static section *
get_elf_initfini_array_priority_section (int priority,
					 bool constructor_p)
{
  section *sec;
  if (priority != DEFAULT_INIT_PRIORITY)
    {
      /* 18 bytes: ".init_array"/".fini_array" (11) + '.' + 5 priority
	 digits + NUL.  Assumes PRIORITY fits in five digits — it comes
	 from the init_priority machinery; confirm vs MAX_INIT_PRIORITY.  */
      char buf[18];
      sprintf (buf, "%s.%.5u",
	       constructor_p ? ".init_array" : ".fini_array",
	       priority);
      sec = get_section (buf, SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
    }
  else
    {
      if (constructor_p)
	{
	  if (elf_init_array_section == NULL)
	    elf_init_array_section
	      = get_section (".init_array",
			     SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
	  sec = elf_init_array_section;
	}
      else
	{
	  if (elf_fini_array_section == NULL)
	    elf_fini_array_section
	      = get_section (".fini_array",
			     SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
	  sec = elf_fini_array_section;
	}
    }
  return sec;
}
8384
8385 /* Use .init_array section for constructors. */
8386
8387 void
default_elf_init_array_asm_out_constructor(rtx symbol,int priority)8388 default_elf_init_array_asm_out_constructor (rtx symbol, int priority)
8389 {
8390 section *sec = get_elf_initfini_array_priority_section (priority,
8391 true);
8392 assemble_addr_to_section (symbol, sec);
8393 }
8394
8395 /* Use .fini_array section for destructors. */
8396
8397 void
default_elf_fini_array_asm_out_destructor(rtx symbol,int priority)8398 default_elf_fini_array_asm_out_destructor (rtx symbol, int priority)
8399 {
8400 section *sec = get_elf_initfini_array_priority_section (priority,
8401 false);
8402 assemble_addr_to_section (symbol, sec);
8403 }
8404
8405 /* Default TARGET_ASM_OUTPUT_IDENT hook.
8406
8407 This is a bit of a cheat. The real default is a no-op, but this
8408 hook is the default for all targets with a .ident directive. */
8409
8410 void
default_asm_output_ident_directive(const char * ident_str)8411 default_asm_output_ident_directive (const char *ident_str)
8412 {
8413 const char *ident_asm_op = "\t.ident\t";
8414
8415 /* If we are still in the front end, do not write out the string
8416 to asm_out_file. Instead, add a fake top-level asm statement.
8417 This allows the front ends to use this hook without actually
8418 writing to asm_out_file, to handle #ident or Pragma Ident. */
8419 if (symtab->state == PARSING)
8420 {
8421 char *buf = ACONCAT ((ident_asm_op, "\"", ident_str, "\"\n", NULL));
8422 symtab->finalize_toplevel_asm (build_string (strlen (buf), buf));
8423 }
8424 else
8425 fprintf (asm_out_file, "%s\"%s\"\n", ident_asm_op, ident_str);
8426 }
8427
8428
8429 /* This function ensures that vtable_map variables are not only
8430 in the comdat section, but that each variable has its own unique
8431 comdat name. Without this the variables end up in the same section
8432 with a single comdat name.
8433
8434 FIXME: resolve_unique_section needs to deal better with
8435 decls with both DECL_SECTION_NAME and DECL_ONE_ONLY. Once
8436 that is fixed, this if-else statement can be replaced with
8437 a single call to "switch_to_section (sect)". */
8438
static void
handle_vtv_comdat_section (section *sect, const_tree decl ATTRIBUTE_UNUSED)
{
#if defined (OBJECT_FORMAT_ELF)
  /* Re-emit the named-section directive with SECTION_LINKONCE and the
     decl's name, giving each vtable_map variable its own comdat group.  */
  targetm.asm_out.named_section (sect->named.name,
				 sect->named.common.flags
				 | SECTION_LINKONCE,
				 DECL_NAME (decl));
  in_section = sect;
#else
  /* Neither OBJECT_FORMAT_PE, nor OBJECT_FORMAT_COFF is set here.
     Therefore the following check is used.
     In case the target is PE or COFF a comdat group section
     is created, e.g. .vtable_map_vars$foo.  The linker places
     everything in .vtable_map_vars at the end.

     A fix could be made in
     gcc/config/i386/winnt.c: i386_pe_unique_section.  */
  if (TARGET_PECOFF)
    {
      char *name;

      /* Build "<section>$<decl-name>" (or "$<comdat-group>" when the
	 decl name is not a plain identifier).  */
      if (TREE_CODE (DECL_NAME (decl)) == IDENTIFIER_NODE)
	name = ACONCAT ((sect->named.name, "$",
			 IDENTIFIER_POINTER (DECL_NAME (decl)), NULL));
      else
	name = ACONCAT ((sect->named.name, "$",
			 IDENTIFIER_POINTER (DECL_COMDAT_GROUP (DECL_NAME (decl))),
			 NULL));

      targetm.asm_out.named_section (name,
				     sect->named.common.flags
				     | SECTION_LINKONCE,
				     DECL_NAME (decl));
      in_section = sect;
    }
  else
    switch_to_section (sect);
#endif
}
8479
8480 #include "gt-varasm.h"
8481