xref: /netbsd/external/gpl3/gcc/dist/gcc/cp/module.cc (revision f0fbc68b)
1 /* C++ modules.  Experimental!
2    Copyright (C) 2017-2022 Free Software Foundation, Inc.
3    Written by Nathan Sidwell <nathan@acm.org> while at FaceBook
4 
5    This file is part of GCC.
6 
7    GCC is free software; you can redistribute it and/or modify it
8    under the terms of the GNU General Public License as published by
9    the Free Software Foundation; either version 3, or (at your option)
10    any later version.
11 
12    GCC is distributed in the hope that it will be useful, but
13    WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 /* Comments in this file have a non-negligible chance of being wrong
22    or at least inaccurate, due to (a) my misunderstanding, (b)
23    ambiguities that I have interpreted differently to the original
24    intent, (c) changes in the specification, (d) my poor wording, (e)
25    source changes.  */
26 
27 /* (Incomplete) Design Notes
28 
29    A hash table contains all module names.  Imported modules are
30    present in a modules array, which by construction places an
31    import's dependencies before the import itself.  The single
32    exception is the current TU, which always occupies slot zero (even
33    when it is not a module).
34 
35    Imported decls occupy an entity_ary, an array of binding_slots, indexed
36    by importing module and index within that module.  A flat index is
37    used, as each module reserves a contiguous range of indices.
38    Initially each slot indicates the CMI section containing the
39    streamed decl.  When the decl is imported it will point to the decl
40    itself.
41 
42    Additionally each imported decl is mapped in the entity_map via its
43    DECL_UID to the flat index in the entity_ary.  Thus we can locate
44    the index for any imported decl by using this map and then
45    de-flattening the index via a binary search of the module vector.
46    Cross-module references are by (remapped) module number and
47    module-local index.
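
   As a purely illustrative sketch (the helper and field names here are
   invented, not the real ones), locating an imported decl goes roughly:

	// DECL_UID -> flat index, via the entity map
	unsigned flat = entity_map_lookup (DECL_UID (decl));
	// binary search the module vector for the import whose
	// contiguous index range contains FLAT
	module_state *origin = module_for_entity (flat);
	// de-flatten to a module-local index
	unsigned local = flat - origin->entity_base;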
48 
49    Each importable DECL contains several flags.  The simple set are
50    DECL_EXPORT_P, DECL_MODULE_PURVIEW_P and DECL_MODULE_IMPORT_P.  The
51    first indicates whether it is exported, the second whether it is in
52    the module purview (as opposed to the global module fragment), and
53    the third indicates whether it was an import into this TU or not.
54 
55    The more detailed flags are DECL_MODULE_PARTITION_P,
56    DECL_MODULE_ENTITY_P.  The first is set in a primary interface unit
57    on decls that were read from module partitions (these will have
58    DECL_MODULE_IMPORT_P set too).  Such decls will be streamed out to
59    the primary's CMI.  DECL_MODULE_ENTITY_P is set when an entity is
60    imported, even if it matched a non-imported entity.  Such a decl
61    will not have DECL_MODULE_IMPORT_P set, even though it has an entry
62    in the entity map and array.
63 
64    Header units are module-like.
65 
66    For namespace-scope lookup, the decls for a particular module are
67    held in a sparse array hanging off the binding of the name.
68    This is partitioned into two: a few fixed slots at the start
69    followed by the sparse slots afterwards.  By construction we only
70    need to append new slots to the end -- there is never a need to
71    insert in the middle.  The fixed slots are MODULE_SLOT_CURRENT for
72    the current TU (regardless of whether it is a module or not),
73    MODULE_SLOT_GLOBAL and MODULE_SLOT_PARTITION.  These latter two
74    slots are used for merging entities across the global module and
75    module partitions respectively.  MODULE_SLOT_PARTITION is only
76    present in a module.  Neither of those two slots is searched during
77    name lookup -- they are internal use only.  This vector is created
78    lazily once we require it; if there is only a declaration from the
79    current TU, a regular binding is present, and it is converted on
80    demand.
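
   Illustratively (a sketch only; the real representation differs), a
   converted binding can be pictured as:

	struct binding_slot_sketch { unsigned module; tree decls; };
	struct binding_vector_sketch
	{
	  tree current;		// MODULE_SLOT_CURRENT -- this TU, searched
	  tree global_merge;	// MODULE_SLOT_GLOBAL -- merging only
	  tree partition_merge;	// MODULE_SLOT_PARTITION -- modules only
	  binding_slot_sketch sparse[];	// appended per import, never
					// inserted in the middle
	};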
81 
82    OPTIMIZATION: Outside of the current TU, we only need ADL to work.
83    We could optimize regular lookup for the current TU by glomming all
84    the visible decls on its slot.  Perhaps wait until design is a
85    little more settled though.
86 
87    There is only one instance of each extern-linkage namespace.  It
88    appears in every module slot that makes it visible.  It also
89    appears in MODULE_SLOT_GLOBAL.  (It is an ODR violation if they
90    collide with some other global module entity.)  We also have an
91    optimization that shares the slot for adjacent modules that declare
92    the same such namespace.
93 
94    A module interface compilation produces a Compiled Module Interface
95    (CMI).  The format used is Encapsulated Lazy Records Of Numbered
96    Declarations, which is essentially ELF's section encapsulation. (As
97    all good nerds are aware, Elrond is half Elf.)  Some sections are
98    named, and contain information about the module as a whole (indices
99    etc), and other sections are referenced by number.  Although I
100    don't defend against actively hostile CMIs, there is some
101    checksumming involved to verify data integrity.  When dumping out
102    an interface, we generate a graph of all the
103    independently-redeclarable DECLS that are needed, and the decls
104    they reference.  From that we determine the strongly connected
105    components (SCC) within this TU.  Each SCC is dumped to a separate
106    numbered section of the CMI.  We generate a binding table section,
107    mapping each namespace&name to a defining section.  This allows
108    lazy loading.
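
   For example (section number invented), a module exporting foo::bar
   would stream bar's SCC to some numbered section, say 42, and the
   binding table would map {namespace foo, name bar} to section 42; an
   importer only reads section 42 when lookup first touches that
   binding.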
109 
110    Lazy loading employs mmap to map a read-only image of the CMI.
111    It thus only occupies address space and is paged in on demand,
112    backed by the CMI file itself.  If mmap is unavailable, regular
113    FILEIO is used.  Also, there's a bespoke ELF reader/writer here,
114    which implements just the section table and sections (including
115    string sections) of a 32-bit ELF in host byte-order.  You can of
116    course inspect it with readelf.  I figured 32-bit is sufficient
117    for a single module.  I detect running out of section numbers, but
118    do not implement the ELF overflow mechanism.  At least you'll get
119    an error if that happens.
120 
121    We do not separate declarations and definitions.  My guess is that
122    if you refer to the declaration, you'll also need the definition
123    (template body, inline function, class definition etc).  But this
124    does mean we can get larger SCCs than if we separated them.  It is
125    unclear whether this is a win or not.
126 
127    Notice that we embed section indices into the contents of other
128    sections.  Thus random manipulation of the CMI file by ELF tools
129    may well break it.  The kosher way would probably be to introduce
130    indirection via section symbols, but that would require defining a
131    relocation type.
132 
133    Notice that lazy loading of one module's decls can cause lazy
134    loading of other decls in the same or another module.  Clearly we
135    want to avoid loops.  In a correct program there can be no loops in
136    the module dependency graph, and the above-mentioned SCC algorithm
137    places all intra-module circular dependencies in the same SCC.  It
138    also orders the SCCs wrt each other, so dependent SCCs come first.
139    As we load dependent modules first, we know there can be no
140    reference to a higher-numbered module, and because we write out
141    dependent SCCs first, likewise for SCCs within the module.  This
142    allows us to immediately detect broken references.  When loading,
143    we must ensure the rest of the compiler doesn't cause some
144    unconnected load to occur (for instance, instantiate a template).
145 
146 Classes used:
147 
148    dumper - logger
149 
150    data - buffer
151 
152    bytes - data streamer
153    bytes_in : bytes - scalar reader
154    bytes_out : bytes - scalar writer
155 
156    elf - ELROND format
157    elf_in : elf - ELROND reader
158    elf_out : elf - ELROND writer
159 
160    trees_in : bytes_in - tree reader
161    trees_out : bytes_out - tree writer
162 
163    depset - dependency set
164    depset::hash - hash table of depsets
165    depset::tarjan - SCC determinator
166 
167    uidset<T> - set T's related to a UID
168    uidset<T>::hash - hash table of uidset<T>
169 
170    loc_spans - location map data
171 
172    module_state - module object
173 
174    slurping - data needed during loading
175 
176    macro_import - imported macro data
177    macro_export - exported macro data
178 
179    The ELROND objects use mmap, for both reading and writing.  If mmap
180    is unavailable, fileno IO is used to read and write blocks of data.
181 
182    The mapper object uses fileno IO to communicate with the server or
183    program.   */
184 
185 /* In experimental (trunk) sources, MODULE_VERSION is a #define passed
186    in from the Makefile.  It records the modification date of the
187    source directory -- that's the only way to stay sane.  In release
188    sources, we (plan to) use the compiler's major.minor versioning.
189    While the format might not change between minor versions, it
190    seems simplest to tie the two together.  There's no concept of
191    inter-version compatibility.  */
192 #define IS_EXPERIMENTAL(V) ((V) >= (1U << 20))
193 #define MODULE_MAJOR(V) ((V) / 10000)
194 #define MODULE_MINOR(V) ((V) % 10000)
195 #define EXPERIMENT(A,B) (IS_EXPERIMENTAL (MODULE_VERSION) ? (A) : (B))
196 #ifndef MODULE_VERSION
197 #include "bversion.h"
198 #define MODULE_VERSION (BUILDING_GCC_MAJOR * 10000U + BUILDING_GCC_MINOR)
199 #elif !IS_EXPERIMENTAL (MODULE_VERSION)
200 #error "This is not the version I was looking for."
201 #endif
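
/* For example, a release GCC 12.1 yields MODULE_VERSION 120001: well
   below 1u << 20, so not experimental, with MODULE_MAJOR 12 and
   MODULE_MINOR 1.  Experimental trunk builds instead get a
   date-derived value of at least 1u << 20 from the Makefile.  */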
202 
203 #define _DEFAULT_SOURCE 1 /* To get TZ field of struct tm, if available.  */
204 #include "config.h"
205 #define INCLUDE_MEMORY
206 #define INCLUDE_STRING
207 #define INCLUDE_VECTOR
208 #include "system.h"
209 #include "coretypes.h"
210 #include "cp-tree.h"
211 #include "timevar.h"
212 #include "stringpool.h"
213 #include "dumpfile.h"
214 #include "bitmap.h"
215 #include "cgraph.h"
216 #include "tree-iterator.h"
217 #include "cpplib.h"
218 #include "mkdeps.h"
219 #include "incpath.h"
220 #include "libiberty.h"
221 #include "stor-layout.h"
222 #include "version.h"
223 #include "tree-diagnostic.h"
224 #include "toplev.h"
225 #include "opts.h"
226 #include "attribs.h"
227 #include "intl.h"
228 #include "langhooks.h"
229 /* This TU doesn't need or want to see the networking.  */
230 #define CODY_NETWORKING 0
231 #include "mapper-client.h"
232 
233 #if 0 // 1 for testing no mmap
234 #define MAPPED_READING 0
235 #define MAPPED_WRITING 0
236 #else
237 #if HAVE_MMAP_FILE && _POSIX_MAPPED_FILES > 0
238 /* mmap, munmap.  */
239 #define MAPPED_READING 1
240 #if HAVE_SYSCONF && defined (_SC_PAGE_SIZE)
241 /* msync, sysconf (_SC_PAGE_SIZE), ftruncate  */
242 /* posix_fallocate used if available.  */
243 #define MAPPED_WRITING 1
244 #else
245 #define MAPPED_WRITING 0
246 #endif
247 #else
248 #define MAPPED_READING 0
249 #define MAPPED_WRITING 0
250 #endif
251 #endif
252 
253 /* Some open(2) flag differences, what a colourful world it is!  */
254 #if defined (O_CLOEXEC)
255 // OK
256 #elif defined (_O_NOINHERIT)
257 /* Windows' _O_NOINHERIT matches O_CLOEXEC flag */
258 #define O_CLOEXEC _O_NOINHERIT
259 #else
260 #define O_CLOEXEC 0
261 #endif
262 #if defined (O_BINARY)
263 // Ok?
264 #elif defined (_O_BINARY)
265 /* Windows' open(2) call defaults to text!  */
266 #define O_BINARY _O_BINARY
267 #else
268 #define O_BINARY 0
269 #endif
270 
271 static inline cpp_hashnode *cpp_node (tree id)
272 {
273   return CPP_HASHNODE (GCC_IDENT_TO_HT_IDENT (id));
274 }
275 
276 static inline tree identifier (const cpp_hashnode *node)
277 {
278   /* HT_NODE() expands to node->ident, from which HT_IDENT_TO_GCC_IDENT()
279      subtracts a nonzero constant, deriving a pointer to
280      a different member than ident.  That's strictly undefined
281      and detected by -Warray-bounds.  Suppress it.  See PR 101372.  */
282 #pragma GCC diagnostic push
283 #pragma GCC diagnostic ignored "-Warray-bounds"
284   return HT_IDENT_TO_GCC_IDENT (HT_NODE (const_cast<cpp_hashnode *> (node)));
285 #pragma GCC diagnostic pop
286 }
287 
288 /* Id for dumping module information.  */
289 int module_dump_id;
290 
291 /* We have a special module owner.  */
292 #define MODULE_UNKNOWN (~0U)    /* Not yet known.  */
293 
294 /* Prefix for section names.  */
295 #define MOD_SNAME_PFX ".gnu.c++"
296 
297 /* Format a version for user consumption.  */
298 
299 typedef char verstr_t[32];
300 static void
301 version2string (unsigned version, verstr_t &out)
302 {
303   unsigned major = MODULE_MAJOR (version);
304   unsigned minor = MODULE_MINOR (version);
305 
306   if (IS_EXPERIMENTAL (version))
307     sprintf (out, "%04u/%02u/%02u-%02u:%02u%s",
308 	     2000 + major / 10000, (major / 100) % 100, (major % 100),
309 	     minor / 100, minor % 100,
310 	     EXPERIMENT ("", " (experimental)"));
311   else
312     sprintf (out, "%u.%u", major, minor);
313 }
314 
315 /* Include files to note translation for.  */
316 static vec<const char *, va_heap, vl_embed> *note_includes;
317 
318 /* Modules to note CMI pathnames for.  */
319 static vec<const char *, va_heap, vl_embed> *note_cmis;
320 
321 /* Traits to hash an arbitrary pointer.  Entries are not deletable,
322    and removal is a no-op (it is only needed at table destruction).  */
323 template <typename T>
324 struct nodel_ptr_hash : pointer_hash<T>, typed_noop_remove <T *> {
325   /* Nothing is deletable.  Everything is insertable.  */
326   static bool is_deleted (T *) { return false; }
327   static void mark_deleted (T *) { gcc_unreachable (); }
328 };
329 
330 /* Map from pointer to signed integer.   */
331 typedef simple_hashmap_traits<nodel_ptr_hash<void>, int> ptr_int_traits;
332 typedef hash_map<void *,signed,ptr_int_traits> ptr_int_hash_map;
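
/* A minimal usage sketch of the above map (illustrative only; PTR is
   some pointer key):

     ptr_int_hash_map map (20);
     bool existed;
     int &slot = map.get_or_insert (ptr, &existed);
     if (!existed)
       slot = 0;	// first sighting of PTR

   This is the pattern used below to map identifiers to string table
   offsets.  */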
333 
334 /********************************************************************/
335 /* Basic streaming & ELF.  Serialization is usually via mmap.  For
336    writing we slide a buffer over the output file, syncing it
337    appropriately.  For reading we simply map the whole file (as a
338    file-backed read-only map -- it's just address space, leaving the
339    OS pager to deal with getting the data to us).  Some buffers need
340    to be more conventional malloc'd contents.   */
341 
342 /* Variable length buffer.  */
343 
344 class data {
345 public:
346   class allocator {
347   public:
348     /* Tools tend to moan if the dtor's not virtual.  */
349     virtual ~allocator () {}
350 
351   public:
352     void grow (data &obj, unsigned needed, bool exact);
353     void shrink (data &obj);
354 
355   public:
356     virtual char *grow (char *ptr, unsigned needed);
357     virtual void shrink (char *ptr);
358   };
359 
360 public:
361   char *buffer;		/* Buffer being transferred.  */
362   /* Although size_t would be the usual size, we know we never get
363      more than 4GB of buffer -- because that's the limit of the
364      encapsulation format.  And if you need bigger imports, you're
365      doing it wrong.  */
366   unsigned size;	/* Allocated size of buffer.  */
367   unsigned pos;		/* Position in buffer.  */
368 
369 public:
370   data ()
371     :buffer (NULL), size (0), pos (0)
372   {
373   }
374   ~data ()
375   {
376     /* Make sure the derived and/or using class know what they're
377        doing.  */
378     gcc_checking_assert (!buffer);
379   }
380 
381 protected:
382   char *use (unsigned count)
383   {
384     if (size < pos + count)
385       return NULL;
386     char *res = &buffer[pos];
387     pos += count;
388     return res;
389   }
390 
391 public:
392   void unuse (unsigned count)
393   {
394     pos -= count;
395   }
396 
397 public:
398   static allocator simple_memory;
399 };
400 
401 /* The simple data allocator.  */
402 data::allocator data::simple_memory;
403 
404 /* Grow buffer to at least size NEEDED.  */
405 
406 void
407 data::allocator::grow (data &obj, unsigned needed, bool exact)
408 {
409   gcc_checking_assert (needed ? needed > obj.size : !obj.size);
410   if (!needed)
411     /* Pick a default size.  */
412     needed = EXPERIMENT (100, 1000);
413 
414   if (!exact)
415     needed *= 2;
416   obj.buffer = grow (obj.buffer, needed);
417   if (obj.buffer)
418     obj.size = needed;
419   else
420     obj.pos = obj.size = 0;
421 }
422 
423 /* Free a buffer.  */
424 
425 void
426 data::allocator::shrink (data &obj)
427 {
428   shrink (obj.buffer);
429   obj.buffer = NULL;
430   obj.size = 0;
431 }
432 
433 char *
434 data::allocator::grow (char *ptr, unsigned needed)
435 {
436   return XRESIZEVAR (char, ptr, needed);
437 }
438 
439 void
440 data::allocator::shrink (char *ptr)
441 {
442   XDELETEVEC (ptr);
443 }
444 
445 /* Byte streamer base.   Buffer with read/write position and smarts
446    for single bits.  */
447 
448 class bytes : public data {
449 public:
450   typedef data parent;
451 
452 protected:
453   uint32_t bit_val;	/* Bit buffer.  */
454   unsigned bit_pos;	/* Next bit in bit buffer.  */
455 
456 public:
457   bytes ()
458     :parent (), bit_val (0), bit_pos (0)
459   {}
460   ~bytes ()
461   {
462   }
463 
464 protected:
465   unsigned calc_crc (unsigned) const;
466 
467 protected:
468   /* Finish bit packet.  Rewind the bytes not used.  */
469   unsigned bit_flush ()
470   {
471     gcc_assert (bit_pos);
472     unsigned bytes = (bit_pos + 7) / 8;
473     unuse (4 - bytes);
474     bit_pos = 0;
475     bit_val = 0;
476     return bytes;
477   }
478 };
479 
480 /* Calculate the crc32 of the buffer.  Note the CRC is stored in the
481    first 4 bytes, so don't include them.  */
482 
483 unsigned
484 bytes::calc_crc (unsigned l) const
485 {
486   unsigned crc = 0;
487   for (size_t ix = 4; ix < l; ix++)
488     crc = crc32_byte (crc, buffer[ix]);
489   return crc;
490 }
491 
492 class elf_in;
493 
494 /* Byte stream reader.  */
495 
496 class bytes_in : public bytes {
497   typedef bytes parent;
498 
499 protected:
500   bool overrun;  /* Sticky read-too-much flag.  */
501 
502 public:
503   bytes_in ()
504     : parent (), overrun (false)
505   {
506   }
507   ~bytes_in ()
508   {
509   }
510 
511 public:
512   /* Begin reading a named section.  */
513   bool begin (location_t loc, elf_in *src, const char *name);
514   /* Begin reading a numbered section with optional name.  */
515   bool begin (location_t loc, elf_in *src, unsigned, const char * = NULL);
516   /* Complete reading a buffer.  Propagate errors and return true on
517      success.  */
518   bool end (elf_in *src);
519   /* Return true if there is unread data.  */
520   bool more_p () const
521   {
522     return pos != size;
523   }
524 
525 public:
526   /* Start reading at OFFSET.  */
527   void random_access (unsigned offset)
528   {
529     if (offset > size)
530       set_overrun ();
531     pos = offset;
532     bit_pos = bit_val = 0;
533   }
534 
535 public:
536   void align (unsigned boundary)
537   {
538     if (unsigned pad = pos & (boundary - 1))
539       read (boundary - pad);
540   }
541 
542 public:
543   const char *read (unsigned count)
544   {
545     char *ptr = use (count);
546     if (!ptr)
547       set_overrun ();
548     return ptr;
549   }
550 
551 public:
552   bool check_crc () const;
553   /* We store the CRC in the first 4 bytes, using host endianness.  */
554   unsigned get_crc () const
555   {
556     return *(const unsigned *)&buffer[0];
557   }
558 
559 public:
560   /* Manipulate the overrun flag.  */
561   bool get_overrun () const
562   {
563     return overrun;
564   }
565   void set_overrun ()
566   {
567     overrun = true;
568   }
569 
570 public:
571   unsigned u32 ();  	/* Read uncompressed integer.  */
572 
573 public:
574   bool b ();	    	/* Read a bool.  */
575   void bflush ();	/* Completed a block of bools.  */
576 
577 private:
578   void bfill ();	/* Get the next block of bools.  */
579 
580 public:
581   int c ();		/* Read a char.  */
582   int i ();		/* Read a signed int.  */
583   unsigned u ();	/* Read an unsigned int.  */
584   size_t z ();		/* Read a size_t.  */
585   HOST_WIDE_INT wi ();  /* Read a HOST_WIDE_INT.  */
586   unsigned HOST_WIDE_INT wu (); /* Read an unsigned HOST_WIDE_INT.  */
587   const char *str (size_t * = NULL); /* Read a string.  */
588   const void *buf (size_t); /* Read a fixed-length buffer.  */
589   cpp_hashnode *cpp_node (); /* Read a cpp node.  */
590 };
591 
592 /* Verify the buffer's CRC is correct.  */
593 
594 bool
595 bytes_in::check_crc () const
596 {
597   if (size < 4)
598     return false;
599 
600   unsigned c_crc = calc_crc (size);
601   if (c_crc != get_crc ())
602     return false;
603 
604   return true;
605 }
606 
607 class elf_out;
608 
609 /* Byte stream writer.  */
610 
611 class bytes_out : public bytes {
612   typedef bytes parent;
613 
614 public:
615   allocator *memory;	/* Obtainer of memory.  */
616 
617 public:
618   bytes_out (allocator *memory)
619     : parent (), memory (memory)
620   {
621   }
622   ~bytes_out ()
623   {
624   }
625 
626 public:
627   bool streaming_p () const
628   {
629     return memory != NULL;
630   }
631 
632 public:
633   void set_crc (unsigned *crc_ptr);
634 
635 public:
636   /* Begin writing, maybe reserve space for CRC.  */
637   void begin (bool need_crc = true);
638   /* Finish writing.  Spill to section by number.  */
639   unsigned end (elf_out *, unsigned, unsigned *crc_ptr = NULL);
640 
641 public:
642   void align (unsigned boundary)
643   {
644     if (unsigned pad = pos & (boundary - 1))
645       write (boundary - pad);
646   }
647 
648 public:
649   char *write (unsigned count, bool exact = false)
650   {
651     if (size < pos + count)
652       memory->grow (*this, pos + count, exact);
653     return use (count);
654   }
655 
656 public:
657   void u32 (unsigned);  /* Write uncompressed integer.  */
658 
659 public:
660   void b (bool);	/* Write bool.  */
661   void bflush ();	/* Finish block of bools.  */
662 
663 public:
664   void c (unsigned char); /* Write unsigned char.  */
665   void i (int);		/* Write signed int.  */
666   void u (unsigned);	/* Write unsigned int.  */
667   void z (size_t s);	/* Write size_t.  */
668   void wi (HOST_WIDE_INT); /* Write HOST_WIDE_INT.  */
669   void wu (unsigned HOST_WIDE_INT);  /* Write unsigned HOST_WIDE_INT.  */
670   void str (const char *ptr)
671   {
672     str (ptr, strlen (ptr));
673   }
674   void cpp_node (const cpp_hashnode *node)
675   {
676     str ((const char *)NODE_NAME (node), NODE_LEN (node));
677   }
678   void str (const char *, size_t);  /* Write string of known length.  */
679   void buf (const void *, size_t);  /* Write fixed length buffer.  */
680   void *buf (size_t); /* Create a writable buffer */
681 
682 public:
683   /* Format a NUL-terminated raw string.  */
684   void printf (const char *, ...) ATTRIBUTE_PRINTF_2;
685   void print_time (const char *, const tm *, const char *);
686 
687 public:
688   /* Dump instrumentation.  */
689   static void instrument ();
690 
691 protected:
692   /* Instrumentation.  */
693   static unsigned spans[4];
694   static unsigned lengths[4];
695   static int is_set;
696 };
697 
698 /* Instrumentation.  */
699 unsigned bytes_out::spans[4];
700 unsigned bytes_out::lengths[4];
701 int bytes_out::is_set = -1;
702 
703 /* If CRC_PTR non-null, set the CRC of the buffer.  Mix the CRC into
704    that pointed to by CRC_PTR.  */
705 
706 void
707 bytes_out::set_crc (unsigned *crc_ptr)
708 {
709   if (crc_ptr)
710     {
711       gcc_checking_assert (pos >= 4);
712 
713       unsigned crc = calc_crc (pos);
714       unsigned accum = *crc_ptr;
715       /* Only mix the existing *CRC_PTR if it is non-zero.  */
716       accum = accum ? crc32_unsigned (accum, crc) : crc;
717       *crc_ptr = accum;
718 
719       /* Buffer will be sufficiently aligned.  */
720       *(unsigned *)buffer = crc;
721     }
722 }
723 
724 /* Finish a set of bools.  */
725 
726 void
727 bytes_out::bflush ()
728 {
729   if (bit_pos)
730     {
731       u32 (bit_val);
732       lengths[2] += bit_flush ();
733     }
734   spans[2]++;
735   is_set = -1;
736 }
737 
738 void
739 bytes_in::bflush ()
740 {
741   if (bit_pos)
742     bit_flush ();
743 }
744 
745 /* When reading, we don't know how many bools we'll read in.  So read
746    4 bytes-worth, and then rewind when flushing if we didn't need them
747    all.  You can't have a block of bools closer than 4 bytes to the
748    end of the buffer.  */
749 
750 void
751 bytes_in::bfill ()
752 {
753   bit_val = u32 ();
754 }
755 
756 /* Bools are packed into bytes.  You cannot mix bools and non-bools.
757    You must call bflush before emitting another type.  So batch your
758    bools.
759 
760    It may be worth optimizing for most bools being zero.  Some kind of
761    run-length encoding?  */
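
/* A usage sketch (invented names, not the real streaming code): the
   writer and the reader must batch and flush bools identically.

     Writing:				Reading:
       out.b (is_exported);		  bool is_exported = in.b ();
       out.b (is_inline);		  bool is_inline = in.b ();
       out.bflush ();			  in.bflush ();
       out.u (length);			  unsigned length = in.u ();  */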
762 
763 void
764 bytes_out::b (bool x)
765 {
766   if (is_set != x)
767     {
768       is_set = x;
769       spans[x]++;
770     }
771   lengths[x]++;
772   bit_val |= unsigned (x) << bit_pos++;
773   if (bit_pos == 32)
774     {
775       u32 (bit_val);
776       lengths[2] += bit_flush ();
777     }
778 }
779 
780 bool
781 bytes_in::b ()
782 {
783   if (!bit_pos)
784     bfill ();
785   bool v = (bit_val >> bit_pos++) & 1;
786   if (bit_pos == 32)
787     bit_flush ();
788   return v;
789 }
790 
791 /* Exactly 4 bytes.  Used internally for bool packing and a few other
792    places.  We can't simply use uint32_t because (a) alignment and
793    (b) we need little-endian for the bool streaming rewinding to make
794    sense.  */
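
/* For example, u32 (0x12345678) writes the bytes 78 56 34 12, and the
   matching u32 () read reassembles 0x12345678 on any host.  */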
795 
796 void
797 bytes_out::u32 (unsigned val)
798 {
799   if (char *ptr = write (4))
800     {
801       ptr[0] = val;
802       ptr[1] = val >> 8;
803       ptr[2] = val >> 16;
804       ptr[3] = val >> 24;
805     }
806 }
807 
808 unsigned
809 bytes_in::u32 ()
810 {
811   unsigned val = 0;
812   if (const char *ptr = read (4))
813     {
814       val |= (unsigned char)ptr[0];
815       val |= (unsigned char)ptr[1] << 8;
816       val |= (unsigned char)ptr[2] << 16;
817       val |= (unsigned char)ptr[3] << 24;
818     }
819 
820   return val;
821 }
822 
823 /* Chars are unsigned and written as single bytes. */
824 
825 void
826 bytes_out::c (unsigned char v)
827 {
828   if (char *ptr = write (1))
829     *ptr = v;
830 }
831 
832 int
833 bytes_in::c ()
834 {
835   int v = 0;
836   if (const char *ptr = read (1))
837     v = (unsigned char)ptr[0];
838   return v;
839 }
840 
841 /* Ints that fit in 7 bits are a single byte.  Otherwise a 3-bit count of
842    following bytes in big-endian form; 4 value bits are in the first byte.  */
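
/* Worked example: 10 fits in 7 bits and is written as the single byte
   0x0a; -1 becomes 0x7f.  300 (0x12c) does not fit, so the first byte
   is 0x81 -- top bit set, a 3-bit count holding one less than the
   number of following bytes (0, so one byte follows) and the top
   nibble 0x1 -- followed by the low byte 0x2c.  */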
843 
844 void
845 bytes_out::i (int v)
846 {
847   if (char *ptr = write (1))
848     {
849       if (v <= 0x3f && v >= -0x40)
850 	*ptr = v & 0x7f;
851       else
852 	{
853 	  unsigned bytes = 0;
854 	  int probe;
855 	  if (v >= 0)
856 	    for (probe = v >> 8; probe > 0x7; probe >>= 8)
857 	      bytes++;
858 	  else
859 	    for (probe = v >> 8; probe < -0x8; probe >>= 8)
860 	      bytes++;
861 	  *ptr = 0x80 | bytes << 4 | (probe & 0xf);
862 	  if ((ptr = write (++bytes)))
863 	    for (; bytes--; v >>= 8)
864 	      ptr[bytes] = v & 0xff;
865 	}
866     }
867 }
868 
869 int
870 bytes_in::i ()
871 {
872   int v = 0;
873   if (const char *ptr = read (1))
874     {
875       v = *ptr & 0xff;
876       if (v & 0x80)
877 	{
878 	  unsigned bytes = (v >> 4) & 0x7;
879 	  v &= 0xf;
880 	  if (v & 0x8)
881 	    v |= -1 ^ 0x7;
882 	  /* unsigned necessary due to left shifts of -ve values.  */
883 	  unsigned uv = unsigned (v);
884 	  if ((ptr = read (++bytes)))
885 	    while (bytes--)
886 	      uv = (uv << 8) | (*ptr++ & 0xff);
887 	  v = int (uv);
888 	}
889       else if (v & 0x40)
890 	v |= -1 ^ 0x3f;
891     }
892 
893   return v;
894 }
895 
896 void
897 bytes_out::u (unsigned v)
898 {
899   if (char *ptr = write (1))
900     {
901       if (v <= 0x7f)
902 	*ptr = v;
903       else
904 	{
905 	  unsigned bytes = 0;
906 	  unsigned probe;
907 	  for (probe = v >> 8; probe > 0xf; probe >>= 8)
908 	    bytes++;
909 	  *ptr = 0x80 | bytes << 4 | probe;
910 	  if ((ptr = write (++bytes)))
911 	    for (; bytes--; v >>= 8)
912 	      ptr[bytes] = v & 0xff;
913 	}
914     }
915 }
916 
917 unsigned
918 bytes_in::u ()
919 {
920   unsigned v = 0;
921 
922   if (const char *ptr = read (1))
923     {
924       v = *ptr & 0xff;
925       if (v & 0x80)
926 	{
927 	  unsigned bytes = (v >> 4) & 0x7;
928 	  v &= 0xf;
929 	  if ((ptr = read (++bytes)))
930 	    while (bytes--)
931 	      v = (v << 8) | (*ptr++ & 0xff);
932 	}
933     }
934 
935   return v;
936 }
937 
938 void
939 bytes_out::wi (HOST_WIDE_INT v)
940 {
941   if (char *ptr = write (1))
942     {
943       if (v <= 0x3f && v >= -0x40)
944 	*ptr = v & 0x7f;
945       else
946 	{
947 	  unsigned bytes = 0;
948 	  HOST_WIDE_INT probe;
949 	  if (v >= 0)
950 	    for (probe = v >> 8; probe > 0x7; probe >>= 8)
951 	      bytes++;
952 	  else
953 	    for (probe = v >> 8; probe < -0x8; probe >>= 8)
954 	      bytes++;
955 	  *ptr = 0x80 | bytes << 4 | (probe & 0xf);
956 	  if ((ptr = write (++bytes)))
957 	    for (; bytes--; v >>= 8)
958 	      ptr[bytes] = v & 0xff;
959 	}
960     }
961 }
962 
963 HOST_WIDE_INT
964 bytes_in::wi ()
965 {
966   HOST_WIDE_INT v = 0;
967   if (const char *ptr = read (1))
968     {
969       v = *ptr & 0xff;
970       if (v & 0x80)
971 	{
972 	  unsigned bytes = (v >> 4) & 0x7;
973 	  v &= 0xf;
974 	  if (v & 0x8)
975 	    v |= -1 ^ 0x7;
976 	  /* unsigned necessary due to left shifts of -ve values.  */
977 	  unsigned HOST_WIDE_INT uv = (unsigned HOST_WIDE_INT) v;
978 	  if ((ptr = read (++bytes)))
979 	    while (bytes--)
980 	      uv = (uv << 8) | (*ptr++ & 0xff);
981 	  v = (HOST_WIDE_INT) uv;
982 	}
983       else if (v & 0x40)
984 	v |= -1 ^ 0x3f;
985     }
986 
987   return v;
988 }
989 
990 /* unsigned wide ints are just written as signed wide ints.  */
991 
992 inline void
993 bytes_out::wu (unsigned HOST_WIDE_INT v)
994 {
995   wi ((HOST_WIDE_INT) v);
996 }
997 
998 inline unsigned HOST_WIDE_INT
999 bytes_in::wu ()
1000 {
1001   return (unsigned HOST_WIDE_INT) wi ();
1002 }
1003 
1004 /* size_t written as unsigned or unsigned wide int.  */
1005 
1006 inline void
1007 bytes_out::z (size_t s)
1008 {
1009   if (sizeof (s) == sizeof (unsigned))
1010     u (s);
1011   else
1012     wu (s);
1013 }
1014 
1015 inline size_t
1016 bytes_in::z ()
1017 {
1018   if (sizeof (size_t) == sizeof (unsigned))
1019     return u ();
1020   else
1021     return wu ();
1022 }
1023 
1024 /* Buffer simply memcpied.  */
1025 void *
1026 bytes_out::buf (size_t len)
1027 {
1028   align (sizeof (void *) * 2);
1029   return write (len);
1030 }
1031 
1032 void
1033 bytes_out::buf (const void *src, size_t len)
1034 {
1035   if (void *ptr = buf (len))
1036     memcpy (ptr, src, len);
1037 }
1038 
1039 const void *
1040 bytes_in::buf (size_t len)
1041 {
1042   align (sizeof (void *) * 2);
1043   const char *ptr = read (len);
1044 
1045   return ptr;
1046 }
1047 
1048 /* Strings are a size_t length, followed by the buffer.  Make sure
1049    there's a NUL terminator on read.  */
1050 
1051 void
1052 bytes_out::str (const char *string, size_t len)
1053 {
1054   z (len);
1055   if (len)
1056     {
1057       gcc_checking_assert (!string[len]);
1058       buf (string, len + 1);
1059     }
1060 }
1061 
1062 const char *
1063 bytes_in::str (size_t *len_p)
1064 {
1065   size_t len = z ();
1066 
1067   /* We're about to trust some user data.  */
1068   if (overrun)
1069     len = 0;
1070   if (len_p)
1071     *len_p = len;
1072   const char *str = NULL;
1073   if (len)
1074     {
1075       str = reinterpret_cast<const char *> (buf (len + 1));
1076       if (!str || str[len])
1077 	{
1078 	  set_overrun ();
1079 	  str = NULL;
1080 	}
1081     }
1082   return str ? str : "";
1083 }
1084 
1085 cpp_hashnode *
1086 bytes_in::cpp_node ()
1087 {
1088   size_t len;
1089   const char *s = str (&len);
1090   if (!len)
1091     return NULL;
1092   return ::cpp_node (get_identifier_with_length (s, len));
1093 }
1094 
1095 /* Format a string directly to the buffer, including a terminating
1096    NUL.  Intended for human consumption.  */
1097 
1098 void
1099 bytes_out::printf (const char *format, ...)
1100 {
1101   va_list args;
1102   /* Exercise buffer expansion.  */
1103   size_t len = EXPERIMENT (10, 500);
1104 
1105   while (char *ptr = write (len))
1106     {
1107       va_start (args, format);
1108       size_t actual = vsnprintf (ptr, len, format, args) + 1;
1109       va_end (args);
1110       if (actual <= len)
1111 	{
1112 	  unuse (len - actual);
1113 	  break;
1114 	}
1115       unuse (len);
1116       len = actual;
1117     }
1118 }
1119 
1120 void
1121 bytes_out::print_time (const char *kind, const tm *time, const char *tz)
1122 {
1123   printf ("%stime: %4u/%02u/%02u %02u:%02u:%02u %s",
1124 	  kind, time->tm_year + 1900, time->tm_mon + 1, time->tm_mday,
1125 	  time->tm_hour, time->tm_min, time->tm_sec, tz);
1126 }
1127 
1128 /* Encapsulated Lazy Records Of Named Declarations.
1129    Header: Stunningly Elf32_Ehdr-like
1130    Sections: Sectional data
1131      [1-N) : User data sections
1132      N .strtab  : strings, stunningly ELF STRTAB-like
1133    Index: Section table, stunningly ELF32_Shdr-like.   */
1134 
1135 class elf {
1136 protected:
1137   /* Constants used within the format.  */
1138   enum private_constants {
1139     /* File kind. */
1140     ET_NONE = 0,
1141     EM_NONE = 0,
1142     OSABI_NONE = 0,
1143 
1144     /* File format. */
1145     EV_CURRENT = 1,
1146     CLASS32 = 1,
1147     DATA2LSB = 1,
1148     DATA2MSB = 2,
1149 
1150     /* Section numbering.  */
1151     SHN_UNDEF = 0,
1152     SHN_LORESERVE = 0xff00,
1153     SHN_XINDEX = 0xffff,
1154 
1155     /* Section types.  */
1156     SHT_NONE = 0,	/* No contents.  */
1157     SHT_PROGBITS = 1, /* Random bytes.  */
1158     SHT_STRTAB = 3,	/* A string table.  */
1159 
1160     /* Section flags.  */
1161     SHF_NONE = 0x00,	/* Nothing.  */
1162     SHF_STRINGS = 0x20,  /* NUL-Terminated strings.  */
1163 
1164     /* I really hope we do not get CMI files larger than 4GB.  */
1165     MY_CLASS = CLASS32,
1166     /* It is host endianness that is relevant.  */
1167     MY_ENDIAN = DATA2LSB
1168 #ifdef WORDS_BIGENDIAN
1169     ^ DATA2LSB ^ DATA2MSB
1170 #endif
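    /* (On big-endian hosts the XOR above flips the value to DATA2MSB.)  */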
1171   };
1172 
1173 public:
1174   /* Constants visible to users.  */
1175   enum public_constants {
1176     /* Special error codes.  Breaking layering a bit.  */
1177     E_BAD_DATA = -1,  /* Random unexpected data errors.  */
1178     E_BAD_LAZY = -2,  /* Badly ordered laziness.  */
1179     E_BAD_IMPORT = -3 /* A nested import failed.  */
1180   };
1181 
1182 protected:
1183   /* File identification.  On-disk representation.  */
1184   struct ident {
1185     uint8_t magic[4];	/* 0x7f, 'E', 'L', 'F' */
1186     uint8_t klass;	/* 4:CLASS32 */
1187     uint8_t data;	/* 5:DATA2[LM]SB */
1188     uint8_t version;	/* 6:EV_CURRENT  */
1189     uint8_t osabi;	/* 7:OSABI_NONE */
1190     uint8_t abiver;	/* 8: 0 */
1191     uint8_t pad[7];	/* 9-15 */
1192   };
1193   /* File header.  On-disk representation.  */
1194   struct header {
1195     struct ident ident;
1196     uint16_t type;	/* ET_NONE */
1197     uint16_t machine;	/* EM_NONE */
1198     uint32_t version;	/* EV_CURRENT */
1199     uint32_t entry;	/* 0 */
1200     uint32_t phoff;	/* 0 */
1201     uint32_t shoff;	/* Section Header Offset in file */
1202     uint32_t flags;
1203     uint16_t ehsize;	/* ELROND Header SIZE -- sizeof (header) */
1204     uint16_t phentsize; /* 0 */
1205     uint16_t phnum;	/* 0 */
1206     uint16_t shentsize; /* Section Header SIZE -- sizeof (section) */
1207     uint16_t shnum;	/* Section Header NUM */
1208     uint16_t shstrndx;	/* Section Header STRing iNDeX */
1209   };
1210   /* File section.  On-disk representation.  */
1211   struct section {
1212     uint32_t name;	/* String table offset.  */
1213     uint32_t type;	/* SHT_* */
1214     uint32_t flags;	/* SHF_* */
1215     uint32_t addr;	/* 0 */
1216     uint32_t offset;	/* OFFSET in file */
1217     uint32_t size;	/* SIZE of section */
1218     uint32_t link;	/* 0 */
1219     uint32_t info;	/* 0 */
1220     uint32_t addralign; /* 0 */
1221     uint32_t entsize;	/* ENTry SIZE, usually 0 */
1222   };
1223 
1224 protected:
1225   data hdr;	/* The header.  */
1226   data sectab; 	/* The section table.  */
1227   data strtab;  /* String table.  */
1228   int fd;   	/* File descriptor we're reading or writing.  */
1229   int err; 	/* Sticky error code.  */
1230 
1231 public:
1232   /* Construct from STREAM.  E is errno if STREAM NULL.  */
1233   elf (int fd, int e)
1234     :hdr (), sectab (), strtab (), fd (fd), err (fd >= 0 ? 0 : e)
1235   {}
1236   ~elf ()
1237   {
1238     gcc_checking_assert (fd < 0 && !hdr.buffer
1239 			 && !sectab.buffer && !strtab.buffer);
1240   }
1241 
1242 public:
1243   /* Return the error, if we have an error.  */
1244   int get_error () const
1245   {
1246     return err;
1247   }
1248   /* Set the error, unless it's already been set.  */
1249   void set_error (int e = E_BAD_DATA)
1250   {
1251     if (!err)
1252       err = e;
1253   }
1254   /* Get an error string.  */
1255   const char *get_error (const char *) const;
1256 
1257 public:
1258   /* Begin reading/writing file.  Return false on error.  */
1259   bool begin () const
1260   {
1261     return !get_error ();
1262   }
1263   /* Finish reading/writing file.  Return false on error.  */
1264   bool end ();
1265 };
1266 
1267 /* Return error string.  */
1268 
1269 const char *
1270 elf::get_error (const char *name) const
1271 {
1272   if (!name)
1273     return "Unknown CMI mapping";
1274 
1275   switch (err)
1276     {
1277     case 0:
1278       gcc_unreachable ();
1279     case E_BAD_DATA:
1280       return "Bad file data";
1281     case E_BAD_IMPORT:
1282       return "Bad import dependency";
1283     case E_BAD_LAZY:
1284       return "Bad lazy ordering";
1285     default:
1286       return xstrerror (err);
1287     }
1288 }
1289 
1290 /* Finish file, return true if there's no error.  */
1291 
1292 bool
1293 elf::end ()
1294 {
1295   /* Close the stream and free the section table.  */
1296   if (fd >= 0 && close (fd))
1297     set_error (errno);
1298   fd = -1;
1299 
1300   return !get_error ();
1301 }
1302 
1303 /* ELROND reader.  */
1304 
1305 class elf_in : public elf {
1306   typedef elf parent;
1307 
1308 private:
1309   /* For freezing & defrosting.  */
1310 #if !defined (HOST_LACKS_INODE_NUMBERS)
1311   dev_t device;
1312   ino_t inode;
1313 #endif
1314 
1315 public:
1316   elf_in (int fd, int e)
1317     :parent (fd, e)
1318   {
1319   }
1320   ~elf_in ()
1321   {
1322   }
1323 
1324 public:
1325   bool is_frozen () const
1326   {
1327     return fd < 0 && hdr.pos;
1328   }
1329   bool is_freezable () const
1330   {
1331     return fd >= 0 && hdr.pos;
1332   }
1333   void freeze ();
1334   bool defrost (const char *);
1335 
1336   /* If BYTES is in the mmapped area, allocate a new buffer for it.  */
1337   void preserve (bytes_in &bytes ATTRIBUTE_UNUSED)
1338   {
1339 #if MAPPED_READING
1340     if (hdr.buffer && bytes.buffer >= hdr.buffer
1341 	&& bytes.buffer < hdr.buffer + hdr.pos)
1342       {
1343 	char *buf = bytes.buffer;
1344 	bytes.buffer = data::simple_memory.grow (NULL, bytes.size);
1345 	memcpy (bytes.buffer, buf, bytes.size);
1346       }
1347 #endif
1348   }
1349   /* If BYTES is not in SELF's mmapped area, free it.  SELF might be
1350      NULL. */
1351   static void release (elf_in *self ATTRIBUTE_UNUSED, bytes_in &bytes)
1352   {
1353 #if MAPPED_READING
1354     if (!(self && self->hdr.buffer && bytes.buffer >= self->hdr.buffer
1355 	  && bytes.buffer < self->hdr.buffer + self->hdr.pos))
1356 #endif
1357       data::simple_memory.shrink (bytes.buffer);
1358     bytes.buffer = NULL;
1359     bytes.size = 0;
1360   }
1361 
1362 public:
1363   static void grow (data &data, unsigned needed)
1364   {
1365     gcc_checking_assert (!data.buffer);
1366 #if !MAPPED_READING
1367     data.buffer = XNEWVEC (char, needed);
1368 #endif
1369     data.size = needed;
1370   }
1371   static void shrink (data &data)
1372   {
1373 #if !MAPPED_READING
1374     XDELETEVEC (data.buffer);
1375 #endif
1376     data.buffer = NULL;
1377     data.size = 0;
1378   }
1379 
1380 public:
1381   const section *get_section (unsigned s) const
1382   {
1383     if (s * sizeof (section) < sectab.size)
1384       return reinterpret_cast<const section *>
1385 	(&sectab.buffer[s * sizeof (section)]);
1386     else
1387       return NULL;
1388   }
1389   unsigned get_section_limit () const
1390   {
1391     return sectab.size / sizeof (section);
1392   }
1393 
1394 protected:
1395   const char *read (data *, unsigned, unsigned);
1396 
1397 public:
1398   /* Read section by number.  */
1399   bool read (data *d, const section *s)
1400   {
1401     return s && read (d, s->offset, s->size);
1402   }
1403 
1404   /* Find section by name.  */
1405   unsigned find (const char *name);
1406   /* Find section by index.  */
1407   const section *find (unsigned snum, unsigned type = SHT_PROGBITS);
1408 
1409 public:
1410   /* Release the string table, when we're done with it.  */
1411   void release ()
1412   {
1413     shrink (strtab);
1414   }
1415 
1416 public:
1417   bool begin (location_t);
1418   bool end ()
1419   {
1420     release ();
1421 #if MAPPED_READING
1422     if (hdr.buffer)
1423       munmap (hdr.buffer, hdr.pos);
1424     hdr.buffer = NULL;
1425 #endif
1426     shrink (sectab);
1427 
1428     return parent::end ();
1429   }
1430 
1431 public:
1432   /* Return string name at OFFSET.  Checks OFFSET range.  Always
1433      returns non-NULL.  We know offset 0 is an empty string.  */
1434   const char *name (unsigned offset)
1435   {
1436     return &strtab.buffer[offset < strtab.size ? offset : 0];
1437   }
1438 };
1439 
1440 /* ELROND writer.  */
1441 
1442 class elf_out : public elf, public data::allocator {
1443   typedef elf parent;
1444   /* Desired section alignment on disk.  */
1445   static const int SECTION_ALIGN = 16;
1446 
1447 private:
1448   ptr_int_hash_map identtab;	/* Map of IDENTIFIERS to strtab offsets. */
1449   unsigned pos;			/* Write position in file.  */
1450 #if MAPPED_WRITING
1451   unsigned offset;		/* Offset of the mapping.  */
1452   unsigned extent;		/* Length of mapping.  */
1453   unsigned page_size;		/* System page size.  */
1454 #endif
1455 
1456 public:
1457   elf_out (int fd, int e)
1458     :parent (fd, e), identtab (500), pos (0)
1459   {
1460 #if MAPPED_WRITING
1461     offset = extent = 0;
1462     page_size = sysconf (_SC_PAGE_SIZE);
1463     if (page_size < SECTION_ALIGN)
1464       /* Something really strange.  */
1465       set_error (EINVAL);
1466 #endif
1467   }
1468   ~elf_out ()
1469   {
1470     data::simple_memory.shrink (hdr);
1471     data::simple_memory.shrink (sectab);
1472     data::simple_memory.shrink (strtab);
1473   }
1474 
1475 #if MAPPED_WRITING
1476 private:
1477   void create_mapping (unsigned ext, bool extending = true);
1478   void remove_mapping ();
1479 #endif
1480 
1481 protected:
1482   using allocator::grow;
1483   virtual char *grow (char *, unsigned needed);
1484 #if MAPPED_WRITING
1485   using allocator::shrink;
1486   virtual void shrink (char *);
1487 #endif
1488 
1489 public:
1490   unsigned get_section_limit () const
1491   {
1492     return sectab.pos / sizeof (section);
1493   }
1494 
1495 protected:
1496   unsigned add (unsigned type, unsigned name = 0,
1497 		unsigned off = 0, unsigned size = 0, unsigned flags = SHF_NONE);
1498   unsigned write (const data &);
1499 #if MAPPED_WRITING
1500   unsigned write (const bytes_out &);
1501 #endif
1502 
1503 public:
1504   /* IDENTIFIER to strtab offset.  */
1505   unsigned name (tree ident);
1506   /* String literal to strtab offset.  */
1507   unsigned name (const char *n);
1508   /* Qualified name of DECL to strtab offset.  */
1509   unsigned qualified_name (tree decl, bool is_defn);
1510 
1511 private:
1512   unsigned strtab_write (const char *s, unsigned l);
1513   void strtab_write (tree decl, int);
1514 
1515 public:
1516   /* Add a section with contents or strings.  */
1517   unsigned add (const bytes_out &, bool string_p, unsigned name);
1518 
1519 public:
1520   /* Begin and end writing.  */
1521   bool begin ();
1522   bool end ();
1523 };
1524 
1525 /* Begin reading section NAME (of type PROGBITS) from SOURCE.
1526    Data always checked for CRC.  */
1527 
1528 bool
1529 bytes_in::begin (location_t loc, elf_in *source, const char *name)
1530 {
1531   unsigned snum = source->find (name);
1532 
1533   return begin (loc, source, snum, name);
1534 }
1535 
1536 /* Begin reading section numbered SNUM with NAME (may be NULL).  */
1537 
1538 bool
1539 bytes_in::begin (location_t loc, elf_in *source, unsigned snum, const char *name)
1540 {
1541   if (!source->read (this, source->find (snum))
1542       || !size || !check_crc ())
1543     {
1544       source->set_error (elf::E_BAD_DATA);
1545       source->shrink (*this);
1546       if (name)
1547 	error_at (loc, "section %qs is missing or corrupted", name);
1548       else
1549 	error_at (loc, "section #%u is missing or corrupted", snum);
1550       return false;
1551     }
1552   pos = 4;
1553   return true;
1554 }
1555 
1556 /* Finish reading a section.  */
1557 
1558 bool
1559 bytes_in::end (elf_in *src)
1560 {
1561   if (more_p ())
1562     set_overrun ();
1563   if (overrun)
1564     src->set_error ();
1565 
1566   src->shrink (*this);
1567 
1568   return !overrun;
1569 }
1570 
1571 /* Begin writing buffer.  */
1572 
1573 void
1574 bytes_out::begin (bool need_crc)
1575 {
1576   if (need_crc)
1577     pos = 4;
1578   memory->grow (*this, 0, false);
1579 }
1580 
1581 /* Finish writing buffer.  Stream out to SINK as named section NAME.
1582    Return section number or 0 on failure.  If CRC_PTR is non-null, crc
1583    the data.  Otherwise it is a string section.  */
1584 
1585 unsigned
1586 bytes_out::end (elf_out *sink, unsigned name, unsigned *crc_ptr)
1587 {
1588   lengths[3] += pos;
1589   spans[3]++;
1590 
1591   set_crc (crc_ptr);
1592   unsigned sec_num = sink->add (*this, !crc_ptr, name);
1593   memory->shrink (*this);
1594 
1595   return sec_num;
1596 }
1597 
1598 /* Close and open the file, without destroying it.  */
1599 
1600 void
1601 elf_in::freeze ()
1602 {
1603   gcc_checking_assert (!is_frozen ());
1604 #if MAPPED_READING
1605   if (munmap (hdr.buffer, hdr.pos) < 0)
1606     set_error (errno);
1607 #endif
1608   if (close (fd) < 0)
1609     set_error (errno);
1610   fd = -1;
1611 }
1612 
1613 bool
1614 elf_in::defrost (const char *name)
1615 {
1616   gcc_checking_assert (is_frozen ());
1617   struct stat stat;
1618 
1619   fd = open (name, O_RDONLY | O_CLOEXEC | O_BINARY);
1620   if (fd < 0 || fstat (fd, &stat) < 0)
1621     set_error (errno);
1622   else
1623     {
1624       bool ok = hdr.pos == unsigned (stat.st_size);
1625 #ifndef HOST_LACKS_INODE_NUMBERS
1626       if (device != stat.st_dev
1627 	  || inode != stat.st_ino)
1628 	ok = false;
1629 #endif
1630       if (!ok)
1631 	set_error (EMFILE);
1632 #if MAPPED_READING
1633       if (ok)
1634 	{
1635 	  char *mapping = reinterpret_cast<char *>
1636 	    (mmap (NULL, hdr.pos, PROT_READ, MAP_SHARED, fd, 0));
1637 	  if (mapping == MAP_FAILED)
1638 	  fail:
1639 	      set_error (errno);
1640 	  else
1641 	    {
1642 	      if (madvise (mapping, hdr.pos, MADV_RANDOM))
1643 		goto fail;
1644 
1645 	      /* These buffers are never NULL in this case.  */
1646 	      strtab.buffer = mapping + strtab.pos;
1647 	      sectab.buffer = mapping + sectab.pos;
1648 	      hdr.buffer = mapping;
1649 	    }
1650 	}
1651 #endif
1652     }
1653 
1654   return !get_error ();
1655 }
1656 
1657 /* Read LENGTH bytes at POS into DATA.  Return the buffer, or NULL on error.  */
1658 
1659 const char *
1660 elf_in::read (data *data, unsigned pos, unsigned length)
1661 {
1662 #if MAPPED_READING
1663   if (pos + length > hdr.pos)
1664     {
1665       set_error (EINVAL);
1666       return NULL;
1667     }
1668 #else
1669   if (pos != ~0u && lseek (fd, pos, SEEK_SET) < 0)
1670     {
1671       set_error (errno);
1672       return NULL;
1673     }
1674 #endif
1675   grow (*data, length);
1676 #if MAPPED_READING
1677   data->buffer = hdr.buffer + pos;
1678 #else
1679   if (::read (fd, data->buffer, data->size) != ssize_t (length))
1680     {
1681       set_error (errno);
1682       shrink (*data);
1683       return NULL;
1684     }
1685 #endif
1686 
1687   return data->buffer;
1688 }
1689 
1690 /* Find section SNUM of TYPE.  Return section pointer or NULL on error.  */
1691 
1692 const elf::section *
1693 elf_in::find (unsigned snum, unsigned type)
1694 {
1695   const section *sec = get_section (snum);
1696   if (!snum || !sec || sec->type != type)
1697     return NULL;
1698   return sec;
1699 }
1700 
1701 /* Find a section by NAME.  Return its section number, or zero on
1702    failure.  */
1703 
1704 unsigned
1705 elf_in::find (const char *sname)
1706 {
1707   for (unsigned pos = sectab.size; pos -= sizeof (section); )
1708     {
1709       const section *sec
1710 	= reinterpret_cast<const section *> (&sectab.buffer[pos]);
1711 
1712       if (0 == strcmp (sname, name (sec->name)))
1713 	return pos / sizeof (section);
1714     }
1715 
1716   return 0;
1717 }
1718 
1719 /* Begin reading file.  Verify header.  Pull in section and string
1720    tables.  Return true on success.  */
1721 
1722 bool
1723 elf_in::begin (location_t loc)
1724 {
1725   if (!parent::begin ())
1726     return false;
1727 
1728   struct stat stat;
1729   unsigned size = 0;
1730   if (!fstat (fd, &stat))
1731     {
1732 #if !defined (HOST_LACKS_INODE_NUMBERS)
1733       device = stat.st_dev;
1734       inode = stat.st_ino;
1735 #endif
1736       /* Never generate files > 4GB, check we've not been given one.  */
1737       if (stat.st_size == unsigned (stat.st_size))
1738 	size = unsigned (stat.st_size);
1739     }
1740 
1741 #if MAPPED_READING
1742   /* MAP_SHARED so that the file is backing store.  If someone else
1743      concurrently writes it, they're wrong.  */
1744   void *mapping = mmap (NULL, size, PROT_READ, MAP_SHARED, fd, 0);
1745   if (mapping == MAP_FAILED)
1746     {
1747     fail:
1748       set_error (errno);
1749       return false;
1750     }
1751   /* We'll be hopping over this randomly.  Some systems declare the
1752      first parm as char *, and others declare it as void *.  */
1753   if (madvise (reinterpret_cast <char *> (mapping), size, MADV_RANDOM))
1754     goto fail;
1755 
1756   hdr.buffer = (char *)mapping;
1757 #else
1758   read (&hdr, 0, sizeof (header));
1759 #endif
1760   hdr.pos = size; /* Record size of the file.  */
1761 
1762   const header *h = reinterpret_cast<const header *> (hdr.buffer);
1763   if (!h)
1764     return false;
1765 
1766   if (h->ident.magic[0] != 0x7f
1767       || h->ident.magic[1] != 'E'
1768       || h->ident.magic[2] != 'L'
1769       || h->ident.magic[3] != 'F')
1770     {
1771       error_at (loc, "not Encapsulated Lazy Records of Named Declarations");
1772     failed:
1773       shrink (hdr);
1774       return false;
1775     }
1776 
1777   /* We expect a particular format -- the ELF is not intended to be
1778      distributable.  */
1779   if (h->ident.klass != MY_CLASS
1780       || h->ident.data != MY_ENDIAN
1781       || h->ident.version != EV_CURRENT
1782       || h->type != ET_NONE
1783       || h->machine != EM_NONE
1784       || h->ident.osabi != OSABI_NONE)
1785     {
1786       error_at (loc, "unexpected encapsulation format or type");
1787       goto failed;
1788     }
1789 
1790   int e = -1;
1791   if (!h->shoff || h->shentsize != sizeof (section))
1792     {
1793     malformed:
1794       set_error (e);
1795       error_at (loc, "encapsulation is malformed");
1796       goto failed;
1797     }
1798 
1799   unsigned strndx = h->shstrndx;
1800   unsigned shnum = h->shnum;
1801   if (shnum == SHN_XINDEX)
1802     {
1803       if (!read (&sectab, h->shoff, sizeof (section)))
1804 	{
1805 	section_table_fail:
1806 	  e = errno;
1807 	  goto malformed;
1808 	}
1809       shnum = get_section (0)->size;
1810       /* Freeing does mean we'll re-read it in the case we're not
1811 	 mapping, but this is going to be rare.  */
1812       shrink (sectab);
1813     }
1814 
1815   if (!shnum)
1816     goto malformed;
1817 
1818   if (!read (&sectab, h->shoff, shnum * sizeof (section)))
1819     goto section_table_fail;
1820 
1821   if (strndx == SHN_XINDEX)
1822     strndx = get_section (0)->link;
1823 
1824   if (!read (&strtab, find (strndx, SHT_STRTAB)))
1825     goto malformed;
1826 
1827   /* The string table should be at least one byte, with NUL chars
1828      at either end.  */
1829   if (!(strtab.size && !strtab.buffer[0]
1830 	&& !strtab.buffer[strtab.size - 1]))
1831     goto malformed;
1832 
1833 #if MAPPED_READING
1834   /* Record the offsets of the section and string tables.  */
1835   sectab.pos = h->shoff;
1836   strtab.pos = shnum * sizeof (section);
1837 #else
1838   shrink (hdr);
1839 #endif
1840 
1841   return true;
1842 }
1843 
1844 /* Create a new mapping.  */
1845 
1846 #if MAPPED_WRITING
1847 void
1848 elf_out::create_mapping (unsigned ext, bool extending)
1849 {
1850 #ifndef HAVE_POSIX_FALLOCATE
1851 #define posix_fallocate(fd,off,len) ftruncate (fd, off + len)
1852 #endif
1853   void *mapping = MAP_FAILED;
1854   if (extending && ext < 1024 * 1024)
1855     {
1856       if (!posix_fallocate (fd, offset, ext * 2))
1857 	mapping = mmap (NULL, ext * 2, PROT_READ | PROT_WRITE,
1858 			MAP_SHARED, fd, offset);
1859       if (mapping != MAP_FAILED)
1860 	ext *= 2;
1861     }
1862   if (mapping == MAP_FAILED)
1863     {
1864       if (!extending || !posix_fallocate (fd, offset, ext))
1865 	mapping = mmap (NULL, ext, PROT_READ | PROT_WRITE,
1866 			MAP_SHARED, fd, offset);
1867       if (mapping == MAP_FAILED)
1868 	{
1869 	  set_error (errno);
1870 	  mapping = NULL;
1871 	  ext = 0;
1872 	}
1873     }
1874 #undef posix_fallocate
1875   hdr.buffer = (char *)mapping;
1876   extent = ext;
1877 }
1878 #endif
1879 
1880 /* Flush out the current mapping.  */
1881 
1882 #if MAPPED_WRITING
1883 void
1884 elf_out::remove_mapping ()
1885 {
1886   if (hdr.buffer)
1887     {
1888       /* MS_ASYNC does the right thing with the removed mapping,
1889 	 including a subsequent overlapping remap.  */
1890       if (msync (hdr.buffer, extent, MS_ASYNC)
1891 	  || munmap (hdr.buffer, extent))
1892 	/* We're somewhat screwed at this point.  */
1893 	set_error (errno);
1894     }
1895 
1896   hdr.buffer = NULL;
1897 }
1898 #endif
1899 
1900 /* Grow a mapping of PTR to be NEEDED bytes long.  This gets
1901    interesting if the new size grows the EXTENT.  */
1902 
1903 char *
1904 elf_out::grow (char *data, unsigned needed)
1905 {
1906   if (!data)
1907     {
1908       /* First allocation, check we're aligned.  */
1909       gcc_checking_assert (!(pos & (SECTION_ALIGN - 1)));
1910 #if MAPPED_WRITING
1911       data = hdr.buffer + (pos - offset);
1912 #endif
1913     }
1914 
1915 #if MAPPED_WRITING
1916   unsigned off = data - hdr.buffer;
1917   if (off + needed > extent)
1918     {
1919       /* We need to grow the mapping.  */
1920       unsigned lwm = off & ~(page_size - 1);
1921       unsigned hwm = (off + needed + page_size - 1) & ~(page_size - 1);
1922 
1923       gcc_checking_assert (hwm > extent);
1924 
1925       remove_mapping ();
1926 
1927       offset += lwm;
1928       create_mapping (extent < hwm - lwm ? hwm - lwm : extent);
1929 
1930       data = hdr.buffer + (off - lwm);
1931     }
1932 #else
1933   data = allocator::grow (data, needed);
1934 #endif
1935 
1936   return data;
1937 }
1938 
1939 #if MAPPED_WRITING
1940 /* Shrinking is a NOP.  */
1941 void
1942 elf_out::shrink (char *)
1943 {
1944 }
1945 #endif
1946 
1947 /* Write S of length L to the strtab buffer.  L must include the ending
1948    NUL, if that's what you want.  */
1949 
1950 unsigned
1951 elf_out::strtab_write (const char *s, unsigned l)
1952 {
1953   if (strtab.pos + l > strtab.size)
1954     data::simple_memory.grow (strtab, strtab.pos + l, false);
1955   memcpy (strtab.buffer + strtab.pos, s, l);
1956   unsigned res = strtab.pos;
1957   strtab.pos += l;
1958   return res;
1959 }
1960 
1961 /* Write qualified name of decl.  INNER >0 if this is a definition, <0
1962    if this is a qualifier of an outer name.  */
1963 
1964 void
1965 elf_out::strtab_write (tree decl, int inner)
1966 {
1967   tree ctx = CP_DECL_CONTEXT (decl);
1968   if (TYPE_P (ctx))
1969     ctx = TYPE_NAME (ctx);
1970   if (ctx != global_namespace)
1971     strtab_write (ctx, -1);
1972 
1973   tree name = DECL_NAME (decl);
1974   if (!name)
1975     name = DECL_ASSEMBLER_NAME_RAW (decl);
1976   strtab_write (IDENTIFIER_POINTER (name), IDENTIFIER_LENGTH (name));
1977 
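  /* INNER selects a two-character suffix from the "::{}" literal below:
     -1 yields "::" (this decl qualifies an inner name), +1 yields "{}"
     (this decl is the definition), and 0 appends nothing.  */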
1978   if (inner)
1979     strtab_write (&"::{}"[inner+1], 2);
1980 }
1981 
1982 /* Map IDENTIFIER IDENT to strtab offset.  Inserts into strtab if not
1983    already there.  */
1984 
1985 unsigned
1986 elf_out::name (tree ident)
1987 {
1988   unsigned res = 0;
1989   if (ident)
1990     {
1991       bool existed;
1992       int *slot = &identtab.get_or_insert (ident, &existed);
1993       if (!existed)
1994 	*slot = strtab_write (IDENTIFIER_POINTER (ident),
1995 			      IDENTIFIER_LENGTH (ident) + 1);
1996       res = *slot;
1997     }
1998   return res;
1999 }
2000 
2001 /* Map LITERAL to strtab offset.  Does not detect duplicates and
2002    expects LITERAL to remain live until strtab is written out.  */
2003 
2004 unsigned
2005 elf_out::name (const char *literal)
2006 {
2007   return strtab_write (literal, strlen (literal) + 1);
2008 }
2009 
2010 /* Map a DECL's qualified name to strtab offset.  Does not detect
2011    duplicates.  */
2012 
2013 unsigned
2014 elf_out::qualified_name (tree decl, bool is_defn)
2015 {
2016   gcc_checking_assert (DECL_P (decl) && decl != global_namespace);
2017   unsigned result = strtab.pos;
2018 
2019   strtab_write (decl, is_defn);
2020   strtab_write ("", 1);
2021 
2022   return result;
2023 }
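
/* For example, for a declaration of N::f this produces the strtab bytes
   "N::f" followed by a NUL, and "N::f{}" plus NUL when IS_DEFN is true.  */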
2024 
2025 /* Add section to file.  Return section number.  TYPE & NAME identify
2026    the section.  OFF and SIZE identify the file location of its
2027    data.  FLAGS contains additional info.  */
2028 
2029 unsigned
2030 elf_out::add (unsigned type, unsigned name, unsigned off, unsigned size,
2031 	      unsigned flags)
2032 {
2033   gcc_checking_assert (!(off & (SECTION_ALIGN - 1)));
2034   if (sectab.pos + sizeof (section) > sectab.size)
2035     data::simple_memory.grow (sectab, sectab.pos + sizeof (section), false);
2036   section *sec = reinterpret_cast<section *> (sectab.buffer + sectab.pos);
2037   memset (sec, 0, sizeof (section));
2038   sec->type = type;
2039   sec->flags = flags;
2040   sec->name = name;
2041   sec->offset = off;
2042   sec->size = size;
2043   if (flags & SHF_STRINGS)
2044     sec->entsize = 1;
2045 
2046   unsigned res = sectab.pos;
2047   sectab.pos += sizeof (section);
2048   return res / sizeof (section);
2049 }
2050 
2051 /* Pad to the next alignment boundary, then write BUFFER to disk.
2052    Return the position of the start of the write, or zero on failure.   */
2053 
2054 unsigned
2055 elf_out::write (const data &buffer)
2056 {
2057 #if MAPPED_WRITING
2058   /* HDR is always mapped.  */
2059   if (&buffer != &hdr)
2060     {
2061       bytes_out out (this);
2062       grow (out, buffer.pos, true);
2063       if (out.buffer)
2064 	memcpy (out.buffer, buffer.buffer, buffer.pos);
2065       shrink (out);
2066     }
2067   else
2068     /* We should have been aligned during the first allocation.  */
2069     gcc_checking_assert (!(pos & (SECTION_ALIGN - 1)));
2070 #else
2071   if (::write (fd, buffer.buffer, buffer.pos) != ssize_t (buffer.pos))
2072     {
2073       set_error (errno);
2074       return 0;
2075     }
2076 #endif
2077   unsigned res = pos;
2078   pos += buffer.pos;
2079 
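  /* -POS & (SECTION_ALIGN - 1) is the usual round-up-to-alignment trick:
     e.g. with an alignment of 16 and POS of 37 it yields 11, taking POS
     to 48.  */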
2080   if (unsigned padding = -pos & (SECTION_ALIGN - 1))
2081     {
2082 #if !MAPPED_WRITING
2083       /* Align the section on disk, should help the necessary copies.
2084 	 fseeking to extend is non-portable.  */
2085       static char zero[SECTION_ALIGN];
2086       if (::write (fd, &zero, padding) != ssize_t (padding))
2087 	set_error (errno);
2088 #endif
2089       pos += padding;
2090     }
2091   return res;
2092 }
2093 
2094 /* Write a streaming buffer.  It must be using us as an allocator.  */
2095 
2096 #if MAPPED_WRITING
2097 unsigned
2098 elf_out::write (const bytes_out &buf)
2099 {
2100   gcc_checking_assert (buf.memory == this);
2101   /* A directly mapped buffer.  */
2102   gcc_checking_assert (buf.buffer - hdr.buffer >= 0
2103 		       && buf.buffer - hdr.buffer + buf.size <= extent);
2104   unsigned res = pos;
2105   pos += buf.pos;
2106 
2107   /* Align up.  We're not going to advance into the next page. */
2108   pos += -pos & (SECTION_ALIGN - 1);
2109 
2110   return res;
2111 }
2112 #endif
2113 
2114 /* Write data and add section.  STRING_P is true for a string
2115    section, false for PROGBITS.  NAME identifies the section (0 is the
2116    empty name).  DATA is the contents.  Return section number or 0 on
2117    failure (0 is the undef section).  */
2118 
2119 unsigned
2120 elf_out::add (const bytes_out &data, bool string_p, unsigned name)
2121 {
2122   unsigned off = write (data);
2123 
2124   return add (string_p ? SHT_STRTAB : SHT_PROGBITS, name,
2125 	      off, data.pos, string_p ? SHF_STRINGS : SHF_NONE);
2126 }
2127 
2128 /* Begin writing the file.  Initialize the section table and write an
2129    empty header.  Return false on failure.  */
2130 
2131 bool
2132 elf_out::begin ()
2133 {
2134   if (!parent::begin ())
2135     return false;
2136 
2137   /* Let the allocators pick a default.  */
2138   data::simple_memory.grow (strtab, 0, false);
2139   data::simple_memory.grow (sectab, 0, false);
2140 
2141   /* The string table starts with an empty string.  */
2142   name ("");
2143 
2144   /* Create the UNDEF section.  */
2145   add (SHT_NONE);
2146 
2147 #if MAPPED_WRITING
2148   /* Start a mapping.  */
2149   create_mapping (EXPERIMENT (page_size,
2150 			      (32767 + page_size) & ~(page_size - 1)));
2151   if (!hdr.buffer)
2152     return false;
2153 #endif
2154 
2155   /* Write an empty header.  */
2156   grow (hdr, sizeof (header), true);
2157   header *h = reinterpret_cast<header *> (hdr.buffer);
2158   memset (h, 0, sizeof (header));
2159   hdr.pos = hdr.size;
2160   write (hdr);
2161   return !get_error ();
2162 }
2163 
2164 /* Finish writing the file.  Write out the string & section tables.
2165    Fill in the header.  Return true on error.  */
2166 
2167 bool
2168 elf_out::end ()
2169 {
2170   if (fd >= 0)
2171     {
2172       /* Write the string table.  */
2173       unsigned strnam = name (".strtab");
2174       unsigned stroff = write (strtab);
2175       unsigned strndx = add (SHT_STRTAB, strnam, stroff, strtab.pos,
2176 			     SHF_STRINGS);
2177 
2178       /* Store escape values in section[0].  */
2179       if (strndx >= SHN_LORESERVE)
2180 	{
2181 	  reinterpret_cast<section *> (sectab.buffer)->link = strndx;
2182 	  strndx = SHN_XINDEX;
2183 	}
2184       unsigned shnum = sectab.pos / sizeof (section);
2185       if (shnum >= SHN_LORESERVE)
2186 	{
2187 	  reinterpret_cast<section *> (sectab.buffer)->size = shnum;
2188 	  shnum = SHN_XINDEX;
2189 	}
2190 
2191       unsigned shoff = write (sectab);
2192 
2193 #if MAPPED_WRITING
2194       if (offset)
2195 	{
2196 	  remove_mapping ();
2197 	  offset = 0;
2198 	  create_mapping ((sizeof (header) + page_size - 1) & ~(page_size - 1),
2199 			  false);
2200 	}
2201       unsigned length = pos;
2202 #else
2203       if (lseek (fd, 0, SEEK_SET) < 0)
2204 	set_error (errno);
2205 #endif
2206       /* Write header.  */
2207       if (!get_error ())
2208 	{
2209 	  /* Write the correct header now.  */
2210 	  header *h = reinterpret_cast<header *> (hdr.buffer);
2211 	  h->ident.magic[0] = 0x7f;
2212 	  h->ident.magic[1] = 'E';	/* Elrond */
2213 	  h->ident.magic[2] = 'L';	/* is an */
2214 	  h->ident.magic[3] = 'F';	/* elf.  */
2215 	  h->ident.klass = MY_CLASS;
2216 	  h->ident.data =  MY_ENDIAN;
2217 	  h->ident.version = EV_CURRENT;
2218 	  h->ident.osabi = OSABI_NONE;
2219 	  h->type = ET_NONE;
2220 	  h->machine = EM_NONE;
2221 	  h->version = EV_CURRENT;
2222 	  h->shoff = shoff;
2223 	  h->ehsize = sizeof (header);
2224 	  h->shentsize = sizeof (section);
2225 	  h->shnum = shnum;
2226 	  h->shstrndx = strndx;
2227 
2228 	  pos = 0;
2229 	  write (hdr);
2230 	}
2231 
2232 #if MAPPED_WRITING
2233       remove_mapping ();
2234       if (ftruncate (fd, length))
2235 	set_error (errno);
2236 #endif
2237     }
2238 
2239   data::simple_memory.shrink (sectab);
2240   data::simple_memory.shrink (strtab);
2241 
2242   return parent::end ();
2243 }
2244 
2245 /********************************************************************/
2246 
2247 /* A dependency set.  This is used during stream out to determine the
2248    connectivity of the graph.  Every namespace-scope declaration that
2249    needs writing has a depset.  The depset is filled with the (depsets
2250    of) declarations within this module that it references.  For a
2251    declaration that'll generally be named types.  For definitions
2252    it'll also be declarations in the body.
2253 
2254    From that we can convert the graph to a DAG, via determining the
2255    Strongly Connected Clusters.  Each cluster is streamed
2256    independently, and thus we achieve lazy loading.
2257 
2258    Other decls that get a depset are namespaces themselves and
2259    unnameable declarations.   */
2260 
2261 class depset {
2262 private:
2263   tree entity;  /* Entity, or containing namespace.  */
2264   uintptr_t discriminator;  /* Flags or identifier.  */
2265 
2266 public:
2267   /* The kinds of entity the depset could describe.  The ordering is
2268      significant, see entity_kind_name.  */
2269   enum entity_kind
2270   {
2271     EK_DECL,		/* A decl.  */
2272     EK_SPECIALIZATION,  /* A specialization.  */
2273     EK_PARTIAL,		/* A partial specialization.  */
2274     EK_USING,		/* A using declaration (at namespace scope).  */
2275     EK_NAMESPACE,	/* A namespace.  */
2276     EK_REDIRECT,	/* Redirect to a template_decl.  */
2277     EK_EXPLICIT_HWM,
2278     EK_BINDING = EK_EXPLICIT_HWM, /* Implicitly encoded.  */
2279     EK_FOR_BINDING,	/* A decl being inserted for a binding.  */
2280     EK_INNER_DECL,	/* A decl defined outside of its imported
2281 			   context.  */
2282     EK_DIRECT_HWM = EK_PARTIAL + 1,
2283 
2284     EK_BITS = 3		/* Only need to encode below EK_EXPLICIT_HWM.  */
2285   };
2286 
2287 private:
2288   /* Placement of bit fields in discriminator.  */
2289   enum disc_bits
2290   {
2291     DB_ZERO_BIT, /* Set to disambiguate identifier from flags  */
2292     DB_SPECIAL_BIT, /* First dep slot is special.  */
2293     DB_KIND_BIT, /* Kind of the entity.  */
2294     DB_KIND_BITS = EK_BITS,
2295     DB_DEFN_BIT = DB_KIND_BIT + DB_KIND_BITS,
2296     DB_IS_MEMBER_BIT,		/* Is an out-of-class member.  */
2297     DB_IS_INTERNAL_BIT,		/* It is an (erroneous)
2298 				   internal-linkage entity.  */
2299     DB_REFS_INTERNAL_BIT,	/* Refers to an internal-linkage
2300 				   entity. */
2301     DB_IMPORTED_BIT,		/* An imported entity.  */
2302     DB_UNREACHED_BIT,		/* A yet-to-be reached entity.  */
2303     DB_HIDDEN_BIT,		/* A hidden binding.  */
2304     /* The following bits are not independent, but enumerating them is
2305        awkward.  */
2306     DB_ALIAS_TMPL_INST_BIT,	/* An alias template instantiation. */
2307     DB_ALIAS_SPEC_BIT,		/* Specialization of an alias template
2308 				   (in both spec tables).  */
2309     DB_TYPE_SPEC_BIT,		/* Specialization in the type table.
2310 				   */
2311     DB_FRIEND_SPEC_BIT,		/* An instantiated template friend.  */
2312   };
2313 
2314 public:
2315   /* The first slot is special: for EK_SPECIALIZATIONS it is a
2316      spec_entry pointer.  It is not relevant for the SCC
2317      determination.  */
2318   vec<depset *> deps;  /* Depsets we reference.  */
2319 
2320 public:
2321   unsigned cluster; /* Strongly connected cluster, later entity number  */
2322   unsigned section; /* Section written to.  */
2323   /* During SCC construction, section is lowlink, until the depset is
2324      removed from the stack.  See Tarjan algorithm for details.  */
2325 
2326 private:
2327   /* Construction via factories.  Destruction via hash traits.  */
2328   depset (tree entity);
2329   ~depset ();
2330 
2331 public:
2332   static depset *make_binding (tree, tree);
2333   static depset *make_entity (tree, entity_kind, bool = false);
2334   /* Late setting a binding name -- /then/ insert into hash!  */
2335   inline void set_binding_name (tree name)
2336   {
2337     gcc_checking_assert (!get_name ());
2338     discriminator = reinterpret_cast<uintptr_t> (name);
2339   }
2340 
2341 private:
2342   template<unsigned I> void set_flag_bit ()
2343   {
2344     gcc_checking_assert (I < 2 || !is_binding ());
2345     discriminator |= 1u << I;
2346   }
2347   template<unsigned I> void clear_flag_bit ()
2348   {
2349     gcc_checking_assert (I < 2 || !is_binding ());
2350     discriminator &= ~(1u << I);
2351   }
2352   template<unsigned I> bool get_flag_bit () const
2353   {
2354     gcc_checking_assert (I < 2 || !is_binding ());
2355     return bool ((discriminator >> I) & 1);
2356   }
2357 
2358 public:
2359   bool is_binding () const
2360   {
2361     return !get_flag_bit<DB_ZERO_BIT> ();
2362   }
2363   entity_kind get_entity_kind () const
2364   {
2365     if (is_binding ())
2366       return EK_BINDING;
2367     return entity_kind ((discriminator >> DB_KIND_BIT) & ((1u << EK_BITS) - 1));
2368   }
2369   const char *entity_kind_name () const;
2370 
2371 public:
2372   bool has_defn () const
2373   {
2374     return get_flag_bit<DB_DEFN_BIT> ();
2375   }
2376 
2377 public:
2378   /* This class-member is defined here, but the class was imported.  */
2379   bool is_member () const
2380   {
2381     gcc_checking_assert (get_entity_kind () == EK_DECL);
2382     return get_flag_bit<DB_IS_MEMBER_BIT> ();
2383   }
2384 public:
2385   bool is_internal () const
2386   {
2387     return get_flag_bit<DB_IS_INTERNAL_BIT> ();
2388   }
2389   bool refs_internal () const
2390   {
2391     return get_flag_bit<DB_REFS_INTERNAL_BIT> ();
2392   }
2393   bool is_import () const
2394   {
2395     return get_flag_bit<DB_IMPORTED_BIT> ();
2396   }
2397   bool is_unreached () const
2398   {
2399     return get_flag_bit<DB_UNREACHED_BIT> ();
2400   }
2401   bool is_alias_tmpl_inst () const
2402   {
2403     return get_flag_bit<DB_ALIAS_TMPL_INST_BIT> ();
2404   }
2405   bool is_alias () const
2406   {
2407     return get_flag_bit<DB_ALIAS_SPEC_BIT> ();
2408   }
2409   bool is_hidden () const
2410   {
2411     return get_flag_bit<DB_HIDDEN_BIT> ();
2412   }
2413   bool is_type_spec () const
2414   {
2415     return get_flag_bit<DB_TYPE_SPEC_BIT> ();
2416   }
2417   bool is_friend_spec () const
2418   {
2419     return get_flag_bit<DB_FRIEND_SPEC_BIT> ();
2420   }
2421 
2422 public:
2423   /* We set these bits outside of depset.  */
2424   void set_hidden_binding ()
2425   {
2426     set_flag_bit<DB_HIDDEN_BIT> ();
2427   }
2428   void clear_hidden_binding ()
2429   {
2430     clear_flag_bit<DB_HIDDEN_BIT> ();
2431   }
2432 
2433 public:
2434   bool is_special () const
2435   {
2436     return get_flag_bit<DB_SPECIAL_BIT> ();
2437   }
2438   void set_special ()
2439   {
2440     set_flag_bit<DB_SPECIAL_BIT> ();
2441   }
2442 
2443 public:
2444   tree get_entity () const
2445   {
2446     return entity;
2447   }
2448   tree get_name () const
2449   {
2450     gcc_checking_assert (is_binding ());
2451     return reinterpret_cast <tree> (discriminator);
2452   }
2453 
2454 public:
2455   /* Traits for a hash table of pointers to bindings.  */
2456   struct traits {
2457     /* Each entry is a pointer to a depset. */
2458     typedef depset *value_type;
2459     /* We look up by a container:maybe-identifier pair.  */
2460     typedef std::pair<tree,tree> compare_type;
2461 
2462     static const bool empty_zero_p = true;
2463 
2464     /* hash and equality for compare_type.  */
2465     inline static hashval_t hash (const compare_type &p)
2466     {
2467       hashval_t h = pointer_hash<tree_node>::hash (p.first);
2468       if (p.second)
2469 	{
2470 	  hashval_t nh = IDENTIFIER_HASH_VALUE (p.second);
2471 	  h = iterative_hash_hashval_t (h, nh);
2472 	}
2473       return h;
2474     }
2475     inline static bool equal (const value_type b, const compare_type &p)
2476     {
2477       if (b->entity != p.first)
2478 	return false;
2479 
2480       if (p.second)
2481 	return b->discriminator == reinterpret_cast<uintptr_t> (p.second);
2482       else
2483 	return !b->is_binding ();
2484     }
2485 
2486     /* (re)hasher for a binding itself.  */
2487     inline static hashval_t hash (const value_type b)
2488     {
2489       hashval_t h = pointer_hash<tree_node>::hash (b->entity);
2490       if (b->is_binding ())
2491 	{
2492 	  hashval_t nh = IDENTIFIER_HASH_VALUE (b->get_name ());
2493 	  h = iterative_hash_hashval_t (h, nh);
2494 	}
2495       return h;
2496     }
2497 
2498     /* Empty via NULL.  */
2499     static inline void mark_empty (value_type &p) {p = NULL;}
2500     static inline bool is_empty (value_type p) {return !p;}
2501 
2502     /* Nothing is deletable.  Everything is insertable.  */
2503     static bool is_deleted (value_type) { return false; }
2504     static void mark_deleted (value_type) { gcc_unreachable (); }
2505 
2506     /* We own the entities in the hash table.  */
2507     static void remove (value_type p)
2508     {
2509       delete (p);
2510     }
2511   };
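
  /* Consequently a binding depset is looked up by its {namespace,
     identifier} pair, while a non-binding (entity) depset is looked up as
     {entity, NULL} -- the NULL name is what distinguishes the two in
     equal () above.  */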
2512 
2513 public:
2514   class hash : public hash_table<traits> {
2515     typedef traits::compare_type key_t;
2516     typedef hash_table<traits> parent;
2517 
2518   public:
2519     vec<depset *> worklist;  /* Worklist of decls to walk.  */
2520     hash *chain;	     /* Original table.  */
2521     depset *current;         /* Current depset being depended.  */
2522     unsigned section;	     /* When writing out, the section.  */
2523     bool sneakoscope;        /* Detecting dark magic (of a voldemort).  */
2524     bool reached_unreached;  /* We reached an unreached entity.  */
2525 
2526   public:
2527     hash (size_t size, hash *c = NULL)
2528       : parent (size), chain (c), current (NULL), section (0),
2529 	sneakoscope (false), reached_unreached (false)
2530     {
2531       worklist.create (size);
2532     }
2533     ~hash ()
2534     {
2535       worklist.release ();
2536     }
2537 
2538   public:
2539     bool is_key_order () const
2540     {
2541       return chain != NULL;
2542     }
2543 
2544   private:
2545     depset **entity_slot (tree entity, bool = true);
2546     depset **binding_slot (tree ctx, tree name, bool = true);
2547     depset *maybe_add_declaration (tree decl);
2548 
2549   public:
2550     depset *find_dependency (tree entity);
2551     depset *find_binding (tree ctx, tree name);
2552     depset *make_dependency (tree decl, entity_kind);
2553     void add_dependency (depset *);
2554 
2555   public:
2556     void add_mergeable (depset *);
2557     depset *add_dependency (tree decl, entity_kind);
2558     void add_namespace_context (depset *, tree ns);
2559 
2560   private:
2561     static bool add_binding_entity (tree, WMB_Flags, void *);
2562 
2563   public:
2564     bool add_namespace_entities (tree ns, bitmap partitions);
2565     void add_specializations (bool decl_p);
2566     void add_partial_entities (vec<tree, va_gc> *);
2567     void add_class_entities (vec<tree, va_gc> *);
2568 
2569   public:
2570     void find_dependencies (module_state *);
2571     bool finalize_dependencies ();
2572     vec<depset *> connect ();
2573   };
2574 
2575 public:
2576   struct tarjan {
2577     vec<depset *> result;
2578     vec<depset *> stack;
2579     unsigned index;
2580 
2581     tarjan (unsigned size)
2582       : index (0)
2583     {
2584       result.create (size);
2585       stack.create (50);
2586     }
2587     ~tarjan ()
2588     {
2589       gcc_assert (!stack.length ());
2590       stack.release ();
2591     }
2592 
2593   public:
2594     void connect (depset *);
2595   };
2596 };
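
/* An illustrative sketch of the SCC computation referred to above (not the
   depset-specific implementation, which lives in depset::tarjan::connect):
   a textbook recursive Tarjan over a plain adjacency list, using the same
   index/lowlink/stack idea.  */
#if 0
#include <vector>
#include <algorithm>

struct scc_sketch
{
  const std::vector<std::vector<int>> &adj;   /* Out-edges per node.  */
  std::vector<int> index, lowlink;	      /* Visit order & lowlink.  */
  std::vector<bool> on_stack;
  std::vector<int> stack;
  std::vector<std::vector<int>> sccs;	      /* Resulting clusters.  */
  int counter = 0;

  scc_sketch (const std::vector<std::vector<int>> &g)
    : adj (g), index (g.size (), -1), lowlink (g.size ()),
      on_stack (g.size (), false) {}

  void connect (int v)
  {
    index[v] = lowlink[v] = counter++;
    stack.push_back (v);
    on_stack[v] = true;

    for (int w : adj[v])
      if (index[w] < 0)
	{
	  /* Tree edge: recurse, then propagate the child's lowlink.  */
	  connect (w);
	  lowlink[v] = std::min (lowlink[v], lowlink[w]);
	}
      else if (on_stack[w])
	/* Back or cross edge into the current SCC candidate.  */
	lowlink[v] = std::min (lowlink[v], index[w]);

    if (lowlink[v] == index[v])
      {
	/* V roots a strongly connected component; pop it off.  */
	std::vector<int> scc;
	int w;
	do
	  {
	    w = stack.back ();
	    stack.pop_back ();
	    on_stack[w] = false;
	    scc.push_back (w);
	  }
	while (w != v);
	sccs.push_back (scc);
      }
  }
};
#endif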
2597 
2598 inline
2599 depset::depset (tree entity)
2600   :entity (entity), discriminator (0), cluster (0), section (0)
2601 {
2602   deps.create (0);
2603 }
2604 
2605 inline
2606 depset::~depset ()
2607 {
2608   deps.release ();
2609 }
2610 
2611 const char *
2612 depset::entity_kind_name () const
2613 {
2614   /* Same order as entity_kind.  */
2615   static const char *const names[] =
2616     {"decl", "specialization", "partial", "using",
2617      "namespace", "redirect", "binding"};
2618   entity_kind kind = get_entity_kind ();
2619   gcc_checking_assert (kind < sizeof (names) / sizeof(names[0]));
2620   return names[kind];
2621 }
2622 
2623 /* Create a depset for a namespace binding NS::NAME.  */
2624 
2625 depset *depset::make_binding (tree ns, tree name)
2626 {
2627   depset *binding = new depset (ns);
2628 
2629   binding->discriminator = reinterpret_cast <uintptr_t> (name);
2630 
2631   return binding;
2632 }
2633 
2634 depset *depset::make_entity (tree entity, entity_kind ek, bool is_defn)
2635 {
2636   depset *r = new depset (entity);
2637 
2638   r->discriminator = ((1 << DB_ZERO_BIT)
2639 		      | (ek << DB_KIND_BIT)
2640 		      | is_defn << DB_DEFN_BIT);
2641 
2642   return r;
2643 }
2644 
2645 class pending_key
2646 {
2647 public:
2648   tree ns;
2649   tree id;
2650 };
2651 
2652 template<>
2653 struct default_hash_traits<pending_key>
2654 {
2655   using value_type = pending_key;
2656 
2657   static const bool empty_zero_p = false;
2658   static hashval_t hash (const value_type &k)
2659   {
2660     hashval_t h = IDENTIFIER_HASH_VALUE (k.id);
2661     h = iterative_hash_hashval_t (DECL_UID (k.ns), h);
2662 
2663     return h;
2664   }
2665   static bool equal (const value_type &k, const value_type &l)
2666   {
2667     return k.ns == l.ns && k.id == l.id;
2668   }
2669   static void mark_empty (value_type &k)
2670   {
2671     k.ns = k.id = NULL_TREE;
2672   }
2673   static void mark_deleted (value_type &k)
2674   {
2675     k.ns = NULL_TREE;
2676     gcc_checking_assert (k.id);
2677   }
2678   static bool is_empty (const value_type &k)
2679   {
2680     return k.ns == NULL_TREE && k.id == NULL_TREE;
2681   }
2682   static bool is_deleted (const value_type &k)
2683   {
2684     return k.ns == NULL_TREE && k.id != NULL_TREE;
2685   }
2686   static void remove (value_type &)
2687   {
2688   }
2689 };
2690 
2691 typedef hash_map<pending_key, auto_vec<unsigned>> pending_map_t;
2692 
2693 /* Not-loaded entities that are keyed to a namespace-scope
2694    identifier.  See module_state::write_pendings for details.  */
2695 pending_map_t *pending_table;
2696 
2697 /* Decls that need some post processing once a batch of lazy loads has
2698    completed.  */
2699 vec<tree, va_heap, vl_embed> *post_load_decls;
2700 
2701 /* Some entities are attached to another entity for ODR purposes.
2702    For example, at namespace scope, 'inline auto var = []{};', that
2703    lambda is attached to 'var', and follows its ODRness.  */
2704 typedef hash_map<tree, auto_vec<tree>> attached_map_t;
2705 static attached_map_t *attached_table;
2706 
2707 /********************************************************************/
2708 /* Tree streaming.   The tree streaming is very specific to the tree
2709    structures themselves.  A tag indicates the kind of tree being
2710    streamed.  -ve tags indicate backreferences to already-streamed
2711    trees.  Backreferences are auto-numbered.  */
2712 
2713 /* Tree tags.  */
2714 enum tree_tag {
2715   tt_null,		/* NULL_TREE.  */
2716   tt_fixed,		/* Fixed vector index.  */
2717 
2718   tt_node,		/* By-value node.  */
2719   tt_decl,		/* By-value mergeable decl.  */
2720   tt_tpl_parm,		/* Template parm.  */
2721 
2722   /* The ordering of the following 4 is relied upon in
2723      trees_out::tree_node.  */
2724   tt_id,  		/* Identifier node.  */
2725   tt_conv_id,		/* Conversion operator name.  */
2726   tt_anon_id,		/* Anonymous name.  */
2727   tt_lambda_id,		/* Lambda name.  */
2728 
2729   tt_typedef_type,	/* A (possibly implicit) typedefed type.  */
2730   tt_derived_type,	/* A type derived from another type.  */
2731   tt_variant_type,	/* A variant of another type.  */
2732 
2733   tt_tinfo_var,		/* Typeinfo object. */
2734   tt_tinfo_typedef,	/* Typeinfo typedef.  */
2735   tt_ptrmem_type,	/* Pointer to member type.  */
2736 
2737   tt_parm,		/* Function parameter or result.  */
2738   tt_enum_value,	/* An enum value.  */
2739   tt_enum_decl,		/* An enum decl.  */
2740   tt_data_member,	/* Data member/using-decl.  */
2741 
2742   tt_binfo,		/* A BINFO.  */
2743   tt_vtable,		/* A vtable.  */
2744   tt_thunk,		/* A thunk.  */
2745   tt_clone_ref,
2746 
2747   tt_entity,		/* An extra-cluster entity.  */
2748 
2749   tt_template,		/* The TEMPLATE_RESULT of a template.  */
2750 };
2751 
2752 enum walk_kind {
2753   WK_none,	/* No walk to do (a back- or fixed-ref happened).  */
2754   WK_normal,	/* Normal walk (by-name if possible).  */
2755 
2756   WK_value,	/* By-value walk.  */
2757 };
2758 
2759 enum merge_kind
2760 {
2761   MK_unique,	/* Known unique.  */
2762   MK_named,	/* Found by CTX, NAME + maybe_arg types etc.  */
2763   MK_field,	/* Found by CTX and index on TYPE_FIELDS  */
2764   MK_vtable,	/* Found by CTX and index on TYPE_VTABLES  */
2765   MK_as_base,	/* Found by CTX.  */
2766 
2767   MK_partial,
2768 
2769   MK_enum,	/* Found by CTX, & 1stMemberNAME.  */
2770   MK_attached,  /* Found by attachee & index.  */
2771 
2772   MK_friend_spec,  /* Like named, but has a tmpl & args too.  */
2773   MK_local_friend, /* Found by CTX, index.  */
2774 
2775   MK_indirect_lwm = MK_enum,
2776 
2777   /* Template specialization kinds below. These are all found via
2778      primary template and specialization args.  */
2779   MK_template_mask = 0x10,  /* A template specialization.  */
2780 
2781   MK_tmpl_decl_mask = 0x4, /* In decl table.  */
2782   MK_tmpl_alias_mask = 0x2, /* Also in type table  */
2783 
2784   MK_tmpl_tmpl_mask = 0x1, /* We want TEMPLATE_DECL.  */
2785 
2786   MK_type_spec = MK_template_mask,
2787   MK_decl_spec = MK_template_mask | MK_tmpl_decl_mask,
2788   MK_alias_spec = MK_decl_spec | MK_tmpl_alias_mask,
2789 
2790   MK_hwm = 0x20
2791 };
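
/* For instance, MK_type_spec is 0x10 (index 16, "type spec" below),
   MK_decl_spec is 0x10|0x4 == 20 ("decl spec"), and MK_alias_spec is
   0x10|0x4|0x2 == 22 ("alias spec"); OR-ing in MK_tmpl_tmpl_mask selects
   the corresponding "... tmpl spec" entry.  */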
2792 /* This is more than a debugging array.  NULLs are used to determine
2793    an invalid merge_kind number.  */
2794 static char const *const merge_kind_name[MK_hwm] =
2795   {
2796     "unique", "named", "field", "vtable",	/* 0...3  */
2797     "asbase", "partial", "enum", "attached",	/* 4...7  */
2798 
2799     "friend spec", "local friend", NULL, NULL,  /* 8...11 */
2800     NULL, NULL, NULL, NULL,
2801 
2802     "type spec", "type tmpl spec",	/* 16,17 type (template).  */
2803     NULL, NULL,
2804 
2805     "decl spec", "decl tmpl spec",	/* 20,21 decl (template).  */
2806     "alias spec", "alias tmpl spec",	/* 22,23 alias (template). */
2807     NULL, NULL, NULL, NULL,
2808     NULL, NULL, NULL, NULL,
2809   };
2810 
2811 /* Mergeable entity location data.  */
2812 struct merge_key {
2813   cp_ref_qualifier ref_q : 2;
2814   unsigned index;
2815 
2816   tree ret;  /* Return type, if appropriate.  */
2817   tree args; /* Arg types, if appropriate.  */
2818 
2819   tree constraints;  /* Constraints.  */
2820 
2821   merge_key ()
2822     :ref_q (REF_QUAL_NONE), index (0),
2823      ret (NULL_TREE), args (NULL_TREE),
2824      constraints (NULL_TREE)
2825   {
2826   }
2827 };
2828 
2829 struct duplicate_hash : nodel_ptr_hash<tree_node>
2830 {
2831 #if 0
2832   /* This breaks variadic bases in the xtreme_header tests.  Since ::equal is
2833      the default pointer_hash::equal, let's use the default hash as well.  */
2834   inline static hashval_t hash (value_type decl)
2835   {
2836     if (TREE_CODE (decl) == TREE_BINFO)
2837       decl = TYPE_NAME (BINFO_TYPE (decl));
2838     return hashval_t (DECL_UID (decl));
2839   }
2840 #endif
2841 };
2842 
2843 /* Hashmap of merged duplicates.  Usually decls, but can contain
2844    BINFOs.  */
2845 typedef hash_map<tree,uintptr_t,
2846 		 simple_hashmap_traits<duplicate_hash,uintptr_t> >
2847 duplicate_hash_map;
2848 
2849 /* Tree stream reader.  Note that reading a stream doesn't mark the
2850    read trees with TREE_VISITED.  Thus it's quite safe to have
2851    multiple concurrent readers, which is good, because of lazy
2852    loading.  */
2853 class trees_in : public bytes_in {
2854   typedef bytes_in parent;
2855 
2856 private:
2857   module_state *state;		/* Module being imported.  */
2858   vec<tree> back_refs;		/* Back references.  */
2859   duplicate_hash_map *duplicates;	/* Map from existings to duplicate.  */
2860   vec<tree> post_decls;		/* Decls to post process.  */
2861   unsigned unused;		/* Inhibit any interior TREE_USED
2862 				   marking.  */
2863 
2864 public:
2865   trees_in (module_state *);
2866   ~trees_in ();
2867 
2868 public:
2869   int insert (tree);
2870   tree back_ref (int);
2871 
2872 private:
2873   tree start (unsigned = 0);
2874 
2875 public:
2876   /* Needed for binfo writing  */
2877   bool core_bools (tree);
2878 
2879 private:
2880   /* Stream tree_core, lang_decl_specific and lang_type_specific
2881      bits.  */
2882   bool core_vals (tree);
2883   bool lang_type_bools (tree);
2884   bool lang_type_vals (tree);
2885   bool lang_decl_bools (tree);
2886   bool lang_decl_vals (tree);
2887   bool lang_vals (tree);
2888   bool tree_node_bools (tree);
2889   bool tree_node_vals (tree);
2890   tree tree_value ();
2891   tree decl_value ();
2892   tree tpl_parm_value ();
2893 
2894 private:
2895   tree chained_decls ();  /* Follow DECL_CHAIN.  */
2896   vec<tree, va_heap> *vec_chained_decls ();
2897   vec<tree, va_gc> *tree_vec (); /* vec of tree.  */
2898   vec<tree_pair_s, va_gc> *tree_pair_vec (); /* vec of tree_pair.  */
2899   tree tree_list (bool has_purpose);
2900 
2901 public:
2902   /* Read a tree node.  */
2903   tree tree_node (bool is_use = false);
2904 
2905 private:
2906   bool install_entity (tree decl);
2907   tree tpl_parms (unsigned &tpl_levels);
2908   bool tpl_parms_fini (tree decl, unsigned tpl_levels);
2909   bool tpl_header (tree decl, unsigned *tpl_levels);
2910   int fn_parms_init (tree);
2911   void fn_parms_fini (int tag, tree fn, tree existing, bool has_defn);
2912   unsigned add_indirect_tpl_parms (tree);
2913 public:
2914   bool add_indirects (tree);
2915 
2916 public:
2917   /* Serialize various definitions. */
2918   bool read_definition (tree decl);
2919 
2920 private:
2921   bool is_matching_decl (tree existing, tree decl, bool is_typedef);
2922   static bool install_implicit_member (tree decl);
2923   bool read_function_def (tree decl, tree maybe_template);
2924   bool read_var_def (tree decl, tree maybe_template);
2925   bool read_class_def (tree decl, tree maybe_template);
2926   bool read_enum_def (tree decl, tree maybe_template);
2927 
2928 public:
2929   tree decl_container ();
2930   tree key_mergeable (int tag, merge_kind, tree decl, tree inner, tree type,
2931 		      tree container, bool is_mod);
2932   unsigned binfo_mergeable (tree *);
2933 
2934 private:
2935   uintptr_t *find_duplicate (tree existing);
2936   void register_duplicate (tree decl, tree existing);
2937   /* Mark as an already diagnosed bad duplicate.  */
2938   void unmatched_duplicate (tree existing)
2939   {
2940     *find_duplicate (existing) |= 1;
2941   }
2942 
2943 public:
2944   bool is_duplicate (tree decl)
2945   {
2946     return find_duplicate (decl) != NULL;
2947   }
2948   tree maybe_duplicate (tree decl)
2949   {
2950     if (uintptr_t *dup = find_duplicate (decl))
2951       return reinterpret_cast<tree> (*dup & ~uintptr_t (1));
2952     return decl;
2953   }
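  /* The low bit of the mapped value is thus an "already diagnosed" flag;
     the remaining bits are the tree pointer itself.  */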
2954   tree odr_duplicate (tree decl, bool has_defn);
2955 
2956 public:
2957   /* Return the next decl to postprocess, or NULL.  */
2958   tree post_process ()
2959   {
2960     return post_decls.length () ? post_decls.pop () : NULL_TREE;
2961   }
2962 private:
2963   /* Register DECL for postprocessing.  */
2964   void post_process (tree decl)
2965   {
2966     post_decls.safe_push (decl);
2967   }
2968 
2969 private:
2970   void assert_definition (tree, bool installing);
2971 };
2972 
2973 trees_in::trees_in (module_state *state)
2974   :parent (), state (state), unused (0)
2975 {
2976   duplicates = NULL;
2977   back_refs.create (500);
2978   post_decls.create (0);
2979 }
2980 
2981 trees_in::~trees_in ()
2982 {
2983   delete (duplicates);
2984   back_refs.release ();
2985   post_decls.release ();
2986 }
2987 
2988 /* Tree stream writer.  */
2989 class trees_out : public bytes_out {
2990   typedef bytes_out parent;
2991 
2992 private:
2993   module_state *state;		/* The module we are writing.  */
2994   ptr_int_hash_map tree_map; 	/* Trees to references */
2995   depset::hash *dep_hash;    	/* Dependency table.  */
2996   int ref_num;			/* Back reference number.  */
2997   unsigned section;
2998 #if CHECKING_P
2999   int importedness;		/* Checker that imports not occurring
3000 				   inappropriately.  +ve imports ok,
3001 				   -ve imports not ok.  */
3002 #endif
3003 
3004 public:
3005   trees_out (allocator *, module_state *, depset::hash &deps, unsigned sec = 0);
3006   ~trees_out ();
3007 
3008 private:
3009   void mark_trees ();
3010   void unmark_trees ();
3011 
3012 public:
3013   /* Hey, let's ignore the well known STL iterator idiom.  */
3014   void begin ();
3015   unsigned end (elf_out *sink, unsigned name, unsigned *crc_ptr);
3016   void end ();
3017 
3018 public:
3019   enum tags
3020   {
3021     tag_backref = -1,	/* Upper bound on the backrefs.  */
3022     tag_value = 0,	/* Write by value.  */
3023     tag_fixed		/* Lower bound on the fixed trees.  */
3024   };
3025 
3026 public:
3027   bool is_key_order () const
3028   {
3029     return dep_hash->is_key_order ();
3030   }
3031 
3032 public:
3033   int insert (tree, walk_kind = WK_normal);
3034 
3035 private:
3036   void start (tree, bool = false);
3037 
3038 private:
3039   walk_kind ref_node (tree);
3040 public:
3041   int get_tag (tree);
3042   void set_importing (int i ATTRIBUTE_UNUSED)
3043   {
3044 #if CHECKING_P
3045     importedness = i;
3046 #endif
3047   }
3048 
3049 private:
3050   void core_bools (tree);
3051   void core_vals (tree);
3052   void lang_type_bools (tree);
3053   void lang_type_vals (tree);
3054   void lang_decl_bools (tree);
3055   void lang_decl_vals (tree);
3056   void lang_vals (tree);
3057   void tree_node_bools (tree);
3058   void tree_node_vals (tree);
3059 
3060 private:
3061   void chained_decls (tree);
3062   void vec_chained_decls (tree);
3063   void tree_vec (vec<tree, va_gc> *);
3064   void tree_pair_vec (vec<tree_pair_s, va_gc> *);
3065   void tree_list (tree, bool has_purpose);
3066 
3067 public:
3068   /* Mark a node for by-value walking.  */
3069   void mark_by_value (tree);
3070 
3071 public:
3072   void tree_node (tree);
3073 
3074 private:
3075   void install_entity (tree decl, depset *);
3076   void tpl_parms (tree parms, unsigned &tpl_levels);
3077   void tpl_parms_fini (tree decl, unsigned tpl_levels);
3078   void fn_parms_fini (tree) {}
3079   unsigned add_indirect_tpl_parms (tree);
3080 public:
3081   void add_indirects (tree);
3082   void fn_parms_init (tree);
3083   void tpl_header (tree decl, unsigned *tpl_levels);
3084 
3085 public:
3086   merge_kind get_merge_kind (tree decl, depset *maybe_dep);
3087   tree decl_container (tree decl);
3088   void key_mergeable (int tag, merge_kind, tree decl, tree inner,
3089 		      tree container, depset *maybe_dep);
3090   void binfo_mergeable (tree binfo);
3091 
3092 private:
3093   bool decl_node (tree, walk_kind ref);
3094   void type_node (tree);
3095   void tree_value (tree);
3096   void tpl_parm_value (tree);
3097 
3098 public:
3099   void decl_value (tree, depset *);
3100 
3101 public:
3102   /* Serialize various definitions. */
3103   void write_definition (tree decl);
3104   void mark_declaration (tree decl, bool do_defn);
3105 
3106 private:
3107   void mark_function_def (tree decl);
3108   void mark_var_def (tree decl);
3109   void mark_class_def (tree decl);
3110   void mark_enum_def (tree decl);
3111   void mark_class_member (tree decl, bool do_defn = true);
3112   void mark_binfos (tree type);
3113 
3114 private:
3115   void write_var_def (tree decl);
3116   void write_function_def (tree decl);
3117   void write_class_def (tree decl);
3118   void write_enum_def (tree decl);
3119 
3120 private:
3121   static void assert_definition (tree);
3122 
3123 public:
3124   static void instrument ();
3125 
3126 private:
3127   /* Tree instrumentation. */
3128   static unsigned tree_val_count;
3129   static unsigned decl_val_count;
3130   static unsigned back_ref_count;
3131   static unsigned null_count;
3132 };
3133 
3134 /* Instrumentation counters.  */
3135 unsigned trees_out::tree_val_count;
3136 unsigned trees_out::decl_val_count;
3137 unsigned trees_out::back_ref_count;
3138 unsigned trees_out::null_count;
3139 
3140 trees_out::trees_out (allocator *mem, module_state *state, depset::hash &deps,
3141 		      unsigned section)
3142   :parent (mem), state (state), tree_map (500),
3143    dep_hash (&deps), ref_num (0), section (section)
3144 {
3145 #if CHECKING_P
3146   importedness = 0;
3147 #endif
3148 }
3149 
3150 trees_out::~trees_out ()
3151 {
3152 }
3153 
3154 /********************************************************************/
3155 /* Location.  We're aware of the line-map concept and reproduce it
3156    here.  Each imported module allocates a contiguous span of ordinary
3157    maps, and of macro maps.  adhoc maps are serialized by contents,
3158    not pre-allocated.   The scattered linemaps of a module are
3159    coalesced when writing.  */
3160 
3161 
3162 /* I use half-open [first,second) ranges.  */
3163 typedef std::pair<unsigned,unsigned> range_t;
3164 
3165 /* A range of locations.  */
3166 typedef std::pair<location_t,location_t> loc_range_t;
3167 
3168 /* Spans of the line maps that are occupied by this TU.  I.e. not
3169    within imports.  Only extended when in an interface unit.
3170    Interval zero corresponds to the forced header linemap(s).  This
3171    is a singleton object.  */
3172 
3173 class loc_spans {
3174 public:
3175   /* An interval of line maps.  The line maps here represent a contiguous
3176      non-imported range.  */
3177   struct span {
3178     loc_range_t ordinary;	/* Ordinary map location range. */
3179     loc_range_t macro;		/* Macro map location range.  */
3180     int ordinary_delta;	/* Add to ordinary loc to get serialized loc.  */
3181     int macro_delta;	/* Likewise for macro loc.  */
3182   };
3183 
3184 private:
3185   vec<span> *spans;
3186 
3187 public:
3188   loc_spans ()
3189     /* Do not preallocate spans, as that causes
3190        --enable-detailed-mem-stats problems.  */
3191     : spans (nullptr)
3192   {
3193   }
3194   ~loc_spans ()
3195   {
3196     delete spans;
3197   }
3198 
3199 public:
3200   span &operator[] (unsigned ix)
3201   {
3202     return (*spans)[ix];
3203   }
3204   unsigned length () const
3205   {
3206     return spans->length ();
3207   }
3208 
3209 public:
3210   bool init_p () const
3211   {
3212     return spans != nullptr;
3213   }
3214   /* Initializer.  */
3215   void init (const line_maps *lmaps, const line_map_ordinary *map);
3216 
3217   /* Slightly skewed preprocessed files can cause us to miss an
3218      initialization in some places.  Fallback initializer.  */
3219   void maybe_init ()
3220   {
3221     if (!init_p ())
3222       init (line_table, nullptr);
3223   }
3224 
3225 public:
3226   enum {
3227     SPAN_RESERVED = 0,	/* Reserved (fixed) locations.  */
3228     SPAN_FIRST = 1,	/* LWM of locations to stream  */
3229     SPAN_MAIN = 2	/* Main file and onwards.  */
3230   };
3231 
3232 public:
3233   location_t main_start () const
3234   {
3235     return (*spans)[SPAN_MAIN].ordinary.first;
3236   }
3237 
3238 public:
3239   void open (location_t);
3240   void close ();
3241 
3242 public:
3243   /* Propagate imported linemaps to us, if needed.  */
3244   bool maybe_propagate (module_state *import, location_t loc);
3245 
3246 public:
3247   const span *ordinary (location_t);
3248   const span *macro (location_t);
3249 };
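
/* For example (illustrative numbers only): if a span's ordinary range
   starts at location 0x5000 but is allocated at 0x2000 in the serialized
   numbering, its ordinary_delta is -0x3000, and each location in the span
   is streamed as loc + ordinary_delta.  */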
3250 
3251 static loc_spans spans;
3252 /* Indirection to allow bsearching imports by ordinary location.  */
3253 static vec<module_state *> *ool;
3254 
3255 /********************************************************************/
3256 /* Data needed by a module during the process of loading.  */
3257 struct GTY(()) slurping {
3258 
3259   /* Remap import's module numbering to our numbering.  Values are
3260      shifted by 1.  Bit0 encodes if the import is direct.  */
3261   vec<unsigned, va_heap, vl_embed> *
3262     GTY((skip)) remap;			/* Module owner remapping.  */
3263 
3264   elf_in *GTY((skip)) from;     	/* The elf loader.  */
3265 
3266   /* This map is only for header imports themselves -- the global
3267      headers bitmap holds it for the current TU.  */
3268   bitmap headers;	/* Transitive set of direct imports, including
3269 			   self.  Used for macro visibility and
3270 			   priority.  */
3271 
3272   /* These objects point into the mmapped area, unless we're not doing
3273      that, or we got frozen or closed.  In those cases they point to
3274      buffers we own.  */
3275   bytes_in macro_defs;	/* Macro definitions.  */
3276   bytes_in macro_tbl;	/* Macro table.  */
3277 
3278   /* Location remapping.  first->ordinary, second->macro.  */
3279   range_t GTY((skip)) loc_deltas;
3280 
3281   unsigned current;	/* Section currently being loaded.  */
3282   unsigned remaining;	/* Number of lazy sections yet to read.  */
3283   unsigned lru;		/* An LRU counter.  */
3284 
3285  public:
3286   slurping (elf_in *);
3287   ~slurping ();
3288 
3289  public:
3290   /* Close the ELF file, if it's open.  */
3291   void close ()
3292   {
3293     if (from)
3294       {
3295 	from->end ();
3296 	delete from;
3297 	from = NULL;
3298       }
3299   }
3300 
3301  public:
3302   void release_macros ();
3303 
3304  public:
3305   void alloc_remap (unsigned size)
3306   {
3307     gcc_assert (!remap);
3308     vec_safe_reserve (remap, size);
3309     for (unsigned ix = size; ix--;)
3310       remap->quick_push (0);
3311   }
3312   unsigned remap_module (unsigned owner)
3313   {
3314     if (owner < remap->length ())
3315       return (*remap)[owner] >> 1;
3316     return 0;
3317   }
3318 
3319  public:
3320   /* GC allocation.  But we must explicitly delete it.   */
3321   static void *operator new (size_t x)
3322   {
3323     return ggc_alloc_atomic (x);
3324   }
3325   static void operator delete (void *p)
3326   {
3327     ggc_free (p);
3328   }
3329 };
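
/* A sketch of the remap encoding implied by the comment and remap_module
   above (an assumption for illustration, not quoted from the writer): each
   slot holds the remapped module number shifted left by one, with bit 0
   recording whether the import was direct.  */
#if 0
static unsigned
encode_remap (unsigned remapped_module, bool is_direct)
{
  return (remapped_module << 1) | unsigned (is_direct);
}
#endif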
3330 
3331 slurping::slurping (elf_in *from)
3332   : remap (NULL), from (from),
3333     headers (BITMAP_GGC_ALLOC ()), macro_defs (), macro_tbl (),
3334     loc_deltas (0, 0),
3335     current (~0u), remaining (0), lru (0)
3336 {
3337 }
3338 
3339 slurping::~slurping ()
3340 {
3341   vec_free (remap);
3342   remap = NULL;
3343   release_macros ();
3344   close ();
3345 }
3346 
3347 void slurping::release_macros ()
3348 {
3349   if (macro_defs.size)
3350     elf_in::release (from, macro_defs);
3351   if (macro_tbl.size)
3352     elf_in::release (from, macro_tbl);
3353 }
3354 
3355 /* Information about location maps used during writing.  */
3356 
3357 struct location_map_info {
3358   range_t num_maps;
3359 
3360   unsigned max_range;
3361 };
3362 
3363 /* Flags for extensions that end up being streamed.  */
3364 
3365 enum streamed_extensions {
3366   SE_OPENMP = 1 << 0,
3367   SE_BITS = 1
3368 };
3369 
3370 /********************************************************************/
3371 struct module_state_config;
3372 
3373 /* Increasing levels of loadedness.  */
3374 enum module_loadedness {
3375   ML_NONE,		/* Not loaded.  */
3376   ML_CONFIG,		/* Config loaded.  */
3377   ML_PREPROCESSOR,	/* Preprocessor loaded.  */
3378   ML_LANGUAGE,		/* Language loaded.  */
3379 };
3380 
3381 /* Increasing levels of directness (toplevel) of import.  */
3382 enum module_directness {
3383   MD_NONE,  		/* Not direct.  */
3384   MD_PARTITION_DIRECT,	/* Direct import of a partition.  */
3385   MD_DIRECT,		/* Direct import.  */
3386   MD_PURVIEW_DIRECT,	/* Direct import in purview.  */
3387 };
3388 
3389 /* State of a particular module. */
3390 
3391 class GTY((chain_next ("%h.parent"), for_user)) module_state {
3392  public:
3393   /* We always import & export ourselves.  */
3394   bitmap imports;	/* Transitive modules we're importing.  */
3395   bitmap exports;	/* Subset of that, that we're exporting.  */
3396 
3397   module_state *parent;
3398   tree name;		/* Name of the module.  */
3399 
3400   slurping *slurp;	/* Data for loading.  */
3401 
3402   const char *flatname;	/* Flatname of module.  */
3403   char *filename;	/* CMI Filename */
3404 
3405   /* Indices into the entity_ary.  */
3406   unsigned entity_lwm;
3407   unsigned entity_num;
3408 
3409   /* Location ranges for this module.  adhoc-locs are decomposed, so
3410      don't have a range.  */
3411   loc_range_t GTY((skip)) ordinary_locs;
3412   loc_range_t GTY((skip)) macro_locs;
3413 
3414   /* LOC is first set to the importing location.  When initially
3415      loaded it refers to a module loc whose parent is the importing
3416      location.  */
3417   location_t loc; 	/* Location referring to module itself.  */
3418   unsigned crc;		/* CRC we saw reading it in. */
3419 
3420   unsigned mod;		/* Module owner number.  */
3421   unsigned remap;	/* Remapping during writing.  */
3422 
3423   unsigned short subst;	/* Mangle subst if !0.  */
3424 
3425   /* How loaded this module is.  */
3426   enum module_loadedness loadedness : 2;
3427 
3428   bool module_p : 1;    /* /The/ module of this TU.  */
3429   bool header_p : 1;	/* Is a header unit.  */
3430   bool interface_p : 1; /* An interface.  */
3431   bool partition_p : 1; /* A partition.  */
3432 
3433   /* How directly this module is imported.  */
3434   enum module_directness directness : 2;
3435 
3436   bool exported_p : 1;	/* directness != MD_NONE && exported.  */
3437   bool cmi_noted_p : 1; /* We've told the user about the CMI, don't
3438 			   do it again  */
3439   bool call_init_p : 1; /* This module's global initializer needs
3440 			   calling.  */
3441   bool inform_cmi_p : 1; /* Inform of a read/write.  */
3442   bool visited_p : 1;    /* A walk-once flag. */
3443   /* Record extensions emitted or permitted.  */
3444   unsigned extensions : SE_BITS;
3445   /* 14 bits used, 2 bits remain  */
3446 
3447  public:
3448   module_state (tree name, module_state *, bool);
3449   ~module_state ();
3450 
3451  public:
3452   void release ()
3453   {
3454     imports = exports = NULL;
3455     slurped ();
3456   }
3457   void slurped ()
3458   {
3459     delete slurp;
3460     slurp = NULL;
3461   }
3462   elf_in *from () const
3463   {
3464     return slurp->from;
3465   }
3466 
3467  public:
3468   /* Kind of this module.  */
3469   bool is_module () const
3470   {
3471     return module_p;
3472   }
3473   bool is_header () const
3474   {
3475     return header_p;
3476   }
3477   bool is_interface () const
3478   {
3479     return interface_p;
3480   }
3481   bool is_partition () const
3482   {
3483     return partition_p;
3484   }
3485 
3486   /* How this module is used in the current TU.  */
3487   bool is_exported () const
3488   {
3489     return exported_p;
3490   }
3491   bool is_direct () const
3492   {
3493     return directness >= MD_DIRECT;
3494   }
3495   bool is_purview_direct () const
3496   {
3497     return directness == MD_PURVIEW_DIRECT;
3498   }
3499   bool is_partition_direct () const
3500   {
3501     return directness == MD_PARTITION_DIRECT;
3502   }
3503 
3504  public:
3505   /* Is this a real module?  */
3506   bool has_location () const
3507   {
3508     return loc != UNKNOWN_LOCATION;
3509   }
3510 
3511  public:
3512   bool check_not_purview (location_t loc);
3513 
3514  public:
3515   void mangle (bool include_partition);
3516 
3517  public:
3518   void set_import (module_state const *, bool is_export);
3519   void announce (const char *) const;
3520 
3521  public:
3522   /* Read and write module.  */
3523   void write (elf_out *to, cpp_reader *);
3524   bool read_initial (cpp_reader *);
3525   bool read_preprocessor (bool);
3526   bool read_language (bool);
3527 
3528  public:
3529   /* Read a section.  */
3530   bool load_section (unsigned snum, binding_slot *mslot);
3531   /* Lazily read a section.  */
3532   bool lazy_load (unsigned index, binding_slot *mslot);
3533 
3534  public:
3535   /* Juggle a limited number of file numbers.  */
3536   static void freeze_an_elf ();
3537   bool maybe_defrost ();
3538 
3539  public:
3540   void maybe_completed_reading ();
3541   bool check_read (bool outermost, bool ok);
3542 
3543  private:
3544   /* The README, for human consumption.  */
3545   void write_readme (elf_out *to, cpp_reader *,
3546 		     const char *dialect, unsigned extensions);
3547   void write_env (elf_out *to);
3548 
3549  private:
3550   /* Import tables. */
3551   void write_imports (bytes_out &cfg, bool direct);
3552   unsigned read_imports (bytes_in &cfg, cpp_reader *, line_maps *maps);
3553 
3554  private:
3555   void write_imports (elf_out *to, unsigned *crc_ptr);
3556   bool read_imports (cpp_reader *, line_maps *);
3557 
3558  private:
3559   void write_partitions (elf_out *to, unsigned, unsigned *crc_ptr);
3560   bool read_partitions (unsigned);
3561 
3562  private:
3563   void write_config (elf_out *to, struct module_state_config &, unsigned crc);
3564   bool read_config (struct module_state_config &);
3565   static void write_counts (elf_out *to, unsigned [], unsigned *crc_ptr);
3566   bool read_counts (unsigned []);
3567 
3568  public:
3569   void note_cmi_name ();
3570 
3571  private:
3572   static unsigned write_bindings (elf_out *to, vec<depset *> depsets,
3573 				  unsigned *crc_ptr);
3574   bool read_bindings (unsigned count, unsigned lwm, unsigned hwm);
3575 
3576   static void write_namespace (bytes_out &sec, depset *ns_dep);
3577   tree read_namespace (bytes_in &sec);
3578 
3579   void write_namespaces (elf_out *to, vec<depset *> spaces,
3580 			 unsigned, unsigned *crc_ptr);
3581   bool read_namespaces (unsigned);
3582 
3583   void intercluster_seed (trees_out &sec, unsigned index, depset *dep);
3584   unsigned write_cluster (elf_out *to, depset *depsets[], unsigned size,
3585 			  depset::hash &, unsigned *counts, unsigned *crc_ptr);
3586   bool read_cluster (unsigned snum);
3587 
3588  private:
3589   unsigned write_inits (elf_out *to, depset::hash &, unsigned *crc_ptr);
3590   bool read_inits (unsigned count);
3591 
3592  private:
3593   unsigned write_pendings (elf_out *to, vec<depset *> depsets,
3594 			   depset::hash &, unsigned *crc_ptr);
3595   bool read_pendings (unsigned count);
3596 
3597  private:
3598   void write_entities (elf_out *to, vec<depset *> depsets,
3599 		       unsigned count, unsigned *crc_ptr);
3600   bool read_entities (unsigned count, unsigned lwm, unsigned hwm);
3601 
3602  private:
3603   location_map_info write_prepare_maps (module_state_config *);
3604   bool read_prepare_maps (const module_state_config *);
3605 
3606   void write_ordinary_maps (elf_out *to, location_map_info &,
3607 			    module_state_config *, bool, unsigned *crc_ptr);
3608   bool read_ordinary_maps ();
3609   void write_macro_maps (elf_out *to, location_map_info &,
3610 			 module_state_config *, unsigned *crc_ptr);
3611   bool read_macro_maps ();
3612 
3613  private:
3614   void write_define (bytes_out &, const cpp_macro *, bool located = true);
3615   cpp_macro *read_define (bytes_in &, cpp_reader *, bool located = true) const;
3616   unsigned write_macros (elf_out *to, cpp_reader *, unsigned *crc_ptr);
3617   bool read_macros ();
3618   void install_macros ();
3619 
3620  public:
3621   void import_macros ();
3622 
3623  public:
3624   static void undef_macro (cpp_reader *, location_t, cpp_hashnode *);
3625   static cpp_macro *deferred_macro (cpp_reader *, location_t, cpp_hashnode *);
3626 
3627  public:
3628   static void write_location (bytes_out &, location_t);
3629   location_t read_location (bytes_in &) const;
3630 
3631  public:
3632   void set_flatname ();
3633   const char *get_flatname () const
3634   {
3635     return flatname;
3636   }
3637   location_t imported_from () const;
3638 
3639  public:
3640   void set_filename (const Cody::Packet &);
3641   bool do_import (cpp_reader *, bool outermost);
3642 };
3643 
3644 /* Hash module state by name.  This cannot be a member of
3645    module_state, because of GTY restrictions.  We never delete from
3646    the hash table, but ggc_ptr_hash doesn't support that
3647    simplification.  */
3648 
3649 struct module_state_hash : ggc_ptr_hash<module_state> {
3650   typedef std::pair<tree,uintptr_t> compare_type; /* {name,parent} */
3651 
3652   static inline hashval_t hash (const value_type m);
3653   static inline hashval_t hash (const compare_type &n);
3654   static inline bool equal (const value_type existing,
3655 			    const compare_type &candidate);
3656 };
3657 
3658 module_state::module_state (tree name, module_state *parent, bool partition)
3659   : imports (BITMAP_GGC_ALLOC ()), exports (BITMAP_GGC_ALLOC ()),
3660     parent (parent), name (name), slurp (NULL),
3661     flatname (NULL), filename (NULL),
3662     entity_lwm (~0u >> 1), entity_num (0),
3663     ordinary_locs (0, 0), macro_locs (0, 0),
3664     loc (UNKNOWN_LOCATION),
3665     crc (0), mod (MODULE_UNKNOWN), remap (0), subst (0)
3666 {
3667   loadedness = ML_NONE;
3668 
3669   module_p = header_p = interface_p = partition_p = false;
3670 
3671   directness = MD_NONE;
3672   exported_p = false;
3673 
3674   cmi_noted_p = false;
3675   call_init_p = false;
3676 
3677   partition_p = partition;
3678 
3679   inform_cmi_p = false;
3680   visited_p = false;
3681 
3682   extensions = 0;
3683   if (name && TREE_CODE (name) == STRING_CST)
3684     {
3685       header_p = true;
3686 
3687       const char *string = TREE_STRING_POINTER (name);
3688       gcc_checking_assert (string[0] == '.'
3689 			   ? IS_DIR_SEPARATOR (string[1])
3690 			   : IS_ABSOLUTE_PATH (string));
3691     }
3692 
3693   gcc_checking_assert (!(parent && header_p));
3694 }
3695 
3696 module_state::~module_state ()
3697 {
3698   release ();
3699 }
3700 
3701 /* Hash module state.  */
3702 static hashval_t
3703 module_name_hash (const_tree name)
3704 {
3705   if (TREE_CODE (name) == STRING_CST)
3706     return htab_hash_string (TREE_STRING_POINTER (name));
3707   else
3708     return IDENTIFIER_HASH_VALUE (name);
3709 }
3710 
3711 hashval_t
3712 module_state_hash::hash (const value_type m)
3713 {
3714   hashval_t ph = pointer_hash<void>::hash
3715     (reinterpret_cast<void *> (reinterpret_cast<uintptr_t> (m->parent)
3716 			       | m->is_partition ()));
3717   hashval_t nh = module_name_hash (m->name);
3718   return iterative_hash_hashval_t (ph, nh);
3719 }
3720 
3721 /* Hash a name.  */
3722 hashval_t
3723 module_state_hash::hash (const compare_type &c)
3724 {
3725   hashval_t ph = pointer_hash<void>::hash (reinterpret_cast<void *> (c.second));
3726   hashval_t nh = module_name_hash (c.first);
3727 
3728   return iterative_hash_hashval_t (ph, nh);
3729 }
3730 
3731 bool
3732 module_state_hash::equal (const value_type existing,
3733 			  const compare_type &candidate)
3734 {
3735   uintptr_t ep = (reinterpret_cast<uintptr_t> (existing->parent)
3736 		  | existing->is_partition ());
3737   if (ep != candidate.second)
3738     return false;
3739 
3740   /* Identifier comparison is by pointer.  If the string_csts happen
3741      to be the same object, then they're equal too.  */
3742   if (existing->name == candidate.first)
3743     return true;
3744 
3745   /* If neither are string csts, they can't be equal.  */
3746   if (TREE_CODE (candidate.first) != STRING_CST
3747       || TREE_CODE (existing->name) != STRING_CST)
3748     return false;
3749 
3750   /* String equality.  */
3751   if (TREE_STRING_LENGTH (existing->name)
3752       == TREE_STRING_LENGTH (candidate.first)
3753       && !memcmp (TREE_STRING_POINTER (existing->name),
3754 		  TREE_STRING_POINTER (candidate.first),
3755 		  TREE_STRING_LENGTH (existing->name)))
3756     return true;
3757 
3758   return false;
3759 }
3760 
3761 /********************************************************************/
3762 /* Global state */
3763 
3764 /* Mapper name.  */
3765 static const char *module_mapper_name;
3766 
3767 /* Deferred import queue (FIFO).  */
3768 static vec<module_state *, va_heap, vl_embed> *pending_imports;
3769 
3770 /* CMI repository path and workspace.  */
3771 static char *cmi_repo;
3772 static size_t cmi_repo_length;
3773 static char *cmi_path;
3774 static size_t cmi_path_alloc;
3775 
3776 /* Count of available and loaded clusters.  */
3777 static unsigned available_clusters;
3778 static unsigned loaded_clusters;
3779 
3780 /* What the current TU is.  */
3781 unsigned module_kind;
3782 
3783 /* Number of global init calls needed.  */
3784 unsigned num_init_calls_needed = 0;
3785 
3786 /* Global trees.  */
3787 static const std::pair<tree *, unsigned> global_tree_arys[] =
3788   {
3789     std::pair<tree *, unsigned> (sizetype_tab, stk_type_kind_last),
3790     std::pair<tree *, unsigned> (integer_types, itk_none),
3791     std::pair<tree *, unsigned> (global_trees, TI_MODULE_HWM),
3792     std::pair<tree *, unsigned> (c_global_trees, CTI_MODULE_HWM),
3793     std::pair<tree *, unsigned> (cp_global_trees, CPTI_MODULE_HWM),
3794     std::pair<tree *, unsigned> (NULL, 0)
3795   };
3796 static GTY(()) vec<tree, va_gc> *fixed_trees;
3797 static unsigned global_crc;
3798 
3799 /* Lazy loading can open many files concurrently, and there are
3800    per-process limits on that.  We pay attention to the process limit,
3801    and attempt to increase it when we run out.  Otherwise we use an
3802    LRU scheme to figure out who to flush.  Note that if the import
3803    graph /depth/ exceeds lazy_limit, we'll exceed the limit.  */
3804 static unsigned lazy_lru;  /* LRU counter.  */
3805 static unsigned lazy_open; /* Number of open modules.  */
3806 static unsigned lazy_limit; /* Current limit of open modules.  */
3807 static unsigned lazy_hard_limit; /* Hard limit on open modules.  */
3808 /* Account for source, assembler and dump files & directory searches.
3809    We don't keep the source files open, so we don't have to account
3810    for #include depth.  I think dump files are opened and closed per
3811    pass, but ICBW.  */
3812 #define LAZY_HEADROOM 15 /* File descriptor headroom.  */
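/* A minimal sketch, not the implementation here, of the LRU
   bookkeeping just described: each open module remembers the counter
   value at its last use, and when the open count reaches the limit the
   stalest one is flushed.  All names (lazy_module, flush_stalest) are
   illustrative only.  */
#if 0
#include <vector>
#include <cstddef>

struct lazy_module
{
  unsigned last_use;	/* lazy_lru stamp at last access.  */
  bool open;
};

/* Close least-recently-used modules until we are below LIMIT.  */
static void
flush_stalest (std::vector<lazy_module> &mods, unsigned &open_count,
	       unsigned limit)
{
  while (open_count >= limit)
    {
      size_t victim = mods.size ();
      for (size_t ix = 0; ix != mods.size (); ix++)
	if (mods[ix].open
	    && (victim == mods.size ()
		|| mods[ix].last_use < mods[victim].last_use))
	  victim = ix;
      if (victim == mods.size ())
	break;		/* Nothing left to close.  */
      mods[victim].open = false;
      open_count--;
    }
}
#endif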
3813 
3814 /* Vector of module state.  Indexed by OWNER.  Has at least 2 slots.  */
3815 static GTY(()) vec<module_state *, va_gc> *modules;
3816 
3817 /* Hash of module state, findable by {name, parent}. */
3818 static GTY(()) hash_table<module_state_hash> *modules_hash;
3819 
3820 /* Map of imported entities.  We map DECL_UID to index of entity
3821    vector.  */
3822 typedef hash_map<unsigned/*UID*/, unsigned/*index*/,
3823 		 simple_hashmap_traits<int_hash<unsigned,0>, unsigned>
3824 		 > entity_map_t;
3825 static entity_map_t *entity_map;
3826 /* Doesn't need GTYing, because any tree referenced here is also
3827    findable via the symbol table, the specialization table, or the
3828    return type of a reachable function.  */
3829 static vec<binding_slot, va_heap, vl_embed> *entity_ary;
3830 
3831 /* Member entities of imported classes that are defined in this TU.
3832    These are where the entity's context is not from the current TU.
3833    We need to emit the definition (but not the enclosing class).
3834 
3835    We could find these by walking ALL the imported classes for which
3836    we could provide a member definition.  But that's expensive,
3837    especially when you consider lazy implicit member declarations,
3838    which could be ANY imported class.  */
3839 static GTY(()) vec<tree, va_gc> *class_members;
3840 
3841 /* The same problem exists for class template partial
3842    specializations.  Now that we have constraints, the invariant of
3843    expecting them in the instantiation table no longer holds.  One of
3844    the constrained partial specializations will be there, but the
3845    others not so much.  It's not even an unconstrained partial
3846    specialization in the table :(  so any partial template declaration
3847    is added to this list too.  */
3848 static GTY(()) vec<tree, va_gc> *partial_specializations;
3849 
3850 /********************************************************************/
3851 
3852 /* Our module mapper (created lazily).  */
3853 module_client *mapper;
3854 
3855 static module_client *make_mapper (location_t loc);
3856 inline module_client *get_mapper (location_t loc)
3857 {
3858   auto *res = mapper;
3859   if (!res)
3860     res = make_mapper (loc);
3861   return res;
3862 }
3863 
3864 /********************************************************************/
3865 static tree
3866 get_clone_target (tree decl)
3867 {
3868   tree target;
3869 
3870   if (TREE_CODE (decl) == TEMPLATE_DECL)
3871     {
3872       tree res_orig = DECL_CLONED_FUNCTION (DECL_TEMPLATE_RESULT (decl));
3873 
3874       target = DECL_TI_TEMPLATE (res_orig);
3875     }
3876   else
3877     target = DECL_CLONED_FUNCTION (decl);
3878 
3879   gcc_checking_assert (DECL_MAYBE_IN_CHARGE_CDTOR_P (target));
3880 
3881   return target;
3882 }
3883 
3884 /* Like FOR_EACH_CLONE, but will walk cloned templates.  */
3885 #define FOR_EVERY_CLONE(CLONE, FN)			\
3886   if (!DECL_MAYBE_IN_CHARGE_CDTOR_P (FN));		\
3887   else							\
3888     for (CLONE = DECL_CHAIN (FN);			\
3889 	 CLONE && DECL_CLONED_FUNCTION_P (CLONE);	\
3890 	 CLONE = DECL_CHAIN (CLONE))
3891 
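/* Illustrative (hypothetical) usage, mirroring FOR_EACH_CLONE: given a
   maybe-in-charge cdtor FN (or a template thereof),

     tree clone;
     FOR_EVERY_CLONE (clone, fn)
       note_clone (clone);

   where note_clone stands in for whatever per-clone work is needed.  */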
3892 /* It'd be nice if USE_TEMPLATE were a field of template_info:
3893    (a) it'd solve the enum case dealt with below,
3894    (b) both class templates and decl templates would store this in the
3895    same place
3896    (c) this function wouldn't need the by-ref arg, which is annoying.  */
3897 
3898 static tree
3899 node_template_info (tree decl, int &use)
3900 {
3901   tree ti = NULL_TREE;
3902   int use_tpl = -1;
3903   if (DECL_IMPLICIT_TYPEDEF_P (decl))
3904     {
3905       tree type = TREE_TYPE (decl);
3906 
3907       ti = TYPE_TEMPLATE_INFO (type);
3908       if (ti)
3909 	{
3910 	  if (TYPE_LANG_SPECIFIC (type))
3911 	    use_tpl = CLASSTYPE_USE_TEMPLATE (type);
3912 	  else
3913 	    {
3914 	      /* An enum, where we don't explicitly encode use_tpl.
3915 		 If the containing context (a type or a function) is
3916 		 an ({im,ex}plicit) instantiation, then this is too.
3917 		 If it's a partial or explicit specialization, then
3918 		 this is not!  */
3919 	      tree ctx = CP_DECL_CONTEXT (decl);
3920 	      if (TYPE_P (ctx))
3921 		ctx = TYPE_NAME (ctx);
3922 	      node_template_info (ctx, use);
3923 	      use_tpl = use != 2 ? use : 0;
3924 	    }
3925 	}
3926     }
3927   else if (DECL_LANG_SPECIFIC (decl)
3928 	   && (TREE_CODE (decl) == VAR_DECL
3929 	       || TREE_CODE (decl) == TYPE_DECL
3930 	       || TREE_CODE (decl) == FUNCTION_DECL
3931 	       || TREE_CODE (decl) == FIELD_DECL
3932 	       || TREE_CODE (decl) == TEMPLATE_DECL))
3933     {
3934       use_tpl = DECL_USE_TEMPLATE (decl);
3935       ti = DECL_TEMPLATE_INFO (decl);
3936     }
3937 
3938   use = use_tpl;
3939   return ti;
3940 }
3941 
3942 /* Find the index in entity_ary for an imported DECL.  It should
3943    always be there, but bugs can cause it to be missing, and that can
3944    crash the crash reporting -- let's not do that!  When streaming
3945    out we place entities from this module there too -- with negated
3946    indices.  */
3947 
3948 static unsigned
3949 import_entity_index (tree decl, bool null_ok = false)
3950 {
3951   if (unsigned *slot = entity_map->get (DECL_UID (decl)))
3952     return *slot;
3953 
3954   gcc_checking_assert (null_ok);
3955   return ~(~0u >> 1);
3956 }
3957 
3958 /* Find the module for an imported entity at INDEX in the entity ary.
3959    There must be one.  */
3960 
3961 static module_state *
3962 import_entity_module (unsigned index)
3963 {
3964   if (index > ~(~0u >> 1))
3965     /* This is an index for an exported entity.  */
3966     return (*modules)[0];
3967 
3968   /* Do not include the current TU (not an off-by-one error).  */
3969   unsigned pos = 1;
3970   unsigned len = modules->length () - pos;
3971   while (len)
3972     {
3973       unsigned half = len / 2;
3974       module_state *probe = (*modules)[pos + half];
3975       if (index < probe->entity_lwm)
3976 	len = half;
3977       else if (index < probe->entity_lwm + probe->entity_num)
3978 	return probe;
3979       else
3980 	{
3981 	  pos += half + 1;
3982 	  len = len - (half + 1);
3983 	}
3984     }
3985   gcc_unreachable ();
3986 }
3987 
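/* Together these two answer "which import did DECL come from?".  A
   typical (illustrative) combination, as used by the dumper's
   nested_name below:

     unsigned index = import_entity_index (decl, true);
     if (!(index & ~(~0u >> 1)))
       origin = import_entity_module (index)->mod;

   i.e. only an index without the top bit set names a genuine import.  */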
3988 
3989 /********************************************************************/
3990 /* A dumping machinery.  */
3991 
3992 class dumper {
3993 public:
3994   enum {
3995     LOCATION = TDF_LINENO,  /* -lineno:Source location streaming.  */
3996     DEPEND = TDF_GRAPH,	/* -graph:Dependency graph construction.  */
3997     CLUSTER = TDF_BLOCKS,   /* -blocks:Clusters.  */
3998     TREE = TDF_UID, 	/* -uid:Tree streaming.  */
3999     MERGE = TDF_ALIAS,	/* -alias:Mergeable Entities.  */
4000     ELF = TDF_ASMNAME,	/* -asmname:Elf data.  */
4001     MACRO = TDF_VOPS	/* -vops:Macros.  */
4002   };
4003 
4004 private:
4005   struct impl {
4006     typedef vec<module_state *, va_heap, vl_embed> stack_t;
4007 
4008     FILE *stream;	/* Dump stream.  */
4009     unsigned indent; 	/* Local indentation.  */
4010     bool bol; 		/* Beginning of line.  */
4011     stack_t stack;	/* Trailing array of module_state.  */
4012 
4013     bool nested_name (tree);  /* Dump a name following DECL_CONTEXT.  */
4014   };
4015 
4016 public:
4017   /* The dumper.  */
4018   impl *dumps;
4019   dump_flags_t flags;
4020 
4021 public:
4022   /* Push/pop module state dumping.  */
4023   unsigned push (module_state *);
4024   void pop (unsigned);
4025 
4026 public:
4027   /* Change local indentation.  */
4028   void indent ()
4029   {
4030     if (dumps)
4031       dumps->indent++;
4032   }
4033   void outdent ()
4034   {
4035     if (dumps)
4036       {
4037 	gcc_checking_assert (dumps->indent);
4038 	dumps->indent--;
4039       }
4040   }
4041 
4042 public:
4043   /* Is dump enabled?  */
4044   bool operator () (int mask = 0)
4045   {
4046     if (!dumps || !dumps->stream)
4047       return false;
4048     if (mask && !(mask & flags))
4049       return false;
4050     return true;
4051   }
4052   /* Dump some information.  */
4053   bool operator () (const char *, ...);
4054 };
4055 
4056 /* The dumper.  */
4057 static dumper dump = {0, dump_flags_t (0)};
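/* The categories above ride on the generic dump modifiers, so enabling
   the lang module dump with the corresponding modifier (for instance
   something like -fdump-lang-module-graph-uid; the exact spelling
   follows the usual dump-flag rules) turns on DEPEND and TREE dumping
   in addition to the basic output.  Illustrative only.  */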
4058 
4059 /* Push to dumping M.  Return previous indentation level.  */
4060 
4061 unsigned
4062 dumper::push (module_state *m)
4063 {
4064   FILE *stream = NULL;
4065   if (!dumps || !dumps->stack.length ())
4066     {
4067       stream = dump_begin (module_dump_id, &flags);
4068       if (!stream)
4069 	return 0;
4070     }
4071 
4072   if (!dumps || !dumps->stack.space (1))
4073     {
4074       /* Create or extend the dump implementor.  */
4075       unsigned current = dumps ? dumps->stack.length () : 0;
4076       unsigned count = current ? current * 2 : EXPERIMENT (1, 20);
4077       size_t alloc = (offsetof (impl, stack)
4078 		      + impl::stack_t::embedded_size (count));
4079       dumps = XRESIZEVAR (impl, dumps, alloc);
4080       dumps->stack.embedded_init (count, current);
4081     }
4082   if (stream)
4083     dumps->stream = stream;
4084 
4085   unsigned n = dumps->indent;
4086   dumps->indent = 0;
4087   dumps->bol = true;
4088   dumps->stack.quick_push (m);
4089   if (m)
4090     {
4091       module_state *from = NULL;
4092 
4093       if (dumps->stack.length () > 1)
4094 	from = dumps->stack[dumps->stack.length () - 2];
4095       else
4096 	dump ("");
4097       dump (from ? "Starting module %M (from %M)"
4098 	    : "Starting module %M", m, from);
4099     }
4100 
4101   return n;
4102 }
4103 
4104 /* Pop from dumping.  Restore indentation to N.  */
4105 
4106 void dumper::pop (unsigned n)
4107 {
4108   if (!dumps)
4109     return;
4110 
4111   gcc_checking_assert (dump () && !dumps->indent);
4112   if (module_state *m = dumps->stack[dumps->stack.length () - 1])
4113     {
4114       module_state *from = (dumps->stack.length () > 1
4115 			    ? dumps->stack[dumps->stack.length () - 2] : NULL);
4116       dump (from ? "Finishing module %M (returning to %M)"
4117 	    : "Finishing module %M", m, from);
4118     }
4119   dumps->stack.pop ();
4120   dumps->indent = n;
4121   if (!dumps->stack.length ())
4122     {
4123       dump_end (module_dump_id, dumps->stream);
4124       dumps->stream = NULL;
4125     }
4126 }
4127 
4128 /* Dump a nested name for arbitrary tree T.  Sometimes it won't have a
4129    name.  */
4130 
4131 bool
4132 dumper::impl::nested_name (tree t)
4133 {
4134   tree ti = NULL_TREE;
4135   int origin = -1;
4136   tree name = NULL_TREE;
4137 
4138   if (t && TREE_CODE (t) == TREE_BINFO)
4139     t = BINFO_TYPE (t);
4140 
4141   if (t && TYPE_P (t))
4142     t = TYPE_NAME (t);
4143 
4144   if (t && DECL_P (t))
4145     {
4146       if (t == global_namespace || DECL_TEMPLATE_PARM_P (t))
4147 	;
4148       else if (tree ctx = DECL_CONTEXT (t))
4149 	if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
4150 	    || nested_name (ctx))
4151 	  fputs ("::", stream);
4152 
4153       int use_tpl;
4154       ti = node_template_info (t, use_tpl);
4155       if (ti && TREE_CODE (TI_TEMPLATE (ti)) == TEMPLATE_DECL
4156 	  && (DECL_TEMPLATE_RESULT (TI_TEMPLATE (ti)) == t))
4157 	t = TI_TEMPLATE (ti);
4158       tree not_tmpl = t;
4159       if (TREE_CODE (t) == TEMPLATE_DECL)
4160 	{
4161 	  fputs ("template ", stream);
4162 	  not_tmpl = DECL_TEMPLATE_RESULT (t);
4163 	}
4164 
4165       if (not_tmpl
4166 	  && DECL_P (not_tmpl)
4167 	  && DECL_LANG_SPECIFIC (not_tmpl)
4168 	  && DECL_MODULE_IMPORT_P (not_tmpl))
4169 	{
4170 	  /* We need to be careful here, so as to not explode on
4171 	     inconsistent data -- we're probably debugging, because
4172 	     Something Is Wrong.  */
4173 	  unsigned index = import_entity_index (t, true);
4174 	  if (!(index & ~(~0u >> 1)))
4175 	    origin = import_entity_module (index)->mod;
4176 	  else if (index > ~(~0u >> 1))
4177 	    /* An imported partition member that we're emitting.  */
4178 	    origin = 0;
4179 	  else
4180 	    origin = -2;
4181 	}
4182 
4183       name = DECL_NAME (t) ? DECL_NAME (t)
4184 	: HAS_DECL_ASSEMBLER_NAME_P (t) ? DECL_ASSEMBLER_NAME_RAW (t)
4185 	: NULL_TREE;
4186     }
4187   else
4188     name = t;
4189 
4190   if (name)
4191     switch (TREE_CODE (name))
4192       {
4193       default:
4194 	fputs ("#unnamed#", stream);
4195 	break;
4196 
4197       case IDENTIFIER_NODE:
4198 	fwrite (IDENTIFIER_POINTER (name), 1, IDENTIFIER_LENGTH (name), stream);
4199 	break;
4200 
4201       case INTEGER_CST:
4202 	print_hex (wi::to_wide (name), stream);
4203 	break;
4204 
4205       case STRING_CST:
4206 	/* If TREE_TYPE is NULL, this is a raw string.  */
4207 	fwrite (TREE_STRING_POINTER (name), 1,
4208 		TREE_STRING_LENGTH (name) - (TREE_TYPE (name) != NULL_TREE),
4209 		stream);
4210 	break;
4211       }
4212   else
4213     fputs ("#null#", stream);
4214 
4215   if (origin >= 0)
4216     {
4217       const module_state *module = (*modules)[origin];
4218       fprintf (stream, "@%s:%d", !module ? "" : !module->name ? "(unnamed)"
4219 	       : module->get_flatname (), origin);
4220     }
4221   else if (origin == -2)
4222     fprintf (stream, "@???");
4223 
4224   if (ti)
4225     {
4226       tree args = INNERMOST_TEMPLATE_ARGS (TI_ARGS (ti));
4227       fputs ("<", stream);
4228       if (args)
4229 	for (int ix = 0; ix != TREE_VEC_LENGTH (args); ix++)
4230 	  {
4231 	    if (ix)
4232 	      fputs (",", stream);
4233 	    nested_name (TREE_VEC_ELT (args, ix));
4234 	  }
4235       fputs (">", stream);
4236     }
4237 
4238   return true;
4239 }
4240 
4241 /* Formatted dumping.  If FORMAT begins with '+', do not emit a
4242    trailing newline.  (Normally one is appended.)
4243    Escapes:
4244       %C - tree_code
4245       %I - identifier
4246       %M - module_state
4247       %N - name -- DECL_NAME
4248       %P - context:name pair
4249       %R - unsigned:unsigned ratio
4250       %S - symbol -- DECL_ASSEMBLER_NAME
4251       %U - long unsigned
4252       %V - version
4253       --- the following are printf-like, but without its flexibility
4254       %d - decimal int
4255       %p - pointer
4256       %s - string
4257       %u - unsigned int
4258       %x - hex int
4259 
4260   We do not implement the printf modifiers.  */
4261 
4262 bool
4263 dumper::operator () (const char *format, ...)
4264 {
4265   if (!(*this) ())
4266     return false;
4267 
4268   bool no_nl = format[0] == '+';
4269   format += no_nl;
4270 
4271   if (dumps->bol)
4272     {
4273       /* Module import indent.  */
4274       if (unsigned depth = dumps->stack.length () - 1)
4275 	{
4276 	  const char *prefix = ">>>>";
4277 	  fprintf (dumps->stream, (depth <= strlen (prefix)
4278 				   ? &prefix[strlen (prefix) - depth]
4279 				   : ">.%d.>"), depth);
4280 	}
4281 
4282       /* Local indent.  */
4283       if (unsigned indent = dumps->indent)
4284 	{
4285 	  const char *prefix = "      ";
4286 	  fprintf (dumps->stream, (indent <= strlen (prefix)
4287 				   ? &prefix[strlen (prefix) - indent]
4288 				   : "  .%d.  "), indent);
4289 	}
4290       dumps->bol = false;
4291     }
4292 
4293   va_list args;
4294   va_start (args, format);
4295   while (const char *esc = strchr (format, '%'))
4296     {
4297       fwrite (format, 1, (size_t)(esc - format), dumps->stream);
4298       format = ++esc;
4299       switch (*format++)
4300 	{
4301 	default:
4302 	  gcc_unreachable ();
4303 
4304 	case '%':
4305 	  fputc ('%', dumps->stream);
4306 	  break;
4307 
4308 	case 'C': /* Code */
4309 	  {
4310 	    tree_code code = (tree_code)va_arg (args, unsigned);
4311 	    fputs (get_tree_code_name (code), dumps->stream);
4312 	  }
4313 	  break;
4314 
4315 	case 'I': /* Identifier.  */
4316 	  {
4317 	    tree t = va_arg (args, tree);
4318 	    dumps->nested_name (t);
4319 	  }
4320 	  break;
4321 
4322 	case 'M': /* Module. */
4323 	  {
4324 	    const char *str = "(none)";
4325 	    if (module_state *m = va_arg (args, module_state *))
4326 	      {
4327 		if (!m->has_location ())
4328 		  str = "(detached)";
4329 		else
4330 		  str = m->get_flatname ();
4331 	      }
4332 	    fputs (str, dumps->stream);
4333 	  }
4334 	  break;
4335 
4336 	case 'N': /* Name.  */
4337 	  {
4338 	    tree t = va_arg (args, tree);
4339 	    while (t && TREE_CODE (t) == OVERLOAD)
4340 	      t = OVL_FUNCTION (t);
4341 	    fputc ('\'', dumps->stream);
4342 	    dumps->nested_name (t);
4343 	    fputc ('\'', dumps->stream);
4344 	  }
4345 	  break;
4346 
4347 	case 'P': /* Pair.  */
4348 	  {
4349 	    tree ctx = va_arg (args, tree);
4350 	    tree name = va_arg (args, tree);
4351 	    fputc ('\'', dumps->stream);
4352 	    dumps->nested_name (ctx);
4353 	    if (ctx && ctx != global_namespace)
4354 	      fputs ("::", dumps->stream);
4355 	    dumps->nested_name (name);
4356 	    fputc ('\'', dumps->stream);
4357 	  }
4358 	  break;
4359 
4360 	case 'R': /* Ratio */
4361 	  {
4362 	    unsigned a = va_arg (args, unsigned);
4363 	    unsigned b = va_arg (args, unsigned);
4364 	    fprintf (dumps->stream, "%.1f", (float) a / (b + !b));
4365 	  }
4366 	  break;
4367 
4368 	case 'S': /* Symbol name */
4369 	  {
4370 	    tree t = va_arg (args, tree);
4371 	    if (t && TYPE_P (t))
4372 	      t = TYPE_NAME (t);
4373 	    if (t && HAS_DECL_ASSEMBLER_NAME_P (t)
4374 		&& DECL_ASSEMBLER_NAME_SET_P (t))
4375 	      {
4376 		fputc ('(', dumps->stream);
4377 		fputs (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t)),
4378 		       dumps->stream);
4379 		fputc (')', dumps->stream);
4380 	      }
4381 	  }
4382 	  break;
4383 
4384 	case 'U': /* long unsigned.  */
4385 	  {
4386 	    unsigned long u = va_arg (args, unsigned long);
4387 	    fprintf (dumps->stream, "%lu", u);
4388 	  }
4389 	  break;
4390 
4391 	case 'V': /* Version.  */
4392 	  {
4393 	    unsigned v = va_arg (args, unsigned);
4394 	    verstr_t string;
4395 
4396 	    version2string (v, string);
4397 	    fputs (string, dumps->stream);
4398 	  }
4399 	  break;
4400 
4401 	case 'c': /* Character.  */
4402 	  {
4403 	    int c = va_arg (args, int);
4404 	    fputc (c, dumps->stream);
4405 	  }
4406 	  break;
4407 
4408 	case 'd': /* Decimal Int.  */
4409 	  {
4410 	    int d = va_arg (args, int);
4411 	    fprintf (dumps->stream, "%d", d);
4412 	  }
4413 	  break;
4414 
4415 	case 'p': /* Pointer. */
4416 	  {
4417 	    void *p = va_arg (args, void *);
4418 	    fprintf (dumps->stream, "%p", p);
4419 	  }
4420 	  break;
4421 
4422 	case 's': /* String. */
4423 	  {
4424 	    const char *s = va_arg (args, char *);
4425 	    gcc_checking_assert (s);
4426 	    fputs (s, dumps->stream);
4427 	  }
4428 	  break;
4429 
4430 	case 'u': /* Unsigned.  */
4431 	  {
4432 	    unsigned u = va_arg (args, unsigned);
4433 	    fprintf (dumps->stream, "%u", u);
4434 	  }
4435 	  break;
4436 
4437 	case 'x': /* Hex. */
4438 	  {
4439 	    unsigned x = va_arg (args, unsigned);
4440 	    fprintf (dumps->stream, "%x", x);
4441 	  }
4442 	  break;
4443 	}
4444     }
4445   fputs (format, dumps->stream);
4446   va_end (args);
4447   if (!no_nl)
4448     {
4449       dumps->bol = true;
4450       fputc ('\n', dumps->stream);
4451     }
4452   return true;
4453 }
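/* Example uses, taken from elsewhere in this file:

     dump ("Wrote %u bytes in %u blocks", lengths[3], spans[3]);
     dump (from ? "Starting module %M (from %M)"
	   : "Starting module %M", m, from);

   A leading '+' in the format suppresses the newline, so a single line
   can be assembled from several calls.  */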
4454 
4455 struct note_def_cache_hasher : ggc_cache_ptr_hash<tree_node>
4456 {
4457   static int keep_cache_entry (tree t)
4458   {
4459     if (!CHECKING_P)
4460       /* GTY is unfortunately not clever enough to conditionalize
4461 	 this.  */
4462       gcc_unreachable ();
4463 
4464     if (ggc_marked_p (t))
4465       return -1;
4466 
4467     unsigned n = dump.push (NULL);
4468     /* This might or might not be an error.  We should note its
4469        dropping whichever.  */
4470     dump () && dump ("Dropping %N from note_defs table", t);
4471     dump.pop (n);
4472 
4473     return 0;
4474   }
4475 };
4476 
4477 /* We should stream each definition at most once.
4478    This needs to be a cache because there are cases where a definition
4479    ends up not being retained, and we need to drop those so we don't
4480    get confused if memory is reallocated.  */
4481 typedef hash_table<note_def_cache_hasher> note_defs_table_t;
4482 static GTY((cache)) note_defs_table_t *note_defs;
4483 
4484 void
4485 trees_in::assert_definition (tree decl ATTRIBUTE_UNUSED,
4486 			     bool installing ATTRIBUTE_UNUSED)
4487 {
4488 #if CHECKING_P
4489   tree *slot = note_defs->find_slot (decl, installing ? INSERT : NO_INSERT);
4490   tree not_tmpl = STRIP_TEMPLATE (decl);
4491   if (installing)
4492     {
4493       /* We must be inserting for the first time.  */
4494       gcc_assert (!*slot);
4495       *slot = decl;
4496     }
4497   else
4498     /* If this is not the mergeable entity, it should not be in the
4499        table.  If it is a non-global-module mergeable entity, it
4500        should be in the table.  Global module entities could have been
4501        defined textually in the current TU and so might or might not
4502        be present.  */
4503     gcc_assert (!is_duplicate (decl)
4504 		? !slot
4505 		: (slot
4506 		   || !DECL_LANG_SPECIFIC (not_tmpl)
4507 		   || !DECL_MODULE_PURVIEW_P (not_tmpl)
4508 		   || (!DECL_MODULE_IMPORT_P (not_tmpl)
4509 		       && header_module_p ())));
4510 
4511   if (not_tmpl != decl)
4512     gcc_assert (!note_defs->find_slot (not_tmpl, NO_INSERT));
4513 #endif
4514 }
4515 
4516 void
4517 trees_out::assert_definition (tree decl ATTRIBUTE_UNUSED)
4518 {
4519 #if CHECKING_P
4520   tree *slot = note_defs->find_slot (decl, INSERT);
4521   gcc_assert (!*slot);
4522   *slot = decl;
4523   if (TREE_CODE (decl) == TEMPLATE_DECL)
4524     gcc_assert (!note_defs->find_slot (DECL_TEMPLATE_RESULT (decl), NO_INSERT));
4525 #endif
4526 }
4527 
4528 /********************************************************************/
4529 static bool
4530 noisy_p ()
4531 {
4532   if (quiet_flag)
4533     return false;
4534 
4535   pp_needs_newline (global_dc->printer) = true;
4536   diagnostic_set_last_function (global_dc, (diagnostic_info *) NULL);
4537 
4538   return true;
4539 }
4540 
4541 /* Set the cmi repo.  Strip trailing '/', '.' becomes NULL.  */
4542 
4543 static void
4544 set_cmi_repo (const char *r)
4545 {
4546   XDELETEVEC (cmi_repo);
4547   XDELETEVEC (cmi_path);
4548   cmi_path_alloc = 0;
4549 
4550   cmi_repo = NULL;
4551   cmi_repo_length = 0;
4552 
4553   if (!r || !r[0])
4554     return;
4555 
4556   size_t len = strlen (r);
4557   cmi_repo = XNEWVEC (char, len + 1);
4558   memcpy (cmi_repo, r, len + 1);
4559 
4560   if (len > 1 && IS_DIR_SEPARATOR (cmi_repo[len-1]))
4561     len--;
4562   if (len == 1 && cmi_repo[0] == '.')
4563     len--;
4564   cmi_repo[len] = 0;
4565   cmi_repo_length = len;
4566 }
4567 
4568 /* TO is a repo-relative name.  Provide one that we may use from where
4569    we are.  */
4570 
4571 static const char *
4572 maybe_add_cmi_prefix (const char *to, size_t *len_p = NULL)
4573 {
4574   size_t len = len_p || cmi_repo_length ? strlen (to) : 0;
4575 
4576   if (cmi_repo_length && !IS_ABSOLUTE_PATH (to))
4577     {
4578       if (cmi_path_alloc < cmi_repo_length + len + 2)
4579 	{
4580 	  XDELETEVEC (cmi_path);
4581 	  cmi_path_alloc = cmi_repo_length + len * 2 + 2;
4582 	  cmi_path = XNEWVEC (char, cmi_path_alloc);
4583 
4584 	  memcpy (cmi_path, cmi_repo, cmi_repo_length);
4585 	  cmi_path[cmi_repo_length] = DIR_SEPARATOR;
4586 	}
4587 
4588       memcpy (&cmi_path[cmi_repo_length + 1], to, len + 1);
4589       len += cmi_repo_length + 1;
4590       to = cmi_path;
4591     }
4592 
4593   if (len_p)
4594     *len_p = len;
4595 
4596   return to;
4597 }
4598 
4599 /* Try and create the directories of PATH.  */
4600 
4601 static void
4602 create_dirs (char *path)
4603 {
4604   /* Try and create the missing directories.  */
4605   for (char *base = path; *base; base++)
4606     if (IS_DIR_SEPARATOR (*base))
4607       {
4608 	char sep = *base;
4609 	*base = 0;
4610 	int failed = mkdir (path, S_IRWXU | S_IRWXG | S_IRWXO);
4611 	dump () && dump ("Mkdir ('%s') errno:=%u", path, failed ? errno : 0);
4612 	*base = sep;
4613 	if (failed
4614 	    /* Maybe racing with another creator (of a *different*
4615 	       module).  */
4616 	    && errno != EEXIST)
4617 	  break;
4618       }
4619 }
4620 
4621 /* Given a CLASSTYPE_DECL_LIST VALUE get the template friend decl,
4622    if that's what this is.  */
4623 
4624 static tree
4625 friend_from_decl_list (tree frnd)
4626 {
4627   tree res = frnd;
4628 
4629   if (TREE_CODE (frnd) != TEMPLATE_DECL)
4630     {
4631       tree tmpl = NULL_TREE;
4632       if (TYPE_P (frnd))
4633 	{
4634 	  res = TYPE_NAME (frnd);
4635 	  if (CLASSTYPE_TEMPLATE_INFO (frnd))
4636 	    tmpl = CLASSTYPE_TI_TEMPLATE (frnd);
4637 	}
4638       else if (DECL_TEMPLATE_INFO (frnd))
4639 	{
4640 	  tmpl = DECL_TI_TEMPLATE (frnd);
4641 	  if (TREE_CODE (tmpl) != TEMPLATE_DECL)
4642 	    tmpl = NULL_TREE;
4643 	}
4644 
4645       if (tmpl && DECL_TEMPLATE_RESULT (tmpl) == res)
4646 	res = tmpl;
4647     }
4648 
4649   return res;
4650 }
4651 
4652 static tree
4653 find_enum_member (tree ctx, tree name)
4654 {
4655   for (tree values = TYPE_VALUES (ctx);
4656        values; values = TREE_CHAIN (values))
4657     if (DECL_NAME (TREE_VALUE (values)) == name)
4658       return TREE_VALUE (values);
4659 
4660   return NULL_TREE;
4661 }
4662 
4663 /********************************************************************/
4664 /* Instrumentation gathered writing bytes.  */
4665 
4666 void
4667 bytes_out::instrument ()
4668 {
4669   dump ("Wrote %u bytes in %u blocks", lengths[3], spans[3]);
4670   dump ("Wrote %u bits in %u bytes", lengths[0] + lengths[1], lengths[2]);
4671   for (unsigned ix = 0; ix < 2; ix++)
4672     dump ("  %u %s spans of %R bits", spans[ix],
4673 	  ix ? "one" : "zero", lengths[ix], spans[ix]);
4674   dump ("  %u blocks with %R bits padding", spans[2],
4675 	lengths[2] * 8 - (lengths[0] + lengths[1]), spans[2]);
4676 }
4677 
4678 /* Instrumentation gathered writing trees.  */
4679 void
4680 trees_out::instrument ()
4681 {
4682   if (dump (""))
4683     {
4684       bytes_out::instrument ();
4685       dump ("Wrote:");
4686       dump ("  %u decl trees", decl_val_count);
4687       dump ("  %u other trees", tree_val_count);
4688       dump ("  %u back references", back_ref_count);
4689       dump ("  %u null trees", null_count);
4690     }
4691 }
4692 
4693 /* Setup and teardown for a tree walk.  */
4694 
4695 void
4696 trees_out::begin ()
4697 {
4698   gcc_assert (!streaming_p () || !tree_map.elements ());
4699 
4700   mark_trees ();
4701   if (streaming_p ())
4702     parent::begin ();
4703 }
4704 
4705 unsigned
4706 trees_out::end (elf_out *sink, unsigned name, unsigned *crc_ptr)
4707 {
4708   gcc_checking_assert (streaming_p ());
4709 
4710   unmark_trees ();
4711   return parent::end (sink, name, crc_ptr);
4712 }
4713 
4714 void
4715 trees_out::end ()
4716 {
4717   gcc_assert (!streaming_p ());
4718 
4719   unmark_trees ();
4720   /* Do not parent::end -- we weren't streaming.  */
4721 }
4722 
4723 void
4724 trees_out::mark_trees ()
4725 {
4726   if (size_t size = tree_map.elements ())
4727     {
4728       /* This isn't our first rodeo, destroy and recreate the
4729 	 tree_map.  I'm a bad bad man.  Use the previous size as a
4730 	 guess for the next one (so not all bad).  */
4731       tree_map.~ptr_int_hash_map ();
4732       new (&tree_map) ptr_int_hash_map (size);
4733     }
4734 
4735   /* Install the fixed trees, with +ve references.  */
4736   unsigned limit = fixed_trees->length ();
4737   for (unsigned ix = 0; ix != limit; ix++)
4738     {
4739       tree val = (*fixed_trees)[ix];
4740       bool existed = tree_map.put (val, ix + tag_fixed);
4741       gcc_checking_assert (!TREE_VISITED (val) && !existed);
4742       TREE_VISITED (val) = true;
4743     }
4744 
4745   ref_num = 0;
4746 }
4747 
4748 /* Unmark the trees we encountered.  */
4749 
4750 void
4751 trees_out::unmark_trees ()
4752 {
4753   ptr_int_hash_map::iterator end (tree_map.end ());
4754   for (ptr_int_hash_map::iterator iter (tree_map.begin ()); iter != end; ++iter)
4755     {
4756       tree node = reinterpret_cast<tree> ((*iter).first);
4757       int ref = (*iter).second;
4758       /* We should have visited the node, and converted its mergeable
4759 	 reference to a regular reference.  */
4760       gcc_checking_assert (TREE_VISITED (node)
4761 			   && (ref <= tag_backref || ref >= tag_fixed));
4762       TREE_VISITED (node) = false;
4763     }
4764 }
4765 
4766 /* Mark DECL for by-value walking.  We do this by inserting it into
4767    the tree map with a reference of zero.  May be called multiple
4768    times on the same node.  */
4769 
4770 void
4771 trees_out::mark_by_value (tree decl)
4772 {
4773   gcc_checking_assert (DECL_P (decl)
4774 		       /* Enum consts are INTEGER_CSTS.  */
4775 		       || TREE_CODE (decl) == INTEGER_CST
4776 		       || TREE_CODE (decl) == TREE_BINFO);
4777 
4778   if (TREE_VISITED (decl))
4779     /* Must already be forced or fixed.  */
4780     gcc_checking_assert (*tree_map.get (decl) >= tag_value);
4781   else
4782     {
4783       bool existed = tree_map.put (decl, tag_value);
4784       gcc_checking_assert (!existed);
4785       TREE_VISITED (decl) = true;
4786     }
4787 }
4788 
4789 int
4790 trees_out::get_tag (tree t)
4791 {
4792   gcc_checking_assert (TREE_VISITED (t));
4793   return *tree_map.get (t);
4794 }
4795 
4796 /* Insert T into the map, return its tag number.    */
4797 
4798 int
4799 trees_out::insert (tree t, walk_kind walk)
4800 {
4801   gcc_checking_assert (walk != WK_normal || !TREE_VISITED (t));
4802   int tag = --ref_num;
4803   bool existed;
4804   int &slot = tree_map.get_or_insert (t, &existed);
4805   gcc_checking_assert (TREE_VISITED (t) == existed
4806 		       && (!existed
4807 			   || (walk == WK_value && slot == tag_value)));
4808   TREE_VISITED (t) = true;
4809   slot = tag;
4810 
4811   return tag;
4812 }
4813 
4814 /* Insert T into the backreference array.  Return its back reference
4815    number.  */
4816 
4817 int
4818 trees_in::insert (tree t)
4819 {
4820   gcc_checking_assert (t || get_overrun ());
4821   back_refs.safe_push (t);
4822   return -(int)back_refs.length ();
4823 }
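/* A minimal sketch, not the real streamer, of how the negative tag
   numbering stays in sync: the writer hands out -1, -2, ... as it first
   visits each node, and the reader pushes nodes into a vector so the
   same negative number denotes the same node.  Names (toy_writer,
   toy_reader) are illustrative only.  */
#if 0
#include <vector>
#include <unordered_map>

struct toy_writer
{
  std::unordered_map<const void *, int> tags;
  int ref_num = 0;

  /* First visit of NODE: allocate the next back reference.  */
  int insert (const void *node)
  { return tags[node] = --ref_num; }
};

struct toy_reader
{
  std::vector<const void *> back_refs;

  /* Reader-side insertion yields the matching negative tag.  */
  int insert (const void *node)
  {
    back_refs.push_back (node);
    return -(int) back_refs.size ();
  }

  /* Tag -1 is the first node streamed.  */
  const void *lookup (int tag) const
  { return back_refs[-tag - 1]; }
};
#endif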
4824 
4825 /* A chained set of decls.  */
4826 
4827 void
4828 trees_out::chained_decls (tree decls)
4829 {
4830   for (; decls; decls = DECL_CHAIN (decls))
4831     {
4832       if (VAR_OR_FUNCTION_DECL_P (decls)
4833 	  && DECL_LOCAL_DECL_P (decls))
4834 	{
4835 	  /* Make sure this is the first encounter, and mark for
4836 	     walk-by-value.  */
4837 	  gcc_checking_assert (!TREE_VISITED (decls)
4838 			       && !DECL_TEMPLATE_INFO (decls));
4839 	  mark_by_value (decls);
4840 	}
4841       tree_node (decls);
4842     }
4843   tree_node (NULL_TREE);
4844 }
4845 
4846 tree
4847 trees_in::chained_decls ()
4848 {
4849   tree decls = NULL_TREE;
4850   for (tree *chain = &decls;;)
4851     if (tree decl = tree_node ())
4852       {
4853 	if (!DECL_P (decl) || DECL_CHAIN (decl))
4854 	  {
4855 	    set_overrun ();
4856 	    break;
4857 	  }
4858 	*chain = decl;
4859 	chain = &DECL_CHAIN (decl);
4860       }
4861     else
4862       break;
4863 
4864   return decls;
4865 }
4866 
4867 /* A vector of decls following DECL_CHAIN.  */
4868 
4869 void
4870 trees_out::vec_chained_decls (tree decls)
4871 {
4872   if (streaming_p ())
4873     {
4874       unsigned len = 0;
4875 
4876       for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
4877 	len++;
4878       u (len);
4879     }
4880 
4881   for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
4882     {
4883       if (DECL_IMPLICIT_TYPEDEF_P (decl)
4884 	  && TYPE_NAME (TREE_TYPE (decl)) != decl)
4885 	/* An anonymous struct with a typedef name.  An odd thing to
4886 	   write.  */
4887 	tree_node (NULL_TREE);
4888       else
4889 	tree_node (decl);
4890     }
4891 }
4892 
4893 vec<tree, va_heap> *
4894 trees_in::vec_chained_decls ()
4895 {
4896   vec<tree, va_heap> *v = NULL;
4897 
4898   if (unsigned len = u ())
4899     {
4900       vec_alloc (v, len);
4901 
4902       for (unsigned ix = 0; ix < len; ix++)
4903 	{
4904 	  tree decl = tree_node ();
4905 	  if (decl && !DECL_P (decl))
4906 	    {
4907 	      set_overrun ();
4908 	      break;
4909 	    }
4910 	  v->quick_push (decl);
4911 	}
4912 
4913       if (get_overrun ())
4914 	{
4915 	  vec_free (v);
4916 	  v = NULL;
4917 	}
4918     }
4919 
4920   return v;
4921 }
4922 
4923 /* A vector of trees.  */
4924 
4925 void
4926 trees_out::tree_vec (vec<tree, va_gc> *v)
4927 {
4928   unsigned len = vec_safe_length (v);
4929   if (streaming_p ())
4930     u (len);
4931   for (unsigned ix = 0; ix != len; ix++)
4932     tree_node ((*v)[ix]);
4933 }
4934 
4935 vec<tree, va_gc> *
4936 trees_in::tree_vec ()
4937 {
4938   vec<tree, va_gc> *v = NULL;
4939   if (unsigned len = u ())
4940     {
4941       vec_alloc (v, len);
4942       for (unsigned ix = 0; ix != len; ix++)
4943 	v->quick_push (tree_node ());
4944     }
4945   return v;
4946 }
4947 
4948 /* A vector of tree pairs.  */
4949 
4950 void
4951 trees_out::tree_pair_vec (vec<tree_pair_s, va_gc> *v)
4952 {
4953   unsigned len = vec_safe_length (v);
4954   if (streaming_p ())
4955     u (len);
4956   if (len)
4957     for (unsigned ix = 0; ix != len; ix++)
4958       {
4959 	tree_pair_s const &s = (*v)[ix];
4960 	tree_node (s.purpose);
4961 	tree_node (s.value);
4962       }
4963 }
4964 
4965 vec<tree_pair_s, va_gc> *
4966 trees_in::tree_pair_vec ()
4967 {
4968   vec<tree_pair_s, va_gc> *v = NULL;
4969   if (unsigned len = u ())
4970     {
4971       vec_alloc (v, len);
4972       for (unsigned ix = 0; ix != len; ix++)
4973 	{
4974 	  tree_pair_s s;
4975 	  s.purpose = tree_node ();
4976 	  s.value = tree_node ();
4977 	  v->quick_push (s);
4978       }
4979     }
4980   return v;
4981 }
4982 
4983 void
4984 trees_out::tree_list (tree list, bool has_purpose)
4985 {
4986   for (; list; list = TREE_CHAIN (list))
4987     {
4988       gcc_checking_assert (TREE_VALUE (list));
4989       tree_node (TREE_VALUE (list));
4990       if (has_purpose)
4991 	tree_node (TREE_PURPOSE (list));
4992     }
4993   tree_node (NULL_TREE);
4994 }
4995 
4996 tree
4997 trees_in::tree_list (bool has_purpose)
4998 {
4999   tree res = NULL_TREE;
5000 
5001   for (tree *chain = &res; tree value = tree_node ();
5002        chain = &TREE_CHAIN (*chain))
5003     {
5004       tree purpose = has_purpose ? tree_node () : NULL_TREE;
5005       *chain = build_tree_list (purpose, value);
5006     }
5007 
5008   return res;
5009 }
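/* The chain and list streamers above share one convention: write each
   element in order and finish with a null tree; the reader rebuilds the
   chain until it sees that null.  A toy version of the protocol, with
   illustrative names and plain C++ in place of trees:  */
#if 0
#include <vector>
#include <cstddef>

/* Writer: append each (non-zero) element, then a 0 terminator.  */
static void
write_chain (std::vector<int> &stream, const std::vector<int> &elts)
{
  for (int e : elts)
    stream.push_back (e);
  stream.push_back (0);
}

/* Reader: consume elements until the terminator.  */
static std::vector<int>
read_chain (const std::vector<int> &stream, size_t &pos)
{
  std::vector<int> elts;
  while (int e = stream[pos++])
    elts.push_back (e);
  return elts;
}
#endif
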
5010 /* Start tree write.  Write information to allocate the receiving
5011    node.  */
5012 
5013 void
5014 trees_out::start (tree t, bool code_streamed)
5015 {
5016   if (TYPE_P (t))
5017     {
5018       enum tree_code code = TREE_CODE (t);
5019       gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
5020       /* All these types are TYPE_NON_COMMON.  */
5021       gcc_checking_assert (code == RECORD_TYPE
5022 			   || code == UNION_TYPE
5023 			   || code == ENUMERAL_TYPE
5024 			   || code == TEMPLATE_TYPE_PARM
5025 			   || code == TEMPLATE_TEMPLATE_PARM
5026 			   || code == BOUND_TEMPLATE_TEMPLATE_PARM);
5027     }
5028 
5029   if (!code_streamed)
5030     u (TREE_CODE (t));
5031 
5032   switch (TREE_CODE (t))
5033     {
5034     default:
5035       if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_vl_exp)
5036 	u (VL_EXP_OPERAND_LENGTH (t));
5037       break;
5038 
5039     case INTEGER_CST:
5040       u (TREE_INT_CST_NUNITS (t));
5041       u (TREE_INT_CST_EXT_NUNITS (t));
5042       u (TREE_INT_CST_OFFSET_NUNITS (t));
5043       break;
5044 
5045     case OMP_CLAUSE:
5046       state->extensions |= SE_OPENMP;
5047       u (OMP_CLAUSE_CODE (t));
5048       break;
5049 
5050     case STRING_CST:
5051       str (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
5052       break;
5053 
5054     case VECTOR_CST:
5055       u (VECTOR_CST_LOG2_NPATTERNS (t));
5056       u (VECTOR_CST_NELTS_PER_PATTERN (t));
5057       break;
5058 
5059     case TREE_BINFO:
5060       u (BINFO_N_BASE_BINFOS (t));
5061       break;
5062 
5063     case TREE_VEC:
5064       u (TREE_VEC_LENGTH (t));
5065       break;
5066 
5067     case FIXED_CST:
5068     case POLY_INT_CST:
5069       gcc_unreachable (); /* Not supported in C++.  */
5070       break;
5071 
5072     case IDENTIFIER_NODE:
5073     case SSA_NAME:
5074     case TARGET_MEM_REF:
5075     case TRANSLATION_UNIT_DECL:
5076       /* We shouldn't meet these.  */
5077       gcc_unreachable ();
5078       break;
5079     }
5080 }
5081 
5082 /* Start tree read.  Allocate the receiving node.  */
5083 
5084 tree
5085 trees_in::start (unsigned code)
5086 {
5087   tree t = NULL_TREE;
5088 
5089   if (!code)
5090     code = u ();
5091 
5092   switch (code)
5093     {
5094     default:
5095       if (code >= MAX_TREE_CODES)
5096 	{
5097 	fail:
5098 	  set_overrun ();
5099 	  return NULL_TREE;
5100 	}
5101       else if (TREE_CODE_CLASS (code) == tcc_vl_exp)
5102 	{
5103 	  unsigned ops = u ();
5104 	  t = build_vl_exp (tree_code (code), ops);
5105 	}
5106       else
5107 	t = make_node (tree_code (code));
5108       break;
5109 
5110     case INTEGER_CST:
5111       {
5112 	unsigned n = u ();
5113 	unsigned e = u ();
5114 	t = make_int_cst (n, e);
5115 	TREE_INT_CST_OFFSET_NUNITS(t) = u ();
5116       }
5117       break;
5118 
5119     case OMP_CLAUSE:
5120       {
5121 	if (!(state->extensions & SE_OPENMP))
5122 	  goto fail;
5123 
5124 	unsigned omp_code = u ();
5125 	t = build_omp_clause (UNKNOWN_LOCATION, omp_clause_code (omp_code));
5126       }
5127       break;
5128 
5129     case STRING_CST:
5130       {
5131 	size_t l;
5132 	const char *chars = str (&l);
5133 	t = build_string (l, chars);
5134       }
5135       break;
5136 
5137     case VECTOR_CST:
5138       {
5139 	unsigned log2_npats = u ();
5140 	unsigned elts_per = u ();
5141 	t = make_vector (log2_npats, elts_per);
5142       }
5143       break;
5144 
5145     case TREE_BINFO:
5146       t = make_tree_binfo (u ());
5147       break;
5148 
5149     case TREE_VEC:
5150       t = make_tree_vec (u ());
5151       break;
5152 
5153     case FIXED_CST:
5154     case IDENTIFIER_NODE:
5155     case POLY_INT_CST:
5156     case SSA_NAME:
5157     case TARGET_MEM_REF:
5158     case TRANSLATION_UNIT_DECL:
5159       goto fail;
5160     }
5161 
5162   return t;
5163 }
5164 
5165 /* The structure streamers access the raw fields, because the
5166    alternative of using the accessor macros can require using
5167    different accessors for the same underlying field, depending on the
5168    tree code.  That's both confusing and annoying.  */
5169 
5170 /* Read & write the core boolean flags.  */
5171 
5172 void
5173 trees_out::core_bools (tree t)
5174 {
5175 #define WB(X) (b (X))
5176   tree_code code = TREE_CODE (t);
5177 
5178   WB (t->base.side_effects_flag);
5179   WB (t->base.constant_flag);
5180   WB (t->base.addressable_flag);
5181   WB (t->base.volatile_flag);
5182   WB (t->base.readonly_flag);
5183   /* base.asm_written_flag is a property of the current TU's use of
5184      this decl.  */
5185   WB (t->base.nowarning_flag);
5186   /* base.visited read as zero (it's set for writer, because that's
5187      how we mark nodes).  */
5188   /* base.used_flag is not streamed.  Readers may set TREE_USED of
5189      decls they use.  */
5190   WB (t->base.nothrow_flag);
5191   WB (t->base.static_flag);
5192   if (TREE_CODE_CLASS (code) != tcc_type)
5193     /* This is TYPE_CACHED_VALUES_P for types.  */
5194     WB (t->base.public_flag);
5195   WB (t->base.private_flag);
5196   WB (t->base.protected_flag);
5197   WB (t->base.deprecated_flag);
5198   WB (t->base.default_def_flag);
5199 
5200   switch (code)
5201     {
5202     case CALL_EXPR:
5203     case INTEGER_CST:
5204     case SSA_NAME:
5205     case TARGET_MEM_REF:
5206     case TREE_VEC:
5207       /* These use different base.u fields.  */
5208       break;
5209 
5210     default:
5211       WB (t->base.u.bits.lang_flag_0);
5212       bool flag_1 = t->base.u.bits.lang_flag_1;
5213       if (!flag_1)
5214 	;
5215       else if (code == TEMPLATE_INFO)
5216 	/* This is TI_PENDING_TEMPLATE_FLAG, not relevant to reader.  */
5217 	flag_1 = false;
5218       else if (code == VAR_DECL)
5219 	{
5220 	  /* This is DECL_INITIALIZED_P.  */
5221 	  if (TREE_CODE (DECL_CONTEXT (t)) != FUNCTION_DECL)
5222 	    /* We'll set this when reading the definition.  */
5223 	    flag_1 = false;
5224 	}
5225       WB (flag_1);
5226       WB (t->base.u.bits.lang_flag_2);
5227       WB (t->base.u.bits.lang_flag_3);
5228       WB (t->base.u.bits.lang_flag_4);
5229       WB (t->base.u.bits.lang_flag_5);
5230       WB (t->base.u.bits.lang_flag_6);
5231       WB (t->base.u.bits.saturating_flag);
5232       WB (t->base.u.bits.unsigned_flag);
5233       WB (t->base.u.bits.packed_flag);
5234       WB (t->base.u.bits.user_align);
5235       WB (t->base.u.bits.nameless_flag);
5236       WB (t->base.u.bits.atomic_flag);
5237       break;
5238     }
5239 
5240   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
5241     {
5242       WB (t->type_common.no_force_blk_flag);
5243       WB (t->type_common.needs_constructing_flag);
5244       WB (t->type_common.transparent_aggr_flag);
5245       WB (t->type_common.restrict_flag);
5246       WB (t->type_common.string_flag);
5247       WB (t->type_common.lang_flag_0);
5248       WB (t->type_common.lang_flag_1);
5249       WB (t->type_common.lang_flag_2);
5250       WB (t->type_common.lang_flag_3);
5251       WB (t->type_common.lang_flag_4);
5252       WB (t->type_common.lang_flag_5);
5253       WB (t->type_common.lang_flag_6);
5254       WB (t->type_common.typeless_storage);
5255     }
5256 
5257   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
5258     {
5259       WB (t->decl_common.nonlocal_flag);
5260       WB (t->decl_common.virtual_flag);
5261       WB (t->decl_common.ignored_flag);
5262       WB (t->decl_common.abstract_flag);
5263       WB (t->decl_common.artificial_flag);
5264       WB (t->decl_common.preserve_flag);
5265       WB (t->decl_common.debug_expr_is_from);
5266       WB (t->decl_common.lang_flag_0);
5267       WB (t->decl_common.lang_flag_1);
5268       WB (t->decl_common.lang_flag_2);
5269       WB (t->decl_common.lang_flag_3);
5270       WB (t->decl_common.lang_flag_4);
5271       WB (t->decl_common.lang_flag_5);
5272       WB (t->decl_common.lang_flag_6);
5273       WB (t->decl_common.lang_flag_7);
5274       WB (t->decl_common.lang_flag_8);
5275       WB (t->decl_common.decl_flag_0);
5276 
5277       {
5278 	/* DECL_EXTERNAL -> decl_flag_1
5279 	     == it is defined elsewhere
5280 	   DECL_NOT_REALLY_EXTERN -> base.not_really_extern
5281 	     == that was a lie, it is here  */
5282 
5283 	bool is_external = t->decl_common.decl_flag_1;
5284 	if (!is_external)
5285 	  /* decl_flag_1 is DECL_EXTERNAL. Things we emit here, might
5286 	     well be external from the POV of an importer.  */
5287 	  // FIXME: Do we need to know if this is a TEMPLATE_RESULT --
5288 	  // a flag from the caller?
5289 	  switch (code)
5290 	    {
5291 	    default:
5292 	      break;
5293 
5294 	    case VAR_DECL:
5295 	      if (TREE_PUBLIC (t)
5296 		  && !DECL_VAR_DECLARED_INLINE_P (t))
5297 		is_external = true;
5298 	      break;
5299 
5300 	    case FUNCTION_DECL:
5301 	      if (TREE_PUBLIC (t)
5302 		  && !DECL_DECLARED_INLINE_P (t))
5303 		is_external = true;
5304 	      break;
5305 	    }
5306 	WB (is_external);
5307       }
5308 
5309       WB (t->decl_common.decl_flag_2);
5310       WB (t->decl_common.decl_flag_3);
5311       WB (t->decl_common.not_gimple_reg_flag);
5312       WB (t->decl_common.decl_by_reference_flag);
5313       WB (t->decl_common.decl_read_flag);
5314       WB (t->decl_common.decl_nonshareable_flag);
5315     }
5316 
5317   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
5318     {
5319       WB (t->decl_with_vis.defer_output);
5320       WB (t->decl_with_vis.hard_register);
5321       WB (t->decl_with_vis.common_flag);
5322       WB (t->decl_with_vis.in_text_section);
5323       WB (t->decl_with_vis.in_constant_pool);
5324       WB (t->decl_with_vis.dllimport_flag);
5325       WB (t->decl_with_vis.weak_flag);
5326       WB (t->decl_with_vis.seen_in_bind_expr);
5327       WB (t->decl_with_vis.comdat_flag);
5328       WB (t->decl_with_vis.visibility_specified);
5329       WB (t->decl_with_vis.init_priority_p);
5330       WB (t->decl_with_vis.shadowed_for_var_p);
5331       WB (t->decl_with_vis.cxx_constructor);
5332       WB (t->decl_with_vis.cxx_destructor);
5333       WB (t->decl_with_vis.final);
5334       WB (t->decl_with_vis.regdecl_flag);
5335     }
5336 
5337   if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
5338     {
5339       WB (t->function_decl.static_ctor_flag);
5340       WB (t->function_decl.static_dtor_flag);
5341       WB (t->function_decl.uninlinable);
5342       WB (t->function_decl.possibly_inlined);
5343       WB (t->function_decl.novops_flag);
5344       WB (t->function_decl.returns_twice_flag);
5345       WB (t->function_decl.malloc_flag);
5346       WB (t->function_decl.declared_inline_flag);
5347       WB (t->function_decl.no_inline_warning_flag);
5348       WB (t->function_decl.no_instrument_function_entry_exit);
5349       WB (t->function_decl.no_limit_stack);
5350       WB (t->function_decl.disregard_inline_limits);
5351       WB (t->function_decl.pure_flag);
5352       WB (t->function_decl.looping_const_or_pure_flag);
5353 
5354       WB (t->function_decl.has_debug_args_flag);
5355       WB (t->function_decl.versioned_function);
5356 
5357       /* decl_type is a (misnamed) 2 bit discriminator.	 */
5358       unsigned kind = t->function_decl.decl_type;
5359       WB ((kind >> 0) & 1);
5360       WB ((kind >> 1) & 1);
5361     }
5362 #undef WB
5363 }
5364 
5365 bool
5366 trees_in::core_bools (tree t)
5367 {
5368 #define RB(X) ((X) = b ())
5369   tree_code code = TREE_CODE (t);
5370 
5371   RB (t->base.side_effects_flag);
5372   RB (t->base.constant_flag);
5373   RB (t->base.addressable_flag);
5374   RB (t->base.volatile_flag);
5375   RB (t->base.readonly_flag);
5376   /* base.asm_written_flag is not streamed.  */
5377   RB (t->base.nowarning_flag);
5378   /* base.visited is not streamed.  */
5379   /* base.used_flag is not streamed.  */
5380   RB (t->base.nothrow_flag);
5381   RB (t->base.static_flag);
5382   if (TREE_CODE_CLASS (code) != tcc_type)
5383     RB (t->base.public_flag);
5384   RB (t->base.private_flag);
5385   RB (t->base.protected_flag);
5386   RB (t->base.deprecated_flag);
5387   RB (t->base.default_def_flag);
5388 
5389   switch (code)
5390     {
5391     case CALL_EXPR:
5392     case INTEGER_CST:
5393     case SSA_NAME:
5394     case TARGET_MEM_REF:
5395     case TREE_VEC:
5396       /* These use different base.u fields.  */
5397       break;
5398 
5399     default:
5400       RB (t->base.u.bits.lang_flag_0);
5401       RB (t->base.u.bits.lang_flag_1);
5402       RB (t->base.u.bits.lang_flag_2);
5403       RB (t->base.u.bits.lang_flag_3);
5404       RB (t->base.u.bits.lang_flag_4);
5405       RB (t->base.u.bits.lang_flag_5);
5406       RB (t->base.u.bits.lang_flag_6);
5407       RB (t->base.u.bits.saturating_flag);
5408       RB (t->base.u.bits.unsigned_flag);
5409       RB (t->base.u.bits.packed_flag);
5410       RB (t->base.u.bits.user_align);
5411       RB (t->base.u.bits.nameless_flag);
5412       RB (t->base.u.bits.atomic_flag);
5413       break;
5414     }
5415 
5416   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
5417     {
5418       RB (t->type_common.no_force_blk_flag);
5419       RB (t->type_common.needs_constructing_flag);
5420       RB (t->type_common.transparent_aggr_flag);
5421       RB (t->type_common.restrict_flag);
5422       RB (t->type_common.string_flag);
5423       RB (t->type_common.lang_flag_0);
5424       RB (t->type_common.lang_flag_1);
5425       RB (t->type_common.lang_flag_2);
5426       RB (t->type_common.lang_flag_3);
5427       RB (t->type_common.lang_flag_4);
5428       RB (t->type_common.lang_flag_5);
5429       RB (t->type_common.lang_flag_6);
5430       RB (t->type_common.typeless_storage);
5431     }
5432 
5433   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
5434     {
5435       RB (t->decl_common.nonlocal_flag);
5436       RB (t->decl_common.virtual_flag);
5437       RB (t->decl_common.ignored_flag);
5438       RB (t->decl_common.abstract_flag);
5439       RB (t->decl_common.artificial_flag);
5440       RB (t->decl_common.preserve_flag);
5441       RB (t->decl_common.debug_expr_is_from);
5442       RB (t->decl_common.lang_flag_0);
5443       RB (t->decl_common.lang_flag_1);
5444       RB (t->decl_common.lang_flag_2);
5445       RB (t->decl_common.lang_flag_3);
5446       RB (t->decl_common.lang_flag_4);
5447       RB (t->decl_common.lang_flag_5);
5448       RB (t->decl_common.lang_flag_6);
5449       RB (t->decl_common.lang_flag_7);
5450       RB (t->decl_common.lang_flag_8);
5451       RB (t->decl_common.decl_flag_0);
5452       RB (t->decl_common.decl_flag_1);
5453       RB (t->decl_common.decl_flag_2);
5454       RB (t->decl_common.decl_flag_3);
5455       RB (t->decl_common.not_gimple_reg_flag);
5456       RB (t->decl_common.decl_by_reference_flag);
5457       RB (t->decl_common.decl_read_flag);
5458       RB (t->decl_common.decl_nonshareable_flag);
5459     }
5460 
5461   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
5462     {
5463       RB (t->decl_with_vis.defer_output);
5464       RB (t->decl_with_vis.hard_register);
5465       RB (t->decl_with_vis.common_flag);
5466       RB (t->decl_with_vis.in_text_section);
5467       RB (t->decl_with_vis.in_constant_pool);
5468       RB (t->decl_with_vis.dllimport_flag);
5469       RB (t->decl_with_vis.weak_flag);
5470       RB (t->decl_with_vis.seen_in_bind_expr);
5471       RB (t->decl_with_vis.comdat_flag);
5472       RB (t->decl_with_vis.visibility_specified);
5473       RB (t->decl_with_vis.init_priority_p);
5474       RB (t->decl_with_vis.shadowed_for_var_p);
5475       RB (t->decl_with_vis.cxx_constructor);
5476       RB (t->decl_with_vis.cxx_destructor);
5477       RB (t->decl_with_vis.final);
5478       RB (t->decl_with_vis.regdecl_flag);
5479     }
5480 
5481   if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
5482     {
5483       RB (t->function_decl.static_ctor_flag);
5484       RB (t->function_decl.static_dtor_flag);
5485       RB (t->function_decl.uninlinable);
5486       RB (t->function_decl.possibly_inlined);
5487       RB (t->function_decl.novops_flag);
5488       RB (t->function_decl.returns_twice_flag);
5489       RB (t->function_decl.malloc_flag);
5490       RB (t->function_decl.declared_inline_flag);
5491       RB (t->function_decl.no_inline_warning_flag);
5492       RB (t->function_decl.no_instrument_function_entry_exit);
5493       RB (t->function_decl.no_limit_stack);
5494       RB (t->function_decl.disregard_inline_limits);
5495       RB (t->function_decl.pure_flag);
5496       RB (t->function_decl.looping_const_or_pure_flag);
5497 
5498       RB (t->function_decl.has_debug_args_flag);
5499       RB (t->function_decl.versioned_function);
5500 
5501       /* decl_type is a (misnamed) 2-bit discriminator.  */
5502       unsigned kind = 0;
5503       kind |= unsigned (b ()) << 0;
5504       kind |= unsigned (b ()) << 1;
5505       t->function_decl.decl_type = function_decl_type (kind);
5506     }
5507 #undef RB
5508   return !get_overrun ();
5509 }
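/* A minimal sketch of the bool round-trip for multi-bit fields such as
   function_decl.decl_type, assuming the b ()/WB/RB interfaces used above
   (the writer splits the field into single bits, the reader reassembles
   them in the same order):

     // trees_out::core_bools              // trees_in::core_bools
     WB ((kind >> 0) & 1);                 kind  = unsigned (b ()) << 0;
     WB ((kind >> 1) & 1);                 kind |= unsigned (b ()) << 1;

   The two sides must agree exactly on the number and order of bits, or
   every subsequent bool is silently shifted.  */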
5510 
5511 void
5512 trees_out::lang_decl_bools (tree t)
5513 {
5514 #define WB(X) (b (X))
5515   const struct lang_decl *lang = DECL_LANG_SPECIFIC (t);
5516 
5517   WB (lang->u.base.language == lang_cplusplus);
5518   WB ((lang->u.base.use_template >> 0) & 1);
5519   WB ((lang->u.base.use_template >> 1) & 1);
5520   /* Do not write lang->u.base.not_really_extern, the importer will set
5521      it when reading the definition (if any).  */
5522   WB (lang->u.base.initialized_in_class);
5523   WB (lang->u.base.threadprivate_or_deleted_p);
5524   /* Do not write lang->u.base.anticipated_p, it is a property of the
5525      current TU.  */
5526   WB (lang->u.base.friend_or_tls);
5527   WB (lang->u.base.unknown_bound_p);
5528   /* Do not write lang->u.base.odr_used, the importer will recalculate
5529      it if they ODR-use this decl.  */
5530   WB (lang->u.base.concept_p);
5531   WB (lang->u.base.var_declared_inline_p);
5532   WB (lang->u.base.dependent_init_p);
5533   /* When building a header unit, everything is marked as purview, but
5534      that's the GM purview, so not what the importer will mean.  */
5535   WB (lang->u.base.module_purview_p && !header_module_p ());
5536   if (VAR_OR_FUNCTION_DECL_P (t))
5537     WB (lang->u.base.module_attached_p);
5538   switch (lang->u.base.selector)
5539     {
5540     default:
5541       gcc_unreachable ();
5542 
5543     case lds_fn:  /* lang_decl_fn.  */
5544       WB (lang->u.fn.global_ctor_p);
5545       WB (lang->u.fn.global_dtor_p);
5546       WB (lang->u.fn.static_function);
5547       WB (lang->u.fn.pure_virtual);
5548       WB (lang->u.fn.defaulted_p);
5549       WB (lang->u.fn.has_in_charge_parm_p);
5550       WB (lang->u.fn.has_vtt_parm_p);
5551       /* There shouldn't be a pending inline at this point.  */
5552       gcc_assert (!lang->u.fn.pending_inline_p);
5553       WB (lang->u.fn.nonconverting);
5554       WB (lang->u.fn.thunk_p);
5555       WB (lang->u.fn.this_thunk_p);
5556       /* Do not stream lang->u.fn.hidden_friend_p, it is a property of
5557 	 the TU.  */
5558       WB (lang->u.fn.omp_declare_reduction_p);
5559       WB (lang->u.fn.has_dependent_explicit_spec_p);
5560       WB (lang->u.fn.immediate_fn_p);
5561       WB (lang->u.fn.maybe_deleted);
5562       goto lds_min;
5563 
5564     case lds_decomp:  /* lang_decl_decomp.  */
5565       /* No bools.  */
5566       goto lds_min;
5567 
5568     case lds_min:  /* lang_decl_min.  */
5569     lds_min:
5570       /* No bools.  */
5571       break;
5572 
5573     case lds_ns:  /* lang_decl_ns.  */
5574       /* No bools.  */
5575       break;
5576 
5577     case lds_parm:  /* lang_decl_parm.  */
5578       /* No bools.  */
5579       break;
5580     }
5581 #undef WB
5582 }
5583 
5584 bool
5585 trees_in::lang_decl_bools (tree t)
5586 {
5587 #define RB(X) ((X) = b ())
5588   struct lang_decl *lang = DECL_LANG_SPECIFIC (t);
5589 
5590   lang->u.base.language = b () ? lang_cplusplus : lang_c;
5591   unsigned v;
5592   v = b () << 0;
5593   v |= b () << 1;
5594   lang->u.base.use_template = v;
5595   /* lang->u.base.not_really_extern is not streamed.  */
5596   RB (lang->u.base.initialized_in_class);
5597   RB (lang->u.base.threadprivate_or_deleted_p);
5598   /* lang->u.base.anticipated_p is not streamed.  */
5599   RB (lang->u.base.friend_or_tls);
5600   RB (lang->u.base.unknown_bound_p);
5601   /* lang->u.base.odr_used is not streamed.  */
5602   RB (lang->u.base.concept_p);
5603   RB (lang->u.base.var_declared_inline_p);
5604   RB (lang->u.base.dependent_init_p);
5605   RB (lang->u.base.module_purview_p);
5606   if (VAR_OR_FUNCTION_DECL_P (t))
5607     RB (lang->u.base.module_attached_p);
5608   switch (lang->u.base.selector)
5609     {
5610     default:
5611       gcc_unreachable ();
5612 
5613     case lds_fn:  /* lang_decl_fn.  */
5614       RB (lang->u.fn.global_ctor_p);
5615       RB (lang->u.fn.global_dtor_p);
5616       RB (lang->u.fn.static_function);
5617       RB (lang->u.fn.pure_virtual);
5618       RB (lang->u.fn.defaulted_p);
5619       RB (lang->u.fn.has_in_charge_parm_p);
5620       RB (lang->u.fn.has_vtt_parm_p);
5621       RB (lang->u.fn.nonconverting);
5622       RB (lang->u.fn.thunk_p);
5623       RB (lang->u.fn.this_thunk_p);
5624       /* lang->u.fn.hidden_friend_p is not streamed.  */
5625       RB (lang->u.fn.omp_declare_reduction_p);
5626       RB (lang->u.fn.has_dependent_explicit_spec_p);
5627       RB (lang->u.fn.immediate_fn_p);
5628       RB (lang->u.fn.maybe_deleted);
5629       goto lds_min;
5630 
5631     case lds_decomp:  /* lang_decl_decomp.  */
5632       /* No bools.  */
5633       goto lds_min;
5634 
5635     case lds_min:  /* lang_decl_min.  */
5636     lds_min:
5637       /* No bools.  */
5638       break;
5639 
5640     case lds_ns:  /* lang_decl_ns.  */
5641       /* No bools.  */
5642       break;
5643 
5644     case lds_parm:  /* lang_decl_parm.  */
5645       /* No bools.  */
5646       break;
5647     }
5648 #undef RB
5649   return !get_overrun ();
5650 }
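/* Note the deliberate asymmetry for module_purview_p above: the writer
   masks the flag with !header_module_p () (a header unit marks
   everything as global-module purview, which is not what an importer
   should see), while the reader stores whatever bit arrives:

     WB (lang->u.base.module_purview_p && !header_module_p ());  // out
     RB (lang->u.base.module_purview_p);                         // in
*/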
5651 
5652 void
5653 trees_out::lang_type_bools (tree t)
5654 {
5655 #define WB(X) (b (X))
5656   const struct lang_type *lang = TYPE_LANG_SPECIFIC (t);
5657 
5658   WB (lang->has_type_conversion);
5659   WB (lang->has_copy_ctor);
5660   WB (lang->has_default_ctor);
5661   WB (lang->const_needs_init);
5662   WB (lang->ref_needs_init);
5663   WB (lang->has_const_copy_assign);
5664   WB ((lang->use_template >> 0) & 1);
5665   WB ((lang->use_template >> 1) & 1);
5666 
5667   WB (lang->has_mutable);
5668   WB (lang->com_interface);
5669   WB (lang->non_pod_class);
5670   WB (lang->nearly_empty_p);
5671   WB (lang->user_align);
5672   WB (lang->has_copy_assign);
5673   WB (lang->has_new);
5674   WB (lang->has_array_new);
5675 
5676   WB ((lang->gets_delete >> 0) & 1);
5677   WB ((lang->gets_delete >> 1) & 1);
5678   // Interfaceness is recalculated upon reading.  May have to revisit?
5679   // How do dllexport and dllimport interact across a module?
5680   // lang->interface_only
5681   // lang->interface_unknown
5682   WB (lang->contains_empty_class_p);
5683   WB (lang->anon_aggr);
5684   WB (lang->non_zero_init);
5685   WB (lang->empty_p);
5686 
5687   WB (lang->vec_new_uses_cookie);
5688   WB (lang->declared_class);
5689   WB (lang->diamond_shaped);
5690   WB (lang->repeated_base);
5691   gcc_assert (!lang->being_defined);
5692   // lang->debug_requested
5693   WB (lang->fields_readonly);
5694   WB (lang->ptrmemfunc_flag);
5695 
5696   WB (lang->lazy_default_ctor);
5697   WB (lang->lazy_copy_ctor);
5698   WB (lang->lazy_copy_assign);
5699   WB (lang->lazy_destructor);
5700   WB (lang->has_const_copy_ctor);
5701   WB (lang->has_complex_copy_ctor);
5702   WB (lang->has_complex_copy_assign);
5703   WB (lang->non_aggregate);
5704 
5705   WB (lang->has_complex_dflt);
5706   WB (lang->has_list_ctor);
5707   WB (lang->non_std_layout);
5708   WB (lang->is_literal);
5709   WB (lang->lazy_move_ctor);
5710   WB (lang->lazy_move_assign);
5711   WB (lang->has_complex_move_ctor);
5712   WB (lang->has_complex_move_assign);
5713 
5714   WB (lang->has_constexpr_ctor);
5715   WB (lang->unique_obj_representations);
5716   WB (lang->unique_obj_representations_set);
5717 #undef WB
5718 }
5719 
5720 bool
5721 trees_in::lang_type_bools (tree t)
5722 {
5723 #define RB(X) ((X) = b ())
5724   struct lang_type *lang = TYPE_LANG_SPECIFIC (t);
5725 
5726   RB (lang->has_type_conversion);
5727   RB (lang->has_copy_ctor);
5728   RB (lang->has_default_ctor);
5729   RB (lang->const_needs_init);
5730   RB (lang->ref_needs_init);
5731   RB (lang->has_const_copy_assign);
5732   unsigned v;
5733   v = b () << 0;
5734   v |= b () << 1;
5735   lang->use_template = v;
5736 
5737   RB (lang->has_mutable);
5738   RB (lang->com_interface);
5739   RB (lang->non_pod_class);
5740   RB (lang->nearly_empty_p);
5741   RB (lang->user_align);
5742   RB (lang->has_copy_assign);
5743   RB (lang->has_new);
5744   RB (lang->has_array_new);
5745 
5746   v = b () << 0;
5747   v |= b () << 1;
5748   lang->gets_delete = v;
5749   // lang->interface_only
5750   // lang->interface_unknown
5751   lang->interface_unknown = true; // Redetermine interface
5752   RB (lang->contains_empty_class_p);
5753   RB (lang->anon_aggr);
5754   RB (lang->non_zero_init);
5755   RB (lang->empty_p);
5756 
5757   RB (lang->vec_new_uses_cookie);
5758   RB (lang->declared_class);
5759   RB (lang->diamond_shaped);
5760   RB (lang->repeated_base);
5761   gcc_assert (!lang->being_defined);
5762   gcc_assert (!lang->debug_requested);
5763   RB (lang->fields_readonly);
5764   RB (lang->ptrmemfunc_flag);
5765 
5766   RB (lang->lazy_default_ctor);
5767   RB (lang->lazy_copy_ctor);
5768   RB (lang->lazy_copy_assign);
5769   RB (lang->lazy_destructor);
5770   RB (lang->has_const_copy_ctor);
5771   RB (lang->has_complex_copy_ctor);
5772   RB (lang->has_complex_copy_assign);
5773   RB (lang->non_aggregate);
5774 
5775   RB (lang->has_complex_dflt);
5776   RB (lang->has_list_ctor);
5777   RB (lang->non_std_layout);
5778   RB (lang->is_literal);
5779   RB (lang->lazy_move_ctor);
5780   RB (lang->lazy_move_assign);
5781   RB (lang->has_complex_move_ctor);
5782   RB (lang->has_complex_move_assign);
5783 
5784   RB (lang->has_constexpr_ctor);
5785   RB (lang->unique_obj_representations);
5786   RB (lang->unique_obj_representations_set);
5787 #undef RB
5788   return !get_overrun ();
5789 }
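/* lang->interface_only and lang->interface_unknown are intentionally
   not streamed (see the writer above); instead the reader forces

     lang->interface_unknown = true; // Redetermine interface

   so an imported class recomputes its interfaceness in the importing
   TU rather than inheriting the exporter's answer.  */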
5790 
5791 /* Read & write the core values and pointers.  */
5792 
5793 void
5794 trees_out::core_vals (tree t)
5795 {
5796 #define WU(X) (u (X))
5797 #define WT(X) (tree_node (X))
5798   tree_code code = TREE_CODE (t);
5799 
5800   /* First by shape of the tree.  */
5801 
5802   if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
5803     {
5804       /* Write this early, for better log information.  */
5805       WT (t->decl_minimal.name);
5806       if (!DECL_TEMPLATE_PARM_P (t))
5807 	WT (t->decl_minimal.context);
5808 
5809       if (state)
5810 	state->write_location (*this, t->decl_minimal.locus);
5811     }
5812 
5813   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
5814     {
5815       /* The only types we write also have TYPE_NON_COMMON.  */
5816       gcc_checking_assert (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON));
5817 
5818       /* We only stream the main variant.  */
5819       gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
5820 
5821       /* Stream the name & context first, for better log information.  */
5822       WT (t->type_common.name);
5823       WT (t->type_common.context);
5824 
5825       /* By construction we want to make sure we have the canonical
5826 	 and main variants already in the type table, so emit them
5827 	 now.  */
5828       WT (t->type_common.main_variant);
5829 
5830       tree canonical = t->type_common.canonical;
5831       if (canonical && DECL_TEMPLATE_PARM_P (TYPE_NAME (t)))
5832 	/* We do not want to wander into different templates.
5833 	   Reconstructed on stream in.  */
5834 	canonical = t;
5835       WT (canonical);
5836 
5837       /* type_common.next_variant is internally manipulated.  */
5838       /* type_common.pointer_to, type_common.reference_to.  */
5839 
5840       if (streaming_p ())
5841 	{
5842 	  WU (t->type_common.precision);
5843 	  WU (t->type_common.contains_placeholder_bits);
5844 	  WU (t->type_common.mode);
5845 	  WU (t->type_common.align);
5846 	}
5847 
5848       if (!RECORD_OR_UNION_CODE_P (code))
5849 	{
5850 	  WT (t->type_common.size);
5851 	  WT (t->type_common.size_unit);
5852 	}
5853       WT (t->type_common.attributes);
5854 
5855       WT (t->type_common.common.chain); /* TYPE_STUB_DECL.  */
5856     }
5857 
5858   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
5859     {
5860       if (streaming_p ())
5861 	{
5862 	  WU (t->decl_common.mode);
5863 	  WU (t->decl_common.off_align);
5864 	  WU (t->decl_common.align);
5865 	}
5866 
5867       /* For templates these hold instantiation (partial and/or
5868 	 specialization) information.  */
5869       if (code != TEMPLATE_DECL)
5870 	{
5871 	  WT (t->decl_common.size);
5872 	  WT (t->decl_common.size_unit);
5873 	}
5874 
5875       WT (t->decl_common.attributes);
5876       // FIXME: Does this introduce cross-decl links?  For instance
5877       // from instantiation to the template.  If so, we'll need more
5878       // deduplication logic.  I think we'll need to walk the blocks
5879       // of the owning function_decl's abstract origin in tandem, to
5880       // generate the locating data needed?
5881       WT (t->decl_common.abstract_origin);
5882     }
5883 
5884   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
5885     {
5886       WT (t->decl_with_vis.assembler_name);
5887       if (streaming_p ())
5888 	WU (t->decl_with_vis.visibility);
5889     }
5890 
5891   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
5892     {
5893       /* Records and unions hold FIELDS, VFIELD & BINFO on these
5894 	 things.  */
5895       if (!RECORD_OR_UNION_CODE_P (code) && code != ENUMERAL_TYPE)
5896 	{
5897 	  // FIXME: These are from tpl_parm_value's 'type' writing.
5898 	  // Perhaps it should just be doing them directly?
5899 	  gcc_checking_assert (code == TEMPLATE_TYPE_PARM
5900 			       || code == TEMPLATE_TEMPLATE_PARM
5901 			       || code == BOUND_TEMPLATE_TEMPLATE_PARM);
5902 	  gcc_checking_assert (!TYPE_CACHED_VALUES_P (t));
5903 	  WT (t->type_non_common.values);
5904 	  WT (t->type_non_common.maxval);
5905 	  WT (t->type_non_common.minval);
5906 	}
5907 
5908       WT (t->type_non_common.lang_1);
5909     }
5910 
5911   if (CODE_CONTAINS_STRUCT (code, TS_EXP))
5912     {
5913       if (state)
5914 	state->write_location (*this, t->exp.locus);
5915 
5916       /* Walk in forward order, as (for instance) REQUIRES_EXPR has a
5917          bunch of unscoped parms on its first operand.  It's safer to
5918          create those in order.  */
5919       bool vl = TREE_CODE_CLASS (code) == tcc_vl_exp;
5920       for (unsigned limit = (vl ? VL_EXP_OPERAND_LENGTH (t)
5921 			     : TREE_OPERAND_LENGTH (t)),
5922 	     ix = unsigned (vl); ix != limit; ix++)
5923 	WT (TREE_OPERAND (t, ix));
5924     }
5925   else
5926     /* The CODE_CONTAINS tables were inaccurate when I started.  */
5927     gcc_checking_assert (TREE_CODE_CLASS (code) != tcc_expression
5928 			 && TREE_CODE_CLASS (code) != tcc_binary
5929 			 && TREE_CODE_CLASS (code) != tcc_unary
5930 			 && TREE_CODE_CLASS (code) != tcc_reference
5931 			 && TREE_CODE_CLASS (code) != tcc_comparison
5932 			 && TREE_CODE_CLASS (code) != tcc_statement
5933 			 && TREE_CODE_CLASS (code) != tcc_vl_exp);
5934 
5935   /* Then by CODE.  Special cases and/or 1:1 tree shape
5936      correspondence.  */
5937   switch (code)
5938     {
5939     default:
5940       break;
5941 
5942     case ARGUMENT_PACK_SELECT:  /* Transient during instantiation.  */
5943     case DEFERRED_PARSE:	/* Expanded upon completion of
5944 				   outermost class.  */
5945     case IDENTIFIER_NODE:	/* Streamed specially.  */
5946     case BINDING_VECTOR:		/* Only in namespace-scope symbol
5947 				   table.  */
5948     case SSA_NAME:
5949     case TRANSLATION_UNIT_DECL: /* There is only one, it is a
5950 				   global_tree.  */
5951     case USERDEF_LITERAL:  	/* Expanded during parsing.  */
5952       gcc_unreachable (); /* Should never meet.  */
5953 
5954       /* Constants.  */
5955     case COMPLEX_CST:
5956       WT (TREE_REALPART (t));
5957       WT (TREE_IMAGPART (t));
5958       break;
5959 
5960     case FIXED_CST:
5961       gcc_unreachable (); /* Not supported in C++.  */
5962 
5963     case INTEGER_CST:
5964       if (streaming_p ())
5965 	{
5966 	  unsigned num = TREE_INT_CST_EXT_NUNITS (t);
5967 	  for (unsigned ix = 0; ix != num; ix++)
5968 	    wu (TREE_INT_CST_ELT (t, ix));
5969 	}
5970       break;
5971 
5972     case POLY_INT_CST:
5973       gcc_unreachable (); /* Not supported in C++.  */
5974 
5975     case REAL_CST:
5976       if (streaming_p ())
5977 	buf (TREE_REAL_CST_PTR (t), sizeof (real_value));
5978       break;
5979 
5980     case STRING_CST:
5981       /* Streamed during start.  */
5982       break;
5983 
5984     case VECTOR_CST:
5985       for (unsigned ix = vector_cst_encoded_nelts (t); ix--;)
5986 	WT (VECTOR_CST_ENCODED_ELT (t, ix));
5987       break;
5988 
5989       /* Decls.  */
5990     case VAR_DECL:
5991       if (DECL_CONTEXT (t)
5992 	  && TREE_CODE (DECL_CONTEXT (t)) != FUNCTION_DECL)
5993 	break;
5994       /* FALLTHROUGH  */
5995 
5996     case RESULT_DECL:
5997     case PARM_DECL:
5998       if (DECL_HAS_VALUE_EXPR_P (t))
5999 	WT (DECL_VALUE_EXPR (t));
6000       /* FALLTHROUGH  */
6001 
6002     case CONST_DECL:
6003     case IMPORTED_DECL:
6004       WT (t->decl_common.initial);
6005       break;
6006 
6007     case FIELD_DECL:
6008       WT (t->field_decl.offset);
6009       WT (t->field_decl.bit_field_type);
6010       WT (t->field_decl.qualifier); /* bitfield unit.  */
6011       WT (t->field_decl.bit_offset);
6012       WT (t->field_decl.fcontext);
6013       WT (t->decl_common.initial);
6014       break;
6015 
6016     case LABEL_DECL:
6017       if (streaming_p ())
6018 	{
6019 	  WU (t->label_decl.label_decl_uid);
6020 	  WU (t->label_decl.eh_landing_pad_nr);
6021 	}
6022       break;
6023 
6024     case FUNCTION_DECL:
6025       if (streaming_p ())
6026 	{
6027 	  /* Builtins can be streamed by value when a header declares
6028 	     them.  */
6029 	  WU (DECL_BUILT_IN_CLASS (t));
6030 	  if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
6031 	    WU (DECL_UNCHECKED_FUNCTION_CODE (t));
6032 	}
6033 
6034       WT (t->function_decl.personality);
6035       WT (t->function_decl.function_specific_target);
6036       WT (t->function_decl.function_specific_optimization);
6037       WT (t->function_decl.vindex);
6038 
6039       if (DECL_HAS_DEPENDENT_EXPLICIT_SPEC_P (t))
6040 	WT (lookup_explicit_specifier (t));
6041       break;
6042 
6043     case USING_DECL:
6044       /* USING_DECL_DECLS  */
6045       WT (t->decl_common.initial);
6046       /* FALLTHROUGH  */
6047 
6048     case TYPE_DECL:
6049       /* USING_DECL: USING_DECL_SCOPE  */
6050       /* TYPE_DECL: DECL_ORIGINAL_TYPE */
6051       WT (t->decl_non_common.result);
6052       break;
6053 
6054       /* Miscellaneous common nodes.  */
6055     case BLOCK:
6056       if (state)
6057 	{
6058 	  state->write_location (*this, t->block.locus);
6059 	  state->write_location (*this, t->block.end_locus);
6060 	}
6061 
6062       /* DECL_LOCAL_DECL_P decls are first encountered here and
6063          streamed by value.  */
6064       chained_decls (t->block.vars);
6065       /* nonlocalized_vars is a middle-end thing.  */
6066       WT (t->block.subblocks);
6067       WT (t->block.supercontext);
6068       // FIXME: As for decl's abstract_origin, does this introduce crosslinks?
6069       WT (t->block.abstract_origin);
6070       /* fragment_origin, fragment_chain are middle-end things.  */
6071       WT (t->block.chain);
6072       /* nonlocalized_vars, block_num & die are middle endy/debug
6073 	 things.  */
6074       break;
6075 
6076     case CALL_EXPR:
6077       if (streaming_p ())
6078 	WU (t->base.u.ifn);
6079       break;
6080 
6081     case CONSTRUCTOR:
6082       {
6083 	unsigned len = vec_safe_length (t->constructor.elts);
6084 	if (streaming_p ())
6085 	  WU (len);
6086 	if (len)
6087 	  for (unsigned ix = 0; ix != len; ix++)
6088 	    {
6089 	      const constructor_elt &elt = (*t->constructor.elts)[ix];
6090 
6091 	      WT (elt.index);
6092 	      WT (elt.value);
6093 	    }
6094       }
6095       break;
6096 
6097     case OMP_CLAUSE:
6098       {
6099 	/* The ompcode is serialized in start.  */
6100 	if (streaming_p ())
6101 	  WU (t->omp_clause.subcode.map_kind);
6102 	if (state)
6103 	  state->write_location (*this, t->omp_clause.locus);
6104 
6105 	unsigned len = omp_clause_num_ops[OMP_CLAUSE_CODE (t)];
6106 	for (unsigned ix = 0; ix != len; ix++)
6107 	  WT (t->omp_clause.ops[ix]);
6108       }
6109       break;
6110 
6111     case STATEMENT_LIST:
6112       for (tree stmt : tsi_range (t))
6113 	if (stmt)
6114 	  WT (stmt);
6115       WT (NULL_TREE);
6116       break;
6117 
6118     case OPTIMIZATION_NODE:
6119     case TARGET_OPTION_NODE:
6120       // FIXME: Our representation for these two nodes is a cache of
6121       // the resulting set of options.  Not a record of the options
6122       // that got changed by a particular attribute or pragma.  Should
6123       // we record that, or should we record the diff from the command
6124       // line options?  The latter seems the right behaviour, but is
6125       // (a) harder, and I guess could introduce strangeness if the
6126       // importer has set some incompatible set of optimization flags?
6127       gcc_unreachable ();
6128       break;
6129 
6130     case TREE_BINFO:
6131       {
6132 	WT (t->binfo.common.chain);
6133 	WT (t->binfo.offset);
6134 	WT (t->binfo.inheritance);
6135 	WT (t->binfo.vptr_field);
6136 
6137 	WT (t->binfo.vtable);
6138 	WT (t->binfo.virtuals);
6139 	WT (t->binfo.vtt_subvtt);
6140 	WT (t->binfo.vtt_vptr);
6141 
6142 	tree_vec (BINFO_BASE_ACCESSES (t));
6143 	unsigned num = vec_safe_length (BINFO_BASE_ACCESSES (t));
6144 	for (unsigned ix = 0; ix != num; ix++)
6145 	  WT (BINFO_BASE_BINFO (t, ix));
6146       }
6147       break;
6148 
6149     case TREE_LIST:
6150       WT (t->list.purpose);
6151       WT (t->list.value);
6152       WT (t->list.common.chain);
6153       break;
6154 
6155     case TREE_VEC:
6156       for (unsigned ix = TREE_VEC_LENGTH (t); ix--;)
6157 	WT (TREE_VEC_ELT (t, ix));
6158       /* We stash NON_DEFAULT_TEMPLATE_ARGS_COUNT on TREE_CHAIN!  */
6159       gcc_checking_assert (!t->type_common.common.chain
6160 			   || (TREE_CODE (t->type_common.common.chain)
6161 			       == INTEGER_CST));
6162       WT (t->type_common.common.chain);
6163       break;
6164 
6165       /* C++-specific nodes ...  */
6166     case BASELINK:
6167       WT (((lang_tree_node *)t)->baselink.binfo);
6168       WT (((lang_tree_node *)t)->baselink.functions);
6169       WT (((lang_tree_node *)t)->baselink.access_binfo);
6170       break;
6171 
6172     case CONSTRAINT_INFO:
6173       WT (((lang_tree_node *)t)->constraint_info.template_reqs);
6174       WT (((lang_tree_node *)t)->constraint_info.declarator_reqs);
6175       WT (((lang_tree_node *)t)->constraint_info.associated_constr);
6176       break;
6177 
6178     case DEFERRED_NOEXCEPT:
6179       WT (((lang_tree_node *)t)->deferred_noexcept.pattern);
6180       WT (((lang_tree_node *)t)->deferred_noexcept.args);
6181       break;
6182 
6183     case LAMBDA_EXPR:
6184       WT (((lang_tree_node *)t)->lambda_expression.capture_list);
6185       WT (((lang_tree_node *)t)->lambda_expression.this_capture);
6186       WT (((lang_tree_node *)t)->lambda_expression.extra_scope);
6187       /* pending_proxies is a parse-time thing.  */
6188       gcc_assert (!((lang_tree_node *)t)->lambda_expression.pending_proxies);
6189       if (state)
6190 	state->write_location
6191 	  (*this, ((lang_tree_node *)t)->lambda_expression.locus);
6192       if (streaming_p ())
6193 	{
6194 	  WU (((lang_tree_node *)t)->lambda_expression.default_capture_mode);
6195 	  WU (((lang_tree_node *)t)->lambda_expression.discriminator);
6196 	}
6197       break;
6198 
6199     case OVERLOAD:
6200       WT (((lang_tree_node *)t)->overload.function);
6201       WT (t->common.chain);
6202       break;
6203 
6204     case PTRMEM_CST:
6205       WT (((lang_tree_node *)t)->ptrmem.member);
6206       break;
6207 
6208     case STATIC_ASSERT:
6209       WT (((lang_tree_node *)t)->static_assertion.condition);
6210       WT (((lang_tree_node *)t)->static_assertion.message);
6211       if (state)
6212 	state->write_location
6213 	  (*this, ((lang_tree_node *)t)->static_assertion.location);
6214       break;
6215 
6216     case TEMPLATE_DECL:
6217       /* Streamed with the template_decl node itself.  */
6218       gcc_checking_assert
6219       	(TREE_VISITED (((lang_tree_node *)t)->template_decl.arguments));
6220       gcc_checking_assert
6221 	(TREE_VISITED (((lang_tree_node *)t)->template_decl.result)
6222 	 || dep_hash->find_dependency (t)->is_alias_tmpl_inst ());
6223       if (DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (t))
6224 	WT (DECL_CHAIN (t));
6225       break;
6226 
6227     case TEMPLATE_INFO:
6228       {
6229 	WT (((lang_tree_node *)t)->template_info.tmpl);
6230 	WT (((lang_tree_node *)t)->template_info.args);
6231 
6232 	const auto *ac = (((lang_tree_node *)t)
6233 			  ->template_info.deferred_access_checks);
6234 	unsigned len = vec_safe_length (ac);
6235 	if (streaming_p ())
6236 	  u (len);
6237 	if (len)
6238 	  {
6239 	    for (unsigned ix = 0; ix != len; ix++)
6240 	      {
6241 		const auto &m = (*ac)[ix];
6242 		WT (m.binfo);
6243 		WT (m.decl);
6244 		WT (m.diag_decl);
6245 		if (state)
6246 		  state->write_location (*this, m.loc);
6247 	      }
6248 	  }
6249       }
6250       break;
6251 
6252     case TEMPLATE_PARM_INDEX:
6253       if (streaming_p ())
6254 	{
6255 	  WU (((lang_tree_node *)t)->tpi.index);
6256 	  WU (((lang_tree_node *)t)->tpi.level);
6257 	  WU (((lang_tree_node *)t)->tpi.orig_level);
6258 	}
6259       WT (((lang_tree_node *)t)->tpi.decl);
6260       /* TEMPLATE_PARM_DESCENDANTS (AKA TREE_CHAIN) is an internal
6261 	 cache, do not stream.  */
6262       break;
6263 
6264     case TRAIT_EXPR:
6265       WT (((lang_tree_node *)t)->trait_expression.type1);
6266       WT (((lang_tree_node *)t)->trait_expression.type2);
6267       if (streaming_p ())
6268 	WU (((lang_tree_node *)t)->trait_expression.kind);
6269       break;
6270     }
6271 
6272   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
6273     {
6274       /* We want to stream the type of expression-like nodes /after/
6275          we've streamed the operands.  The type often contains (bits
6276          of the) types of the operands, and with things like decltype
6277          and noexcept in play, we really want to stream the decls
6278          defining the type before we try and stream the type on its
6279          own.  Otherwise we can find ourselves trying to read in a
6280          decl, when we're already partially reading in a component of
6281          its type.  And that's bad.  */
6282       tree type = t->typed.type;
6283       unsigned prec = 0;
6284 
6285       switch (code)
6286 	{
6287 	default:
6288 	  break;
6289 
6290 	case TEMPLATE_DECL:
6291 	  /* We fill in the template's type separately.  */
6292 	  type = NULL_TREE;
6293 	  break;
6294 
6295 	case TYPE_DECL:
6296 	  if (DECL_ORIGINAL_TYPE (t) && t == TYPE_NAME (type))
6297 	    /* This is a typedef.  We set its type separately.  */
6298 	    type = NULL_TREE;
6299 	  break;
6300 
6301 	case ENUMERAL_TYPE:
6302 	  if (type && !ENUM_FIXED_UNDERLYING_TYPE_P (t))
6303 	    {
6304 	      /* Type is a restricted range integer type derived from the
6305 		 integer_types.  Find the right one.  */
6306 	      prec = TYPE_PRECISION (type);
6307 	      tree name = DECL_NAME (TYPE_NAME (type));
6308 
6309 	      for (unsigned itk = itk_none; itk--;)
6310 		if (integer_types[itk]
6311 		    && DECL_NAME (TYPE_NAME (integer_types[itk])) == name)
6312 		  {
6313 		    type = integer_types[itk];
6314 		    break;
6315 		  }
6316 	      gcc_assert (type != t->typed.type);
6317 	    }
6318 	  break;
6319 	}
6320 
6321       WT (type);
6322       if (prec && streaming_p ())
6323 	WU (prec);
6324     }
6325 
6326 #undef WT
6327 #undef WU
6328 }
6329 
6330 // Streaming in a reference to a decl can cause that decl to be
6331 // TREE_USED, which is the mark_used behaviour we need most of the
6332 // time.  The trees_in::unused counter can be incremented to inhibit
6333 // this, which is needed at least for vtables.
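// For example, the TREE_BINFO case in trees_in::core_vals below
// brackets the vtable-related reads so those references are not marked
// as used:
//
//   unused++;
//   RT (t->binfo.vtable);
//   ...
//   unused--;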
6334 
6335 bool
6336 trees_in::core_vals (tree t)
6337 {
6338 #define RU(X) ((X) = u ())
6339 #define RUC(T,X) ((X) = T (u ()))
6340 #define RT(X) ((X) = tree_node ())
6341 #define RTU(X) ((X) = tree_node (true))
6342   tree_code code = TREE_CODE (t);
6343 
6344   /* First by tree shape.  */
6345   if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
6346     {
6347       RT (t->decl_minimal.name);
6348       if (!DECL_TEMPLATE_PARM_P (t))
6349 	RT (t->decl_minimal.context);
6350 
6351       /* Don't zap the locus just yet, we don't record it correctly
6352 	 and thus lose all location information.  */
6353       t->decl_minimal.locus = state->read_location (*this);
6354     }
6355 
6356   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
6357     {
6358       RT (t->type_common.name);
6359       RT (t->type_common.context);
6360 
6361       RT (t->type_common.main_variant);
6362       RT (t->type_common.canonical);
6363 
6364       /* type_common.next_variant is internally manipulated.  */
6365       /* type_common.pointer_to, type_common.reference_to.  */
6366 
6367       RU (t->type_common.precision);
6368       RU (t->type_common.contains_placeholder_bits);
6369       RUC (machine_mode, t->type_common.mode);
6370       RU (t->type_common.align);
6371 
6372       if (!RECORD_OR_UNION_CODE_P (code))
6373 	{
6374 	  RT (t->type_common.size);
6375 	  RT (t->type_common.size_unit);
6376 	}
6377       RT (t->type_common.attributes);
6378 
6379       RT (t->type_common.common.chain); /* TYPE_STUB_DECL.  */
6380     }
6381 
6382   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
6383     {
6384       RUC (machine_mode, t->decl_common.mode);
6385       RU (t->decl_common.off_align);
6386       RU (t->decl_common.align);
6387 
6388       if (code != TEMPLATE_DECL)
6389 	{
6390 	  RT (t->decl_common.size);
6391 	  RT (t->decl_common.size_unit);
6392 	}
6393 
6394       RT (t->decl_common.attributes);
6395       RT (t->decl_common.abstract_origin);
6396     }
6397 
6398   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
6399     {
6400       RT (t->decl_with_vis.assembler_name);
6401       RUC (symbol_visibility, t->decl_with_vis.visibility);
6402     }
6403 
6404   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
6405     {
6406       /* Records and unions hold FIELDS, VFIELD & BINFO on these
6407 	 things.  */
6408       if (!RECORD_OR_UNION_CODE_P (code) && code != ENUMERAL_TYPE)
6409 	{
6410 	  /* This is not clobbering TYPE_CACHED_VALUES, because this
6411 	     is a type that doesn't have any.  */
6412 	  gcc_checking_assert (!TYPE_CACHED_VALUES_P (t));
6413 	  RT (t->type_non_common.values);
6414 	  RT (t->type_non_common.maxval);
6415 	  RT (t->type_non_common.minval);
6416 	}
6417 
6418       RT (t->type_non_common.lang_1);
6419     }
6420 
6421   if (CODE_CONTAINS_STRUCT (code, TS_EXP))
6422     {
6423       t->exp.locus = state->read_location (*this);
6424 
6425       bool vl = TREE_CODE_CLASS (code) == tcc_vl_exp;
6426       for (unsigned limit = (vl ? VL_EXP_OPERAND_LENGTH (t)
6427 			     : TREE_OPERAND_LENGTH (t)),
6428 	     ix = unsigned (vl); ix != limit; ix++)
6429 	RTU (TREE_OPERAND (t, ix));
6430     }
6431 
6432   /* Then by CODE.  Special cases and/or 1:1 tree shape
6433      correspondence.  */
6434   switch (code)
6435     {
6436     default:
6437       break;
6438 
6439     case ARGUMENT_PACK_SELECT:
6440     case DEFERRED_PARSE:
6441     case IDENTIFIER_NODE:
6442     case BINDING_VECTOR:
6443     case SSA_NAME:
6444     case TRANSLATION_UNIT_DECL:
6445     case USERDEF_LITERAL:
6446       return false; /* Should never meet.  */
6447 
6448       /* Constants.  */
6449     case COMPLEX_CST:
6450       RT (TREE_REALPART (t));
6451       RT (TREE_IMAGPART (t));
6452       break;
6453 
6454     case FIXED_CST:
6455       /* Not supported in C++.  */
6456       return false;
6457 
6458     case INTEGER_CST:
6459       {
6460 	unsigned num = TREE_INT_CST_EXT_NUNITS (t);
6461 	for (unsigned ix = 0; ix != num; ix++)
6462 	  TREE_INT_CST_ELT (t, ix) = wu ();
6463       }
6464       break;
6465 
6466     case POLY_INT_CST:
6467       /* Not supported in C++.  */
6468       return false;
6469 
6470     case REAL_CST:
6471       if (const void *bytes = buf (sizeof (real_value)))
6472 	TREE_REAL_CST_PTR (t)
6473 	  = reinterpret_cast<real_value *> (memcpy (ggc_alloc<real_value> (),
6474 						    bytes, sizeof (real_value)));
6475       break;
6476 
6477     case STRING_CST:
6478       /* Streamed during start.  */
6479       break;
6480 
6481     case VECTOR_CST:
6482       for (unsigned ix = vector_cst_encoded_nelts (t); ix--;)
6483 	RT (VECTOR_CST_ENCODED_ELT (t, ix));
6484       break;
6485 
6486       /* Decls.  */
6487     case VAR_DECL:
6488       if (DECL_CONTEXT (t)
6489 	  && TREE_CODE (DECL_CONTEXT (t)) != FUNCTION_DECL)
6490 	break;
6491       /* FALLTHROUGH  */
6492 
6493     case RESULT_DECL:
6494     case PARM_DECL:
6495       if (DECL_HAS_VALUE_EXPR_P (t))
6496 	{
6497 	  /* The DECL_VALUE_EXPR hash table is a cache, thus if we're
6498 	     reading a duplicate (which we end up discarding), the
6499 	     value expr will also be cleaned up at the next gc.  */
6500 	  tree val = tree_node ();
6501 	  SET_DECL_VALUE_EXPR (t, val);
6502 	}
6503       /* FALLTHROUGH  */
6504 
6505     case CONST_DECL:
6506     case IMPORTED_DECL:
6507       RT (t->decl_common.initial);
6508       break;
6509 
6510     case FIELD_DECL:
6511       RT (t->field_decl.offset);
6512       RT (t->field_decl.bit_field_type);
6513       RT (t->field_decl.qualifier);
6514       RT (t->field_decl.bit_offset);
6515       RT (t->field_decl.fcontext);
6516       RT (t->decl_common.initial);
6517       break;
6518 
6519     case LABEL_DECL:
6520       RU (t->label_decl.label_decl_uid);
6521       RU (t->label_decl.eh_landing_pad_nr);
6522       break;
6523 
6524     case FUNCTION_DECL:
6525       {
6526 	unsigned bltin = u ();
6527 	t->function_decl.built_in_class = built_in_class (bltin);
6528 	if (bltin != NOT_BUILT_IN)
6529 	  {
6530 	    bltin = u ();
6531 	    DECL_UNCHECKED_FUNCTION_CODE (t) = built_in_function (bltin);
6532 	  }
6533 
6534 	RT (t->function_decl.personality);
6535 	RT (t->function_decl.function_specific_target);
6536 	RT (t->function_decl.function_specific_optimization);
6537 	RT (t->function_decl.vindex);
6538 
6539 	if (DECL_HAS_DEPENDENT_EXPLICIT_SPEC_P (t))
6540 	  {
6541 	    tree spec;
6542 	    RT (spec);
6543 	    store_explicit_specifier (t, spec);
6544 	  }
6545       }
6546       break;
6547 
6548     case USING_DECL:
6549       /* USING_DECL_DECLS  */
6550       RT (t->decl_common.initial);
6551       /* FALLTHROUGH  */
6552 
6553     case TYPE_DECL:
6554       /* USING_DECL: USING_DECL_SCOPE  */
6555       /* TYPE_DECL: DECL_ORIGINAL_TYPE */
6556       RT (t->decl_non_common.result);
6557       break;
6558 
6559       /* Miscellaneous common nodes.  */
6560     case BLOCK:
6561       t->block.locus = state->read_location (*this);
6562       t->block.end_locus = state->read_location (*this);
6563       t->block.vars = chained_decls ();
6564       /* nonlocalized_vars is middle-end.  */
6565       RT (t->block.subblocks);
6566       RT (t->block.supercontext);
6567       RT (t->block.abstract_origin);
6568       /* fragment_origin, fragment_chain are middle-end.  */
6569       RT (t->block.chain);
6570       /* nonlocalized_vars, block_num, die are middle endy/debug
6571 	 things.  */
6572       break;
6573 
6574     case CALL_EXPR:
6575       RUC (internal_fn, t->base.u.ifn);
6576       break;
6577 
6578     case CONSTRUCTOR:
6579       if (unsigned len = u ())
6580 	{
6581 	  vec_alloc (t->constructor.elts, len);
6582 	  for (unsigned ix = 0; ix != len; ix++)
6583 	    {
6584 	      constructor_elt elt;
6585 
6586 	      RT (elt.index);
6587 	      RTU (elt.value);
6588 	      t->constructor.elts->quick_push (elt);
6589 	    }
6590 	}
6591       break;
6592 
6593     case OMP_CLAUSE:
6594       {
6595 	RU (t->omp_clause.subcode.map_kind);
6596 	t->omp_clause.locus = state->read_location (*this);
6597 
6598 	unsigned len = omp_clause_num_ops[OMP_CLAUSE_CODE (t)];
6599 	for (unsigned ix = 0; ix != len; ix++)
6600 	  RT (t->omp_clause.ops[ix]);
6601       }
6602       break;
6603 
6604     case STATEMENT_LIST:
6605       {
6606 	tree_stmt_iterator iter = tsi_start (t);
6607 	for (tree stmt; RT (stmt);)
6608 	  tsi_link_after (&iter, stmt, TSI_CONTINUE_LINKING);
6609       }
6610       break;
6611 
6612     case OPTIMIZATION_NODE:
6613     case TARGET_OPTION_NODE:
6614       /* Not yet implemented, see trees_out::core_vals.  */
6615       gcc_unreachable ();
6616       break;
6617 
6618     case TREE_BINFO:
6619       RT (t->binfo.common.chain);
6620       RT (t->binfo.offset);
6621       RT (t->binfo.inheritance);
6622       RT (t->binfo.vptr_field);
6623 
6624       /* Do not mark the vtables as USED in the address expressions
6625 	 here.  */
6626       unused++;
6627       RT (t->binfo.vtable);
6628       RT (t->binfo.virtuals);
6629       RT (t->binfo.vtt_subvtt);
6630       RT (t->binfo.vtt_vptr);
6631       unused--;
6632 
6633       BINFO_BASE_ACCESSES (t) = tree_vec ();
6634       if (!get_overrun ())
6635 	{
6636 	  unsigned num = vec_safe_length (BINFO_BASE_ACCESSES (t));
6637 	  for (unsigned ix = 0; ix != num; ix++)
6638 	    BINFO_BASE_APPEND (t, tree_node ());
6639 	}
6640       break;
6641 
6642     case TREE_LIST:
6643       RT (t->list.purpose);
6644       RT (t->list.value);
6645       RT (t->list.common.chain);
6646       break;
6647 
6648     case TREE_VEC:
6649       for (unsigned ix = TREE_VEC_LENGTH (t); ix--;)
6650 	RT (TREE_VEC_ELT (t, ix));
6651       RT (t->type_common.common.chain);
6652       break;
6653 
6654       /* C++-specific nodes ...  */
6655     case BASELINK:
6656       RT (((lang_tree_node *)t)->baselink.binfo);
6657       RTU (((lang_tree_node *)t)->baselink.functions);
6658       RT (((lang_tree_node *)t)->baselink.access_binfo);
6659       break;
6660 
6661     case CONSTRAINT_INFO:
6662       RT (((lang_tree_node *)t)->constraint_info.template_reqs);
6663       RT (((lang_tree_node *)t)->constraint_info.declarator_reqs);
6664       RT (((lang_tree_node *)t)->constraint_info.associated_constr);
6665       break;
6666 
6667     case DEFERRED_NOEXCEPT:
6668       RT (((lang_tree_node *)t)->deferred_noexcept.pattern);
6669       RT (((lang_tree_node *)t)->deferred_noexcept.args);
6670       break;
6671 
6672     case LAMBDA_EXPR:
6673       RT (((lang_tree_node *)t)->lambda_expression.capture_list);
6674       RT (((lang_tree_node *)t)->lambda_expression.this_capture);
6675       RT (((lang_tree_node *)t)->lambda_expression.extra_scope);
6676       /* lambda_expression.pending_proxies is NULL  */
6677       ((lang_tree_node *)t)->lambda_expression.locus
6678 	= state->read_location (*this);
6679       RUC (cp_lambda_default_capture_mode_type,
6680 	   ((lang_tree_node *)t)->lambda_expression.default_capture_mode);
6681       RU (((lang_tree_node *)t)->lambda_expression.discriminator);
6682       break;
6683 
6684     case OVERLOAD:
6685       RT (((lang_tree_node *)t)->overload.function);
6686       RT (t->common.chain);
6687       break;
6688 
6689     case PTRMEM_CST:
6690       RT (((lang_tree_node *)t)->ptrmem.member);
6691       break;
6692 
6693     case STATIC_ASSERT:
6694       RT (((lang_tree_node *)t)->static_assertion.condition);
6695       RT (((lang_tree_node *)t)->static_assertion.message);
6696       ((lang_tree_node *)t)->static_assertion.location
6697 	= state->read_location (*this);
6698       break;
6699 
6700     case TEMPLATE_DECL:
6701       /* Streamed when reading the raw template decl itself.  */
6702       gcc_assert (((lang_tree_node *)t)->template_decl.arguments);
6703       gcc_assert (((lang_tree_node *)t)->template_decl.result);
6704       if (DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (t))
6705 	RT (DECL_CHAIN (t));
6706       break;
6707 
6708     case TEMPLATE_INFO:
6709       RT (((lang_tree_node *)t)->template_info.tmpl);
6710       RT (((lang_tree_node *)t)->template_info.args);
6711       if (unsigned len = u ())
6712 	{
6713 	  auto &ac = (((lang_tree_node *)t)
6714 		      ->template_info.deferred_access_checks);
6715 	  vec_alloc (ac, len);
6716 	  for (unsigned ix = 0; ix != len; ix++)
6717 	    {
6718 	      deferred_access_check m;
6719 
6720 	      RT (m.binfo);
6721 	      RT (m.decl);
6722 	      RT (m.diag_decl);
6723 	      m.loc = state->read_location (*this);
6724 	      ac->quick_push (m);
6725 	    }
6726 	}
6727       break;
6728 
6729     case TEMPLATE_PARM_INDEX:
6730       RU (((lang_tree_node *)t)->tpi.index);
6731       RU (((lang_tree_node *)t)->tpi.level);
6732       RU (((lang_tree_node *)t)->tpi.orig_level);
6733       RT (((lang_tree_node *)t)->tpi.decl);
6734       break;
6735 
6736     case TRAIT_EXPR:
6737       RT (((lang_tree_node *)t)->trait_expression.type1);
6738       RT (((lang_tree_node *)t)->trait_expression.type2);
6739       RUC (cp_trait_kind, ((lang_tree_node *)t)->trait_expression.kind);
6740       break;
6741     }
6742 
6743   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
6744     {
6745       tree type = tree_node ();
6746 
6747       if (type && code == ENUMERAL_TYPE && !ENUM_FIXED_UNDERLYING_TYPE_P (t))
6748 	{
6749 	  unsigned precision = u ();
6750 
6751 	  type = build_distinct_type_copy (type);
6752 	  TYPE_PRECISION (type) = precision;
6753 	  set_min_and_max_values_for_integral_type (type, precision,
6754 						    TYPE_SIGN (type));
6755 	}
6756 
6757       if (code != TEMPLATE_DECL)
6758 	t->typed.type = type;
6759     }
6760 
6761 #undef RT
6762 #undef RTU
6763 #undef RU
6764   return !get_overrun ();
6765 }
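/* The ENUMERAL_TYPE handling in the TS_TYPED code above is the one
   place the streamed type is not used verbatim: for an enum without a
   fixed underlying type the writer substitutes the matching entry from
   integer_types and streams TYPE_PRECISION separately, and the reader
   reconstitutes the restricted-range type along the lines of:

     type = build_distinct_type_copy (type);
     TYPE_PRECISION (type) = precision;
     set_min_and_max_values_for_integral_type (type, precision,
					       TYPE_SIGN (type));
*/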
6766 
6767 void
6768 trees_out::lang_decl_vals (tree t)
6769 {
6770   const struct lang_decl *lang = DECL_LANG_SPECIFIC (t);
6771 #define WU(X) (u (X))
6772 #define WT(X) (tree_node (X))
6773   /* Module index already written.  */
6774   switch (lang->u.base.selector)
6775     {
6776     default:
6777       gcc_unreachable ();
6778 
6779     case lds_fn:  /* lang_decl_fn.  */
6780       if (streaming_p ())
6781 	{
6782 	  if (DECL_NAME (t) && IDENTIFIER_OVL_OP_P (DECL_NAME (t)))
6783 	    WU (lang->u.fn.ovl_op_code);
6784 	}
6785 
6786       if (DECL_CLASS_SCOPE_P (t))
6787 	WT (lang->u.fn.context);
6788 
6789       if (lang->u.fn.thunk_p)
6790 	{
6791 	  /* The thunked-to function.  */
6792 	  WT (lang->u.fn.befriending_classes);
6793 	  if (streaming_p ())
6794 	    wi (lang->u.fn.u5.fixed_offset);
6795 	}
6796       else
6797 	WT (lang->u.fn.u5.cloned_function);
6798 
6799       if (FNDECL_USED_AUTO (t))
6800 	WT (lang->u.fn.u.saved_auto_return_type);
6801 
6802       goto lds_min;
6803 
6804     case lds_decomp:  /* lang_decl_decomp.  */
6805       WT (lang->u.decomp.base);
6806       goto lds_min;
6807 
6808     case lds_min:  /* lang_decl_min.  */
6809     lds_min:
6810       WT (lang->u.min.template_info);
6811       {
6812 	tree access = lang->u.min.access;
6813 
6814 	/* DECL_ACCESS needs to be maintained by the definition of the
6815 	   (derived) class that changes the access.  The other users
6816 	   of DECL_ACCESS need to write it here.  */
6817 	if (!DECL_THUNK_P (t)
6818 	    && (DECL_CONTEXT (t) && TYPE_P (DECL_CONTEXT (t))))
6819 	  access = NULL_TREE;
6820 
6821 	WT (access);
6822       }
6823       break;
6824 
6825     case lds_ns:  /* lang_decl_ns.  */
6826       break;
6827 
6828     case lds_parm:  /* lang_decl_parm.  */
6829       if (streaming_p ())
6830 	{
6831 	  WU (lang->u.parm.level);
6832 	  WU (lang->u.parm.index);
6833 	}
6834       break;
6835     }
6836 #undef WU
6837 #undef WT
6838 }
6839 
6840 bool
6841 trees_in::lang_decl_vals (tree t)
6842 {
6843   struct lang_decl *lang = DECL_LANG_SPECIFIC (t);
6844 #define RU(X) ((X) = u ())
6845 #define RT(X) ((X) = tree_node ())
6846 
6847   /* Module index already read.  */
6848   switch (lang->u.base.selector)
6849     {
6850     default:
6851       gcc_unreachable ();
6852 
6853     case lds_fn:  /* lang_decl_fn.  */
6854       if (DECL_NAME (t) && IDENTIFIER_OVL_OP_P (DECL_NAME (t)))
6855 	{
6856 	  unsigned code = u ();
6857 
6858 	  /* Check consistency.  */
6859 	  if (code >= OVL_OP_MAX
6860 	      || (ovl_op_info[IDENTIFIER_ASSIGN_OP_P (DECL_NAME (t))][code]
6861 		  .ovl_op_code) == OVL_OP_ERROR_MARK)
6862 	    set_overrun ();
6863 	  else
6864 	    lang->u.fn.ovl_op_code = code;
6865 	}
6866 
6867       if (DECL_CLASS_SCOPE_P (t))
6868 	RT (lang->u.fn.context);
6869 
6870       if (lang->u.fn.thunk_p)
6871 	{
6872 	  RT (lang->u.fn.befriending_classes);
6873 	  lang->u.fn.u5.fixed_offset = wi ();
6874 	}
6875       else
6876 	RT (lang->u.fn.u5.cloned_function);
6877 
6878       if (FNDECL_USED_AUTO (t))
6879 	RT (lang->u.fn.u.saved_auto_return_type);
6880       goto lds_min;
6881 
6882     case lds_decomp:  /* lang_decl_decomp.  */
6883       RT (lang->u.decomp.base);
6884       goto lds_min;
6885 
6886     case lds_min:  /* lang_decl_min.  */
6887     lds_min:
6888       RT (lang->u.min.template_info);
6889       RT (lang->u.min.access);
6890       break;
6891 
6892     case lds_ns:  /* lang_decl_ns.  */
6893       break;
6894 
6895     case lds_parm:  /* lang_decl_parm.  */
6896       RU (lang->u.parm.level);
6897       RU (lang->u.parm.index);
6898       break;
6899     }
6900 #undef RU
6901 #undef RT
6902   return !get_overrun ();
6903 }
6904 
6905 /* Most of the value contents of lang_type are streamed in
6906    define_class.  */
6907 
6908 void
6909 trees_out::lang_type_vals (tree t)
6910 {
6911   const struct lang_type *lang = TYPE_LANG_SPECIFIC (t);
6912 #define WU(X) (u (X))
6913 #define WT(X) (tree_node (X))
6914   if (streaming_p ())
6915     WU (lang->align);
6916 #undef WU
6917 #undef WT
6918 }
6919 
6920 bool
6921 trees_in::lang_type_vals (tree t)
6922 {
6923   struct lang_type *lang = TYPE_LANG_SPECIFIC (t);
6924 #define RU(X) ((X) = u ())
6925 #define RT(X) ((X) = tree_node ())
6926   RU (lang->align);
6927 #undef RU
6928 #undef RT
6929   return !get_overrun ();
6930 }
6931 
6932 /* Write out the bools of T, including whether it carries any
6933    LANG_SPECIFIC information; on the read side this also covers
6934    allocation of any lang-specific object.  */
6935 
6936 void
6937 trees_out::tree_node_bools (tree t)
6938 {
6939   gcc_checking_assert (streaming_p ());
6940 
6941   /* We should never stream a namespace.  */
6942   gcc_checking_assert (TREE_CODE (t) != NAMESPACE_DECL
6943 		       || DECL_NAMESPACE_ALIAS (t));
6944 
6945   core_bools (t);
6946 
6947   switch (TREE_CODE_CLASS (TREE_CODE (t)))
6948     {
6949     case tcc_declaration:
6950       {
6951 	bool specific = DECL_LANG_SPECIFIC (t) != NULL;
6952 	b (specific);
6953 	if (specific && VAR_P (t))
6954 	  b (DECL_DECOMPOSITION_P (t));
6955 	if (specific)
6956 	  lang_decl_bools (t);
6957       }
6958       break;
6959 
6960     case tcc_type:
6961       {
6962 	bool specific = (TYPE_MAIN_VARIANT (t) == t
6963 			 && TYPE_LANG_SPECIFIC (t) != NULL);
6964 	gcc_assert (TYPE_LANG_SPECIFIC (t)
6965 		    == TYPE_LANG_SPECIFIC (TYPE_MAIN_VARIANT (t)));
6966 
6967 	b (specific);
6968 	if (specific)
6969 	  lang_type_bools (t);
6970       }
6971       break;
6972 
6973     default:
6974       break;
6975     }
6976 
6977   bflush ();
6978 }
6979 
6980 bool
6981 trees_in::tree_node_bools (tree t)
6982 {
6983   bool ok = core_bools (t);
6984 
6985   if (ok)
6986     switch (TREE_CODE_CLASS (TREE_CODE (t)))
6987       {
6988       case tcc_declaration:
6989 	if (b ())
6990 	  {
6991 	    bool decomp = VAR_P (t) && b ();
6992 
6993 	    ok = maybe_add_lang_decl_raw (t, decomp);
6994 	    if (ok)
6995 	      ok = lang_decl_bools (t);
6996 	}
6997 	break;
6998 
6999       case tcc_type:
7000 	if (b ())
7001 	  {
7002 	    ok = maybe_add_lang_type_raw (t);
7003 	    if (ok)
7004 	      ok = lang_type_bools (t);
7005 	  }
7006 	break;
7007 
7008       default:
7009 	break;
7010       }
7011 
7012   bflush ();
7013   if (!ok || get_overrun ())
7014     return false;
7015 
7016   return true;
7017 }
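/* Both tree_node_bools routines end with bflush (), so the packed bool
   data is padded out and the value streams that follow stay aligned.
   The reader must mirror the writer's lang-specific decisions bit for
   bit; for declarations the pairing is roughly:

     b (specific);                       // writer
     if (specific && VAR_P (t))
       b (DECL_DECOMPOSITION_P (t));

     if (b ())                           // reader
       {
	 bool decomp = VAR_P (t) && b ();
	 maybe_add_lang_decl_raw (t, decomp);
       }

   A divergence here misreads every later bit.  */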
7018 
7019 
7020 /* Write out the lang-specific vals of node T.  */
7021 
7022 void
7023 trees_out::lang_vals (tree t)
7024 {
7025   switch (TREE_CODE_CLASS (TREE_CODE (t)))
7026     {
7027     case tcc_declaration:
7028       if (DECL_LANG_SPECIFIC (t))
7029 	lang_decl_vals (t);
7030       break;
7031 
7032     case tcc_type:
7033       if (TYPE_MAIN_VARIANT (t) == t && TYPE_LANG_SPECIFIC (t))
7034 	lang_type_vals (t);
7035       break;
7036 
7037     default:
7038       break;
7039     }
7040 }
7041 
7042 bool
7043 trees_in::lang_vals (tree t)
7044 {
7045   bool ok = true;
7046 
7047   switch (TREE_CODE_CLASS (TREE_CODE (t)))
7048     {
7049     case tcc_declaration:
7050       if (DECL_LANG_SPECIFIC (t))
7051 	ok = lang_decl_vals (t);
7052       break;
7053 
7054     case tcc_type:
7055       if (TYPE_LANG_SPECIFIC (t))
7056 	ok = lang_type_vals (t);
7057       else
7058 	TYPE_LANG_SPECIFIC (t) = TYPE_LANG_SPECIFIC (TYPE_MAIN_VARIANT (t));
7059       break;
7060 
7061     default:
7062       break;
7063     }
7064 
7065   return ok;
7066 }
7067 
7068 /* Write out the value fields of node T.  */
7069 
7070 void
7071 trees_out::tree_node_vals (tree t)
7072 {
7073   core_vals (t);
7074   lang_vals (t);
7075 }
7076 
7077 bool
7078 trees_in::tree_node_vals (tree t)
7079 {
7080   bool ok = core_vals (t);
7081   if (ok)
7082     ok = lang_vals (t);
7083 
7084   return ok;
7085 }
7086 
7087 
7088 /* If T is a back reference, fixed reference or NULL, write out its
7089    code and return WK_none.  Otherwise return WK_value if we must write
7090    by value, or WK_normal otherwise.  */
7091 
7092 walk_kind
7093 trees_out::ref_node (tree t)
7094 {
7095   if (!t)
7096     {
7097       if (streaming_p ())
7098 	{
7099 	  /* NULL_TREE -> tt_null.  */
7100 	  null_count++;
7101 	  i (tt_null);
7102 	}
7103       return WK_none;
7104     }
7105 
7106   if (!TREE_VISITED (t))
7107     return WK_normal;
7108 
7109   /* An already-visited tree.  It must be in the map.  */
7110   int val = get_tag (t);
7111 
7112   if (val == tag_value)
7113     /* An entry we should walk into.  */
7114     return WK_value;
7115 
7116   const char *kind;
7117 
7118   if (val <= tag_backref)
7119     {
7120       /* Back reference -> -ve number  */
7121       if (streaming_p ())
7122 	i (val);
7123       kind = "backref";
7124     }
7125   else if (val >= tag_fixed)
7126     {
7127       /* Fixed reference -> tt_fixed */
7128       val -= tag_fixed;
7129       if (streaming_p ())
7130 	i (tt_fixed), u (val);
7131       kind = "fixed";
7132     }
7133 
7134   if (streaming_p ())
7135     {
7136       back_ref_count++;
7137       dump (dumper::TREE)
7138 	&& dump ("Wrote %s:%d %C:%N%S", kind, val, TREE_CODE (t), t, t);
7139     }
7140   return WK_none;
7141 }
7142 
7143 tree
7144 trees_in::back_ref (int tag)
7145 {
7146   tree res = NULL_TREE;
7147 
7148   if (tag < 0 && unsigned (~tag) < back_refs.length ())
7149     res = back_refs[~tag];
7150 
7151   if (!res
7152       /* Checking TREE_CODE is a dereference, so we know this is not a
7153 	 wild pointer.  Checking the code provides evidence we've not
7154 	 corrupted something.  */
7155       || TREE_CODE (res) >= MAX_TREE_CODES)
7156     set_overrun ();
7157   else
7158     dump (dumper::TREE) && dump ("Read backref:%d found %C:%N%S", tag,
7159 				 TREE_CODE (res), res, res);
7160   return res;
7161 }
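/* Back references are just negative tags indexing the back_refs vector
   from the end, so decoding is simply

     tree res = back_refs[~tag];   // tag < 0

   while fixed (global) trees travel as tt_fixed plus an index and
   NULL_TREE as tt_null; see trees_out::ref_node above.  */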
7162 
7163 unsigned
7164 trees_out::add_indirect_tpl_parms (tree parms)
7165 {
7166   unsigned len = 0;
7167   for (; parms; parms = TREE_CHAIN (parms), len++)
7168     {
7169       if (TREE_VISITED (parms))
7170 	break;
7171 
7172       int tag = insert (parms);
7173       if (streaming_p ())
7174 	dump (dumper::TREE)
7175 	  && dump ("Indirect:%d template's parameter %u %C:%N",
7176 		   tag, len, TREE_CODE (parms), parms);
7177     }
7178 
7179   if (streaming_p ())
7180     u (len);
7181 
7182   return len;
7183 }
7184 
7185 unsigned
7186 trees_in::add_indirect_tpl_parms (tree parms)
7187 {
7188   unsigned len = u ();
7189   for (unsigned ix = 0; ix != len; parms = TREE_CHAIN (parms), ix++)
7190     {
7191       int tag = insert (parms);
7192       dump (dumper::TREE)
7193 	&& dump ("Indirect:%d template's parameter %u %C:%N",
7194 		 tag, ix, TREE_CODE (parms), parms);
7195     }
7196 
7197   return len;
7198 }
7199 
7200 /* We've just found DECL by name.  Insert nodes that come with it, but
7201    cannot be found by name, so we'll not accidentally walk into them.  */
7202 
7203 void
7204 trees_out::add_indirects (tree decl)
7205 {
7206   unsigned count = 0;
7207 
7208   // FIXME:OPTIMIZATION We'll eventually want default fn parms of
7209   // templates and perhaps default template parms too.  The former can
7210   // be referenced from instantiations (as they are lazily
7211   // instantiated).  Also (deferred?) exception specifications of
7212   // templates.  See the note about PARM_DECLs in trees_out::decl_node.
7213   tree inner = decl;
7214   if (TREE_CODE (decl) == TEMPLATE_DECL)
7215     {
7216       count += add_indirect_tpl_parms (DECL_TEMPLATE_PARMS (decl));
7217 
7218       inner = DECL_TEMPLATE_RESULT (decl);
7219       int tag = insert (inner);
7220       if (streaming_p ())
7221 	dump (dumper::TREE)
7222 	  && dump ("Indirect:%d template's result %C:%N",
7223 		   tag, TREE_CODE (inner), inner);
7224       count++;
7225     }
7226 
7227   if (TREE_CODE (inner) == TYPE_DECL)
7228     {
7229       /* Make sure the type is in the map too.  Otherwise we get
7230 	 different RECORD_TYPEs for the same type, and things go
7231 	 south.  */
7232       tree type = TREE_TYPE (inner);
7233       gcc_checking_assert (DECL_ORIGINAL_TYPE (inner)
7234 			   || TYPE_NAME (type) == inner);
7235       int tag = insert (type);
7236       if (streaming_p ())
7237 	dump (dumper::TREE) && dump ("Indirect:%d decl's type %C:%N", tag,
7238 				     TREE_CODE (type), type);
7239       count++;
7240     }
7241 
7242   if (streaming_p ())
7243     {
7244       u (count);
7245       dump (dumper::TREE) && dump ("Inserted %u indirects", count);
7246     }
7247 }
7248 
7249 bool
7250 trees_in::add_indirects (tree decl)
7251 {
7252   unsigned count = 0;
7253 
7254   tree inner = decl;
7255   if (TREE_CODE (inner) == TEMPLATE_DECL)
7256     {
7257       count += add_indirect_tpl_parms (DECL_TEMPLATE_PARMS (decl));
7258 
7259       inner = DECL_TEMPLATE_RESULT (decl);
7260       int tag = insert (inner);
7261       dump (dumper::TREE)
7262 	&& dump ("Indirect:%d template's result %C:%N", tag,
7263 		 TREE_CODE (inner), inner);
7264       count++;
7265     }
7266 
7267   if (TREE_CODE (inner) == TYPE_DECL)
7268     {
7269       tree type = TREE_TYPE (inner);
7270       gcc_checking_assert (DECL_ORIGINAL_TYPE (inner)
7271 			   || TYPE_NAME (type) == inner);
7272       int tag = insert (type);
7273       dump (dumper::TREE)
7274 	&& dump ("Indirect:%d decl's type %C:%N", tag, TREE_CODE (type), type);
7275       count++;
7276     }
7277 
7278   dump (dumper::TREE) && dump ("Inserted %u indirects", count);
7279   return count == u ();
7280 }
7281 
7282 /* Stream a template parameter.  There are 4.5 kinds of parameter:
7283    a) Template - TEMPLATE_DECL->TYPE_DECL->TEMPLATE_TEMPLATE_PARM
7284    	TEMPLATE_TYPE_PARM_INDEX TPI
7285    b) Type - TYPE_DECL->TEMPLATE_TYPE_PARM TEMPLATE_TYPE_PARM_INDEX TPI
7286    c.1) NonTYPE - PARM_DECL DECL_INITIAL TPI We meet this first
7287    c.2) NonTYPE - CONST_DECL DECL_INITIAL Same TPI
7288    d) BoundTemplate - TYPE_DECL->BOUND_TEMPLATE_TEMPLATE_PARM
7289        TEMPLATE_TYPE_PARM_INDEX->TPI
7290        TEMPLATE_TEMPLATE_PARM_INFO->TEMPLATE_INFO
7291 
7292    All of these point to a TEMPLATE_PARM_INDEX, and #B also has a TEMPLATE_INFO
7293 */
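/* A sketch (illustrative only) of a parameter list exercising these
   kinds:

     template<template<class> class TT,  // (a) template
              class T,                   // (b) type
              int N>                     // (c) non-type
     struct S { using U = TT<T>; };      // TT<T>: (d) a bound template

   Each of the parameter nodes above ultimately refers to a
   TEMPLATE_PARM_INDEX recording its level and index.  */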
7294 
7295 void
7296 trees_out::tpl_parm_value (tree parm)
7297 {
7298   gcc_checking_assert (DECL_P (parm) && DECL_TEMPLATE_PARM_P (parm));
7299 
7300   int parm_tag = insert (parm);
7301   if (streaming_p ())
7302     {
7303       i (tt_tpl_parm);
7304       dump (dumper::TREE) && dump ("Writing template parm:%d %C:%N",
7305 				   parm_tag, TREE_CODE (parm), parm);
7306       start (parm);
7307       tree_node_bools (parm);
7308     }
7309 
7310   tree inner = parm;
7311   if (TREE_CODE (inner) == TEMPLATE_DECL)
7312     {
7313       inner = DECL_TEMPLATE_RESULT (inner);
7314       int inner_tag = insert (inner);
7315       if (streaming_p ())
7316 	{
7317 	  dump (dumper::TREE) && dump ("Writing inner template parm:%d %C:%N",
7318 				       inner_tag, TREE_CODE (inner), inner);
7319 	  start (inner);
7320 	  tree_node_bools (inner);
7321 	}
7322     }
7323 
7324   tree type = NULL_TREE;
7325   if (TREE_CODE (inner) == TYPE_DECL)
7326     {
7327       type = TREE_TYPE (inner);
7328       int type_tag = insert (type);
7329       if (streaming_p ())
7330 	{
7331 	  dump (dumper::TREE) && dump ("Writing template parm type:%d %C:%N",
7332 				       type_tag, TREE_CODE (type), type);
7333 	  start (type);
7334 	  tree_node_bools (type);
7335 	}
7336     }
7337 
7338   if (inner != parm)
7339     {
7340       /* This is a template-template parameter.  */
7341       unsigned tpl_levels = 0;
7342       tpl_header (parm, &tpl_levels);
7343       tpl_parms_fini (parm, tpl_levels);
7344     }
7345 
7346   tree_node_vals (parm);
7347   if (inner != parm)
7348     tree_node_vals (inner);
7349   if (type)
7350     {
7351       tree_node_vals (type);
7352       if (DECL_NAME (inner) == auto_identifier
7353 	  || DECL_NAME (inner) == decltype_auto_identifier)
7354 	{
7355 	  /* Placeholder auto.  */
7356 	  tree_node (DECL_INITIAL (inner));
7357 	  tree_node (DECL_SIZE_UNIT (inner));
7358 	}
7359     }
7360 
7361   if (streaming_p ())
7362     dump (dumper::TREE) && dump ("Wrote template parm:%d %C:%N",
7363 				 parm_tag, TREE_CODE (parm), parm);
7364 }
7365 
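/* Read a template parameter written by trees_out::tpl_parm_value above.
   Returns the parameter decl, or NULL_TREE on error.  */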
7366 tree
7367 trees_in::tpl_parm_value ()
7368 {
7369   tree parm = start ();
7370   if (!parm || !tree_node_bools (parm))
7371     return NULL_TREE;
7372 
7373   int parm_tag = insert (parm);
7374   dump (dumper::TREE) && dump ("Reading template parm:%d %C:%N",
7375 			       parm_tag, TREE_CODE (parm), parm);
7376 
7377   tree inner = parm;
7378   if (TREE_CODE (inner) == TEMPLATE_DECL)
7379     {
7380       inner = start ();
7381       if (!inner || !tree_node_bools (inner))
7382 	return NULL_TREE;
7383       int inner_tag = insert (inner);
7384       dump (dumper::TREE) && dump ("Reading inner template parm:%d %C:%N",
7385 				   inner_tag, TREE_CODE (inner), inner);
7386       DECL_TEMPLATE_RESULT (parm) = inner;
7387     }
7388 
7389   tree type = NULL_TREE;
7390   if (TREE_CODE (inner) == TYPE_DECL)
7391     {
7392       type = start ();
7393       if (!type || !tree_node_bools (type))
7394 	return NULL_TREE;
7395       int type_tag = insert (type);
7396       dump (dumper::TREE) && dump ("Reading template parm type:%d %C:%N",
7397 				   type_tag, TREE_CODE (type), type);
7398 
7399       TREE_TYPE (inner) = TREE_TYPE (parm) = type;
7400       TYPE_NAME (type) = parm;
7401     }
7402 
7403   if (inner != parm)
7404     {
7405       /* A template template parameter.  */
7406       unsigned tpl_levels = 0;
7407       tpl_header (parm, &tpl_levels);
7408       tpl_parms_fini (parm, tpl_levels);
7409     }
7410 
7411   tree_node_vals (parm);
7412   if (inner != parm)
7413     tree_node_vals (inner);
7414   if (type)
7415     {
7416       tree_node_vals (type);
7417       if (DECL_NAME (inner) == auto_identifier
7418 	  || DECL_NAME (inner) == decltype_auto_identifier)
7419 	{
7420 	  /* Placeholder auto.  */
7421 	  DECL_INITIAL (inner) = tree_node ();
7422 	  DECL_SIZE_UNIT (inner) = tree_node ();
7423 	}
7424       if (TYPE_CANONICAL (type))
7425 	{
7426 	  gcc_checking_assert (TYPE_CANONICAL (type) == type);
7427 	  TYPE_CANONICAL (type) = canonical_type_parameter (type);
7428 	}
7429     }
7430 
7431   dump (dumper::TREE) && dump ("Read template parm:%d %C:%N",
7432 			       parm_tag, TREE_CODE (parm), parm);
7433 
7434   return parm;
7435 }
7436 
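/* Write the entity index of DECL's depset DEP (zero if there is none),
   so the reader can install the incoming decl into its entity array as
   soon as it knows the decl is new.  */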
7437 void
7438 trees_out::install_entity (tree decl, depset *dep)
7439 {
7440   gcc_checking_assert (streaming_p ());
7441 
7442   /* Write the entity index, so we can insert it as soon as we
7443      know this is new.  */
7444   u (dep ? dep->cluster + 1 : 0);
7445   if (CHECKING_P && dep)
7446     {
7447       /* Add it to the entity map, such that we can tell it is
7448 	 part of us.  */
7449       bool existed;
7450       unsigned *slot = &entity_map->get_or_insert
7451 	(DECL_UID (decl), &existed);
7452       if (existed)
7453 	/* If it existed, it should match.  */
7454 	gcc_checking_assert (decl == (*entity_ary)[*slot]);
7455       *slot = ~dep->cluster;
7456     }
7457 }
7458 
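/* Read the entity index written above.  If non-zero, install DECL into
   the entity array and, if not already present, the entity map.  Returns
   true if DECL was installed as an entity.  */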
7459 bool
7460 trees_in::install_entity (tree decl)
7461 {
7462   unsigned entity_index = u ();
7463   if (!entity_index)
7464     return false;
7465 
7466   if (entity_index > state->entity_num)
7467     {
7468       set_overrun ();
7469       return false;
7470     }
7471 
7472   /* Insert the real decl into the entity ary.  */
7473   unsigned ident = state->entity_lwm + entity_index - 1;
7474   (*entity_ary)[ident] = decl;
7475 
7476   /* And into the entity map, if it's not already there.  */
7477   tree not_tmpl = STRIP_TEMPLATE (decl);
7478   if (!DECL_LANG_SPECIFIC (not_tmpl)
7479       || !DECL_MODULE_ENTITY_P (not_tmpl))
7480     {
7481       retrofit_lang_decl (not_tmpl);
7482       DECL_MODULE_ENTITY_P (not_tmpl) = true;
7483 
7484       /* Insert into the entity hash (it cannot already be there).  */
7485       bool existed;
7486       unsigned &slot = entity_map->get_or_insert (DECL_UID (decl), &existed);
7487       gcc_checking_assert (!existed);
7488       slot = ident;
7489     }
7490 
7491   return true;
7492 }
7493 
7494 static bool has_definition (tree decl);
7495 
7496 /* DECL is a decl node that must be written by value.  DEP is the
7497    decl's depset.  */
7498 
7499 void
7500 trees_out::decl_value (tree decl, depset *dep)
7501 {
7502   /* We should not be writing clones or template parms.  */
7503   gcc_checking_assert (DECL_P (decl)
7504 		       && !DECL_CLONED_FUNCTION_P (decl)
7505 		       && !DECL_TEMPLATE_PARM_P (decl));
7506 
7507   /* We should never be writing non-typedef ptrmemfuncs by value.  */
7508   gcc_checking_assert (TREE_CODE (decl) != TYPE_DECL
7509 		       || DECL_ORIGINAL_TYPE (decl)
7510 		       || !TYPE_PTRMEMFUNC_P (TREE_TYPE (decl)));
7511 
7512   merge_kind mk = get_merge_kind (decl, dep);
7513 
7514   if (CHECKING_P)
7515     {
7516       /* Never start in the middle of a template.  */
7517       int use_tpl = -1;
7518       if (tree ti = node_template_info (decl, use_tpl))
7519 	gcc_checking_assert (TREE_CODE (TI_TEMPLATE (ti)) == OVERLOAD
7520 			     || TREE_CODE (TI_TEMPLATE (ti)) == FIELD_DECL
7521 			     || (DECL_TEMPLATE_RESULT (TI_TEMPLATE (ti))
7522 				 != decl));
7523     }
7524 
7525   if (streaming_p ())
7526     {
7527       /* A new node -> tt_decl.  */
7528       decl_val_count++;
7529       i (tt_decl);
7530       u (mk);
7531       start (decl);
7532 
7533       if (mk != MK_unique)
7534 	{
7535 	  if (!(mk & MK_template_mask) && !state->is_header ())
7536 	    {
7537 	      /* Tell the importer whether this is a global module entity,
7538 		 or a module entity.  This bool merges into the next block
7539 		 of bools.  Sneaky.  */
7540 	      tree o = get_originating_module_decl (decl);
7541 	      bool is_mod = false;
7542 
7543 	      tree not_tmpl = STRIP_TEMPLATE (o);
7544 	      if (DECL_LANG_SPECIFIC (not_tmpl)
7545 		  && DECL_MODULE_PURVIEW_P (not_tmpl))
7546 		is_mod = true;
7547 
7548 	      b (is_mod);
7549 	    }
7550 	  b (dep && dep->has_defn ());
7551 	}
7552       tree_node_bools (decl);
7553     }
7554 
7555   int tag = insert (decl, WK_value);
7556   if (streaming_p ())
7557     dump (dumper::TREE)
7558       && dump ("Writing %s:%d %C:%N%S", merge_kind_name[mk], tag,
7559 	       TREE_CODE (decl), decl, decl);
7560 
7561   tree inner = decl;
7562   int inner_tag = 0;
7563   if (TREE_CODE (decl) == TEMPLATE_DECL)
7564     {
7565       inner = DECL_TEMPLATE_RESULT (decl);
7566       inner_tag = insert (inner, WK_value);
7567 
7568       if (streaming_p ())
7569 	{
7570 	  int code = TREE_CODE (inner);
7571 	  u (code);
7572 	  start (inner, true);
7573 	  tree_node_bools (inner);
7574 	  dump (dumper::TREE)
7575 	    && dump ("Writing %s:%d %C:%N%S", merge_kind_name[mk], inner_tag,
7576 		     TREE_CODE (inner), inner, inner);
7577 	}
7578     }
7579 
7580   tree type = NULL_TREE;
7581   int type_tag = 0;
7582   tree stub_decl = NULL_TREE;
7583   int stub_tag = 0;
7584   if (TREE_CODE (inner) == TYPE_DECL)
7585     {
7586       type = TREE_TYPE (inner);
7587       bool has_type = (type == TYPE_MAIN_VARIANT (type)
7588 		       && TYPE_NAME (type) == inner);
7589 
7590       if (streaming_p ())
7591 	u (has_type ? TREE_CODE (type) : 0);
7592 
7593       if (has_type)
7594 	{
7595 	  type_tag = insert (type, WK_value);
7596 	  if (streaming_p ())
7597 	    {
7598 	      start (type, true);
7599 	      tree_node_bools (type);
7600 	      dump (dumper::TREE)
7601 		&& dump ("Writing type:%d %C:%N", type_tag,
7602 			 TREE_CODE (type), type);
7603 	    }
7604 
7605 	  stub_decl = TYPE_STUB_DECL (type);
7606 	  bool has_stub = inner != stub_decl;
7607 	  if (streaming_p ())
7608 	    u (has_stub ? TREE_CODE (stub_decl) : 0);
7609 	  if (has_stub)
7610 	    {
7611 	      stub_tag = insert (stub_decl);
7612 	      if (streaming_p ())
7613 		{
7614 		  start (stub_decl, true);
7615 		  tree_node_bools (stub_decl);
7616 		  dump (dumper::TREE)
7617 		    && dump ("Writing stub_decl:%d %C:%N", stub_tag,
7618 			     TREE_CODE (stub_decl), stub_decl);
7619 		}
7620 	    }
7621 	  else
7622 	    stub_decl = NULL_TREE;
7623 	}
7624       else
7625 	/* Regular typedef.  */
7626 	type = NULL_TREE;
7627     }
7628 
7629   /* Stream the container, we want it correctly canonicalized before
7630      we start emitting keys for this decl.  */
7631   tree container = decl_container (decl);
7632 
7633   unsigned tpl_levels = 0;
7634   if (decl != inner)
7635     tpl_header (decl, &tpl_levels);
7636   if (TREE_CODE (inner) == FUNCTION_DECL)
7637     fn_parms_init (inner);
7638 
7639   /* Now write out the merging information, and then really
7640      install the tag values.  */
7641   key_mergeable (tag, mk, decl, inner, container, dep);
7642 
7643   if (streaming_p ())
7644     dump (dumper::MERGE)
7645       && dump ("Wrote:%d's %s merge key %C:%N", tag,
7646 	       merge_kind_name[mk], TREE_CODE (decl), decl);
7647 
7648   if (TREE_CODE (inner) == FUNCTION_DECL)
7649     fn_parms_fini (inner);
7650 
7651   if (!is_key_order ())
7652     tree_node_vals (decl);
7653 
7654   if (inner_tag)
7655     {
7656       if (!is_key_order ())
7657 	tree_node_vals (inner);
7658       tpl_parms_fini (decl, tpl_levels);
7659     }
7660 
7661   if (type && !is_key_order ())
7662     {
7663       tree_node_vals (type);
7664       if (stub_decl)
7665 	tree_node_vals (stub_decl);
7666     }
7667 
7668   if (!is_key_order ())
7669     {
7670       if (mk & MK_template_mask
7671 	  || mk == MK_partial
7672 	  || mk == MK_friend_spec)
7673 	{
7674 	  if (mk != MK_partial)
7675 	    {
7676 	      // FIXME: We should make use of the merge-key by
7677 	      // exposing it outside of key_mergeable.  But this gets
7678 	      // the job done.
7679 	      auto *entry = reinterpret_cast <spec_entry *> (dep->deps[0]);
7680 
7681 	      if (streaming_p ())
7682 		u (get_mergeable_specialization_flags (entry->tmpl, decl));
7683 	      tree_node (entry->tmpl);
7684 	      tree_node (entry->args);
7685 	    }
7686 	  else
7687 	    {
7688 	      tree_node (CLASSTYPE_TI_TEMPLATE (TREE_TYPE (inner)));
7689 	      tree_node (CLASSTYPE_TI_ARGS (TREE_TYPE (inner)));
7690 	    }
7691 	}
7692       tree_node (get_constraints (decl));
7693     }
7694 
7695   if (streaming_p ())
7696     {
7697       /* Do not stray outside this section.  */
7698       gcc_checking_assert (!dep || dep->section == dep_hash->section);
7699 
7700       /* Write the entity index, so we can insert it as soon as we
7701 	 know this is new.  */
7702       install_entity (decl, dep);
7703     }
7704 
7705   if (VAR_OR_FUNCTION_DECL_P (inner)
7706       && DECL_LANG_SPECIFIC (inner)
7707       && DECL_MODULE_ATTACHMENTS_P (inner)
7708       && !is_key_order ())
7709     {
7710       /* Stream the attached entities.  */
7711       auto *attach_vec = attached_table->get (inner);
7712       unsigned num = attach_vec->length ();
7713       if (streaming_p ())
7714 	u (num);
7715       for (unsigned ix = 0; ix != num; ix++)
7716 	{
7717 	  tree attached = (*attach_vec)[ix];
7718 	  tree_node (attached);
7719 	  if (streaming_p ())
7720 	    dump (dumper::MERGE)
7721 	      && dump ("Written %d[%u] attached decl %N", tag, ix, attached);
7722 	}
7723     }
7724 
7725   bool is_typedef = false;
7726   if (!type && TREE_CODE (inner) == TYPE_DECL)
7727     {
7728       tree t = TREE_TYPE (inner);
7729       unsigned tdef_flags = 0;
7730       if (DECL_ORIGINAL_TYPE (inner)
7731 	  && TYPE_NAME (TREE_TYPE (inner)) == inner)
7732 	{
7733 	  tdef_flags |= 1;
7734 	  if (TYPE_STRUCTURAL_EQUALITY_P (t)
7735 	      && TYPE_DEPENDENT_P_VALID (t)
7736 	      && TYPE_DEPENDENT_P (t))
7737 	    tdef_flags |= 2;
7738 	}
7739       if (streaming_p ())
7740 	u (tdef_flags);
7741 
7742       if (tdef_flags & 1)
7743 	{
7744 	  /* A typedef type.  */
7745 	  int type_tag = insert (t);
7746 	  if (streaming_p ())
7747 	    dump (dumper::TREE)
7748 	      && dump ("Cloned:%d %s %C:%N", type_tag,
7749 		       tdef_flags & 2 ? "depalias" : "typedef",
7750 		       TREE_CODE (t), t);
7751 
7752 	  is_typedef = true;
7753 	}
7754     }
7755 
7756   if (streaming_p () && DECL_MAYBE_IN_CHARGE_CDTOR_P (decl))
7757     {
7758       bool cloned_p
7759 	= (DECL_CHAIN (decl) && DECL_CLONED_FUNCTION_P (DECL_CHAIN (decl)));
7760       bool needs_vtt_parm_p
7761 	= (cloned_p && CLASSTYPE_VBASECLASSES (DECL_CONTEXT (decl)));
7762       bool omit_inherited_parms_p
7763 	= (cloned_p && DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (decl)
7764 	   && base_ctor_omit_inherited_parms (decl));
7765       unsigned flags = (int (cloned_p) << 0
7766 			| int (needs_vtt_parm_p) << 1
7767 			| int (omit_inherited_parms_p) << 2);
7768       u (flags);
7769       dump (dumper::TREE) && dump ("CDTOR %N is %scloned",
7770 				   decl, cloned_p ? "" : "not ");
7771     }
7772 
7773   if (streaming_p ())
7774     dump (dumper::TREE) && dump ("Written decl:%d %C:%N", tag,
7775 				 TREE_CODE (decl), decl);
7776 
7777   if (NAMESPACE_SCOPE_P (inner))
7778     gcc_checking_assert (!dep == (VAR_OR_FUNCTION_DECL_P (inner)
7779 				  && DECL_LOCAL_DECL_P (inner)));
7780   else if ((TREE_CODE (inner) == TYPE_DECL
7781 	    && !is_typedef
7782 	    && TYPE_NAME (TREE_TYPE (inner)) == inner)
7783 	   || TREE_CODE (inner) == FUNCTION_DECL)
7784     {
7785       bool write_defn = !dep && has_definition (decl);
7786       if (streaming_p ())
7787 	u (write_defn);
7788       if (write_defn)
7789 	write_definition (decl);
7790     }
7791 }
7792 
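/* Read a decl written by trees_out::decl_value above.  Returns the decl
   itself if it is new, the already-known duplicate if we matched an
   existing declaration, or NULL_TREE on error.  */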
7793 tree
7794 trees_in::decl_value ()
7795 {
7796   int tag = 0;
7797   bool is_mod = false;
7798   bool has_defn = false;
7799   unsigned mk_u = u ();
7800   if (mk_u >= MK_hwm || !merge_kind_name[mk_u])
7801     {
7802       set_overrun ();
7803       return NULL_TREE;
7804     }
7805 
7806   unsigned saved_unused = unused;
7807   unused = 0;
7808 
7809   merge_kind mk = merge_kind (mk_u);
7810 
7811   tree decl = start ();
7812   if (decl)
7813     {
7814       if (mk != MK_unique)
7815 	{
7816 	  if (!(mk & MK_template_mask) && !state->is_header ())
7817 	    /* See note in trees_out about where this bool is sequenced.  */
7818 	    is_mod = b ();
7819 
7820 	  has_defn = b ();
7821 	}
7822 
7823       if (!tree_node_bools (decl))
7824 	decl = NULL_TREE;
7825     }
7826 
7827   /* Insert into map.  */
7828   tag = insert (decl);
7829   if (decl)
7830     dump (dumper::TREE)
7831       && dump ("Reading:%d %C", tag, TREE_CODE (decl));
7832 
7833   tree inner = decl;
7834   int inner_tag = 0;
7835   if (decl && TREE_CODE (decl) == TEMPLATE_DECL)
7836     {
7837       int code = u ();
7838       inner = start (code);
7839       if (inner && tree_node_bools (inner))
7840 	DECL_TEMPLATE_RESULT (decl) = inner;
7841       else
7842 	decl = NULL_TREE;
7843 
7844       inner_tag = insert (inner);
7845       if (decl)
7846 	dump (dumper::TREE)
7847 	  && dump ("Reading:%d %C", inner_tag, TREE_CODE (inner));
7848     }
7849 
7850   tree type = NULL_TREE;
7851   int type_tag = 0;
7852   tree stub_decl = NULL_TREE;
7853   int stub_tag = 0;
7854   if (decl && TREE_CODE (inner) == TYPE_DECL)
7855     {
7856       if (unsigned type_code = u ())
7857 	{
7858 	  type = start (type_code);
7859 	  if (type && tree_node_bools (type))
7860 	    {
7861 	      TREE_TYPE (inner) = type;
7862 	      TYPE_NAME (type) = inner;
7863 	    }
7864 	  else
7865 	    decl = NULL_TREE;
7866 
7867 	  type_tag = insert (type);
7868 	  if (decl)
7869 	    dump (dumper::TREE)
7870 	      && dump ("Reading type:%d %C", type_tag, TREE_CODE (type));
7871 
7872 	  if (unsigned stub_code = u ())
7873 	    {
7874 	      stub_decl = start (stub_code);
7875 	      if (stub_decl && tree_node_bools (stub_decl))
7876 		{
7877 		  TREE_TYPE (stub_decl) = type;
7878 		  TYPE_STUB_DECL (type) = stub_decl;
7879 		}
7880 	      else
7881 		decl = NULL_TREE;
7882 
7883 	      stub_tag = insert (stub_decl);
7884 	      if (decl)
7885 		dump (dumper::TREE)
7886 		  && dump ("Reading stub_decl:%d %C", stub_tag,
7887 			   TREE_CODE (stub_decl));
7888 	    }
7889 	}
7890     }
7891 
7892   if (!decl)
7893     {
7894     bail:
7895       if (inner_tag != 0)
7896 	back_refs[~inner_tag] = NULL_TREE;
7897       if (type_tag != 0)
7898 	back_refs[~type_tag] = NULL_TREE;
7899       if (stub_tag != 0)
7900 	back_refs[~stub_tag] = NULL_TREE;
7901       if (tag != 0)
7902 	back_refs[~tag] = NULL_TREE;
7903       set_overrun ();
7904       /* Bail.  */
7905       unused = saved_unused;
7906       return NULL_TREE;
7907     }
7908 
7909   /* Read the container, to ensure it's already been streamed in.  */
7910   tree container = decl_container ();
7911   unsigned tpl_levels = 0;
7912 
7913   /* Figure out if this decl is already known about.  */
7914   int parm_tag = 0;
7915 
7916   if (decl != inner)
7917     if (!tpl_header (decl, &tpl_levels))
7918       goto bail;
7919   if (TREE_CODE (inner) == FUNCTION_DECL)
7920     parm_tag = fn_parms_init (inner);
7921 
7922   tree existing = key_mergeable (tag, mk, decl, inner, type, container, is_mod);
7923   tree existing_inner = existing;
7924   if (existing)
7925     {
7926       if (existing == error_mark_node)
7927 	goto bail;
7928 
7929       if (TREE_CODE (STRIP_TEMPLATE (existing)) == TYPE_DECL)
7930 	{
7931 	  tree etype = TREE_TYPE (existing);
7932 	  if (TYPE_LANG_SPECIFIC (etype)
7933 	      && COMPLETE_TYPE_P (etype)
7934 	      && !CLASSTYPE_MEMBER_VEC (etype))
7935 	    /* Give it a member vec; we're likely gonna be looking
7936 	       inside it.  */
7937 	    set_class_bindings (etype, -1);
7938 	}
7939 
7940       /* Install the existing decl into the back ref array.  */
7941       register_duplicate (decl, existing);
7942       back_refs[~tag] = existing;
7943       if (inner_tag != 0)
7944 	{
7945 	  existing_inner = DECL_TEMPLATE_RESULT (existing);
7946 	  back_refs[~inner_tag] = existing_inner;
7947 	}
7948 
7949       if (type_tag != 0)
7950 	{
7951 	  tree existing_type = TREE_TYPE (existing);
7952 	  back_refs[~type_tag] = existing_type;
7953 	  if (stub_tag != 0)
7954 	    back_refs[~stub_tag] = TYPE_STUB_DECL (existing_type);
7955 	}
7956     }
7957 
7958   if (parm_tag)
7959     fn_parms_fini (parm_tag, inner, existing_inner, has_defn);
7960 
7961   if (!tree_node_vals (decl))
7962     goto bail;
7963 
7964   if (inner_tag)
7965     {
7966       gcc_checking_assert (DECL_TEMPLATE_RESULT (decl) == inner);
7967 
7968       if (!tree_node_vals (inner))
7969 	goto bail;
7970 
7971       if (!tpl_parms_fini (decl, tpl_levels))
7972 	goto bail;
7973     }
7974 
7975   if (type && (!tree_node_vals (type)
7976 	       || (stub_decl && !tree_node_vals (stub_decl))))
7977     goto bail;
7978 
7979   spec_entry spec;
7980   unsigned spec_flags = 0;
7981   if (mk & MK_template_mask
7982       || mk == MK_partial
7983       || mk == MK_friend_spec)
7984     {
7985       if (mk == MK_partial)
7986 	spec_flags = 2;
7987       else
7988 	spec_flags = u ();
7989 
7990       spec.tmpl = tree_node ();
7991       spec.args = tree_node ();
7992     }
7993   /* Hold constraints on the spec field, for a short while.  */
7994   spec.spec = tree_node ();
7995 
7996   dump (dumper::TREE) && dump ("Read:%d %C:%N", tag, TREE_CODE (decl), decl);
7997 
7998   existing = back_refs[~tag];
7999   bool installed = install_entity (existing);
8000   bool is_new = existing == decl;
8001 
8002   if (VAR_OR_FUNCTION_DECL_P (inner)
8003       && DECL_LANG_SPECIFIC (inner)
8004       && DECL_MODULE_ATTACHMENTS_P (inner))
8005     {
8006       /* Read and maybe install the attached entities.  */
8007       bool existed;
8008       auto &set = attached_table->get_or_insert (STRIP_TEMPLATE (existing),
8009 						 &existed);
8010       unsigned num = u ();
8011       if (is_new == existed)
8012 	set_overrun ();
8013       if (is_new)
8014 	set.reserve (num);
8015       for (unsigned ix = 0; !get_overrun () && ix != num; ix++)
8016 	{
8017 	  tree attached = tree_node ();
8018 	  dump (dumper::MERGE)
8019 	    && dump ("Read %d[%u] %s attached decl %N", tag, ix,
8020 		     is_new ? "new" : "matched", attached);
8021 	  if (is_new)
8022 	    set.quick_push (attached);
8023 	  else if (set[ix] != attached)
8024 	    set_overrun ();
8025 	}
8026     }
8027 
8028   /* Regular typedefs will have a NULL TREE_TYPE at this point.  */
8029   unsigned tdef_flags = 0;
8030   bool is_typedef = false;
8031   if (!type && TREE_CODE (inner) == TYPE_DECL)
8032     {
8033       tdef_flags = u ();
8034       if (tdef_flags & 1)
8035 	is_typedef = true;
8036     }
8037 
8038   if (is_new)
8039     {
8040       /* A newly discovered node.  */
8041       if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8042 	/* Mark this identifier as naming a virtual function --
8043 	   lookup_overrides relies on this optimization.  */
8044 	IDENTIFIER_VIRTUAL_P (DECL_NAME (decl)) = true;
8045 
8046       if (installed)
8047 	{
8048 	  /* Mark the entity as imported.  */
8049 	  retrofit_lang_decl (inner);
8050 	  DECL_MODULE_IMPORT_P (inner) = true;
8051 	}
8052 
8053       if (spec.spec)
8054 	set_constraints (decl, spec.spec);
8055 
8056       if (TREE_CODE (decl) == INTEGER_CST && !TREE_OVERFLOW (decl))
8057 	{
8058 	  decl = cache_integer_cst (decl, true);
8059 	  back_refs[~tag] = decl;
8060 	}
8061 
8062       if (is_typedef)
8063 	{
8064 	  /* Frob it to be ready for cloning.  */
8065 	  TREE_TYPE (inner) = DECL_ORIGINAL_TYPE (inner);
8066 	  DECL_ORIGINAL_TYPE (inner) = NULL_TREE;
8067 	  set_underlying_type (inner);
8068 	  if (tdef_flags & 2)
8069 	    {
8070 	      /* Match instantiate_alias_template's handling.  */
8071 	      tree type = TREE_TYPE (inner);
8072 	      TYPE_DEPENDENT_P (type) = true;
8073 	      TYPE_DEPENDENT_P_VALID (type) = true;
8074 	      SET_TYPE_STRUCTURAL_EQUALITY (type);
8075 	    }
8076 	}
8077 
8078       if (inner_tag)
8079 	/* Set the TEMPLATE_DECL's type.  */
8080 	TREE_TYPE (decl) = TREE_TYPE (inner);
8081 
8082       if (mk & MK_template_mask
8083 	  || mk == MK_partial)
8084 	{
8085 	  /* Add to specialization tables now that constraints etc are
8086 	     added.  */
8087 	  bool is_type = mk == MK_partial || !(mk & MK_tmpl_decl_mask);
8088 
8089 	  spec.spec = is_type ? type : mk & MK_tmpl_tmpl_mask ? inner : decl;
8090 	  add_mergeable_specialization (!is_type,
8091 					!is_type && mk & MK_tmpl_alias_mask,
8092 					&spec, decl, spec_flags);
8093 	}
8094 
8095       if (NAMESPACE_SCOPE_P (decl)
8096 	  && (mk == MK_named || mk == MK_unique
8097 	      || mk == MK_enum || mk == MK_friend_spec)
8098 	  && !(VAR_OR_FUNCTION_DECL_P (decl) && DECL_LOCAL_DECL_P (decl)))
8099 	add_module_namespace_decl (CP_DECL_CONTEXT (decl), decl);
8100 
8101       if (DECL_ARTIFICIAL (decl)
8102 	  && TREE_CODE (decl) == FUNCTION_DECL
8103 	  && !DECL_TEMPLATE_INFO (decl)
8104 	  && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
8105 	  && TYPE_SIZE (DECL_CONTEXT (decl))
8106 	  && !DECL_THUNK_P (decl))
8107 	/* A new implicit member function, when the class is
8108 	   complete.  This means the importee declared it, and
8109 	   we must now add it to the class.  Note that implicit
8110 	   member fns of template instantiations do not themselves
8111 	   look like templates.  */
8112 	if (!install_implicit_member (inner))
8113 	  set_overrun ();
8114     }
8115   else
8116     {
8117       /* DECL is the to-be-discarded decl.  Its internal pointers will
8118 	 be to the EXISTING's structure.  Frob it to point to its
8119 	 own other structures, so loading its definition will alter
8120 	 it, and not the existing decl.  */
8121       dump (dumper::MERGE) && dump ("Deduping %N", existing);
8122 
8123       if (inner_tag)
8124 	DECL_TEMPLATE_RESULT (decl) = inner;
8125 
8126       if (type)
8127 	{
8128 	  /* Point at the to-be-discarded type & decl.  */
8129 	  TYPE_NAME (type) = inner;
8130 	  TREE_TYPE (inner) = type;
8131 
8132 	  TYPE_STUB_DECL (type) = stub_decl ? stub_decl : inner;
8133 	  if (stub_decl)
8134 	    TREE_TYPE (stub_decl) = type;
8135 	}
8136 
8137       if (inner_tag)
8138 	/* Set the TEMPLATE_DECL's type.  */
8139 	TREE_TYPE (decl) = TREE_TYPE (inner);
8140 
8141       if (!is_matching_decl (existing, decl, is_typedef))
8142 	unmatched_duplicate (existing);
8143 
8144       if (TREE_CODE (inner) == FUNCTION_DECL)
8145 	{
8146 	  tree e_inner = STRIP_TEMPLATE (existing);
8147 	  for (auto parm = DECL_ARGUMENTS (inner);
8148 	       parm; parm = DECL_CHAIN (parm))
8149 	    DECL_CONTEXT (parm) = e_inner;
8150 	}
8151 
8152       /* And our result is the existing node.  */
8153       decl = existing;
8154     }
8155 
8156   if (mk == MK_friend_spec)
8157     {
8158       tree e = match_mergeable_specialization (true, &spec);
8159       if (!e)
8160 	{
8161 	  spec.spec = inner;
8162 	  add_mergeable_specialization (true, false, &spec, decl, spec_flags);
8163 	}
8164       else if (e != existing)
8165 	set_overrun ();
8166     }
8167 
8168   if (is_typedef)
8169     {
8170       /* Insert the type into the array now.  */
8171       tag = insert (TREE_TYPE (decl));
8172       dump (dumper::TREE)
8173 	&& dump ("Cloned:%d typedef %C:%N",
8174 		 tag, TREE_CODE (TREE_TYPE (decl)), TREE_TYPE (decl));
8175     }
8176 
8177   unused = saved_unused;
8178 
8179   if (DECL_MAYBE_IN_CHARGE_CDTOR_P (decl))
8180     {
8181       unsigned flags = u ();
8182 
8183       if (is_new)
8184 	{
8185 	  bool cloned_p = flags & 1;
8186 	  dump (dumper::TREE) && dump ("CDTOR %N is %scloned",
8187 				       decl, cloned_p ? "" : "not ");
8188 	  if (cloned_p)
8189 	    build_cdtor_clones (decl, flags & 2, flags & 4,
8190 				/* Update the member vec, if there is
8191 				   one (we're in a different cluster
8192 				   to the class defn).  */
8193 				CLASSTYPE_MEMBER_VEC (DECL_CONTEXT (decl)));
8194 	}
8195     }
8196 
8197   if (!NAMESPACE_SCOPE_P (inner)
8198       && ((TREE_CODE (inner) == TYPE_DECL
8199 	   && !is_typedef
8200 	   && TYPE_NAME (TREE_TYPE (inner)) == inner)
8201 	  || TREE_CODE (inner) == FUNCTION_DECL)
8202       && u ())
8203     read_definition (decl);
8204 
8205   return decl;
8206 }
8207 
8208 /* DECL is an unnameable member of CTX.  Return a suitable identifying
8209    index.  */
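/* "Unnameable" members are things like anonymous unions/structs, unnamed
   bit-fields and class-scope USING_DECLs, e.g. (illustrative only):

     struct C { union { int a; float b; }; int : 3; };

   Such a member is identified by its ordinal among the unnameable fields
   of CTX; lookup_field_ident below performs the inverse mapping.  */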
8210 
8211 static unsigned
8212 get_field_ident (tree ctx, tree decl)
8213 {
8214   gcc_checking_assert (TREE_CODE (decl) == USING_DECL
8215 		       || !DECL_NAME (decl)
8216 		       || IDENTIFIER_ANON_P (DECL_NAME (decl)));
8217 
8218   unsigned ix = 0;
8219   for (tree fields = TYPE_FIELDS (ctx);
8220        fields; fields = DECL_CHAIN (fields))
8221     {
8222       if (fields == decl)
8223 	return ix;
8224 
8225       if (DECL_CONTEXT (fields) == ctx
8226 	  && (TREE_CODE (fields) == USING_DECL
8227 	      || (TREE_CODE (fields) == FIELD_DECL
8228 		  && (!DECL_NAME (fields)
8229 		      || IDENTIFIER_ANON_P (DECL_NAME (fields))))))
8230 	/* Count this field.  */
8231 	ix++;
8232     }
8233   gcc_unreachable ();
8234 }
8235 
8236 static tree
8237 lookup_field_ident (tree ctx, unsigned ix)
8238 {
8239   for (tree fields = TYPE_FIELDS (ctx);
8240        fields; fields = DECL_CHAIN (fields))
8241     if (DECL_CONTEXT (fields) == ctx
8242 	&& (TREE_CODE (fields) == USING_DECL
8243 	    || (TREE_CODE (fields) == FIELD_DECL
8244 		&& (!DECL_NAME (fields)
8245 		    || IDENTIFIER_ANON_P (DECL_NAME (fields))))))
8246       if (!ix--)
8247 	return fields;
8248 
8249   return NULL_TREE;
8250 }
8251 
8252 /* Reference DECL.  REF indicates the walk kind we are performing.
8253    Return true if we should write this decl by value.  */
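/* An illustrative summary of the cases below: PARM_DECLs are referenced
   via their containing function; CONST_DECLs via their enumeration and
   name; FIELD_DECLs and USING_DECLs via their class and a name or field
   ordinal; vtables via their class and slot; tinfo objects via their
   pseudo index; thunks and cdtor clones via their target function;
   entities in the entity table via (import, entity-index); and decls
   internal to some context are simply written by value.  */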
8254 
8255 bool
8256 trees_out::decl_node (tree decl, walk_kind ref)
8257 {
8258   gcc_checking_assert (DECL_P (decl) && !DECL_TEMPLATE_PARM_P (decl)
8259 		       && DECL_CONTEXT (decl));
8260 
8261   if (ref == WK_value)
8262     {
8263       depset *dep = dep_hash->find_dependency (decl);
8264       decl_value (decl, dep);
8265       return false;
8266     }
8267 
8268   switch (TREE_CODE (decl))
8269     {
8270     default:
8271       break;
8272 
8273     case FUNCTION_DECL:
8274       gcc_checking_assert (!DECL_LOCAL_DECL_P (decl));
8275       break;
8276 
8277     case RESULT_DECL:
8278       /* Unlike PARM_DECLs, RESULT_DECLs are only generated and
8279          referenced when we're inside the function itself.  */
8280       return true;
8281 
8282     case PARM_DECL:
8283       {
8284 	if (streaming_p ())
8285 	  i (tt_parm);
8286 	tree_node (DECL_CONTEXT (decl));
8287 	if (streaming_p ())
8288 	  {
8289 	    /* That must have put this in the map.  */
8290 	    walk_kind ref = ref_node (decl);
8291 	    if (ref != WK_none)
8292 	      // FIXME:OPTIMIZATION We can wander into bits of the
8293 	      // template this was instantiated from.  For instance
8294 	      // deferred noexcept and default parms.  Currently we'll
8295 	      // end up cloning those bits of tree.  It would be nice
8296 	      // to reference those specific nodes.  I think putting those
8297 	      // things in the map when we reference their template by
8298 	      // name would do it.  See the note in add_indirects.
8299 	      return true;
8300 
8301 	    dump (dumper::TREE)
8302 	      && dump ("Wrote %s reference %N",
8303 		       TREE_CODE (decl) == PARM_DECL ? "parameter" : "result",
8304 		       decl);
8305 	  }
8306       }
8307       return false;
8308 
8309     case IMPORTED_DECL:
8310       /* This describes a USING_DECL to the middle end's debug machinery.
8311 	 It originates from the Fortran FE, and has nothing to do with
8312 	 C++ modules.  */
8313       return true;
8314 
8315     case LABEL_DECL:
8316       return true;
8317 
8318     case CONST_DECL:
8319       {
8320 	/* If I end up cloning enum decls, implementing C++20 using
8321 	   E::v, this will need tweaking.   */
8322 	if (streaming_p ())
8323 	  i (tt_enum_decl);
8324 	tree ctx = DECL_CONTEXT (decl);
8325 	gcc_checking_assert (TREE_CODE (ctx) == ENUMERAL_TYPE);
8326 	tree_node (ctx);
8327 	tree_node (DECL_NAME (decl));
8328 
8329 	int tag = insert (decl);
8330 	if (streaming_p ())
8331 	  dump (dumper::TREE)
8332 	    && dump ("Wrote enum decl:%d %C:%N", tag, TREE_CODE (decl), decl);
8333 	return false;
8334       }
8335       break;
8336 
8337     case USING_DECL:
8338       if (TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
8339 	break;
8340       /* FALLTHROUGH  */
8341 
8342     case FIELD_DECL:
8343       {
8344 	if (streaming_p ())
8345 	  i (tt_data_member);
8346 
8347 	tree ctx = DECL_CONTEXT (decl);
8348 	tree_node (ctx);
8349 
8350 	tree name = NULL_TREE;
8351 
8352 	if (TREE_CODE (decl) == USING_DECL)
8353 	  ;
8354 	else
8355 	  {
8356 	    name = DECL_NAME (decl);
8357 	    if (name && IDENTIFIER_ANON_P (name))
8358 	      name = NULL_TREE;
8359 	  }
8360 
8361 	tree_node (name);
8362 	if (!name && streaming_p ())
8363 	  {
8364 	    unsigned ix = get_field_ident (ctx, decl);
8365 	    u (ix);
8366 	  }
8367 
8368 	int tag = insert (decl);
8369 	if (streaming_p ())
8370 	  dump (dumper::TREE)
8371 	    && dump ("Wrote member:%d %C:%N", tag, TREE_CODE (decl), decl);
8372 	return false;
8373       }
8374       break;
8375 
8376     case VAR_DECL:
8377       gcc_checking_assert (!DECL_LOCAL_DECL_P (decl));
8378       if (DECL_VTABLE_OR_VTT_P (decl))
8379 	{
8380 	  /* VTT or VTABLE, they are all on the vtables list.  */
8381 	  tree ctx = CP_DECL_CONTEXT (decl);
8382 	  tree vtable = CLASSTYPE_VTABLES (ctx);
8383 	  for (unsigned ix = 0; ; vtable = DECL_CHAIN (vtable), ix++)
8384 	    if (vtable == decl)
8385 	      {
8386 		gcc_checking_assert (DECL_VIRTUAL_P (decl));
8387 		if (streaming_p ())
8388 		  {
8389 		    u (tt_vtable);
8390 		    u (ix);
8391 		    dump (dumper::TREE)
8392 		      && dump ("Writing vtable %N[%u]", ctx, ix);
8393 		  }
8394 		tree_node (ctx);
8395 		return false;
8396 	      }
8397 	  gcc_unreachable ();
8398 	}
8399 
8400       if (DECL_TINFO_P (decl))
8401 	{
8402 	tinfo:
8403 	  /* A typeinfo, tt_tinfo_typedef or tt_tinfo_var.  */
8404 	  bool is_var = TREE_CODE (decl) == VAR_DECL;
8405 	  tree type = TREE_TYPE (decl);
8406 	  unsigned ix = get_pseudo_tinfo_index (type);
8407 	  if (streaming_p ())
8408 	    {
8409 	      i (is_var ? tt_tinfo_var : tt_tinfo_typedef);
8410 	      u (ix);
8411 	    }
8412 
8413 	  if (is_var)
8414 	    {
8415 	      /* We also need the type it is for and mangled name, so
8416 		 the reader doesn't need to complete the type (which
8417 		 would break section ordering).  The type it is for is
8418 		 stashed on the name's TREE_TYPE.  */
8419 	      tree name = DECL_NAME (decl);
8420 	      tree_node (name);
8421 	      type = TREE_TYPE (name);
8422 	      tree_node (type);
8423 	    }
8424 
8425 	  int tag = insert (decl);
8426 	  if (streaming_p ())
8427 	    dump (dumper::TREE)
8428 	      && dump ("Wrote tinfo_%s:%d %u %N", is_var ? "var" : "type",
8429 		       tag, ix, type);
8430 
8431 	  if (!is_var)
8432 	    {
8433 	      tag = insert (type);
8434 	      if (streaming_p ())
8435 		dump (dumper::TREE)
8436 		  && dump ("Wrote tinfo_type:%d %u %N", tag, ix, type);
8437 	    }
8438 	  return false;
8439 	}
8440       break;
8441 
8442     case TYPE_DECL:
8443       if (DECL_TINFO_P (decl))
8444 	goto tinfo;
8445       break;
8446     }
8447 
8448   if (DECL_THUNK_P (decl))
8449     {
8450       /* Thunks are similar to binfos -- write the thunked-to decl and
8451 	 then thunk-specific key info.  */
8452       if (streaming_p ())
8453 	{
8454 	  i (tt_thunk);
8455 	  i (THUNK_FIXED_OFFSET (decl));
8456 	}
8457 
8458       tree target = decl;
8459       while (DECL_THUNK_P (target))
8460 	target = THUNK_TARGET (target);
8461       tree_node (target);
8462       tree_node (THUNK_VIRTUAL_OFFSET (decl));
8463       int tag = insert (decl);
8464       if (streaming_p ())
8465 	dump (dumper::TREE)
8466 	  && dump ("Wrote:%d thunk %N to %N", tag, DECL_NAME (decl), target);
8467       return false;
8468     }
8469 
8470   if (DECL_CLONED_FUNCTION_P (decl))
8471     {
8472       tree target = get_clone_target (decl);
8473       if (streaming_p ())
8474 	i (tt_clone_ref);
8475 
8476       tree_node (target);
8477       tree_node (DECL_NAME (decl));
8478       int tag = insert (decl);
8479       if (streaming_p ())
8480 	dump (dumper::TREE)
8481 	  && dump ("Wrote:%d clone %N of %N", tag, DECL_NAME (decl), target);
8482       return false;
8483     }
8484 
8485   /* Everything left should be a thing that is in the entity table.
8486      Mostly things that can be defined outside of their (original
8487      declaration) context.  */
8488   gcc_checking_assert (TREE_CODE (decl) == TEMPLATE_DECL
8489 		       || TREE_CODE (decl) == VAR_DECL
8490 		       || TREE_CODE (decl) == FUNCTION_DECL
8491 		       || TREE_CODE (decl) == TYPE_DECL
8492 		       || TREE_CODE (decl) == USING_DECL
8493 		       || TREE_CODE (decl) == CONCEPT_DECL
8494 		       || TREE_CODE (decl) == NAMESPACE_DECL);
8495 
8496   int use_tpl = -1;
8497   tree ti = node_template_info (decl, use_tpl);
8498   tree tpl = NULL_TREE;
8499 
8500 /* If this is the DECL_TEMPLATE_RESULT of a TEMPLATE_DECL, get the
8501      TEMPLATE_DECL.  Note TI_TEMPLATE is not a TEMPLATE_DECL for
8502      (some) friends, so we need to check that.  */
8503   // FIXME: Should local friend template specializations be by value?
8504   // They don't get idents so we'll never know they're imported, but I
8505   // think we can only reach them from the TU that defines the
8506   // befriending class?
8507   if (ti && TREE_CODE (TI_TEMPLATE (ti)) == TEMPLATE_DECL
8508       && DECL_TEMPLATE_RESULT (TI_TEMPLATE (ti)) == decl)
8509     {
8510       tpl = TI_TEMPLATE (ti);
8511     partial_template:
8512       if (streaming_p ())
8513 	{
8514 	  i (tt_template);
8515 	  dump (dumper::TREE)
8516 	    && dump ("Writing implicit template %C:%N%S",
8517 		     TREE_CODE (tpl), tpl, tpl);
8518 	}
8519       tree_node (tpl);
8520 
8521       /* Streaming TPL caused us to visit DECL and maybe its type.  */
8522       gcc_checking_assert (TREE_VISITED (decl));
8523       if (DECL_IMPLICIT_TYPEDEF_P (decl))
8524 	gcc_checking_assert (TREE_VISITED (TREE_TYPE (decl)));
8525       return false;
8526     }
8527 
8528   tree ctx = CP_DECL_CONTEXT (decl);
8529   depset *dep = NULL;
8530   if (streaming_p ())
8531     dep = dep_hash->find_dependency (decl);
8532   else if (TREE_CODE (ctx) != FUNCTION_DECL
8533 	   || TREE_CODE (decl) == TEMPLATE_DECL
8534 	   || (dep_hash->sneakoscope && DECL_IMPLICIT_TYPEDEF_P (decl))
8535 	   || (DECL_LANG_SPECIFIC (decl)
8536 	       && DECL_MODULE_IMPORT_P (decl)))
8537     {
8538       auto kind = (TREE_CODE (decl) == NAMESPACE_DECL
8539 		   && !DECL_NAMESPACE_ALIAS (decl)
8540 		   ? depset::EK_NAMESPACE : depset::EK_DECL);
8541       dep = dep_hash->add_dependency (decl, kind);
8542     }
8543 
8544   if (!dep)
8545     {
8546       /* Some internal entity of context.  Do by value.  */
8547       decl_value (decl, NULL);
8548       return false;
8549     }
8550 
8551   if (dep->get_entity_kind () == depset::EK_REDIRECT)
8552     {
8553       /* The DECL_TEMPLATE_RESULT of a partial specialization.
8554 	 Write the partial specialization's template.  */
8555       depset *redirect = dep->deps[0];
8556       gcc_checking_assert (redirect->get_entity_kind () == depset::EK_PARTIAL);
8557       tpl = redirect->get_entity ();
8558       goto partial_template;
8559     }
8560 
8561   if (streaming_p ())
8562     {
8563       /* Locate the entity.  */
8564       unsigned index = dep->cluster;
8565       unsigned import = 0;
8566 
8567       if (dep->is_import ())
8568 	import = dep->section;
8569       else if (CHECKING_P)
8570 	/* It should be what we put there.  */
8571 	gcc_checking_assert (index == ~import_entity_index (decl));
8572 
8573 #if CHECKING_P
8574       gcc_assert (!import || importedness >= 0);
8575 #endif
8576       i (tt_entity);
8577       u (import);
8578       u (index);
8579     }
8580 
8581   int tag = insert (decl);
8582   if (streaming_p () && dump (dumper::TREE))
8583     {
8584       char const *kind = "import";
8585       module_state *from = (*modules)[0];
8586       if (dep->is_import ())
8587 	/* Rediscover the unremapped index.  */
8588 	from = import_entity_module (import_entity_index (decl));
8589       else
8590 	{
8591 	  tree o = get_originating_module_decl (decl);
8592 	  o = STRIP_TEMPLATE (o);
8593 	  kind = (DECL_LANG_SPECIFIC (o) && DECL_MODULE_PURVIEW_P (o)
8594 		  ? "purview" : "GMF");
8595 	}
8596       dump ("Wrote %s:%d %C:%N@%M", kind,
8597 	    tag, TREE_CODE (decl), decl, from);
8598     }
8599 
8600   add_indirects (decl);
8601 
8602   return false;
8603 }
8604 
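/* Write TYPE to the stream.  Variants (cv-qualified, attributed, or
   function types with adjusted quals/ref-quals/exception specs) are
   written as a delta against their root type; named types go via their
   TYPE_DECL; pointer-to-member-function types via the function type they
   wrap; everything else is written as a tt_derived_type described by its
   component types.  */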
8605 void
8606 trees_out::type_node (tree type)
8607 {
8608   gcc_assert (TYPE_P (type));
8609 
8610   tree root = (TYPE_NAME (type)
8611 	       ? TREE_TYPE (TYPE_NAME (type)) : TYPE_MAIN_VARIANT (type));
8612 
8613   if (type != root)
8614     {
8615       if (streaming_p ())
8616 	i (tt_variant_type);
8617       tree_node (root);
8618 
8619       int flags = -1;
8620 
8621       if (TREE_CODE (type) == FUNCTION_TYPE
8622 	  || TREE_CODE (type) == METHOD_TYPE)
8623 	{
8624 	  int quals = type_memfn_quals (type);
8625 	  int rquals = type_memfn_rqual (type);
8626 	  tree raises = TYPE_RAISES_EXCEPTIONS (type);
8627 	  bool late = TYPE_HAS_LATE_RETURN_TYPE (type);
8628 
8629 	  if (raises != TYPE_RAISES_EXCEPTIONS (root)
8630 	      || rquals != type_memfn_rqual (root)
8631 	      || quals != type_memfn_quals (root)
8632 	      || late != TYPE_HAS_LATE_RETURN_TYPE (root))
8633 	    flags = rquals | (int (late) << 2) | (quals << 3);
8634 	}
8635       else
8636 	{
8637 	  if (TYPE_USER_ALIGN (type))
8638 	    flags = TYPE_ALIGN_RAW (type);
8639 	}
8640 
8641       if (streaming_p ())
8642 	i (flags);
8643 
8644       if (flags < 0)
8645 	;
8646       else if (TREE_CODE (type) == FUNCTION_TYPE
8647 	       || TREE_CODE (type) == METHOD_TYPE)
8648 	{
8649 	  tree raises = TYPE_RAISES_EXCEPTIONS (type);
8650 	  if (raises == TYPE_RAISES_EXCEPTIONS (root))
8651 	    raises = error_mark_node;
8652 	  tree_node (raises);
8653 	}
8654 
8655       tree_node (TYPE_ATTRIBUTES (type));
8656 
8657       if (streaming_p ())
8658 	{
8659 	  /* Qualifiers.  */
8660 	  int rquals = cp_type_quals (root);
8661 	  int quals = cp_type_quals (type);
8662 	  if (quals == rquals)
8663 	    quals = -1;
8664 	  i (quals);
8665 	}
8666 
8667       if (ref_node (type) != WK_none)
8668 	{
8669 	  int tag = insert (type);
8670 	  if (streaming_p ())
8671 	    {
8672 	      i (0);
8673 	      dump (dumper::TREE)
8674 		&& dump ("Wrote:%d variant type %C", tag, TREE_CODE (type));
8675 	    }
8676 	}
8677       return;
8678     }
8679 
8680   if (tree name = TYPE_NAME (type))
8681     if ((TREE_CODE (name) == TYPE_DECL && DECL_ORIGINAL_TYPE (name))
8682 	|| DECL_TEMPLATE_PARM_P (name)
8683 	|| TREE_CODE (type) == RECORD_TYPE
8684 	|| TREE_CODE (type) == UNION_TYPE
8685 	|| TREE_CODE (type) == ENUMERAL_TYPE)
8686       {
8687 	/* We can meet template parms that we didn't meet in the
8688 	   tpl_parms walk, because we're referring to a derived type
8689 	   that was previously constructed from equivalent template
8690 	   parms. */
8691 	if (streaming_p ())
8692 	  {
8693 	    i (tt_typedef_type);
8694 	    dump (dumper::TREE)
8695 	      && dump ("Writing %stypedef %C:%N",
8696 		       DECL_IMPLICIT_TYPEDEF_P (name) ? "implicit " : "",
8697 		       TREE_CODE (name), name);
8698 	  }
8699 	tree_node (name);
8700 	if (streaming_p ())
8701 	  dump (dumper::TREE) && dump ("Wrote typedef %C:%N%S",
8702 				       TREE_CODE (name), name, name);
8703 	gcc_checking_assert (TREE_VISITED (type));
8704 	return;
8705       }
8706 
8707   if (TYPE_PTRMEMFUNC_P (type))
8708     {
8709       /* This is a distinct type node, masquerading as a structure. */
8710       tree fn_type = TYPE_PTRMEMFUNC_FN_TYPE (type);
8711       if (streaming_p ())
8712 	i (tt_ptrmem_type);
8713       tree_node (fn_type);
8714       int tag = insert (type);
8715       if (streaming_p ())
8716 	dump (dumper::TREE) && dump ("Written:%d ptrmem type", tag);
8717       return;
8718     }
8719 
8720   if (streaming_p ())
8721     {
8722       u (tt_derived_type);
8723       u (TREE_CODE (type));
8724     }
8725 
8726   tree_node (TREE_TYPE (type));
8727   switch (TREE_CODE (type))
8728     {
8729     default:
8730       /* We should never meet a type here that is indescribable in
8731 	 terms of other types.  */
8732       gcc_unreachable ();
8733 
8734     case ARRAY_TYPE:
8735       tree_node (TYPE_DOMAIN (type));
8736       if (streaming_p ())
8737 	/* Dependent arrays are constructed with TYPE_DEPENDENT_P
8738 	   already set.  */
8739 	u (TYPE_DEPENDENT_P (type));
8740       break;
8741 
8742     case COMPLEX_TYPE:
8743       /* No additional data.  */
8744       break;
8745 
8746     case BOOLEAN_TYPE:
8747       /* A non-standard boolean type.  */
8748       if (streaming_p ())
8749 	u (TYPE_PRECISION (type));
8750       break;
8751 
8752     case INTEGER_TYPE:
8753       if (TREE_TYPE (type))
8754 	{
8755 	  /* A range type (representing an array domain).  */
8756 	  tree_node (TYPE_MIN_VALUE (type));
8757 	  tree_node (TYPE_MAX_VALUE (type));
8758 	}
8759       else
8760 	{
8761 	  /* A new integral type (representing a bitfield).  */
8762 	  if (streaming_p ())
8763 	    {
8764 	      unsigned prec = TYPE_PRECISION (type);
8765 	      bool unsigned_p = TYPE_UNSIGNED (type);
8766 
8767 	      u ((prec << 1) | unsigned_p);
8768 	    }
8769 	}
8770       break;
8771 
8772     case METHOD_TYPE:
8773     case FUNCTION_TYPE:
8774       {
8775 	gcc_checking_assert (type_memfn_rqual (type) == REF_QUAL_NONE);
8776 
8777 	tree arg_types = TYPE_ARG_TYPES (type);
8778 	if (TREE_CODE (type) == METHOD_TYPE)
8779 	  {
8780 	    tree_node (TREE_TYPE (TREE_VALUE (arg_types)));
8781 	    arg_types = TREE_CHAIN (arg_types);
8782 	  }
8783 	tree_node (arg_types);
8784       }
8785       break;
8786 
8787     case OFFSET_TYPE:
8788       tree_node (TYPE_OFFSET_BASETYPE (type));
8789       break;
8790 
8791     case POINTER_TYPE:
8792       /* No additional data.  */
8793       break;
8794 
8795     case REFERENCE_TYPE:
8796       if (streaming_p ())
8797 	u (TYPE_REF_IS_RVALUE (type));
8798       break;
8799 
8800     case DECLTYPE_TYPE:
8801     case TYPEOF_TYPE:
8802     case UNDERLYING_TYPE:
8803     case DEPENDENT_OPERATOR_TYPE:
8804       tree_node (TYPE_VALUES_RAW (type));
8805       if (TREE_CODE (type) == DECLTYPE_TYPE)
8806 	/* We stash a whole bunch of things into decltype's
8807 	   flags.  */
8808 	if (streaming_p ())
8809 	  tree_node_bools (type);
8810       break;
8811 
8812     case TYPE_ARGUMENT_PACK:
8813       /* No additional data.  */
8814       break;
8815 
8816     case TYPE_PACK_EXPANSION:
8817       if (streaming_p ())
8818 	u (PACK_EXPANSION_LOCAL_P (type));
8819       tree_node (PACK_EXPANSION_PARAMETER_PACKS (type));
8820       break;
8821 
8822     case TYPENAME_TYPE:
8823       {
8824 	tree_node (TYPE_CONTEXT (type));
8825 	tree_node (DECL_NAME (TYPE_NAME (type)));
8826 	tree_node (TYPENAME_TYPE_FULLNAME (type));
8827 	if (streaming_p ())
8828 	  {
8829 	    enum tag_types tag_type = none_type;
8830 	    if (TYPENAME_IS_ENUM_P (type))
8831 	      tag_type = enum_type;
8832 	    else if (TYPENAME_IS_CLASS_P (type))
8833 	      tag_type = class_type;
8834 	    u (int (tag_type));
8835 	  }
8836 	}
8837       break;
8838 
8839     case UNBOUND_CLASS_TEMPLATE:
8840       {
8841 	tree decl = TYPE_NAME (type);
8842 	tree_node (DECL_CONTEXT (decl));
8843 	tree_node (DECL_NAME (decl));
8844 	tree_node (DECL_TEMPLATE_PARMS (decl));
8845       }
8846       break;
8847 
8848     case VECTOR_TYPE:
8849       if (streaming_p ())
8850 	{
8851 	  poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (type);
8852 	  /* to_constant asserts that only coeff[0] is of interest.  */
8853 	  wu (static_cast<unsigned HOST_WIDE_INT> (nunits.to_constant ()));
8854 	}
8855       break;
8856     }
8857 
8858   /* We may have met the type during emitting the above.  */
8859   if (ref_node (type) != WK_none)
8860     {
8861       int tag = insert (type);
8862       if (streaming_p ())
8863 	{
8864 	  i (0);
8865 	  dump (dumper::TREE)
8866 	    && dump ("Wrote:%d derived type %C", tag, TREE_CODE (type));
8867 	}
8868     }
8869 
8870   return;
8871 }
8872 
8873 /* T is (mostly*) a non-mergeable node that must be written by value.
8874    The mergeable case is a BINFO, which is as-if a DECL.  */
8875 
8876 void
8877 trees_out::tree_value (tree t)
8878 {
8879   /* We should never be writing a type by value.  tree_type should
8880      have streamed it, or we're going via its TYPE_DECL.  */
8881   gcc_checking_assert (!TYPE_P (t));
8882 
8883   if (DECL_P (t))
8884     /* No template, type, var or function, except anonymous
8885        non-context vars.  */
8886     gcc_checking_assert ((TREE_CODE (t) != TEMPLATE_DECL
8887 			  && TREE_CODE (t) != TYPE_DECL
8888 			  && (TREE_CODE (t) != VAR_DECL
8889 			      || (!DECL_NAME (t) && !DECL_CONTEXT (t)))
8890 			  && TREE_CODE (t) != FUNCTION_DECL));
8891 
8892   if (streaming_p ())
8893     {
8894       /* A new node -> tt_node.  */
8895       tree_val_count++;
8896       i (tt_node);
8897       start (t);
8898       tree_node_bools (t);
8899     }
8900 
8901   if (TREE_CODE (t) == TREE_BINFO)
8902     /* Binfos are decl-like and need merging information.  */
8903     binfo_mergeable (t);
8904 
8905   int tag = insert (t, WK_value);
8906   if (streaming_p ())
8907     dump (dumper::TREE)
8908       && dump ("Writing tree:%d %C:%N", tag, TREE_CODE (t), t);
8909 
8910   tree_node_vals (t);
8911 
8912   if (streaming_p ())
8913     dump (dumper::TREE) && dump ("Written tree:%d %C:%N", tag, TREE_CODE (t), t);
8914 }
8915 
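/* Read a node written by trees_out::tree_value above.  Returns the node,
   the already-known duplicate for a BINFO we have seen before, or
   NULL_TREE on error.  */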
8916 tree
8917 trees_in::tree_value ()
8918 {
8919   tree t = start ();
8920   if (!t || !tree_node_bools (t))
8921     return NULL_TREE;
8922 
8923   tree existing = t;
8924   if (TREE_CODE (t) == TREE_BINFO)
8925     {
8926       tree type;
8927       unsigned ix = binfo_mergeable (&type);
8928       if (TYPE_BINFO (type))
8929 	{
8930 	  /* We already have a definition, this must be a duplicate.  */
8931 	  dump (dumper::MERGE)
8932 	    && dump ("Deduping binfo %N[%u]", type, ix);
8933 	  existing = TYPE_BINFO (type);
8934 	  while (existing && ix--)
8935 	    existing = TREE_CHAIN (existing);
8936 	  if (existing)
8937 	    register_duplicate (t, existing);
8938 	  else
8939 	    /* Error, mismatch -- diagnose in read_class_def's
8940 	       checking.  */
8941 	    existing = t;
8942 	}
8943     }
8944 
8945   /* Insert into map.  */
8946   int tag = insert (existing);
8947   dump (dumper::TREE)
8948     && dump ("Reading tree:%d %C", tag, TREE_CODE (t));
8949 
8950   if (!tree_node_vals (t))
8951     {
8952       back_refs[~tag] = NULL_TREE;
8953       set_overrun ();
8954       /* Bail.  */
8955       return NULL_TREE;
8956     }
8957 
8958   dump (dumper::TREE) && dump ("Read tree:%d %C:%N", tag, TREE_CODE (t), t);
8959 
8960   if (TREE_CODE (existing) == INTEGER_CST && !TREE_OVERFLOW (existing))
8961     {
8962       existing = cache_integer_cst (t, true);
8963       back_refs[~tag] = existing;
8964     }
8965 
8966   return existing;
8967 }
8968 
8969 /* Stream out tree node T.  We automatically create local back
8970    references, which is essentially a single pass lisp
8971    self-referential structure pretty-printer.  */
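/* A sketch of the scheme (illustrative): the first time a node is met it
   is streamed by value and insert () assigns it a back-reference tag; any
   later reference to the same node within the SCC just streams that tag,
   which the reader resolves through back_refs[~tag].  This is what lets
   self-referential structures be written in a single pass.  */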
8972 
8973 void
8974 trees_out::tree_node (tree t)
8975 {
8976   dump.indent ();
8977   walk_kind ref = ref_node (t);
8978   if (ref == WK_none)
8979     goto done;
8980 
8981   if (ref != WK_normal)
8982     goto skip_normal;
8983 
8984   if (TREE_CODE (t) == IDENTIFIER_NODE)
8985     {
8986       /* An identifier node -> tt_id, tt_conv_id, tt_anon_id, tt_lambda_id.  */
8987       int code = tt_id;
8988       if (IDENTIFIER_ANON_P (t))
8989 	code = IDENTIFIER_LAMBDA_P (t) ? tt_lambda_id : tt_anon_id;
8990       else if (IDENTIFIER_CONV_OP_P (t))
8991 	code = tt_conv_id;
8992 
8993       if (streaming_p ())
8994 	i (code);
8995 
8996       if (code == tt_conv_id)
8997 	{
8998 	  tree type = TREE_TYPE (t);
8999 	  gcc_checking_assert (type || t == conv_op_identifier);
9000 	  tree_node (type);
9001 	}
9002       else if (code == tt_id && streaming_p ())
9003 	str (IDENTIFIER_POINTER (t), IDENTIFIER_LENGTH (t));
9004 
9005       int tag = insert (t);
9006       if (streaming_p ())
9007 	{
9008 	  /* We know the ordering of the 4 id tags.  */
9009 	  static const char *const kinds[] =
9010 	    {"", "conv_op ", "anon ", "lambda "};
9011 	  dump (dumper::TREE)
9012 	    && dump ("Written:%d %sidentifier:%N", tag,
9013 		     kinds[code - tt_id],
9014 		     code == tt_conv_id ? TREE_TYPE (t) : t);
9015 	}
9016       goto done;
9017     }
9018 
9019   if (TREE_CODE (t) == TREE_BINFO)
9020     {
9021       /* A BINFO -> tt_binfo.
9022 	 We must do this by reference.  We stream the binfo tree
9023 	 itself when streaming its owning RECORD_TYPE.  That we got
9024 	 here means the dominating type is not in this SCC.  */
9025       if (streaming_p ())
9026 	i (tt_binfo);
9027       binfo_mergeable (t);
9028       gcc_checking_assert (!TREE_VISITED (t));
9029       int tag = insert (t);
9030       if (streaming_p ())
9031 	dump (dumper::TREE) && dump ("Inserting binfo:%d %N", tag, t);
9032       goto done;
9033     }
9034 
9035   if (TREE_CODE (t) == INTEGER_CST
9036       && !TREE_OVERFLOW (t)
9037       && TREE_CODE (TREE_TYPE (t)) == ENUMERAL_TYPE)
9038     {
9039       /* An integral constant of enumeral type.  See if it matches one
9040 	 of the enumeration values.  */
9041       for (tree values = TYPE_VALUES (TREE_TYPE (t));
9042 	   values; values = TREE_CHAIN (values))
9043 	{
9044 	  tree decl = TREE_VALUE (values);
9045 	  if (tree_int_cst_equal (DECL_INITIAL (decl), t))
9046 	    {
9047 	      if (streaming_p ())
9048 		u (tt_enum_value);
9049 	      tree_node (decl);
9050 	      dump (dumper::TREE) && dump ("Written enum value %N", decl);
9051 	      goto done;
9052 	    }
9053 	}
9054       /* It didn't match.  We'll write it as an explicit INTEGER_CST
9055 	 node.  */
9056     }
9057 
9058   if (TYPE_P (t))
9059     {
9060       type_node (t);
9061       goto done;
9062     }
9063 
9064   if (DECL_P (t))
9065     {
9066       if (DECL_TEMPLATE_PARM_P (t))
9067 	{
9068 	  tpl_parm_value (t);
9069 	  goto done;
9070 	}
9071 
9072       if (!DECL_CONTEXT (t))
9073 	{
9074 	  /* There are a few cases of decls with no context.  We'll write
9075 	     these by value, but first assert they are cases we expect.  */
9076 	  gcc_checking_assert (ref == WK_normal);
9077 	  switch (TREE_CODE (t))
9078 	    {
9079 	    default: gcc_unreachable ();
9080 
9081 	    case LABEL_DECL:
9082 	      /* CASE_LABEL_EXPRs contain uncontexted LABEL_DECLs.  */
9083 	      gcc_checking_assert (!DECL_NAME (t));
9084 	      break;
9085 
9086 	    case VAR_DECL:
9087 	      /* AGGR_INIT_EXPRs cons up anonymous uncontexted VAR_DECLs.  */
9088 	      gcc_checking_assert (!DECL_NAME (t)
9089 				   && DECL_ARTIFICIAL (t));
9090 	      break;
9091 
9092 	    case PARM_DECL:
9093 	      /* REQUIRES_EXPRs have a tree list of uncontexted
9094 		 PARM_DECLS.  It'd be nice if they had a
9095 		 distinguishing flag to double check.  */
9096 	      break;
9097 	    }
9098 	  goto by_value;
9099 	}
9100     }
9101 
9102  skip_normal:
9103   if (DECL_P (t) && !decl_node (t, ref))
9104     goto done;
9105 
9106   /* Otherwise by value */
9107  by_value:
9108   tree_value (t);
9109 
9110  done:
9111   /* And, breathe out.  */
9112   dump.outdent ();
9113 }
9114 
9115 /* Stream in a tree node.  */
9116 
9117 tree
9118 trees_in::tree_node (bool is_use)
9119 {
9120   if (get_overrun ())
9121     return NULL_TREE;
9122 
9123   dump.indent ();
9124   int tag = i ();
9125   tree res = NULL_TREE;
9126   switch (tag)
9127     {
9128     default:
9129       /* backref, pull it out of the map.  */
9130       res = back_ref (tag);
9131       break;
9132 
9133     case tt_null:
9134       /* NULL_TREE.  */
9135       break;
9136 
9137     case tt_fixed:
9138       /* A fixed ref, find it in the fixed_ref array.   */
9139       {
9140 	unsigned fix = u ();
9141 	if (fix < (*fixed_trees).length ())
9142 	  {
9143 	    res = (*fixed_trees)[fix];
9144 	    dump (dumper::TREE) && dump ("Read fixed:%u %C:%N%S", fix,
9145 					 TREE_CODE (res), res, res);
9146 	  }
9147 
9148 	if (!res)
9149 	  set_overrun ();
9150       }
9151       break;
9152 
9153     case tt_parm:
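      /* A reference to a function parameter or result: the owning
	 function is read first, then the parm/result node itself.  */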
9154       {
9155 	tree fn = tree_node ();
9156 	if (fn && TREE_CODE (fn) == FUNCTION_DECL)
9157 	  res = tree_node ();
9158 	if (res)
9159 	  dump (dumper::TREE)
9160 	    && dump ("Read %s reference %N",
9161 		     TREE_CODE (res) == PARM_DECL ? "parameter" : "result",
9162 		     res);
9163       }
9164       break;
9165 
9166     case tt_node:
9167       /* A new node.  Stream it in.  */
9168       res = tree_value ();
9169       break;
9170 
9171     case tt_decl:
9172       /* A new decl.  Stream it in.  */
9173       res = decl_value ();
9174       break;
9175 
9176     case tt_tpl_parm:
9177       /* A template parameter.  Stream it in.  */
9178       res = tpl_parm_value ();
9179       break;
9180 
9181     case tt_id:
9182       /* An identifier node.  */
9183       {
9184 	size_t l;
9185 	const char *chars = str (&l);
9186 	res = get_identifier_with_length (chars, l);
9187 	int tag = insert (res);
9188 	dump (dumper::TREE)
9189 	  && dump ("Read identifier:%d %N", tag, res);
9190       }
9191       break;
9192 
9193     case tt_conv_id:
9194       /* A conversion operator.  Get the type and recreate the
9195 	 identifier.  */
9196       {
9197 	tree type = tree_node ();
9198 	if (!get_overrun ())
9199 	  {
9200 	    res = type ? make_conv_op_name (type) : conv_op_identifier;
9201 	    int tag = insert (res);
9202 	    dump (dumper::TREE)
9203 	      && dump ("Created conv_op:%d %S for %N", tag, res, type);
9204 	  }
9205       }
9206       break;
9207 
9208     case tt_anon_id:
9209     case tt_lambda_id:
9210       /* An anonymous or lambda id.  */
9211       {
9212 	res = make_anon_name ();
9213 	if (tag == tt_lambda_id)
9214 	  IDENTIFIER_LAMBDA_P (res) = true;
9215 	int tag = insert (res);
9216 	dump (dumper::TREE)
9217 	  && dump ("Read %s identifier:%d %N",
9218 		   IDENTIFIER_LAMBDA_P (res) ? "lambda" : "anon", tag, res);
9219       }
9220       break;
9221 
9222     case tt_typedef_type:
9223       res = tree_node ();
9224       if (res)
9225 	{
9226 	  dump (dumper::TREE)
9227 	    && dump ("Read %stypedef %C:%N",
9228 		     DECL_IMPLICIT_TYPEDEF_P (res) ? "implicit " : "",
9229 		     TREE_CODE (res), res);
9230 	  res = TREE_TYPE (res);
9231 	}
9232       break;
9233 
9234     case tt_derived_type:
9235       /* A type derived from some other type.  */
9236       {
9237 	enum tree_code code = tree_code (u ());
9238 	res = tree_node ();
9239 
9240 	switch (code)
9241 	  {
9242 	  default:
9243 	    set_overrun ();
9244 	    break;
9245 
9246 	  case ARRAY_TYPE:
9247 	    {
9248 	      tree domain = tree_node ();
9249 	      int dep = u ();
9250 	      if (!get_overrun ())
9251 		res = build_cplus_array_type (res, domain, dep);
9252 	    }
9253 	    break;
9254 
9255 	  case COMPLEX_TYPE:
9256 	    if (!get_overrun ())
9257 	      res = build_complex_type (res);
9258 	    break;
9259 
9260 	  case BOOLEAN_TYPE:
9261 	    {
9262 	      unsigned precision = u ();
9263 	      if (!get_overrun ())
9264 		res = build_nonstandard_boolean_type (precision);
9265 	    }
9266 	    break;
9267 
9268 	  case INTEGER_TYPE:
9269 	    if (res)
9270 	      {
9271 		/* A range type (representing an array domain).  */
9272 		tree min = tree_node ();
9273 		tree max = tree_node ();
9274 
9275 		if (!get_overrun ())
9276 		  res = build_range_type (res, min, max);
9277 	      }
9278 	    else
9279 	      {
9280 		/* A new integral type (representing a bitfield).  */
9281 		unsigned enc = u ();
9282 		if (!get_overrun ())
9283 		  res = build_nonstandard_integer_type (enc >> 1, enc & 1);
9284 	      }
9285 	    break;
9286 
9287 	  case FUNCTION_TYPE:
9288 	  case METHOD_TYPE:
9289 	    {
9290 	      tree klass = code == METHOD_TYPE ? tree_node () : NULL_TREE;
9291 	      tree args = tree_node ();
9292 	      if (!get_overrun ())
9293 		{
9294 		  if (klass)
9295 		    res = build_method_type_directly (klass, res, args);
9296 		  else
9297 		    res = build_function_type (res, args);
9298 		}
9299 	    }
9300 	    break;
9301 
9302 	  case OFFSET_TYPE:
9303 	    {
9304 	      tree base = tree_node ();
9305 	      if (!get_overrun ())
9306 		res = build_offset_type (base, res);
9307 	    }
9308 	    break;
9309 
9310 	  case POINTER_TYPE:
9311 	    if (!get_overrun ())
9312 	      res = build_pointer_type (res);
9313 	    break;
9314 
9315 	  case REFERENCE_TYPE:
9316 	    {
9317 	      bool rval = bool (u ());
9318 	      if (!get_overrun ())
9319 		res = cp_build_reference_type (res, rval);
9320 	    }
9321 	    break;
9322 
9323 	  case DECLTYPE_TYPE:
9324 	  case TYPEOF_TYPE:
9325 	  case UNDERLYING_TYPE:
9326 	  case DEPENDENT_OPERATOR_TYPE:
9327 	    {
9328 	      tree expr = tree_node ();
9329 	      if (!get_overrun ())
9330 		{
9331 		  res = cxx_make_type (code);
9332 		  TYPE_VALUES_RAW (res) = expr;
9333 		  if (code == DECLTYPE_TYPE)
9334 		    tree_node_bools (res);
9335 		  SET_TYPE_STRUCTURAL_EQUALITY (res);
9336 		}
9337 	    }
9338 	    break;
9339 
9340 	  case TYPE_ARGUMENT_PACK:
9341 	    if (!get_overrun ())
9342 	      {
9343 		tree pack = cxx_make_type (TYPE_ARGUMENT_PACK);
9344 		SET_ARGUMENT_PACK_ARGS (pack, res);
9345 		res = pack;
9346 	      }
9347 	    break;
9348 
9349 	  case TYPE_PACK_EXPANSION:
9350 	    {
9351 	      bool local = u ();
9352 	      tree param_packs = tree_node ();
9353 	      if (!get_overrun ())
9354 		{
9355 		  tree expn = cxx_make_type (TYPE_PACK_EXPANSION);
9356 		  SET_TYPE_STRUCTURAL_EQUALITY (expn);
9357 		  SET_PACK_EXPANSION_PATTERN (expn, res);
9358 		  PACK_EXPANSION_PARAMETER_PACKS (expn) = param_packs;
9359 		  PACK_EXPANSION_LOCAL_P (expn) = local;
9360 		  res = expn;
9361 		}
9362 	    }
9363 	    break;
9364 
9365 	  case TYPENAME_TYPE:
9366 	    {
9367 	      tree ctx = tree_node ();
9368 	      tree name = tree_node ();
9369 	      tree fullname = tree_node ();
9370 	      enum tag_types tag_type = tag_types (u ());
9371 
9372 	      if (!get_overrun ())
9373 		res = build_typename_type (ctx, name, fullname, tag_type);
9374 	    }
9375 	    break;
9376 
9377 	  case UNBOUND_CLASS_TEMPLATE:
9378 	    {
9379 	      tree ctx = tree_node ();
9380 	      tree name = tree_node ();
9381 	      tree parms = tree_node ();
9382 
9383 	      if (!get_overrun ())
9384 		res = make_unbound_class_template_raw (ctx, name, parms);
9385 	    }
9386 	    break;
9387 
9388 	  case VECTOR_TYPE:
9389 	    {
9390 	      unsigned HOST_WIDE_INT nunits = wu ();
9391 	      if (!get_overrun ())
9392 		res = build_vector_type (res, static_cast<poly_int64> (nunits));
9393 	    }
9394 	    break;
9395 	  }
9396 
9397 	int tag = i ();
9398 	if (!tag)
9399 	  {
9400 	    tag = insert (res);
9401 	    if (res)
9402 	      dump (dumper::TREE)
9403 		&& dump ("Created:%d derived type %C", tag, code);
9404 	  }
9405 	else
9406 	  res = back_ref (tag);
9407       }
9408       break;
9409 
9410     case tt_variant_type:
9411       /* Variant of some type.  */
9412       {
9413 	res = tree_node ();
9414 	int flags = i ();
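	/* Negative FLAGS mean no change.  For function and method
	   types the low two bits are the ref-qualifier, bit 2 the
	   'late' flag passed to build_cp_fntype_variant, and the
	   remaining bits the cv-quals.  Otherwise FLAGS appears to
	   encode the user alignment as log2(align) + 1.  */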
9415 	if (get_overrun ())
9416 	  ;
9417 	else if (flags < 0)
9418 	  /* No change.  */;
9419 	else if (TREE_CODE (res) == FUNCTION_TYPE
9420 		 || TREE_CODE (res) == METHOD_TYPE)
9421 	  {
9422 	    cp_ref_qualifier rqual = cp_ref_qualifier (flags & 3);
9423 	    bool late = (flags >> 2) & 1;
9424 	    cp_cv_quals quals = cp_cv_quals (flags >> 3);
9425 
9426 	    tree raises = tree_node ();
9427 	    if (raises == error_mark_node)
9428 	      raises = TYPE_RAISES_EXCEPTIONS (res);
9429 
9430 	    res = build_cp_fntype_variant (res, rqual, raises, late);
9431 	    if (TREE_CODE (res) == FUNCTION_TYPE)
9432 	      res = apply_memfn_quals (res, quals, rqual);
9433 	  }
9434 	else
9435 	  {
9436 	    res = build_aligned_type (res, (1u << flags) >> 1);
9437 	    TYPE_USER_ALIGN (res) = true;
9438 	  }
9439 
9440 	if (tree attribs = tree_node ())
9441 	  res = cp_build_type_attribute_variant (res, attribs);
9442 
9443 	int quals = i ();
9444 	if (quals >= 0 && !get_overrun ())
9445 	  res = cp_build_qualified_type (res, quals);
9446 
9447 	int tag = i ();
9448 	if (!tag)
9449 	  {
9450 	    tag = insert (res);
9451 	    if (res)
9452 	      dump (dumper::TREE)
9453 		&& dump ("Created:%d variant type %C", tag, TREE_CODE (res));
9454 	  }
9455 	else
9456 	  res = back_ref (tag);
9457       }
9458       break;
9459 
9460     case tt_tinfo_var:
9461     case tt_tinfo_typedef:
9462       /* A tinfo var or typedef.  */
9463       {
9464 	bool is_var = tag == tt_tinfo_var;
9465 	unsigned ix = u ();
9466 	tree type = NULL_TREE;
9467 
9468 	if (is_var)
9469 	  {
9470 	    tree name = tree_node ();
9471 	    type = tree_node ();
9472 
9473 	    if (!get_overrun ())
9474 	      res = get_tinfo_decl_direct (type, name, int (ix));
9475 	  }
9476 	else
9477 	  {
9478 	    if (!get_overrun ())
9479 	      {
9480 		type = get_pseudo_tinfo_type (ix);
9481 		res = TYPE_NAME (type);
9482 	      }
9483 	  }
9484 	if (res)
9485 	  {
9486 	    int tag = insert (res);
9487 	    dump (dumper::TREE)
9488 	      && dump ("Created tinfo_%s:%d %S:%u for %N",
9489 		       is_var ? "var" : "decl", tag, res, ix, type);
9490 	    if (!is_var)
9491 	      {
9492 		tag = insert (type);
9493 		dump (dumper::TREE)
9494 		  && dump ("Created tinfo_type:%d %u %N", tag, ix, type);
9495 	      }
9496 	  }
9497       }
9498       break;
9499 
9500     case tt_ptrmem_type:
9501       /* A pointer to member function.  */
9502       {
9503 	tree type = tree_node ();
9504 	if (type && TREE_CODE (type) == POINTER_TYPE
9505 	    && TREE_CODE (TREE_TYPE (type)) == METHOD_TYPE)
9506 	  {
9507 	    res = build_ptrmemfunc_type (type);
9508 	    int tag = insert (res);
9509 	    dump (dumper::TREE) && dump ("Created:%d ptrmem type", tag);
9510 	  }
9511 	else
9512 	  set_overrun ();
9513       }
9514       break;
9515 
9516     case tt_enum_value:
9517       /* An enum const value.  */
9518       {
9519 	if (tree decl = tree_node ())
9520 	  {
9521 	    dump (dumper::TREE) && dump ("Read enum value %N", decl);
9522 	    res = DECL_INITIAL (decl);
9523 	  }
9524 
9525 	if (!res)
9526 	  set_overrun ();
9527       }
9528       break;
9529 
9530     case tt_enum_decl:
9531       /* An enum decl.  */
9532       {
9533 	tree ctx = tree_node ();
9534 	tree name = tree_node ();
9535 
9536 	if (!get_overrun ()
9537 	    && TREE_CODE (ctx) == ENUMERAL_TYPE)
9538 	  res = find_enum_member (ctx, name);
9539 
9540 	if (!res)
9541 	  set_overrun ();
9542 	else
9543 	  {
9544 	    int tag = insert (res);
9545 	    dump (dumper::TREE)
9546 	      && dump ("Read enum decl:%d %C:%N", tag, TREE_CODE (res), res);
9547 	  }
9548       }
9549       break;
9550 
9551     case tt_data_member:
9552       /* A data member.  */
9553       {
9554 	tree ctx = tree_node ();
9555 	tree name = tree_node ();
9556 
9557 	if (!get_overrun ()
9558 	    && RECORD_OR_UNION_TYPE_P (ctx))
9559 	  {
9560 	    if (name)
9561 	      res = lookup_class_binding (ctx, name);
9562 	    else
9563 	      res = lookup_field_ident (ctx, u ());
9564 
9565 	    if (!res
9566 		|| TREE_CODE (res) != FIELD_DECL
9567 		|| DECL_CONTEXT (res) != ctx)
9568 	      res = NULL_TREE;
9569 	  }
9570 
9571 	if (!res)
9572 	  set_overrun ();
9573 	else
9574 	  {
9575 	    int tag = insert (res);
9576 	    dump (dumper::TREE)
9577 	      && dump ("Read member:%d %C:%N", tag, TREE_CODE (res), res);
9578 	  }
9579       }
9580       break;
9581 
9582     case tt_binfo:
9583       /* A BINFO.  Walk the tree of the dominating type.  */
9584       {
9585 	tree type;
9586 	unsigned ix = binfo_mergeable (&type);
9587 	if (type)
9588 	  {
9589 	    res = TYPE_BINFO (type);
9590 	    for (; ix && res; res = TREE_CHAIN (res))
9591 	      ix--;
9592 	    if (!res)
9593 	      set_overrun ();
9594 	  }
9595 
9596 	if (get_overrun ())
9597 	  break;
9598 
9599 	/* Insert binfo into backreferences.  */
9600 	tag = insert (res);
9601 	dump (dumper::TREE) && dump ("Read binfo:%d %N", tag, res);
9602       }
9603       break;
9604 
9605     case tt_vtable:
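      /* A vtable.  Locate it by index on the owning class's
	 CLASSTYPE_VTABLES chain.  */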
9606       {
9607 	unsigned ix = u ();
9608 	tree ctx = tree_node ();
9609 	dump (dumper::TREE) && dump ("Reading vtable %N[%u]", ctx, ix);
9610 	if (TREE_CODE (ctx) == RECORD_TYPE && TYPE_LANG_SPECIFIC (ctx))
9611 	  for (res = CLASSTYPE_VTABLES (ctx); res; res = DECL_CHAIN (res))
9612 	    if (!ix--)
9613 	      break;
9614 	if (!res)
9615 	  set_overrun ();
9616       }
9617       break;
9618 
9619     case tt_thunk:
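      /* A thunk.  Locate it on the target function's DECL_THUNKS
	 list by its fixed and virtual offsets.  */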
9620       {
9621 	int fixed = i ();
9622 	tree target = tree_node ();
9623 	tree virt = tree_node ();
9624 
9625 	for (tree thunk = DECL_THUNKS (target);
9626 	     thunk; thunk = DECL_CHAIN (thunk))
9627 	  if (THUNK_FIXED_OFFSET (thunk) == fixed
9628 	      && !THUNK_VIRTUAL_OFFSET (thunk) == !virt
9629 	      && (!virt
9630 		  || tree_int_cst_equal (virt, THUNK_VIRTUAL_OFFSET (thunk))))
9631 	    {
9632 	      res = thunk;
9633 	      break;
9634 	    }
9635 
9636 	int tag = insert (res);
9637 	if (res)
9638 	  dump (dumper::TREE)
9639 	    && dump ("Read:%d thunk %N to %N", tag, DECL_NAME (res), target);
9640 	else
9641 	  set_overrun ();
9642       }
9643       break;
9644 
9645     case tt_clone_ref:
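      /* A cdtor clone.  Locate it by name amongst the clones of the
	 maybe-in-charge target.  */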
9646       {
9647 	tree target = tree_node ();
9648 	tree name = tree_node ();
9649 
9650 	if (DECL_P (target) && DECL_MAYBE_IN_CHARGE_CDTOR_P (target))
9651 	  {
9652 	    tree clone;
9653 	    FOR_EVERY_CLONE (clone, target)
9654 	      if (DECL_NAME (clone) == name)
9655 		{
9656 		  res = clone;
9657 		  break;
9658 		}
9659 	  }
9660 
9661 	if (!res)
9662 	  set_overrun ();
9663 	int tag = insert (res);
9664 	if (res)
9665 	  dump (dumper::TREE)
9666 	    && dump ("Read:%d clone %N of %N", tag, DECL_NAME (res), target);
9667 	else
9668 	  set_overrun ();
9669        }
9670       break;
9671 
9672     case tt_entity:
9673       /* Index into the entity table.  Perhaps not loaded yet!  */
9674       {
9675 	unsigned origin = state->slurp->remap_module (u ());
9676 	unsigned ident = u ();
9677 	module_state *from = (*modules)[origin];
9678 
9679 	if (!origin || ident >= from->entity_num)
9680 	  set_overrun ();
9681 	if (!get_overrun ())
9682 	  {
9683 	    binding_slot *slot = &(*entity_ary)[from->entity_lwm + ident];
9684 	    if (slot->is_lazy ())
9685 	      if (!from->lazy_load (ident, slot))
9686 		set_overrun ();
9687 	    res = *slot;
9688 	  }
9689 
9690 	if (res)
9691 	  {
9692 	    const char *kind = (origin != state->mod ? "Imported" : "Named");
9693 	    int tag = insert (res);
9694 	    dump (dumper::TREE)
9695 	      && dump ("%s:%d %C:%N@%M", kind, tag, TREE_CODE (res),
9696 		       res, (*modules)[origin]);
9697 
9698 	    if (!add_indirects (res))
9699 	      {
9700 		set_overrun ();
9701 		res = NULL_TREE;
9702 	      }
9703 	  }
9704       }
9705       break;
9706 
9707     case tt_template:
9708       /* A template.  */
9709       if (tree tpl = tree_node ())
9710 	{
9711 	  res = DECL_TEMPLATE_RESULT (tpl);
9712 	  dump (dumper::TREE)
9713 	    && dump ("Read template %C:%N", TREE_CODE (res), res);
9714 	}
9715       break;
9716     }
9717 
9718   if (is_use && !unused && res && DECL_P (res) && !TREE_USED (res))
9719     {
9720       /* Mark decl used as mark_used does -- we cannot call
9721 	 mark_used in the middle of streaming; we only need a subset
9722 	 of its functionality.   */
9723       TREE_USED (res) = true;
9724 
9725       /* And for structured bindings also the underlying decl.  */
9726       if (DECL_DECOMPOSITION_P (res) && DECL_DECOMP_BASE (res))
9727 	TREE_USED (DECL_DECOMP_BASE (res)) = true;
9728 
9729       if (DECL_CLONED_FUNCTION_P (res))
9730 	TREE_USED (DECL_CLONED_FUNCTION (res)) = true;
9731     }
9732 
9733   dump.outdent ();
9734   return res;
9735 }
9736 
9737 void
9738 trees_out::tpl_parms (tree parms, unsigned &tpl_levels)
9739 {
9740   if (!parms)
9741     return;
9742 
9743   if (TREE_VISITED (parms))
9744     {
9745       ref_node (parms);
9746       return;
9747     }
9748 
9749   tpl_parms (TREE_CHAIN (parms), tpl_levels);
9750 
9751   tree vec = TREE_VALUE (parms);
9752   unsigned len = TREE_VEC_LENGTH (vec);
9753   /* Depth.  */
9754   int tag = insert (parms);
9755   if (streaming_p ())
9756     {
9757       i (len + 1);
9758       dump (dumper::TREE)
9759 	&& dump ("Writing template parms:%d level:%N length:%d",
9760 		 tag, TREE_PURPOSE (parms), len);
9761     }
9762   tree_node (TREE_PURPOSE (parms));
9763 
9764   for (unsigned ix = 0; ix != len; ix++)
9765     {
9766       tree parm = TREE_VEC_ELT (vec, ix);
9767       tree decl = TREE_VALUE (parm);
9768 
9769       gcc_checking_assert (DECL_TEMPLATE_PARM_P (decl));
9770       if (CHECKING_P)
9771 	switch (TREE_CODE (decl))
9772 	  {
9773 	  default: gcc_unreachable ();
9774 
9775 	  case TEMPLATE_DECL:
9776 	    gcc_assert ((TREE_CODE (TREE_TYPE (decl)) == TEMPLATE_TEMPLATE_PARM)
9777 			&& (TREE_CODE (DECL_TEMPLATE_RESULT (decl)) == TYPE_DECL)
9778 			&& (TYPE_NAME (TREE_TYPE (decl)) == decl));
9779 	    break;
9780 
9781 	  case TYPE_DECL:
9782 	    gcc_assert ((TREE_CODE (TREE_TYPE (decl)) == TEMPLATE_TYPE_PARM)
9783 			&& (TYPE_NAME (TREE_TYPE (decl)) == decl));
9784 	    break;
9785 
9786 	  case PARM_DECL:
9787 	    gcc_assert ((TREE_CODE (DECL_INITIAL (decl)) == TEMPLATE_PARM_INDEX)
9788 			&& (TREE_CODE (TEMPLATE_PARM_DECL (DECL_INITIAL (decl)))
9789 			    == CONST_DECL)
9790 			&& (DECL_TEMPLATE_PARM_P
9791 			    (TEMPLATE_PARM_DECL (DECL_INITIAL (decl)))));
9792 	    break;
9793 	  }
9794 
9795       tree_node (decl);
9796       tree_node (TEMPLATE_PARM_CONSTRAINTS (parm));
9797     }
9798 
9799   tpl_levels++;
9800 }
9801 
9802 tree
9803 trees_in::tpl_parms (unsigned &tpl_levels)
9804 {
9805   tree parms = NULL_TREE;
9806 
9807   while (int len = i ())
9808     {
9809       if (len < 0)
9810 	{
9811 	  parms = back_ref (len);
9812 	  continue;
9813 	}
9814 
9815       len -= 1;
9816       parms = tree_cons (NULL_TREE, NULL_TREE, parms);
9817       int tag = insert (parms);
9818       TREE_PURPOSE (parms) = tree_node ();
9819 
9820       dump (dumper::TREE)
9821 	&& dump ("Reading template parms:%d level:%N length:%d",
9822 		 tag, TREE_PURPOSE (parms), len);
9823 
9824       tree vec = make_tree_vec (len);
9825       for (int ix = 0; ix != len; ix++)
9826 	{
9827 	  tree decl = tree_node ();
9828 	  if (!decl)
9829 	    return NULL_TREE;
9830 
9831 	  tree parm = build_tree_list (NULL, decl);
9832 	  TEMPLATE_PARM_CONSTRAINTS (parm) = tree_node ();
9833 
9834 	  TREE_VEC_ELT (vec, ix) = parm;
9835 	}
9836 
9837       TREE_VALUE (parms) = vec;
9838       tpl_levels++;
9839     }
9840 
9841   return parms;
9842 }
9843 
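/* Stream the remaining template parm data for TMPL's TPL_LEVELS
   levels: each level's TREE_TYPE and the parm default arguments,
   walked from the last parm backwards and stopping at the first
   parm without a default.  */
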
9844 void
9845 trees_out::tpl_parms_fini (tree tmpl, unsigned tpl_levels)
9846 {
9847   for (tree parms = DECL_TEMPLATE_PARMS (tmpl);
9848        tpl_levels--; parms = TREE_CHAIN (parms))
9849     {
9850       tree vec = TREE_VALUE (parms);
9851 
9852       tree_node (TREE_TYPE (vec));
9853       tree dflt = error_mark_node;
9854       for (unsigned ix = TREE_VEC_LENGTH (vec); ix--;)
9855 	{
9856 	  tree parm = TREE_VEC_ELT (vec, ix);
9857 	  if (dflt)
9858 	    {
9859 	      dflt = TREE_PURPOSE (parm);
9860 	      tree_node (dflt);
9861 	    }
9862 
9863 	  if (streaming_p ())
9864 	    {
9865 	      tree decl = TREE_VALUE (parm);
9866 	      if (TREE_CODE (decl) == TEMPLATE_DECL)
9867 		{
9868 		  tree ctx = DECL_CONTEXT (decl);
9869 		  tree inner = DECL_TEMPLATE_RESULT (decl);
9870 		  tree tpi = (TREE_CODE (inner) == TYPE_DECL
9871 			      ? TEMPLATE_TYPE_PARM_INDEX (TREE_TYPE (decl))
9872 			      : DECL_INITIAL (inner));
9873 		  bool original = (TEMPLATE_PARM_LEVEL (tpi)
9874 				   == TEMPLATE_PARM_ORIG_LEVEL (tpi));
9875 		  /* Original template template parms have a context
9876 		     of their owning template.  Reduced ones do not.  */
9877 		  gcc_checking_assert (original ? ctx == tmpl : !ctx);
9878 		}
9879 	    }
9880 	}
9881     }
9882 }
9883 
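/* Read the data written by tpl_parms_fini above, filling in default
   arguments and restoring the DECL_CONTEXT of original template
   template parms.  Returns false on stream overrun.  */
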
9884 bool
9885 trees_in::tpl_parms_fini (tree tmpl, unsigned tpl_levels)
9886 {
9887   for (tree parms = DECL_TEMPLATE_PARMS (tmpl);
9888        tpl_levels--; parms = TREE_CHAIN (parms))
9889     {
9890       tree vec = TREE_VALUE (parms);
9891       tree dflt = error_mark_node;
9892 
9893       TREE_TYPE (vec) = tree_node ();
9894       for (unsigned ix = TREE_VEC_LENGTH (vec); ix--;)
9895 	{
9896 	  tree parm = TREE_VEC_ELT (vec, ix);
9897 	  if (dflt)
9898 	    {
9899 	      dflt = tree_node ();
9900 	      if (get_overrun ())
9901 		return false;
9902 	      TREE_PURPOSE (parm) = dflt;
9903 	    }
9904 
9905 	  tree decl = TREE_VALUE (parm);
9906 	  if (TREE_CODE (decl) == TEMPLATE_DECL)
9907 	    {
9908 	      tree inner = DECL_TEMPLATE_RESULT (decl);
9909 	      tree tpi = (TREE_CODE (inner) == TYPE_DECL
9910 			  ? TEMPLATE_TYPE_PARM_INDEX (TREE_TYPE (decl))
9911 			  : DECL_INITIAL (inner));
9912 	      bool original = (TEMPLATE_PARM_LEVEL (tpi)
9913 			       == TEMPLATE_PARM_ORIG_LEVEL (tpi));
9914 	      /* Original template template parms have a context
9915 		 of their owning template.  Reduced ones do not.  */
9916 	      if (original)
9917 		DECL_CONTEXT (decl) = tmpl;
9918 	    }
9919 	}
9920     }
9921   return true;
9922 }
9923 
9924 /* PARMS is a LIST, one node per template parameter level.
9925    Its TREE_VALUE is a TREE_VEC of parm info for that level.
9926    Each element of the vec is a TREE_LIST, whose
9927    TREE_VALUE is the PARM_DECL, TYPE_DECL or TEMPLATE_DECL, and whose
9928    TREE_PURPOSE is the default value.  */
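/* For example (a sketch): for a single level <typename T, int N = 3>,
   PARMS is one TREE_LIST node whose TREE_VALUE is a two-element
   TREE_VEC; element 0's TREE_VALUE is T's TYPE_DECL with no default,
   element 1's TREE_VALUE is N's PARM_DECL with TREE_PURPOSE its
   default value, 3.  */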
9929 
9930 void
9931 trees_out::tpl_header (tree tpl, unsigned *tpl_levels)
9932 {
9933   tree parms = DECL_TEMPLATE_PARMS (tpl);
9934   tpl_parms (parms, *tpl_levels);
9935 
9936   /* Mark end.  */
9937   if (streaming_p ())
9938     u (0);
9939 
9940   if (*tpl_levels)
9941     tree_node (TEMPLATE_PARMS_CONSTRAINTS (parms));
9942 }
9943 
9944 bool
9945 trees_in::tpl_header (tree tpl, unsigned *tpl_levels)
9946 {
9947   tree parms = tpl_parms (*tpl_levels);
9948   if (!parms)
9949     return false;
9950 
9951   DECL_TEMPLATE_PARMS (tpl) = parms;
9952 
9953   if (*tpl_levels)
9954     TEMPLATE_PARMS_CONSTRAINTS (parms) = tree_node ();
9955 
9956   return true;
9957 }
9958 
9959 /* Stream skeleton parm nodes, with their flags, type & parm indices.
9960    All the parms will have consecutive tags.  */
9961 
9962 void
9963 trees_out::fn_parms_init (tree fn)
9964 {
9965   /* First init them.  */
9966   int base_tag = ref_num - 1;
9967   int ix = 0;
9968   for (tree parm = DECL_ARGUMENTS (fn);
9969        parm; parm = DECL_CHAIN (parm), ix++)
9970     {
9971       if (streaming_p ())
9972 	{
9973 	  start (parm);
9974 	  tree_node_bools (parm);
9975 	}
9976       int tag = insert (parm);
9977       gcc_checking_assert (base_tag - ix == tag);
9978     }
9979   /* Mark the end.  */
9980   if (streaming_p ())
9981     u (0);
9982 
9983   /* Now stream their contents.  */
9984   ix = 0;
9985   for (tree parm = DECL_ARGUMENTS (fn);
9986        parm; parm = DECL_CHAIN (parm), ix++)
9987     {
9988       if (streaming_p ())
9989 	dump (dumper::TREE)
9990 	  && dump ("Writing parm:%d %u (%N) of %N",
9991 		   base_tag - ix, ix, parm, fn);
9992       tree_node_vals (parm);
9993     }
9994 }
9995 
9996 /* Build skeleton parm nodes, read their flags, type & parm indices.  */
9997 
9998 int
9999 trees_in::fn_parms_init (tree fn)
10000 {
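  /* Tags of new back references are negative; the parms created
     below will occupy consecutive slots from this tag downwards.  */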
10001   int base_tag = ~(int)back_refs.length ();
10002 
10003   tree *parm_ptr = &DECL_ARGUMENTS (fn);
10004   int ix = 0;
10005   for (; int code = u (); ix++)
10006     {
10007       tree parm = start (code);
10008       if (!tree_node_bools (parm))
10009 	return 0;
10010 
10011       int tag = insert (parm);
10012       gcc_checking_assert (base_tag - ix == tag);
10013       *parm_ptr = parm;
10014       parm_ptr = &DECL_CHAIN (parm);
10015     }
10016 
10017   ix = 0;
10018   for (tree parm = DECL_ARGUMENTS (fn);
10019        parm; parm = DECL_CHAIN (parm), ix++)
10020     {
10021       dump (dumper::TREE)
10022 	&& dump ("Reading parm:%d %u (%N) of %N",
10023 		 base_tag - ix, ix, parm, fn);
10024       if (!tree_node_vals (parm))
10025 	return 0;
10026     }
10027 
10028   return base_tag;
10029 }
10030 
10031 /* Read the remaining parm node data.  Replace with existing (if
10032    non-null) in the map.  */
10033 
10034 void
10035 trees_in::fn_parms_fini (int tag, tree fn, tree existing, bool is_defn)
10036 {
10037   tree existing_parm = existing ? DECL_ARGUMENTS (existing) : NULL_TREE;
10038   tree parms = DECL_ARGUMENTS (fn);
10039   unsigned ix = 0;
10040   for (tree parm = parms; parm; parm = DECL_CHAIN (parm), ix++)
10041     {
10042       if (existing_parm)
10043 	{
10044 	  if (is_defn && !DECL_SAVED_TREE (existing))
10045 	    {
10046 	      /* If we're about to become the definition, set the
10047 		 names of the parms from us.  */
10048 	      DECL_NAME (existing_parm) = DECL_NAME (parm);
10049 	      DECL_SOURCE_LOCATION (existing_parm) = DECL_SOURCE_LOCATION (parm);
10050 	    }
10051 
10052 	  back_refs[~tag] = existing_parm;
10053 	  existing_parm = DECL_CHAIN (existing_parm);
10054 	}
10055       tag--;
10056     }
10057 }
10058 
10059 /* DEP is the depset of some decl we're streaming by value.  Determine
10060    the merging behaviour.  */
10061 
10062 merge_kind
10063 trees_out::get_merge_kind (tree decl, depset *dep)
10064 {
10065   if (!dep)
10066     {
10067       if (VAR_OR_FUNCTION_DECL_P (decl))
10068 	{
10069 	  /* Any var or function with template info should have DEP.  */
10070 	  gcc_checking_assert (!DECL_LANG_SPECIFIC (decl)
10071 			       || !DECL_TEMPLATE_INFO (decl));
10072 	  if (DECL_LOCAL_DECL_P (decl))
10073 	    return MK_unique;
10074 	}
10075 
10076       /* Either unique, or some member of a class that cannot have an
10077 	 out-of-class definition.  For instance a FIELD_DECL.  */
10078       tree ctx = CP_DECL_CONTEXT (decl);
10079       if (TREE_CODE (ctx) == FUNCTION_DECL)
10080 	{
10081 	  /* USING_DECLs and NAMESPACE_DECLs cannot have DECL_TEMPLATE_INFO --
10082 	     this isn't permitting them to have one.   */
10083 	  gcc_checking_assert (TREE_CODE (decl) == USING_DECL
10084 			       || TREE_CODE (decl) == NAMESPACE_DECL
10085 			       || !DECL_LANG_SPECIFIC (decl)
10086 			       || !DECL_TEMPLATE_INFO (decl));
10087 
10088 	  return MK_unique;
10089 	}
10090 
10091       if (TREE_CODE (decl) == TEMPLATE_DECL
10092 	  && DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (decl))
10093 	return MK_local_friend;
10094 
10095       gcc_checking_assert (TYPE_P (ctx));
10096       if (TREE_CODE (decl) == USING_DECL)
10097 	return MK_field;
10098 
10099       if (TREE_CODE (decl) == FIELD_DECL)
10100 	{
10101 	  if (DECL_NAME (decl))
10102 	    {
10103 	      /* Anonymous FIELD_DECLs have a NULL name.  */
10104 	      gcc_checking_assert (!IDENTIFIER_ANON_P (DECL_NAME (decl)));
10105 	      return MK_named;
10106 	    }
10107 
10108 	  if (!DECL_NAME (decl)
10109 	      && !RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl))
10110 	      && !DECL_BIT_FIELD_REPRESENTATIVE (decl))
10111 	    {
10112 	      /* The underlying storage unit for a bitfield.  We do not
10113 		 need to dedup it, because it's only reachable through
10114 		 the bitfields it represents.  And those are deduped.  */
10115 	      // FIXME: Is that assertion correct -- do we ever fish it
10116 	      // out and put it in an expr?
10117 	      gcc_checking_assert ((TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
10118 				    ? TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
10119 				    : TREE_CODE (TREE_TYPE (decl)))
10120 				   == INTEGER_TYPE);
10121 	      return MK_unique;
10122 	    }
10123 
10124 	  return MK_field;
10125 	}
10126 
10127       if (TREE_CODE (decl) == CONST_DECL)
10128 	return MK_named;
10129 
10130       if (TREE_CODE (decl) == VAR_DECL
10131 	  && DECL_VTABLE_OR_VTT_P (decl))
10132 	return MK_vtable;
10133 
10134       if (DECL_THUNK_P (decl))
10135 	/* Thunks are unique-enough, because they're only referenced
10136 	   from the vtable.  And that's either new (so we want the
10137 	   thunks), or it's a duplicate (so it will be dropped).  */
10138 	return MK_unique;
10139 
10140       /* There should be no other cases.  */
10141       gcc_unreachable ();
10142     }
10143 
10144   gcc_checking_assert (TREE_CODE (decl) != FIELD_DECL
10145 		       && TREE_CODE (decl) != USING_DECL
10146 		       && TREE_CODE (decl) != CONST_DECL);
10147 
10148   if (is_key_order ())
10149     {
10150       /* When doing the mergeability graph, there's an indirection to
10151 	 the actual depset.  */
10152       gcc_assert (dep->is_special ());
10153       dep = dep->deps[0];
10154     }
10155 
10156   gcc_checking_assert (decl == dep->get_entity ());
10157 
10158   merge_kind mk = MK_named;
10159   switch (dep->get_entity_kind ())
10160     {
10161     default:
10162       gcc_unreachable ();
10163 
10164     case depset::EK_PARTIAL:
10165       mk = MK_partial;
10166       break;
10167 
10168     case depset::EK_DECL:
10169       {
10170 	tree ctx = CP_DECL_CONTEXT (decl);
10171 
10172 	switch (TREE_CODE (ctx))
10173 	  {
10174 	  default:
10175 	    gcc_unreachable ();
10176 
10177 	  case FUNCTION_DECL:
10178 	    // FIXME: This can occur for (a) voldemorty TYPE_DECLS
10179 	    // (which are returned from a function), or (b)
10180 	    // block-scope class definitions in template functions.
10181 	    // These are as unique as the containing function.  While
10182 	    // on read-back we can discover if the CTX was a
10183 	    // duplicate, we don't have a mechanism to get from the
10184 	    // existing CTX to the existing version of this decl.
10185 	    gcc_checking_assert
10186 	      (DECL_IMPLICIT_TYPEDEF_P (STRIP_TEMPLATE (decl)));
10187 
10188 	    mk = MK_unique;
10189 	    break;
10190 
10191 	  case RECORD_TYPE:
10192 	  case UNION_TYPE:
10193 	    if (DECL_NAME (decl) == as_base_identifier)
10194 	      mk = MK_as_base;
10195 	    else if (IDENTIFIER_ANON_P (DECL_NAME (decl)))
10196 	      mk = MK_field;
10197 	    break;
10198 
10199 	  case NAMESPACE_DECL:
10200 	    if (DECL_IMPLICIT_TYPEDEF_P (STRIP_TEMPLATE (decl))
10201 		&& LAMBDA_TYPE_P (TREE_TYPE (decl)))
10202 	      if (tree scope
10203 		  = LAMBDA_EXPR_EXTRA_SCOPE (CLASSTYPE_LAMBDA_EXPR
10204 					     (TREE_TYPE (decl))))
10205 		if (TREE_CODE (scope) == VAR_DECL
10206 		    && DECL_MODULE_ATTACHMENTS_P (scope))
10207 		  {
10208 		    mk = MK_attached;
10209 		    break;
10210 		  }
10211 
10212 	    if (TREE_CODE (decl) == TEMPLATE_DECL
10213 		&& DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (decl))
10214 	      mk = MK_local_friend;
10215 	    else if (IDENTIFIER_ANON_P (DECL_NAME (decl)))
10216 	      {
10217 		if (DECL_IMPLICIT_TYPEDEF_P (decl)
10218 		    && UNSCOPED_ENUM_P (TREE_TYPE (decl))
10219 		    && TYPE_VALUES (TREE_TYPE (decl)))
10220 		  /* Keyed by first enum value, and underlying type.  */
10221 		  mk = MK_enum;
10222 		else
10223 		  /* No way to merge it, it is an ODR land-mine.  */
10224 		  mk = MK_unique;
10225 	      }
10226 	  }
10227       }
10228       break;
10229 
10230     case depset::EK_SPECIALIZATION:
10231       {
10232 	gcc_checking_assert (dep->is_special ());
10233 
10234 	if (TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
10235 	  /* Block-scope classes of templates are themselves
10236 	     templates.  */
10237 	  gcc_checking_assert (DECL_IMPLICIT_TYPEDEF_P (decl));
10238 
10239 	if (dep->is_friend_spec ())
10240 	  mk = MK_friend_spec;
10241 	else if (dep->is_type_spec ())
10242 	  mk = MK_type_spec;
10243 	else if (dep->is_alias ())
10244 	  mk = MK_alias_spec;
10245 	else
10246 	  mk = MK_decl_spec;
10247 
10248 	if (TREE_CODE (decl) == TEMPLATE_DECL)
10249 	  {
10250 	    spec_entry *entry = reinterpret_cast <spec_entry *> (dep->deps[0]);
10251 	    if (TREE_CODE (entry->spec) != TEMPLATE_DECL)
10252 	      mk = merge_kind (mk | MK_tmpl_tmpl_mask);
10253 	  }
10254       }
10255       break;
10256     }
10257 
10258   return mk;
10259 }
10260 
10261 
10262 /* The container of DECL -- not necessarily its context!  */
10263 
10264 tree
10265 trees_out::decl_container (tree decl)
10266 {
10267   int use_tpl;
10268   tree tpl = NULL_TREE;
10269   if (tree template_info = node_template_info (decl, use_tpl))
10270     tpl = TI_TEMPLATE (template_info);
10271   if (tpl == decl)
10272     tpl = nullptr;
10273 
10274   /* Stream the template we're instantiated from.  */
10275   tree_node (tpl);
10276 
10277   tree container = NULL_TREE;
10278   if (TREE_CODE (decl) == TEMPLATE_DECL
10279       && DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (decl))
10280     container = DECL_CHAIN (decl);
10281   else
10282     container = CP_DECL_CONTEXT (decl);
10283 
10284   if (TYPE_P (container))
10285     container = TYPE_NAME (container);
10286 
10287   tree_node (container);
10288 
10289   return container;
10290 }
10291 
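/* Read the container streamed by trees_out::decl_container: the
   maybe-template is read and discarded, then the container itself.  */
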
10292 tree
10293 trees_in::decl_container ()
10294 {
10295   /* The maybe-template.  */
10296   (void)tree_node ();
10297 
10298   tree container = tree_node ();
10299 
10300   return container;
10301 }
10302 
10303 /* Write out key information about a mergeable DEP.  Does not write
10304    the contents of DEP itself.  The context has already been
10305    written.  The container has already been streamed.  */
10306 
10307 void
10308 trees_out::key_mergeable (int tag, merge_kind mk, tree decl, tree inner,
10309 			  tree container, depset *dep)
10310 {
10311   if (dep && is_key_order ())
10312     {
10313       gcc_checking_assert (dep->is_special ());
10314       dep = dep->deps[0];
10315     }
10316 
10317   if (streaming_p ())
10318     dump (dumper::MERGE)
10319       && dump ("Writing:%d's %s merge key (%s) %C:%N", tag, merge_kind_name[mk],
10320 	       dep ? dep->entity_kind_name () : "contained",
10321 	       TREE_CODE (decl), decl);
10322 
10323   /* Now write the locating information. */
10324   if (mk & MK_template_mask)
10325     {
10326       /* Specializations are located via their originating template,
10327 	 and the set of template args they specialize.  */
10328       gcc_checking_assert (dep && dep->is_special ());
10329       spec_entry *entry = reinterpret_cast <spec_entry *> (dep->deps[0]);
10330 
10331       tree_node (entry->tmpl);
10332       tree_node (entry->args);
10333       if (mk & MK_tmpl_decl_mask)
10334 	if (flag_concepts && TREE_CODE (inner) == VAR_DECL)
10335 	  {
10336 	    /* Variable template partial specializations might need
10337 	       constraints (see spec_hasher::equal).  It's simpler to
10338 	       write NULL when we don't need them.  */
10339 	    tree constraints = NULL_TREE;
10340 
10341 	    if (uses_template_parms (entry->args))
10342 	      constraints = get_constraints (inner);
10343 	    tree_node (constraints);
10344 	  }
10345 
10346       if (CHECKING_P)
10347 	{
10348 	  /* Make sure we can locate the decl.  */
10349 	  tree existing = match_mergeable_specialization
10350 	    (bool (mk & MK_tmpl_decl_mask), entry);
10351 
10352 	  gcc_assert (existing);
10353 	  if (mk & MK_tmpl_decl_mask)
10354 	    {
10355 	      if (mk & MK_tmpl_alias_mask)
10356 		/* It should be in both tables.  */
10357 		gcc_checking_assert
10358 		  (same_type_p (match_mergeable_specialization (false, entry),
10359 				TREE_TYPE (existing)));
10360 	      if (mk & MK_tmpl_tmpl_mask)
10361 		existing = DECL_TI_TEMPLATE (existing);
10362 	    }
10363 	  else
10364 	    {
10365 	      if (mk & MK_tmpl_tmpl_mask)
10366 		existing = CLASSTYPE_TI_TEMPLATE (existing);
10367 	      else
10368 		existing = TYPE_NAME (existing);
10369 	    }
10370 
10371 	  /* The walkabout should have found ourselves.  */
10372 	  gcc_checking_assert (TREE_CODE (decl) == TYPE_DECL
10373 			       ? same_type_p (TREE_TYPE (decl),
10374 					      TREE_TYPE (existing))
10375 			       : existing == decl);
10376 	}
10377     }
10378   else if (mk != MK_unique)
10379     {
10380       merge_key key;
10381       tree name = DECL_NAME (decl);
10382 
10383       switch (mk)
10384 	{
10385 	default:
10386 	  gcc_unreachable ();
10387 
10388 	case MK_named:
10389 	case MK_friend_spec:
10390 	  if (IDENTIFIER_CONV_OP_P (name))
10391 	    name = conv_op_identifier;
10392 
10393 	  if (TREE_CODE (inner) == FUNCTION_DECL)
10394 	    {
10395 	      /* Functions are distinguished by parameter types.  */
10396 	      tree fn_type = TREE_TYPE (inner);
10397 
10398 	      key.ref_q = type_memfn_rqual (fn_type);
10399 	      key.args = TYPE_ARG_TYPES (fn_type);
10400 
10401 	      if (tree reqs = get_constraints (inner))
10402 		{
10403 		  if (cxx_dialect < cxx20)
10404 		    reqs = CI_ASSOCIATED_CONSTRAINTS (reqs);
10405 		  else
10406 		    reqs = CI_DECLARATOR_REQS (reqs);
10407 		  key.constraints = reqs;
10408 		}
10409 
10410 	      if (IDENTIFIER_CONV_OP_P (name)
10411 		  || (decl != inner
10412 		      && !(name == fun_identifier
10413 			   /* In case the user names something _FUN  */
10414 			   && LAMBDA_TYPE_P (DECL_CONTEXT (inner)))))
10415 		/* A function template or conversion operator also needs
10416 		   the return type.  Except for the _FUN thunk of a
10417 		   generic lambda, which has a recursive decl_type'd
10418 		   return type.  */
10419 		// FIXME: What if the return type is a voldemort?
10420 		key.ret = fndecl_declared_return_type (inner);
10421 	    }
10422 	  break;
10423 
10424 	case MK_field:
10425 	  {
10426 	    unsigned ix = 0;
10427 	    if (TREE_CODE (inner) != FIELD_DECL)
10428 	      name = NULL_TREE;
10429 	    else
10430 	      gcc_checking_assert (!name || !IDENTIFIER_ANON_P (name));
10431 
10432 	    for (tree field = TYPE_FIELDS (TREE_TYPE (container));
10433 		 ; field = DECL_CHAIN (field))
10434 	      {
10435 		tree finner = STRIP_TEMPLATE (field);
10436 		if (TREE_CODE (finner) == TREE_CODE (inner))
10437 		  {
10438 		    if (finner == inner)
10439 		      break;
10440 		    ix++;
10441 		  }
10442 	      }
10443 	    key.index = ix;
10444 	  }
10445 	  break;
10446 
10447 	case MK_vtable:
10448 	  {
10449 	    tree vtable = CLASSTYPE_VTABLES (TREE_TYPE (container));
10450 	    for (unsigned ix = 0; ; vtable = DECL_CHAIN (vtable), ix++)
10451 	      if (vtable == decl)
10452 		{
10453 		  key.index = ix;
10454 		  break;
10455 		}
10456 	    name = NULL_TREE;
10457 	  }
10458 	  break;
10459 
10460 	case MK_as_base:
10461 	  gcc_checking_assert
10462 	    (decl == TYPE_NAME (CLASSTYPE_AS_BASE (TREE_TYPE (container))));
10463 	  break;
10464 
10465 	case MK_local_friend:
10466 	  {
10467 	    /* Find by index on the class's DECL_LIST  */
10468 	    unsigned ix = 0;
10469 	    for (tree decls = CLASSTYPE_DECL_LIST (TREE_CHAIN (decl));
10470 		 decls; decls = TREE_CHAIN (decls))
10471 	      if (!TREE_PURPOSE (decls))
10472 		{
10473 		  tree frnd = friend_from_decl_list (TREE_VALUE (decls));
10474 		  if (frnd == decl)
10475 		    break;
10476 		  ix++;
10477 		}
10478 	    key.index = ix;
10479 	    name = NULL_TREE;
10480 	  }
10481 	  break;
10482 
10483 	case MK_enum:
10484 	  {
10485 	    /* Anonymous enums are located by their first identifier,
10486 	       and underlying type.  */
10487 	    tree type = TREE_TYPE (decl);
10488 
10489 	    gcc_checking_assert (UNSCOPED_ENUM_P (type));
10490 	    /* Using the type name drops the bit precision we might
10491 	       have been using on the enum.  */
10492 	    key.ret = TYPE_NAME (ENUM_UNDERLYING_TYPE (type));
10493 	    if (tree values = TYPE_VALUES (type))
10494 	      name = DECL_NAME (TREE_VALUE (values));
10495 	  }
10496 	  break;
10497 
10498 	case MK_attached:
10499 	  {
10500 	    gcc_checking_assert (LAMBDA_TYPE_P (TREE_TYPE (inner)));
10501 	    tree scope = LAMBDA_EXPR_EXTRA_SCOPE (CLASSTYPE_LAMBDA_EXPR
10502 						  (TREE_TYPE (inner)));
10503 	    gcc_checking_assert (TREE_CODE (scope) == VAR_DECL);
10504 	    auto *root = attached_table->get (scope);
10505 	    unsigned ix = root->length ();
10506 	    /* If we don't find it, we'll write a really big number
10507 	       that the reader will ignore.  */
10508 	    while (ix--)
10509 	      if ((*root)[ix] == inner)
10510 		break;
10511 
10512 	    /* Use the attached-to decl as the 'name'.  */
10513 	    name = scope;
10514 	    key.index = ix;
10515 	  }
10516 	  break;
10517 
10518 	case MK_partial:
10519 	  {
10520 	    key.constraints = get_constraints (inner);
10521 	    key.ret = CLASSTYPE_TI_TEMPLATE (TREE_TYPE (inner));
10522 	    key.args = CLASSTYPE_TI_ARGS (TREE_TYPE (inner));
10523 	  }
10524 	  break;
10525 	}
10526 
10527       tree_node (name);
10528       if (streaming_p ())
10529 	{
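	  /* Pack the ref-qualifier into the low two bits and the
	     index above them; trees_in::key_mergeable unpacks the
	     same way.  */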
10530 	  unsigned code = (key.ref_q << 0) | (key.index << 2);
10531 	  u (code);
10532 	}
10533 
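      /* MK_enum just needs the underlying type's name.  Functions and
	 partial specializations also stream the return type (or
	 primary template), the parm types one by one (or the whole
	 argument vector for a partial), and finally the constraints.  */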
10534       if (mk == MK_enum)
10535 	tree_node (key.ret);
10536       else if (mk == MK_partial
10537 	       || (mk == MK_named && inner
10538 		   && TREE_CODE (inner) == FUNCTION_DECL))
10539 	{
10540 	  tree_node (key.ret);
10541 	  tree arg = key.args;
10542 	  if (mk == MK_named)
10543 	    while (arg && arg != void_list_node)
10544 	      {
10545 		tree_node (TREE_VALUE (arg));
10546 		arg = TREE_CHAIN (arg);
10547 	      }
10548 	  tree_node (arg);
10549 	  tree_node (key.constraints);
10550 	}
10551     }
10552 }
10553 
10554 /* DECL is a new declaration that may be duplicated in OVL.  Use RET &
10555    ARGS to find its clone, or NULL.  If DECL's DECL_NAME is NULL, this
10556    has been found by a proxy.  It will be an enum type located by it's
10557    first member.
10558 
10559    We're conservative with matches, so ambiguous decls will be
10560    registered as different, then lead to a lookup error if the two
10561    modules are both visible.  Perhaps we want to do something similar
10562    to duplicate decls to get ODR errors on loading?  We already have
10563    some special casing for namespaces.  */
10564 
10565 static tree
10566 check_mergeable_decl (merge_kind mk, tree decl, tree ovl, merge_key const &key)
10567 {
10568   tree found = NULL_TREE;
10569   for (ovl_iterator iter (ovl); !found && iter; ++iter)
10570     {
10571       tree match = *iter;
10572 
10573       tree d_inner = decl;
10574       tree m_inner = match;
10575 
10576     again:
10577       if (TREE_CODE (d_inner) != TREE_CODE (m_inner))
10578 	{
10579 	  if (TREE_CODE (match) == NAMESPACE_DECL
10580 	      && !DECL_NAMESPACE_ALIAS (match))
10581 	    /* Namespaces are never overloaded.  */
10582 	    found = match;
10583 
10584 	  continue;
10585 	}
10586 
10587       switch (TREE_CODE (d_inner))
10588 	{
10589 	case TEMPLATE_DECL:
10590 	  if (template_heads_equivalent_p (d_inner, m_inner))
10591 	    {
10592 	      d_inner = DECL_TEMPLATE_RESULT (d_inner);
10593 	      m_inner = DECL_TEMPLATE_RESULT (m_inner);
10594 	      if (d_inner == error_mark_node
10595 		  && TYPE_DECL_ALIAS_P (m_inner))
10596 		{
10597 		  found = match;
10598 		  break;
10599 		}
10600 	      goto again;
10601 	    }
10602 	  break;
10603 
10604 	case FUNCTION_DECL:
10605 	  if (tree m_type = TREE_TYPE (m_inner))
10606 	    if ((!key.ret
10607 		 || same_type_p (key.ret, fndecl_declared_return_type (m_inner)))
10608 		&& type_memfn_rqual (m_type) == key.ref_q
10609 		&& compparms (key.args, TYPE_ARG_TYPES (m_type))
10610 		/* Reject if old is a "C" builtin and new is not "C".
10611 		   Matches decls_match behaviour.  */
10612 		&& (!DECL_IS_UNDECLARED_BUILTIN (m_inner)
10613 		    || !DECL_EXTERN_C_P (m_inner)
10614 		    || DECL_EXTERN_C_P (d_inner)))
10615 	      {
10616 		tree m_reqs = get_constraints (m_inner);
10617 		if (m_reqs)
10618 		  {
10619 		    if (cxx_dialect < cxx20)
10620 		      m_reqs = CI_ASSOCIATED_CONSTRAINTS (m_reqs);
10621 		    else
10622 		      m_reqs = CI_DECLARATOR_REQS (m_reqs);
10623 		  }
10624 
10625 		if (cp_tree_equal (key.constraints, m_reqs))
10626 		  found = match;
10627 	      }
10628 	  break;
10629 
10630 	case TYPE_DECL:
10631 	  if (DECL_IMPLICIT_TYPEDEF_P (d_inner)
10632 	      == DECL_IMPLICIT_TYPEDEF_P (m_inner))
10633 	    {
10634 	      if (!IDENTIFIER_ANON_P (DECL_NAME (m_inner)))
10635 		return match;
10636 	      else if (mk == MK_enum
10637 		       && (TYPE_NAME (ENUM_UNDERLYING_TYPE (TREE_TYPE (m_inner)))
10638 			   == key.ret))
10639 		found = match;
10640 	    }
10641 	  break;
10642 
10643 	default:
10644 	  found = match;
10645 	  break;
10646 	}
10647     }
10648 
10649   return found;
10650 }
10651 
10652 /* DECL, INNER & TYPE are a skeleton set of nodes for a decl.  Only
10653    the bools have been filled in.  Read its merging key and merge it.
10654    Returns the existing decl if there is one.  */
10655 
10656 tree
10657 trees_in::key_mergeable (int tag, merge_kind mk, tree decl, tree inner,
10658 			 tree type, tree container, bool is_mod)
10659 {
10660   const char *kind = "new";
10661   tree existing = NULL_TREE;
10662 
10663   if (mk & MK_template_mask)
10664     {
10665       // FIXME: We could stream the specialization hash?
10666       spec_entry spec;
10667       spec.tmpl = tree_node ();
10668       spec.args = tree_node ();
10669 
10670       if (get_overrun ())
10671 	return error_mark_node;
10672 
10673       DECL_NAME (decl) = DECL_NAME (spec.tmpl);
10674       DECL_CONTEXT (decl) = DECL_CONTEXT (spec.tmpl);
10675       DECL_NAME (inner) = DECL_NAME (decl);
10676       DECL_CONTEXT (inner) = DECL_CONTEXT (decl);
10677 
10678       tree constr = NULL_TREE;
10679       bool is_decl = mk & MK_tmpl_decl_mask;
10680       if (is_decl)
10681 	{
10682 	  if (flag_concepts && TREE_CODE (inner) == VAR_DECL)
10683 	    {
10684 	      constr = tree_node ();
10685 	      if (constr)
10686 		set_constraints (inner, constr);
10687 	    }
10688 	  spec.spec = (mk & MK_tmpl_tmpl_mask) ? inner : decl;
10689 	}
10690       else
10691 	spec.spec = type;
10692       existing = match_mergeable_specialization (is_decl, &spec);
10693       if (constr)
10694 	/* We'll add these back later, if this is the new decl.  */
10695 	remove_constraints (inner);
10696 
10697       if (!existing)
10698 	; /* We'll add to the table once read.  */
10699       else if (mk & MK_tmpl_decl_mask)
10700 	{
10701 	  /* A declaration specialization.  */
10702 	  if (mk & MK_tmpl_tmpl_mask)
10703 	    existing = DECL_TI_TEMPLATE (existing);
10704 	}
10705       else
10706 	{
10707 	  /* A type specialization.  */
10708 	  if (mk & MK_tmpl_tmpl_mask)
10709 	    existing = CLASSTYPE_TI_TEMPLATE (existing);
10710 	  else
10711 	    existing = TYPE_NAME (existing);
10712 	}
10713     }
10714   else if (mk == MK_unique)
10715     kind = "unique";
10716   else
10717     {
10718       tree name = tree_node ();
10719 
10720       merge_key key;
10721       unsigned code = u ();
10722       key.ref_q = cp_ref_qualifier ((code >> 0) & 3);
10723       key.index = code >> 2;
10724 
10725       if (mk == MK_enum)
10726 	key.ret = tree_node ();
10727       else if (mk == MK_partial
10728 	       || ((mk == MK_named || mk == MK_friend_spec)
10729 		   && TREE_CODE (inner) == FUNCTION_DECL))
10730 	{
10731 	  key.ret = tree_node ();
10732 	  tree arg, *arg_ptr = &key.args;
10733 	  while ((arg = tree_node ())
10734 		 && arg != void_list_node
10735 		 && mk != MK_partial)
10736 	    {
10737 	      *arg_ptr = tree_cons (NULL_TREE, arg, NULL_TREE);
10738 	      arg_ptr = &TREE_CHAIN (*arg_ptr);
10739 	    }
10740 	  *arg_ptr = arg;
10741 	  key.constraints = tree_node ();
10742 	}
10743 
10744       if (get_overrun ())
10745 	return error_mark_node;
10746 
10747       if (mk < MK_indirect_lwm)
10748 	{
10749 	  DECL_NAME (decl) = name;
10750 	  DECL_CONTEXT (decl) = FROB_CONTEXT (container);
10751 	}
10752       DECL_NAME (inner) = DECL_NAME (decl);
10753       DECL_CONTEXT (inner) = DECL_CONTEXT (decl);
10754 
10755       if (mk == MK_partial)
10756 	{
10757 	  for (tree spec = DECL_TEMPLATE_SPECIALIZATIONS (key.ret);
10758 	       spec; spec = TREE_CHAIN (spec))
10759 	    {
10760 	      tree tmpl = TREE_VALUE (spec);
10761 	      if (template_args_equal (key.args,
10762 				       CLASSTYPE_TI_ARGS (TREE_TYPE (tmpl)))
10763 		  && cp_tree_equal (key.constraints,
10764 				    get_constraints
10765 				    (DECL_TEMPLATE_RESULT (tmpl))))
10766 		{
10767 		  existing = tmpl;
10768 		  break;
10769 		}
10770 	    }
10771 	}
10772       else
10773 	switch (TREE_CODE (container))
10774 	  {
10775 	  default:
10776 	    gcc_unreachable ();
10777 
10778 	  case NAMESPACE_DECL:
10779 	    if (mk == MK_attached)
10780 	      {
10781 		if (DECL_LANG_SPECIFIC (name)
10782 		    && VAR_OR_FUNCTION_DECL_P (name)
10783 		    && DECL_MODULE_ATTACHMENTS_P (name))
10784 		  if (auto *set = attached_table->get (name))
10785 		    if (key.index < set->length ())
10786 		      {
10787 			existing = (*set)[key.index];
10788 			if (existing)
10789 			  {
10790 			    gcc_checking_assert
10791 			      (DECL_IMPLICIT_TYPEDEF_P (existing));
10792 			    if (inner != decl)
10793 			      existing
10794 				= CLASSTYPE_TI_TEMPLATE (TREE_TYPE (existing));
10795 			  }
10796 		      }
10797 	      }
10798 	    else if (is_mod && !(state->is_module () || state->is_partition ()))
10799 	      kind = "unique";
10800 	    else
10801 	      {
10802 		gcc_checking_assert (mk == MK_named || mk == MK_enum);
10803 		tree mvec;
10804 		tree *vslot = mergeable_namespace_slots (container, name,
10805 							 !is_mod, &mvec);
10806 		existing = check_mergeable_decl (mk, decl, *vslot, key);
10807 		if (!existing)
10808 		  add_mergeable_namespace_entity (vslot, decl);
10809 		else
10810 		  {
10811 		    /* Note that we now have duplicates to deal with in
10812 		       name lookup.  */
10813 		    if (is_mod)
10814 		      BINDING_VECTOR_PARTITION_DUPS_P (mvec) = true;
10815 		    else
10816 		      BINDING_VECTOR_GLOBAL_DUPS_P (mvec) = true;
10817 		  }
10818 	      }
10819 	    break;
10820 
10821 	  case FUNCTION_DECL:
10822 	    // FIXME: What about a voldemort? how do we find what it
10823 	    // duplicates? Do we have to number vmorts relative to
10824 	    // their containing function?  But how would that work
10825 	    // when matching an in-TU declaration?
10826 	    kind = "unique";
10827 	    break;
10828 
10829 	  case TYPE_DECL:
10830 	    if (is_mod && !(state->is_module () || state->is_partition ())
10831 		/* Implicit member functions can come from
10832 		   anywhere.  */
10833 		&& !(DECL_ARTIFICIAL (decl)
10834 		     && TREE_CODE (decl) == FUNCTION_DECL
10835 		     && !DECL_THUNK_P (decl)))
10836 	      kind = "unique";
10837 	    else
10838 	      {
10839 		tree ctx = TREE_TYPE (container);
10840 
10841 		/* For some reason templated enumeral types are not marked
10842 		   as COMPLETE_TYPE_P, even though they have members.
10843 		   This may well be a bug elsewhere.  */
10844 		if (TREE_CODE (ctx) == ENUMERAL_TYPE)
10845 		  existing = find_enum_member (ctx, name);
10846 		else if (COMPLETE_TYPE_P (ctx))
10847 		  {
10848 		    switch (mk)
10849 		      {
10850 		      default:
10851 			gcc_unreachable ();
10852 
10853 		      case MK_named:
10854 			existing = lookup_class_binding (ctx, name);
10855 			if (existing)
10856 			  {
10857 			    tree inner = decl;
10858 			    if (TREE_CODE (inner) == TEMPLATE_DECL
10859 				&& !DECL_MEMBER_TEMPLATE_P (inner))
10860 			      inner = DECL_TEMPLATE_RESULT (inner);
10861 
10862 			    existing = check_mergeable_decl
10863 			      (mk, inner, existing, key);
10864 
10865 			    if (!existing && DECL_ALIAS_TEMPLATE_P (decl))
10866 			      {} // FIXME: Insert into specialization
10867 			    // tables, we'll need the arguments for that!
10868 			  }
10869 			break;
10870 
10871 		      case MK_field:
10872 			{
10873 			  unsigned ix = key.index;
10874 			  for (tree field = TYPE_FIELDS (ctx);
10875 			       field; field = DECL_CHAIN (field))
10876 			    {
10877 			      tree finner = STRIP_TEMPLATE (field);
10878 			      if (TREE_CODE (finner) == TREE_CODE (inner))
10879 				if (!ix--)
10880 				  {
10881 				    existing = field;
10882 				    break;
10883 				  }
10884 			    }
10885 			}
10886 			break;
10887 
10888 		      case MK_vtable:
10889 			{
10890 			  unsigned ix = key.index;
10891 			  for (tree vtable = CLASSTYPE_VTABLES (ctx);
10892 			       vtable; vtable = DECL_CHAIN (vtable))
10893 			    if (!ix--)
10894 			      {
10895 				existing = vtable;
10896 				break;
10897 			      }
10898 			}
10899 			break;
10900 
10901 		      case MK_as_base:
10902 			{
10903 			  tree as_base = CLASSTYPE_AS_BASE (ctx);
10904 			  if (as_base && as_base != ctx)
10905 			    existing = TYPE_NAME (as_base);
10906 			}
10907 			break;
10908 
10909 		      case MK_local_friend:
10910 			{
10911 			  unsigned ix = key.index;
10912 			  for (tree decls = CLASSTYPE_DECL_LIST (ctx);
10913 			       decls; decls = TREE_CHAIN (decls))
10914 			    if (!TREE_PURPOSE (decls) && !ix--)
10915 			      {
10916 				existing
10917 				  = friend_from_decl_list (TREE_VALUE (decls));
10918 				break;
10919 			      }
10920 			}
10921 			break;
10922 		      }
10923 
10924 		    if (existing && mk < MK_indirect_lwm && mk != MK_partial
10925 			&& TREE_CODE (decl) == TEMPLATE_DECL
10926 			&& !DECL_MEMBER_TEMPLATE_P (decl))
10927 		      {
10928 			tree ti;
10929 			if (DECL_IMPLICIT_TYPEDEF_P (existing))
10930 			  ti = TYPE_TEMPLATE_INFO (TREE_TYPE (existing));
10931 			else
10932 			  ti = DECL_TEMPLATE_INFO (existing);
10933 			existing = TI_TEMPLATE (ti);
10934 		      }
10935 		  }
10936 	      }
10937 	  }
10938     }
10939 
10940   dump (dumper::MERGE)
10941     && dump ("Read:%d's %s merge key (%s) %C:%N", tag, merge_kind_name[mk],
10942 	     existing ? "matched" : kind, TREE_CODE (decl), decl);
10943 
10944   return existing;
10945 }
10946 
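/* Write a by-reference key for BINFO: the dominating type, plus
   BINFO's index along that type's binfo chain.  */
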
10947 void
10948 trees_out::binfo_mergeable (tree binfo)
10949 {
10950   tree dom = binfo;
10951   while (tree parent = BINFO_INHERITANCE_CHAIN (dom))
10952     dom = parent;
10953   tree type = BINFO_TYPE (dom);
10954   gcc_checking_assert (TYPE_BINFO (type) == dom);
10955   tree_node (type);
10956   if (streaming_p ())
10957     {
10958       unsigned ix = 0;
10959       for (; dom != binfo; dom = TREE_CHAIN (dom))
10960 	ix++;
10961       u (ix);
10962     }
10963 }
10964 
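/* Read the key written above: set *TYPE to the dominating type and
   return the index of the wanted binfo on its chain.  */
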
10965 unsigned
10966 trees_in::binfo_mergeable (tree *type)
10967 {
10968   *type = tree_node ();
10969   return u ();
10970 }
10971 
10972 /* DECL is a just-streamed mergeable decl that should match EXISTING.  Check
10973    it does and issue an appropriate diagnostic if not.  Merge any
10974    bits from DECL to EXISTING.  This is stricter matching than
10975    decls_match, because we can rely on ODR-sameness, and we cannot use
10976    decls_match because it can cause instantiations of constraints.  */
10977 
10978 bool
10979 trees_in::is_matching_decl (tree existing, tree decl, bool is_typedef)
10980 {
10981   // FIXME: We should probably do some duplicate decl-like stuff here
10982   // (beware, default parms should be the same?)  Can we just call
10983   // duplicate_decls and teach it how to handle the module-specific
10984   // permitted/required duplications?
10985 
10986   // We know at this point that the decls have matched by key, so we
10987   // can elide some of the checking
10988   gcc_checking_assert (TREE_CODE (existing) == TREE_CODE (decl));
10989 
10990   tree d_inner = decl;
10991   tree e_inner = existing;
10992   if (TREE_CODE (decl) == TEMPLATE_DECL)
10993     {
10994       d_inner = DECL_TEMPLATE_RESULT (d_inner);
10995       e_inner = DECL_TEMPLATE_RESULT (e_inner);
10996       gcc_checking_assert (TREE_CODE (e_inner) == TREE_CODE (d_inner));
10997     }
10998 
10999   if (TREE_CODE (d_inner) == FUNCTION_DECL)
11000     {
11001       tree e_ret = fndecl_declared_return_type (existing);
11002       tree d_ret = fndecl_declared_return_type (decl);
11003 
11004       if (decl != d_inner && DECL_NAME (d_inner) == fun_identifier
11005 	  && LAMBDA_TYPE_P (DECL_CONTEXT (d_inner)))
11006 	/* This has a recursive type that will compare different.  */;
11007       else if (!same_type_p (d_ret, e_ret))
11008 	goto mismatch;
11009 
11010       tree e_type = TREE_TYPE (e_inner);
11011       tree d_type = TREE_TYPE (d_inner);
11012 
11013       if (DECL_EXTERN_C_P (d_inner) != DECL_EXTERN_C_P (e_inner))
11014 	goto mismatch;
11015 
11016       for (tree e_args = TYPE_ARG_TYPES (e_type),
11017 	     d_args = TYPE_ARG_TYPES (d_type);
11018 	   e_args != d_args && (e_args || d_args);
11019 	   e_args = TREE_CHAIN (e_args), d_args = TREE_CHAIN (d_args))
11020 	{
11021 	  if (!(e_args && d_args))
11022 	    goto mismatch;
11023 
11024 	  if (!same_type_p (TREE_VALUE (d_args), TREE_VALUE (e_args)))
11025 	    goto mismatch;
11026 
11027 	  // FIXME: Check default values
11028 	}
11029 
11030       /* If EXISTING has an undeduced or uninstantiated exception
11031 	 specification, but DECL does not, propagate the exception
11032 	 specification.  Otherwise we end up asserting or trying to
11033 	 instantiate it in the middle of loading.   */
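      /* Illustration (editorial, made-up names): such a deferred
	 specification arises from e.g.

	   template<typename T> struct S {
	     void f () noexcept (sizeof (T) > 1);  // deferred until needed
	   };

	 where one TU has instantiated the operand and another has not.  */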
11034       tree e_spec = TYPE_RAISES_EXCEPTIONS (e_type);
11035       tree d_spec = TYPE_RAISES_EXCEPTIONS (d_type);
11036       if (DEFERRED_NOEXCEPT_SPEC_P (e_spec))
11037 	{
11038 	  if (!DEFERRED_NOEXCEPT_SPEC_P (d_spec)
11039 	      || (UNEVALUATED_NOEXCEPT_SPEC_P (e_spec)
11040 		  && !UNEVALUATED_NOEXCEPT_SPEC_P (d_spec)))
11041 	    {
11042 	      dump (dumper::MERGE)
11043 		&& dump ("Propagating instantiated noexcept to %N", existing);
11044 	      TREE_TYPE (existing) = d_type;
11045 
11046 	      /* Propagate to existing clones.  */
11047 	      tree clone;
11048 	      FOR_EACH_CLONE (clone, existing)
11049 		{
11050 		  if (TREE_TYPE (clone) == e_type)
11051 		    TREE_TYPE (clone) = d_type;
11052 		  else
11053 		    TREE_TYPE (clone)
11054 		      = build_exception_variant (TREE_TYPE (clone), d_spec);
11055 		}
11056 	    }
11057 	}
11058       else if (!DEFERRED_NOEXCEPT_SPEC_P (d_spec)
11059 	       && !comp_except_specs (d_spec, e_spec, ce_type))
11060 	goto mismatch;
11061     }
11062   else if (is_typedef)
11063     {
11064       if (!DECL_ORIGINAL_TYPE (e_inner)
11065 	  || !same_type_p (DECL_ORIGINAL_TYPE (d_inner),
11066 			   DECL_ORIGINAL_TYPE (e_inner)))
11067 	goto mismatch;
11068     }
11069   /* Using cp_tree_equal because we can meet TYPE_ARGUMENT_PACKs
11070      here. I suspect the entities that directly do that are things
11071      that shouldn't go to duplicate_decls (FIELD_DECLs etc).   */
11072   else if (!cp_tree_equal (TREE_TYPE (decl), TREE_TYPE (existing)))
11073     {
11074     mismatch:
11075       if (DECL_IS_UNDECLARED_BUILTIN (existing))
11076 	/* Just like duplicate_decls, presume the user knows what
11077 	   they're doing in overriding a builtin.   */
11078 	TREE_TYPE (existing) = TREE_TYPE (decl);
11079       else
11080 	{
11081 	  // FIXME:QOI Might be template specialization from a module,
11082 	  // not necessarily global module
11083 	  error_at (DECL_SOURCE_LOCATION (decl),
11084 		    "conflicting global module declaration %#qD", decl);
11085 	  inform (DECL_SOURCE_LOCATION (existing),
11086 		  "existing declaration %#qD", existing);
11087 	  return false;
11088 	}
11089     }
11090 
11091   if (DECL_IS_UNDECLARED_BUILTIN (existing)
11092       && !DECL_IS_UNDECLARED_BUILTIN (decl))
11093     {
11094       /* We're matching a builtin that the user has yet to declare.
11095 	 We are the one!  This is very much duplicate-decl
11096 	 shenanigans. */
11097       DECL_SOURCE_LOCATION (existing) = DECL_SOURCE_LOCATION (decl);
11098       if (TREE_CODE (decl) != TYPE_DECL)
11099 	{
11100 	  /* Propagate exceptions etc.  */
11101 	  TREE_TYPE (existing) = TREE_TYPE (decl);
11102 	  TREE_NOTHROW (existing) = TREE_NOTHROW (decl);
11103 	}
11104       /* This is actually an import! */
11105       DECL_MODULE_IMPORT_P (existing) = true;
11106 
11107       /* Yay, sliced!  */
11108       existing->base = decl->base;
11109 
11110       if (TREE_CODE (decl) == FUNCTION_DECL)
11111 	{
11112 	  /* Ew :(  */
11113 	  memcpy (&existing->decl_common.size,
11114 		  &decl->decl_common.size,
11115 		  (offsetof (tree_decl_common, pt_uid)
11116 		   - offsetof (tree_decl_common, size)));
11117 	  auto bltin_class = DECL_BUILT_IN_CLASS (decl);
11118 	  existing->function_decl.built_in_class = bltin_class;
11119 	  auto fncode = DECL_UNCHECKED_FUNCTION_CODE (decl);
11120 	  DECL_UNCHECKED_FUNCTION_CODE (existing) = fncode;
11121 	  if (existing->function_decl.built_in_class == BUILT_IN_NORMAL)
11122 	    {
11123 	      if (builtin_decl_explicit_p (built_in_function (fncode)))
11124 		switch (fncode)
11125 		  {
11126 		  case BUILT_IN_STPCPY:
11127 		    set_builtin_decl_implicit_p
11128 		      (built_in_function (fncode), true);
11129 		    break;
11130 		  default:
11131 		    set_builtin_decl_declared_p
11132 		      (built_in_function (fncode), true);
11133 		    break;
11134 		  }
11135 	      copy_attributes_to_builtin (decl);
11136 	    }
11137 	}
11138     }
11139 
11140   if (VAR_OR_FUNCTION_DECL_P (decl)
11141       && DECL_TEMPLATE_INSTANTIATED (decl))
11142     /* Don't instantiate again!  */
11143     DECL_TEMPLATE_INSTANTIATED (existing) = true;
11144 
11145   if (TREE_CODE (d_inner) == FUNCTION_DECL
11146       && DECL_DECLARED_INLINE_P (d_inner))
11147     DECL_DECLARED_INLINE_P (e_inner) = true;
11148   if (!DECL_EXTERNAL (d_inner))
11149     DECL_EXTERNAL (e_inner) = false;
11150 
11151   // FIXME: Check default tmpl and fn parms here
11152 
11153   return true;
11154 }
11155 
11156 /* FN is an implicit member function that we've discovered is new to
11157    the class.  Add it to the TYPE_FIELDS chain and the method vector.
11158    Reset the appropriate classtype lazy flag.   */
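/* For illustration (an editorial sketch), the prototypes distinguished
   below are those of the synthesized members of e.g.

     struct A { };
     // A ()                       default ctor
     // A (A const &)              copy ctor
     // A (A &&)                   move ctor
     // ~A ()                      dtor
     // A &operator= (A const &)   copy assign
     // A &operator= (A &&)        move assign

   where another TU triggered their synthesis while this TU still has
   only the lazy flags set.  */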
11159 
11160 bool
11161 trees_in::install_implicit_member (tree fn)
11162 {
11163   tree ctx = DECL_CONTEXT (fn);
11164   tree name = DECL_NAME (fn);
11165   /* We know these are synthesized, so the set of expected prototypes
11166      is quite restricted.  We're not validating correctness, just
11167      distinguishing between the small set of possibilities.  */
11168   tree parm_type = TREE_VALUE (FUNCTION_FIRST_USER_PARMTYPE (fn));
11169   if (IDENTIFIER_CTOR_P (name))
11170     {
11171       if (CLASSTYPE_LAZY_DEFAULT_CTOR (ctx)
11172 	  && VOID_TYPE_P (parm_type))
11173 	CLASSTYPE_LAZY_DEFAULT_CTOR (ctx) = false;
11174       else if (!TYPE_REF_P (parm_type))
11175 	return false;
11176       else if (CLASSTYPE_LAZY_COPY_CTOR (ctx)
11177 	       && !TYPE_REF_IS_RVALUE (parm_type))
11178 	CLASSTYPE_LAZY_COPY_CTOR (ctx) = false;
11179       else if (CLASSTYPE_LAZY_MOVE_CTOR (ctx))
11180 	CLASSTYPE_LAZY_MOVE_CTOR (ctx) = false;
11181       else
11182 	return false;
11183     }
11184   else if (IDENTIFIER_DTOR_P (name))
11185     {
11186       if (CLASSTYPE_LAZY_DESTRUCTOR (ctx))
11187 	CLASSTYPE_LAZY_DESTRUCTOR (ctx) = false;
11188       else
11189 	return false;
11190       if (DECL_VIRTUAL_P (fn))
11191 	/* A virtual dtor should have been created when the class
11192 	   became complete.  */
11193 	return false;
11194     }
11195   else if (name == assign_op_identifier)
11196     {
11197       if (!TYPE_REF_P (parm_type))
11198 	return false;
11199       else if (CLASSTYPE_LAZY_COPY_ASSIGN (ctx)
11200 	       && !TYPE_REF_IS_RVALUE (parm_type))
11201 	CLASSTYPE_LAZY_COPY_ASSIGN (ctx) = false;
11202       else if (CLASSTYPE_LAZY_MOVE_ASSIGN (ctx))
11203 	CLASSTYPE_LAZY_MOVE_ASSIGN (ctx) = false;
11204       else
11205 	return false;
11206     }
11207   else
11208     return false;
11209 
11210   dump (dumper::MERGE) && dump ("Adding implicit member %N", fn);
11211 
11212   DECL_CHAIN (fn) = TYPE_FIELDS (ctx);
11213   TYPE_FIELDS (ctx) = fn;
11214 
11215   add_method (ctx, fn, false);
11216 
11217   /* Propagate TYPE_FIELDS.  */
11218   fixup_type_variants (ctx);
11219 
11220   return true;
11221 }
11222 
11223 /* Return non-zero if DECL has a definition that would be interesting to
11224    write out.  */
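/* A few illustrative cases (an editorial sketch, not exhaustive):

     inline int f () { return 0; }   // yes: inline function body
     static int g () { return 0; }   // yes, for a GM or header-unit static
     struct X { int i; };            // yes: complete class
     enum E { e };                   // yes: enumerators present
     extern int v;                   // no: not initialized here

   The individual cases are below.  */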
11225 
11226 static bool
11227 has_definition (tree decl)
11228 {
11229   bool is_tmpl = TREE_CODE (decl) == TEMPLATE_DECL;
11230   if (is_tmpl)
11231     decl = DECL_TEMPLATE_RESULT (decl);
11232 
11233   switch (TREE_CODE (decl))
11234     {
11235     default:
11236       break;
11237 
11238     case FUNCTION_DECL:
11239       if (!DECL_SAVED_TREE (decl))
11240 	/* Not defined.  */
11241 	break;
11242 
11243       if (DECL_DECLARED_INLINE_P (decl))
11244 	return true;
11245 
11246       if (DECL_THIS_STATIC (decl)
11247 	  && (header_module_p ()
11248 	      || (!DECL_LANG_SPECIFIC (decl) || !DECL_MODULE_PURVIEW_P (decl))))
11249 	/* GM static function.  */
11250 	return true;
11251 
11252       if (DECL_TEMPLATE_INFO (decl))
11253 	{
11254 	  int use_tpl = DECL_USE_TEMPLATE (decl);
11255 
11256 	  // FIXME: Partial specializations have definitions too.
11257 	  if (use_tpl < 2)
11258 	    return true;
11259 	}
11260       break;
11261 
11262     case TYPE_DECL:
11263       {
11264 	tree type = TREE_TYPE (decl);
11265 	if (type == TYPE_MAIN_VARIANT (type)
11266 	    && decl == TYPE_NAME (type)
11267 	    && (TREE_CODE (type) == ENUMERAL_TYPE
11268 		? TYPE_VALUES (type) : TYPE_FIELDS (type)))
11269 	  return true;
11270       }
11271       break;
11272 
11273     case VAR_DECL:
11274       if (DECL_LANG_SPECIFIC (decl)
11275 	  && DECL_TEMPLATE_INFO (decl)
11276 	  && DECL_USE_TEMPLATE (decl) < 2)
11277 	return DECL_INITIAL (decl);
11278       else
11279 	{
11280 	  if (!DECL_INITIALIZED_P (decl))
11281 	    return false;
11282 
11283 	  if (header_module_p ()
11284 	      || (!DECL_LANG_SPECIFIC (decl) || !DECL_MODULE_PURVIEW_P (decl)))
11285 	    /* GM static variable.  */
11286 	    return true;
11287 
11288 	  if (!TREE_CONSTANT (decl))
11289 	    return false;
11290 
11291 	  return true;
11292 	}
11293       break;
11294 
11295     case CONCEPT_DECL:
11296       if (DECL_INITIAL (decl))
11297 	return true;
11298 
11299       break;
11300     }
11301 
11302   return false;
11303 }
11304 
11305 uintptr_t *
11306 trees_in::find_duplicate (tree existing)
11307 {
11308   if (!duplicates)
11309     return NULL;
11310 
11311   return duplicates->get (existing);
11312 }
11313 
11314 /* We're starting to read a duplicate DECL.  EXISTING is the already
11315    known node.  */
11316 
11317 void
11318 trees_in::register_duplicate (tree decl, tree existing)
11319 {
11320   if (!duplicates)
11321     duplicates = new duplicate_hash_map (40);
11322 
11323   bool existed;
11324   uintptr_t &slot = duplicates->get_or_insert (existing, &existed);
11325   gcc_checking_assert (!existed);
11326   slot = reinterpret_cast<uintptr_t> (decl);
11327 }
11328 
11329 /* We've read a definition of MAYBE_EXISTING.  If not a duplicate,
11330    return MAYBE_EXISTING (into which the definition should be
11331    installed).  Otherwise return NULL if already known bad, or the
11332    duplicate we read (for ODR checking, or extracting additional merge
11333    information).  */
11334 
11335 tree
11336 trees_in::odr_duplicate (tree maybe_existing, bool has_defn)
11337 {
11338   tree res = NULL_TREE;
11339 
11340   if (uintptr_t *dup = find_duplicate (maybe_existing))
11341     {
11342       if (!(*dup & 1))
11343 	res = reinterpret_cast<tree> (*dup);
11344     }
11345   else
11346     res = maybe_existing;
11347 
11348   assert_definition (maybe_existing, res && !has_defn);
11349 
11350   // FIXME: We probably need to return the template, so that the
11351   // template header can be checked?
11352   return res ? STRIP_TEMPLATE (res) : NULL_TREE;
11353 }
11354 
11355 /* The following writer functions rely on the current behaviour of
11356    depset::hash::add_dependency making the decl and defn depset nodes
11357    depend on eachother.  That way we don't have to worry about seeding
11358    depend on each other.  That way we don't have to worry about seeding
11359    the tree map with named decls that cannot be looked up by name (i.e.
11360    be in the same cluster, which is what we want.  */
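/* For instance (a hypothetical example):

     export template<typename T> T identity (T x) { return x; }

   neither the template parameter T nor the parm x can be found by a
   namespace-scope lookup, so keeping the decl and its definition in one
   cluster is what makes streaming them safe.  */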
11361 
11362 void
11363 trees_out::write_function_def (tree decl)
11364 {
11365   tree_node (DECL_RESULT (decl));
11366   tree_node (DECL_INITIAL (decl));
11367   tree_node (DECL_SAVED_TREE (decl));
11368   tree_node (DECL_FRIEND_CONTEXT (decl));
11369 
11370   constexpr_fundef *cexpr = retrieve_constexpr_fundef (decl);
11371   int tag = 0;
11372   if (cexpr)
11373     {
11374       if (cexpr->result == error_mark_node)
11375 	/* We'll stream the RESULT_DECL naturally during the
11376 	   serialization.  We never need to fish it back again, so
11377 	   that's ok.  */
11378 	tag = 0;
11379       else
11380 	tag = insert (cexpr->result);
11381     }
11382   if (streaming_p ())
11383     {
11384       i (tag);
11385       if (tag)
11386 	dump (dumper::TREE)
11387 	  && dump ("Constexpr:%d result %N", tag, cexpr->result);
11388     }
11389   if (tag)
11390     {
11391       unsigned ix = 0;
11392       for (tree parm = cexpr->parms; parm; parm = DECL_CHAIN (parm), ix++)
11393 	{
11394 	  tag = insert (parm);
11395 	  if (streaming_p ())
11396 	    dump (dumper::TREE)
11397 	      && dump ("Constexpr:%d parm:%u %N", tag, ix, parm);
11398 	}
11399       tree_node (cexpr->body);
11400     }
11401 
11402   if (streaming_p ())
11403     {
11404       unsigned flags = 0;
11405 
11406       if (DECL_NOT_REALLY_EXTERN (decl))
11407 	flags |= 1;
11408 
11409       u (flags);
11410     }
11411 }
11412 
11413 void
11414 trees_out::mark_function_def (tree)
11415 {
11416 }
11417 
11418 bool
11419 trees_in::read_function_def (tree decl, tree maybe_template)
11420 {
11421   dump () && dump ("Reading function definition %N", decl);
11422   tree result = tree_node ();
11423   tree initial = tree_node ();
11424   tree saved = tree_node ();
11425   tree context = tree_node ();
11426   constexpr_fundef cexpr;
11427 
11428   tree maybe_dup = odr_duplicate (maybe_template, DECL_SAVED_TREE (decl));
11429   bool installing = maybe_dup && !DECL_SAVED_TREE (decl);
11430 
11431   if (int wtag = i ())
11432     {
11433       int tag = 1;
11434       cexpr.result = error_mark_node;
11435 
11436       cexpr.result = copy_decl (result);
11437       tag = insert (cexpr.result);
11438 
11439       if (wtag != tag)
11440 	set_overrun ();
11441       dump (dumper::TREE)
11442 	&& dump ("Constexpr:%d result %N", tag, cexpr.result);
11443 
11444       cexpr.parms = NULL_TREE;
11445       tree *chain = &cexpr.parms;
11446       unsigned ix = 0;
11447       for (tree parm = DECL_ARGUMENTS (maybe_dup ? maybe_dup : decl);
11448 	   parm; parm = DECL_CHAIN (parm), ix++)
11449 	{
11450 	  tree p = copy_decl (parm);
11451 	  tag = insert (p);
11452 	  dump (dumper::TREE)
11453 	    && dump ("Constexpr:%d parm:%u %N", tag, ix, p);
11454 	  *chain = p;
11455 	  chain = &DECL_CHAIN (p);
11456 	}
11457       cexpr.body = tree_node ();
11458       cexpr.decl = decl;
11459     }
11460   else
11461     cexpr.decl = NULL_TREE;
11462 
11463   unsigned flags = u ();
11464 
11465   if (get_overrun ())
11466     return false;
11467 
11468   if (installing)
11469     {
11470       DECL_NOT_REALLY_EXTERN (decl) = flags & 1;
11471       DECL_RESULT (decl) = result;
11472       DECL_INITIAL (decl) = initial;
11473       DECL_SAVED_TREE (decl) = saved;
11474       if (maybe_dup)
11475 	DECL_ARGUMENTS (decl) = DECL_ARGUMENTS (maybe_dup);
11476 
11477       if (context)
11478 	SET_DECL_FRIEND_CONTEXT (decl, context);
11479       if (cexpr.decl)
11480 	register_constexpr_fundef (cexpr);
11481       post_process (maybe_template);
11482     }
11483   else if (maybe_dup)
11484     {
11485       // FIXME:QOI Check matching defn
11486     }
11487 
11488   return true;
11489 }
11490 
11491 /* Also for CONCEPT_DECLs.  */
11492 
11493 void
11494 trees_out::write_var_def (tree decl)
11495 {
11496   tree init = DECL_INITIAL (decl);
11497   tree_node (init);
11498   if (!init)
11499     {
11500       tree dyn_init = NULL_TREE;
11501 
11502       if (DECL_NONTRIVIALLY_INITIALIZED_P (decl))
11503 	{
11504 	  dyn_init = value_member (decl,
11505 				   CP_DECL_THREAD_LOCAL_P (decl)
11506 				   ? tls_aggregates : static_aggregates);
11507 	  gcc_checking_assert (dyn_init);
11508 	  /* Mark it so write_inits knows this is needed.  */
11509 	  TREE_LANG_FLAG_0 (dyn_init) = true;
11510 	  dyn_init = TREE_PURPOSE (dyn_init);
11511 	}
11512       tree_node (dyn_init);
11513     }
11514 }
11515 
11516 void
11517 trees_out::mark_var_def (tree)
11518 {
11519 }
11520 
11521 bool
11522 trees_in::read_var_def (tree decl, tree maybe_template)
11523 {
11524   /* Do not mark the virtual table entries as used.  */
11525   bool vtable = TREE_CODE (decl) == VAR_DECL && DECL_VTABLE_OR_VTT_P (decl);
11526   unused += vtable;
11527   tree init = tree_node ();
11528   tree dyn_init = init ? NULL_TREE : tree_node ();
11529   unused -= vtable;
11530 
11531   if (get_overrun ())
11532     return false;
11533 
11534   bool initialized = (VAR_P (decl) ? bool (DECL_INITIALIZED_P (decl))
11535 		      : bool (DECL_INITIAL (decl)));
11536   tree maybe_dup = odr_duplicate (maybe_template, initialized);
11537   bool installing = maybe_dup && !initialized;
11538   if (installing)
11539     {
11540       if (DECL_EXTERNAL (decl))
11541 	DECL_NOT_REALLY_EXTERN (decl) = true;
11542       if (VAR_P (decl))
11543 	{
11544 	  DECL_INITIALIZED_P (decl) = true;
11545 	  if (maybe_dup && DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (maybe_dup))
11546 	    DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl) = true;
11547 	}
11548       DECL_INITIAL (decl) = init;
11549       if (!dyn_init)
11550 	;
11551       else if (CP_DECL_THREAD_LOCAL_P (decl))
11552 	tls_aggregates = tree_cons (dyn_init, decl, tls_aggregates);
11553       else
11554 	static_aggregates = tree_cons (dyn_init, decl, static_aggregates);
11555     }
11556   else if (maybe_dup)
11557     {
11558       // FIXME:QOI Check matching defn
11559     }
11560 
11561   return true;
11562 }
11563 
11564 /* If MEMBER doesn't have an independent life outside the class,
11565    return it (or its TEMPLATE_DECL).  Otherwise NULL.  */
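/* Illustration (editorial sketch):

     struct A {
       int i;                            // FIELD_DECL: owned by A
       template<typename T> void f (T);  // member template: owned by A
     };
     template<> void A::f<int> (int);    // specialization: not owned

   (the member template is returned as its TEMPLATE_DECL).  */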
11566 
11567 static tree
11568 member_owned_by_class (tree member)
11569 {
11570   gcc_assert (DECL_P (member));
11571 
11572   /* Clones are owned by their origin.  */
11573   if (DECL_CLONED_FUNCTION_P (member))
11574     return NULL;
11575 
11576   if (TREE_CODE (member) == FIELD_DECL)
11577     /* FIELD_DECLS can have template info in some cases.  We always
11578        want the FIELD_DECL though, as there's never a TEMPLATE_DECL
11579        wrapping them.  */
11580     return member;
11581 
11582   int use_tpl = -1;
11583   if (tree ti = node_template_info (member, use_tpl))
11584     {
11585       // FIXME: Don't bail on things that CANNOT have their own
11586       // template header.  No, make sure they're in the same cluster.
11587       if (use_tpl > 0)
11588 	return NULL_TREE;
11589 
11590       if (DECL_TEMPLATE_RESULT (TI_TEMPLATE (ti)) == member)
11591 	member = TI_TEMPLATE (ti);
11592     }
11593   return member;
11594 }
11595 
11596 void
11597 trees_out::write_class_def (tree defn)
11598 {
11599   gcc_assert (DECL_P (defn));
11600   if (streaming_p ())
11601     dump () && dump ("Writing class definition %N", defn);
11602 
11603   tree type = TREE_TYPE (defn);
11604   tree_node (TYPE_SIZE (type));
11605   tree_node (TYPE_SIZE_UNIT (type));
11606   tree_node (TYPE_VFIELD (type));
11607   tree_node (TYPE_BINFO (type));
11608 
11609   vec_chained_decls (TYPE_FIELDS (type));
11610 
11611   /* Every class but __as_base has a type-specific.  */
11612   /* Every class but __as_base has a TYPE_LANG_SPECIFIC.  */
11613 
11614   if (TYPE_LANG_SPECIFIC (type))
11615     {
11616       {
11617 	vec<tree, va_gc> *v = CLASSTYPE_MEMBER_VEC (type);
11618 	if (!v)
11619 	  {
11620 	    gcc_checking_assert (!streaming_p ());
11621 	    /* Force a class vector.  */
11622 	    v = set_class_bindings (type, -1);
11623 	    gcc_checking_assert (v);
11624 	  }
11625 
11626 	unsigned len = v->length ();
11627 	if (streaming_p ())
11628 	  u (len);
11629 	for (unsigned ix = 0; ix != len; ix++)
11630 	  {
11631 	    tree m = (*v)[ix];
11632 	    if (TREE_CODE (m) == TYPE_DECL
11633 		&& DECL_ARTIFICIAL (m)
11634 		&& TYPE_STUB_DECL (TREE_TYPE (m)) == m)
11635 	      /* This is a using-decl for a type, or an anonymous
11636 		 struct (maybe with a typedef name).  Write the type.  */
11637 	      m = TREE_TYPE (m);
11638 	    tree_node (m);
11639 	  }
11640       }
11641       tree_node (CLASSTYPE_LAMBDA_EXPR (type));
11642 
11643       /* TYPE_CONTAINS_VPTR_P looks at the vbase vector, which the
11644 	 reader won't know at this point.  */
11645       int has_vptr = TYPE_CONTAINS_VPTR_P (type);
11646 
11647       if (streaming_p ())
11648 	{
11649 	  unsigned nvbases = vec_safe_length (CLASSTYPE_VBASECLASSES (type));
11650 	  u (nvbases);
11651 	  i (has_vptr);
11652 	}
11653 
11654       if (has_vptr)
11655 	{
11656 	  tree_vec (CLASSTYPE_PURE_VIRTUALS (type));
11657 	  tree_pair_vec (CLASSTYPE_VCALL_INDICES (type));
11658 	  tree_node (CLASSTYPE_KEY_METHOD (type));
11659 	}
11660     }
11661 
11662   if (TYPE_LANG_SPECIFIC (type))
11663     {
11664       tree_node (CLASSTYPE_PRIMARY_BINFO (type));
11665 
11666       tree as_base = CLASSTYPE_AS_BASE (type);
11667       if (as_base)
11668 	as_base = TYPE_NAME (as_base);
11669       tree_node (as_base);
11670 
11671       /* Write the vtables.  */
11672       tree vtables = CLASSTYPE_VTABLES (type);
11673       vec_chained_decls (vtables);
11674       for (; vtables; vtables = TREE_CHAIN (vtables))
11675 	write_definition (vtables);
11676 
11677       /* Write the friend classes.  */
11678       tree_list (CLASSTYPE_FRIEND_CLASSES (type), false);
11679 
11680       /* Write the friend functions.  */
11681       for (tree friends = DECL_FRIENDLIST (defn);
11682 	   friends; friends = TREE_CHAIN (friends))
11683 	{
11684 	  /* Name of these friends.  */
11685 	  tree_node (TREE_PURPOSE (friends));
11686 	  tree_list (TREE_VALUE (friends), false);
11687 	}
11688       /* End of friend fns.  */
11689       tree_node (NULL_TREE);
11690 
11691       /* Write the decl list.  */
11692       tree_list (CLASSTYPE_DECL_LIST (type), true);
11693 
11694       if (TYPE_CONTAINS_VPTR_P (type))
11695 	{
11696 	  /* Write the thunks.  */
11697 	  for (tree decls = TYPE_FIELDS (type);
11698 	       decls; decls = DECL_CHAIN (decls))
11699 	    if (TREE_CODE (decls) == FUNCTION_DECL
11700 		&& DECL_VIRTUAL_P (decls)
11701 		&& DECL_THUNKS (decls))
11702 	      {
11703 		tree_node (decls);
11704 		/* Thunks are always unique, so chaining is ok.  */
11705 		chained_decls (DECL_THUNKS (decls));
11706 	      }
11707 	  tree_node (NULL_TREE);
11708 	}
11709     }
11710 }
11711 
11712 void
11713 trees_out::mark_class_member (tree member, bool do_defn)
11714 {
11715   gcc_assert (DECL_P (member));
11716 
11717   member = member_owned_by_class (member);
11718   if (member)
11719     mark_declaration (member, do_defn && has_definition (member));
11720 }
11721 
11722 void
11723 trees_out::mark_class_def (tree defn)
11724 {
11725   gcc_assert (DECL_P (defn));
11726   tree type = TREE_TYPE (defn);
11727   /* Mark the class members that are not type-decls and cannot have
11728      independent definitions.  */
11729   for (tree member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
11730     if (TREE_CODE (member) == FIELD_DECL
11731 	|| TREE_CODE (member) == USING_DECL
11732 	/* A cloned enum-decl from 'using enum unrelated;'   */
11733 	|| (TREE_CODE (member) == CONST_DECL
11734 	    && DECL_CONTEXT (member) == type))
11735       {
11736 	mark_class_member (member);
11737 	if (TREE_CODE (member) == FIELD_DECL)
11738 	  if (tree repr = DECL_BIT_FIELD_REPRESENTATIVE (member))
11739 	    mark_declaration (repr, false);
11740       }
11741 
11742   /* Mark the binfo hierarchy.  */
11743   for (tree child = TYPE_BINFO (type); child; child = TREE_CHAIN (child))
11744     mark_by_value (child);
11745 
11746   if (TYPE_LANG_SPECIFIC (type))
11747     {
11748       for (tree vtable = CLASSTYPE_VTABLES (type);
11749 	   vtable; vtable = TREE_CHAIN (vtable))
11750 	mark_declaration (vtable, true);
11751 
11752       if (TYPE_CONTAINS_VPTR_P (type))
11753 	/* Mark the thunks; they belong to the class definition,
11754 	   /not/ the thunked-to function.  */
11755 	for (tree decls = TYPE_FIELDS (type);
11756 	     decls; decls = DECL_CHAIN (decls))
11757 	  if (TREE_CODE (decls) == FUNCTION_DECL)
11758 	    for (tree thunks = DECL_THUNKS (decls);
11759 		 thunks; thunks = DECL_CHAIN (thunks))
11760 	      mark_declaration (thunks, false);
11761     }
11762 }
11763 
11764 /* Nop sorting, needed for resorting the member vec.  */
11765 
11766 static void
11767 nop (void *, void *, void *)
11768 {
11769 }
11770 
11771 bool
11772 trees_in::read_class_def (tree defn, tree maybe_template)
11773 {
11774   gcc_assert (DECL_P (defn));
11775   dump () && dump ("Reading class definition %N", defn);
11776   tree type = TREE_TYPE (defn);
11777   tree size = tree_node ();
11778   tree size_unit = tree_node ();
11779   tree vfield = tree_node ();
11780   tree binfo = tree_node ();
11781   vec<tree, va_gc> *vbase_vec = NULL;
11782   vec<tree, va_gc> *member_vec = NULL;
11783   vec<tree, va_gc> *pure_virts = NULL;
11784   vec<tree_pair_s, va_gc> *vcall_indices = NULL;
11785   tree key_method = NULL_TREE;
11786   tree lambda = NULL_TREE;
11787 
11788   /* Read the fields.  */
11789   vec<tree, va_heap> *fields = vec_chained_decls ();
11790 
11791   if (TYPE_LANG_SPECIFIC (type))
11792     {
11793       if (unsigned len = u ())
11794 	{
11795 	  vec_alloc (member_vec, len);
11796 	  for (unsigned ix = 0; ix != len; ix++)
11797 	    {
11798 	      tree m = tree_node ();
11799 	      if (get_overrun ())
11800 		break;
11801 	      if (TYPE_P (m))
11802 		m = TYPE_STUB_DECL (m);
11803 	      member_vec->quick_push (m);
11804 	    }
11805 	}
11806       lambda = tree_node ();
11807 
11808       if (!get_overrun ())
11809 	{
11810 	  unsigned nvbases = u ();
11811 	  if (nvbases)
11812 	    {
11813 	      vec_alloc (vbase_vec, nvbases);
11814 	      for (tree child = binfo; child; child = TREE_CHAIN (child))
11815 		if (BINFO_VIRTUAL_P (child))
11816 		  vbase_vec->quick_push (child);
11817 	    }
11818 	}
11819 
11820       if (!get_overrun ())
11821 	{
11822 	  int has_vptr = i ();
11823 	  if (has_vptr)
11824 	    {
11825 	      pure_virts = tree_vec ();
11826 	      vcall_indices = tree_pair_vec ();
11827 	      key_method = tree_node ();
11828 	    }
11829 	}
11830     }
11831 
11832   tree maybe_dup = odr_duplicate (maybe_template, TYPE_SIZE (type));
11833   bool installing = maybe_dup && !TYPE_SIZE (type);
11834   if (installing)
11835     {
11836       if (DECL_EXTERNAL (defn) && TYPE_LANG_SPECIFIC (type))
11837 	{
11838 	  /* We don't deal with not-really-extern, because, for a
11839 	     module you want the import to be the interface, and for a
11840 	     header-unit, you're doing it wrong.  */
11841 	  CLASSTYPE_INTERFACE_UNKNOWN (type) = false;
11842 	  CLASSTYPE_INTERFACE_ONLY (type) = true;
11843 	}
11844 
11845       if (maybe_dup != defn)
11846 	{
11847 	  // FIXME: This is needed on other defns too, almost
11848 	  // duplicate-decl like?  See is_matching_decl too.
11849 	  /* Copy flags from the duplicate.  */
11850 	  tree type_dup = TREE_TYPE (maybe_dup);
11851 
11852 	  /* Core pieces.  */
11853 	  TYPE_MODE_RAW (type) = TYPE_MODE_RAW (type_dup);
11854 	  SET_DECL_MODE (defn, DECL_MODE (maybe_dup));
11855 	  TREE_ADDRESSABLE (type) = TREE_ADDRESSABLE (type_dup);
11856 	  DECL_SIZE (defn) = DECL_SIZE (maybe_dup);
11857 	  DECL_SIZE_UNIT (defn) = DECL_SIZE_UNIT (maybe_dup);
11858 	  DECL_ALIGN_RAW (defn) = DECL_ALIGN_RAW (maybe_dup);
11859 	  DECL_WARN_IF_NOT_ALIGN_RAW (defn)
11860 	    = DECL_WARN_IF_NOT_ALIGN_RAW (maybe_dup);
11861 	  DECL_USER_ALIGN (defn) = DECL_USER_ALIGN (maybe_dup);
11862 
11863 	  /* C++ pieces.  */
11864 	  TYPE_POLYMORPHIC_P (type) = TYPE_POLYMORPHIC_P (type_dup);
11865 	  TYPE_HAS_USER_CONSTRUCTOR (type)
11866 	    = TYPE_HAS_USER_CONSTRUCTOR (type_dup);
11867 	  TYPE_HAS_NONTRIVIAL_DESTRUCTOR (type)
11868 	    = TYPE_HAS_NONTRIVIAL_DESTRUCTOR (type_dup);
11869 
11870 	  if (auto ls = TYPE_LANG_SPECIFIC (type_dup))
11871 	    {
11872 	      if (TYPE_LANG_SPECIFIC (type))
11873 		{
11874 		  CLASSTYPE_BEFRIENDING_CLASSES (type_dup)
11875 		    = CLASSTYPE_BEFRIENDING_CLASSES (type);
11876 		  if (!ANON_AGGR_TYPE_P (type))
11877 		    CLASSTYPE_TYPEINFO_VAR (type_dup)
11878 		      = CLASSTYPE_TYPEINFO_VAR (type);
11879 		}
11880 	      for (tree v = type; v; v = TYPE_NEXT_VARIANT (v))
11881 		TYPE_LANG_SPECIFIC (v) = ls;
11882 	    }
11883 	}
11884 
11885       TYPE_SIZE (type) = size;
11886       TYPE_SIZE_UNIT (type) = size_unit;
11887 
11888       if (fields)
11889 	{
11890 	  tree *chain = &TYPE_FIELDS (type);
11891 	  unsigned len = fields->length ();
11892 	  for (unsigned ix = 0; ix != len; ix++)
11893 	    {
11894 	      tree decl = (*fields)[ix];
11895 
11896 	      if (!decl)
11897 		{
11898 		  /* An anonymous struct with typedef name.  */
11899 		  tree tdef = (*fields)[ix+1];
11900 		  decl = TYPE_STUB_DECL (TREE_TYPE (tdef));
11901 		  gcc_checking_assert (IDENTIFIER_ANON_P (DECL_NAME (decl))
11902 				       && decl != tdef);
11903 		}
11904 
11905 	      gcc_checking_assert (!*chain == !DECL_CLONED_FUNCTION_P (decl));
11906 	      *chain = decl;
11907 	      chain = &DECL_CHAIN (decl);
11908 
11909 	      if (TREE_CODE (decl) == FIELD_DECL
11910 		  && ANON_AGGR_TYPE_P (TREE_TYPE (decl)))
11911 		ANON_AGGR_TYPE_FIELD
11912 		  (TYPE_MAIN_VARIANT (TREE_TYPE (decl))) = decl;
11913 
11914 	      if (TREE_CODE (decl) == USING_DECL
11915 		  && TREE_CODE (USING_DECL_SCOPE (decl)) == RECORD_TYPE)
11916 		{
11917 		  /* Reconstruct DECL_ACCESS.  */
11918 		  tree decls = USING_DECL_DECLS (decl);
11919 		  tree access = declared_access (decl);
11920 
11921 		  for (ovl_iterator iter (decls); iter; ++iter)
11922 		    {
11923 		      tree d = *iter;
11924 
11925 		      retrofit_lang_decl (d);
11926 		      tree list = DECL_ACCESS (d);
11927 
11928 		      if (!purpose_member (type, list))
11929 			DECL_ACCESS (d) = tree_cons (type, access, list);
11930 		    }
11931 		}
11932 	    }
11933 	}
11934 
11935       TYPE_VFIELD (type) = vfield;
11936       TYPE_BINFO (type) = binfo;
11937 
11938       if (TYPE_LANG_SPECIFIC (type))
11939 	{
11940 	  CLASSTYPE_LAMBDA_EXPR (type) = lambda;
11941 
11942 	  CLASSTYPE_MEMBER_VEC (type) = member_vec;
11943 	  CLASSTYPE_PURE_VIRTUALS (type) = pure_virts;
11944 	  CLASSTYPE_VCALL_INDICES (type) = vcall_indices;
11945 
11946 	  CLASSTYPE_KEY_METHOD (type) = key_method;
11947 
11948 	  CLASSTYPE_VBASECLASSES (type) = vbase_vec;
11949 
11950 	  /* Resort the member vector.  */
11951 	  resort_type_member_vec (member_vec, NULL, nop, NULL);
11952 	}
11953     }
11954   else if (maybe_dup)
11955     {
11956       // FIXME:QOI Check matching defn
11957     }
11958 
11959   if (TYPE_LANG_SPECIFIC (type))
11960     {
11961       tree primary = tree_node ();
11962       tree as_base = tree_node ();
11963 
11964       if (as_base)
11965 	as_base = TREE_TYPE (as_base);
11966 
11967       /* Read the vtables.  */
11968       vec<tree, va_heap> *vtables = vec_chained_decls ();
11969       if (vtables)
11970 	{
11971 	  unsigned len = vtables->length ();
11972 	  for (unsigned ix = 0; ix != len; ix++)
11973 	    {
11974 	      tree vtable = (*vtables)[ix];
11975 	      read_var_def (vtable, vtable);
11976 	    }
11977 	}
11978 
11979       tree friend_classes = tree_list (false);
11980       tree friend_functions = NULL_TREE;
11981       for (tree *chain = &friend_functions;
11982 	   tree name = tree_node (); chain = &TREE_CHAIN (*chain))
11983 	{
11984 	  tree val = tree_list (false);
11985 	  *chain = build_tree_list (name, val);
11986 	}
11987       tree decl_list = tree_list (true);
11988 
11989       if (installing)
11990 	{
11991 	  CLASSTYPE_PRIMARY_BINFO (type) = primary;
11992 	  CLASSTYPE_AS_BASE (type) = as_base;
11993 
11994 	  if (vtables)
11995 	    {
11996 	      if (!CLASSTYPE_KEY_METHOD (type)
11997 		  /* Sneaky user may have defined it inline
11998 		     out-of-class.  */
11999 		  || DECL_DECLARED_INLINE_P (CLASSTYPE_KEY_METHOD (type)))
12000 		vec_safe_push (keyed_classes, type);
12001 	      unsigned len = vtables->length ();
12002 	      tree *chain = &CLASSTYPE_VTABLES (type);
12003 	      for (unsigned ix = 0; ix != len; ix++)
12004 		{
12005 		  tree vtable = (*vtables)[ix];
12006 		  gcc_checking_assert (!*chain);
12007 		  *chain = vtable;
12008 		  chain = &DECL_CHAIN (vtable);
12009 		}
12010 	    }
12011 	  CLASSTYPE_FRIEND_CLASSES (type) = friend_classes;
12012 	  DECL_FRIENDLIST (defn) = friend_functions;
12013 	  CLASSTYPE_DECL_LIST (type) = decl_list;
12014 
12015 	  for (; friend_classes; friend_classes = TREE_CHAIN (friend_classes))
12016 	    {
12017 	      tree f = TREE_VALUE (friend_classes);
12018 
12019 	      if (TYPE_P (f))
12020 		{
12021 		  CLASSTYPE_BEFRIENDING_CLASSES (f)
12022 		    = tree_cons (NULL_TREE, type,
12023 				 CLASSTYPE_BEFRIENDING_CLASSES (f));
12024 		  dump () && dump ("Class %N befriending %C:%N",
12025 				   type, TREE_CODE (f), f);
12026 		}
12027 	    }
12028 
12029 	  for (; friend_functions;
12030 	       friend_functions = TREE_CHAIN (friend_functions))
12031 	    for (tree friend_decls = TREE_VALUE (friend_functions);
12032 		 friend_decls; friend_decls = TREE_CHAIN (friend_decls))
12033 	      {
12034 		tree f = TREE_VALUE (friend_decls);
12035 
12036 		DECL_BEFRIENDING_CLASSES (f)
12037 		  = tree_cons (NULL_TREE, type, DECL_BEFRIENDING_CLASSES (f));
12038 		dump () && dump ("Class %N befriending %C:%N",
12039 				 type, TREE_CODE (f), f);
12040 	      }
12041 	}
12042 
12043       if (TYPE_CONTAINS_VPTR_P (type))
12044 	/* Read and install the thunks.  */
12045 	while (tree vfunc = tree_node ())
12046 	  {
12047 	    tree thunks = chained_decls ();
12048 	    if (installing)
12049 	      SET_DECL_THUNKS (vfunc, thunks);
12050 	  }
12051 
12052       vec_free (vtables);
12053     }
12054 
12055   /* Propagate to all variants.  */
12056   if (installing)
12057     fixup_type_variants (type);
12058 
12059   /* IS_FAKE_BASE_TYPE is inaccurate at this point, because if this is
12060      the fake base, we've not hooked it into the containing class's
12061      data structure yet.  Fortunately it has a unique name.  */
12062   if (installing
12063       && DECL_NAME (defn) != as_base_identifier
12064       && (!CLASSTYPE_TEMPLATE_INFO (type)
12065 	  || !uses_template_parms (TI_ARGS (CLASSTYPE_TEMPLATE_INFO (type)))))
12066     /* Emit debug info.  It'd be nice to know if the interface TU
12067        already emitted this.  */
12068     rest_of_type_compilation (type, !LOCAL_CLASS_P (type));
12069 
12070   vec_free (fields);
12071 
12072   return !get_overrun ();
12073 }
12074 
12075 void
12076 trees_out::write_enum_def (tree decl)
12077 {
12078   tree type = TREE_TYPE (decl);
12079 
12080   tree_node (TYPE_VALUES (type));
12081   tree_node (TYPE_MIN_VALUE (type));
12082   tree_node (TYPE_MAX_VALUE (type));
12083 }
12084 
12085 void
12086 trees_out::mark_enum_def (tree decl)
12087 {
12088   tree type = TREE_TYPE (decl);
12089 
12090   for (tree values = TYPE_VALUES (type); values; values = TREE_CHAIN (values))
12091     {
12092       tree cst = TREE_VALUE (values);
12093       mark_by_value (cst);
12094       /* We must mark the init to avoid circularity in tt_enum_int.  */
12095       if (tree init = DECL_INITIAL (cst))
12096 	if (TREE_CODE (init) == INTEGER_CST)
12097 	  mark_by_value (init);
12098     }
12099 }
12100 
12101 bool
12102 trees_in::read_enum_def (tree defn, tree maybe_template)
12103 {
12104   tree type = TREE_TYPE (defn);
12105   tree values = tree_node ();
12106   tree min = tree_node ();
12107   tree max = tree_node ();
12108 
12109   if (get_overrun ())
12110     return false;
12111 
12112   tree maybe_dup = odr_duplicate (maybe_template, TYPE_VALUES (type));
12113   bool installing = maybe_dup && !TYPE_VALUES (type);
12114 
12115   if (installing)
12116     {
12117       TYPE_VALUES (type) = values;
12118       TYPE_MIN_VALUE (type) = min;
12119       TYPE_MAX_VALUE (type) = max;
12120 
12121       rest_of_type_compilation (type, DECL_NAMESPACE_SCOPE_P (defn));
12122     }
12123   else if (maybe_dup)
12124     {
12125       tree known = TYPE_VALUES (type);
12126       for (; known && values;
12127 	   known = TREE_CHAIN (known), values = TREE_CHAIN (values))
12128 	{
12129 	  tree known_decl = TREE_VALUE (known);
12130 	  tree new_decl = TREE_VALUE (values);
12131 
12132 	  if (DECL_NAME (known_decl) != DECL_NAME (new_decl))
12133 	    goto bad;
12134 
12135 	  new_decl = maybe_duplicate (new_decl);
12136 
12137 	  if (!cp_tree_equal (DECL_INITIAL (known_decl),
12138 			      DECL_INITIAL (new_decl)))
12139 	    goto bad;
12140 	}
12141 
12142       if (known || values)
12143 	goto bad;
12144 
12145       if (!cp_tree_equal (TYPE_MIN_VALUE (type), min)
12146 	  || !cp_tree_equal (TYPE_MAX_VALUE (type), max))
12147 	{
12148 	bad:;
12149 	  error_at (DECL_SOURCE_LOCATION (maybe_dup),
12150 		    "definition of %qD does not match", maybe_dup);
12151 	  inform (DECL_SOURCE_LOCATION (defn),
12152 		  "existing definition %qD", defn);
12153 
12154 	  tree known_decl = NULL_TREE, new_decl = NULL_TREE;
12155 
12156 	  if (known)
12157 	    known_decl = TREE_VALUE (known);
12158 	  if (values)
12159 	    new_decl = maybe_duplicate (TREE_VALUE (values));
12160 
12161 	  if (known_decl && new_decl)
12162 	    {
12163 	      inform (DECL_SOURCE_LOCATION (new_decl),
12164 		      "... this enumerator %qD", new_decl);
12165 	      inform (DECL_SOURCE_LOCATION (known_decl),
12166 		      "enumerator %qD does not match ...", known_decl);
12167 	    }
12168 	  else if (known_decl || new_decl)
12169 	    {
12170 	      tree extra = known_decl ? known_decl : new_decl;
12171 	      inform (DECL_SOURCE_LOCATION (extra),
12172 		      "additional enumerators beginning with %qD", extra);
12173 	    }
12174 	  else
12175 	    inform (DECL_SOURCE_LOCATION (maybe_dup),
12176 		    "enumeration range differs");
12177 
12178 	  /* Mark it bad.  */
12179 	  unmatched_duplicate (maybe_template);
12180 	}
12181     }
12182 
12183   return true;
12184 }
12185 
12186 /* Write out the body of DECL.  See above circularity note.  */
12187 
12188 void
12189 trees_out::write_definition (tree decl)
12190 {
12191   if (streaming_p ())
12192     {
12193       assert_definition (decl);
12194       dump ()
12195 	&& dump ("Writing definition %C:%N", TREE_CODE (decl), decl);
12196     }
12197   else
12198     dump (dumper::DEPEND)
12199       && dump ("Depending definition %C:%N", TREE_CODE (decl), decl);
12200 
12201  again:
12202   switch (TREE_CODE (decl))
12203     {
12204     default:
12205       gcc_unreachable ();
12206 
12207     case TEMPLATE_DECL:
12208       decl = DECL_TEMPLATE_RESULT (decl);
12209       goto again;
12210 
12211     case FUNCTION_DECL:
12212       write_function_def (decl);
12213       break;
12214 
12215     case TYPE_DECL:
12216       {
12217 	tree type = TREE_TYPE (decl);
12218 	gcc_assert (TYPE_MAIN_VARIANT (type) == type
12219 		    && TYPE_NAME (type) == decl);
12220 	if (TREE_CODE (type) == ENUMERAL_TYPE)
12221 	  write_enum_def (decl);
12222 	else
12223 	  write_class_def (decl);
12224       }
12225       break;
12226 
12227     case VAR_DECL:
12228     case CONCEPT_DECL:
12229       write_var_def (decl);
12230       break;
12231     }
12232 }
12233 
12234 /* Mark a declaration for by-value walking.  If DO_DEFN is true, mark
12235    its body too.  */
12236 
12237 void
12238 trees_out::mark_declaration (tree decl, bool do_defn)
12239 {
12240   mark_by_value (decl);
12241 
12242   if (TREE_CODE (decl) == TEMPLATE_DECL)
12243     decl = DECL_TEMPLATE_RESULT (decl);
12244 
12245   if (!do_defn)
12246     return;
12247 
12248   switch (TREE_CODE (decl))
12249     {
12250     default:
12251       gcc_unreachable ();
12252 
12253     case FUNCTION_DECL:
12254       mark_function_def (decl);
12255       break;
12256 
12257     case TYPE_DECL:
12258       {
12259 	tree type = TREE_TYPE (decl);
12260 	gcc_assert (TYPE_MAIN_VARIANT (type) == type
12261 		    && TYPE_NAME (type) == decl);
12262 	if (TREE_CODE (type) == ENUMERAL_TYPE)
12263 	  mark_enum_def (decl);
12264 	else
12265 	  mark_class_def (decl);
12266       }
12267       break;
12268 
12269     case VAR_DECL:
12270     case CONCEPT_DECL:
12271       mark_var_def (decl);
12272       break;
12273     }
12274 }
12275 
12276 /* Read in the body of DECL.  See above circularity note.  */
12277 
12278 bool
12279 trees_in::read_definition (tree decl)
12280 {
12281   dump () && dump ("Reading definition %C %N", TREE_CODE (decl), decl);
12282 
12283   tree maybe_template = decl;
12284 
12285  again:
12286   switch (TREE_CODE (decl))
12287     {
12288     default:
12289       break;
12290 
12291     case TEMPLATE_DECL:
12292       decl = DECL_TEMPLATE_RESULT (decl);
12293       goto again;
12294 
12295     case FUNCTION_DECL:
12296       return read_function_def (decl, maybe_template);
12297 
12298     case TYPE_DECL:
12299       {
12300 	tree type = TREE_TYPE (decl);
12301 	gcc_assert (TYPE_MAIN_VARIANT (type) == type
12302 		    && TYPE_NAME (type) == decl);
12303 	if (TREE_CODE (type) == ENUMERAL_TYPE)
12304 	  return read_enum_def (decl, maybe_template);
12305 	else
12306 	  return read_class_def (decl, maybe_template);
12307       }
12308       break;
12309 
12310     case VAR_DECL:
12311     case CONCEPT_DECL:
12312       return read_var_def (decl, maybe_template);
12313     }
12314 
12315   return false;
12316 }
12317 
12318 /* Look up and maybe insert a depset slot for KEY.  */
12319 
12320 depset **
12321 depset::hash::entity_slot (tree entity, bool insert)
12322 {
12323   traits::compare_type key (entity, NULL);
12324   depset **slot = find_slot_with_hash (key, traits::hash (key),
12325 				       insert ? INSERT : NO_INSERT);
12326 
12327   return slot;
12328 }
12329 
12330 depset **
12331 depset::hash::binding_slot (tree ctx, tree name, bool insert)
12332 {
12333   traits::compare_type key (ctx, name);
12334   depset **slot = find_slot_with_hash (key, traits::hash (key),
12335 				       insert ? INSERT : NO_INSERT);
12336 
12337   return slot;
12338 }
12339 
12340 depset *
12341 depset::hash::find_dependency (tree decl)
12342 {
12343   depset **slot = entity_slot (decl, false);
12344 
12345   return slot ? *slot : NULL;
12346 }
12347 
12348 depset *
12349 depset::hash::find_binding (tree ctx, tree name)
12350 {
12351   depset **slot = binding_slot (ctx, name, false);
12352 
12353   return slot ? *slot : NULL;
12354 }
12355 
12356 /* DECL is a newly discovered dependency.  Create the depset, if it
12357    doesn't already exist.  Add it to the worklist if so.
12358 
12359    DECL will be an OVL_USING_P OVERLOAD, if it's from a binding that's
12360    a using decl.
12361 
12362    We do not have to worry about adding the same dependency more than
12363    once.  First, it's harmless; second, the TREE_VISITED marking
12364    prevents us from wanting to do it anyway.  */
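/* The using-decl case looks like this (illustrative names):

     namespace N { void f (); }
     export using N::f;

   where the binding hands us f wrapped in an OVL_USING_P OVERLOAD.  */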
12365 
12366 depset *
12367 depset::hash::make_dependency (tree decl, entity_kind ek)
12368 {
12369   /* Make sure we're being told consistent information.  */
12370   gcc_checking_assert ((ek == EK_NAMESPACE)
12371 		       == (TREE_CODE (decl) == NAMESPACE_DECL
12372 			   && !DECL_NAMESPACE_ALIAS (decl)));
12373   gcc_checking_assert (ek != EK_BINDING && ek != EK_REDIRECT);
12374   gcc_checking_assert (TREE_CODE (decl) != FIELD_DECL
12375 		       && (TREE_CODE (decl) != USING_DECL
12376 			   || TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL));
12377   gcc_checking_assert (!is_key_order ());
12378   if (ek == EK_USING)
12379     gcc_checking_assert (TREE_CODE (decl) == OVERLOAD);
12380 
12381   if (TREE_CODE (decl) == TEMPLATE_DECL)
12382     /* The template should have copied these from its result decl.  */
12383     gcc_checking_assert (DECL_MODULE_EXPORT_P (decl)
12384 			 == DECL_MODULE_EXPORT_P (DECL_TEMPLATE_RESULT (decl)));
12385 
12386   depset **slot = entity_slot (decl, true);
12387   depset *dep = *slot;
12388   bool for_binding = ek == EK_FOR_BINDING;
12389 
12390   if (!dep)
12391     {
12392       if (DECL_IMPLICIT_TYPEDEF_P (decl)
12393 	  /* ... not an enum, for instance.  */
12394 	  && RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl))
12395 	  && TYPE_LANG_SPECIFIC (TREE_TYPE (decl))
12396 	  && CLASSTYPE_USE_TEMPLATE (TREE_TYPE (decl)) == 2)
12397 	{
12398 	  /* A partial or explicit specialization. Partial
12399 	     specializations might not be in the hash table, because
12400 	     there can be multiple differently-constrained variants.
12401 
12402 	     template<typename T> class silly;
12403 	     template<typename T> requires true class silly {};
12404 
12405 	     We need to find them, insert their TEMPLATE_DECL in the
12406 	     dep_hash, and then convert the dep we just found into a
12407 	     redirect.  */
12408 
12409 	  tree ti = TYPE_TEMPLATE_INFO (TREE_TYPE (decl));
12410 	  tree tmpl = TI_TEMPLATE (ti);
12411 	  tree partial = NULL_TREE;
12412 	  for (tree spec = DECL_TEMPLATE_SPECIALIZATIONS (tmpl);
12413 	       spec; spec = TREE_CHAIN (spec))
12414 	    if (DECL_TEMPLATE_RESULT (TREE_VALUE (spec)) == decl)
12415 	      {
12416 		partial = TREE_VALUE (spec);
12417 		break;
12418 	      }
12419 
12420 	  if (partial)
12421 	    {
12422 	      /* Eagerly create an empty redirect.  The following
12423 	         make_dependency call could cause hash reallocation,
12424 	         and invalidate slot's value.  */
12425 	      depset *redirect = make_entity (decl, EK_REDIRECT);
12426 
12427 	      /* Redirects are never reached -- always snap to their target.  */
12428 	      redirect->set_flag_bit<DB_UNREACHED_BIT> ();
12429 
12430 	      *slot = redirect;
12431 
12432 	      depset *tmpl_dep = make_dependency (partial, EK_PARTIAL);
12433 	      gcc_checking_assert (tmpl_dep->get_entity_kind () == EK_PARTIAL);
12434 
12435 	      redirect->deps.safe_push (tmpl_dep);
12436 
12437 	      return redirect;
12438 	    }
12439 	}
12440 
12441       bool has_def = ek != EK_USING && has_definition (decl);
12442       if (ek > EK_BINDING)
12443 	ek = EK_DECL;
12444 
12445       /* The only OVERLOADS we should see are USING decls from
12446 	 bindings.  */
12447       *slot = dep = make_entity (decl, ek, has_def);
12448 
12449       if (TREE_CODE (decl) == TEMPLATE_DECL)
12450 	{
12451 	  if (DECL_ALIAS_TEMPLATE_P (decl) && DECL_TEMPLATE_INFO (decl))
12452 	    dep->set_flag_bit<DB_ALIAS_TMPL_INST_BIT> ();
12453 	  else if (CHECKING_P)
12454 	    /* The template_result should otherwise not be in the
12455 	       table, or be an empty redirect (created above).  */
12456 	    if (auto *eslot = entity_slot (DECL_TEMPLATE_RESULT (decl), false))
12457 	      gcc_checking_assert ((*eslot)->get_entity_kind () == EK_REDIRECT
12458 				   && !(*eslot)->deps.length ());
12459 	}
12460 
12461       if (ek != EK_USING)
12462 	{
12463 	  tree not_tmpl = STRIP_TEMPLATE (decl);
12464 
12465 	  if (DECL_LANG_SPECIFIC (not_tmpl)
12466 	      && DECL_MODULE_IMPORT_P (not_tmpl))
12467 	    {
12468 	      /* Store the module number and index in cluster/section,
12469 		 so we don't have to look them up again.  */
12470 	      unsigned index = import_entity_index (decl);
12471 	      module_state *from = import_entity_module (index);
12472 	      /* Remap will be zero for imports from partitions, which
12473 		 we want to treat as-if declared in this TU.  */
12474 	      if (from->remap)
12475 		{
12476 		  dep->cluster = index - from->entity_lwm;
12477 		  dep->section = from->remap;
12478 		  dep->set_flag_bit<DB_IMPORTED_BIT> ();
12479 		}
12480 	    }
12481 
12482 	  if (ek == EK_DECL
12483 	      && !dep->is_import ()
12484 	      && TREE_CODE (CP_DECL_CONTEXT (decl)) == NAMESPACE_DECL
12485 	      && !(TREE_CODE (decl) == TEMPLATE_DECL
12486 		   && DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (decl)))
12487 	    {
12488 	      tree ctx = CP_DECL_CONTEXT (decl);
12489 
12490 	      if (!TREE_PUBLIC (ctx))
12491 		/* Member of internal namespace.  */
12492 		dep->set_flag_bit<DB_IS_INTERNAL_BIT> ();
12493 	      else if (VAR_OR_FUNCTION_DECL_P (not_tmpl)
12494 		       && DECL_THIS_STATIC (not_tmpl))
12495 		{
12496 		  /* An internal decl.  This is ok in a GM entity.  */
12497 		  if (!(header_module_p ()
12498 			|| !DECL_LANG_SPECIFIC (not_tmpl)
12499 			|| !DECL_MODULE_PURVIEW_P (not_tmpl)))
12500 		    dep->set_flag_bit<DB_IS_INTERNAL_BIT> ();
12501 		}
12502 	    }
12503 	}
12504 
12505       if (!dep->is_import ())
12506 	worklist.safe_push (dep);
12507     }
12508 
12509   dump (dumper::DEPEND)
12510     && dump ("%s on %s %C:%N found",
12511 	     ek == EK_REDIRECT ? "Redirect"
12512 	     : for_binding ? "Binding" : "Dependency",
12513 	     dep->entity_kind_name (), TREE_CODE (decl), decl);
12514 
12515   return dep;
12516 }
12517 
12518 /* DEP is a newly discovered dependency.  Append it to current's
12519    depset.  */
12520 
12521 void
12522 depset::hash::add_dependency (depset *dep)
12523 {
12524   gcc_checking_assert (current && !is_key_order ());
12525   current->deps.safe_push (dep);
12526 
12527   if (dep->is_internal () && !current->is_internal ())
12528     current->set_flag_bit<DB_REFS_INTERNAL_BIT> ();
12529 
12530   if (current->get_entity_kind () == EK_USING
12531       && DECL_IMPLICIT_TYPEDEF_P (dep->get_entity ())
12532       && TREE_CODE (TREE_TYPE (dep->get_entity ())) == ENUMERAL_TYPE)
12533     {
12534       /* CURRENT is an unwrapped using-decl and DECL is an enum's
12535 	 implicit typedef.  Is CURRENT a member of the enum?  */
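      /* The source pattern in question is along these lines
	 (illustrative):

	   namespace N { enum E { e }; }
	   export using N::e;   // a using-decl naming an enumerator  */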
12536       tree c_decl = OVL_FUNCTION (current->get_entity ());
12537 
12538       if (TREE_CODE (c_decl) == CONST_DECL
12539 	  && (current->deps[0]->get_entity ()
12540 	      == CP_DECL_CONTEXT (dep->get_entity ())))
12541 	/* Make DECL depend on CURRENT.  */
12542 	dep->deps.safe_push (current);
12543     }
12544 
12545   if (dep->is_unreached ())
12546     {
12547       /* The dependency is reachable now.  */
12548       reached_unreached = true;
12549       dep->clear_flag_bit<DB_UNREACHED_BIT> ();
12550       dump (dumper::DEPEND)
12551 	&& dump ("Reaching unreached %s %C:%N", dep->entity_kind_name (),
12552 		 TREE_CODE (dep->get_entity ()), dep->get_entity ());
12553     }
12554 }
12555 
12556 depset *
12557 depset::hash::add_dependency (tree decl, entity_kind ek)
12558 {
12559   depset *dep;
12560 
12561   if (is_key_order ())
12562     {
12563       dep = find_dependency (decl);
12564       if (dep)
12565 	{
12566 	  current->deps.safe_push (dep);
12567 	  dump (dumper::MERGE)
12568 	    && dump ("Key dependency on %s %C:%N found",
12569 		     dep->entity_kind_name (), TREE_CODE (decl), decl);
12570 	}
12571       else
12572 	{
12573 	  /* It's not a mergeable decl, look for it in the original
12574 	     table.  */
12575 	  dep = chain->find_dependency (decl);
12576 	  gcc_checking_assert (dep);
12577 	}
12578     }
12579   else
12580     {
12581       dep = make_dependency (decl, ek);
12582       if (dep->get_entity_kind () != EK_REDIRECT)
12583 	add_dependency (dep);
12584     }
12585 
12586   return dep;
12587 }
12588 
12589 void
12590 depset::hash::add_namespace_context (depset *dep, tree ns)
12591 {
12592   depset *ns_dep = make_dependency (ns, depset::EK_NAMESPACE);
12593   dep->deps.safe_push (ns_dep);
12594 
12595   /* Mark it as special if imported so we don't walk connect when
12596      SCCing.  */
12597   if (!dep->is_binding () && ns_dep->is_import ())
12598     dep->set_special ();
12599 }
12600 
12601 struct add_binding_data
12602 {
12603   tree ns;
12604   bitmap partitions;
12605   depset *binding;
12606   depset::hash *hash;
12607   bool met_namespace;
12608 };
12609 
12610 /* Return true if we are, or contain something that is exported.  */
12611 
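/* A rough illustration of the filtering below (editorial example):

     module;
     static int helper;     // global module fragment: not added
     export module M;
     export int exported;   // added; makes its binding exported
     int purview;           // added; module linkage, not exported
     static int internal;   // ignored unless this is a header unit

   Type-info objects are likewise skipped.  */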
12612 bool
12613 depset::hash::add_binding_entity (tree decl, WMB_Flags flags, void *data_)
12614 {
12615   auto data = static_cast <add_binding_data *> (data_);
12616 
12617   if (!(TREE_CODE (decl) == NAMESPACE_DECL && !DECL_NAMESPACE_ALIAS (decl)))
12618     {
12619       tree inner = decl;
12620 
12621       if (TREE_CODE (inner) == CONST_DECL
12622 	  && TREE_CODE (DECL_CONTEXT (inner)) == ENUMERAL_TYPE)
12623 	inner = TYPE_NAME (DECL_CONTEXT (inner));
12624       else if (TREE_CODE (inner) == TEMPLATE_DECL)
12625 	inner = DECL_TEMPLATE_RESULT (inner);
12626 
12627       if (!DECL_LANG_SPECIFIC (inner) || !DECL_MODULE_PURVIEW_P (inner))
12628 	/* Ignore global module fragment entities.  */
12629 	return false;
12630 
12631       if (VAR_OR_FUNCTION_DECL_P (inner)
12632 	  && DECL_THIS_STATIC (inner))
12633 	{
12634 	  if (!header_module_p ())
12635 	    /* Ignore internal-linkage entitites.  */
12636 	    /* Ignore internal-linkage entities.  */
12637 	}
12638 
12639       if ((TREE_CODE (decl) == VAR_DECL
12640 	   || TREE_CODE (decl) == TYPE_DECL)
12641 	  && DECL_TINFO_P (decl))
12642 	/* Ignore TINFO things.  */
12643 	return false;
12644 
12645       if (!(flags & WMB_Using) && CP_DECL_CONTEXT (decl) != data->ns)
12646 	{
12647 	  /* A using that lost its wrapper or an unscoped enum
12648 	     constant.  */
12649 	  flags = WMB_Flags (flags | WMB_Using);
12650 	  if (DECL_MODULE_EXPORT_P (TREE_CODE (decl) == CONST_DECL
12651 				    ? TYPE_NAME (TREE_TYPE (decl))
12652 				    : STRIP_TEMPLATE (decl)))
12653 	    flags = WMB_Flags (flags | WMB_Export);
12654 	}
12655 
12656       if (!data->binding)
12657 	/* No binding to check.  */;
12658       else if (flags & WMB_Using)
12659 	{
12660 	  /* Look in the binding to see if we already have this
12661 	     using.  */
12662 	  for (unsigned ix = data->binding->deps.length (); --ix;)
12663 	    {
12664 	      depset *d = data->binding->deps[ix];
12665 	      if (d->get_entity_kind () == EK_USING
12666 		  && OVL_FUNCTION (d->get_entity ()) == decl)
12667 		{
12668 		  if (!(flags & WMB_Hidden))
12669 		    d->clear_hidden_binding ();
12670 		  if (flags & WMB_Export)
12671 		    OVL_EXPORT_P (d->get_entity ()) = true;
12672 		  return bool (flags & WMB_Export);
12673 		}
12674 	    }
12675 	}
12676       else if (flags & WMB_Dups)
12677 	{
12678 	  /* Look in the binding to see if we already have this decl.  */
12679 	  for (unsigned ix = data->binding->deps.length (); --ix;)
12680 	    {
12681 	      depset *d = data->binding->deps[ix];
12682 	      if (d->get_entity () == decl)
12683 		{
12684 		  if (!(flags & WMB_Hidden))
12685 		    d->clear_hidden_binding ();
12686 		  return false;
12687 		}
12688 	    }
12689 	}
12690 
12691       /* We're adding something.  */
12692       if (!data->binding)
12693 	{
12694 	  data->binding = make_binding (data->ns, DECL_NAME (decl));
12695 	  data->hash->add_namespace_context (data->binding, data->ns);
12696 
12697 	  depset **slot = data->hash->binding_slot (data->ns,
12698 						    DECL_NAME (decl), true);
12699 	  gcc_checking_assert (!*slot);
12700 	  *slot = data->binding;
12701 	}
12702 
12703       /* Make sure nobody left a tree visited lying about.  */
12704       gcc_checking_assert (!TREE_VISITED (decl));
12705 
12706       if (flags & WMB_Using)
12707 	{
12708 	  decl = ovl_make (decl, NULL_TREE);
12709 	  if (flags & WMB_Export)
12710 	    OVL_EXPORT_P (decl) = true;
12711 	}
12712 
12713       depset *dep = data->hash->make_dependency
12714 	(decl, flags & WMB_Using ? EK_USING : EK_FOR_BINDING);
12715       if (flags & WMB_Hidden)
12716 	dep->set_hidden_binding ();
12717       data->binding->deps.safe_push (dep);
12718       /* Binding and contents are mutually dependent.  */
12719       dep->deps.safe_push (data->binding);
12720 
12721       return (flags & WMB_Using
12722 	      ? flags & WMB_Export : DECL_MODULE_EXPORT_P (decl));
12723     }
12724   else if (DECL_NAME (decl) && !data->met_namespace)
12725     {
12726       /* Namespace, walk exactly once.  */
12727       gcc_checking_assert (TREE_PUBLIC (decl));
12728       data->met_namespace = true;
12729       if (data->hash->add_namespace_entities (decl, data->partitions))
12730 	{
12731 	  /* It contains an exported thing, so it is exported.  */
12732 	  gcc_checking_assert (DECL_MODULE_PURVIEW_P (decl));
12733 	  DECL_MODULE_EXPORT_P (decl) = true;
12734 	}
12735 
12736       if (DECL_MODULE_PURVIEW_P (decl))
12737 	{
12738 	  data->hash->make_dependency (decl, depset::EK_NAMESPACE);
12739 
12740 	  return DECL_MODULE_EXPORT_P (decl);
12741 	}
12742     }
12743 
12744   return false;
12745 }
12746 
12747 /* Recursively find all the namespace bindings of NS.  Add a depset
12748    for every binding that contains an export or module-linkage entity.
12749    Add a defining depset for every such decl for which we need to
12750    write a definition.  Such defining depsets depend on the binding depset.
12751    Returns true if we contain something exported.  */
12752 
12753 bool
12754 depset::hash::add_namespace_entities (tree ns, bitmap partitions)
12755 {
12756   dump () && dump ("Looking for writables in %N", ns);
12757   dump.indent ();
12758 
12759   unsigned count = 0;
12760   add_binding_data data;
12761   data.ns = ns;
12762   data.partitions = partitions;
12763   data.hash = this;
12764 
12765   hash_table<named_decl_hash>::iterator end
12766     (DECL_NAMESPACE_BINDINGS (ns)->end ());
12767   for (hash_table<named_decl_hash>::iterator iter
12768 	 (DECL_NAMESPACE_BINDINGS (ns)->begin ()); iter != end; ++iter)
12769     {
12770       data.binding = nullptr;
12771       data.met_namespace = false;
12772       if (walk_module_binding (*iter, partitions, add_binding_entity, &data))
12773 	count++;
12774     }
12775 
12776   if (count)
12777     dump () && dump ("Found %u entries", count);
12778   dump.outdent ();
12779 
12780   return count != 0;
12781 }
12782 
12783 void
12784 depset::hash::add_partial_entities (vec<tree, va_gc> *partial_classes)
12785 {
12786   for (unsigned ix = 0; ix != partial_classes->length (); ix++)
12787     {
12788       tree inner = (*partial_classes)[ix];
12789 
12790       depset *dep = make_dependency (inner, depset::EK_DECL);
12791 
12792       if (dep->get_entity_kind () == depset::EK_REDIRECT)
12793 	/* We should have recorded the template as a partial
12794 	   specialization.  */
12795 	gcc_checking_assert (dep->deps[0]->get_entity_kind ()
12796 			     == depset::EK_PARTIAL);
12797       else
12798 	/* It was an explicit specialization, not a partial one.  */
12799 	gcc_checking_assert (dep->get_entity_kind ()
12800 			     == depset::EK_SPECIALIZATION);
12801     }
12802 }
12803 
12804 /* Add the members of imported classes that we defined in this TU.
12805    This will also include lazily created implicit member function
12806    declarations.  (All others will be definitions.)  */
12807 
12808 void
12809 depset::hash::add_class_entities (vec<tree, va_gc> *class_members)
12810 {
12811   for (unsigned ix = 0; ix != class_members->length (); ix++)
12812     {
12813       tree defn = (*class_members)[ix];
12814       depset *dep = make_dependency (defn, EK_INNER_DECL);
12815 
12816       if (dep->get_entity_kind () == EK_REDIRECT)
12817 	dep = dep->deps[0];
12818 
12819       /* Only non-instantiations need marking as members.  */
12820       if (dep->get_entity_kind () == EK_DECL)
12821 	dep->set_flag_bit <DB_IS_MEMBER_BIT> ();
12822     }
12823 }
12824 
12825 /* We add the partial & explicit specializations, and the explicit
12826    instantiations.  */
12827 
12828 static void
12829 specialization_add (bool decl_p, spec_entry *entry, void *data_)
12830 {
12831   vec<spec_entry *> *data = reinterpret_cast <vec<spec_entry *> *> (data_);
12832 
12833   if (!decl_p)
12834     {
12835       /* We exclusively use decls to locate things.  Make sure there's
12836 	 no mismatch between the two specialization tables we keep.
12837 	 pt.cc optimizes instantiation lookup using a complicated
12838 	 heuristic.  We don't attempt to replicate that algorithm, but
12839 	 observe its behaviour and reproduce it upon read back.  */
12840 
12841        gcc_checking_assert (DECL_ALIAS_TEMPLATE_P (entry->tmpl)
12842 			   || TREE_CODE (entry->spec) == ENUMERAL_TYPE
12843 			   || DECL_CLASS_TEMPLATE_P (entry->tmpl));
12844 
12845        /* Only alias templates can appear in both tables (and
12846 	  if they're in the type table they must also be in the decl
12847 	  table).  */
12848        gcc_checking_assert
12849 	 (!match_mergeable_specialization (true, entry)
12850 	  == !DECL_ALIAS_TEMPLATE_P (entry->tmpl));
12851     }
12852   else if (VAR_OR_FUNCTION_DECL_P (entry->spec))
12853     gcc_checking_assert (!DECL_LOCAL_DECL_P (entry->spec));
12854 
12855   data->safe_push (entry);
12856 }
12857 
12858 /* Arbitrary stable comparison.  */
12859 
12860 static int
12861 specialization_cmp (const void *a_, const void *b_)
12862 {
12863   const spec_entry *ea = *reinterpret_cast<const spec_entry *const *> (a_);
12864   const spec_entry *eb = *reinterpret_cast<const spec_entry *const *> (b_);
12865 
12866   if (ea == eb)
12867     return 0;
12868 
12869   tree a = ea->spec;
12870   tree b = eb->spec;
12871   if (TYPE_P (a))
12872     {
12873       a = TYPE_NAME (a);
12874       b = TYPE_NAME (b);
12875     }
12876 
12877   if (a == b)
12878     /* This can happen with friend specializations.  Just order by
12879        entry address.  See note in depset_cmp.  */
12880     return ea < eb ? -1 : +1;
12881 
12882   return DECL_UID (a) < DECL_UID (b) ? -1 : +1;
12883 }
12884 
12885 /* We add all kinds of specializations.  Implicit specializations
12886    should only be streamed and walked if they are reachable from
12887    elsewhere.  Hence the UNREACHED flag.  This is making the
12888    assumption that it is cheaper to reinstantiate them on demand
12889    elsewhere, rather than stream them in when we instantiate their
12890    general template.  Also, if we do stream them, we can only do that
12891    if they are not internal (which they can become if they themselves
12892    touch an internal entity?).  */
12893 
12894 void
12895 depset::hash::add_specializations (bool decl_p)
12896 {
12897   vec<spec_entry *> data;
12898   data.create (100);
12899   walk_specializations (decl_p, specialization_add, &data);
12900   data.qsort (specialization_cmp);
12901   while (data.length ())
12902     {
12903       spec_entry *entry = data.pop ();
12904       tree spec = entry->spec;
12905       int use_tpl = 0;
12906       bool is_alias = false;
12907       bool is_friend = false;
12908 
12909       if (decl_p && DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (entry->tmpl))
12910 	/* A friend of a template.  This is keyed to the
12911 	   instantiation.  */
12912 	is_friend = true;
12913 
12914       if (!decl_p && DECL_ALIAS_TEMPLATE_P (entry->tmpl))
12915 	{
12916 	  spec = TYPE_NAME (spec);
12917 	  is_alias = true;
12918 	}
12919 
12920       if (decl_p || is_alias)
12921 	{
12922 	  if (tree ti = DECL_TEMPLATE_INFO (spec))
12923 	    {
12924 	      tree tmpl = TI_TEMPLATE (ti);
12925 
12926 	      use_tpl = DECL_USE_TEMPLATE (spec);
12927 	      if (spec == DECL_TEMPLATE_RESULT (tmpl))
12928 		{
12929 		  spec = tmpl;
12930 		  gcc_checking_assert (DECL_USE_TEMPLATE (spec) == use_tpl);
12931 		}
12932 	      else if (is_friend)
12933 		{
12934 		  if (TI_TEMPLATE (ti) != entry->tmpl
12935 		      || !template_args_equal (TI_ARGS (ti), entry->tmpl))
12936 		    goto template_friend;
12937 		}
12938 	    }
12939 	  else
12940 	    {
12941 	    template_friend:;
12942 	      gcc_checking_assert (is_friend);
12943 	      /* This is a friend of a template class, but not the one
12944 		 that generated entry->spec itself (i.e. it's an
12945 		 equivalent clone).  We do not need to record
12946 		 this.  */
12947 	      continue;
12948 	    }
12949 	}
12950       else
12951 	{
12952 	  if (TREE_CODE (spec) == ENUMERAL_TYPE)
12953 	    {
12954 	      tree ctx = DECL_CONTEXT (TYPE_NAME (spec));
12955 
12956 	      if (TYPE_P (ctx))
12957 		use_tpl = CLASSTYPE_USE_TEMPLATE (ctx);
12958 	      else
12959 		use_tpl = DECL_USE_TEMPLATE (ctx);
12960 	    }
12961 	  else
12962 	    use_tpl = CLASSTYPE_USE_TEMPLATE (spec);
12963 
12964 	  tree ti = TYPE_TEMPLATE_INFO (spec);
12965 	  tree tmpl = TI_TEMPLATE (ti);
12966 
12967 	  spec = TYPE_NAME (spec);
12968 	  if (spec == DECL_TEMPLATE_RESULT (tmpl))
12969 	    {
12970 	      spec = tmpl;
12971 	      use_tpl = DECL_USE_TEMPLATE (spec);
12972 	    }
12973 	}
12974 
12975       bool needs_reaching = false;
12976       if (use_tpl == 1)
12977 	/* Implicit instantiations only walked if we reach them.  */
12978 	needs_reaching = true;
12979       else if (!DECL_LANG_SPECIFIC (spec)
12980 	       || !DECL_MODULE_PURVIEW_P (STRIP_TEMPLATE (spec)))
12981 	/* Likewise, GMF explicit or partial specializations.  */
12982 	needs_reaching = true;
12983 
12984 #if false && CHECKING_P
12985       /* The instantiation isn't always on
12986 	 DECL_TEMPLATE_INSTANTIATIONS, */
12987       // FIXME: we probably need to remember this information?
12988       /* Verify the specialization is on the
12989 	 DECL_TEMPLATE_INSTANTIATIONS of the template.  */
12990       for (tree cons = DECL_TEMPLATE_INSTANTIATIONS (entry->tmpl);
12991 	   cons; cons = TREE_CHAIN (cons))
12992 	if (TREE_VALUE (cons) == entry->spec)
12993 	  {
12994 	    gcc_assert (entry->args == TREE_PURPOSE (cons));
12995 	    goto have_spec;
12996 	  }
12997       gcc_unreachable ();
12998     have_spec:;
12999 #endif
13000 
13001       /* Make sure nobody left a tree visited lying about.  */
13002       gcc_checking_assert (!TREE_VISITED (spec));
13003       depset *dep = make_dependency (spec, depset::EK_SPECIALIZATION);
13004       if (dep->is_special ())
13005 	{
13006 	  /* An already located specialization, this must be the TYPE
13007 	     corresponding to an alias_decl we found in the decl
13008 	     table.  */
13009 	  spec_entry *other = reinterpret_cast <spec_entry *> (dep->deps[0]);
13010 	  gcc_checking_assert (!decl_p && is_alias && !dep->is_type_spec ());
13011 	  gcc_checking_assert (other->tmpl == entry->tmpl
13012 			       && template_args_equal (other->args, entry->args)
13013 			       && TREE_TYPE (other->spec) == entry->spec);
13014 	  dep->set_flag_bit<DB_ALIAS_SPEC_BIT> ();
13015 	}
13016       else
13017 	{
13018 	  gcc_checking_assert (decl_p || !is_alias);
13019 	  if (dep->get_entity_kind () == depset::EK_REDIRECT)
13020 	    dep = dep->deps[0];
13021 	  else if (dep->get_entity_kind () == depset::EK_SPECIALIZATION)
13022 	    {
13023 	      dep->set_special ();
13024 	      dep->deps.safe_push (reinterpret_cast<depset *> (entry));
13025 	      if (!decl_p)
13026 		dep->set_flag_bit<DB_TYPE_SPEC_BIT> ();
13027 	    }
13028 
13029 	  if (needs_reaching)
13030 	    dep->set_flag_bit<DB_UNREACHED_BIT> ();
13031 	  if (is_friend)
13032 	    dep->set_flag_bit<DB_FRIEND_SPEC_BIT> ();
13033 	}
13034     }
13035   data.release ();
13036 }
13037 
13038 /* Add a depset into the mergeable hash.  */
13039 
13040 void
13041 depset::hash::add_mergeable (depset *mergeable)
13042 {
13043   gcc_checking_assert (is_key_order ());
13044   entity_kind ek = mergeable->get_entity_kind ();
13045   tree decl = mergeable->get_entity ();
13046   gcc_checking_assert (ek < EK_DIRECT_HWM);
13047 
13048   depset **slot = entity_slot (decl, true);
13049   gcc_checking_assert (!*slot);
13050   depset *dep = make_entity (decl, ek);
13051   *slot = dep;
13052 
13053   worklist.safe_push (dep);
13054 
13055   /* So we can locate the mergeable depset this depset refers to,
13056      mark the first dep.  */
13057   dep->set_special ();
13058   dep->deps.safe_push (mergeable);
13059 }
13060 
13061 /* Find the innermost-namespace scope of DECL, and that
13062    namespace-scope decl.  */
13063 
13064 tree
13065 find_pending_key (tree decl, tree *decl_p = nullptr)
13066 {
13067   tree ns = decl;
13068   do
13069     {
13070       decl = ns;
13071       ns = CP_DECL_CONTEXT (ns);
13072       if (TYPE_P (ns))
13073 	ns = TYPE_NAME (ns);
13074     }
13075   while (TREE_CODE (ns) != NAMESPACE_DECL);
13076 
13077   if (decl_p)
13078     *decl_p = decl;
13079 
13080   return ns;
13081 }
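/* Editorial illustration, not part of the original source: for a member
   function such as

     namespace N { struct S { void f (); }; }

   find_pending_key on f's decl walks f -> S -> N, stopping at the
   namespace.  It returns the NAMESPACE_DECL for N and, via *decl_p,
   the TYPE_DECL for S -- the namespace-scope entity that keys the
   pending member.  */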
13082 
13083 /* Iteratively find dependencies.  During the walk we may find more
13084    entries on the same binding that need walking.  */
13085 
13086 void
13087 depset::hash::find_dependencies (module_state *module)
13088 {
13089   trees_out walker (NULL, module, *this);
13090   vec<depset *> unreached;
13091   unreached.create (worklist.length ());
13092 
13093   for (;;)
13094     {
13095       reached_unreached = false;
13096       while (worklist.length ())
13097 	{
13098 	  depset *item = worklist.pop ();
13099 
13100 	  gcc_checking_assert (!item->is_binding ());
13101 	  if (item->is_unreached ())
13102 	    unreached.quick_push (item);
13103 	  else
13104 	    {
13105 	      current = item;
13106 	      tree decl = current->get_entity ();
13107 	      dump (is_key_order () ? dumper::MERGE : dumper::DEPEND)
13108 		&& dump ("Dependencies of %s %C:%N",
13109 			 is_key_order () ? "key-order"
13110 			 : current->entity_kind_name (), TREE_CODE (decl), decl);
13111 	      dump.indent ();
13112 	      walker.begin ();
13113 	      if (current->get_entity_kind () == EK_USING)
13114 		walker.tree_node (OVL_FUNCTION (decl));
13115 	      else if (TREE_VISITED (decl))
13116 		/* A global tree.  */;
13117 	      else if (item->get_entity_kind () == EK_NAMESPACE)
13118 		add_namespace_context (current, CP_DECL_CONTEXT (decl));
13119 	      else
13120 		{
13121 		  walker.mark_declaration (decl, current->has_defn ());
13122 
13123 		  if (!walker.is_key_order ()
13124 		      && (item->get_entity_kind () == EK_SPECIALIZATION
13125 			  || item->get_entity_kind () == EK_PARTIAL
13126 			  || (item->get_entity_kind () == EK_DECL
13127 			      && item->is_member ())))
13128 		    {
13129 		      tree ns = find_pending_key (decl, nullptr);
13130 		      add_namespace_context (item, ns);
13131 		    }
13132 
13133 		  // FIXME: Perhaps p1815 makes this redundant? Or at
13134 		  // least simplifies it.  Voldemort types are only
13135 		  // ever emissible when the containing (inline) function
13136 		  // definition is emitted?
13137 		  /* Turn the Sneakoscope on when depending the decl.  */
13138 		  sneakoscope = true;
13139 		  walker.decl_value (decl, current);
13140 		  sneakoscope = false;
13141 		  if (current->has_defn ())
13142 		    walker.write_definition (decl);
13143 		}
13144 	      walker.end ();
13145 
13146 	      if (!walker.is_key_order ()
13147 		  && TREE_CODE (decl) == TEMPLATE_DECL
13148 		  && !DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (decl))
13149 		/* Mark all the explicit & partial specializations as
13150 		   reachable.  */
13151 		for (tree cons = DECL_TEMPLATE_INSTANTIATIONS (decl);
13152 		     cons; cons = TREE_CHAIN (cons))
13153 		  {
13154 		    tree spec = TREE_VALUE (cons);
13155 		    if (TYPE_P (spec))
13156 		      spec = TYPE_NAME (spec);
13157 		    int use_tpl;
13158 		    node_template_info (spec, use_tpl);
13159 		    if (use_tpl & 2)
13160 		      {
13161 			depset *spec_dep = find_dependency (spec);
13162 			if (spec_dep->get_entity_kind () == EK_REDIRECT)
13163 			  spec_dep = spec_dep->deps[0];
13164 			if (spec_dep->is_unreached ())
13165 			  {
13166 			    reached_unreached = true;
13167 			    spec_dep->clear_flag_bit<DB_UNREACHED_BIT> ();
13168 			    dump (dumper::DEPEND)
13169 			      && dump ("Reaching unreached specialization"
13170 				       " %C:%N", TREE_CODE (spec), spec);
13171 			  }
13172 		      }
13173 		  }
13174 
13175 	      dump.outdent ();
13176 	      current = NULL;
13177 	    }
13178 	}
13179 
13180       if (!reached_unreached)
13181 	break;
13182 
13183       /* It's possible that we reached the unreached before we
13184 	 processed it in the above loop, so we'll be doing this an
13185 	 extra time.  However, to avoid that we have to do some
13186 	 bit shuffling that also involves a scan of the list.
13187 	 Swings & roundabouts I guess.  */
13188       std::swap (worklist, unreached);
13189     }
13190 
13191   unreached.release ();
13192 }
13193 
13194 /* Compare two entries of a single binding.  Implicit TYPE_DECL first,
13195    then hidden before non-hidden, then non-exported before exported.  */
13196 
13197 static int
13198 binding_cmp (const void *a_, const void *b_)
13199 {
13200   depset *a = *(depset *const *)a_;
13201   depset *b = *(depset *const *)b_;
13202 
13203   tree a_ent = a->get_entity ();
13204   tree b_ent = b->get_entity ();
13205   gcc_checking_assert (a_ent != b_ent
13206 		       && !a->is_binding ()
13207 		       && !b->is_binding ());
13208 
13209   /* Implicit typedefs come first.  */
13210   bool a_implicit = DECL_IMPLICIT_TYPEDEF_P (a_ent);
13211   bool b_implicit = DECL_IMPLICIT_TYPEDEF_P (b_ent);
13212   if (a_implicit || b_implicit)
13213     {
13214       /* A binding with two implicit type decls?  That's unpossible!  */
13215       gcc_checking_assert (!(a_implicit && b_implicit));
13216       return a_implicit ? -1 : +1;  /* Implicit first.  */
13217     }
13218 
13219   /* Hidden before non-hidden.  */
13220   bool a_hidden = a->is_hidden ();
13221   bool b_hidden = b->is_hidden ();
13222   if (a_hidden != b_hidden)
13223     return a_hidden ? -1 : +1;
13224 
13225   bool a_using = a->get_entity_kind () == depset::EK_USING;
13226   bool a_export;
13227   if (a_using)
13228     {
13229       a_export = OVL_EXPORT_P (a_ent);
13230       a_ent = OVL_FUNCTION (a_ent);
13231     }
13232   else
13233     a_export = DECL_MODULE_EXPORT_P (TREE_CODE (a_ent) == CONST_DECL
13234 				     ? TYPE_NAME (TREE_TYPE (a_ent))
13235 				     : STRIP_TEMPLATE (a_ent));
13236 
13237   bool b_using = b->get_entity_kind () == depset::EK_USING;
13238   bool b_export;
13239   if (b_using)
13240     {
13241       b_export = OVL_EXPORT_P (b_ent);
13242       b_ent = OVL_FUNCTION (b_ent);
13243     }
13244   else
13245     b_export = DECL_MODULE_EXPORT_P (TREE_CODE (b_ent) == CONST_DECL
13246 				     ? TYPE_NAME (TREE_TYPE (b_ent))
13247 				     : STRIP_TEMPLATE (b_ent));
13248 
13249   /* Non-exports before exports.  */
13250   if (a_export != b_export)
13251     return a_export ? +1 : -1;
13252 
13253   /* At this point we don't care, but want a stable sort.  */
13254 
13255   if (a_using != b_using)
13256     /* using first.  */
13257     return a_using ? -1 : +1;
13258 
13259   return DECL_UID (a_ent) < DECL_UID (b_ent) ? -1 : +1;
13260 }
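/* Editorial illustration, not part of the original source: a binding for
   the name "f" that holds

     struct f { };                  // implicit typedef
     a hidden friend void f (int);  // hidden
     void f (long);                 // module linkage, not exported
     export void f (short);         // exported

   sorts as: implicit typedef, hidden decl, non-exported decl,
   exported decl.  */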
13261 
13262 /* Sort the bindings, issue errors about bad internal refs.  */
13263 
13264 bool
13265 depset::hash::finalize_dependencies ()
13266 {
13267   bool ok = true;
13268   depset::hash::iterator end (this->end ());
13269   for (depset::hash::iterator iter (begin ()); iter != end; ++iter)
13270     {
13271       depset *dep = *iter;
13272       if (dep->is_binding ())
13273 	{
13274 	  /* Keep the containing namespace dep first.  */
13275 	  gcc_checking_assert (dep->deps.length () > 1
13276 			       && (dep->deps[0]->get_entity_kind ()
13277 				   == EK_NAMESPACE)
13278 			       && (dep->deps[0]->get_entity ()
13279 				   == dep->get_entity ()));
13280 	  if (dep->deps.length () > 2)
13281 	    gcc_qsort (&dep->deps[1], dep->deps.length () - 1,
13282 		       sizeof (dep->deps[1]), binding_cmp);
13283 	}
13284       else if (dep->refs_internal ())
13285 	{
13286 	  for (unsigned ix = dep->deps.length (); ix--;)
13287 	    {
13288 	      depset *rdep = dep->deps[ix];
13289 	      if (rdep->is_internal ())
13290 		{
13291 		  // FIXME:QOI Better location information?  We're
13292 		  // losing, so it doesn't matter about efficiency
13293 		  tree decl = dep->get_entity ();
13294 		  error_at (DECL_SOURCE_LOCATION (decl),
13295 			    "%q#D references internal linkage entity %q#D",
13296 			    decl, rdep->get_entity ());
13297 		  break;
13298 		}
13299 	    }
13300 	  ok = false;
13301 	}
13302     }
13303 
13304   return ok;
13305 }
13306 
13307 /* Core of TARJAN's algorithm to find Strongly Connected Components
13308    within a graph.  See https://en.wikipedia.org/wiki/
13309    Tarjan%27s_strongly_connected_components_algorithm for details.
13310 
13311    We use depset::section as lowlink.  Completed nodes have
13312    depset::cluster containing the cluster number, with the top
13313    bit set.
13314 
13315    A useful property is that the output vector is a reverse
13316    topological sort of the resulting DAG.  In our case that means
13317    dependent SCCs are found before their dependers.  We make use of
13318    that property.  */
13319 
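/* Editorial sketch, not part of the original source: for a dependency
   graph

     A -> B, B -> C, C -> A, C -> D

   connect (A) recurses through B and C, first completing D as a
   singleton SCC, then pops {C, B, A} as a single SCC sharing one
   cluster number.  Because dependent SCCs complete first, D precedes
   the {A, B, C} cluster in the result vector.  */
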
13320 void
13321 depset::tarjan::connect (depset *v)
13322 {
13323   gcc_checking_assert (v->is_binding ()
13324 		       || !(v->is_unreached () || v->is_import ()));
13325 
13326   v->cluster = v->section = ++index;
13327   stack.safe_push (v);
13328 
13329   /* Walk all our dependencies, ignoring a first marked slot.  */
13330   for (unsigned ix = v->is_special (); ix != v->deps.length (); ix++)
13331     {
13332       depset *dep = v->deps[ix];
13333 
13334       if (dep->is_binding () || !dep->is_import ())
13335 	{
13336 	  unsigned lwm = dep->cluster;
13337 
13338 	  if (!dep->cluster)
13339 	    {
13340 	      /* A new node.  Connect it.  */
13341 	      connect (dep);
13342 	      lwm = dep->section;
13343 	    }
13344 
13345 	  if (dep->section && v->section > lwm)
13346 	    v->section = lwm;
13347 	}
13348     }
13349 
13350   if (v->section == v->cluster)
13351     {
13352       /* Root of a new SCC.  Push all the members onto the result list. */
13353       unsigned num = v->cluster;
13354       depset *p;
13355       do
13356 	{
13357 	  p = stack.pop ();
13358 	  p->cluster = num;
13359 	  p->section = 0;
13360 	  result.quick_push (p);
13361 	}
13362       while (p != v);
13363     }
13364 }
13365 
13366 /* Compare two depsets.  The specific ordering is unimportant, we're
13367    just trying to get consistency.  */
13368 
13369 static int
13370 depset_cmp (const void *a_, const void *b_)
13371 {
13372   depset *a = *(depset *const *)a_;
13373   depset *b = *(depset *const *)b_;
13374 
13375   depset::entity_kind a_kind = a->get_entity_kind ();
13376   depset::entity_kind b_kind = b->get_entity_kind ();
13377 
13378   if  (a_kind != b_kind)
13379     /* Different entity kinds, order by that.  */
13380     return a_kind < b_kind ? -1 : +1;
13381 
13382   tree a_decl = a->get_entity ();
13383   tree b_decl = b->get_entity ();
13384   if (a_kind == depset::EK_USING)
13385     {
13386       /* If one is a using, the other must be too.  */
13387       a_decl = OVL_FUNCTION (a_decl);
13388       b_decl = OVL_FUNCTION (b_decl);
13389     }
13390 
13391   if (a_decl != b_decl)
13392     /* Different entities, order by their UID.  */
13393     return DECL_UID (a_decl) < DECL_UID (b_decl) ? -1 : +1;
13394 
13395   if (a_kind == depset::EK_BINDING)
13396     {
13397       /* Both are bindings.  Order by identifier hash.  */
13398       gcc_checking_assert (a->get_name () != b->get_name ());
13399       return (IDENTIFIER_HASH_VALUE (a->get_name ())
13400 	      < IDENTIFIER_HASH_VALUE (b->get_name ())
13401 	      ? -1 : +1);
13402     }
13403 
13404   /* They are the same decl.  This can happen with two using decls
13405      pointing to the same target.  The best we can aim for is
13406      consistently telling qsort how to order them.  Hopefully we'll
13407      never have to debug a case that depends on this.  Oh, who am I
13408      kidding?  Good luck.  */
13409   gcc_checking_assert (a_kind == depset::EK_USING);
13410 
13411   /* Order by depset address.  Not the best, but it is something.  */
13412   return a < b ? -1 : +1;
13413 }
13414 
13415 /* Sort the clusters in SCC such that those that depend on one another
13416    are placed later.   */
13417 
13418 // FIXME: I am not convinced this is needed and, if needed,
13419 // sufficient.  We emit the decls in this order but that emission
13420 // could walk into later decls (from the body of the decl, or default
13421 // arg-like things).  Why doesn't that walk do the right thing?  And
13422 // if it DTRT why do we need to sort here -- won't things naturally
13423 // work?  I think part of the issue is that when we're going to refer
13424 // to an entity by name, and that entity is in the same cluster as us,
13425 // we need to actually walk that entity, if we've not already walked
13426 // it.
13427 static void
13428 sort_cluster (depset::hash *original, depset *scc[], unsigned size)
13429 {
13430   depset::hash table (size, original);
13431 
13432   dump.indent ();
13433 
13434   /* Place bindings last, usings before that.  It's not strictly
13435      necessary, but it does make things neater.  Says Mr OCD.  */
13436   unsigned bind_lwm = size;
13437   unsigned use_lwm = size;
13438   for (unsigned ix = 0; ix != use_lwm;)
13439     {
13440       depset *dep = scc[ix];
13441       switch (dep->get_entity_kind ())
13442 	{
13443 	case depset::EK_BINDING:
13444 	  /* Move to end.  No increment.  Notice this could be moving
13445 	     a using decl, which we'll then move again.  */
13446 	  if (--bind_lwm != ix)
13447 	    {
13448 	      scc[ix] = scc[bind_lwm];
13449 	      scc[bind_lwm] = dep;
13450 	    }
13451 	  if (use_lwm > bind_lwm)
13452 	    {
13453 	      use_lwm--;
13454 	      break;
13455 	    }
13456 	  /* We must have copied a using, so move it too.  */
13457 	  dep = scc[ix];
13458 	  gcc_checking_assert (dep->get_entity_kind () == depset::EK_USING);
13459 	  /* FALLTHROUGH  */
13460 
13461 	case depset::EK_USING:
13462 	  if (--use_lwm != ix)
13463 	    {
13464 	      scc[ix] = scc[use_lwm];
13465 	      scc[use_lwm] = dep;
13466 	    }
13467 	  break;
13468 
13469 	case depset::EK_DECL:
13470 	case depset::EK_SPECIALIZATION:
13471 	case depset::EK_PARTIAL:
13472 	  table.add_mergeable (dep);
13473 	  ix++;
13474 	  break;
13475 
13476 	default:
13477 	  gcc_unreachable ();
13478 	}
13479     }
13480 
13481   gcc_checking_assert (use_lwm <= bind_lwm);
13482   dump (dumper::MERGE) && dump ("Ordering %u/%u depsets", use_lwm, size);
13483 
13484   table.find_dependencies (nullptr);
13485 
13486   vec<depset *> order = table.connect ();
13487   gcc_checking_assert (order.length () == use_lwm);
13488 
13489   /* Now rewrite entries [0,lwm), in the dependency order we
13490      discovered.  Usually each entity is in its own cluster.  Rarely,
13491      we can get multi-entity clusters, in which case all but one must
13492      only be reached from within the cluster.  This happens for
13493      something like:
13494 
13495      template<typename T>
13496      auto Foo (const T &arg) -> TPL<decltype (arg)>;
13497 
13498      The instantiation of TPL will be in the specialization table, and
13499      refer to Foo via arg.  But we can only get to that specialization
13500      from Foo's declaration, so we only need to treat Foo as mergable
13501      (We'll do structural comparison of TPL<decltype (arg)>).
13502 
13503      Finding the single cluster entry dep is very tricky and
13504      expensive.  Let's just not do that.  It's harmless in this case
13505      anyway. */
13506   unsigned pos = 0;
13507   unsigned cluster = ~0u;
13508   for (unsigned ix = 0; ix != order.length (); ix++)
13509     {
13510       gcc_checking_assert (order[ix]->is_special ());
13511       depset *dep = order[ix]->deps[0];
13512       scc[pos++] = dep;
13513       dump (dumper::MERGE)
13514 	&& dump ("Mergeable %u is %N%s", ix, dep->get_entity (),
13515 		 order[ix]->cluster == cluster ? " (tight)" : "");
13516       cluster = order[ix]->cluster;
13517     }
13518 
13519   gcc_checking_assert (pos == use_lwm);
13520 
13521   order.release ();
13522   dump (dumper::MERGE) && dump ("Ordered %u keys", pos);
13523   dump.outdent ();
13524 }
13525 
13526 /* Reduce graph to SCCS clusters.  SCCS will be populated with the
13527    depsets in dependency order.  Each depset's CLUSTER field contains
13528    its cluster number.  Each SCC has a unique cluster number, and are
13529    contiguous in SCCS. Cluster numbers are otherwise arbitrary.  */
13530 
13531 vec<depset *>
13532 depset::hash::connect ()
13533 {
13534   tarjan connector (size ());
13535   vec<depset *> deps;
13536   deps.create (size ());
13537   iterator end (this->end ());
13538   for (iterator iter (begin ()); iter != end; ++iter)
13539     {
13540       depset *item = *iter;
13541 
13542       entity_kind kind = item->get_entity_kind ();
13543       if (kind == EK_BINDING
13544 	  || !(kind == EK_REDIRECT
13545 	       || item->is_unreached ()
13546 	       || item->is_import ()))
13547 	deps.quick_push (item);
13548     }
13549 
13550   /* Iteration over the hash table is in an unspecified order.  While
13551      that has advantages, it causes two problems.  Firstly, repeatable
13552      builds are tricky.  Secondly, it is hard to create testcases that
13553      check dependencies are correct, by ensuring a bad ordering would
13554      occur if they were wrong.  */
13555   deps.qsort (depset_cmp);
13556 
13557   while (deps.length ())
13558     {
13559       depset *v = deps.pop ();
13560       dump (dumper::CLUSTER) &&
13561 	(v->is_binding ()
13562 	 ? dump ("Connecting binding %P", v->get_entity (), v->get_name ())
13563 	 : dump ("Connecting %s %s %C:%N",
13564 		 is_key_order () ? "key-order"
13565 		 : !v->has_defn () ? "declaration" : "definition",
13566 		 v->entity_kind_name (), TREE_CODE (v->get_entity ()),
13567 		 v->get_entity ()));
13568       if (!v->cluster)
13569 	connector.connect (v);
13570     }
13571 
13572   deps.release ();
13573   return connector.result;
13574 }
13575 
13576 /* Initialize location spans.  */
13577 
13578 void
13579 loc_spans::init (const line_maps *lmaps, const line_map_ordinary *map)
13580 {
13581   gcc_checking_assert (!init_p ());
13582   spans = new vec<span> ();
13583   spans->reserve (20);
13584 
13585   span interval;
13586   interval.ordinary.first = 0;
13587   interval.macro.second = MAX_LOCATION_T + 1;
13588   interval.ordinary_delta = interval.macro_delta = 0;
13589 
13590   /* A span for reserved fixed locs.  */
13591   interval.ordinary.second
13592     = MAP_START_LOCATION (LINEMAPS_ORDINARY_MAP_AT (line_table, 0));
13593   interval.macro.first = interval.macro.second;
13594   dump (dumper::LOCATION)
13595     && dump ("Fixed span %u ordinary:[%u,%u) macro:[%u,%u)", spans->length (),
13596 	     interval.ordinary.first, interval.ordinary.second,
13597 	     interval.macro.first, interval.macro.second);
13598   spans->quick_push (interval);
13599 
13600   /* A span for command line & forced headers.  */
13601   interval.ordinary.first = interval.ordinary.second;
13602   interval.macro.second = interval.macro.first;
13603   if (map)
13604     {
13605       interval.ordinary.second = map->start_location;
13606       interval.macro.first = LINEMAPS_MACRO_LOWEST_LOCATION (lmaps);
13607     }
13608   dump (dumper::LOCATION)
13609     && dump ("Pre span %u ordinary:[%u,%u) macro:[%u,%u)", spans->length (),
13610 	     interval.ordinary.first, interval.ordinary.second,
13611 	     interval.macro.first, interval.macro.second);
13612   spans->quick_push (interval);
13613 
13614   /* Start an interval for the main file.  */
13615   interval.ordinary.first = interval.ordinary.second;
13616   interval.macro.second = interval.macro.first;
13617   dump (dumper::LOCATION)
13618     && dump ("Main span %u ordinary:[%u,*) macro:[*,%u)", spans->length (),
13619 	     interval.ordinary.first, interval.macro.second);
13620   spans->quick_push (interval);
13621 }
13622 
13623 /* Reopen the span, if we want the about-to-be-inserted set of maps to
13624    be propagated in our own location table.  I.e. we are the primary
13625    interface and we're importing a partition.  */
13626 
13627 bool
13628 loc_spans::maybe_propagate (module_state *import, location_t hwm)
13629 {
13630   bool opened = (module_interface_p () && !module_partition_p ()
13631 		 && import->is_partition ());
13632   if (opened)
13633     open (hwm);
13634   return opened;
13635 }
13636 
13637 /* Open a new linemap interval.  The just-created ordinary map is the
13638    first map of the interval.  */
13639 
13640 void
13641 loc_spans::open (location_t hwm)
13642 {
13643   span interval;
13644   interval.ordinary.first = interval.ordinary.second = hwm;
13645   interval.macro.first = interval.macro.second
13646     = LINEMAPS_MACRO_LOWEST_LOCATION (line_table);
13647   interval.ordinary_delta = interval.macro_delta = 0;
13648   dump (dumper::LOCATION)
13649     && dump ("Opening span %u ordinary:[%u,... macro:...,%u)",
13650 	     spans->length (), interval.ordinary.first,
13651 	     interval.macro.second);
13652   if (spans->length ())
13653     {
13654       /* No overlapping!  */
13655       auto &last = spans->last ();
13656       gcc_checking_assert (interval.ordinary.first >= last.ordinary.second);
13657       gcc_checking_assert (interval.macro.second <= last.macro.first);
13658     }
13659   spans->safe_push (interval);
13660 }
13661 
13662 /* Close out the current linemap interval.  The last maps are within
13663    the interval.  */
13664 
13665 void
13666 loc_spans::close ()
13667 {
13668   span &interval = spans->last ();
13669 
13670   interval.ordinary.second
13671     = ((line_table->highest_location + (1 << line_table->default_range_bits))
13672        & ~((1u << line_table->default_range_bits) - 1));
13673   interval.macro.first = LINEMAPS_MACRO_LOWEST_LOCATION (line_table);
13674   dump (dumper::LOCATION)
13675     && dump ("Closing span %u ordinary:[%u,%u) macro:[%u,%u)",
13676 	     spans->length () - 1,
13677 	     interval.ordinary.first,interval.ordinary.second,
13678 	     interval.macro.first, interval.macro.second);
13679 }
13680 
13681 /* Given an ordinary location LOC, return the lmap_interval it resides
13682    in.  NULL if it is not in an interval.  */
13683 
13684 const loc_spans::span *
13685 loc_spans::ordinary (location_t loc)
13686 {
13687   unsigned len = spans->length ();
13688   unsigned pos = 0;
13689   while (len)
13690     {
13691       unsigned half = len / 2;
13692       const span &probe = (*spans)[pos + half];
13693       if (loc < probe.ordinary.first)
13694 	len = half;
13695       else if (loc < probe.ordinary.second)
13696 	return &probe;
13697       else
13698 	{
13699 	  pos += half + 1;
13700 	  len = len - (half + 1);
13701 	}
13702     }
13703   return NULL;
13704 }
13705 
13706 /* Likewise, given a macro location LOC, return the lmap interval it
13707    resides in.   */
13708 
13709 const loc_spans::span *
13710 loc_spans::macro (location_t loc)
13711 {
13712   unsigned len = spans->length ();
13713   unsigned pos = 0;
13714   while (len)
13715     {
13716       unsigned half = len / 2;
13717       const span &probe = (*spans)[pos + half];
13718       if (loc >= probe.macro.second)
13719 	len = half;
13720       else if (loc >= probe.macro.first)
13721 	return &probe;
13722       else
13723 	{
13724 	  pos += half + 1;
13725 	  len = len - (half + 1);
13726 	}
13727     }
13728   return NULL;
13729 }
13730 
13731 /* Return the ordinary location closest to FROM.  */
13732 
13733 static location_t
13734 ordinary_loc_of (line_maps *lmaps, location_t from)
13735 {
13736   while (!IS_ORDINARY_LOC (from))
13737     {
13738       if (IS_ADHOC_LOC (from))
13739 	from = get_location_from_adhoc_loc (lmaps, from);
13740       if (from >= LINEMAPS_MACRO_LOWEST_LOCATION (lmaps))
13741 	{
13742 	  /* Find the ordinary location nearest FROM.  */
13743 	  const line_map *map = linemap_lookup (lmaps, from);
13744 	  const line_map_macro *mac_map = linemap_check_macro (map);
13745 	  from = MACRO_MAP_EXPANSION_POINT_LOCATION (mac_map);
13746 	}
13747     }
13748   return from;
13749 }
13750 
13751 static module_state **
13752 get_module_slot (tree name, module_state *parent, bool partition, bool insert)
13753 {
13754   module_state_hash::compare_type ct (name, uintptr_t (parent) | partition);
13755   hashval_t hv = module_state_hash::hash (ct);
13756 
13757   return modules_hash->find_slot_with_hash (ct, hv, insert ? INSERT : NO_INSERT);
13758 }
13759 
13760 static module_state *
13761 get_primary (module_state *parent)
13762 {
13763   while (parent->is_partition ())
13764     parent = parent->parent;
13765 
13766   if (!parent->name)
13767     // Implementation unit has null name
13768     parent = parent->parent;
13769 
13770   return parent;
13771 }
13772 
13773 /* Find or create module NAME & PARENT in the hash table.  */
13774 
13775 module_state *
13776 get_module (tree name, module_state *parent, bool partition)
13777 {
13778   if (partition)
13779     {
13780       if (!parent)
13781 	parent = get_primary ((*modules)[0]);
13782 
13783       if (!parent->is_partition () && !parent->flatname)
13784 	parent->set_flatname ();
13785     }
13786 
13787   module_state **slot = get_module_slot (name, parent, partition, true);
13788   module_state *state = *slot;
13789   if (!state)
13790     {
13791       state = (new (ggc_alloc<module_state> ())
13792 	       module_state (name, parent, partition));
13793       *slot = state;
13794     }
13795   return state;
13796 }
13797 
13798 /* Process string name PTR into a module_state.  */
13799 
13800 static module_state *
13801 get_module (const char *ptr)
13802 {
13803   if (ptr[0] == '.' ? IS_DIR_SEPARATOR (ptr[1]) : IS_ABSOLUTE_PATH (ptr))
13804     /* A header name.  */
13805     return get_module (build_string (strlen (ptr), ptr));
13806 
13807   bool partition = false;
13808   module_state *mod = NULL;
13809 
13810   for (const char *probe = ptr;; probe++)
13811     if (!*probe || *probe == '.' || *probe == ':')
13812       {
13813 	if (probe == ptr)
13814 	  return NULL;
13815 
13816 	mod = get_module (get_identifier_with_length (ptr, probe - ptr),
13817 			  mod, partition);
13818 	ptr = probe;
13819 	if (*ptr == ':')
13820 	  {
13821 	    if (partition)
13822 	      return NULL;
13823 	    partition = true;
13824 	  }
13825 
13826 	if (!*ptr++)
13827 	  break;
13828       }
13829     else if (!(ISALPHA (*probe) || *probe == '_'
13830 	       || (probe != ptr && ISDIGIT (*probe))))
13831       return NULL;
13832 
13833   return mod;
13834 }
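/* Editorial examples, not part of the original source, of strings the
   parser above accepts:
     "foo"       -> module foo
     "foo.bar"   -> module foo.bar
     "foo:part"  -> partition part of module foo
     "./hdr.h" or an absolute path -> a header unit (the build_string
                    path at the top)
   An empty component, a component starting with a digit, or a second
   ':' yields NULL.  */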
13835 
13836 /* Create a new mapper connecting to OPTION.  */
13837 
13838 module_client *
13839 make_mapper (location_t loc)
13840 {
13841   timevar_start (TV_MODULE_MAPPER);
13842   const char *option = module_mapper_name;
13843   if (!option)
13844     option = getenv ("CXX_MODULE_MAPPER");
13845 
13846   mapper = module_client::open_module_client
13847     (loc, option, &set_cmi_repo,
13848      (save_decoded_options[0].opt_index == OPT_SPECIAL_program_name)
13849      && save_decoded_options[0].arg != progname
13850      ? save_decoded_options[0].arg : nullptr);
13851 
13852   timevar_stop (TV_MODULE_MAPPER);
13853 
13854   return mapper;
13855 }
13856 
13857 static unsigned lazy_snum;
13858 
13859 static bool
13860 recursive_lazy (unsigned snum = ~0u)
13861 {
13862   if (lazy_snum)
13863     {
13864       error_at (input_location, "recursive lazy load");
13865       return true;
13866     }
13867 
13868   lazy_snum = snum;
13869   return false;
13870 }
13871 
13872 /* If THIS is the current purview, issue an import error and return false.  */
13873 
13874 bool
13875 module_state::check_not_purview (location_t from)
13876 {
13877   module_state *imp = (*modules)[0];
13878   if (imp && !imp->name)
13879     imp = imp->parent;
13880   if (imp == this)
13881     {
13882       /* Cannot import the current module.  */
13883       error_at (from, "cannot import module in its own purview");
13884       inform (loc, "module %qs declared here", get_flatname ());
13885       return false;
13886     }
13887   return true;
13888 }
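/* Editorial example, not part of the original source: a TU containing

     export module M;
     import M;   // error: cannot import module in its own purview

   reaches the diagnostic above, since the import names the module
   currently being compiled.  */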
13889 
13890 /* Module name substitutions.  */
13891 static vec<module_state *,va_heap> substs;
13892 
13893 void
13894 module_state::mangle (bool include_partition)
13895 {
13896   if (subst)
13897     mangle_module_substitution (subst);
13898   else
13899     {
13900       if (parent)
13901 	parent->mangle (include_partition);
13902       if (include_partition || !is_partition ())
13903 	{
13904 	  // Partitions are significant for global initializer
13905 	  // functions
13906 	  bool partition = is_partition () && !parent->is_partition ();
13907 	  subst = mangle_module_component (name, partition);
13908 	  substs.safe_push (this);
13909 	}
13910     }
13911 }
13912 
13913 void
13914 mangle_module (int mod, bool include_partition)
13915 {
13916   module_state *imp = (*modules)[mod];
13917 
13918   gcc_checking_assert (!imp->is_header ());
13919 
13920   if (!imp->name)
13921     /* Set when importing the primary module interface.  */
13922     imp = imp->parent;
13923 
13924   imp->mangle (include_partition);
13925 }
13926 
13927 /* Clean up substitutions.  */
13928 void
13929 mangle_module_fini ()
13930 {
13931   while (substs.length ())
13932     substs.pop ()->subst = 0;
13933 }
13934 
13935 /* Announce WHAT about the module.  */
13936 
13937 void
13938 module_state::announce (const char *what) const
13939 {
13940   if (noisy_p ())
13941     {
13942       fprintf (stderr, " %s:%s", what, get_flatname ());
13943       fflush (stderr);
13944     }
13945 }
13946 
13947 /* A human-readable README section.  The contents of this section do
13948    not contribute to the CRC, so the contents can change per
13949    compilation.  That allows us to embed CWD, hostname, build time and
13950    what not.  It is a STRTAB that may be extracted with:
13951      readelf -pgnu.c++.README $(module).gcm */
13952 
13953 void
13954 module_state::write_readme (elf_out *to, cpp_reader *reader,
13955 			    const char *dialect, unsigned extensions)
13956 {
13957   bytes_out readme (to);
13958 
13959   readme.begin (false);
13960 
13961   readme.printf ("GNU C++ %smodule%s%s",
13962 		 is_header () ? "header " : is_partition () ? "" : "primary ",
13963 		 is_header () ? ""
13964 		 : is_interface () ? " interface" : " implementation",
13965 		 is_partition () ? " partition" : "");
13966 
13967   /* Compiler's version.  */
13968   readme.printf ("compiler: %s", version_string);
13969 
13970   /* Module format version.  */
13971   verstr_t string;
13972   version2string (MODULE_VERSION, string);
13973   readme.printf ("version: %s", string);
13974 
13975   /* Module information.  */
13976   readme.printf ("module: %s", get_flatname ());
13977   readme.printf ("source: %s", main_input_filename);
13978   readme.printf ("dialect: %s", dialect);
13979   if (extensions)
13980     readme.printf ("extensions: %s",
13981 		   extensions & SE_OPENMP ? "-fopenmp" : "");
13982 
13983   /* The following fields could be expected to change between
13984      otherwise identical compilations.  Consider a distributed build
13985      system.  We should have a way of overriding that.  */
13986   if (char *cwd = getcwd (NULL, 0))
13987     {
13988       readme.printf ("cwd: %s", cwd);
13989       free (cwd);
13990     }
13991   readme.printf ("repository: %s", cmi_repo ? cmi_repo : ".");
13992 #if NETWORKING
13993   {
13994     char hostname[64];
13995     if (!gethostname (hostname, sizeof (hostname)))
13996       readme.printf ("host: %s", hostname);
13997   }
13998 #endif
13999   {
14000     /* This of course will change!  */
14001     time_t stampy;
14002     auto kind = cpp_get_date (reader, &stampy);
14003     if (kind != CPP_time_kind::UNKNOWN)
14004       {
14005 	struct tm *time;
14006 
14007 	time = gmtime (&stampy);
14008 	readme.print_time ("build", time, "UTC");
14009 
14010 	if (kind == CPP_time_kind::DYNAMIC)
14011 	  {
14012 	    time = localtime (&stampy);
14013 	    readme.print_time ("local", time,
14014 #if defined (__USE_MISC) || defined (__USE_BSD) /* Is there a better way?  */
14015 			       time->tm_zone
14016 #else
14017 			       ""
14018 #endif
14019 			       );
14020 	  }
14021       }
14022   }
14023 
14024   /* Its direct imports.  */
14025   for (unsigned ix = 1; ix < modules->length (); ix++)
14026     {
14027       module_state *state = (*modules)[ix];
14028 
14029       if (state->is_direct ())
14030 	readme.printf ("%s: %s %s", state->exported_p ? "export" : "import",
14031 		       state->get_flatname (), state->filename);
14032     }
14033 
14034   readme.end (to, to->name (MOD_SNAME_PFX ".README"), NULL);
14035 }
14036 
14037 /* Sort environment var names in reverse order.  */
14038 
14039 static int
14040 env_var_cmp (const void *a_, const void *b_)
14041 {
14042   const unsigned char *a = *(const unsigned char *const *)a_;
14043   const unsigned char *b = *(const unsigned char *const *)b_;
14044 
14045   for (unsigned ix = 0; ; ix++)
14046     {
14047       bool a_end = !a[ix] || a[ix] == '=';
14048       if (a[ix] == b[ix])
14049 	{
14050 	  if (a_end)
14051 	    break;
14052 	}
14053       else
14054 	{
14055 	  bool b_end = !b[ix] || b[ix] == '=';
14056 
14057 	  if (!a_end && !b_end)
14058 	    return a[ix] < b[ix] ? +1 : -1;
14059 	  if (a_end && b_end)
14060 	    break;
14061 	  return a_end ? +1 : -1;
14062 	}
14063     }
14064 
14065   return 0;
14066 }
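/* Editorial example, not part of the original source: under this
   comparator "PATH=/bin" sorts before "HOME=/root" (names compare in
   reverse order, 'P' > 'H'), and a name that is a prefix of another
   (e.g. "CC" vs "CCACHE_DIR") sorts after the longer one.  */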
14067 
14068 /* Write the environment. It is a STRTAB that may be extracted with:
14069      readelf -pgnu.c++.ENV $(module).gcm */
14070 
14071 void
14072 module_state::write_env (elf_out *to)
14073 {
14074   vec<const char *> vars;
14075   vars.create (20);
14076 
14077   extern char **environ;
14078   while (const char *var = environ[vars.length ()])
14079     vars.safe_push (var);
14080   vars.qsort (env_var_cmp);
14081 
14082   bytes_out env (to);
14083   env.begin (false);
14084   while (vars.length ())
14085     env.printf ("%s", vars.pop ());
14086   env.end (to, to->name (MOD_SNAME_PFX ".ENV"), NULL);
14087 
14088   vars.release ();
14089 }
14090 
14091 /* Write the direct or indirect imports.
14092    u:N
14093    {
14094      u:index
14095      s:name
14096      u32:crc
14097      s:filename (direct)
14098      u:exported (direct)
14099    } imports[N]
14100  */
14101 
14102 void
14103 module_state::write_imports (bytes_out &sec, bool direct)
14104 {
14105   unsigned count = 0;
14106 
14107   for (unsigned ix = 1; ix < modules->length (); ix++)
14108     {
14109       module_state *imp = (*modules)[ix];
14110 
14111       if (imp->remap && imp->is_direct () == direct)
14112 	count++;
14113     }
14114 
14115   gcc_assert (!direct || count);
14116 
14117   sec.u (count);
14118   for (unsigned ix = 1; ix < modules->length (); ix++)
14119     {
14120       module_state *imp = (*modules)[ix];
14121 
14122       if (imp->remap && imp->is_direct () == direct)
14123 	{
14124 	  dump () && dump ("Writing %simport:%u->%u %M (crc=%x)",
14125 			   !direct ? "indirect "
14126 			   : imp->exported_p ? "exported " : "",
14127 			   ix, imp->remap, imp, imp->crc);
14128 	  sec.u (imp->remap);
14129 	  sec.str (imp->get_flatname ());
14130 	  sec.u32 (imp->crc);
14131 	  if (direct)
14132 	    {
14133 	      write_location (sec, imp->imported_from ());
14134 	      sec.str (imp->filename);
14135 	      int exportedness = 0;
14136 	      if (imp->exported_p)
14137 		exportedness = +1;
14138 	      else if (!imp->is_purview_direct ())
14139 		exportedness = -1;
14140 	      sec.i (exportedness);
14141 	    }
14142 	}
14143     }
14144 }
14145 
14146 /* READER, LMAPS != NULL == direct imports,
14147    == NULL == indirect imports.  */
14148 
14149 unsigned
14150 module_state::read_imports (bytes_in &sec, cpp_reader *reader, line_maps *lmaps)
14151 {
14152   unsigned count = sec.u ();
14153   unsigned loaded = 0;
14154 
14155   while (count--)
14156     {
14157       unsigned ix = sec.u ();
14158       if (ix >= slurp->remap->length () || !ix || (*slurp->remap)[ix])
14159 	{
14160 	  sec.set_overrun ();
14161 	  break;
14162 	}
14163 
14164       const char *name = sec.str (NULL);
14165       module_state *imp = get_module (name);
14166       unsigned crc = sec.u32 ();
14167       int exportedness = 0;
14168 
14169       /* If the import is a partition, it must be the same primary
14170 	 module as this TU.  */
14171       if (imp && imp->is_partition () &&
14172 	  (!named_module_p ()
14173 	   || (get_primary ((*modules)[0]) != get_primary (imp))))
14174 	imp = NULL;
14175 
14176       if (!imp)
14177 	sec.set_overrun ();
14178       if (sec.get_overrun ())
14179 	break;
14180 
14181       if (lmaps)
14182 	{
14183 	  /* A direct import, maybe load it.  */
14184 	  location_t floc = read_location (sec);
14185 	  const char *fname = sec.str (NULL);
14186 	  exportedness = sec.i ();
14187 
14188 	  if (sec.get_overrun ())
14189 	    break;
14190 
14191 	  if (!imp->check_not_purview (loc))
14192 	    continue;
14193 
14194 	  if (imp->loadedness == ML_NONE)
14195 	    {
14196 	      imp->loc = floc;
14197 	      imp->crc = crc;
14198 	      if (!imp->get_flatname ())
14199 		imp->set_flatname ();
14200 
14201 	      unsigned n = dump.push (imp);
14202 
14203 	      if (!imp->filename && fname)
14204 		imp->filename = xstrdup (fname);
14205 
14206 	      if (imp->is_partition ())
14207 		dump () && dump ("Importing elided partition %M", imp);
14208 
14209 	      if (!imp->do_import (reader, false))
14210 		imp = NULL;
14211 	      dump.pop (n);
14212 	      if (!imp)
14213 		continue;
14214 	    }
14215 
14216 	  if (is_partition ())
14217 	    {
14218 	      if (!imp->is_direct ())
14219 		imp->directness = MD_PARTITION_DIRECT;
14220 	      if (exportedness > 0)
14221 		imp->exported_p = true;
14222 	    }
14223 	}
14224       else
14225 	{
14226 	  /* An indirect import, find it, it should already be here.  */
14227 	  if (imp->loadedness == ML_NONE)
14228 	    {
14229 	      error_at (loc, "indirect import %qs is not already loaded", name);
14230 	      continue;
14231 	    }
14232 	}
14233 
14234       if (imp->crc != crc)
14235 	error_at (loc, "import %qs has CRC mismatch", imp->get_flatname ());
14236 
14237       (*slurp->remap)[ix] = (imp->mod << 1) | (lmaps != NULL);
14238 
14239       if (lmaps && exportedness >= 0)
14240 	set_import (imp, bool (exportedness));
14241       dump () && dump ("Found %simport:%u %M->%u", !lmaps ? "indirect "
14242 		       : exportedness > 0 ? "exported "
14243 		       : exportedness < 0 ? "gmf" : "", ix, imp,
14244 		       imp->mod);
14245       loaded++;
14246     }
14247 
14248   return loaded;
14249 }
14250 
14251 /* Write the import table to MOD_SNAME_PFX.imp.  */
14252 
14253 void
14254 module_state::write_imports (elf_out *to, unsigned *crc_ptr)
14255 {
14256   dump () && dump ("Writing imports");
14257   dump.indent ();
14258 
14259   bytes_out sec (to);
14260   sec.begin ();
14261 
14262   write_imports (sec, true);
14263   write_imports (sec, false);
14264 
14265   sec.end (to, to->name (MOD_SNAME_PFX ".imp"), crc_ptr);
14266   dump.outdent ();
14267 }
14268 
14269 bool
14270 module_state::read_imports (cpp_reader *reader, line_maps *lmaps)
14271 {
14272   bytes_in sec;
14273 
14274   if (!sec.begin (loc, from (), MOD_SNAME_PFX ".imp"))
14275     return false;
14276 
14277   dump () && dump ("Reading %u imports", slurp->remap->length () - 1);
14278   dump.indent ();
14279 
14280   /* Read the imports.  */
14281   unsigned direct = read_imports (sec, reader, lmaps);
14282   unsigned indirect = read_imports (sec, NULL, NULL);
14283   if (direct + indirect + 1 != slurp->remap->length ())
14284     from ()->set_error (elf::E_BAD_IMPORT);
14285 
14286   dump.outdent ();
14287   if (!sec.end (from ()))
14288     return false;
14289   return true;
14290 }
14291 
14292 /* We're the primary module interface, but have partitions.  Document
14293    them so that non-partition module implementation units know which
14294    have already been loaded.  */
14295 
14296 void
14297 module_state::write_partitions (elf_out *to, unsigned count, unsigned *crc_ptr)
14298 {
14299   dump () && dump ("Writing %u elided partitions", count);
14300   dump.indent ();
14301 
14302   bytes_out sec (to);
14303   sec.begin ();
14304 
14305   for (unsigned ix = 1; ix != modules->length (); ix++)
14306     {
14307       module_state *imp = (*modules)[ix];
14308       if (imp->is_partition ())
14309 	{
14310 	  dump () && dump ("Writing elided partition %M (crc=%x)",
14311 			   imp, imp->crc);
14312 	  sec.str (imp->get_flatname ());
14313 	  sec.u32 (imp->crc);
14314 	  write_location (sec, imp->is_direct ()
14315 			  ? imp->imported_from () : UNKNOWN_LOCATION);
14316 	  sec.str (imp->filename);
14317 	}
14318     }
14319 
14320   sec.end (to, to->name (MOD_SNAME_PFX ".prt"), crc_ptr);
14321   dump.outdent ();
14322 }
14323 
14324 bool
14325 module_state::read_partitions (unsigned count)
14326 {
14327   bytes_in sec;
14328   if (!sec.begin (loc, from (), MOD_SNAME_PFX ".prt"))
14329     return false;
14330 
14331   dump () && dump ("Reading %u elided partitions", count);
14332   dump.indent ();
14333 
14334   while (count--)
14335     {
14336       const char *name = sec.str (NULL);
14337       unsigned crc = sec.u32 ();
14338       location_t floc = read_location (sec);
14339       const char *fname = sec.str (NULL);
14340 
14341       if (sec.get_overrun ())
14342 	break;
14343 
14344       dump () && dump ("Reading elided partition %s (crc=%x)", name, crc);
14345 
14346       module_state *imp = get_module (name);
14347       if (!imp	/* Partition should be ...  */
14348 	  || !imp->is_partition () /* a partition ...  */
14349 	  || imp->loadedness != ML_NONE  /* that is not yet loaded ...  */
14350 	  || get_primary (imp) != this) /* whose primary is this.  */
14351 	{
14352 	  sec.set_overrun ();
14353 	  break;
14354 	}
14355 
14356       if (!imp->has_location ())
14357 	imp->loc = floc;
14358       imp->crc = crc;
14359       if (!imp->filename && fname[0])
14360 	imp->filename = xstrdup (fname);
14361     }
14362 
14363   dump.outdent ();
14364   if (!sec.end (from ()))
14365     return false;
14366   return true;
14367 }
14368 
14369 /* Counter indices.  */
14370 enum module_state_counts
14371 {
14372   MSC_sec_lwm,
14373   MSC_sec_hwm,
14374   MSC_pendings,
14375   MSC_entities,
14376   MSC_namespaces,
14377   MSC_bindings,
14378   MSC_macros,
14379   MSC_inits,
14380   MSC_HWM
14381 };
14382 
14383 /* Data for config reading and writing.  */
14384 struct module_state_config {
14385   const char *dialect_str;
14386   unsigned num_imports;
14387   unsigned num_partitions;
14388   unsigned num_entities;
14389   unsigned ordinary_locs;
14390   unsigned macro_locs;
14391   unsigned ordinary_loc_align;
14392 
14393 public:
14394   module_state_config ()
14395     :dialect_str (get_dialect ()),
14396      num_imports (0), num_partitions (0), num_entities (0),
14397      ordinary_locs (0), macro_locs (0), ordinary_loc_align (0)
14398   {
14399   }
14400 
14401   static void release ()
14402   {
14403     XDELETEVEC (dialect);
14404     dialect = NULL;
14405   }
14406 
14407 private:
14408   static const char *get_dialect ();
14409   static char *dialect;
14410 };
14411 
14412 char *module_state_config::dialect;
14413 
14414 /* Generate a string of the significant compilation options.
14415    Generally assume the user knows what they're doing, in the same way
14416    that object files can be mixed.  */
14417 
14418 const char *
14419 module_state_config::get_dialect ()
14420 {
14421   if (!dialect)
14422     dialect = concat (get_cxx_dialect_name (cxx_dialect),
14423 		      /* C++ implies these, only show if disabled.  */
14424 		      flag_exceptions ? "" : "/no-exceptions",
14425 		      flag_rtti ? "" : "/no-rtti",
14426 		      flag_new_inheriting_ctors ? "" : "/old-inheriting-ctors",
14427 		      /* C++ 20 implies concepts.  */
14428 		      cxx_dialect < cxx20 && flag_concepts ? "/concepts" : "",
14429 		      flag_coroutines ? "/coroutines" : "",
14430 		      flag_module_implicit_inline ? "/implicit-inline" : "",
14431 		      NULL);
14432 
14433   return dialect;
14434 }
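
/* Editorial illustration (not in the original source): for a TU built
   with, say, -std=c++20 -fno-exceptions the string assembled above might
   resemble "C++20/no-exceptions/coroutines"; the exact leading token comes
   from get_cxx_dialect_name and the tail depends on the dialect's flag
   defaults, so treat this only as an approximation.  */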
14435 
14436 /* Contents of a cluster.  */
14437 enum cluster_tag {
14438   ct_decl,	/* A decl.  */
14439   ct_defn,	/* A definition.  */
14440   ct_bind,	/* A binding.  */
14441   ct_hwm
14442 };
14443 
14444 /* Binding modifiers.  */
14445 enum ct_bind_flags
14446 {
14447   cbf_export = 0x1,	/* An exported decl.  */
14448   cbf_hidden = 0x2,	/* A hidden (friend) decl.  */
14449   cbf_using = 0x4,	/* A using decl.  */
14450   cbf_wrapped = 0x8,  	/* ... that is wrapped.  */
14451 };
14452 
14453 /* DEP belongs to a different cluster; seed it to prevent an
14454    unfortunately timed duplicate import.  */
14455 // FIXME: QOI For inter-cluster references we could pick just one
14456 // entity from an earlier cluster.  Even better, track
14457 // dependencies between earlier clusters.
14458 
14459 void
14460 module_state::intercluster_seed (trees_out &sec, unsigned index_hwm, depset *dep)
14461 {
14462   if (dep->is_import ()
14463       || dep->cluster < index_hwm)
14464     {
14465       tree ent = dep->get_entity ();
14466       if (!TREE_VISITED (ent))
14467 	{
14468 	  sec.tree_node (ent);
14469 	  dump (dumper::CLUSTER)
14470 	    && dump ("Seeded %s %N",
14471 		     dep->is_import () ? "import" : "intercluster", ent);
14472 	}
14473     }
14474 }
14475 
14476 /* Write the cluster of depsets in SCC[0-SIZE).
14477    dep->section -> section number
14478    dep->cluster -> entity number
14479  */
14480 
14481 unsigned
14482 module_state::write_cluster (elf_out *to, depset *scc[], unsigned size,
14483 			     depset::hash &table, unsigned *counts,
14484 			     unsigned *crc_ptr)
14485 {
14486   dump () && dump ("Writing section:%u %u depsets", table.section, size);
14487   dump.indent ();
14488 
14489   trees_out sec (to, this, table, table.section);
14490   sec.begin ();
14491   unsigned index_lwm = counts[MSC_entities];
14492 
14493   /* Determine entity numbers, mark for writing.   */
14494   dump (dumper::CLUSTER) && dump ("Cluster members:") && (dump.indent (), true);
14495   for (unsigned ix = 0; ix != size; ix++)
14496     {
14497       depset *b = scc[ix];
14498 
14499       switch (b->get_entity_kind ())
14500 	{
14501 	default:
14502 	  gcc_unreachable ();
14503 
14504 	case depset::EK_BINDING:
14505 	  {
14506 	    dump (dumper::CLUSTER)
14507 	      && dump ("[%u]=%s %P", ix, b->entity_kind_name (),
14508 		       b->get_entity (), b->get_name ());
14509 	    depset *ns_dep = b->deps[0];
14510 	    gcc_checking_assert (ns_dep->get_entity_kind ()
14511 				 == depset::EK_NAMESPACE
14512 				 && ns_dep->get_entity () == b->get_entity ());
14513 	    for (unsigned jx = b->deps.length (); --jx;)
14514 	      {
14515 		depset *dep = b->deps[jx];
14516 		// We could be declaring something that is also a
14517 		// (merged) import
14518 		gcc_checking_assert (dep->is_import ()
14519 				     || TREE_VISITED (dep->get_entity ())
14520 				     || (dep->get_entity_kind ()
14521 					 == depset::EK_USING));
14522 	      }
14523 	  }
14524 	  break;
14525 
14526 	case depset::EK_DECL:
14527 	case depset::EK_SPECIALIZATION:
14528 	case depset::EK_PARTIAL:
14529 	  b->cluster = counts[MSC_entities]++;
14530 	  sec.mark_declaration (b->get_entity (), b->has_defn ());
14531 	  /* FALLTHROUGH  */
14532 
14533 	case depset::EK_USING:
14534 	  gcc_checking_assert (!b->is_import ()
14535 			       && !b->is_unreached ());
14536 	  dump (dumper::CLUSTER)
14537 	    && dump ("[%u]=%s %s %N", ix, b->entity_kind_name (),
14538 		     b->has_defn () ? "definition" : "declaration",
14539 		     b->get_entity ());
14540 	  break;
14541 	}
14542     }
14543   dump (dumper::CLUSTER) && (dump.outdent (), true);
14544 
14545   /* Ensure every out-of-cluster decl is referenced before we start
14546      streaming.  We must do both imports *and* earlier clusters,
14547      because the latter could reach into the former and cause a
14548      duplicate loop.   */
14549   sec.set_importing (+1);
14550   for (unsigned ix = 0; ix != size; ix++)
14551     {
14552       depset *b = scc[ix];
14553       for (unsigned jx = (b->get_entity_kind () == depset::EK_BINDING
14554 			  || b->is_special ()) ? 1 : 0;
14555 	   jx != b->deps.length (); jx++)
14556 	{
14557 	  depset *dep = b->deps[jx];
14558 
14559 	  if (dep->is_binding ())
14560 	    {
14561 	      for (unsigned ix = dep->deps.length (); --ix;)
14562 		{
14563 		  depset *bind = dep->deps[ix];
14564 		  if (bind->get_entity_kind () == depset::EK_USING)
14565 		    bind = bind->deps[1];
14566 
14567 		  intercluster_seed (sec, index_lwm, bind);
14568 		}
14569 	      /* Also check the namespace itself.  */
14570 	      dep = dep->deps[0];
14571 	    }
14572 
14573 	  intercluster_seed (sec, index_lwm, dep);
14574 	}
14575     }
14576   sec.tree_node (NULL_TREE);
14577   /* We're done importing now.  */
14578   sec.set_importing (-1);
14579 
14580   /* Write non-definitions.  */
14581   for (unsigned ix = 0; ix != size; ix++)
14582     {
14583       depset *b = scc[ix];
14584       tree decl = b->get_entity ();
14585       switch (b->get_entity_kind ())
14586 	{
14587 	default:
14588 	  gcc_unreachable ();
14589 	  break;
14590 
14591 	case depset::EK_BINDING:
14592 	  {
14593 	    gcc_assert (TREE_CODE (decl) == NAMESPACE_DECL);
14594 	    dump () && dump ("Depset:%u binding %C:%P", ix, TREE_CODE (decl),
14595 			     decl, b->get_name ());
14596 	    sec.u (ct_bind);
14597 	    sec.tree_node (decl);
14598 	    sec.tree_node (b->get_name ());
14599 
14600 	    /* Write in reverse order, so reading will see the exports
14601 	       first, thus building the overload chain will be
14602 	       optimized.  */
14603 	    for (unsigned jx = b->deps.length (); --jx;)
14604 	      {
14605 		depset *dep = b->deps[jx];
14606 		tree bound = dep->get_entity ();
14607 		unsigned flags = 0;
14608 		if (dep->get_entity_kind () == depset::EK_USING)
14609 		  {
14610 		    tree ovl = bound;
14611 		    bound = OVL_FUNCTION (bound);
14612 		    if (!(TREE_CODE (bound) == CONST_DECL
14613 			  && UNSCOPED_ENUM_P (TREE_TYPE (bound))
14614 			  && decl == TYPE_NAME (TREE_TYPE (bound))))
14615 		      {
14616 			/* An unscoped enumerator in its enumeration's
14617 			   scope is not a using.  */
14618 			flags |= cbf_using;
14619 			if (OVL_USING_P (ovl))
14620 			  flags |= cbf_wrapped;
14621 		      }
14622 		    if (OVL_EXPORT_P (ovl))
14623 		      flags |= cbf_export;
14624 		  }
14625 		else
14626 		  {
14627 		    /* An implicit typedef must be at index one.  */
14628 		    gcc_assert (!DECL_IMPLICIT_TYPEDEF_P (bound) || jx == 1);
14629 		    if (dep->is_hidden ())
14630 		      flags |= cbf_hidden;
14631 		    else if (DECL_MODULE_EXPORT_P (STRIP_TEMPLATE (bound)))
14632 		      flags |= cbf_export;
14633 		  }
14634 
14635 		gcc_checking_assert (DECL_P (bound));
14636 
14637 		sec.i (flags);
14638 		sec.tree_node (bound);
14639 	      }
14640 
14641 	    /* Terminate the list.  */
14642 	    sec.i (-1);
14643 	  }
14644 	  break;
14645 
14646 	case depset::EK_USING:
14647 	  dump () && dump ("Depset:%u %s %C:%N", ix, b->entity_kind_name (),
14648 			   TREE_CODE (decl), decl);
14649 	  break;
14650 
14651 	case depset::EK_SPECIALIZATION:
14652 	case depset::EK_PARTIAL:
14653 	case depset::EK_DECL:
14654 	  dump () && dump ("Depset:%u %s entity:%u %C:%N", ix,
14655 			   b->entity_kind_name (), b->cluster,
14656 			   TREE_CODE (decl), decl);
14657 
14658 	  sec.u (ct_decl);
14659 	  sec.tree_node (decl);
14660 
14661 	  dump () && dump ("Wrote declaration entity:%u %C:%N",
14662 			   b->cluster, TREE_CODE (decl), decl);
14663 	  break;
14664 	}
14665     }
14666 
14667   depset *namer = NULL;
14668 
14669   /* Write out definitions  */
14670   for (unsigned ix = 0; ix != size; ix++)
14671     {
14672       depset *b = scc[ix];
14673       tree decl = b->get_entity ();
14674       switch (b->get_entity_kind ())
14675 	{
14676 	default:
14677 	  break;
14678 
14679 	case depset::EK_SPECIALIZATION:
14680 	case depset::EK_PARTIAL:
14681 	case depset::EK_DECL:
14682 	  if (!namer)
14683 	    namer = b;
14684 
14685 	  if (b->has_defn ())
14686 	    {
14687 	      sec.u (ct_defn);
14688 	      sec.tree_node (decl);
14689 	      dump () && dump ("Writing definition %N", decl);
14690 	      sec.write_definition (decl);
14691 
14692 	      if (!namer->has_defn ())
14693 		namer = b;
14694 	    }
14695 	  break;
14696 	}
14697     }
14698 
14699   /* We don't find the section by name.  Use depset's decl's name for
14700      human friendliness.  */
14701   unsigned name = 0;
14702   tree naming_decl = NULL_TREE;
14703   if (namer)
14704     {
14705       naming_decl = namer->get_entity ();
14706       if (namer->get_entity_kind () == depset::EK_USING)
14707 	/* This unfortunately names the section from the target of the
14708 	   using decl.  But the name is only a guide, so Do Not Care.  */
14709 	naming_decl = OVL_FUNCTION (naming_decl);
14710       if (DECL_IMPLICIT_TYPEDEF_P (naming_decl))
14711 	/* Lose any anonymousness.  */
14712 	naming_decl = TYPE_NAME (TREE_TYPE (naming_decl));
14713       name = to->qualified_name (naming_decl, namer->has_defn ());
14714     }
14715 
14716   unsigned bytes = sec.pos;
14717   unsigned snum = sec.end (to, name, crc_ptr);
14718 
14719   for (unsigned ix = size; ix--;)
14720     gcc_checking_assert (scc[ix]->section == snum);
14721 
14722   dump.outdent ();
14723   dump () && dump ("Wrote section:%u named-by:%N", table.section, naming_decl);
14724 
14725   return bytes;
14726 }
14727 
14728 /* Read a cluster from section SNUM.  */
14729 
14730 bool
14731 module_state::read_cluster (unsigned snum)
14732 {
14733   trees_in sec (this);
14734 
14735   if (!sec.begin (loc, from (), snum))
14736     return false;
14737 
14738   dump () && dump ("Reading section:%u", snum);
14739   dump.indent ();
14740 
14741   /* We care about structural equality.  */
14742   comparing_dependent_aliases++;
14743 
14744   /* First seed the imports.  */
14745   while (tree import = sec.tree_node ())
14746     dump (dumper::CLUSTER) && dump ("Seeded import %N", import);
14747 
14748   while (!sec.get_overrun () && sec.more_p ())
14749     {
14750       unsigned ct = sec.u ();
14751       switch (ct)
14752 	{
14753 	default:
14754 	  sec.set_overrun ();
14755 	  break;
14756 
14757 	case ct_bind:
14758 	  /* A set of namespace bindings.  */
14759 	  {
14760 	    tree ns = sec.tree_node ();
14761 	    tree name = sec.tree_node ();
14762 	    tree decls = NULL_TREE;
14763 	    tree visible = NULL_TREE;
14764 	    tree type = NULL_TREE;
14765 	    bool dedup = false;
14766 
14767 	    /* We rely on the bindings being in the reverse order of
14768 	       the resulting overload set.  */
14769 	    for (;;)
14770 	      {
14771 		int flags = sec.i ();
14772 		if (flags < 0)
14773 		  break;
14774 
14775 		if ((flags & cbf_hidden)
14776 		    && (flags & (cbf_using | cbf_export)))
14777 		  sec.set_overrun ();
14778 
14779 		tree decl = sec.tree_node ();
14780 		if (sec.get_overrun ())
14781 		  break;
14782 
14783 		if (decls && TREE_CODE (decl) == TYPE_DECL)
14784 		  {
14785 		    /* Stat hack.  */
14786 		    if (type || !DECL_IMPLICIT_TYPEDEF_P (decl))
14787 		      sec.set_overrun ();
14788 		    type = decl;
14789 		  }
14790 		else
14791 		  {
14792 		    if (decls
14793 			|| (flags & (cbf_hidden | cbf_wrapped))
14794 			|| DECL_FUNCTION_TEMPLATE_P (decl))
14795 		      {
14796 			decls = ovl_make (decl, decls);
14797 			if (flags & cbf_using)
14798 			  {
14799 			    dedup = true;
14800 			    OVL_USING_P (decls) = true;
14801 			    if (flags & cbf_export)
14802 			      OVL_EXPORT_P (decls) = true;
14803 			  }
14804 
14805 			if (flags & cbf_hidden)
14806 			  OVL_HIDDEN_P (decls) = true;
14807 			else if (dedup)
14808 			  OVL_DEDUP_P (decls) = true;
14809 		      }
14810 		    else
14811 		      decls = decl;
14812 
14813 		    if (flags & cbf_export
14814 			|| (!(flags & cbf_hidden)
14815 			    && (is_module () || is_partition ())))
14816 		      visible = decls;
14817 		  }
14818 	      }
14819 
14820 	    if (!decls)
14821 	      sec.set_overrun ();
14822 
14823 	    if (sec.get_overrun ())
14824 	      break; /* Bail.  */
14825 
14826 	    dump () && dump ("Binding of %P", ns, name);
14827 	    if (!set_module_binding (ns, name, mod,
14828 				     is_header () ? -1
14829 				     : is_module () || is_partition () ? 1
14830 				     : 0,
14831 				     decls, type, visible))
14832 	      sec.set_overrun ();
14833 	  }
14834 	  break;
14835 
14836 	case ct_decl:
14837 	  /* A decl.  */
14838 	  {
14839 	    tree decl = sec.tree_node ();
14840 	    dump () && dump ("Read declaration of %N", decl);
14841 	  }
14842 	  break;
14843 
14844 	case ct_defn:
14845 	  {
14846 	    tree decl = sec.tree_node ();
14847 	    dump () && dump ("Reading definition of %N", decl);
14848 	    sec.read_definition (decl);
14849 	  }
14850 	  break;
14851 	}
14852     }
14853 
14854   /* When lazy loading is in effect, we can be in the middle of
14855      parsing or instantiating a function.  Save it away.
14856      push_function_context does too much work.   */
14857   tree old_cfd = current_function_decl;
14858   struct function *old_cfun = cfun;
14859   while (tree decl = sec.post_process ())
14860     {
14861       bool abstract = false;
14862       if (TREE_CODE (decl) == TEMPLATE_DECL)
14863 	{
14864 	  abstract = true;
14865 	  decl = DECL_TEMPLATE_RESULT (decl);
14866 	}
14867 
14868       current_function_decl = decl;
14869       allocate_struct_function (decl, abstract);
14870       cfun->language = ggc_cleared_alloc<language_function> ();
14871       cfun->language->base.x_stmt_tree.stmts_are_full_exprs_p = 1;
14872 
14873       if (abstract)
14874 	;
14875       else if (DECL_ABSTRACT_P (decl))
14876 	vec_safe_push (post_load_decls, decl);
14877       else
14878 	{
14879 	  bool aggr = aggregate_value_p (DECL_RESULT (decl), decl);
14880 #ifdef PCC_STATIC_STRUCT_RETURN
14881 	  cfun->returns_pcc_struct = aggr;
14882 #endif
14883 	  cfun->returns_struct = aggr;
14884 
14885 	  if (DECL_COMDAT (decl))
14886 	    // FIXME: Comdat grouping?
14887 	    comdat_linkage (decl);
14888 	  note_vague_linkage_fn (decl);
14889 	  cgraph_node::finalize_function (decl, true);
14890 	}
14891 
14892     }
14893   /* Look, function.cc's interface to cfun does too much for us, we
14894      just need to restore the old value.  I do not want to go
14895      redesigning that API right now.  */
14896 #undef cfun
14897   cfun = old_cfun;
14898   current_function_decl = old_cfd;
14899   comparing_dependent_aliases--;
14900 
14901   dump.outdent ();
14902   dump () && dump ("Read section:%u", snum);
14903 
14904   loaded_clusters++;
14905 
14906   if (!sec.end (from ()))
14907     return false;
14908 
14909   return true;
14910 }
14911 
14912 void
14913 module_state::write_namespace (bytes_out &sec, depset *dep)
14914 {
14915   unsigned ns_num = dep->cluster;
14916   unsigned ns_import = 0;
14917 
14918   if (dep->is_import ())
14919     ns_import = dep->section;
14920   else if (dep->get_entity () != global_namespace)
14921     ns_num++;
14922 
14923   sec.u (ns_import);
14924   sec.u (ns_num);
14925 }
14926 
14927 tree
14928 module_state::read_namespace (bytes_in &sec)
14929 {
14930   unsigned ns_import = sec.u ();
14931   unsigned ns_num = sec.u ();
14932   tree ns = NULL_TREE;
14933 
14934   if (ns_import || ns_num)
14935     {
14936       if (!ns_import)
14937 	ns_num--;
14938 
14939       if (unsigned origin = slurp->remap_module (ns_import))
14940 	{
14941 	  module_state *from = (*modules)[origin];
14942 	  if (ns_num < from->entity_num)
14943 	    {
14944 	      binding_slot &slot = (*entity_ary)[from->entity_lwm + ns_num];
14945 
14946 	      if (!slot.is_lazy ())
14947 		ns = slot;
14948 	    }
14949 	}
14950       else
14951 	sec.set_overrun ();
14952     }
14953   else
14954     ns = global_namespace;
14955 
14956   return ns;
14957 }
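
/* Editorial note on the encoding used by write_namespace/read_namespace
   above: a namespace reference is streamed as the pair (import, number).
   The global namespace is always (0, 0); one of our own namespaces with
   entity index N is written as (0, N + 1), so it can never collide with
   the global namespace; an imported namespace is written as (M, N), where
   M is the (nonzero) module the namespace comes from and N its entity
   index within that module.  */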
14958 
14959 /* SPACES is a sorted vector of namespaces.  Write out the namespaces
14960    to MOD_SNAME_PFX.nms section.   */
14961 
14962 void
14963 module_state::write_namespaces (elf_out *to, vec<depset *> spaces,
14964 				unsigned num, unsigned *crc_p)
14965 {
14966   dump () && dump ("Writing namespaces");
14967   dump.indent ();
14968 
14969   bytes_out sec (to);
14970   sec.begin ();
14971 
14972   for (unsigned ix = 0; ix != num; ix++)
14973     {
14974       depset *b = spaces[ix];
14975       tree ns = b->get_entity ();
14976 
14977       gcc_checking_assert (TREE_CODE (ns) == NAMESPACE_DECL);
14978       /* P1815 may have something to say about this.  */
14979       gcc_checking_assert (TREE_PUBLIC (ns));
14980 
14981       unsigned flags = 0;
14982       if (TREE_PUBLIC (ns))
14983 	flags |= 1;
14984       if (DECL_NAMESPACE_INLINE_P (ns))
14985 	flags |= 2;
14986       if (DECL_MODULE_PURVIEW_P (ns))
14987 	flags |= 4;
14988       if (DECL_MODULE_EXPORT_P (ns))
14989 	flags |= 8;
14990 
14991       dump () && dump ("Writing namespace:%u %N%s%s%s%s",
14992 		       b->cluster, ns,
14993 		       flags & 1 ? ", public" : "",
14994 		       flags & 2 ? ", inline" : "",
14995 		       flags & 4 ? ", purview" : "",
14996 		       flags & 8 ? ", export" : "");
14997       sec.u (b->cluster);
14998       sec.u (to->name (DECL_NAME (ns)));
14999       write_namespace (sec, b->deps[0]);
15000 
15001       sec.u (flags);
15002       write_location (sec, DECL_SOURCE_LOCATION (ns));
15003     }
15004 
15005   sec.end (to, to->name (MOD_SNAME_PFX ".nms"), crc_p);
15006   dump.outdent ();
15007 }
15008 
15009 /* Read the namespace hierarchy from MOD_SNAME_PFX.nms.  Create the
15010    imported namespaces and install them in the entity array.  */
15011 
15012 bool
15013 module_state::read_namespaces (unsigned num)
15014 {
15015   bytes_in sec;
15016 
15017   if (!sec.begin (loc, from (), MOD_SNAME_PFX ".nms"))
15018     return false;
15019 
15020   dump () && dump ("Reading namespaces");
15021   dump.indent ();
15022 
15023   for (unsigned ix = 0; ix != num; ix++)
15024     {
15025       unsigned entity_index = sec.u ();
15026       unsigned name = sec.u ();
15027 
15028       tree parent = read_namespace (sec);
15029 
15030       /* See comment in write_namespace about why not bits.  */
15031       unsigned flags = sec.u ();
15032       location_t src_loc = read_location (sec);
15033 
15034       if (entity_index >= entity_num
15035 	  || !parent
15036 	  || (flags & 0xc) == 0x8)
15037 	sec.set_overrun ();
15038       if (sec.get_overrun ())
15039 	break;
15040 
15041       tree id = name ? get_identifier (from ()->name (name)) : NULL_TREE;
15042 
15043       dump () && dump ("Read namespace:%u %P%s%s%s%s",
15044 		       entity_index, parent, id,
15045 		       flags & 1 ? ", public" : "",
15046 		       flags & 2 ? ", inline" : "",
15047 		       flags & 4 ? ", purview" : "",
15048 		       flags & 8 ? ", export" : "");
15049       bool visible_p = ((flags & 8)
15050 			|| ((flags & 1)
15051 			    && (flags & 4)
15052 			    && (is_partition () || is_module ())));
15053       tree inner = add_imported_namespace (parent, id, src_loc, mod,
15054 					   bool (flags & 2), visible_p);
15055       if (!inner)
15056 	{
15057 	  sec.set_overrun ();
15058 	  break;
15059 	}
15060 
15061       if (is_partition ())
15062 	{
15063 	  if (flags & 4)
15064 	    DECL_MODULE_PURVIEW_P (inner) = true;
15065 	  if (flags & 8)
15066 	    DECL_MODULE_EXPORT_P (inner) = true;
15067 	}
15068 
15069       /* Install the namespace.  */
15070       (*entity_ary)[entity_lwm + entity_index] = inner;
15071       if (DECL_MODULE_IMPORT_P (inner))
15072 	{
15073 	  bool existed;
15074 	  unsigned *slot = &entity_map->get_or_insert
15075 	    (DECL_UID (inner), &existed);
15076 	  if (existed)
15077 	    /* If it existed, it should match.  */
15078 	    gcc_checking_assert (inner == (*entity_ary)[*slot]);
15079 	  else
15080 	    *slot = entity_lwm + entity_index;
15081 	}
15082     }
15083   dump.outdent ();
15084   if (!sec.end (from ()))
15085     return false;
15086   return true;
15087 }
15088 
15089 /* Write the binding TABLE to MOD_SNAME_PFX.bnd   */
15090 
15091 unsigned
15092 module_state::write_bindings (elf_out *to, vec<depset *> sccs, unsigned *crc_p)
15093 {
15094   dump () && dump ("Writing binding table");
15095   dump.indent ();
15096 
15097   unsigned num = 0;
15098   bytes_out sec (to);
15099   sec.begin ();
15100 
15101   for (unsigned ix = 0; ix != sccs.length (); ix++)
15102     {
15103       depset *b = sccs[ix];
15104       if (b->is_binding ())
15105 	{
15106 	  tree ns = b->get_entity ();
15107 	  dump () && dump ("Bindings %P section:%u", ns, b->get_name (),
15108 			   b->section);
15109 	  sec.u (to->name (b->get_name ()));
15110 	  write_namespace (sec, b->deps[0]);
15111 	  sec.u (b->section);
15112 	  num++;
15113 	}
15114     }
15115 
15116   sec.end (to, to->name (MOD_SNAME_PFX ".bnd"), crc_p);
15117   dump.outdent ();
15118 
15119   return num;
15120 }
15121 
15122 /* Read the binding table from MOD_SNAME_PFX.bnd.  */
15123 
15124 bool
15125 module_state::read_bindings (unsigned num, unsigned lwm, unsigned hwm)
15126 {
15127   bytes_in sec;
15128 
15129   if (!sec.begin (loc, from (), MOD_SNAME_PFX ".bnd"))
15130     return false;
15131 
15132   dump () && dump ("Reading binding table");
15133   dump.indent ();
15134   for (; !sec.get_overrun () && num--;)
15135     {
15136       const char *name = from ()->name (sec.u ());
15137       tree ns = read_namespace (sec);
15138       unsigned snum = sec.u ();
15139 
15140       if (!ns || !name || (snum - lwm) >= (hwm - lwm))
15141 	sec.set_overrun ();
15142       if (!sec.get_overrun ())
15143 	{
15144 	  tree id = get_identifier (name);
15145 	  dump () && dump ("Bindings %P section:%u", ns, id, snum);
15146 	  if (mod && !import_module_binding (ns, id, mod, snum))
15147 	    break;
15148 	}
15149     }
15150 
15151   dump.outdent ();
15152   if (!sec.end (from ()))
15153     return false;
15154   return true;
15155 }
15156 
15157 /* Write the entity table to MOD_SNAME_PFX.ent
15158 
15159    Each entry is a section number; zero marks an (eagerly created) namespace.  */
15160 
15161 void
15162 module_state::write_entities (elf_out *to, vec<depset *> depsets,
15163 			      unsigned count, unsigned *crc_p)
15164 {
15165   dump () && dump ("Writing entities");
15166   dump.indent ();
15167 
15168   bytes_out sec (to);
15169   sec.begin ();
15170 
15171   unsigned current = 0;
15172   for (unsigned ix = 0; ix < depsets.length (); ix++)
15173     {
15174       depset *d = depsets[ix];
15175 
15176       switch (d->get_entity_kind ())
15177 	{
15178 	default:
15179 	  break;
15180 
15181 	case depset::EK_NAMESPACE:
15182 	  if (!d->is_import () && d->get_entity () != global_namespace)
15183 	    {
15184 	      gcc_checking_assert (d->cluster == current);
15185 	      current++;
15186 	      sec.u (0);
15187 	    }
15188 	  break;
15189 
15190 	case depset::EK_DECL:
15191 	case depset::EK_SPECIALIZATION:
15192 	case depset::EK_PARTIAL:
15193 	  gcc_checking_assert (!d->is_unreached ()
15194 			       && !d->is_import ()
15195 			       && d->cluster == current
15196 			       && d->section);
15197 	  current++;
15198 	  sec.u (d->section);
15199 	  break;
15200 	}
15201     }
15202   gcc_assert (count == current);
15203   sec.end (to, to->name (MOD_SNAME_PFX ".ent"), crc_p);
15204   dump.outdent ();
15205 }
15206 
15207 bool
15208 module_state::read_entities (unsigned count, unsigned lwm, unsigned hwm)
15209 {
15210   trees_in sec (this);
15211 
15212   if (!sec.begin (loc, from (), MOD_SNAME_PFX ".ent"))
15213     return false;
15214 
15215   dump () && dump ("Reading entities");
15216   dump.indent ();
15217 
15218   for (binding_slot *slot = entity_ary->begin () + entity_lwm; count--; slot++)
15219     {
15220       unsigned snum = sec.u ();
15221       if (snum && (snum - lwm) >= (hwm - lwm))
15222 	sec.set_overrun ();
15223       if (sec.get_overrun ())
15224 	break;
15225 
15226       if (snum)
15227 	slot->set_lazy (snum << 2);
15228     }
15229 
15230   dump.outdent ();
15231   if (!sec.end (from ()))
15232     return false;
15233   return true;
15234 }
15235 
15236 /* Write the pending table to MOD_SNAME_PFX.pnd
15237 
15238    The pending table holds information about clusters that need to be
15239    loaded because they contain information about something that is not
15240    found by namespace-scope lookup.
15241 
15242    The three cases are:
15243 
15244    (a) Template (maybe-partial) specializations that we have
15245    instantiated or defined.  When an importer needs to instantiate
15246    that template, they /must have/ the partial, explicit & extern
15247    specializations available.  If they have the other specializations
15248    available, they'll have less work to do.  Thus, when we're about to
15249    instantiate FOO, we have to be able to ask 'are there any
15250    specializations of FOO in our imports?'.
15251 
15252    (b) (Maybe-implicit) member function definitions.  A class could
15253    be defined in one header, and an inline member defined in a
15254    different header (this occurs in the STL).  Similarly, like the
15255    specialization case, an implicit member function could have been
15256    'instantiated' in one module, and it'd be nice to not have to
15257    reinstantiate it in another.
15258 
15259    (c) Member classes completed elsewhere.  A member class could be
15260    declared in one header and defined in another.  We need to know to
15261    load the class definition before looking in it.  This turns out to
15262    be a specific case of #b, so we can treat these the same.  But it
15263    does highlight an issue -- there could be an intermediate import
15264    between the outermost containing namespace-scope class and the
15265    innermost being-defined member class.  This is actually possible
15266    with all of these cases, so be aware -- we're not just talking of
15267    one level of import to get to the innermost namespace.
15268 
15269    This gets complicated fast; it took me multiple attempts to even
15270    get something remotely working.  Partially because I focussed on
15271    optimizing what I think turns out to be a smaller problem, given
15272    the known need to do the more general case *anyway*.  I document
15273    the smaller problem, because it does appear to be the natural way
15274    to do it.  It's a trap!
15275 
15276    **** THE TRAP
15277 
15278    Let's refer to the primary template or the containing class as the
15279    KEY.  And the specialization or member as the PENDING-ENTITY.  (To
15280    avoid having to say those mouthfuls all the time.)
15281 
15282    In either case, we have an entity and we need some way of mapping
15283    that to a set of entities that need to be loaded before we can
15284    proceed with whatever processing of the entity we were going to do.
15285 
15286    We need to link the key to the pending-entity in some way.  Given a
15287    key, tell me the pending-entities I need to have loaded.  However
15288    we tie the key to the pending-entity, the link must not rely on
15289    the key being loaded -- that'd defeat the lazy loading scheme.
15290 
15291    As the key will be an import, we know its entity number (either
15292    because we imported it, or we're writing it out too).  Thus we can
15293    generate a map of key-indices to pending-entities.  The
15294    pending-entity indices will be into our span of the entity table,
15295    and thus allow them to be lazily loaded.  The key index will be
15296    into another slot of the entity table.  Notice that this checking
15297    could be expensive: we don't want to iterate over a bunch of
15298    pending-entity indices (across multiple imports), every time we're
15299    about to do the thing with the key.  We need to quickly determine
15300    'definitely nothing needed'.
15301 
15302    That's almost good enough, except that key indices are not unique
15303    in a couple of cases :( Specifically the Global Module or a module
15304    partition can result in multiple modules assigning an entity index
15305    for the key.  The decl-merging on loading will detect that so we
15306    only have one Key loaded, and in the entity hash it'll indicate the
15307    entity index of the first load, which might be different to how we
15308    know it.  Notice this is restricted to GM entities or this-module
15309    entities.  Foreign imports cannot have this.
15310 
15311    We can simply resolve this by mapping from how this module
15312    referred to the key to how the importer knows it.  Look in the
15313    entity table slot that we nominate, maybe lazy load it, and then
15314    lookup the resultant entity in the entity hash to learn how the
15315    importer knows it.
15316 
15317    But we need to go in the other direction :( Given the key, find all
15318    the index-aliases of that key.  We can partially solve that by
15319    adding an alias hash table.  Whenever we load a merged decl, add or
15320    augment a mapping from the entity (or its entity-index) to the
15321    newly-discovered index.  Then when we look for pending entities of
15322    a key, we also iterate over the aliases this mapping provides.
15323 
15324    But that requires the alias to be loaded.  And that's not
15325    necessarily true.
15326 
15327    *** THE SIMPLER WAY
15328 
15329    The remaining fixed thing we have is the innermost namespace
15330    containing the ultimate namespace-scope container of the key and
15331    the name of that container (which might be the key itself).  I.e. a
15332    namespace-decl/identifier/module tuple.  Let's call this the
15333    top-key.  We'll discover that the module is not important here,
15334    because of cross-module possibilities mentioned in case #c above.
15335    We can't mark up namespace-binding slots.  The best we can do is
15336    mark the binding vector with 'there's something here', and have
15337    another map from namespace/identifier pairs to a vector of pending
15338    entity indices.
15339 
15340    Maintain a pending-entity map.  This is keyed by top-key, and
15341    maps to a vector of pending-entity indices.  On the binding vector
15342    have flags saying whether the pending-name-entity map has contents.
15343    (We might want to further extend the key to be GM-vs-Partition and
15344    specialization-vs-member, but let's not get ahead of ourselves.)
15345 
15346    For every key-like entity, find the outermost namespace-scope
15347    name.  Use that to look up in the pending-entity map and then make
15348    sure the specified entities are loaded.
15349 
15350    An optimization might be to have a flag in each key-entity saying
15351    that its top key might be in the entity table.  It's not clear to
15352    me how to set that flag cheaply -- cheaper than just looking.
15353 
15354    FIXME: It'd be nice to have a bit in decls to tell us whether to
15355    even try this.  We can have an 'already done' flag that we set when
15356    we've done KLASS's lazy pendings.  When we import a module that
15357    registers pendings on the same top-key as KLASS we need to clear
15358    the flag.  A recursive walk of the top-key clearing the bit will
15359    suffice.  Plus we only need to recurse on classes that have the bit
15360    set.  (That means we need to set the bit on parents of KLASS here,
15361    don't forget.)  However, first: correctness, second: efficiency.  */
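
/* Editorial sketch (not part of the original source): the pending-entity
   map described above amounts to hashing a {namespace, identifier} top-key
   to the entity indices that must be loaded before the key is used.  In
   plain C++, with std::map and std::string standing in for GCC's hash
   tables and tree nodes, the shape is roughly:  */
#if 0
#include <map>
#include <string>
#include <utility>
#include <vector>

/* Top-key: the innermost namespace and the name bound in it.  */
using top_key = std::pair<std::string, std::string>;

/* For each top-key, the entity-table indices of pendings to load.  */
static std::map<top_key, std::vector<unsigned>> pending_map;

/* Reader side: record that entity INDEX is pending on KEY.  */
static void
note_pending (const top_key &key, unsigned index)
{
  pending_map[key].push_back (index);
}

/* Before doing "the thing" with a key-like entity, load everything
   registered against its top-key.  LOAD stands in for the lazy-loading
   machinery.  */
template<typename Loader>
static void
load_pendings (const top_key &key, Loader load)
{
  auto it = pending_map.find (key);
  if (it != pending_map.end ())
    for (unsigned index : it->second)
      load (index);
}
#endif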
15362 
15363 unsigned
15364 module_state::write_pendings (elf_out *to, vec<depset *> depsets,
15365 			      depset::hash &table, unsigned *crc_p)
15366 {
15367   dump () && dump ("Writing pending-entities");
15368   dump.indent ();
15369 
15370   trees_out sec (to, this, table);
15371   sec.begin ();
15372 
15373   unsigned count = 0;
15374   tree cache_ns = NULL_TREE;
15375   tree cache_id = NULL_TREE;
15376   unsigned cache_section = ~0;
15377   for (unsigned ix = 0; ix < depsets.length (); ix++)
15378     {
15379       depset *d = depsets[ix];
15380 
15381       if (d->is_binding ())
15382 	continue;
15383 
15384       if (d->is_import ())
15385 	continue;
15386 
15387       if (!(d->get_entity_kind () == depset::EK_SPECIALIZATION
15388 	    || d->get_entity_kind () == depset::EK_PARTIAL
15389 	    || (d->get_entity_kind () == depset::EK_DECL && d->is_member ())))
15390 	continue;
15391 
15392       tree key_decl = nullptr;
15393       tree key_ns = find_pending_key (d->get_entity (), &key_decl);
15394       tree key_name = DECL_NAME (key_decl);
15395 
15396       if (IDENTIFIER_ANON_P (key_name))
15397 	{
15398 	  gcc_checking_assert (IDENTIFIER_LAMBDA_P (key_name));
15399 	  if (tree attached = LAMBDA_TYPE_EXTRA_SCOPE (TREE_TYPE (key_decl)))
15400 	    key_name = DECL_NAME (attached);
15401 	  else
15402 	    {
15403 	      /* There's nothing to attach it to.  Must
15404 		 always reinstantiate.  */
15405 	      dump ()
15406 		&& dump ("Unattached lambda %s %N[%u] section:%u",
15407 			 d->get_entity_kind () == depset::EK_DECL
15408 			 ? "Member" : "Specialization", d->get_entity (),
15409 			 d->cluster, d->section);
15410 	      continue;
15411 	    }
15412 	}
15413 
15414       char const *also = "";
15415       if (d->section == cache_section
15416 	  && key_ns == cache_ns
15417 	  && key_name == cache_id)
15418 	/* Same section & key as previous, no need to repeat ourselves.  */
15419 	also = "also ";
15420       else
15421 	{
15422 	  cache_ns = key_ns;
15423 	  cache_id = key_name;
15424 	  cache_section = d->section;
15425 	  gcc_checking_assert (table.find_dependency (cache_ns));
15426 	  sec.tree_node (cache_ns);
15427 	  sec.tree_node (cache_id);
15428 	  sec.u (d->cluster);
15429 	  count++;
15430 	}
15431       dump () && dump ("Pending %s %N entity:%u section:%u %skeyed to %P",
15432 		       d->get_entity_kind () == depset::EK_DECL
15433 		       ? "member" : "specialization", d->get_entity (),
15434 		       d->cluster, cache_section, also, cache_ns, cache_id);
15435       }
15436   sec.end (to, to->name (MOD_SNAME_PFX ".pnd"), crc_p);
15437   dump.outdent ();
15438 
15439   return count;
15440 }
15441 
15442 bool
15443 module_state::read_pendings (unsigned count)
15444 {
15445   trees_in sec (this);
15446 
15447   if (!sec.begin (loc, from (), MOD_SNAME_PFX ".pnd"))
15448     return false;
15449 
15450   dump () && dump ("Reading %u pendings", count);
15451   dump.indent ();
15452 
15453   for (unsigned ix = 0; ix != count; ix++)
15454     {
15455       pending_key key;
15456       unsigned index;
15457 
15458       key.ns = sec.tree_node ();
15459       key.id = sec.tree_node ();
15460       index = sec.u ();
15461 
15462       if (!key.ns || !key.id
15463 	  || !(TREE_CODE (key.ns) == NAMESPACE_DECL
15464 	       && !DECL_NAMESPACE_ALIAS (key.ns))
15465 	  || !identifier_p (key.id)
15466 	  || index >= entity_num)
15467 	sec.set_overrun ();
15468 
15469       if (sec.get_overrun ())
15470 	break;
15471 
15472       dump () && dump ("Pending:%u keyed to %P", index, key.ns, key.id);
15473 
15474       index += entity_lwm;
15475       auto &vec = pending_table->get_or_insert (key);
15476       vec.safe_push (index);
15477     }
15478 
15479   dump.outdent ();
15480   if (!sec.end (from ()))
15481     return false;
15482   return true;
15483 }
15484 
15485 /* Read & write locations.  */
15486 enum loc_kind {
15487   LK_ORDINARY,
15488   LK_MACRO,
15489   LK_IMPORT_ORDINARY,
15490   LK_IMPORT_MACRO,
15491   LK_ADHOC,
15492   LK_RESERVED,
15493 };
15494 
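/* Find the import whose ordinary location range [first,second) contains
   LOC, by binary searching the module array OOL, which the search assumes
   is ordered by ascending ordinary location.  Returns nullptr when LOC
   does not belong to an import.  (Editorial comment; the helper was
   undocumented.)  */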
15495 static const module_state *
15496 module_for_ordinary_loc (location_t loc)
15497 {
15498   unsigned pos = 0;
15499   unsigned len = ool->length () - pos;
15500 
15501   while (len)
15502     {
15503       unsigned half = len / 2;
15504       module_state *probe = (*ool)[pos + half];
15505       if (loc < probe->ordinary_locs.first)
15506 	len = half;
15507       else if (loc < probe->ordinary_locs.second)
15508 	return probe;
15509       else
15510 	{
15511 	  pos += half + 1;
15512 	  len = len - (half + 1);
15513 	}
15514     }
15515 
15516   return nullptr;
15517 }
15518 
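/* Likewise for macro locations: find the import whose macro location
   range contains LOC.  Macro locations are allocated downwards from
   MAX_LOCATION_T, hence the reversed comparisons relative to the
   ordinary case.  (Editorial comment.)  */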
15519 static const module_state *
15520 module_for_macro_loc (location_t loc)
15521 {
15522   unsigned pos = 1;
15523   unsigned len = modules->length () - pos;
15524 
15525   while (len)
15526     {
15527       unsigned half = len / 2;
15528       module_state *probe = (*modules)[pos + half];
15529       if (loc >= probe->macro_locs.second)
15530 	len = half;
15531       else if (loc >= probe->macro_locs.first)
15532 	return probe;
15533       else
15534 	{
15535 	  pos += half + 1;
15536 	  len = len - (half + 1);
15537 	}
15538     }
15539 
15540   return NULL;
15541 }
15542 
15543 location_t
15544 module_state::imported_from () const
15545 {
15546   location_t from = loc;
15547   line_map_ordinary const *fmap
15548     = linemap_check_ordinary (linemap_lookup (line_table, from));
15549 
15550   if (MAP_MODULE_P (fmap))
15551     from = linemap_included_from (fmap);
15552 
15553   return from;
15554 }
15555 
15556 /* If we're not streaming, record that we need location LOC.
15557    Otherwise stream it.  */
15558 
15559 void
15560 module_state::write_location (bytes_out &sec, location_t loc)
15561 {
15562   if (!sec.streaming_p ())
15563     /* This is where we should note we use this location.  See comment
15564        about write_ordinary_maps.  */
15565     return;
15566 
15567   if (loc < RESERVED_LOCATION_COUNT)
15568     {
15569       dump (dumper::LOCATION) && dump ("Reserved location %u", unsigned (loc));
15570       sec.u (LK_RESERVED + loc);
15571     }
15572   else if (IS_ADHOC_LOC (loc))
15573     {
15574       dump (dumper::LOCATION) && dump ("Adhoc location");
15575       sec.u (LK_ADHOC);
15576       location_t locus = get_location_from_adhoc_loc (line_table, loc);
15577       write_location (sec, locus);
15578       source_range range = get_range_from_loc (line_table, loc);
15579       if (range.m_start == locus)
15580 	/* Compress.  */
15581 	range.m_start = UNKNOWN_LOCATION;
15582       write_location (sec, range.m_start);
15583       write_location (sec, range.m_finish);
15584     }
15585   else if (loc >= LINEMAPS_MACRO_LOWEST_LOCATION (line_table))
15586     {
15587       if (const loc_spans::span *span = spans.macro (loc))
15588 	{
15589 	  unsigned off = MAX_LOCATION_T - loc;
15590 
15591 	  off -= span->macro_delta;
15592 
15593 	  sec.u (LK_MACRO);
15594 	  sec.u (off);
15595 	  dump (dumper::LOCATION)
15596 	    && dump ("Macro location %u output %u", loc, off);
15597 	}
15598       else if (const module_state *import = module_for_macro_loc (loc))
15599 	{
15600 	  unsigned off = import->macro_locs.second - loc - 1;
15601 	  sec.u (LK_IMPORT_MACRO);
15602 	  sec.u (import->remap);
15603 	  sec.u (off);
15604 	  dump (dumper::LOCATION)
15605 	    && dump ("Imported macro location %u output %u:%u",
15606 		     loc, import->remap, off);
15607 	}
15608       else
15609 	gcc_unreachable ();
15610     }
15611   else if (IS_ORDINARY_LOC (loc))
15612     {
15613       if (const loc_spans::span *span = spans.ordinary (loc))
15614 	{
15615 	  unsigned off = loc;
15616 
15617 	  off += span->ordinary_delta;
15618 	  sec.u (LK_ORDINARY);
15619 	  sec.u (off);
15620 
15621 	  dump (dumper::LOCATION)
15622 	    && dump ("Ordinary location %u output %u", loc, off);
15623 	}
15624       else if (const module_state *import = module_for_ordinary_loc (loc))
15625 	{
15626 	  unsigned off = loc - import->ordinary_locs.first;
15627 	  sec.u (LK_IMPORT_ORDINARY);
15628 	  sec.u (import->remap);
15629 	  sec.u (off);
15630 	  dump (dumper::LOCATION)
15631 	    && dump ("Imported ordinary location %u output %u:%u",
15632 		     loc, import->remap, off);
15633 	}
15634       else
15635 	gcc_unreachable ();
15636     }
15637   else
15638     gcc_unreachable ();
15639 }
15640 
15641 location_t
15642 module_state::read_location (bytes_in &sec) const
15643 {
15644   location_t locus = UNKNOWN_LOCATION;
15645   unsigned kind = sec.u ();
15646   switch (kind)
15647      {
15648     default:
15649       {
15650 	if (kind < LK_RESERVED + RESERVED_LOCATION_COUNT)
15651 	  locus = location_t (kind - LK_RESERVED);
15652 	else
15653 	  sec.set_overrun ();
15654 	dump (dumper::LOCATION)
15655 	  && dump ("Reserved location %u", unsigned (locus));
15656       }
15657       break;
15658 
15659      case LK_ADHOC:
15660       {
15661 	dump (dumper::LOCATION) && dump ("Adhoc location");
15662 	locus = read_location (sec);
15663 	source_range range;
15664 	range.m_start = read_location (sec);
15665 	if (range.m_start == UNKNOWN_LOCATION)
15666 	  range.m_start = locus;
15667 	range.m_finish = read_location (sec);
15668 	if (locus != loc && range.m_start != loc && range.m_finish != loc)
15669 	  locus = get_combined_adhoc_loc (line_table, locus, range, NULL);
15670       }
15671       break;
15672 
15673     case LK_MACRO:
15674       {
15675 	unsigned off = sec.u ();
15676 
15677 	if (macro_locs.first)
15678 	  {
15679 	    location_t adjusted = MAX_LOCATION_T - off;
15680 	    adjusted -= slurp->loc_deltas.second;
15681 	    if (adjusted < macro_locs.first)
15682 	      sec.set_overrun ();
15683 	    else if (adjusted < macro_locs.second)
15684 	      locus = adjusted;
15685 	    else
15686 	      sec.set_overrun ();
15687 	  }
15688 	else
15689 	  locus = loc;
15690 	dump (dumper::LOCATION)
15691 	  && dump ("Macro %u becoming %u", off, locus);
15692       }
15693       break;
15694 
15695     case LK_ORDINARY:
15696       {
15697 	unsigned off = sec.u ();
15698 	if (ordinary_locs.second)
15699 	  {
15700 	    location_t adjusted = off;
15701 
15702 	    adjusted += slurp->loc_deltas.first;
15703 	    if (adjusted >= ordinary_locs.second)
15704 	      sec.set_overrun ();
15705 	    else if (adjusted >= ordinary_locs.first)
15706 	      locus = adjusted;
15707 	    else if (adjusted < spans.main_start ())
15708 	      locus = off;
15709 	  }
15710 	else
15711 	  locus = loc;
15712 
15713 	dump (dumper::LOCATION)
15714 	  && dump ("Ordinary location %u becoming %u", off, locus);
15715       }
15716       break;
15717 
15718      case LK_IMPORT_MACRO:
15719      case LK_IMPORT_ORDINARY:
15720        {
15721 	 unsigned mod = sec.u ();
15722 	 unsigned off = sec.u ();
15723 	 const module_state *import = NULL;
15724 
15725 	 if (!mod && !slurp->remap)
15726 	   /* This is an early read of a partition location during the
15727 	      read of our ordinary location map.  */
15728 	   import = this;
15729 	 else
15730 	   {
15731 	     mod = slurp->remap_module (mod);
15732 	     if (!mod)
15733 	       sec.set_overrun ();
15734 	     else
15735 	       import = (*modules)[mod];
15736 	   }
15737 
15738 	 if (import)
15739 	   {
15740 	     if (kind == LK_IMPORT_MACRO)
15741 	       {
15742 		 if (!import->macro_locs.first)
15743 		   locus = import->loc;
15744 		 else if (off < import->macro_locs.second - import->macro_locs.first)
15745 		   locus = import->macro_locs.second - off - 1;
15746 		 else
15747 		   sec.set_overrun ();
15748 	       }
15749 	     else
15750 	       {
15751 		 if (!import->ordinary_locs.second)
15752 		   locus = import->loc;
15753 		 else if (off < (import->ordinary_locs.second
15754 			    - import->ordinary_locs.first))
15755 		   locus = import->ordinary_locs.first + off;
15756 		 else
15757 		   sec.set_overrun ();
15758 	       }
15759 	   }
15760        }
15761        break;
15762     }
15763 
15764   return locus;
15765 }
15766 
15767 /* Prepare the span adjustments.  */
15768 
15769 // FIXME:QOI I do not prune the unreachable locations.  Modules with
15770 // textually-large GMFs could well cause us to run out of locations.
15771 // Regular single-file modules could also be affected.  We should
15772 // determine which locations we need to represent, so that we do not
15773 // grab more locations than necessary.  An example is in
15774 // write_macro_maps where we work around macro expansions that are not
15775 // covering any locations -- the macro expands to nothing.  Perhaps we
15776 // should decompose locations so that we can have a more graceful
15777 // degradation upon running out?
15778 
15779 location_map_info
15780 module_state::write_prepare_maps (module_state_config *)
15781 {
15782   dump () && dump ("Preparing locations");
15783   dump.indent ();
15784 
15785   dump () && dump ("Reserved locations [%u,%u) macro [%u,%u)",
15786 		   spans[loc_spans::SPAN_RESERVED].ordinary.first,
15787 		   spans[loc_spans::SPAN_RESERVED].ordinary.second,
15788 		   spans[loc_spans::SPAN_RESERVED].macro.first,
15789 		   spans[loc_spans::SPAN_RESERVED].macro.second);
15790 
15791   location_map_info info;
15792 
15793   info.num_maps.first = info.num_maps.second = 0;
15794 
15795   /* Figure the alignment of ordinary location spans.  */
15796   unsigned max_range = 0;
15797   for (unsigned ix = loc_spans::SPAN_FIRST; ix != spans.length (); ix++)
15798     {
15799       loc_spans::span &span = spans[ix];
15800 
15801       if (span.ordinary.first != span.ordinary.second)
15802 	{
15803 	  line_map_ordinary const *omap
15804 	    = linemap_check_ordinary (linemap_lookup (line_table,
15805 						      span.ordinary.first));
15806 
15807 	  /* We should exactly match up.  */
15808 	  gcc_checking_assert (MAP_START_LOCATION (omap) == span.ordinary.first);
15809 
15810 	  line_map_ordinary const *fmap = omap;
15811 	  for (; MAP_START_LOCATION (omap) < span.ordinary.second; omap++)
15812 	    {
15813 	      /* We should never find a module linemap in an interval.  */
15814 	      gcc_checking_assert (!MAP_MODULE_P (omap));
15815 
15816 	      if (max_range < omap->m_range_bits)
15817 		max_range = omap->m_range_bits;
15818 	    }
15819 
15820 	  info.num_maps.first += omap - fmap;
15821 	}
15822 
15823       if (span.macro.first != span.macro.second)
15824 	{
15825 	  /* Iterate over the span's macros, to elide the empty
15826 	     expansions.  */
15827 	  unsigned count = 0;
15828 	  for (unsigned macro
15829 		 = linemap_lookup_macro_index (line_table,
15830 					       span.macro.second - 1);
15831 	       macro < LINEMAPS_MACRO_USED (line_table);
15832 	       macro++)
15833 	    {
15834 	      line_map_macro const *mmap
15835 		= LINEMAPS_MACRO_MAP_AT (line_table, macro);
15836 	      if (MAP_START_LOCATION (mmap) < span.macro.first)
15837 		/* Fallen out of the span.  */
15838 		break;
15839 
15840 	      if (mmap->n_tokens)
15841 		count++;
15842 	    }
15843 	  dump (dumper::LOCATION) && dump ("Span:%u %u macro maps", ix, count);
15844 	  info.num_maps.second += count;
15845 	}
15846     }
15847 
15848   /* Adjust the maps.  Ordinary ones ascend, and we must maintain
15849      alignment.  Macro ones descend, but are unaligned.  */
15850   location_t ord_off = spans[loc_spans::SPAN_FIRST].ordinary.first;
15851   location_t mac_off = spans[loc_spans::SPAN_FIRST].macro.second;
15852   location_t range_mask = (1u << max_range) - 1;
15853 
15854   dump () && dump ("Ordinary maps range bits:%u, preserve:%x, zero:%u",
15855 		   max_range, ord_off & range_mask, ord_off & ~range_mask);
15856 
15857   for (unsigned ix = loc_spans::SPAN_FIRST; ix != spans.length (); ix++)
15858     {
15859       loc_spans::span &span = spans[ix];
15860 
15861       span.macro_delta = mac_off - span.macro.second;
15862       mac_off -= span.macro.second - span.macro.first;
15863       dump () && dump ("Macro span:%u [%u,%u):%u->%d(%u)", ix,
15864 		       span.macro.first, span.macro.second,
15865 		       span.macro.second - span.macro.first,
15866 		       span.macro_delta, span.macro.first + span.macro_delta);
15867 
15868       line_map_ordinary const *omap
15869 	= linemap_check_ordinary (linemap_lookup (line_table,
15870 						      span.ordinary.first));
15871       location_t base = MAP_START_LOCATION (omap);
15872 
15873       /* Preserve the low MAX_RANGE bits of base by incrementing ORD_OFF.  */
15874       unsigned low_bits = base & range_mask;
15875       if ((ord_off & range_mask) > low_bits)
15876 	low_bits += range_mask + 1;
15877       ord_off = (ord_off & ~range_mask) + low_bits;
15878       span.ordinary_delta = ord_off - base;
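      /* Worked example (editorial): with MAX_RANGE == 5 the mask is 0x1f.
	 If BASE == 0x1043 (low bits 0x03) and ORD_OFF == 0x2010 (low bits
	 0x10 > 0x03), LOW_BITS becomes 0x23 and ORD_OFF 0x2023; the
	 resulting delta of 0xfe0 leaves the low 5 bits of every location
	 in the span unchanged.  */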
15879 
15880       for (; MAP_START_LOCATION (omap) < span.ordinary.second; omap++)
15881 	{
15882 	  location_t start_loc = MAP_START_LOCATION (omap);
15883 	  unsigned to = start_loc + span.ordinary_delta;
15884 	  location_t end_loc = MAP_START_LOCATION (omap + 1);
15885 
15886 	  dump () && dump ("Ordinary span:%u [%u,%u):%u->%d(%u)",
15887 			   ix, start_loc,
15888 			   end_loc, end_loc - start_loc,
15889 			   span.ordinary_delta, to);
15890 
15891 	  /* There should be no change in the low order bits.  */
15892 	  gcc_checking_assert (((start_loc ^ to) & range_mask) == 0);
15893 	}
15894 
15895       /* The ending serialized value.  */
15896       ord_off = span.ordinary.second + span.ordinary_delta;
15897     }
15898 
15899   dump () && dump ("Ordinary:%u maps hwm:%u macro:%u maps lwm:%u ",
15900 		   info.num_maps.first, ord_off,
15901 		   info.num_maps.second, mac_off);
15902 
15903   dump.outdent ();
15904 
15905   info.max_range = max_range;
15906 
15907   return info;
15908 }
15909 
15910 bool
15911 module_state::read_prepare_maps (const module_state_config *cfg)
15912 {
15913   location_t ordinary = line_table->highest_location + 1;
15914   ordinary = ((ordinary + (1u << cfg->ordinary_loc_align))
15915 	      & ~((1u << cfg->ordinary_loc_align) - 1));
15916   ordinary += cfg->ordinary_locs;
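  /* Illustrative example (editorial): if the highest local location so far
     is 999 and ordinary_loc_align is 5, ORDINARY starts at 1000, the
     rounding above yields (1000 + 32) & ~31 == 1024, and adding
     CFG->ordinary_locs gives the highest ordinary location this import
     would need.  */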
15917 
15918   location_t macro = LINEMAPS_MACRO_LOWEST_LOCATION (line_table);
15919   macro -= cfg->macro_locs;
15920 
15921   if (ordinary < LINE_MAP_MAX_LOCATION_WITH_COLS
15922       && macro >= LINE_MAP_MAX_LOCATION)
15923     /* OK, we have enough locations.  */
15924     return true;
15925 
15926   ordinary_locs.first = ordinary_locs.second = 0;
15927   macro_locs.first = macro_locs.second = 0;
15928 
15929   static bool informed = false;
15930   if (!informed)
15931     {
15932       /* Just give the notice once.  */
15933       informed = true;
15934       inform (loc, "unable to represent further imported source locations");
15935     }
15936 
15937   return false;
15938 }
15939 
15940 /* Write the location maps.  This also determines the shifts for the
15941    location spans.  */
15942 
15943 void
15944 module_state::write_ordinary_maps (elf_out *to, location_map_info &info,
15945 				   module_state_config *cfg, bool has_partitions,
15946 				   unsigned *crc_p)
15947 {
15948   dump () && dump ("Writing ordinary location maps");
15949   dump.indent ();
15950 
15951   vec<const char *> filenames;
15952   filenames.create (20);
15953 
15954   /* Determine the unique filenames.  */
15955   // FIXME:QOI We should find the set of filenames when working out
15956   // which locations we actually need.  See write_prepare_maps.
15957   for (unsigned ix = loc_spans::SPAN_FIRST; ix != spans.length (); ix++)
15958     {
15959       loc_spans::span &span = spans[ix];
15960       line_map_ordinary const *omap
15961 	= linemap_check_ordinary (linemap_lookup (line_table,
15962 						  span.ordinary.first));
15963 
15964       /* We should exactly match up.  */
15965       gcc_checking_assert (MAP_START_LOCATION (omap) == span.ordinary.first);
15966 
15967       for (; MAP_START_LOCATION (omap) < span.ordinary.second; omap++)
15968 	{
15969 	  const char *fname = ORDINARY_MAP_FILE_NAME (omap);
15970 
15971 	  /* We should never find a module linemap in an interval.  */
15972 	  gcc_checking_assert (!MAP_MODULE_P (omap));
15973 
15974 	  /* We expect very few filenames, so just an array.
15975 	     (Not true when headers are still in play :()  */
15976 	  for (unsigned jx = filenames.length (); jx--;)
15977 	    {
15978 	      const char *name = filenames[jx];
15979 	      if (0 == strcmp (name, fname))
15980 		{
15981 		  /* Reset the linemap's name, because for things like
15982 		     preprocessed input we could have multiple
15983 		     instances of the same name, and we'd rather not
15984 		     percolate that.  */
15985 		  const_cast<line_map_ordinary *> (omap)->to_file = name;
15986 		  fname = NULL;
15987 		  break;
15988 		}
15989 	    }
15990 	  if (fname)
15991 	    filenames.safe_push (fname);
15992 	}
15993     }
15994 
15995   bytes_out sec (to);
15996   sec.begin ();
15997 
15998   /* Write the filenames.  */
15999   unsigned len = filenames.length ();
16000   sec.u (len);
16001   dump () && dump ("%u source file names", len);
16002   for (unsigned ix = 0; ix != len; ix++)
16003     {
16004       const char *fname = filenames[ix];
16005       dump (dumper::LOCATION) && dump ("Source file[%u]=%s", ix, fname);
16006       sec.str (fname);
16007     }
16008 
16009   location_t offset = spans[loc_spans::SPAN_FIRST].ordinary.first;
16010   location_t range_mask = (1u << info.max_range) - 1;
16011 
16012   dump () && dump ("Ordinary maps:%u, range bits:%u, preserve:%x, zero:%u",
16013 		   info.num_maps.first, info.max_range, offset & range_mask,
16014 		   offset & ~range_mask);
16015   sec.u (info.num_maps.first);	/* Num maps.  */
16016   sec.u (info.max_range);		/* Maximum range bits  */
16017   sec.u (offset & range_mask);	/* Bits to preserve.  */
16018   sec.u (offset & ~range_mask);
16019 
16020   for (unsigned ix = loc_spans::SPAN_FIRST; ix != spans.length (); ix++)
16021     {
16022       loc_spans::span &span = spans[ix];
16023       line_map_ordinary const *omap
16024 	= linemap_check_ordinary (linemap_lookup (line_table,
16025 						  span.ordinary.first));
16026       for (; MAP_START_LOCATION (omap) < span.ordinary.second; omap++)
16027 	{
16028 	  location_t start_loc = MAP_START_LOCATION (omap);
16029 	  unsigned to = start_loc + span.ordinary_delta;
16030 
16031 	  dump (dumper::LOCATION)
16032 	    && dump ("Span:%u ordinary [%u,%u)->%u", ix, start_loc,
16033 		     MAP_START_LOCATION (omap + 1), to);
16034 
16035 	  /* There should be no change in the low order bits.  */
16036 	  gcc_checking_assert (((start_loc ^ to) & range_mask) == 0);
16037 	  sec.u (to);
16038 
16039 	  /* Making accessors just for here seems excessive.  */
16040 	  sec.u (omap->reason);
16041 	  sec.u (omap->sysp);
16042 	  sec.u (omap->m_range_bits);
16043 	  sec.u (omap->m_column_and_range_bits - omap->m_range_bits);
16044 
16045 	  const char *fname = ORDINARY_MAP_FILE_NAME (omap);
16046 	  for (unsigned ix = 0; ix != filenames.length (); ix++)
16047 	    if (filenames[ix] == fname)
16048 	      {
16049 		sec.u (ix);
16050 		break;
16051 	      }
16052 	  sec.u (ORDINARY_MAP_STARTING_LINE_NUMBER (omap));
16053 
16054 	  /* Write the included from location, which means reading it
16055 	     while reading in the ordinary maps.  So we'd better not
16056 	     be getting ahead of ourselves.  */
16057 	  location_t from = linemap_included_from (omap);
16058 	  gcc_checking_assert (from < MAP_START_LOCATION (omap));
16059 	  if (from != UNKNOWN_LOCATION && has_partitions)
16060 	    {
16061 	      /* A partition's span will have a from pointing at a
16062 		 MODULE_INC.  Find that map's from.  */
16063 	      line_map_ordinary const *fmap
16064 		= linemap_check_ordinary (linemap_lookup (line_table, from));
16065 	      if (MAP_MODULE_P (fmap))
16066 		from = linemap_included_from (fmap);
16067 	    }
16068 	  write_location (sec, from);
16069 	}
16070       /* The ending serialized value.  */
16071       offset = MAP_START_LOCATION (omap) + span.ordinary_delta;
16072     }
16073   dump () && dump ("Ordinary location hwm:%u", offset);
16074   sec.u (offset);
16075 
16076   // Record number of locations and alignment.
16077   cfg->ordinary_loc_align = info.max_range;
16078   cfg->ordinary_locs = offset;
16079 
16080   filenames.release ();
16081 
16082   sec.end (to, to->name (MOD_SNAME_PFX ".olm"), crc_p);
16083   dump.outdent ();
16084 }
16085 
16086 void
16087 module_state::write_macro_maps (elf_out *to, location_map_info &info,
16088 				module_state_config *cfg, unsigned *crc_p)
16089 {
16090   dump () && dump ("Writing macro location maps");
16091   dump.indent ();
16092 
16093   bytes_out sec (to);
16094   sec.begin ();
16095 
16096   dump () && dump ("Macro maps:%u", info.num_maps.second);
16097   sec.u (info.num_maps.second);
16098 
16099   location_t offset = spans[loc_spans::SPAN_FIRST].macro.second;
16100   sec.u (offset);
16101 
16102   unsigned macro_num = 0;
16103   for (unsigned ix = loc_spans::SPAN_FIRST; ix != spans.length (); ix++)
16104     {
16105       loc_spans::span &span = spans[ix];
16106       if (span.macro.first == span.macro.second)
16107 	/* Empty span.  */
16108 	continue;
16109 
16110       for (unsigned macro
16111 	     = linemap_lookup_macro_index (line_table, span.macro.second - 1);
16112 	   macro < LINEMAPS_MACRO_USED (line_table);
16113 	   macro++)
16114 	{
16115 	  line_map_macro const *mmap
16116 	    = LINEMAPS_MACRO_MAP_AT (line_table, macro);
16117 	  location_t start_loc = MAP_START_LOCATION (mmap);
16118 	  if (start_loc < span.macro.first)
16119 	    /* Fallen out of the span.  */
16120 	    break;
16121 
16122 	  if (!mmap->n_tokens)
16123 	    /* Empty expansion.  */
16124 	    continue;
16125 
16126 	  sec.u (offset);
16127 	  sec.u (mmap->n_tokens);
16128 	  sec.cpp_node (mmap->macro);
16129 	  write_location (sec, mmap->expansion);
16130 	  const location_t *locs = mmap->macro_locations;
16131 	  /* There are lots of identical runs.  */
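	  /* Sketch of the encoding below (illustrative): walking the
	     2 * n_tokens locations from the end, each new location is
	     preceded by the repeat count of the run before it (zero
	     before the first), and the final run's count is written
	     last.  E.g. visiting A A B B B C streams 0 A 2 B 3 C 1.  */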
16132 	  location_t prev = UNKNOWN_LOCATION;
16133 	  unsigned count = 0;
16134 	  unsigned runs = 0;
16135 	  for (unsigned jx = mmap->n_tokens * 2; jx--;)
16136 	    {
16137 	      location_t tok_loc = locs[jx];
16138 	      if (tok_loc == prev)
16139 		{
16140 		  count++;
16141 		  continue;
16142 		}
16143 	      runs++;
16144 	      sec.u (count);
16145 	      count = 1;
16146 	      prev = tok_loc;
16147 	      write_location (sec, tok_loc);
16148 	    }
16149 	  sec.u (count);
16150 	  dump (dumper::LOCATION)
16151 	    && dump ("Span:%u macro:%u %I %u/%u*2 locations [%u,%u)->%u",
16152 		     ix, macro_num, identifier (mmap->macro),
16153 		     runs, mmap->n_tokens,
16154 		     start_loc, start_loc + mmap->n_tokens,
16155 		     start_loc + span.macro_delta);
16156 	  macro_num++;
16157 	  offset -= mmap->n_tokens;
16158 	  gcc_checking_assert (offset == start_loc + span.macro_delta);
16159 	}
16160     }
16161   dump () && dump ("Macro location lwm:%u", offset);
16162   sec.u (offset);
16163   gcc_assert (macro_num == info.num_maps.second);
16164 
16165   cfg->macro_locs = MAX_LOCATION_T + 1 - offset;
16166 
16167   sec.end (to, to->name (MOD_SNAME_PFX ".mlm"), crc_p);
16168   dump.outdent ();
16169 }
16170 
16171 bool
16172 module_state::read_ordinary_maps ()
16173 {
16174   bytes_in sec;
16175 
16176   if (!sec.begin (loc, from (), MOD_SNAME_PFX ".olm"))
16177     return false;
16178   dump () && dump ("Reading ordinary location maps");
16179   dump.indent ();
16180 
16181   /* Read the filename table.  */
16182   unsigned len = sec.u ();
16183   dump () && dump ("%u source file names", len);
16184   vec<const char *> filenames;
16185   filenames.create (len);
16186   for (unsigned ix = 0; ix != len; ix++)
16187     {
16188       size_t l;
16189       const char *buf = sec.str (&l);
16190       char *fname = XNEWVEC (char, l + 1);
16191       memcpy (fname, buf, l + 1);
16192       dump (dumper::LOCATION) && dump ("Source file[%u]=%s", ix, fname);
16193       /* We leak these names into the line-map table.  But it
16194 	 doesn't own them.  */
16195       filenames.quick_push (fname);
16196     }
16197 
16198   unsigned num_ordinary = sec.u ();
16199   unsigned max_range = sec.u ();
16200   unsigned low_bits = sec.u ();
16201   location_t zero = sec.u ();
16202   location_t range_mask = (1u << max_range) - 1;
16203 
16204   dump () && dump ("Ordinary maps:%u, range bits:%u, preserve:%x, zero:%u",
16205 		   num_ordinary, max_range, low_bits, zero);
16206 
16207   location_t offset = line_table->highest_location + 1;
16208   /* Ensure offset doesn't go backwards at the start.  */
16209   if ((offset & range_mask) > low_bits)
16210     offset += range_mask + 1;
16211   offset = (offset & ~range_mask);
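  /* Worked example (hypothetical values): with range_mask == 31 and
     low_bits == 7, a highest_location + 1 of 1000 has low bits 8 > 7,
     so we bump to 1032 and then clear the low bits, giving an offset
     of 1024 onto which the preserved low bits are rebased.  */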
16212 
16213   bool propagated = spans.maybe_propagate (this, offset + low_bits);
16214 
16215   line_map_ordinary *maps = static_cast<line_map_ordinary *>
16216     (line_map_new_raw (line_table, false, num_ordinary));
16217 
16218   location_t lwm = offset;
16219   slurp->loc_deltas.first = offset - zero;
16220   ordinary_locs.first = zero + low_bits + slurp->loc_deltas.first;
16221   dump () && dump ("Ordinary loc delta %d", slurp->loc_deltas.first);
16222 
16223   for (unsigned ix = 0; ix != num_ordinary && !sec.get_overrun (); ix++)
16224     {
16225       line_map_ordinary *map = &maps[ix];
16226       unsigned hwm = sec.u ();
16227 
16228       /* Record the current HWM so that the below read_location is
16229 	 ok.  */
16230       ordinary_locs.second = hwm + slurp->loc_deltas.first;
16231       map->start_location = hwm + (offset - zero);
16232       if (map->start_location < lwm)
16233 	sec.set_overrun ();
16234       lwm = map->start_location;
16235       dump (dumper::LOCATION) && dump ("Map:%u %u->%u", ix, hwm, lwm);
16236       map->reason = lc_reason (sec.u ());
16237       map->sysp = sec.u ();
16238       map->m_range_bits = sec.u ();
16239       map->m_column_and_range_bits = map->m_range_bits + sec.u ();
16240 
16241       unsigned fnum = sec.u ();
16242       map->to_file = (fnum < filenames.length () ? filenames[fnum] : "");
16243       map->to_line = sec.u ();
16244 
16245       /* Root the outermost map at our location.  */
16246       location_t from = read_location (sec);
16247       map->included_from = from != UNKNOWN_LOCATION ? from : loc;
16248     }
16249 
16250   location_t hwm = sec.u ();
16251   ordinary_locs.second = hwm + slurp->loc_deltas.first;
16252 
16253   /* highest_location is the one handed out, not the next one to
16254      hand out.  */
16255   line_table->highest_location = ordinary_locs.second - 1;
16256 
16257   if (line_table->highest_location >= LINE_MAP_MAX_LOCATION_WITH_COLS)
16258     /* We shouldn't run out of locations, as we checked before
16259        starting.  */
16260     sec.set_overrun ();
16261   dump () && dump ("Ordinary location hwm:%u", ordinary_locs.second);
16262 
16263   if (propagated)
16264     spans.close ();
16265 
16266   filenames.release ();
16267 
16268   dump.outdent ();
16269   if (!sec.end (from ()))
16270     return false;
16271 
16272   return true;
16273 }
16274 
16275 bool
16276 module_state::read_macro_maps ()
16277 {
16278   bytes_in sec;
16279 
16280   if (!sec.begin (loc, from (), MOD_SNAME_PFX ".mlm"))
16281     return false;
16282   dump () && dump ("Reading macro location maps");
16283   dump.indent ();
16284 
16285   unsigned num_macros = sec.u ();
16286   location_t zero = sec.u ();
16287   dump () && dump ("Macro maps:%u zero:%u", num_macros, zero);
16288 
16289   bool propagated = spans.maybe_propagate (this,
16290 					   line_table->highest_location + 1);
16291 
16292   location_t offset = LINEMAPS_MACRO_LOWEST_LOCATION (line_table);
16293   slurp->loc_deltas.second = zero - offset;
16294   macro_locs.second = zero - slurp->loc_deltas.second;
16295   dump () && dump ("Macro loc delta %d", slurp->loc_deltas.second);
16296 
16297   for (unsigned ix = 0; ix != num_macros && !sec.get_overrun (); ix++)
16298     {
16299       unsigned lwm = sec.u ();
16300       /* Record the current LWM so that the below read_location is
16301 	 ok.  */
16302       macro_locs.first = lwm - slurp->loc_deltas.second;
16303 
16304       unsigned n_tokens = sec.u ();
16305       cpp_hashnode *node = sec.cpp_node ();
16306       location_t exp_loc = read_location (sec);
16307 
16308       const line_map_macro *macro
16309 	= linemap_enter_macro (line_table, node, exp_loc, n_tokens);
16310       if (!macro)
16311 	/* We shouldn't run out of locations, as we checked that we
16312 	   had enough before starting.  */
16313 	break;
16314 
16315       location_t *locs = macro->macro_locations;
16316       location_t tok_loc = UNKNOWN_LOCATION;
16317       unsigned count = sec.u ();
16318       unsigned runs = 0;
16319       for (unsigned jx = macro->n_tokens * 2; jx-- && !sec.get_overrun ();)
16320 	{
16321 	  while (!count-- && !sec.get_overrun ())
16322 	    {
16323 	      runs++;
16324 	      tok_loc = read_location (sec);
16325 	      count = sec.u ();
16326 	    }
16327 	  locs[jx] = tok_loc;
16328 	}
16329       if (count)
16330 	sec.set_overrun ();
16331       dump (dumper::LOCATION)
16332 	&& dump ("Macro:%u %I %u/%u*2 locations [%u,%u)",
16333 		 ix, identifier (node), runs, n_tokens,
16334 		 MAP_START_LOCATION (macro),
16335 		 MAP_START_LOCATION (macro) + n_tokens);
16336     }
16337   location_t lwm = sec.u ();
16338   macro_locs.first = lwm - slurp->loc_deltas.second;
16339 
16340   dump () && dump ("Macro location lwm:%u", macro_locs.first);
16341 
16342   if (propagated)
16343     spans.close ();
16344 
16345   dump.outdent ();
16346   if (!sec.end (from ()))
16347     return false;
16348 
16349   return true;
16350 }
16351 
16352 /* Serialize the definition of MACRO.  */
16353 
16354 void
16355 module_state::write_define (bytes_out &sec, const cpp_macro *macro, bool located)
16356 {
16357   sec.u (macro->count);
16358 
16359   sec.b (macro->fun_like);
16360   sec.b (macro->variadic);
16361   sec.b (macro->syshdr);
16362   sec.bflush ();
16363 
16364   if (located)
16365     write_location (sec, macro->line);
16366   if (macro->fun_like)
16367     {
16368       sec.u (macro->paramc);
16369       const cpp_hashnode *const *parms = macro->parm.params;
16370       for (unsigned ix = 0; ix != macro->paramc; ix++)
16371 	sec.cpp_node (parms[ix]);
16372     }
16373 
16374   unsigned len = 0;
16375   for (unsigned ix = 0; ix != macro->count; ix++)
16376     {
16377       const cpp_token *token = &macro->exp.tokens[ix];
16378       if (located)
16379 	write_location (sec, token->src_loc);
16380       sec.u (token->type);
16381       sec.u (token->flags);
16382       switch (cpp_token_val_index (token))
16383 	{
16384 	default:
16385 	  gcc_unreachable ();
16386 
16387 	case CPP_TOKEN_FLD_ARG_NO:
16388 	  /* An argument reference.  */
16389 	  sec.u (token->val.macro_arg.arg_no);
16390 	  sec.cpp_node (token->val.macro_arg.spelling);
16391 	  break;
16392 
16393 	case CPP_TOKEN_FLD_NODE:
16394 	  /* An identifier.  */
16395 	  sec.cpp_node (token->val.node.node);
16396 	  if (token->val.node.spelling == token->val.node.node)
16397 	    /* The spelling will usually be the same, so optimize
16398 	       that.  */
16399 	    sec.str (NULL, 0);
16400 	  else
16401 	    sec.cpp_node (token->val.node.spelling);
16402 	  break;
16403 
16404 	case CPP_TOKEN_FLD_NONE:
16405 	  break;
16406 
16407 	case CPP_TOKEN_FLD_STR:
16408 	  /* A string, number or comment.  Not always NUL terminated,
16409 	     we stream out in a single concatenation with embedded
16410 	     NULs as that's a safe default.  */
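	  /* Illustration: two FLD_STR tokens spelled 42 and abc are
	     later emitted as the single 7-byte blob "42\0abc\0", each
	     token recording only its length at this point.  */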
16411 	  len += token->val.str.len + 1;
16412 	  sec.u (token->val.str.len);
16413 	  break;
16414 
16415 	case CPP_TOKEN_FLD_SOURCE:
16416 	case CPP_TOKEN_FLD_TOKEN_NO:
16417 	case CPP_TOKEN_FLD_PRAGMA:
16418 	  /* These do not occur inside a macro itself.  */
16419 	  gcc_unreachable ();
16420 	}
16421     }
16422 
16423   if (len)
16424     {
16425       char *ptr = reinterpret_cast<char *> (sec.buf (len));
16426       len = 0;
16427       for (unsigned ix = 0; ix != macro->count; ix++)
16428 	{
16429 	  const cpp_token *token = &macro->exp.tokens[ix];
16430 	  if (cpp_token_val_index (token) == CPP_TOKEN_FLD_STR)
16431 	    {
16432 	      memcpy (ptr + len, token->val.str.text,
16433 		      token->val.str.len);
16434 	      len += token->val.str.len;
16435 	      ptr[len++] = 0;
16436 	    }
16437 	}
16438     }
16439 }
16440 
16441 /* Read a macro definition.  */
16442 
16443 cpp_macro *
16444 module_state::read_define (bytes_in &sec, cpp_reader *reader, bool located) const
16445 {
16446   unsigned count = sec.u ();
16447   /* We rely on knowing cpp_reader's hash table is ident_hash, and
16448      its subobject allocator is stringpool_ggc_alloc and that is just
16449      a wrapper for ggc_alloc_atomic.  */
16450   cpp_macro *macro
16451     = (cpp_macro *)ggc_alloc_atomic (sizeof (cpp_macro)
16452 				     + sizeof (cpp_token) * (count - !!count));
16453   memset (macro, 0, sizeof (cpp_macro) + sizeof (cpp_token) * (count - !!count));
16454 
16455   macro->count = count;
16456   macro->kind = cmk_macro;
16457   macro->imported_p = true;
16458 
16459   macro->fun_like = sec.b ();
16460   macro->variadic = sec.b ();
16461   macro->syshdr = sec.b ();
16462   sec.bflush ();
16463 
16464   macro->line = located ? read_location (sec) : loc;
16465 
16466   if (macro->fun_like)
16467     {
16468       unsigned paramc = sec.u ();
16469       cpp_hashnode **params
16470 	= (cpp_hashnode **)ggc_alloc_atomic (sizeof (cpp_hashnode *) * paramc);
16471       macro->paramc = paramc;
16472       macro->parm.params = params;
16473       for (unsigned ix = 0; ix != paramc; ix++)
16474 	params[ix] = sec.cpp_node ();
16475     }
16476 
16477   unsigned len = 0;
16478   for (unsigned ix = 0; ix != count && !sec.get_overrun (); ix++)
16479     {
16480       cpp_token *token = &macro->exp.tokens[ix];
16481       token->src_loc = located ? read_location (sec) : loc;
16482       token->type = cpp_ttype (sec.u ());
16483       token->flags = sec.u ();
16484       switch (cpp_token_val_index (token))
16485 	{
16486 	default:
16487 	  sec.set_overrun ();
16488 	  break;
16489 
16490 	case CPP_TOKEN_FLD_ARG_NO:
16491 	  /* An argument reference.  */
16492 	  {
16493 	    unsigned arg_no = sec.u ();
16494 	    if (arg_no - 1 >= macro->paramc)
16495 	      sec.set_overrun ();
16496 	    token->val.macro_arg.arg_no = arg_no;
16497 	    token->val.macro_arg.spelling = sec.cpp_node ();
16498 	  }
16499 	  break;
16500 
16501 	case CPP_TOKEN_FLD_NODE:
16502 	  /* An identifier.  */
16503 	  token->val.node.node = sec.cpp_node ();
16504 	  token->val.node.spelling = sec.cpp_node ();
16505 	  if (!token->val.node.spelling)
16506 	    token->val.node.spelling = token->val.node.node;
16507 	  break;
16508 
16509 	case CPP_TOKEN_FLD_NONE:
16510 	  break;
16511 
16512 	case CPP_TOKEN_FLD_STR:
16513 	  /* A string, number or comment.  */
16514 	  token->val.str.len = sec.u ();
16515 	  len += token->val.str.len + 1;
16516 	  break;
16517 	}
16518     }
16519 
16520   if (len)
16521     if (const char *ptr = reinterpret_cast<const char *> (sec.buf (len)))
16522       {
16523 	/* There should be a final NUL.  */
16524 	if (ptr[len-1])
16525 	  sec.set_overrun ();
16526 	/* cpp_alloc_token_string will add a final NUL.  */
16527 	const unsigned char *buf
16528 	  = cpp_alloc_token_string (reader, (const unsigned char *)ptr, len - 1);
16529 	len = 0;
16530 	for (unsigned ix = 0; ix != count && !sec.get_overrun (); ix++)
16531 	  {
16532 	    cpp_token *token = &macro->exp.tokens[ix];
16533 	    if (cpp_token_val_index (token) == CPP_TOKEN_FLD_STR)
16534 	      {
16535 		token->val.str.text = buf + len;
16536 		len += token->val.str.len;
16537 		if (buf[len++])
16538 		  sec.set_overrun ();
16539 	      }
16540 	  }
16541       }
16542 
16543   if (sec.get_overrun ())
16544     return NULL;
16545   return macro;
16546 }
16547 
16548 /* Exported macro data.  */
16549 struct GTY(()) macro_export {
16550   cpp_macro *def;
16551   location_t undef_loc;
16552 
16553   macro_export ()
16554     :def (NULL), undef_loc (UNKNOWN_LOCATION)
16555   {
16556   }
16557 };
16558 
16559 /* Imported macro data.  */
16560 class macro_import {
16561 public:
16562   struct slot {
16563 #if defined (WORDS_BIGENDIAN) && SIZEOF_VOID_P == 8
16564     int offset;
16565 #endif
16566     /* We need to ensure we don't use the LSB for representation, as
16567        that's the union discriminator below.  */
16568     unsigned bits;
16569 
16570 #if !(defined (WORDS_BIGENDIAN) && SIZEOF_VOID_P == 8)
16571     int offset;
16572 #endif
16573 
16574   public:
16575     enum Layout {
16576       L_DEF = 1,
16577       L_UNDEF = 2,
16578       L_BOTH = 3,
16579       L_MODULE_SHIFT = 2
16580     };
16581 
16582   public:
16583     /* Not a regular ctor, because we put it in a union, and that's
16584        not allowed in C++ 98.  */
16585     static slot ctor (unsigned module, unsigned defness)
16586     {
16587       gcc_checking_assert (defness);
16588       slot s;
16589       s.bits = defness | (module << L_MODULE_SHIFT);
16590       s.offset = -1;
16591       return s;
16592     }
16593 
16594   public:
16595     unsigned get_defness () const
16596     {
16597       return bits & L_BOTH;
16598     }
16599     unsigned get_module () const
16600     {
16601       return bits >> L_MODULE_SHIFT;
16602     }
16603     void become_undef ()
16604     {
16605       bits &= ~unsigned (L_DEF);
16606       bits |= unsigned (L_UNDEF);
16607     }
16608   };
16609 
16610 private:
16611   typedef vec<slot, va_heap, vl_embed> ary_t;
16612   union either {
16613     /* Discriminated by bits 0|1 != 0.  The expected case is that
16614        there will be exactly one slot per macro, hence the effort of
16615        packing that.  */
16616     ary_t *ary;
16617     slot single;
16618   } u;
16619 
16620 public:
16621   macro_import ()
16622   {
16623     u.ary = NULL;
16624   }
16625 
16626 private:
16627   bool single_p () const
16628   {
16629     return u.single.bits & slot::L_BOTH;
16630   }
16631   bool occupied_p () const
16632   {
16633     return u.ary != NULL;
16634   }
16635 
16636 public:
16637   unsigned length () const
16638   {
16639     gcc_checking_assert (occupied_p ());
16640     return single_p () ? 1 : u.ary->length ();
16641   }
16642   slot &operator[] (unsigned ix)
16643   {
16644     gcc_checking_assert (occupied_p ());
16645     if (single_p ())
16646       {
16647 	gcc_checking_assert (!ix);
16648 	return u.single;
16649       }
16650     else
16651       return (*u.ary)[ix];
16652   }
16653 
16654 public:
16655   slot &exported ();
16656   slot &append (unsigned module, unsigned defness);
16657 };
16658 
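/* Illustration (not part of any on-disk format): a slot built by
   slot::ctor (5, slot::L_BOTH) has bits == (5 << L_MODULE_SHIFT) | 3
   == 23 and offset == -1.  Because defness is never zero, those low
   bits double as the union discriminator: an aligned ary_t pointer
   has zero low bits, a live single slot does not.  */
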
16659 /* Append a new slot for import MODULE with DEFNESS.  If we're an
16660    empty set, initialize us.  */
16661 
16662 macro_import::slot &
16663 macro_import::append (unsigned module, unsigned defness)
16664 {
16665   if (!occupied_p ())
16666     {
16667       u.single = slot::ctor (module, defness);
16668       return u.single;
16669     }
16670   else
16671     {
16672       bool single = single_p ();
16673       ary_t *m = single ? NULL : u.ary;
16674       vec_safe_reserve (m, 1 + single);
16675       if (single)
16676 	m->quick_push (u.single);
16677       u.ary = m;
16678       return *u.ary->quick_push (slot::ctor (module, defness));
16679     }
16680 }
16681 
16682 /* We're going to export something.  Make sure the first import slot
16683    is us.  */
16684 
16685 macro_import::slot &
16686 macro_import::exported ()
16687 {
16688   if (occupied_p () && !(*this)[0].get_module ())
16689     {
16690       slot &res = (*this)[0];
16691       res.bits |= slot::L_DEF;
16692       return res;
16693     }
16694 
16695   slot *a = &append (0, slot::L_DEF);
16696   if (!single_p ())
16697     {
16698       slot &f = (*this)[0];
16699       std::swap (f, *a);
16700       a = &f;
16701     }
16702   return *a;
16703 }
16704 
16705 /* The import (&exported) macros.  cpp_hashnode's deferred field
16706    indexes this array (offset by 1, so zero means 'not present').  */
16707 
16708 static vec<macro_import, va_heap, vl_embed> *macro_imports;
16709 
16710 /* The exported macros.  A macro_import slot's zeroth element's offset
16711    indexes this array.  If the zeroth slot is not for module zero,
16712    there is no export.  */
16713 
16714 static GTY(()) vec<macro_export, va_gc> *macro_exports;
16715 
16716 /* The reachable set of header imports from this TU.  */
16717 
16718 static GTY(()) bitmap headers;
16719 
16720 /* Get the (possibly empty) macro imports for NODE.  */
16721 
16722 static macro_import &
16723 get_macro_imports (cpp_hashnode *node)
16724 {
16725   if (node->deferred)
16726     return (*macro_imports)[node->deferred - 1];
16727 
16728   vec_safe_reserve (macro_imports, 1);
16729   node->deferred = macro_imports->length () + 1;
16730   return *vec_safe_push (macro_imports, macro_import ());
16731 }
16732 
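/* For example (illustrative): the first identifier to acquire macro
   import data is pushed at macro_imports[0] and gets deferred == 1;
   deferred == 0 therefore always means 'no imported macro data'.  */
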
16733 /* Get the macro export for export EXP of NODE.  */
16734 
16735 static macro_export &
16736 get_macro_export (macro_import::slot &slot)
16737 {
16738   if (slot.offset >= 0)
16739     return (*macro_exports)[slot.offset];
16740 
16741   vec_safe_reserve (macro_exports, 1);
16742   slot.offset = macro_exports->length ();
16743   return *macro_exports->quick_push (macro_export ());
16744 }
16745 
16746 /* If NODE is an exportable macro, add it to the export set.  */
16747 
16748 static int
16749 maybe_add_macro (cpp_reader *, cpp_hashnode *node, void *data_)
16750 {
16751   bool exporting = false;
16752 
16753   if (cpp_user_macro_p (node))
16754     if (cpp_macro *macro = node->value.macro)
16755       /* Ignore imported, builtins, command line and forced header macros.  */
16756       if (!macro->imported_p
16757 	  && !macro->lazy && macro->line >= spans.main_start ())
16758 	{
16759 	  gcc_checking_assert (macro->kind == cmk_macro);
16760 	  /* I don't want to deal with this corner case, which I suspect is
16761 	     a devil's advocate reading of the standard.  */
16762 	  gcc_checking_assert (!macro->extra_tokens);
16763 
16764 	  macro_import::slot &slot = get_macro_imports (node).exported ();
16765 	  macro_export &exp = get_macro_export (slot);
16766 	  exp.def = macro;
16767 	  exporting = true;
16768 	}
16769 
16770   if (!exporting && node->deferred)
16771     {
16772       macro_import &imports = (*macro_imports)[node->deferred - 1];
16773       macro_import::slot &slot = imports[0];
16774       if (!slot.get_module ())
16775 	{
16776 	  gcc_checking_assert (slot.get_defness ());
16777 	  exporting = true;
16778 	}
16779     }
16780 
16781   if (exporting)
16782     static_cast<vec<cpp_hashnode *> *> (data_)->safe_push (node);
16783 
16784   return 1; /* Don't stop.  */
16785 }
16786 
16787 /* Order cpp_hashnodes A_ and B_ by their exported macro locations.  */
16788 
16789 static int
16790 macro_loc_cmp (const void *a_, const void *b_)
16791 {
16792   const cpp_hashnode *node_a = *(const cpp_hashnode *const *)a_;
16793   macro_import &import_a = (*macro_imports)[node_a->deferred - 1];
16794   const macro_export &export_a = (*macro_exports)[import_a[0].offset];
16795   location_t loc_a = export_a.def ? export_a.def->line : export_a.undef_loc;
16796 
16797   const cpp_hashnode *node_b = *(const cpp_hashnode *const *)b_;
16798   macro_import &import_b = (*macro_imports)[node_b->deferred - 1];
16799   const macro_export &export_b = (*macro_exports)[import_b[0].offset];
16800   location_t loc_b = export_b.def ? export_b.def->line : export_b.undef_loc;
16801 
16802   if (loc_a < loc_b)
16803     return +1;
16804   else if (loc_a > loc_b)
16805     return -1;
16806   else
16807     return 0;
16808 }
16809 
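/* Note that the comparator above returns +1 when A's location is the
   smaller, so qsort leaves the vector in descending location order;
   write_macros below walks it from the back, streaming definitions in
   ascending location order.  */
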
16810 /* Write out the exported defines.  This is two sections, one
16811    containing the definitions, the other a table of node names.  */
16812 
16813 unsigned
16814 module_state::write_macros (elf_out *to, cpp_reader *reader, unsigned *crc_p)
16815 {
16816   dump () && dump ("Writing macros");
16817   dump.indent ();
16818 
16819   vec<cpp_hashnode *> macros;
16820   macros.create (100);
16821   cpp_forall_identifiers (reader, maybe_add_macro, &macros);
16822 
16823   dump (dumper::MACRO) && dump ("No more than %u macros", macros.length ());
16824 
16825   macros.qsort (macro_loc_cmp);
16826 
16827   /* Write the defs */
16828   bytes_out sec (to);
16829   sec.begin ();
16830 
16831   unsigned count = 0;
16832   for (unsigned ix = macros.length (); ix--;)
16833     {
16834       cpp_hashnode *node = macros[ix];
16835       macro_import::slot &slot = (*macro_imports)[node->deferred - 1][0];
16836       gcc_assert (!slot.get_module () && slot.get_defness ());
16837 
16838       macro_export &mac = (*macro_exports)[slot.offset];
16839       gcc_assert (!!(slot.get_defness () & macro_import::slot::L_UNDEF)
16840 		  == (mac.undef_loc != UNKNOWN_LOCATION)
16841 		  && !!(slot.get_defness () & macro_import::slot::L_DEF)
16842 		  == (mac.def != NULL));
16843 
16844       if (IDENTIFIER_KEYWORD_P (identifier (node)))
16845 	{
16846 	  warning_at (mac.def->line, 0,
16847 		      "not exporting %<#define %E%> as it is a keyword",
16848 		      identifier (node));
16849 	  slot.offset = 0;
16850 	  continue;
16851 	}
16852 
16853       count++;
16854       slot.offset = sec.pos;
16855       dump (dumper::MACRO)
16856 	&& dump ("Writing macro %s%s%s %I at %u",
16857 		 slot.get_defness () & macro_import::slot::L_UNDEF
16858 		 ? "#undef" : "",
16859 		 slot.get_defness () == macro_import::slot::L_BOTH
16860 		 ? " & " : "",
16861 		 slot.get_defness () & macro_import::slot::L_DEF
16862 		 ? "#define" : "",
16863 		 identifier (node), slot.offset);
16864       if (mac.undef_loc != UNKNOWN_LOCATION)
16865 	write_location (sec, mac.undef_loc);
16866       if (mac.def)
16867 	write_define (sec, mac.def);
16868     }
16869   sec.end (to, to->name (MOD_SNAME_PFX ".def"), crc_p);
16870 
16871   if (count)
16872     {
16873       /* Write the table.  */
16874       bytes_out sec (to);
16875       sec.begin ();
16876       sec.u (count);
16877 
16878       for (unsigned ix = macros.length (); ix--;)
16879 	{
16880 	  const cpp_hashnode *node = macros[ix];
16881 	  macro_import::slot &slot = (*macro_imports)[node->deferred - 1][0];
16882 
16883 	  if (slot.offset)
16884 	    {
16885 	      sec.cpp_node (node);
16886 	      sec.u (slot.get_defness ());
16887 	      sec.u (slot.offset);
16888 	    }
16889 	}
16890       sec.end (to, to->name (MOD_SNAME_PFX ".mac"), crc_p);
16891     }
16892 
16893   macros.release ();
16894   dump.outdent ();
16895   return count;
16896 }
16897 
16898 bool
16899 module_state::read_macros ()
16900 {
16901   /* Get the def section.  */
16902   if (!slurp->macro_defs.begin (loc, from (), MOD_SNAME_PFX ".def"))
16903     return false;
16904 
16905   /* Get the tbl section, if there are defs. */
16906   if (slurp->macro_defs.more_p ()
16907       && !slurp->macro_tbl.begin (loc, from (), MOD_SNAME_PFX ".mac"))
16908     return false;
16909 
16910   return true;
16911 }
16912 
16913 /* Install the macro name table.  */
16914 
16915 void
16916 module_state::install_macros ()
16917 {
16918   bytes_in &sec = slurp->macro_tbl;
16919   if (!sec.size)
16920     return;
16921 
16922   dump () && dump ("Reading macro table %M", this);
16923   dump.indent ();
16924 
16925   unsigned count = sec.u ();
16926   dump () && dump ("%u macros", count);
16927   while (count--)
16928     {
16929       cpp_hashnode *node = sec.cpp_node ();
16930       macro_import &imp = get_macro_imports (node);
16931       unsigned flags = sec.u () & macro_import::slot::L_BOTH;
16932       if (!flags)
16933 	sec.set_overrun ();
16934 
16935       if (sec.get_overrun ())
16936 	break;
16937 
16938       macro_import::slot &slot = imp.append (mod, flags);
16939       slot.offset = sec.u ();
16940 
16941       dump (dumper::MACRO)
16942 	&& dump ("Read %s macro %s%s%s %I at %u",
16943 		 imp.length () > 1 ? "add" : "new",
16944 		 flags & macro_import::slot::L_UNDEF ? "#undef" : "",
16945 		 flags == macro_import::slot::L_BOTH ? " & " : "",
16946 		 flags & macro_import::slot::L_DEF ? "#define" : "",
16947 		 identifier (node), slot.offset);
16948 
16949       /* We'll leak an imported definition's TOKEN_FLD_STR's data
16950 	 here.  But that only happens when we've had to resolve the
16951 	 deferred macro before this import -- why are you doing
16952 	 that?  */
16953       if (cpp_macro *cur = cpp_set_deferred_macro (node))
16954 	if (!cur->imported_p)
16955 	  {
16956 	    macro_import::slot &slot = imp.exported ();
16957 	    macro_export &exp = get_macro_export (slot);
16958 	    exp.def = cur;
16959 	    dump (dumper::MACRO)
16960 	      && dump ("Saving current #define %I", identifier (node));
16961 	  }
16962     }
16963 
16964   /* We're now done with the table.  */
16965   elf_in::release (slurp->from, sec);
16966 
16967   dump.outdent ();
16968 }
16969 
16970 /* Import the transitive macros.  */
16971 
16972 void
16973 module_state::import_macros ()
16974 {
16975   bitmap_ior_into (headers, slurp->headers);
16976 
16977   bitmap_iterator bititer;
16978   unsigned bitnum;
16979   EXECUTE_IF_SET_IN_BITMAP (slurp->headers, 0, bitnum, bititer)
16980     (*modules)[bitnum]->install_macros ();
16981 }
16982 
16983 /* NODE is being undefined at LOC.  Record it in the export table, if
16984    necessary.  */
16985 
16986 void
16987 module_state::undef_macro (cpp_reader *, location_t loc, cpp_hashnode *node)
16988 {
16989   if (!node->deferred)
16990     /* The macro is not imported, so our undef is irrelevant.  */
16991     return;
16992 
16993   unsigned n = dump.push (NULL);
16994 
16995   macro_import::slot &slot = (*macro_imports)[node->deferred - 1].exported ();
16996   macro_export &exp = get_macro_export (slot);
16997 
16998   exp.undef_loc = loc;
16999   slot.become_undef ();
17000   exp.def = NULL;
17001 
17002   dump (dumper::MACRO) && dump ("Recording macro #undef %I", identifier (node));
17003 
17004   dump.pop (n);
17005 }
17006 
17007 /* NODE is a deferred macro node.  Determine the definition and return
17008    it, with NULL if undefined.  May issue diagnostics.
17009 
17010    This can leak memory, when merging declarations -- the string
17011    contents (TOKEN_FLD_STR) of each definition are allocated in
17012    unreclaimable cpp objstack.  Only one will win.  However, I do not
17013    expect this to be common -- mostly macros have a single point of
17014    definition.  Perhaps we could restore the objstack to its position
17015    after the first imported definition (if that wins)?  The macros
17016    themselves are GC'd.  */
17017 
17018 cpp_macro *
17019 module_state::deferred_macro (cpp_reader *reader, location_t loc,
17020 			      cpp_hashnode *node)
17021 {
17022   macro_import &imports = (*macro_imports)[node->deferred - 1];
17023 
17024   unsigned n = dump.push (NULL);
17025   dump (dumper::MACRO) && dump ("Deferred macro %I", identifier (node));
17026 
17027   bitmap visible (BITMAP_GGC_ALLOC ());
17028 
17029   if (!((imports[0].get_defness () & macro_import::slot::L_UNDEF)
17030 	&& !imports[0].get_module ()))
17031     {
17032       /* Calculate the set of visible header imports.  */
17033       bitmap_copy (visible, headers);
17034       for (unsigned ix = imports.length (); ix--;)
17035 	{
17036 	  const macro_import::slot &slot = imports[ix];
17037 	  unsigned mod = slot.get_module ();
17038 	  if ((slot.get_defness () & macro_import::slot::L_UNDEF)
17039 	      && bitmap_bit_p (visible, mod))
17040 	    {
17041 	      bitmap arg = mod ? (*modules)[mod]->slurp->headers : headers;
17042 	      bitmap_and_compl_into (visible, arg);
17043 	      bitmap_set_bit (visible, mod);
17044 	    }
17045 	}
17046     }
17047   bitmap_set_bit (visible, 0);
17048 
17049   /* Now find the macros that are still visible.  */
17050   bool failed = false;
17051   cpp_macro *def = NULL;
17052   vec<macro_export> defs;
17053   defs.create (imports.length ());
17054   for (unsigned ix = imports.length (); ix--;)
17055     {
17056       const macro_import::slot &slot = imports[ix];
17057       unsigned mod = slot.get_module ();
17058       if (bitmap_bit_p (visible, mod))
17059 	{
17060 	  macro_export *pushed = NULL;
17061 	  if (mod)
17062 	    {
17063 	      const module_state *imp = (*modules)[mod];
17064 	      bytes_in &sec = imp->slurp->macro_defs;
17065 	      if (!sec.get_overrun ())
17066 		{
17067 		  dump (dumper::MACRO)
17068 		    && dump ("Reading macro %s%s%s %I module %M at %u",
17069 			     slot.get_defness () & macro_import::slot::L_UNDEF
17070 			     ? "#undef" : "",
17071 			     slot.get_defness () == macro_import::slot::L_BOTH
17072 			     ? " & " : "",
17073 			     slot.get_defness () & macro_import::slot::L_DEF
17074 			     ? "#define" : "",
17075 			     identifier (node), imp, slot.offset);
17076 		  sec.random_access (slot.offset);
17077 
17078 		  macro_export exp;
17079 		  if (slot.get_defness () & macro_import::slot::L_UNDEF)
17080 		    exp.undef_loc = imp->read_location (sec);
17081 		  if (slot.get_defness () & macro_import::slot::L_DEF)
17082 		    exp.def = imp->read_define (sec, reader);
17083 		  if (sec.get_overrun ())
17084 		    error_at (loc, "macro definitions of %qE corrupted",
17085 			      imp->name);
17086 		  else
17087 		    pushed = defs.quick_push (exp);
17088 		}
17089 	    }
17090 	  else
17091 	    pushed = defs.quick_push ((*macro_exports)[slot.offset]);
17092 	  if (pushed && pushed->def)
17093 	    {
17094 	      if (!def)
17095 		def = pushed->def;
17096 	      else if (cpp_compare_macros (def, pushed->def))
17097 		failed = true;
17098 	    }
17099 	}
17100     }
17101 
17102   if (failed)
17103     {
17104       /* If LOC is the first loc, this is the end of file check, which
17105 	 is a warning.  */
17106       if (loc == MAP_START_LOCATION (LINEMAPS_ORDINARY_MAP_AT (line_table, 0)))
17107 	warning_at (loc, OPT_Winvalid_imported_macros,
17108 		    "inconsistent imported macro definition %qE",
17109 		    identifier (node));
17110       else
17111 	error_at (loc, "inconsistent imported macro definition %qE",
17112 		  identifier (node));
17113       for (unsigned ix = defs.length (); ix--;)
17114 	{
17115 	  macro_export &exp = defs[ix];
17116 	  if (exp.undef_loc)
17117 	    inform (exp.undef_loc, "%<#undef %E%>", identifier (node));
17118 	  if (exp.def)
17119 	    inform (exp.def->line, "%<#define %s%>",
17120 		    cpp_macro_definition (reader, node, exp.def));
17121 	}
17122       def = NULL;
17123     }
17124 
17125   defs.release ();
17126 
17127   dump.pop (n);
17128 
17129   return def;
17130 }
17131 
17132 /* Stream the static aggregates.  Sadly some headers (ahem:
17133    iostream) contain static vars, and rely on them to run global
17134    ctors.  */
17135 unsigned
17136 module_state::write_inits (elf_out *to, depset::hash &table, unsigned *crc_ptr)
17137 {
17138   if (!static_aggregates && !tls_aggregates)
17139     return 0;
17140 
17141   dump () && dump ("Writing initializers");
17142   dump.indent ();
17143 
17144   static_aggregates = nreverse (static_aggregates);
17145   tls_aggregates = nreverse (tls_aggregates);
17146 
17147   unsigned count = 0;
17148   trees_out sec (to, this, table, ~0u);
17149   sec.begin ();
17150 
17151   tree list = static_aggregates;
17152   for (int passes = 0; passes != 2; passes++)
17153     {
17154       for (tree init = list; init; init = TREE_CHAIN (init), count++)
17155 	if (TREE_LANG_FLAG_0 (init))
17156 	  {
17157 	    tree decl = TREE_VALUE (init);
17158 
17159 	    dump ("Initializer:%u for %N", count, decl);
17160 	    sec.tree_node (decl);
17161 	  }
17162 
17163       list = tls_aggregates;
17164     }
17165 
17166   sec.end (to, to->name (MOD_SNAME_PFX ".ini"), crc_ptr);
17167   dump.outdent ();
17168 
17169   return count;
17170 }
17171 
17172 /* We have to defer some post-load processing until we've completed
17173    reading, because they can cause more reading.  */
17174 
17175 static void
17176 post_load_processing ()
17177 {
17178   /* We mustn't cause a GC, our caller should have arranged for that
17179      not to happen.  */
17180   gcc_checking_assert (function_depth);
17181 
17182   if (!post_load_decls)
17183     return;
17184 
17185   tree old_cfd = current_function_decl;
17186   struct function *old_cfun = cfun;
17187   while (post_load_decls->length ())
17188     {
17189       tree decl = post_load_decls->pop ();
17190 
17191       dump () && dump ("Post-load processing of %N", decl);
17192 
17193       gcc_checking_assert (DECL_ABSTRACT_P (decl));
17194       /* Cloning can cause loading -- specifically operator delete for
17195 	 the deleting dtor.  */
17196       maybe_clone_body (decl);
17197     }
17198 
17199   cfun = old_cfun;
17200   current_function_decl = old_cfd;
17201 }
17202 
17203 bool
17204 module_state::read_inits (unsigned count)
17205 {
17206   trees_in sec (this);
17207   if (!sec.begin (loc, from (), from ()->find (MOD_SNAME_PFX ".ini")))
17208     return false;
17209   dump () && dump ("Reading %u initializers", count);
17210   dump.indent ();
17211 
17212   lazy_snum = ~0u;
17213   for (unsigned ix = 0; ix != count; ix++)
17214     {
17215       /* Merely referencing the decl causes its initializer to be read
17216 	 and added to the correct list.  */
17217       tree decl = sec.tree_node ();
17218 
17219       if (sec.get_overrun ())
17220 	break;
17221       if (decl)
17222 	dump ("Initializer:%u for %N", count, decl);
17223     }
17224   lazy_snum = 0;
17225   post_load_processing ();
17226   dump.outdent ();
17227   if (!sec.end (from ()))
17228     return false;
17229   return true;
17230 }
17231 
17232 void
17233 module_state::write_counts (elf_out *to, unsigned counts[MSC_HWM],
17234 			    unsigned *crc_ptr)
17235 {
17236   bytes_out cfg (to);
17237 
17238   cfg.begin ();
17239 
17240   for (unsigned ix = MSC_HWM; ix--;)
17241     cfg.u (counts[ix]);
17242 
17243   if (dump ())
17244     {
17245       dump ("Cluster sections are [%u,%u)",
17246 	    counts[MSC_sec_lwm], counts[MSC_sec_hwm]);
17247       dump ("Bindings %u", counts[MSC_bindings]);
17248       dump ("Pendings %u", counts[MSC_pendings]);
17249       dump ("Entities %u", counts[MSC_entities]);
17250       dump ("Namespaces %u", counts[MSC_namespaces]);
17251       dump ("Macros %u", counts[MSC_macros]);
17252       dump ("Initializers %u", counts[MSC_inits]);
17253     }
17254 
17255   cfg.end (to, to->name (MOD_SNAME_PFX ".cnt"), crc_ptr);
17256 }
17257 
17258 bool
17259 module_state::read_counts (unsigned counts[MSC_HWM])
17260 {
17261   bytes_in cfg;
17262 
17263   if (!cfg.begin (loc, from (), MOD_SNAME_PFX ".cnt"))
17264     return false;
17265 
17266   for (unsigned ix = MSC_HWM; ix--;)
17267     counts[ix] = cfg.u ();
17268 
17269   if (dump ())
17270     {
17271       dump ("Declaration sections are [%u,%u)",
17272 	    counts[MSC_sec_lwm], counts[MSC_sec_hwm]);
17273       dump ("Bindings %u", counts[MSC_bindings]);
17274       dump ("Pendings %u", counts[MSC_pendings]);
17275       dump ("Entities %u", counts[MSC_entities]);
17276       dump ("Namespaces %u", counts[MSC_namespaces]);
17277       dump ("Macros %u", counts[MSC_macros]);
17278       dump ("Initializers %u", counts[MSC_inits]);
17279     }
17280 
17281   return cfg.end (from ());
17282 }
17283 
17284 /* Tool configuration:  MOD_SNAME_PFX .config
17285 
17286    This is data that confirms current state (or fails).  */
17287 
17288 void
17289 module_state::write_config (elf_out *to, module_state_config &config,
17290 			    unsigned inner_crc)
17291 {
17292   bytes_out cfg (to);
17293 
17294   cfg.begin ();
17295 
17296   /* Write version and inner crc as u32 values, for easier
17297      debug inspection.  */
17298   dump () && dump ("Writing version=%V, inner_crc=%x",
17299 		   MODULE_VERSION, inner_crc);
17300   cfg.u32 (unsigned (MODULE_VERSION));
17301   cfg.u32 (inner_crc);
17302 
17303   cfg.u (to->name (is_header () ? "" : get_flatname ()));
17304 
17305   /* Configuration. */
17306   dump () && dump ("Writing target='%s', host='%s'",
17307 		   TARGET_MACHINE, HOST_MACHINE);
17308   unsigned target = to->name (TARGET_MACHINE);
17309   unsigned host = (!strcmp (TARGET_MACHINE, HOST_MACHINE)
17310 		   ? target : to->name (HOST_MACHINE));
17311   cfg.u (target);
17312   cfg.u (host);
17313 
17314   cfg.str (config.dialect_str);
17315   cfg.u (extensions);
17316 
17317   /* Global tree information.  We write the globals crc separately,
17318      rather than mix it directly into the overall crc, as it is used
17319      to ensure data match between instances of the compiler, not
17320      integrity of the file.  */
17321   dump () && dump ("Writing globals=%u, crc=%x",
17322 		   fixed_trees->length (), global_crc);
17323   cfg.u (fixed_trees->length ());
17324   cfg.u32 (global_crc);
17325 
17326   if (is_partition ())
17327     cfg.u (is_interface ());
17328 
17329   cfg.u (config.num_imports);
17330   cfg.u (config.num_partitions);
17331   cfg.u (config.num_entities);
17332 
17333   cfg.u (config.ordinary_locs);
17334   cfg.u (config.macro_locs);
17335   cfg.u (config.ordinary_loc_align);
17336 
17337   /* Now generate CRC, we'll have incorporated the inner CRC because
17338      of its serialization above.  */
17339   cfg.end (to, to->name (MOD_SNAME_PFX ".cfg"), &crc);
17340   dump () && dump ("Writing CRC=%x", crc);
17341 }
17342 
17343 void
17344 module_state::note_cmi_name ()
17345 {
17346   if (!cmi_noted_p && filename)
17347     {
17348       cmi_noted_p = true;
17349       inform (loc, "compiled module file is %qs",
17350 	      maybe_add_cmi_prefix (filename));
17351     }
17352 }
17353 
17354 bool
17355 module_state::read_config (module_state_config &config)
17356 {
17357   bytes_in cfg;
17358 
17359   if (!cfg.begin (loc, from (), MOD_SNAME_PFX ".cfg"))
17360     return false;
17361 
17362   /* Check version.  */
17363   unsigned my_ver = MODULE_VERSION;
17364   unsigned their_ver = cfg.u32 ();
17365   dump () && dump  (my_ver == their_ver ? "Version %V"
17366 		    : "Expecting %V found %V", my_ver, their_ver);
17367   if (their_ver != my_ver)
17368     {
17369       /* The compiler versions differ.  Close enough? */
17370       verstr_t my_string, their_string;
17371 
17372       version2string (my_ver, my_string);
17373       version2string (their_ver, their_string);
17374 
17375       /* Reject when either is non-experimental or when experimental
17376 	 major versions differ.  */
17377       bool reject_p = ((!IS_EXPERIMENTAL (my_ver)
17378 			|| !IS_EXPERIMENTAL (their_ver)
17379 			|| MODULE_MAJOR (my_ver) != MODULE_MAJOR (their_ver))
17380 		       /* The 'I know what I'm doing' switch.  */
17381 		       && !flag_module_version_ignore);
17382       bool inform_p = true;
17383       if (reject_p)
17384 	{
17385 	  cfg.set_overrun ();
17386 	  error_at (loc, "compiled module is %sversion %s",
17387 		    IS_EXPERIMENTAL (their_ver) ? "experimental " : "",
17388 		    their_string);
17389 	}
17390       else
17391 	inform_p = warning_at (loc, 0, "compiled module is %sversion %s",
17392 			     IS_EXPERIMENTAL (their_ver) ? "experimental " : "",
17393 			     their_string);
17394 
17395       if (inform_p)
17396 	{
17397 	  inform (loc, "compiler is %sversion %s%s%s",
17398 		  IS_EXPERIMENTAL (my_ver) ? "experimental " : "",
17399 		  my_string,
17400 		  reject_p ? "" : flag_module_version_ignore
17401 		  ? ", be it on your own head!" : ", close enough?",
17402 		  reject_p ? "" : " \xc2\xaf\\_(\xe3\x83\x84)_/\xc2\xaf");
17403 	  note_cmi_name ();
17404 	}
17405 
17406       if (reject_p)
17407 	goto done;
17408     }
17409 
17410   /*  We wrote the inner crc merely to merge it, so simply read it
17411       back and forget it.  */
17412   cfg.u32 ();
17413 
17414   /* Check module name.  */
17415   {
17416     const char *their_name = from ()->name (cfg.u ());
17417     const char *our_name = "";
17418 
17419     if (!is_header ())
17420       our_name = get_flatname ();
17421 
17422     /* Header units can be aliased, so name checking is
17423        inappropriate.  */
17424     if (0 != strcmp (their_name, our_name))
17425       {
17426 	error_at (loc,
17427 		  their_name[0] && our_name[0] ? G_("module %qs found")
17428 		  : their_name[0]
17429 		  ? G_("header module expected, module %qs found")
17430 		  : G_("module %qs expected, header module found"),
17431 		  their_name[0] ? their_name : our_name);
17432 	cfg.set_overrun ();
17433 	goto done;
17434       }
17435   }
17436 
17437   /* Check the CRC after the above sanity checks, so that the user is
17438      clued in.  */
17439   {
17440     unsigned e_crc = crc;
17441     crc = cfg.get_crc ();
17442     dump () && dump ("Reading CRC=%x", crc);
17443     if (!is_direct () && crc != e_crc)
17444       {
17445 	error_at (loc, "module %qs CRC mismatch", get_flatname ());
17446 	cfg.set_overrun ();
17447 	goto done;
17448       }
17449   }
17450 
17451   /* Check target & host.  */
17452   {
17453     const char *their_target = from ()->name (cfg.u ());
17454     const char *their_host = from ()->name (cfg.u ());
17455     dump () && dump ("Read target='%s', host='%s'", their_target, their_host);
17456     if (strcmp (their_target, TARGET_MACHINE)
17457 	|| strcmp (their_host, HOST_MACHINE))
17458       {
17459 	error_at (loc, "target & host is %qs:%qs, expected %qs:%qs",
17460 		  their_target, TARGET_MACHINE, their_host, HOST_MACHINE);
17461 	cfg.set_overrun ();
17462 	goto done;
17463       }
17464   }
17465 
17466   /* Check compilation dialect.  This must match.  */
17467   {
17468     const char *their_dialect = cfg.str ();
17469     if (strcmp (their_dialect, config.dialect_str))
17470       {
17471 	error_at (loc, "language dialect differs %qs, expected %qs",
17472 		  their_dialect, config.dialect_str);
17473 	cfg.set_overrun ();
17474 	goto done;
17475       }
17476   }
17477 
17478   /* Check for extensions.  If they set any, we must have them set
17479      too.  */
17480   {
17481     unsigned ext = cfg.u ();
17482     unsigned allowed = (flag_openmp ? SE_OPENMP : 0);
17483 
17484     if (unsigned bad = ext & ~allowed)
17485       {
17486 	if (bad & SE_OPENMP)
17487 	  error_at (loc, "module contains OpenMP, use %<-fopenmp%> to enable");
17488 	cfg.set_overrun ();
17489 	goto done;
17490       }
17491     extensions = ext;
17492   }
17493 
17494   /* Check global trees.  */
17495   {
17496     unsigned their_fixed_length = cfg.u ();
17497     unsigned their_fixed_crc = cfg.u32 ();
17498     dump () && dump ("Read globals=%u, crc=%x",
17499 		     their_fixed_length, their_fixed_crc);
17500     if (!flag_preprocess_only
17501 	&& (their_fixed_length != fixed_trees->length ()
17502 	    || their_fixed_crc != global_crc))
17503       {
17504 	error_at (loc, "fixed tree mismatch");
17505 	cfg.set_overrun ();
17506 	goto done;
17507       }
17508   }
17509 
17510   /* All non-partitions are interfaces.  */
17511   interface_p = !is_partition () || cfg.u ();
17512 
17513   config.num_imports = cfg.u ();
17514   config.num_partitions = cfg.u ();
17515   config.num_entities = cfg.u ();
17516 
17517   config.ordinary_locs = cfg.u ();
17518   config.macro_locs = cfg.u ();
17519   config.ordinary_loc_align = cfg.u ();
17520 
17521  done:
17522   return cfg.end (from ());
17523 }
17524 
17525 /* Comparator for ordering the Ordered Ordinary Location array.  */
17526 
17527 static int
17528 ool_cmp (const void *a_, const void *b_)
17529 {
17530   auto *a = *static_cast<const module_state *const *> (a_);
17531   auto *b = *static_cast<const module_state *const *> (b_);
17532   if (a == b)
17533     return 0;
17534   else if (a->ordinary_locs.first < b->ordinary_locs.second)
17535     return -1;
17536   else
17537     return +1;
17538 }
17539 
17540 /* Use ELROND format to record the following sections:
17541      qualified-names	    : binding value(s)
17542      MOD_SNAME_PFX.README   : human readable, strings
17543      MOD_SNAME_PFX.ENV      : environment strings, strings
17544      MOD_SNAME_PFX.nms 	    : namespace hierarchy
17545      MOD_SNAME_PFX.bnd      : binding table
17546      MOD_SNAME_PFX.spc      : specialization table
17547      MOD_SNAME_PFX.imp      : import table
17548      MOD_SNAME_PFX.ent      : entity table
17549      MOD_SNAME_PFX.prt      : partitions table
17550      MOD_SNAME_PFX.olm      : ordinary line maps
17551      MOD_SNAME_PFX.mlm      : macro line maps
17552      MOD_SNAME_PFX.def      : macro definitions
17553      MOD_SNAME_PFX.mac      : macro index
17554      MOD_SNAME_PFX.ini      : inits
17555      MOD_SNAME_PFX.cnt      : counts
17556      MOD_SNAME_PFX.cfg      : config data
17557 */
17558 
17559 void
17560 module_state::write (elf_out *to, cpp_reader *reader)
17561 {
17562   /* Figure out remapped module numbers, which might elide
17563      partitions.  */
17564   bitmap partitions = NULL;
17565   if (!is_header () && !is_partition ())
17566     partitions = BITMAP_GGC_ALLOC ();
17567 
17568   unsigned mod_hwm = 1;
17569   for (unsigned ix = 1; ix != modules->length (); ix++)
17570     {
17571       module_state *imp = (*modules)[ix];
17572 
17573       /* Promote any non-partition direct import from a partition, unless
17574 	 we're a partition.  */
17575       if (!is_partition () && !imp->is_partition ()
17576 	  && imp->is_partition_direct ())
17577 	imp->directness = MD_PURVIEW_DIRECT;
17578 
17579       /* Write any import that is not a partition, unless we're a
17580 	 partition.  */
17581       if (!partitions || !imp->is_partition ())
17582 	imp->remap = mod_hwm++;
17583       else
17584 	{
17585 	  dump () && dump ("Partition %M %u", imp, ix);
17586 	  bitmap_set_bit (partitions, ix);
17587 	  imp->remap = 0;
17588 	  /* All interface partitions must be exported.  */
17589 	  if (imp->is_interface () && !bitmap_bit_p (exports, imp->mod))
17590 	    {
17591 	      error_at (imp->loc, "interface partition is not exported");
17592 	      bitmap_set_bit (exports, imp->mod);
17593 	    }
17594 
17595 	  /* All the partition entities should have been loaded when
17596 	     loading the partition.  */
17597 	  if (CHECKING_P)
17598 	    for (unsigned jx = 0; jx != imp->entity_num; jx++)
17599 	      {
17600 		binding_slot *slot = &(*entity_ary)[imp->entity_lwm + jx];
17601 		gcc_checking_assert (!slot->is_lazy ());
17602 	      }
17603 	}
17604     }
17605 
17606   if (partitions && bitmap_empty_p (partitions))
17607     /* No partitions present.  */
17608     partitions = nullptr;
17609 
17610   /* Find the set of decls we must write out.  */
17611   depset::hash table (DECL_NAMESPACE_BINDINGS (global_namespace)->size () * 8);
17612   /* Add the specializations before the writables, so that we can
17613      detect injected friend specializations.  */
17614   table.add_specializations (true);
17615   table.add_specializations (false);
17616   if (partial_specializations)
17617     {
17618       table.add_partial_entities (partial_specializations);
17619       partial_specializations = NULL;
17620     }
17621   table.add_namespace_entities (global_namespace, partitions);
17622   if (class_members)
17623     {
17624       table.add_class_entities (class_members);
17625       class_members = NULL;
17626     }
17627 
17628   /* Now join everything up.  */
17629   table.find_dependencies (this);
17630 
17631   if (!table.finalize_dependencies ())
17632     {
17633       to->set_error ();
17634       return;
17635     }
17636 
17637 #if CHECKING_P
17638   /* We're done verifying at-most once reading, reset to verify
17639      at-most once writing.  */
17640   note_defs = note_defs_table_t::create_ggc (1000);
17641 #endif
17642 
17643   /* Determine Strongly Connected Components.  */
17644   vec<depset *> sccs = table.connect ();
17645 
17646   vec_alloc (ool, modules->length ());
17647   for (unsigned ix = modules->length (); --ix;)
17648     {
17649       auto *import = (*modules)[ix];
17650       if (import->loadedness > ML_NONE
17651 	  && !(partitions && bitmap_bit_p (partitions, import->mod)))
17652 	ool->quick_push (import);
17653     }
17654   ool->qsort (ool_cmp);
17655 
17656   unsigned crc = 0;
17657   module_state_config config;
17658   location_map_info map_info = write_prepare_maps (&config);
17659   unsigned counts[MSC_HWM];
17660 
17661   config.num_imports = mod_hwm;
17662   config.num_partitions = modules->length () - mod_hwm;
17663   memset (counts, 0, sizeof (counts));
17664 
17665   /* depset::cluster is the cluster number,
17666      depset::section is unspecified scratch value.
17667 
17668      The following loops make use of the tarjan property that
17669      dependencies will be earlier in the SCCS array.  */
17670 
17671   /* This first loop determines the number of depsets in each SCC, and
17672      also the number of namespaces we're dealing with.  During the
17673      loop, the meaning of a couple of depset fields now change:
17674 
17675      depset::cluster -> size_of cluster, if first of cluster & !namespace
17676      depset::section -> section number of cluster (if !namespace). */
17677 
17678   unsigned n_spaces = 0;
17679   counts[MSC_sec_lwm] = counts[MSC_sec_hwm] = to->get_section_limit ();
17680   for (unsigned size, ix = 0; ix < sccs.length (); ix += size)
17681     {
17682       depset **base = &sccs[ix];
17683 
17684       if (base[0]->get_entity_kind () == depset::EK_NAMESPACE)
17685 	{
17686 	  n_spaces++;
17687 	  size = 1;
17688 	}
17689       else
17690 	{
17691 	  /* Count the members in this cluster.  */
17692 	  for (size = 1; ix + size < sccs.length (); size++)
17693 	    if (base[size]->cluster != base[0]->cluster)
17694 	      break;
17695 
17696 	  for (unsigned jx = 0; jx != size; jx++)
17697 	    {
17698 	      /* Set the section number.  */
17699 	      base[jx]->cluster = ~(~0u >> 1); /* A bad value.  */
17700 	      base[jx]->section = counts[MSC_sec_hwm];
17701 	    }
17702 
17703 	  /* Save the size in the first member's cluster slot.  */
17704 	  base[0]->cluster = size;
17705 
17706 	  counts[MSC_sec_hwm]++;
17707 	}
17708     }
17709 
17710   /* Write the clusters.  Namespace decls are put in the spaces array.
17711      The meaning of depset::cluster changes to provide the
17712      unnamed-decl count of the depset's decl (and remains zero for
17713      non-decls and non-unnamed).  */
17714   unsigned bytes = 0;
17715   vec<depset *> spaces;
17716   spaces.create (n_spaces);
17717 
17718   for (unsigned size, ix = 0; ix < sccs.length (); ix += size)
17719     {
17720       depset **base = &sccs[ix];
17721 
17722       if (base[0]->get_entity_kind () == depset::EK_NAMESPACE)
17723 	{
17724 	  tree decl = base[0]->get_entity ();
17725 	  if (decl == global_namespace)
17726 	    base[0]->cluster = 0;
17727 	  else if (!base[0]->is_import ())
17728 	    {
17729 	      base[0]->cluster = counts[MSC_entities]++;
17730 	      spaces.quick_push (base[0]);
17731 	      counts[MSC_namespaces]++;
17732 	      if (CHECKING_P)
17733 		{
17734 		  /* Add it to the entity map, such that we can tell it is
17735 		     part of us.  */
17736 		  bool existed;
17737 		  unsigned *slot = &entity_map->get_or_insert
17738 		    (DECL_UID (decl), &existed);
17739 		  if (existed)
17740 		    /* It must have come from a partition.  */
17741 		    gcc_checking_assert
17742 		      (import_entity_module (*slot)->is_partition ());
17743 		  *slot = ~base[0]->cluster;
17744 		}
17745 	      dump (dumper::CLUSTER) && dump ("Cluster namespace %N", decl);
17746 	    }
17747 	  size = 1;
17748 	}
17749       else
17750 	{
17751 	  size = base[0]->cluster;
17752 
17753 	  /* Cluster is now used to number entities.  */
17754 	  base[0]->cluster = ~(~0u >> 1); /* A bad value.  */
17755 
17756 	  sort_cluster (&table, base, size);
17757 
17758 	  /* Record the section for consistency checking during stream
17759 	     out -- we don't want to start writing decls in different
17760 	     sections.  */
17761 	  table.section = base[0]->section;
17762 	  bytes += write_cluster (to, base, size, table, counts, &crc);
17763 	  table.section = 0;
17764 	}
17765     }
17766 
17767   /* depset::cluster - entity number (on entities)
17768      depset::section - cluster number  */
17769   /* We'd better have written as many sections and found as many
17770      namespaces as we predicted.  */
17771   gcc_assert (counts[MSC_sec_hwm] == to->get_section_limit ()
17772 	      && spaces.length () == counts[MSC_namespaces]);
17773 
17774   /* Write the entities.  None are written if we contain only
17775      namespaces or nothing. */
17776   config.num_entities = counts[MSC_entities];
17777   if (counts[MSC_entities])
17778     write_entities (to, sccs, counts[MSC_entities], &crc);
17779 
17780   /* Write the namespaces.  */
17781   if (counts[MSC_namespaces])
17782     write_namespaces (to, spaces, counts[MSC_namespaces], &crc);
17783 
17784   /* Write the bindings themselves.  */
17785   counts[MSC_bindings] = write_bindings (to, sccs, &crc);
17786 
17787   /* Write the unnamed.  */
17788   counts[MSC_pendings] = write_pendings (to, sccs, table, &crc);
17789 
17790   /* Write the import table.  */
17791   if (config.num_imports > 1)
17792     write_imports (to, &crc);
17793 
17794   /* Write elided partition table.  */
17795   if (config.num_partitions)
17796     write_partitions (to, config.num_partitions, &crc);
17797 
17798   /* Write the line maps.  */
17799   write_ordinary_maps (to, map_info, &config, config.num_partitions, &crc);
17800   write_macro_maps (to, map_info, &config, &crc);
17801 
17802   if (is_header ())
17803     {
17804       counts[MSC_macros] = write_macros (to, reader, &crc);
17805       counts[MSC_inits] = write_inits (to, table, &crc);
17806     }
17807 
17808   unsigned clusters = counts[MSC_sec_hwm] - counts[MSC_sec_lwm];
17809   dump () && dump ("Wrote %u clusters, average %u bytes/cluster",
17810 		   clusters, (bytes + clusters / 2) / (clusters + !clusters));
17811 
17812   write_counts (to, counts, &crc);
17813 
17814   /* And finish up.  */
17815   write_config (to, config, crc);
17816 
17817   spaces.release ();
17818   sccs.release ();
17819 
17820   vec_free (ool);
17821 
17822   /* Human-readable info.  */
17823   write_readme (to, reader, config.dialect_str, extensions);
17824 
17825   // FIXME:QOI:  Have a command line switch to control more detailed
17826   // information (which might leak data you do not want to leak).
17827   // Perhaps (some of) the write_readme contents should also be
17828   // so-controlled.
17829   if (false)
17830     write_env (to);
17831 
17832   trees_out::instrument ();
17833   dump () && dump ("Wrote %u sections", to->get_section_limit ());
17834 }
17835 
17836 /* Initial read of a CMI.  Checks config, loads up imports and line
17837    maps.  */
17838 
17839 bool
17840 module_state::read_initial (cpp_reader *reader)
17841 {
17842   module_state_config config;
17843   bool ok = true;
17844 
17845   if (ok && !from ()->begin (loc))
17846     ok = false;
17847 
17848   if (ok && !read_config (config))
17849     ok = false;
17850 
17851   bool have_locs = ok && read_prepare_maps (&config);
17852 
17853   /* Ordinary maps before the imports.  */
17854   if (have_locs && !read_ordinary_maps ())
17855     ok = false;
17856 
17857   /* Allocate the REMAP vector.  */
17858   slurp->alloc_remap (config.num_imports);
17859 
17860   if (ok)
17861     {
17862       /* Read the import table.  Decrement current to stop this CMI
17863 	 from being evicted during the import. */
17864       slurp->current--;
17865       if (config.num_imports > 1 && !read_imports (reader, line_table))
17866 	ok = false;
17867       slurp->current++;
17868     }
17869 
17870   /* Read the elided partition table, if we're the primary partition.  */
17871   if (ok && config.num_partitions && is_module ()
17872       && !read_partitions (config.num_partitions))
17873     ok = false;
17874 
17875   /* Determine the module's number.  */
17876   gcc_checking_assert (mod == MODULE_UNKNOWN);
17877   gcc_checking_assert (this != (*modules)[0]);
17878 
17879   {
17880     /* Allocate space in the entities array now -- that array must be
17881        monotonically in step with the modules array.  */
17882     entity_lwm = vec_safe_length (entity_ary);
17883     entity_num = config.num_entities;
17884     gcc_checking_assert (modules->length () == 1
17885 			 || modules->last ()->entity_lwm <= entity_lwm);
17886     vec_safe_reserve (entity_ary, config.num_entities);
17887 
17888     binding_slot slot;
17889     slot.u.binding = NULL_TREE;
17890     for (unsigned count = config.num_entities; count--;)
17891       entity_ary->quick_push (slot);
17892   }
17893 
17894   /* We'll run out of other resources before we run out of module
17895      indices.  */
17896   mod = modules->length ();
17897   vec_safe_push (modules, this);
17898 
17899   /* We always import and export ourselves. */
17900   bitmap_set_bit (imports, mod);
17901   bitmap_set_bit (exports, mod);
17902 
17903   if (ok)
17904     (*slurp->remap)[0] = mod << 1;
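  /* A note on the remap encoding, inferred from the reader loops
     (read_preprocessor and read_language below): each entry is a
     module number shifted left by one, and only entries with the low
     bit set are treated as direct imports to recurse into.  Slot 0 is
     ourselves, with that bit clear.  */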
17905   dump () && dump ("Assigning %M module number %u", this, mod);
17906 
17907   /* We should not have been frozen during the importing done by
17908      read_config.  */
17909   gcc_assert (!from ()->is_frozen ());
17910 
17911   /* Macro maps after the imports.  */
17912   if (ok && have_locs && !read_macro_maps ())
17913     ok = false;
17914 
17915   gcc_assert (slurp->current == ~0u);
17916   return ok;
17917 }
17918 
17919 /* Read a preprocessor state.  */
17920 
17921 bool
17922 module_state::read_preprocessor (bool outermost)
17923 {
17924   gcc_checking_assert (is_header () && slurp
17925 		       && slurp->remap_module (0) == mod);
17926 
17927   if (loadedness == ML_PREPROCESSOR)
17928     return !(from () && from ()->get_error ());
17929 
17930   bool ok = true;
17931 
17932   /* Read direct header imports.  */
17933   unsigned len = slurp->remap->length ();
17934   for (unsigned ix = 1; ok && ix != len; ix++)
17935     {
17936       unsigned map = (*slurp->remap)[ix];
17937       if (map & 1)
17938 	{
17939 	  module_state *import = (*modules)[map >> 1];
17940 	  if (import->is_header ())
17941 	    {
17942 	      ok = import->read_preprocessor (false);
17943 	      bitmap_ior_into (slurp->headers, import->slurp->headers);
17944 	    }
17945 	}
17946     }
17947 
17948   /* Record as a direct header.  */
17949   if (ok)
17950     bitmap_set_bit (slurp->headers, mod);
17951 
17952   if (ok && !read_macros ())
17953     ok = false;
17954 
17955   loadedness = ML_PREPROCESSOR;
17956   announce ("macros");
17957 
17958   if (flag_preprocess_only)
17959     /* We're done with the string table.  */
17960     from ()->release ();
17961 
17962   return check_read (outermost, ok);
17963 }
17964 
17965 /* Read language state.  */
17966 
17967 bool
17968 module_state::read_language (bool outermost)
17969 {
17970   gcc_checking_assert (!lazy_snum);
17971 
17972   if (loadedness == ML_LANGUAGE)
17973     return !(slurp && from () && from ()->get_error ());
17974 
17975   gcc_checking_assert (slurp && slurp->current == ~0u
17976 		       && slurp->remap_module (0) == mod);
17977 
17978   bool ok = true;
17979 
17980   /* Read direct imports.  */
17981   unsigned len = slurp->remap->length ();
17982   for (unsigned ix = 1; ok && ix != len; ix++)
17983     {
17984       unsigned map = (*slurp->remap)[ix];
17985       if (map & 1)
17986 	{
17987 	  module_state *import = (*modules)[map >> 1];
17988 	  if (!import->read_language (false))
17989 	    ok = false;
17990 	}
17991     }
17992 
17993   unsigned counts[MSC_HWM];
17994 
17995   if (ok && !read_counts (counts))
17996     ok = false;
17997 
17998   function_depth++; /* Prevent unexpected GCs.  */
17999 
18000   if (ok && counts[MSC_entities] != entity_num)
18001     ok = false;
18002   if (ok && counts[MSC_entities]
18003       && !read_entities (counts[MSC_entities],
18004 			 counts[MSC_sec_lwm], counts[MSC_sec_hwm]))
18005     ok = false;
18006 
18007   /* Read the namespace hierarchy. */
18008   if (ok && counts[MSC_namespaces]
18009       && !read_namespaces (counts[MSC_namespaces]))
18010     ok = false;
18011 
18012   if (ok && !read_bindings (counts[MSC_bindings],
18013 			    counts[MSC_sec_lwm], counts[MSC_sec_hwm]))
18014     ok = false;
18015 
18016   /* And unnamed.  */
18017   if (ok && counts[MSC_pendings] && !read_pendings (counts[MSC_pendings]))
18018     ok = false;
18019 
18020   if (ok)
18021     {
18022       slurp->remaining = counts[MSC_sec_hwm] - counts[MSC_sec_lwm];
18023       available_clusters += counts[MSC_sec_hwm] - counts[MSC_sec_lwm];
18024     }
18025 
18026   if (!flag_module_lazy
18027       || (is_partition ()
18028 	  && module_interface_p ()
18029 	  && !module_partition_p ()))
18030     {
18031       /* Read the sections in forward order, so that dependencies are read
18032 	 first.  See note about tarjan_connect.  */
18033       ggc_collect ();
18034 
18035       lazy_snum = ~0u;
18036 
18037       unsigned hwm = counts[MSC_sec_hwm];
18038       for (unsigned ix = counts[MSC_sec_lwm]; ok && ix != hwm; ix++)
18039 	if (!load_section (ix, NULL))
18040 	  {
18041 	    ok = false;
18042 	    break;
18043 	  }
18044       lazy_snum = 0;
18045       post_load_processing ();
18046 
18047       ggc_collect ();
18048 
18049       if (ok && CHECKING_P)
18050 	for (unsigned ix = 0; ix != entity_num; ix++)
18051 	  gcc_assert (!(*entity_ary)[ix + entity_lwm].is_lazy ());
18052     }
18053 
18054   // If the import is a header-unit, we need to register initializers
18055   // of any static objects it contains (looking at you _Ioinit).
18056   // Notice, the ordering of these initializers will be that of a
18057   // dynamic initializer at this point in the current TU.  (Other
18058   // instances of these objects in other TUs will be initialized as
18059   // part of that TU's global initializers.)
18060   if (ok && counts[MSC_inits] && !read_inits (counts[MSC_inits]))
18061     ok = false;
18062 
18063   function_depth--;
18064 
18065   announce (flag_module_lazy ? "lazy" : "imported");
18066   loadedness = ML_LANGUAGE;
18067 
18068   gcc_assert (slurp->current == ~0u);
18069 
18070   /* We're done with the string table.  */
18071   from ()->release ();
18072 
18073   return check_read (outermost, ok);
18074 }
18075 
18076 bool
18077 module_state::maybe_defrost ()
18078 {
18079   bool ok = true;
18080   if (from ()->is_frozen ())
18081     {
18082       if (lazy_open >= lazy_limit)
18083 	freeze_an_elf ();
18084       dump () && dump ("Defrosting '%s'", filename);
18085       ok = from ()->defrost (maybe_add_cmi_prefix (filename));
18086       lazy_open++;
18087     }
18088 
18089   return ok;
18090 }
18091 
18092 /* Load section SNUM, dealing with laziness.  It doesn't matter if we
18093    have multiple concurrent loads, because we do not use TREE_VISITED
18094    when reading back in.  */
18095 
18096 bool
18097 module_state::load_section (unsigned snum, binding_slot *mslot)
18098 {
18099   if (from ()->get_error ())
18100     return false;
18101 
18102   if (snum >= slurp->current)
18103     from ()->set_error (elf::E_BAD_LAZY);
18104   else if (maybe_defrost ())
18105     {
18106       unsigned old_current = slurp->current;
18107       slurp->current = snum;
18108       slurp->lru = 0;  /* Do not swap out.  */
18109       slurp->remaining--;
18110       read_cluster (snum);
18111       slurp->lru = ++lazy_lru;
18112       slurp->current = old_current;
18113     }
18114 
18115   if (mslot && mslot->is_lazy ())
18116     {
18117       /* Oops, the section didn't set this slot.  */
18118       from ()->set_error (elf::E_BAD_DATA);
18119       *mslot = NULL_TREE;
18120     }
18121 
18122   bool ok = !from ()->get_error ();
18123   if (!ok)
18124     {
18125       error_at (loc, "failed to read compiled module cluster %u: %s",
18126 		snum, from ()->get_error (filename));
18127       note_cmi_name ();
18128     }
18129 
18130   maybe_completed_reading ();
18131 
18132   return ok;
18133 }
18134 
18135 void
18136 module_state::maybe_completed_reading ()
18137 {
18138   if (loadedness == ML_LANGUAGE && slurp->current == ~0u && !slurp->remaining)
18139     {
18140       lazy_open--;
18141       /* We no longer need the macros, all tokenizing has been done.  */
18142       slurp->release_macros ();
18143 
18144       from ()->end ();
18145       slurp->close ();
18146       slurped ();
18147     }
18148 }
18149 
18150 /* After a reading operation, make sure things are still ok.  If not,
18151    emit an error and clean up.  */
18152 
18153 bool
18154 module_state::check_read (bool outermost, bool ok)
18155 {
18156   gcc_checking_assert (!outermost || slurp->current == ~0u);
18157 
18158   if (!ok)
18159     from ()->set_error ();
18160 
18161   if (int e = from ()->get_error ())
18162     {
18163       error_at (loc, "failed to read compiled module: %s",
18164 		from ()->get_error (filename));
18165       note_cmi_name ();
18166 
18167       if (e == EMFILE
18168 	  || e == ENFILE
18169 #if MAPPED_READING
18170 	  || e == ENOMEM
18171 #endif
18172 	  || false)
18173 	inform (loc, "consider using %<-fno-module-lazy%>,"
18174 		" increasing %<-param-lazy-modules=%u%> value,"
18175 		" or increasing the per-process file descriptor limit",
18176 		param_lazy_modules);
18177       else if (e == ENOENT)
18178 	inform (loc, "imports must be built before being imported");
18179 
18180       if (outermost)
18181 	fatal_error (loc, "returning to the gate for a mechanical issue");
18182 
18183       ok = false;
18184     }
18185 
18186   maybe_completed_reading ();
18187 
18188   return ok;
18189 }
18190 
18191 /* Return the flat name (including dots) of module IX, or NULL if
18192    there is none to report.  */
18193 
18194 char const *
18195 module_name (unsigned ix, bool header_ok)
18196 {
18197   if (modules)
18198     {
18199       module_state *imp = (*modules)[ix];
18200 
18201       if (ix && !imp->name)
18202 	imp = imp->parent;
18203 
18204       if (header_ok || !imp->is_header ())
18205 	return imp->get_flatname ();
18206     }
18207 
18208   return NULL;
18209 }
18210 
18211 /* Return the bitmap describing what modules are imported.  Remember,
18212    we always import ourselves.  */
18213 
18214 bitmap
18215 get_import_bitmap ()
18216 {
18217   return (*modules)[0]->imports;
18218 }
18219 
18220 /* Return the visible imports and path of instantiation for an
18221    instantiation at TINST.  If TINST is nullptr, we're not in an
18222    instantiation, and thus will return the visible imports of the
18223    current TU (and NULL *PATH_MAP_P).   We cache the information on
18224    the tinst level itself.  */
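/* An illustrative sketch (hypothetical modules): instantiating a
   template owned by module A, which in turn instantiates one owned by
   module B, gives the inner level a path map of {0, A, B} and a
   visible set that is the TU's imports unioned with A's and B's
   imports.  */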
18225 
18226 static bitmap
18227 path_of_instantiation (tinst_level *tinst, bitmap *path_map_p)
18228 {
18229   gcc_checking_assert (modules_p ());
18230 
18231   if (!tinst)
18232     {
18233       /* Not inside an instantiation, just the regular case.  */
18234       *path_map_p = nullptr;
18235       return get_import_bitmap ();
18236     }
18237 
18238   if (!tinst->path)
18239     {
18240       /* Calculate.  */
18241       bitmap visible = path_of_instantiation (tinst->next, path_map_p);
18242       bitmap path_map = *path_map_p;
18243 
18244       if (!path_map)
18245 	{
18246 	  path_map = BITMAP_GGC_ALLOC ();
18247 	  bitmap_set_bit (path_map, 0);
18248 	}
18249 
18250       tree decl = tinst->tldcl;
18251       if (TREE_CODE (decl) == TREE_LIST)
18252 	decl = TREE_PURPOSE (decl);
18253       if (TYPE_P (decl))
18254 	decl = TYPE_NAME (decl);
18255 
18256       if (unsigned mod = get_originating_module (decl))
18257 	if (!bitmap_bit_p (path_map, mod))
18258 	  {
18259 	    /* This is brand new information!  */
18260 	    bitmap new_path = BITMAP_GGC_ALLOC ();
18261 	    bitmap_copy (new_path, path_map);
18262 	    bitmap_set_bit (new_path, mod);
18263 	    path_map = new_path;
18264 
18265 	    bitmap imports = (*modules)[mod]->imports;
18266 	    if (bitmap_intersect_compl_p (imports, visible))
18267 	      {
18268 		/* IMPORTS contains additional modules to VISIBLE.  */
18269 		bitmap new_visible = BITMAP_GGC_ALLOC ();
18270 
18271 		bitmap_ior (new_visible, visible, imports);
18272 		visible = new_visible;
18273 	      }
18274 	  }
18275 
18276       tinst->path = path_map;
18277       tinst->visible = visible;
18278     }
18279 
18280   *path_map_p = tinst->path;
18281   return tinst->visible;
18282 }
18283 
18284 /* Return the bitmap describing what modules are visible along the
18285    path of instantiation.  If we're not an instantiation, this will be
18286    the visible imports of the TU.  *PATH_MAP_P is filled in with the
18287    modules owning the instantiation path -- we see the module-linkage
18288    entities of those modules.  */
18289 
18290 bitmap
18291 visible_instantiation_path (bitmap *path_map_p)
18292 {
18293   if (!modules_p ())
18294     return NULL;
18295 
18296   return path_of_instantiation (current_instantiation (), path_map_p);
18297 }
18298 
18299 /* We've just directly imported IMPORT.  Update our import/export
18300    bitmaps.  IS_EXPORT is true if we're reexporting IMPORT.  */
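/* For instance (a sketch of the common path, not the only one):
   'export import M;' in a module interface reaches here with IS_EXPORT
   true, so M's exports are folded into both our imports and our
   exports.  */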
18301 
18302 void
18303 module_state::set_import (module_state const *import, bool is_export)
18304 {
18305   gcc_checking_assert (this != import);
18306 
18307   /* We see IMPORT's exports (which includes IMPORT).  If IMPORT is
18308      the primary interface or a partition we'll see its imports.  */
18309   bitmap_ior_into (imports, import->is_module () || import->is_partition ()
18310 		   ? import->imports : import->exports);
18311 
18312   if (is_export)
18313     /* We'll export IMPORT's exports.  */
18314     bitmap_ior_into (exports, import->exports);
18315 }
18316 
18317 /* Return the declaring entity of DECL.  That is the decl determining
18318    how to decorate DECL with module information.  Returns NULL_TREE if
18319    it's the global module.  */
18320 
18321 tree
18322 get_originating_module_decl (tree decl)
18323 {
18324   /* An enumeration constant.  */
18325   if (TREE_CODE (decl) == CONST_DECL
18326       && DECL_CONTEXT (decl)
18327       && (TREE_CODE (DECL_CONTEXT (decl)) == ENUMERAL_TYPE))
18328     decl = TYPE_NAME (DECL_CONTEXT (decl));
18329   else if (TREE_CODE (decl) == FIELD_DECL
18330 	   || TREE_CODE (decl) == USING_DECL)
18331     {
18332       decl = DECL_CONTEXT (decl);
18333       if (TREE_CODE (decl) != FUNCTION_DECL)
18334 	decl = TYPE_NAME (decl);
18335     }
18336 
18337   gcc_checking_assert (TREE_CODE (decl) == TEMPLATE_DECL
18338 		       || TREE_CODE (decl) == FUNCTION_DECL
18339 		       || TREE_CODE (decl) == TYPE_DECL
18340 		       || TREE_CODE (decl) == VAR_DECL
18341 		       || TREE_CODE (decl) == CONCEPT_DECL
18342 		       || TREE_CODE (decl) == NAMESPACE_DECL);
18343 
18344   for (;;)
18345     {
18346       /* Uninstantiated template friends are owned by the befriending
18347 	 class -- not their context.  */
18348       if (TREE_CODE (decl) == TEMPLATE_DECL
18349 	  && DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (decl))
18350 	decl = TYPE_NAME (DECL_CHAIN (decl));
18351 
18352       int use;
18353       if (tree ti = node_template_info (decl, use))
18354 	{
18355 	  decl = TI_TEMPLATE (ti);
18356 	  if (TREE_CODE (decl) != TEMPLATE_DECL)
18357 	    {
18358 	      /* A friend template specialization.  */
18359 	      gcc_checking_assert (OVL_P (decl));
18360 	      return global_namespace;
18361 	    }
18362 	}
18363       else
18364 	{
18365 	  tree ctx = CP_DECL_CONTEXT (decl);
18366 	  if (TREE_CODE (ctx) == NAMESPACE_DECL)
18367 	    break;
18368 
18369 	  if (TYPE_P (ctx))
18370 	    {
18371 	      ctx = TYPE_NAME (ctx);
18372 	      if (!ctx)
18373 		{
18374 		  /* Some kind of internal type.  */
18375 		  gcc_checking_assert (DECL_ARTIFICIAL (decl));
18376 		  return global_namespace;
18377 		}
18378 	    }
18379 	  decl = ctx;
18380 	}
18381     }
18382 
18383   return decl;
18384 }
18385 
18386 int
18387 get_originating_module (tree decl, bool for_mangle)
18388 {
18389   tree owner = get_originating_module_decl (decl);
18390   tree not_tmpl = STRIP_TEMPLATE (owner);
18391 
18392   if (!DECL_LANG_SPECIFIC (not_tmpl))
18393     return for_mangle ? -1 : 0;
18394 
18395   if (for_mangle && !DECL_MODULE_PURVIEW_P (not_tmpl))
18396     return -1;
18397 
18398   int mod = !DECL_MODULE_IMPORT_P (not_tmpl) ? 0 : get_importing_module (owner);
18399 
18400   if (for_mangle && (*modules)[mod]->is_header ())
18401     return -1;
18402 
18403   return mod;
18404 }
18405 
18406 unsigned
18407 get_importing_module (tree decl, bool flexible)
18408 {
18409   unsigned index = import_entity_index (decl, flexible);
18410   if (index == ~(~0u >> 1))
18411     return -1;
18412   module_state *module = import_entity_module (index);
18413 
18414   return module->mod;
18415 }
18416 
18417 /* Is it permissible to redeclare DECL?  */
18418 
18419 bool
18420 module_may_redeclare (tree decl)
18421 {
18422   module_state *me = (*modules)[0];
18423   module_state *them = me;
18424   tree not_tmpl = STRIP_TEMPLATE (decl);
18425   if (DECL_LANG_SPECIFIC (not_tmpl) && DECL_MODULE_IMPORT_P (not_tmpl))
18426     {
18427       /* We can be given the TEMPLATE_RESULT.  We want the
18428 	 TEMPLATE_DECL.  */
18429       int use_tpl = -1;
18430       if (tree ti = node_template_info (decl, use_tpl))
18431 	{
18432 	  tree tmpl = TI_TEMPLATE (ti);
18433 	  if (use_tpl == 2)
18434 	    {
18435 	      /* A partial specialization.  Find that specialization's
18436 		 template_decl.  */
18437 	      for (tree list = DECL_TEMPLATE_SPECIALIZATIONS (tmpl);
18438 		   list; list = TREE_CHAIN (list))
18439 		if (DECL_TEMPLATE_RESULT (TREE_VALUE (list)) == decl)
18440 		  {
18441 		    decl = TREE_VALUE (list);
18442 		    break;
18443 		  }
18444 	    }
18445 	  else if (DECL_TEMPLATE_RESULT (tmpl) == decl)
18446 	    decl = tmpl;
18447 	}
18448       unsigned index = import_entity_index (decl);
18449       them = import_entity_module (index);
18450     }
18451 
18452   if (them->is_header ())
18453     {
18454       if (!header_module_p ())
18455 	return !module_purview_p ();
18456 
18457       if (DECL_SOURCE_LOCATION (decl) == BUILTINS_LOCATION)
18458 	/* This is a builtin, being declared in a header unit.  We
18459 	   now need to mark it as an export.  */
18460 	DECL_MODULE_EXPORT_P (decl) = true;
18461 
18462       /* If it came from a header, it's in the global module.  */
18463       return true;
18464     }
18465 
18466   if (me == them)
18467     return ((DECL_LANG_SPECIFIC (not_tmpl) && DECL_MODULE_PURVIEW_P (not_tmpl))
18468 	    == module_purview_p ());
18469 
18470   if (!me->name)
18471     me = me->parent;
18472 
18473   /* We can't have found a GMF entity from a named module.  */
18474   gcc_checking_assert (DECL_LANG_SPECIFIC (not_tmpl)
18475 		       && DECL_MODULE_PURVIEW_P (not_tmpl));
18476 
18477   return me && get_primary (them) == get_primary (me);
18478 }
18479 
18480 /* DECL is being created by this TU.  Record that it came from here.  We
18481    record module purview, so we can see if partial or explicit
18482    specialization needs to be written out, even though its purviewness
18483    comes from the most general template.  */
18484 
18485 void
18486 set_instantiating_module (tree decl)
18487 {
18488   gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
18489 	      || TREE_CODE (decl) == VAR_DECL
18490 	      || TREE_CODE (decl) == TYPE_DECL
18491 	      || TREE_CODE (decl) == CONCEPT_DECL
18492 	      || TREE_CODE (decl) == TEMPLATE_DECL
18493 	      || (TREE_CODE (decl) == NAMESPACE_DECL
18494 		  && DECL_NAMESPACE_ALIAS (decl)));
18495 
18496   if (!modules_p ())
18497     return;
18498 
18499   decl = STRIP_TEMPLATE (decl);
18500 
18501   if (!DECL_LANG_SPECIFIC (decl) && module_purview_p ())
18502     retrofit_lang_decl (decl);
18503 
18504   if (DECL_LANG_SPECIFIC (decl))
18505     {
18506       DECL_MODULE_PURVIEW_P (decl) = module_purview_p ();
18507       /* If this was imported, we'll still be in the entity_hash.  */
18508       DECL_MODULE_IMPORT_P (decl) = false;
18509     }
18510 }
18511 
18512 /* If DECL is a class member whose class is not defined in this TU
18513    (it was imported), remember this decl.  */
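/* A sketch with hypothetical names: after 'import :part;' provides
   'struct S { void f (); };' from another partition of this module, a
   definition 'void S::f () {}' here has an imported context, so f is
   recorded on class_members and later streamed into our CMI.  */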
18514 
18515 void
18516 set_defining_module (tree decl)
18517 {
18518   gcc_checking_assert (!DECL_LANG_SPECIFIC (decl)
18519 		       || !DECL_MODULE_IMPORT_P (decl));
18520 
18521   if (module_has_cmi_p ())
18522     {
18523       tree ctx = DECL_CONTEXT (decl);
18524       if (ctx
18525 	  && (TREE_CODE (ctx) == RECORD_TYPE || TREE_CODE (ctx) == UNION_TYPE)
18526 	  && DECL_LANG_SPECIFIC (TYPE_NAME (ctx))
18527 	  && DECL_MODULE_IMPORT_P (TYPE_NAME (ctx)))
18528 	{
18529 	  /* This entity's context is from an import.  We may need to
18530 	     record this entity to make sure we emit it in the CMI.
18531 	     Template specializations are in the template hash tables,
18532 	     so we don't need to record them here as well.  */
18533 	  int use_tpl = -1;
18534 	  tree ti = node_template_info (decl, use_tpl);
18535 	  if (use_tpl <= 0)
18536 	    {
18537 	      if (ti)
18538 		{
18539 		  gcc_checking_assert (!use_tpl);
18540 		  /* Get to the TEMPLATE_DECL.  */
18541 		  decl = TI_TEMPLATE (ti);
18542 		}
18543 
18544 	      /* Record it on the class_members list.  */
18545 	      vec_safe_push (class_members, decl);
18546 	    }
18547 	}
18548       else if (DECL_IMPLICIT_TYPEDEF_P (decl)
18549 	       && CLASSTYPE_TEMPLATE_SPECIALIZATION (TREE_TYPE (decl)))
18550 	/* This is a partial or explicit specialization.  */
18551 	vec_safe_push (partial_specializations, decl);
18552     }
18553 }
18554 
18555 void
18556 set_originating_module (tree decl, bool friend_p ATTRIBUTE_UNUSED)
18557 {
18558   set_instantiating_module (decl);
18559 
18560   if (TREE_CODE (CP_DECL_CONTEXT (decl)) != NAMESPACE_DECL)
18561     return;
18562 
18563   gcc_checking_assert (friend_p || decl == get_originating_module_decl (decl));
18564 
18565   if (!module_exporting_p ())
18566     return;
18567 
18568   // FIXME: Check ill-formed linkage
18569   DECL_MODULE_EXPORT_P (decl) = true;
18570 }
18571 
18572 /* DECL is attached to CTX for ODR purposes.  */
18573 
18574 void
18575 maybe_attach_decl (tree ctx, tree decl)
18576 {
18577   if (!modules_p ())
18578     return;
18579 
18580   // FIXME: For now just deal with lambdas attached to var decls.
18581   // This might be sufficient?
18582   if (TREE_CODE (ctx) != VAR_DECL)
18583     return;
18584 
18585   gcc_checking_assert (DECL_NAMESPACE_SCOPE_P (ctx));
18586 
18587   if (!attached_table)
18588     attached_table = new attached_map_t (EXPERIMENT (1, 400));
18589 
18590   auto &vec = attached_table->get_or_insert (ctx);
18591   if (!vec.length ())
18592     {
18593       retrofit_lang_decl (ctx);
18594       DECL_MODULE_ATTACHMENTS_P (ctx) = true;
18595     }
18596   vec.safe_push (decl);
18597 }
18598 
18599 /* Create the flat name string.  It is simplest to have it handy.  */
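/* For example (a sketch): module 'foo.bar' gets the flat name
   "foo.bar"; a partition 'foo.bar:part' gets its primary's flat name,
   a ':', then the partition's own components; a header unit simply
   reuses its path string.  */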
18600 
18601 void
18602 module_state::set_flatname ()
18603 {
18604   gcc_checking_assert (!flatname);
18605   if (parent)
18606     {
18607       auto_vec<tree,5> ids;
18608       size_t len = 0;
18609       char const *primary = NULL;
18610       size_t pfx_len = 0;
18611 
18612       for (module_state *probe = this;
18613 	   probe;
18614 	   probe = probe->parent)
18615 	if (is_partition () && !probe->is_partition ())
18616 	  {
18617 	    primary = probe->get_flatname ();
18618 	    pfx_len = strlen (primary);
18619 	    break;
18620 	  }
18621 	else
18622 	  {
18623 	    ids.safe_push (probe->name);
18624 	    len += IDENTIFIER_LENGTH (probe->name) + 1;
18625 	  }
18626 
18627       char *flat = XNEWVEC (char, pfx_len + len + is_partition ());
18628       flatname = flat;
18629 
18630       if (primary)
18631 	{
18632 	  memcpy (flat, primary, pfx_len);
18633 	  flat += pfx_len;
18634 	  *flat++ = ':';
18635 	}
18636 
18637       for (unsigned len = 0; ids.length ();)
18638 	{
18639 	  if (len)
18640 	    flat[len++] = '.';
18641 	  tree elt = ids.pop ();
18642 	  unsigned l = IDENTIFIER_LENGTH (elt);
18643 	  memcpy (flat + len, IDENTIFIER_POINTER (elt), l + 1);
18644 	  len += l;
18645 	}
18646     }
18647   else if (is_header ())
18648     flatname = TREE_STRING_POINTER (name);
18649   else
18650     flatname = IDENTIFIER_POINTER (name);
18651 }
18652 
18653 /* Read the CMI file for a module.  */
18654 
18655 bool
18656 module_state::do_import (cpp_reader *reader, bool outermost)
18657 {
18658   gcc_assert (global_namespace == current_scope () && loadedness == ML_NONE);
18659 
18660   loc = linemap_module_loc (line_table, loc, get_flatname ());
18661 
18662   if (lazy_open >= lazy_limit)
18663     freeze_an_elf ();
18664 
18665   int fd = -1;
18666   int e = ENOENT;
18667   if (filename)
18668     {
18669       const char *file = maybe_add_cmi_prefix (filename);
18670       dump () && dump ("CMI is %s", file);
18671       if (note_module_cmi_yes || inform_cmi_p)
18672 	inform (loc, "reading CMI %qs", file);
18673       fd = open (file, O_RDONLY | O_CLOEXEC | O_BINARY);
18674       e = errno;
18675     }
18676 
18677   gcc_checking_assert (!slurp);
18678   slurp = new slurping (new elf_in (fd, e));
18679 
18680   bool ok = true;
18681   if (!from ()->get_error ())
18682     {
18683       announce ("importing");
18684       loadedness = ML_CONFIG;
18685       lazy_open++;
18686       ok = read_initial (reader);
18687       slurp->lru = ++lazy_lru;
18688     }
18689 
18690   gcc_assert (slurp->current == ~0u);
18691 
18692   return check_read (outermost, ok);
18693 }
18694 
18695 /* Attempt to increase the file descriptor limit.  */
18696 
18697 static bool
18698 try_increase_lazy (unsigned want)
18699 {
18700   gcc_checking_assert (lazy_open >= lazy_limit);
18701 
18702   /* If we're increasing, saturate at hard limit.  */
18703   if (want > lazy_hard_limit && lazy_limit < lazy_hard_limit)
18704     want = lazy_hard_limit;
18705 
18706 #if HAVE_SETRLIMIT
18707   if ((!lazy_limit || !param_lazy_modules)
18708       && lazy_hard_limit
18709       && want <= lazy_hard_limit)
18710     {
18711       struct rlimit rlimit;
18712       rlimit.rlim_cur = want + LAZY_HEADROOM;
18713       rlimit.rlim_max = lazy_hard_limit + LAZY_HEADROOM;
18714       if (!setrlimit (RLIMIT_NOFILE, &rlimit))
18715 	lazy_limit = want;
18716     }
18717 #endif
18718 
18719   return lazy_open < lazy_limit;
18720 }
18721 
18722 /* Pick a victim module to freeze its reader.  */
18723 
18724 void
18725 module_state::freeze_an_elf ()
18726 {
18727   if (try_increase_lazy (lazy_open * 2))
18728     return;
18729 
18730   module_state *victim = NULL;
18731   for (unsigned ix = modules->length (); ix--;)
18732     {
18733       module_state *candidate = (*modules)[ix];
18734       if (candidate && candidate->slurp && candidate->slurp->lru
18735 	  && candidate->from ()->is_freezable ()
18736 	  && (!victim || victim->slurp->lru > candidate->slurp->lru))
18737 	victim = candidate;
18738     }
18739 
18740   if (victim)
18741     {
18742       dump () && dump ("Freezing '%s'", victim->filename);
18743       if (victim->slurp->macro_defs.size)
18744 	/* Save the macro definitions to a buffer.  */
18745 	victim->from ()->preserve (victim->slurp->macro_defs);
18746       if (victim->slurp->macro_tbl.size)
18747 	/* Save the macro table to a buffer.  */
18748 	victim->from ()->preserve (victim->slurp->macro_tbl);
18749       victim->from ()->freeze ();
18750       lazy_open--;
18751     }
18752   else
18753     dump () && dump ("No module available for freezing");
18754 }
18755 
18756 /* Load the lazy slot *MSLOT, INDEX'th slot of the module.  */
18757 
18758 bool
18759 module_state::lazy_load (unsigned index, binding_slot *mslot)
18760 {
18761   unsigned n = dump.push (this);
18762 
18763   gcc_checking_assert (function_depth);
18764 
18765   unsigned cookie = mslot->get_lazy ();
18766   unsigned snum = cookie >> 2;
18767   dump () && dump ("Loading entity %M[%u] section:%u", this, index, snum);
18768 
18769   bool ok = load_section (snum, mslot);
18770 
18771   dump.pop (n);
18772 
18773   return ok;
18774 }
18775 
18776 /* Load MOD's binding for NS::ID into *MSLOT.  *MSLOT contains the
18777    lazy cookie naming the section to load.  */
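/* For example (a sketch with made-up names): name lookup hitting a
   lazy binding slot for std::vector in an imported header unit arrives
   here with MOD that unit's module number, NS the std namespace, ID
   the identifier "vector", and *MSLOT holding the cookie naming the
   section to load.  */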
18779 
18780 void
18781 lazy_load_binding (unsigned mod, tree ns, tree id, binding_slot *mslot)
18782 {
18783   int count = errorcount + warningcount;
18784 
18785   timevar_start (TV_MODULE_IMPORT);
18786 
18787   /* Stop GC happening, even in outermost loads (because our caller
18788      could well be building up a lookup set).  */
18789   function_depth++;
18790 
18791   gcc_checking_assert (mod);
18792   module_state *module = (*modules)[mod];
18793   unsigned n = dump.push (module);
18794 
18795   unsigned snum = mslot->get_lazy ();
18796   dump () && dump ("Lazily binding %P@%N section:%u", ns, id,
18797 		   module->name, snum);
18798 
18799   bool ok = !recursive_lazy (snum);
18800   if (ok)
18801     {
18802       ok = module->load_section (snum, mslot);
18803       lazy_snum = 0;
18804       post_load_processing ();
18805     }
18806 
18807   dump.pop (n);
18808 
18809   function_depth--;
18810 
18811   timevar_stop (TV_MODULE_IMPORT);
18812 
18813   if (!ok)
18814     fatal_error (input_location,
18815 		 module->is_header ()
18816 		 ? G_("failed to load binding %<%E%s%E%>")
18817 		 : G_("failed to load binding %<%E%s%E@%s%>"),
18818 		 ns, &"::"[ns == global_namespace ? 2 : 0], id,
18819 		 module->get_flatname ());
18820 
18821   if (count != errorcount + warningcount)
18822     inform (input_location,
18823 	    module->is_header ()
18824 	    ? G_("during load of binding %<%E%s%E%>")
18825 	    : G_("during load of binding %<%E%s%E@%s%>"),
18826 	    ns, &"::"[ns == global_namespace ? 2 : 0], id,
18827 	    module->get_flatname ());
18828 }
18829 
18830 /* Load any pending entities keyed to the top-key of DECL.  */
18831 
18832 void
18833 lazy_load_pendings (tree decl)
18834 {
18835   tree key_decl;
18836   pending_key key;
18837   key.ns = find_pending_key (decl, &key_decl);
18838   key.id = DECL_NAME (key_decl);
18839 
18840   auto *pending_vec = pending_table ? pending_table->get (key) : nullptr;
18841   if (!pending_vec)
18842     return;
18843 
18844   int count = errorcount + warningcount;
18845 
18846   timevar_start (TV_MODULE_IMPORT);
18847   bool ok = !recursive_lazy ();
18848   if (ok)
18849     {
18850       function_depth++; /* Prevent GC */
18851       unsigned n = dump.push (NULL);
18852       dump () && dump ("Reading %u pending entities keyed to %P",
18853 		       pending_vec->length (), key.ns, key.id);
18854       for (unsigned ix = pending_vec->length (); ix--;)
18855 	{
18856 	  unsigned index = (*pending_vec)[ix];
18857 	  binding_slot *slot = &(*entity_ary)[index];
18858 
18859 	  if (slot->is_lazy ())
18860 	    {
18861 	      module_state *import = import_entity_module (index);
18862 	      if (!import->lazy_load (index - import->entity_lwm, slot))
18863 		ok = false;
18864 	    }
18865 	  else if (dump ())
18866 	    {
18867 	      module_state *import = import_entity_module (index);
18868 	      dump () && dump ("Entity %M[%u] already loaded",
18869 			       import, index - import->entity_lwm);
18870 	    }
18871 	}
18872 
18873       pending_table->remove (key);
18874       dump.pop (n);
18875       lazy_snum = 0;
18876       post_load_processing ();
18877       function_depth--;
18878     }
18879 
18880   timevar_stop (TV_MODULE_IMPORT);
18881 
18882   if (!ok)
18883     fatal_error (input_location, "failed to load pendings for %<%E%s%E%>",
18884 		 key.ns, &"::"[key.ns == global_namespace ? 2 : 0], key.id);
18885 
18886   if (count != errorcount + warningcount)
18887     inform (input_location, "during load of pendings for %<%E%s%E%>",
18888 	    key.ns, &"::"[key.ns == global_namespace ? 2 : 0], key.id);
18889 }
18890 
18891 static void
18892 direct_import (module_state *import, cpp_reader *reader)
18893 {
18894   timevar_start (TV_MODULE_IMPORT);
18895   unsigned n = dump.push (import);
18896 
18897   gcc_checking_assert (import->is_direct () && import->has_location ());
18898   if (import->loadedness == ML_NONE)
18899     if (!import->do_import (reader, true))
18900       gcc_unreachable ();
18901 
18902   if (import->loadedness < ML_LANGUAGE)
18903     {
18904       if (!attached_table)
18905 	attached_table = new attached_map_t (EXPERIMENT (1, 400));
18906       import->read_language (true);
18907     }
18908 
18909   (*modules)[0]->set_import (import, import->exported_p);
18910 
18911   dump.pop (n);
18912   timevar_stop (TV_MODULE_IMPORT);
18913 }
18914 
18915 /* Import module IMPORT.  */
18916 
18917 void
18918 import_module (module_state *import, location_t from_loc, bool exporting_p,
18919 	       tree, cpp_reader *reader)
18920 {
18921   if (!import->check_not_purview (from_loc))
18922     return;
18923 
18924   if (!import->is_header () && current_lang_depth ())
18925     /* Only header units should appear inside language
18926        specifications.  The std doesn't specify this, but I think
18927        that's an error in resolving US 033, because language linkage
18928        is also our escape clause to getting things into the global
18929        module, so we don't want to confuse things by having to think
18930        about whether 'extern "C++" { import foo; }' puts foo's
18931        contents into the global module all of a sudden.  */
18932     warning (0, "import of named module %qs inside language-linkage block",
18933 	     import->get_flatname ());
18934 
18935   if (exporting_p || module_exporting_p ())
18936     import->exported_p = true;
18937 
18938   if (import->loadedness != ML_NONE)
18939     {
18940       from_loc = ordinary_loc_of (line_table, from_loc);
18941       linemap_module_reparent (line_table, import->loc, from_loc);
18942     }
18943   gcc_checking_assert (!import->module_p);
18944   gcc_checking_assert (import->is_direct () && import->has_location ());
18945 
18946   direct_import (import, reader);
18947 }
18948 
18949 /* Declare the current module to be MODULE.  EXPORTING_P is
18950    true if this TU is the exporting module unit.  */
18951 
18952 void
18953 declare_module (module_state *module, location_t from_loc, bool exporting_p,
18954 		tree, cpp_reader *reader)
18955 {
18956   gcc_assert (global_namespace == current_scope ());
18957 
18958   module_state *current = (*modules)[0];
18959   if (module_purview_p () || module->loadedness > ML_CONFIG)
18960     {
18961       error_at (from_loc, module_purview_p ()
18962 		? G_("module already declared")
18963 		: G_("module already imported"));
18964       if (module_purview_p ())
18965 	module = current;
18966       inform (module->loc, module_purview_p ()
18967 	      ? G_("module %qs declared here")
18968 	      : G_("module %qs imported here"),
18969 	      module->get_flatname ());
18970       return;
18971     }
18972 
18973   gcc_checking_assert (module->module_p);
18974   gcc_checking_assert (module->is_direct () && module->has_location ());
18975 
18976   /* Yer a module, 'arry.  */
18977   module_kind &= ~MK_GLOBAL;
18978   module_kind |= MK_MODULE;
18979 
18980   if (module->is_partition () || exporting_p)
18981     {
18982       gcc_checking_assert (module->get_flatname ());
18983 
18984       if (module->is_partition ())
18985 	module_kind |= MK_PARTITION;
18986 
18987       if (exporting_p)
18988 	{
18989 	  module->interface_p = true;
18990 	  module_kind |= MK_INTERFACE;
18991 	}
18992 
18993       if (module->is_header ())
18994 	module_kind |= MK_GLOBAL | MK_EXPORTING;
18995 
18996       /* Copy the importing information we may have already done.  We
18997 	 do not need to separate out the imports that only happen in
18998 	 the GMF, in spite of what the literal wording of the std
18999 	 might imply.  See p2191, the core list had a discussion
19000 	 where the module implementors agreed that the GMF of a named
19001 	 module is invisible to importers.  */
19002       module->imports = current->imports;
19003 
19004       module->mod = 0;
19005       (*modules)[0] = module;
19006     }
19007   else
19008     {
19009       module->interface_p = true;
19010       current->parent = module; /* So mangler knows module identity. */
19011       direct_import (module, reader);
19012     }
19013 }
19014 
19015 /* +1, we're the primary or a partition.  Therefore emitting a
19016    globally-callable idempotent initializer function.
19017    -1, we have direct imports.  Therefore emitting calls to their
19018    initializers.  */
19019 
19020 int
19021 module_initializer_kind ()
19022 {
19023   int result = 0;
19024 
19025   if (module_has_cmi_p () && !header_module_p ())
19026     result = +1;
19027   else if (num_init_calls_needed)
19028     result = -1;
19029 
19030   return result;
19031 }
19032 
19033 /* Emit calls to each direct import's global initializer.  Including
19034    direct imports of directly imported header units.  The initializers
19035    of (static) entities in header units will be called by their
19036    importing modules (for the instance contained within that), or by
19037    the current TU (for the instances we've brought in).  Of course
19038    such header unit behaviour is evil, but iostream went through that
19039    door some time ago.  */
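/* A sketch of the emitted code (the mangled names are illustrative;
   the real ones come from mangle_module_global_init): for direct
   imports foo and bar that need calls, this TU's own initializer
   effectively does

     extern void _ZGIW3foo (), _ZGIW3bar ();
     _ZGIW3foo ();
     _ZGIW3bar ();  */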
19040 
19041 void
19042 module_add_import_initializers ()
19043 {
19044   unsigned calls = 0;
19045   if (modules)
19046     {
19047       tree fntype = build_function_type (void_type_node, void_list_node);
19048       releasing_vec args;  // There are no args
19049 
19050       for (unsigned ix = modules->length (); --ix;)
19051 	{
19052 	  module_state *import = (*modules)[ix];
19053 	  if (import->call_init_p)
19054 	    {
19055 	      tree name = mangle_module_global_init (ix);
19056 	      tree fndecl = build_lang_decl (FUNCTION_DECL, name, fntype);
19057 
19058 	      DECL_CONTEXT (fndecl) = FROB_CONTEXT (global_namespace);
19059 	      SET_DECL_ASSEMBLER_NAME (fndecl, name);
19060 	      TREE_PUBLIC (fndecl) = true;
19061 	      determine_visibility (fndecl);
19062 
19063 	      tree call = cp_build_function_call_vec (fndecl, &args,
19064 						      tf_warning_or_error);
19065 	      finish_expr_stmt (call);
19066 
19067 	      calls++;
19068 	    }
19069 	}
19070     }
19071 
19072   gcc_checking_assert (calls == num_init_calls_needed);
19073 }
19074 
19075 /* STR & LEN_R are a preprocessed header name, possibly including the
19076    surrounding "" or <> characters.  Return the raw string name of the
19077    module to which it refers.  This will be an absolute path, or begin
19078    with ./, so it is immediately distinguishable from a (non-header
19079    unit) module name.  If READER is non-null, ask the preprocessor to
19080    locate the header to which it refers using the appropriate include
19081    path.  Note that we never do \ processing of the string, as that
19082    matches the preprocessor's behaviour.  */
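/* Some illustrative results (the resolved path is hypothetical):
   "util.h" (quoted or unquoted) with no hit on the include path
   becomes "./util.h"; "<vector>" with a READER becomes whatever
   absolute path cpp_probe_header_unit reports, e.g.
   "/usr/include/c++/12/vector".  */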
19083 
19084 static const char *
19085 canonicalize_header_name (cpp_reader *reader, location_t loc, bool unquoted,
19086 			  const char *str, size_t &len_r)
19087 {
19088   size_t len = len_r;
19089   static char *buf = 0;
19090   static size_t alloc = 0;
19091 
19092   if (!unquoted)
19093     {
19094       gcc_checking_assert (len >= 2
19095 			   && ((reader && str[0] == '<' && str[len-1] == '>')
19096 			       || (str[0] == '"' && str[len-1] == '"')));
19097       str += 1;
19098       len -= 2;
19099     }
19100 
19101   if (reader)
19102     {
19103       gcc_assert (!unquoted);
19104 
19105       if (len >= alloc)
19106 	{
19107 	  alloc = len + 1;
19108 	  buf = XRESIZEVEC (char, buf, alloc);
19109 	}
19110       memcpy (buf, str, len);
19111       buf[len] = 0;
19112 
19113       if (const char *hdr
19114 	  = cpp_probe_header_unit (reader, buf, str[-1] == '<', loc))
19115 	{
19116 	  len = strlen (hdr);
19117 	  str = hdr;
19118 	}
19119       else
19120 	str = buf;
19121     }
19122 
19123   if (!(str[0] == '.' ? IS_DIR_SEPARATOR (str[1]) : IS_ABSOLUTE_PATH (str)))
19124     {
19125       /* Prepend './'  */
19126       if (len + 3 > alloc)
19127 	{
19128 	  alloc = len + 3;
19129 	  buf = XRESIZEVEC (char, buf, alloc);
19130 	}
19131 
19132       buf[0] = '.';
19133       buf[1] = DIR_SEPARATOR;
19134       memmove (buf + 2, str, len);
19135       len += 2;
19136       buf[len] = 0;
19137       str = buf;
19138     }
19139 
19140   len_r = len;
19141   return str;
19142 }
19143 
19144 /* Set the CMI name from a cody packet.  Issue an error if
19145    ill-formed.  */
19146 
19147 void module_state::set_filename (const Cody::Packet &packet)
19148 {
19149   gcc_checking_assert (!filename);
19150   if (packet.GetCode () == Cody::Client::PC_PATHNAME)
19151     filename = xstrdup (packet.GetString ().c_str ());
19152   else
19153     {
19154       gcc_checking_assert (packet.GetCode () == Cody::Client::PC_ERROR);
19155       error_at (loc, "unknown Compiled Module Interface: %s",
19156 		packet.GetString ().c_str ());
19157     }
19158 }
19159 
19160 /* Figure out whether to treat HEADER as an include or an import.  */
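/* When translation happens, the text handed back to cpplib looks like
   this sketch (padding, not shown, keeps the header name in its
   original column):

     __import"./foo.h" [[__translated]];
   */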
19161 
19162 static char *
19163 maybe_translate_include (cpp_reader *reader, line_maps *lmaps, location_t loc,
19164 			 const char *path)
19165 {
19166   if (!modules_p ())
19167     {
19168       /* Turn off.  */
19169       cpp_get_callbacks (reader)->translate_include = NULL;
19170       return nullptr;
19171     }
19172 
19173   if (!spans.init_p ())
19174     /* Before the main file, don't divert.  */
19175     return nullptr;
19176 
19177   dump.push (NULL);
19178 
19179   dump () && dump ("Checking include translation '%s'", path);
19180   auto *mapper = get_mapper (cpp_main_loc (reader));
19181 
19182   size_t len = strlen (path);
19183   path = canonicalize_header_name (NULL, loc, true, path, len);
19184   auto packet = mapper->IncludeTranslate (path, Cody::Flags::None, len);
19185   int xlate = false;
19186   if (packet.GetCode () == Cody::Client::PC_BOOL)
19187     xlate = -int (packet.GetInteger ());
19188   else if (packet.GetCode () == Cody::Client::PC_PATHNAME)
19189     {
19190       /* Record the CMI name for when we do the import.  */
19191       module_state *import = get_module (build_string (len, path));
19192       import->set_filename (packet);
19193       xlate = +1;
19194     }
19195   else
19196     {
19197       gcc_checking_assert (packet.GetCode () == Cody::Client::PC_ERROR);
19198       error_at (loc, "cannot determine %<#include%> translation of %s: %s",
19199 		path, packet.GetString ().c_str ());
19200     }
19201 
19202   bool note = false;
19203   if (note_include_translate_yes && xlate > 1)
19204     note = true;
19205   else if (note_include_translate_no && xlate == 0)
19206     note = true;
19207   else if (note_includes)
19208     /* We do not expect the note_includes vector to be large, so O(N)
19209        iteration.  */
19210     for (unsigned ix = note_includes->length (); !note && ix--;)
19211       if (!strcmp ((*note_includes)[ix], path))
19212 	note = true;
19213 
19214   if (note)
19215     inform (loc, xlate
19216 	    ? G_("include %qs translated to import")
19217 	    : G_("include %qs processed textually") , path);
19218 
19219   dump () && dump (xlate ? "Translating include to import"
19220 		   : "Keeping include as include");
19221   dump.pop (0);
19222 
19223   if (!(xlate > 0))
19224     return nullptr;
19225 
19226   /* Create the translation text.  */
19227   loc = ordinary_loc_of (lmaps, loc);
19228   const line_map_ordinary *map
19229     = linemap_check_ordinary (linemap_lookup (lmaps, loc));
19230   unsigned col = SOURCE_COLUMN (map, loc);
19231   col -= (col != 0); /* Columns are 1-based.  */
19232 
19233   unsigned alloc = len + col + 60;
19234   char *res = XNEWVEC (char, alloc);
19235 
19236   strcpy (res, "__import");
19237   unsigned actual = 8;
19238   if (col > actual)
19239     {
19240       /* Pad out so the filename appears at the same position.  */
19241       memset (res + actual, ' ', col - actual);
19242       actual = col;
19243     }
19244   /* No need to encode characters, that's not how header names are
19245      handled.  */
19246   actual += snprintf (res + actual, alloc - actual,
19247 		      "\"%s\" [[__translated]];\n", path);
19248   gcc_checking_assert (actual < alloc);
19249 
19250   /* cpplib will delete the buffer.  */
19251   return res;
19252 }
19253 
19254 static void
19255 begin_header_unit (cpp_reader *reader)
19256 {
19257   /* Set the module header name from the main_input_filename.  */
19258   const char *main = main_input_filename;
19259   size_t len = strlen (main);
19260   main = canonicalize_header_name (NULL, 0, true, main, len);
19261   module_state *module = get_module (build_string (len, main));
19262 
19263   preprocess_module (module, cpp_main_loc (reader), false, false, true, reader);
19264 }
19265 
19266 /* We've just properly entered the main source file.  I.e. after the
19267    command line, builtins and forced headers.  Record the line map and
19268    location of this map.  Note we may be called more than once.  The
19269    first call sticks.  */
19270 
19271 void
19272 module_begin_main_file (cpp_reader *reader, line_maps *lmaps,
19273 		       const line_map_ordinary *map)
19274 {
19275   gcc_checking_assert (lmaps == line_table);
19276   if (modules_p () && !spans.init_p ())
19277     {
19278       unsigned n = dump.push (NULL);
19279       spans.init (lmaps, map);
19280       dump.pop (n);
19281       if (flag_header_unit && !cpp_get_options (reader)->preprocessed)
19282 	{
19283 	  /* Tell the preprocessor this is an include file.  */
19284 	  cpp_retrofit_as_include (reader);
19285 	  begin_header_unit (reader);
19286 	}
19287     }
19288 }
19289 
19290 /* Process the pending_import queue, making sure we know the
19291    filenames.   */
19292 
19293 static void
19294 name_pending_imports (cpp_reader *reader)
19295 {
19296   auto *mapper = get_mapper (cpp_main_loc (reader));
19297 
19298   if (!vec_safe_length (pending_imports))
19299     /* Not doing anything.  */
19300     return;
19301 
19302   timevar_start (TV_MODULE_MAPPER);
19303 
19304   auto n = dump.push (NULL);
19305   dump () && dump ("Resolving direct import names");
19306   bool want_deps = (bool (mapper->get_flags () & Cody::Flags::NameOnly)
19307 		    || cpp_get_deps (reader));
19308   bool any = false;
19309 
19310   for (unsigned ix = 0; ix != pending_imports->length (); ix++)
19311     {
19312       module_state *module = (*pending_imports)[ix];
19313       gcc_checking_assert (module->is_direct ());
19314       if (!module->filename && !module->visited_p)
19315 	{
19316 	  bool export_p = (module->module_p
19317 			   && (module->is_partition () || module->exported_p));
19318 
19319 	  Cody::Flags flags = Cody::Flags::None;
19320 	  if (flag_preprocess_only
19321 	      && !(module->is_header () && !export_p))
19322 	    {
19323 	      if (!want_deps)
19324 		continue;
19325 	      flags = Cody::Flags::NameOnly;
19326 	    }
19327 
19328 	  if (!any)
19329 	    {
19330 	      any = true;
19331 	      mapper->Cork ();
19332 	    }
19333 	  if (export_p)
19334 	    mapper->ModuleExport (module->get_flatname (), flags);
19335 	  else
19336 	    mapper->ModuleImport (module->get_flatname (), flags);
19337 	  module->visited_p = true;
19338 	}
19339     }
19340 
19341   if (any)
19342     {
19343       auto response = mapper->Uncork ();
19344       auto r_iter = response.begin ();
19345       for (unsigned ix = 0; ix != pending_imports->length (); ix++)
19346 	{
19347 	  module_state *module = (*pending_imports)[ix];
19348 	  if (module->visited_p)
19349 	    {
19350 	      module->visited_p = false;
19351 	      gcc_checking_assert (!module->filename);
19352 
19353 	      module->set_filename (*r_iter);
19354 	      ++r_iter;
19355 	    }
19356 	}
19357     }
19358 
19359   dump.pop (n);
19360 
19361   timevar_stop (TV_MODULE_MAPPER);
19362 }
19363 
19364 /* We've just lexed a module-specific control line for MODULE.  Mark
19365    the module as a direct import, and possibly load up its macro
19366    state.  Returns the primary module, if this is a module
19367    declaration.  */
19368 /* Perhaps we should offer a preprocessing mode where we read the
19369    directives from the header unit, rather than require the header's
19370    CMI.  */
19371 
19372 module_state *
19373 preprocess_module (module_state *module, location_t from_loc,
19374 		   bool in_purview, bool is_import, bool is_export,
19375 		   cpp_reader *reader)
19376 {
19377   if (!is_import)
19378     {
19379       if (module->loc)
19380 	/* It's already been mentioned, so ignore its module-ness.  */
19381 	is_import = true;
19382       else
19383 	{
19384 	  /* Record that it is the module being declared.  */
19385 	  module->module_p = true;
19386 	  if (is_export)
19387 	    {
19388 	      module->exported_p = true;
19389 	      module->interface_p = true;
19390 	    }
19391 	}
19392     }
19393 
19394   if (module->directness < MD_DIRECT + in_purview)
19395     {
19396       /* Mark as a direct import.  */
19397       module->directness = module_directness (MD_DIRECT + in_purview);
19398 
19399       /* Set the location to be most informative for users.  */
19400       from_loc = ordinary_loc_of (line_table, from_loc);
19401       if (module->loadedness != ML_NONE)
19402 	linemap_module_reparent (line_table, module->loc, from_loc);
19403       else
19404 	{
19405 	  module->loc = from_loc;
19406 	  if (!module->flatname)
19407 	    module->set_flatname ();
19408 	}
19409     }
19410 
19411   auto desired = ML_CONFIG;
19412   if (is_import
19413       && module->is_header ()
19414       && (!cpp_get_options (reader)->preprocessed
19415 	  || cpp_get_options (reader)->directives_only))
19416     /* We need preprocessor state now.  */
19417     desired = ML_PREPROCESSOR;
19418 
19419   if (!is_import || module->loadedness < desired)
19420     {
19421       vec_safe_push (pending_imports, module);
19422 
19423       if (desired == ML_PREPROCESSOR)
19424 	{
19425 	  unsigned n = dump.push (NULL);
19426 
19427 	  dump () && dump ("Reading %M preprocessor state", module);
19428 	  name_pending_imports (reader);
19429 
19430 	  /* Preserve the state of the line-map.  */
19431 	  unsigned pre_hwm = LINEMAPS_ORDINARY_USED (line_table);
19432 
19433 	  /* We only need to close the span if we're going to emit a
19434 	     CMI.  But that's a little tricky -- our token scanner
19435 	     needs to be smarter -- and this isn't much state.
19436 	     Remember, we've not parsed anything at this point, so
19437 	     our module state flags are inadequate.  */
19438 	  spans.maybe_init ();
19439 	  spans.close ();
19440 
19441 	  timevar_start (TV_MODULE_IMPORT);
19442 
19443 	  /* Load the config of each pending import -- we must assign
19444 	     module numbers monotonically.  */
19445 	  for (unsigned ix = 0; ix != pending_imports->length (); ix++)
19446 	    {
19447 	      auto *import = (*pending_imports)[ix];
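	      /* Skip the module we are in the middle of declaring, anything
		 already (partially) loaded, and named modules when only
		 preprocessing.  */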
19448 	      if (!(import->module_p
19449 		    && (import->is_partition () || import->exported_p))
19450 		  && import->loadedness == ML_NONE
19451 		  && (import->is_header () || !flag_preprocess_only))
19452 		{
19453 		  unsigned n = dump.push (import);
19454 		  import->do_import (reader, true);
19455 		  dump.pop (n);
19456 		}
19457 	    }
19458 	  vec_free (pending_imports);
19459 
19460 	  /* Restore the line-map state.  */
19461 	  spans.open (linemap_module_restore (line_table, pre_hwm));
19462 
19463 	  /* Now read the preprocessor state of this particular
19464 	     import.  */
19465 	  if (module->loadedness == ML_CONFIG
19466 	      && module->read_preprocessor (true))
19467 	    module->import_macros ();
19468 
19469 	  timevar_stop (TV_MODULE_IMPORT);
19470 
19471 	  dump.pop (n);
19472 	}
19473     }
19474 
19475   return is_import ? NULL : get_primary (module);
19476 }
19477 
19478 /* We've completed phase-4 translation.  Emit any dependency
19479    information for the not-yet-loaded direct imports, and fill in
19480    their file names.  We'll have already loaded up the direct header
19481    unit wavefront.  */
19482 
19483 void
19484 preprocessed_module (cpp_reader *reader)
19485 {
19486   unsigned n = dump.push (NULL);
19487 
19488   dump () && dump ("Completed phase-4 (tokenization) processing");
19489 
19490   name_pending_imports (reader);
19491   vec_free (pending_imports);
19492 
19493   spans.maybe_init ();
19494   spans.close ();
19495 
19496   using iterator = hash_table<module_state_hash>::iterator;
19497   if (mkdeps *deps = cpp_get_deps (reader))
19498     {
19499       /* Walk the module hash, informing the dependency machinery.  */
19500       iterator end = modules_hash->end ();
19501       for (iterator iter = modules_hash->begin (); iter != end; ++iter)
19502 	{
19503 	  module_state *module = *iter;
19504 
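	  /* For each direct import: a module interface or partition
	     produces a CMI, which is an additional target of this
	     compilation; anything else is recorded as a plain
	     dependency.  */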
19505 	  if (module->is_direct ())
19506 	    {
19507 	      if (module->is_module ()
19508 		  && (module->is_interface () || module->is_partition ()))
19509 		deps_add_module_target (deps, module->get_flatname (),
19510 					maybe_add_cmi_prefix (module->filename),
19511 					module->is_header ());
19512 	      else
19513 		deps_add_module_dep (deps, module->get_flatname ());
19514 	    }
19515 	}
19516     }
19517 
19518   if (flag_header_unit && !flag_preprocess_only)
19519     {
19520       /* Find the main module -- remember, it's not yet in the module
19521 	 array.  */
19522       iterator end = modules_hash->end ();
19523       for (iterator iter = modules_hash->begin (); iter != end; ++iter)
19524 	{
19525 	  module_state *module = *iter;
19526 	  if (module->is_module ())
19527 	    {
19528 	      declare_module (module, cpp_main_loc (reader), true, NULL, reader);
19529 	      break;
19530 	    }
19531 	}
19532     }
19533 
19534   dump.pop (n);
19535 }
19536 
19537 /* VAL is a global tree; add it to the global vec if it is
19538    interesting.  Add some of its targets, if they too are
19539    interesting.  We do not add identifiers, as they can be re-found
19540    via the identifier hash table.  There is a cost to the number of
19541    global trees.  */
19542 
19543 static int
19544 maybe_add_global (tree val, unsigned &crc)
19545 {
19546   int v = 0;
19547 
19548   if (val && !(identifier_p (val) || TREE_VISITED (val)))
19549     {
19550       TREE_VISITED (val) = true;
19551       crc = crc32_unsigned (crc, fixed_trees->length ());
19552       vec_safe_push (fixed_trees, val);
19553       v++;
19554 
19555       if (CODE_CONTAINS_STRUCT (TREE_CODE (val), TS_TYPED))
19556 	v += maybe_add_global (TREE_TYPE (val), crc);
19557       if (CODE_CONTAINS_STRUCT (TREE_CODE (val), TS_TYPE_COMMON))
19558 	v += maybe_add_global (TYPE_NAME (val), crc);
19559     }
19560 
19561   return v;
19562 }
19563 
19564 /* Initialize module state.  Create the hash table, determine the
19565    global trees.  Create the module for current TU.  */
19566 
19567 void
19568 init_modules (cpp_reader *reader)
19569 {
19570   /* PCH should not be reachable because of lang-specs, but the
19571    user could have overridden that.  */
19572   if (pch_file)
19573     fatal_error (input_location,
19574 		 "C++ modules are incompatible with precompiled headers");
19575 
19576   if (cpp_get_options (reader)->traditional)
19577     fatal_error (input_location,
19578 		 "C++ modules are incompatible with traditional preprocessing");
19579 
19580   if (flag_preprocess_only)
19581     {
19582       cpp_options *cpp_opts = cpp_get_options (reader);
19583       if (flag_no_output
19584 	  || (cpp_opts->deps.style != DEPS_NONE
19585 	      && !cpp_opts->deps.need_preprocessor_output))
19586 	{
19587 	  warning (0, flag_dump_macros == 'M'
19588 		   ? G_("macro debug output may be incomplete with modules")
19589 		   : G_("module dependencies require preprocessing"));
19590 	  if (cpp_opts->deps.style != DEPS_NONE)
19591 	    inform (input_location, "you should use the %<-%s%> option",
19592 		    cpp_opts->deps.style == DEPS_SYSTEM ? "MD" : "MMD");
19593 	}
19594     }
19595 
19596   /* :: is always exported.  */
19597   DECL_MODULE_EXPORT_P (global_namespace) = true;
19598 
19599   modules_hash = hash_table<module_state_hash>::create_ggc (31);
19600   vec_safe_reserve (modules, 20);
19601 
19602   /* Create module for current TU.  */
19603   module_state *current
19604     = new (ggc_alloc<module_state> ()) module_state (NULL_TREE, NULL, false);
19605   current->mod = 0;
19606   bitmap_set_bit (current->imports, 0);
19607   modules->quick_push (current);
19608 
19609   gcc_checking_assert (!fixed_trees);
19610 
19611   headers = BITMAP_GGC_ALLOC ();
19612 
19613   if (note_includes)
19614     /* Canonicalize header names.  */
19615     for (unsigned ix = 0; ix != note_includes->length (); ix++)
19616       {
19617 	const char *hdr = (*note_includes)[ix];
19618 	size_t len = strlen (hdr);
19619 
19620 	bool system = hdr[0] == '<';
19621 	bool user = hdr[0] == '"';
19622 	bool delimed = system || user;
19623 
19624 	if (len <= (delimed ? 2 : 0)
19625 	    || (delimed && hdr[len-1] != (system ? '>' : '"')))
19626 	  error ("invalid header name %qs", hdr);
19627 
19628 	hdr = canonicalize_header_name (delimed ? reader : NULL,
19629 					0, !delimed, hdr, len);
19630 	char *path = XNEWVEC (char, len + 1);
19631 	memcpy (path, hdr, len);
19632 	path[len] = 0;
19633 
19634 	(*note_includes)[ix] = path;
19635       }
19636 
19637   if (note_cmis)
19638     /* Canonicalize & mark module names.  */
19639     for (unsigned ix = 0; ix != note_cmis->length (); ix++)
19640       {
19641 	const char *name = (*note_cmis)[ix];
19642 	size_t len = strlen (name);
19643 
19644 	bool is_system = name[0] == '<';
19645 	bool is_user = name[0] == '"';
19646 	bool is_pathname = false;
19647 	if (!(is_system || is_user))
19648 	  for (unsigned ix = len; !is_pathname && ix--;)
19649 	    is_pathname = IS_DIR_SEPARATOR (name[ix]);
19650 	if (is_system || is_user || is_pathname)
19651 	  {
19652 	    if (len <= (is_pathname ? 0 : 2)
19653 		|| (!is_pathname && name[len-1] != (is_system ? '>' : '"')))
19654 	      {
19655 		error ("invalid header name %qs", name);
19656 		continue;
19657 	      }
19658 	    else
19659 	      name = canonicalize_header_name (is_pathname ? nullptr : reader,
19660 					       0, is_pathname, name, len);
19661 	  }
19662 	if (auto module = get_module (name))
19663 	  module->inform_cmi_p = 1;
19664 	else
19665 	  error ("invalid module name %qs", name);
19666       }
19667 
19668   dump.push (NULL);
19669 
19670   /* Determine lazy handle bound.  */
19671   {
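    /* Lazy loading holds CMI files open, so bound the number of such
       handles by the file-descriptor resource limit, keeping some
       headroom for other open files.  */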
19672     unsigned limit = 1000;
19673 #if HAVE_GETRLIMIT
19674     struct rlimit rlimit;
19675     if (!getrlimit (RLIMIT_NOFILE, &rlimit))
19676       {
19677 	lazy_hard_limit = (rlimit.rlim_max < 1000000
19678 			   ? unsigned (rlimit.rlim_max) : 1000000);
19679 	lazy_hard_limit = (lazy_hard_limit > LAZY_HEADROOM
19680 			   ? lazy_hard_limit - LAZY_HEADROOM : 0);
19681 	if (rlimit.rlim_cur < limit)
19682 	  limit = unsigned (rlimit.rlim_cur);
19683       }
19684 #endif
19685     limit = limit > LAZY_HEADROOM ? limit - LAZY_HEADROOM : 1;
19686 
19687     if (unsigned parm = param_lazy_modules)
19688       {
19689 	if (parm <= limit || !lazy_hard_limit || !try_increase_lazy (parm))
19690 	  lazy_limit = parm;
19691       }
19692     else
19693       lazy_limit = limit;
19694   }
19695 
19696   if (dump ())
19697     {
19698       verstr_t ver;
19699       version2string (MODULE_VERSION, ver);
19700       dump ("Source: %s", main_input_filename);
19701       dump ("Compiler: %s", version_string);
19702       dump ("Modules: %s", ver);
19703       dump ("Checking: %s",
19704 #if CHECKING_P
19705 	    "checking"
19706 #elif ENABLE_ASSERT_CHECKING
19707 	    "asserting"
19708 #else
19709 	    "release"
19710 #endif
19711 	    );
19712       dump ("Compiled by: "
19713 #ifdef __GNUC__
19714 	    "GCC %d.%d, %s", __GNUC__, __GNUC_MINOR__,
19715 #ifdef __OPTIMIZE__
19716 	    "optimizing"
19717 #else
19718 	    "not optimizing"
19719 #endif
19720 #else
19721 	    "not GCC"
19722 #endif
19723 	    );
19724       dump ("Reading: %s", MAPPED_READING ? "mmap" : "fileio");
19725       dump ("Writing: %s", MAPPED_WRITING ? "mmap" : "fileio");
19726       dump ("Lazy limit: %u", lazy_limit);
19727       dump ("Lazy hard limit: %u", lazy_hard_limit);
19728       dump ("");
19729     }
19730 
19731   /* Construct the global tree array.  This is an array of unique
19732      global trees (& types).  Do this now, rather than lazily, as
19733      some global trees are lazily created and we don't want that to
19734      mess with our syndrome of fixed trees.  */
19735   unsigned crc = 0;
19736   vec_alloc (fixed_trees, 200);
19737 
19738   dump () && dump ("+Creating globals");
19739   /* Insert the TRANSLATION_UNIT_DECL.  */
19740   TREE_VISITED (DECL_CONTEXT (global_namespace)) = true;
19741   fixed_trees->quick_push (DECL_CONTEXT (global_namespace));
19742   for (unsigned jx = 0; global_tree_arys[jx].first; jx++)
19743     {
19744       const tree *ptr = global_tree_arys[jx].first;
19745       unsigned limit = global_tree_arys[jx].second;
19746 
19747       for (unsigned ix = 0; ix != limit; ix++, ptr++)
19748 	{
19749 	  !(ix & 31) && dump ("") && dump ("+\t%u:%u:", jx, ix);
19750 	  unsigned v = maybe_add_global (*ptr, crc);
19751 	  dump () && dump ("+%u", v);
19752 	}
19753     }
19754   global_crc = crc32_unsigned (crc, fixed_trees->length ());
19755   dump ("") && dump ("Created %u unique globals, crc=%x",
19756 		     fixed_trees->length (), global_crc);
19757   for (unsigned ix = fixed_trees->length (); ix--;)
19758     TREE_VISITED ((*fixed_trees)[ix]) = false;
19759 
19760   dump.pop (0);
19761 
19762   if (!flag_module_lazy)
19763     /* Get the mapper now, if we're not being lazy.  */
19764     get_mapper (cpp_main_loc (reader));
19765 
19766   if (!flag_preprocess_only)
19767     {
19768       pending_table = new pending_map_t (EXPERIMENT (1, 400));
19769       entity_map = new entity_map_t (EXPERIMENT (1, 400));
19770       vec_safe_reserve (entity_ary, EXPERIMENT (1, 400));
19771     }
19772 
19773 #if CHECKING_P
19774   note_defs = note_defs_table_t::create_ggc (1000);
19775 #endif
19776 
19777   if (flag_header_unit && cpp_get_options (reader)->preprocessed)
19778     begin_header_unit (reader);
19779 
19780   /* Collect here to make sure things are tagged correctly (when
19781      aggressively GC'd).  */
19782   ggc_collect ();
19783 }
19784 
19785 /* If NODE is a deferred macro, load it.  */
19786 
19787 static int
19788 load_macros (cpp_reader *reader, cpp_hashnode *node, void *)
19789 {
19790   location_t main_loc
19791     = MAP_START_LOCATION (LINEMAPS_ORDINARY_MAP_AT (line_table, 0));
19792 
19793   if (cpp_user_macro_p (node)
19794       && !node->value.macro)
19795     {
19796       cpp_macro *macro = cpp_get_deferred_macro (reader, node, main_loc);
19797       dump () && dump ("Loaded macro #%s %I",
19798 		       macro ? "define" : "undef", identifier (node));
19799     }
19800 
19801   return 1;
19802 }
19803 
19804 /* At the end of tokenizing, we no longer need the macro tables of
19805    imports.  But the user might have requested some checking.  */
19806 
19807 void
19808 maybe_check_all_macros (cpp_reader *reader)
19809 {
19810   if (!warn_imported_macros)
19811     return;
19812 
19813   /* Force loading of any remaining deferred macros.  This will
19814      produce diagnostics if they are ill-formed.  */
19815   unsigned n = dump.push (NULL);
19816   cpp_forall_identifiers (reader, load_macros, NULL);
19817   dump.pop (n);
19818 }
19819 
19820 /* Write the CMI, if we're a module interface.  */
19821 
19822 void
19823 finish_module_processing (cpp_reader *reader)
19824 {
19825   if (header_module_p ())
19826     module_kind &= ~MK_EXPORTING;
19827 
19828   if (!modules || !(*modules)[0]->name)
19829     {
19830       if (flag_module_only)
19831 	warning (0, "%<-fmodule-only%> used for non-interface");
19832     }
19833   else if (!flag_syntax_only)
19834     {
19835       int fd = -1;
19836       int e = ENOENT;
19837 
19838       timevar_start (TV_MODULE_EXPORT);
19839 
19840       /* Force a valid but empty line map at the end.  This simplifies
19841 	 the line table preparation and writing logic.  */
19842       linemap_add (line_table, LC_ENTER, false, "", 0);
19843 
19844       /* We write to a tmpname, and then atomically rename.  */
19845       const char *path = NULL;
19846       char *tmp_name = NULL;
19847       module_state *state = (*modules)[0];
19848 
19849       unsigned n = dump.push (state);
19850       state->announce ("creating");
19851       if (state->filename)
19852 	{
19853 	  size_t len = 0;
19854 	  path = maybe_add_cmi_prefix (state->filename, &len);
19855 	  tmp_name = XNEWVEC (char, len + 3);
19856 	  memcpy (tmp_name, path, len);
19857 	  strcpy (&tmp_name[len], "~");
19858 
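	  /* Try to create the temporary file; if an intermediate directory
	     is missing, create the directories and retry once.  */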
19859 	  if (!errorcount)
19860 	    for (unsigned again = 2; ; again--)
19861 	      {
19862 		fd = open (tmp_name,
19863 			   O_RDWR | O_CREAT | O_TRUNC | O_CLOEXEC | O_BINARY,
19864 			   S_IRUSR|S_IWUSR|S_IRGRP|S_IWGRP|S_IROTH|S_IWOTH);
19865 		e = errno;
19866 		if (fd >= 0 || !again || e != ENOENT)
19867 		  break;
19868 		create_dirs (tmp_name);
19869 	      }
19870 	  if (note_module_cmi_yes || state->inform_cmi_p)
19871 	    inform (state->loc, "writing CMI %qs", path);
19872 	  dump () && dump ("CMI is %s", path);
19873 	}
19874 
19875       if (errorcount)
19876 	warning_at (state->loc, 0, "not writing module %qs due to errors",
19877 		    state->get_flatname ());
19878       else
19879 	{
19880 	  elf_out to (fd, e);
19881 	  if (to.begin ())
19882 	    {
19883 	      auto loc = input_location;
19884 	      /* So crashes finger-point the module decl.  */
19885 	      input_location = state->loc;
19886 	      state->write (&to, reader);
19887 	      input_location = loc;
19888 	    }
19889 	  if (to.end ())
19890 	    {
19891 	      /* Some OSes do not replace NEWNAME if it already
19892 		 exists.  This'll have a race condition in erroneous
19893 		 concurrent builds.  */
19894 	      unlink (path);
19895 	      if (rename (tmp_name, path))
19896 		{
19897 		  dump () && dump ("Rename ('%s','%s') errno=%u", tmp_name, path, errno);
19898 		  to.set_error (errno);
19899 		}
19900 	    }
19901 
19902 	  if (to.get_error ())
19903 	    {
19904 	      error_at (state->loc, "failed to write compiled module: %s",
19905 			to.get_error (state->filename));
19906 	      state->note_cmi_name ();
19907 	    }
19908 	}
19909 
19910       if (!errorcount)
19911 	{
19912 	  auto *mapper = get_mapper (cpp_main_loc (reader));
19913 
19914 	  mapper->ModuleCompiled (state->get_flatname ());
19915 	}
19916       else if (path)
19917 	{
19918 	  /* We failed; attempt to erase all evidence we even tried.  */
19919 	  unlink (tmp_name);
19920 	  unlink (path);
19921 	  XDELETEVEC (tmp_name);
19922 	}
19923 
19924       dump.pop (n);
19925       timevar_stop (TV_MODULE_EXPORT);
19926 
19927       ggc_collect ();
19928     }
19929 
19930   if (modules)
19931     {
19932       unsigned n = dump.push (NULL);
19933       dump () && dump ("Imported %u modules", modules->length () - 1);
19934       dump () && dump ("Containing %u clusters", available_clusters);
19935       dump () && dump ("Loaded %u clusters (%u%%)", loaded_clusters,
19936 		       (loaded_clusters * 100 + available_clusters / 2) /
19937 		       (available_clusters + !available_clusters));
19938       dump.pop (n);
19939     }
19940 
19941   if (modules && !header_module_p ())
19942     {
19943       /* Determine call_init_p.  We need the same bitmap allocation
19944          scheme as for the imports member.  */
19945       function_depth++; /* Disable GC.  */
19946       bitmap indirect_imports (BITMAP_GGC_ALLOC ());
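      /* An import needs an initializer call from this TU only if it is
	 not a header unit and is not reachable from another import whose
	 initializer we already call -- that one will cover it.  */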
19947 
19948       /* Because indirect imports are before their direct import, and
19949 	 we're scanning the array backwards, we only need one pass!  */
19950       for (unsigned ix = modules->length (); --ix;)
19951 	{
19952 	  module_state *import = (*modules)[ix];
19953 
19954 	  if (!import->is_header ()
19955 	      && !bitmap_bit_p (indirect_imports, ix))
19956 	    {
19957 	      /* Everything this imports is therefore indirectly
19958 		 imported.  */
19959 	      bitmap_ior_into (indirect_imports, import->imports);
19960 	      /* We don't have to worry about the self-import bit,
19961 		 because of the single pass.  */
19962 
19963 	      import->call_init_p = true;
19964 	      num_init_calls_needed++;
19965 	    }
19966 	}
19967       function_depth--;
19968     }
19969 }
19970 
19971 void
19972 fini_modules ()
19973 {
19974   /* We're done with the macro tables now.  */
19975   vec_free (macro_exports);
19976   vec_free (macro_imports);
19977   headers = NULL;
19978 
19979   /* We're now done with everything but the module names.  */
19980   set_cmi_repo (NULL);
19981   if (mapper)
19982     {
19983       timevar_start (TV_MODULE_MAPPER);
19984       module_client::close_module_client (0, mapper);
19985       mapper = nullptr;
19986       timevar_stop (TV_MODULE_MAPPER);
19987     }
19988   module_state_config::release ();
19989 
19990 #if CHECKING_P
19991   note_defs = NULL;
19992 #endif
19993 
19994   if (modules)
19995     for (unsigned ix = modules->length (); --ix;)
19996       if (module_state *state = (*modules)[ix])
19997 	state->release ();
19998 
19999   /* No need to lookup modules anymore.  */
20000   modules_hash = NULL;
20001 
20002   /* Or entity array.  We still need the entity map to find import numbers.  */
20003   vec_free (entity_ary);
20004   entity_ary = NULL;
20005 
20006   /* Or remember any pending entities.  */
20007   delete pending_table;
20008   pending_table = NULL;
20009 
20010   /* Or any attachments -- Let it go!  */
20011   delete attached_table;
20012   attached_table = NULL;
20013 
20014   /* Allow a GC, we've possibly made much data unreachable.  */
20015   ggc_collect ();
20016 }
20017 
20018 /* If CODE is a module option, handle it & return true.  Otherwise
20019    return false.  For unknown reasons I cannot get the option
20020    generation machinery to give -fmodule-mapper or -fmodule-header
20021    a string-type option variable.  */
20022 
20023 bool
20024 handle_module_option (unsigned code, const char *str, int)
20025 {
20026   auto hdr = CMS_header;
20027 
20028   switch (opt_code (code))
20029     {
20030     case OPT_fmodule_mapper_:
20031       module_mapper_name = str;
20032       return true;
20033 
20034     case OPT_fmodule_header_:
20035       {
20036 	if (!strcmp (str, "user"))
20037 	  hdr = CMS_user;
20038 	else if (!strcmp (str, "system"))
20039 	  hdr = CMS_system;
20040 	else
20041 	  error ("unknown header kind %qs", str);
20042       }
20043       /* Fallthrough.  */
20044 
20045     case OPT_fmodule_header:
20046       flag_header_unit = hdr;
20047       flag_modules = 1;
20048       return true;
20049 
20050     case OPT_flang_info_include_translate_:
20051       vec_safe_push (note_includes, str);
20052       return true;
20053 
20054     case OPT_flang_info_module_cmi_:
20055       vec_safe_push (note_cmis, str);
20056       return true;
20057 
20058     default:
20059       return false;
20060     }
20061 }
20062 
20063 /* Set preprocessor callbacks and options for modules.  */
20064 
20065 void
20066 module_preprocess_options (cpp_reader *reader)
20067 {
20068   gcc_checking_assert (!lang_hooks.preprocess_undef);
20069   if (modules_p ())
20070     {
20071       auto *cb = cpp_get_callbacks (reader);
20072 
20073       cb->translate_include = maybe_translate_include;
20074       cb->user_deferred_macro = module_state::deferred_macro;
20075       if (flag_header_unit)
20076 	{
20077 	  /* If the preprocessor hook is already in use, that
20078 	     implementation will call the undef langhook.  */
20079 	  if (cb->undef)
20080 	    lang_hooks.preprocess_undef = module_state::undef_macro;
20081 	  else
20082 	    cb->undef = module_state::undef_macro;
20083 	}
20084       auto *opt = cpp_get_options (reader);
20085       opt->module_directives = true;
20086       opt->main_search = cpp_main_search (flag_header_unit);
20087     }
20088 }
20089 
20090 #include "gt-cp-module.h"
20091