1 /* C++ modules.  Experimental!
2    Copyright (C) 2017-2021 Free Software Foundation, Inc.
3    Written by Nathan Sidwell <nathan@acm.org> while at FaceBook
4 
5    This file is part of GCC.
6 
7    GCC is free software; you can redistribute it and/or modify it
8    under the terms of the GNU General Public License as published by
9    the Free Software Foundation; either version 3, or (at your option)
10    any later version.
11 
12    GCC is distributed in the hope that it will be useful, but
13    WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 /* Comments in this file have a non-negligible chance of being wrong
22    or at least inaccurate, due to (a) my misunderstanding, (b)
23    ambiguities that I have interpreted differently from the original
24    intent, (c) changes in the specification, (d) my poor wording, (e) source
25    changes.  */
26 
27 /* (Incomplete) Design Notes
28 
29    A hash table contains all module names.  Imported modules are
30    present in a modules array, which by construction places an
31    import's dependencies before the import itself.  The single
32    exception is the current TU, which always occupies slot zero (even
33    when it is not a module).
34 
35    Imported decls occupy an entity_ary, an array of binding_slots, indexed
36    by importing module and index within that module.  A flat index is
37    used, as each module reserves a contiguous range of indices.
38    Initially each slot indicates the CMI section containing the
39    streamed decl.  When the decl is imported it will point to the decl
40    itself.
41 
42    Additionally each imported decl is mapped in the entity_map via its
43    DECL_UID to the flat index in the entity_ary.  Thus we can locate
44    the index for any imported decl by using this map and then
45    de-flattening the index via a binary search of the module vector.
46    Cross-module references are by (remapped) module number and
47    module-local index.
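
   As an illustrative sketch only (the names and helpers here are
   stand-ins, not necessarily the real interfaces):

     unsigned flat = *entity_map->get (DECL_UID (decl));
     module_state *mod = binary_search (modules, flat); // owning import
     unsigned local = flat - mod->entity_base;          // index within MOD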
48 
49    Each importable DECL contains several flags.  The simple set are
50    DECL_EXPORT_P, DECL_MODULE_PURVIEW_P and DECL_MODULE_IMPORT_P.  The
51    first indicates whether it is exported, the second whether it is in
52    the module purview (as opposed to the global module fragment), and
53    the third indicates whether it was an import into this TU or not.
54 
55    The more detailed flags are DECL_MODULE_PARTITION_P,
56    DECL_MODULE_ENTITY_P.  The first is set in a primary interface unit
57    on decls that were read from module partitions (these will have
58    DECL_MODULE_IMPORT_P set too).  Such decls will be streamed out to
59    the primary's CMI.  DECL_MODULE_ENTITY_P is set when an entity is
60    imported, even if it matched a non-imported entity.  Such a decl
61    will not have DECL_MODULE_IMPORT_P set, even though it has an entry
62    in the entity map and array.
63 
64    Header units are module-like.
65 
66    For namespace-scope lookup, the decls for a particular module are
67    held in a sparse array hanging off the binding of the name.
68    This is partitioned into two: a few fixed slots at the start
69    followed by the sparse slots afterwards.  By construction we only
70    need to append new slots to the end -- there is never a need to
71    insert in the middle.  The fixed slots are MODULE_SLOT_CURRENT for
72    the current TU (regardless of whether it is a module or not),
73    MODULE_SLOT_GLOBAL and MODULE_SLOT_PARTITION.  These latter two
74    slots are used for merging entities across the global module and
75    module partitions respectively.  MODULE_SLOT_PARTITION is only
76    present in a module.  Neither of those two slots is searched during
77    name lookup -- they are internal use only.  This vector is created
78    lazily once we require it; if there is only a declaration from the
79    current TU, a regular binding is present.  It is converted on
80    demand.
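
   Schematically, once converted, a binding vector looks like:

     [ CURRENT | GLOBAL | PARTITION | import A | import B | ... ]

   (PARTITION only when this TU is a module), with the sparse
   per-import slots appended in increasing module order.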
81 
82    OPTIMIZATION: Outside of the current TU, we only need ADL to work.
83    We could optimize regular lookup for the current TU by glomming all
84    the visible decls on its slot.  Perhaps wait until design is a
85    little more settled though.
86 
87    There is only one instance of each extern-linkage namespace.  It
88    appears in every module slot that makes it visible.  It also
89    appears in MODULE_SLOT_GLOBAL.  (It is an ODR violation if they
90    collide with some other global module entity.)  We also have an
91    optimization that shares the slot for adjacent modules that declare
92    the same such namespace.
93 
94    A module interface compilation produces a Compiled Module Interface
95    (CMI).  The format used is Encapsulated Lazy Records Of Numbered
96    Declarations, which is essentially ELF's section encapsulation. (As
97    all good nerds are aware, Elrond is half Elf.)  Some sections are
98    named, and contain information about the module as a whole (indices
99    etc), and other sections are referenced by number.  Although I
100    don't defend against actively hostile CMIs, there is some
101    checksumming involved to verify data integrity.  When dumping out
102    an interface, we generate a graph of all the
103    independently-redeclarable DECLS that are needed, and the decls
104    they reference.  From that we determine the strongly connected
105    components (SCC) within this TU.  Each SCC is dumped to a separate
106    numbered section of the CMI.  We generate a binding table section,
107    mapping each namespace&name to a defining section.  This allows
108    lazy loading.
109 
110    Lazy loading employs mmap to map a read-only image of the CMI.
111    It thus only occupies address space and is paged in on demand,
112    backed by the CMI file itself.  If mmap is unavailable, regular
113    FILEIO is used.  Also, there's a bespoke ELF reader/writer here,
114    which implements just the section table and sections (including
115    string sections) of a 32-bit ELF in host byte-order.  You can of
116    course inspect it with readelf.  I figured 32-bit is sufficient,
117    for a single module.  I detect running out of section numbers, but
118    do not implement the ELF overflow mechanism.  At least you'll get
119    an error if that happens.
120 
121    We do not separate declarations and definitions.  My guess is that
122    if you refer to the declaration, you'll also need the definition
123    (template body, inline function, class definition etc).  But this
124    does mean we can get larger SCCs than if we separated them.  It is
125    unclear whether this is a win or not.
126 
127    Notice that we embed section indices into the contents of other
128    sections.  Thus random manipulation of the CMI file by ELF tools
129    may well break it.  The kosher way would probably be to introduce
130    indirection via section symbols, but that would require defining a
131    relocation type.
132 
133    Notice that lazy loading of one module's decls can cause lazy
134    loading of other decls in the same or another module.  Clearly we
135    want to avoid loops.  In a correct program there can be no loops in
136    the module dependency graph, and the above-mentioned SCC algorithm
137    places all intra-module circular dependencies in the same SCC.  It
138    also orders the SCCs wrt each other, so dependent SCCs come first.
139    As we load dependent modules first, we know there can be no
140    reference to a higher-numbered module, and because we write out
141    dependent SCCs first, likewise for SCCs within the module.  This
142    allows us to immediately detect broken references.  When loading,
143    we must ensure the rest of the compiler doesn't cause some
144    unconnected load to occur (for instance, instantiate a template).
145 
146 Classes used:
147 
148    dumper - logger
149 
150    data - buffer
151 
152    bytes - data streamer
153    bytes_in : bytes - scalar reader
154    bytes_out : bytes - scalar writer
155 
156    elf - ELROND format
157    elf_in : elf - ELROND reader
158    elf_out : elf - ELROND writer
159 
160    trees_in : bytes_in - tree reader
161    trees_out : bytes_out - tree writer
162 
163    depset - dependency set
164    depset::hash - hash table of depsets
165    depset::tarjan - SCC determinator
166 
167    uidset<T> - set of T's related to a UID
168    uidset<T>::hash - hash table of uidset<T>
169 
170    loc_spans - location map data
171 
172    module_state - module object
173 
174    slurping - data needed during loading
175 
176    macro_import - imported macro data
177    macro_export - exported macro data
178 
179    The ELROND objects use mmap, for both reading and writing.  If mmap
180    is unavailable, fileno IO is used to read and write blocks of data.
181 
182    The mapper object uses fileno IO to communicate with the server or
183    program.   */
184 
185 /* In experimental (trunk) sources, MODULE_VERSION is a #define passed
186    in from the Makefile.  It records the modification date of the
187    source directory -- that's the only way to stay sane.  In release
188    sources, we (plan to) use the compiler's major.minor versioning.
189    While the format might not change between minor versions, it
190    seems simplest to tie the two together.  There's no concept of
191    inter-version compatibility.  */
192 #define IS_EXPERIMENTAL(V) ((V) >= (1U << 20))
193 #define MODULE_MAJOR(V) ((V) / 10000)
194 #define MODULE_MINOR(V) ((V) % 10000)
195 #define EXPERIMENT(A,B) (IS_EXPERIMENTAL (MODULE_VERSION) ? (A) : (B))
196 #ifndef MODULE_VERSION
197 #include "bversion.h"
198 #define MODULE_VERSION (BUILDING_GCC_MAJOR * 10000U + BUILDING_GCC_MINOR)
199 #elif !IS_EXPERIMENTAL (MODULE_VERSION)
200 #error "This is not the version I was looking for."
201 #endif
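
/* For example, a release build of GCC 11.2 would get MODULE_VERSION
   110002 from bversion.h: MODULE_MAJOR yields 11, MODULE_MINOR yields
   2, and IS_EXPERIMENTAL is false, as 110002 < 2^20.  */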
202 
203 #define _DEFAULT_SOURCE 1 /* To get TZ field of struct tm, if available.  */
204 #include "config.h"
205 #define INCLUDE_STRING
206 #define INCLUDE_VECTOR
207 #include "system.h"
208 #include "coretypes.h"
209 #include "cp-tree.h"
210 #include "timevar.h"
211 #include "stringpool.h"
212 #include "dumpfile.h"
213 #include "bitmap.h"
214 #include "cgraph.h"
215 #include "tree-iterator.h"
216 #include "cpplib.h"
217 #include "mkdeps.h"
218 #include "incpath.h"
219 #include "libiberty.h"
220 #include "stor-layout.h"
221 #include "version.h"
222 #include "tree-diagnostic.h"
223 #include "toplev.h"
224 #include "opts.h"
225 #include "attribs.h"
226 #include "intl.h"
227 #include "langhooks.h"
228 /* This TU doesn't need or want to see the networking.  */
229 #define CODY_NETWORKING 0
230 #include "mapper-client.h"
231 
232 #if 0 // 1 for testing no mmap
233 #define MAPPED_READING 0
234 #define MAPPED_WRITING 0
235 #else
236 #if HAVE_MMAP_FILE && _POSIX_MAPPED_FILES > 0
237 /* mmap, munmap.  */
238 #define MAPPED_READING 1
239 #if HAVE_SYSCONF && defined (_SC_PAGE_SIZE)
240 /* msync, sysconf (_SC_PAGE_SIZE), ftruncate  */
241 /* posix_fallocate used if available.  */
242 #define MAPPED_WRITING 1
243 #else
244 #define MAPPED_WRITING 0
245 #endif
246 #else
247 #define MAPPED_READING 0
248 #define MAPPED_WRITING 0
249 #endif
250 #endif
251 
252 /* Some open(2) flag differences, what a colourful world it is!  */
253 #if defined (O_CLOEXEC)
254 // OK
255 #elif defined (_O_NOINHERIT)
256 /* Windows' _O_NOINHERIT matches O_CLOEXEC flag */
257 #define O_CLOEXEC _O_NOINHERIT
258 #else
259 #define O_CLOEXEC 0
260 #endif
261 #if defined (O_BINARY)
262 // Ok?
263 #elif defined (_O_BINARY)
264 /* Windows' open(2) call defaults to text!  */
265 #define O_BINARY _O_BINARY
266 #else
267 #define O_BINARY 0
268 #endif
269 
270 static inline cpp_hashnode *cpp_node (tree id)
271 {
272   return CPP_HASHNODE (GCC_IDENT_TO_HT_IDENT (id));
273 }
274 
275 static inline tree identifier (const cpp_hashnode *node)
276 {
277   return HT_IDENT_TO_GCC_IDENT (HT_NODE (const_cast<cpp_hashnode *> (node)));
278 }
279 
280 /* Id for dumping module information.  */
281 int module_dump_id;
282 
283 /* We have a special module owner.  */
284 #define MODULE_UNKNOWN (~0U)    /* Not yet known.  */
285 
286 /* Prefix for section names.  */
287 #define MOD_SNAME_PFX ".gnu.c++"
288 
289 /* Format a version for user consumption.  */
290 
291 typedef char verstr_t[32];
292 static void
293 version2string (unsigned version, verstr_t &out)
294 {
295   unsigned major = MODULE_MAJOR (version);
296   unsigned minor = MODULE_MINOR (version);
297 
298   if (IS_EXPERIMENTAL (version))
299     sprintf (out, "%04u/%02u/%02u-%02u:%02u%s",
300 	     2000 + major / 10000, (major / 100) % 100, (major % 100),
301 	     minor / 100, minor % 100,
302 	     EXPERIMENT ("", " (experimental)"));
303   else
304     sprintf (out, "%u.%u", major, minor);
305 }
306 
307 /* Include files to note translation for.  */
308 static vec<const char *, va_heap, vl_embed> *note_includes;
309 
310 /* Modules to note CMI pathnames.  */
311 static vec<const char *, va_heap, vl_embed> *note_cmis;
312 
313 /* Traits to hash an arbitrary pointer.  Entries are not deletable,
314    and removal is a noop (removal needed upon destruction).  */
315 template <typename T>
316 struct nodel_ptr_hash : pointer_hash<T>, typed_noop_remove <T *> {
317   /* Nothing is deletable.  Everything is insertable.  */
318   static bool is_deleted (T *) { return false; }
319   static void mark_deleted (T *) { gcc_unreachable (); }
320 };
321 
322 /* Map from pointer to signed integer.   */
323 typedef simple_hashmap_traits<nodel_ptr_hash<void>, int> ptr_int_traits;
324 typedef hash_map<void *,signed,ptr_int_traits> ptr_int_hash_map;
325 
326 /********************************************************************/
327 /* Basic streaming & ELF.  Serialization is usually via mmap.  For
328    writing we slide a buffer over the output file, syncing it
329    appropriately.  For reading we simply map the whole file (as a
330    file-backed read-only map -- it's just address space, leaving the
331    OS pager to deal with getting the data to us).  Some buffers need
332    to be more conventional malloc'd contents.   */
333 
334 /* Variable length buffer.  */
335 
336 class data {
337 public:
338   class allocator {
339   public:
340     /* Tools tend to moan if the dtor's not virtual.  */
341     virtual ~allocator () {}
342 
343   public:
344     void grow (data &obj, unsigned needed, bool exact);
345     void shrink (data &obj);
346 
347   public:
348     virtual char *grow (char *ptr, unsigned needed);
349     virtual void shrink (char *ptr);
350   };
351 
352 public:
353   char *buffer;		/* Buffer being transferred.  */
354   /* Although size_t would be the usual size, we know we never get
355      more than 4GB of buffer -- because that's the limit of the
356      encapsulation format.  And if you need bigger imports, you're
357      doing it wrong.  */
358   unsigned size;	/* Allocated size of buffer.  */
359   unsigned pos;		/* Position in buffer.  */
360 
361 public:
362   data ()
363     :buffer (NULL), size (0), pos (0)
364   {
365   }
366   ~data ()
367   {
368     /* Make sure the derived and/or using class know what they're
369        doing.  */
370     gcc_checking_assert (!buffer);
371   }
372 
373 protected:
374   char *use (unsigned count)
375   {
376     if (size < pos + count)
377       return NULL;
378     char *res = &buffer[pos];
379     pos += count;
380     return res;
381   }
382 
383 public:
384   void unuse (unsigned count)
385   {
386     pos -= count;
387   }
388 
389 public:
390   static allocator simple_memory;
391 };
392 
393 /* The simple data allocator.  */
394 data::allocator data::simple_memory;
395 
396 /* Grow buffer to at least size NEEDED.  */
397 
398 void
399 data::allocator::grow (data &obj, unsigned needed, bool exact)
400 {
401   gcc_checking_assert (needed ? needed > obj.size : !obj.size);
402   if (!needed)
403     /* Pick a default size.  */
404     needed = EXPERIMENT (100, 1000);
405 
406   if (!exact)
407     needed *= 2;
408   obj.buffer = grow (obj.buffer, needed);
409   if (obj.buffer)
410     obj.size = needed;
411   else
412     obj.pos = obj.size = 0;
413 }
414 
415 /* Free a buffer.  */
416 
417 void
418 data::allocator::shrink (data &obj)
419 {
420   shrink (obj.buffer);
421   obj.buffer = NULL;
422   obj.size = 0;
423 }
424 
425 char *
426 data::allocator::grow (char *ptr, unsigned needed)
427 {
428   return XRESIZEVAR (char, ptr, needed);
429 }
430 
431 void
432 data::allocator::shrink (char *ptr)
433 {
434   XDELETEVEC (ptr);
435 }
436 
437 /* Byte streamer base.   Buffer with read/write position and smarts
438    for single bits.  */
439 
440 class bytes : public data {
441 public:
442   typedef data parent;
443 
444 protected:
445   uint32_t bit_val;	/* Bit buffer.  */
446   unsigned bit_pos;	/* Next bit in bit buffer.  */
447 
448 public:
449   bytes ()
450     :parent (), bit_val (0), bit_pos (0)
451   {}
452   ~bytes ()
453   {
454   }
455 
456 protected:
457   unsigned calc_crc (unsigned) const;
458 
459 protected:
460   /* Finish bit packet.  Rewind the bytes not used.  */
461   unsigned bit_flush ()
462   {
463     gcc_assert (bit_pos);
464     unsigned bytes = (bit_pos + 7) / 8;
465     unuse (4 - bytes);
466     bit_pos = 0;
467     bit_val = 0;
468     return bytes;
469   }
470 };
471 
472 /* Calculate the crc32 of the buffer.  Note the CRC is stored in the
473    first 4 bytes, so don't include them.  */
474 
475 unsigned
476 bytes::calc_crc (unsigned l) const
477 {
478   unsigned crc = 0;
479   for (size_t ix = 4; ix < l; ix++)
480     crc = crc32_byte (crc, buffer[ix]);
481   return crc;
482 }
483 
484 class elf_in;
485 
486 /* Byte stream reader.  */
487 
488 class bytes_in : public bytes {
489   typedef bytes parent;
490 
491 protected:
492   bool overrun;  /* Sticky read-too-much flag.  */
493 
494 public:
495   bytes_in ()
496     : parent (), overrun (false)
497   {
498   }
499   ~bytes_in ()
500   {
501   }
502 
503 public:
504   /* Begin reading a named section.  */
505   bool begin (location_t loc, elf_in *src, const char *name);
506   /* Begin reading a numbered section with optional name.  */
507   bool begin (location_t loc, elf_in *src, unsigned, const char * = NULL);
508   /* Complete reading a buffer.  Propagate errors and return true on
509      success.  */
510   bool end (elf_in *src);
511   /* Return true if there is unread data.  */
512   bool more_p () const
513   {
514     return pos != size;
515   }
516 
517 public:
518   /* Start reading at OFFSET.  */
519   void random_access (unsigned offset)
520   {
521     if (offset > size)
522       set_overrun ();
523     pos = offset;
524     bit_pos = bit_val = 0;
525   }
526 
527 public:
528   void align (unsigned boundary)
529   {
530     if (unsigned pad = pos & (boundary - 1))
531       read (boundary - pad);
532   }
533 
534 public:
535   const char *read (unsigned count)
536   {
537     char *ptr = use (count);
538     if (!ptr)
539       set_overrun ();
540     return ptr;
541   }
542 
543 public:
544   bool check_crc () const;
545   /* We store the CRC in the first 4 bytes, using host endianness.  */
546   unsigned get_crc () const
547   {
548     return *(const unsigned *)&buffer[0];
549   }
550 
551 public:
552   /* Manipulate the overrun flag.  */
553   bool get_overrun () const
554   {
555     return overrun;
556   }
557   void set_overrun ()
558   {
559     overrun = true;
560   }
561 
562 public:
563   unsigned u32 ();  	/* Read uncompressed integer.  */
564 
565 public:
566   bool b ();	    	/* Read a bool.  */
567   void bflush ();	/* Completed a block of bools.  */
568 
569 private:
570   void bfill ();	/* Get the next block of bools.  */
571 
572 public:
573   int c ();		/* Read a char.  */
574   int i ();		/* Read a signed int.  */
575   unsigned u ();	/* Read an unsigned int.  */
576   size_t z ();		/* Read a size_t.  */
577   HOST_WIDE_INT wi ();  /* Read a HOST_WIDE_INT.  */
578   unsigned HOST_WIDE_INT wu (); /* Read an unsigned HOST_WIDE_INT.  */
579   const char *str (size_t * = NULL); /* Read a string.  */
580   const void *buf (size_t); /* Read a fixed-length buffer.  */
581   cpp_hashnode *cpp_node (); /* Read a cpp node.  */
582 };
583 
584 /* Verify the buffer's CRC is correct.  */
585 
586 bool
587 bytes_in::check_crc () const
588 {
589   if (size < 4)
590     return false;
591 
592   unsigned c_crc = calc_crc (size);
593   if (c_crc != get_crc ())
594     return false;
595 
596   return true;
597 }
598 
599 class elf_out;
600 
601 /* Byte stream writer.  */
602 
603 class bytes_out : public bytes {
604   typedef bytes parent;
605 
606 public:
607   allocator *memory;	/* Obtainer of memory.  */
608 
609 public:
610   bytes_out (allocator *memory)
611     : parent (), memory (memory)
612   {
613   }
614   ~bytes_out ()
615   {
616   }
617 
618 public:
619   bool streaming_p () const
620   {
621     return memory != NULL;
622   }
623 
624 public:
625   void set_crc (unsigned *crc_ptr);
626 
627 public:
628   /* Begin writing, maybe reserve space for CRC.  */
629   void begin (bool need_crc = true);
630   /* Finish writing.  Spill to section by number.  */
631   unsigned end (elf_out *, unsigned, unsigned *crc_ptr = NULL);
632 
633 public:
634   void align (unsigned boundary)
635   {
636     if (unsigned pad = pos & (boundary - 1))
637       write (boundary - pad);
638   }
639 
640 public:
641   char *write (unsigned count, bool exact = false)
642   {
643     if (size < pos + count)
644       memory->grow (*this, pos + count, exact);
645     return use (count);
646   }
647 
648 public:
649   void u32 (unsigned);  /* Write uncompressed integer.  */
650 
651 public:
652   void b (bool);	/* Write bool.  */
653   void bflush ();	/* Finish block of bools.  */
654 
655 public:
656   void c (unsigned char); /* Write unsigned char.  */
657   void i (int);		/* Write signed int.  */
658   void u (unsigned);	/* Write unsigned int.  */
659   void z (size_t s);	/* Write size_t.  */
660   void wi (HOST_WIDE_INT); /* Write HOST_WIDE_INT.  */
661   void wu (unsigned HOST_WIDE_INT);  /* Write unsigned HOST_WIDE_INT.  */
662   void str (const char *ptr)
663   {
664     str (ptr, strlen (ptr));
665   }
666   void cpp_node (const cpp_hashnode *node)
667   {
668     str ((const char *)NODE_NAME (node), NODE_LEN (node));
669   }
670   void str (const char *, size_t);  /* Write string of known length.  */
671   void buf (const void *, size_t);  /* Write fixed length buffer.  */
672   void *buf (size_t); /* Create a writable buffer */
673 
674 public:
675   /* Format a NUL-terminated raw string.  */
676   void printf (const char *, ...) ATTRIBUTE_PRINTF_2;
677   void print_time (const char *, const tm *, const char *);
678 
679 public:
680   /* Dump instrumentation.  */
681   static void instrument ();
682 
683 protected:
684   /* Instrumentation.  */
685   static unsigned spans[4];
686   static unsigned lengths[4];
687   static int is_set;
688 };
689 
690 /* Instrumentation.  */
691 unsigned bytes_out::spans[4];
692 unsigned bytes_out::lengths[4];
693 int bytes_out::is_set = -1;
694 
695 /* If CRC_PTR non-null, set the CRC of the buffer.  Mix the CRC into
696    that pointed to by CRC_PTR.  */
697 
698 void
699 bytes_out::set_crc (unsigned *crc_ptr)
700 {
701   if (crc_ptr)
702     {
703       gcc_checking_assert (pos >= 4);
704 
705       unsigned crc = calc_crc (pos);
706       unsigned accum = *crc_ptr;
707       /* Only mix the existing *CRC_PTR if it is non-zero.  */
708       accum = accum ? crc32_unsigned (accum, crc) : crc;
709       *crc_ptr = accum;
710 
711       /* Buffer will be sufficiently aligned.  */
712       *(unsigned *)buffer = crc;
713     }
714 }
715 
716 /* Finish a set of bools.  */
717 
718 void
719 bytes_out::bflush ()
720 {
721   if (bit_pos)
722     {
723       u32 (bit_val);
724       lengths[2] += bit_flush ();
725     }
726   spans[2]++;
727   is_set = -1;
728 }
729 
730 void
731 bytes_in::bflush ()
732 {
733   if (bit_pos)
734     bit_flush ();
735 }
736 
737 /* When reading, we don't know how many bools we'll read in.  So read
738    4 bytes-worth, and then rewind when flushing if we didn't need them
739    all.  You can't have a block of bools closer than 4 bytes to the
740    end of the buffer.  */
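
/* For example, if only nine bools were streamed, the writer's bflush
   rewinds two of the four bytes emitted via u32, and the reader's
   bflush rewinds the same two after bfill.  */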
741 
742 void
743 bytes_in::bfill ()
744 {
745   bit_val = u32 ();
746 }
747 
748 /* Bools are packed into bytes.  You cannot mix bools and non-bools.
749    You must call bflush before emitting another type.  So batch your
750    bools.
751 
752    It may be worth optimizing for most bools being zero.  Some kind of
753    run-length encoding?  */
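
/* Illustrative pairing only; SEC names a bytes_out on the writing side
   and the matching bytes_in on the reading side:

     sec.b (is_inline);          bool is_inline = sec.b ();
     sec.b (is_exported);        bool is_exported = sec.b ();
     sec.bflush ();              sec.bflush ();
     sec.u (num_fields);         unsigned num_fields = sec.u ();  */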
754 
755 void
756 bytes_out::b (bool x)
757 {
758   if (is_set != x)
759     {
760       is_set = x;
761       spans[x]++;
762     }
763   lengths[x]++;
764   bit_val |= unsigned (x) << bit_pos++;
765   if (bit_pos == 32)
766     {
767       u32 (bit_val);
768       lengths[2] += bit_flush ();
769     }
770 }
771 
772 bool
773 bytes_in::b ()
774 {
775   if (!bit_pos)
776     bfill ();
777   bool v = (bit_val >> bit_pos++) & 1;
778   if (bit_pos == 32)
779     bit_flush ();
780   return v;
781 }
782 
783 /* Exactly 4 bytes.  Used internally for bool packing and a few other
784    places.  We can't simply use uint32_t because (a) alignment and
785    (b) we need little-endian for the bool streaming rewinding to make
786    sense.  */
787 
788 void
789 bytes_out::u32 (unsigned val)
790 {
791   if (char *ptr = write (4))
792     {
793       ptr[0] = val;
794       ptr[1] = val >> 8;
795       ptr[2] = val >> 16;
796       ptr[3] = val >> 24;
797     }
798 }
799 
800 unsigned
801 bytes_in::u32 ()
802 {
803   unsigned val = 0;
804   if (const char *ptr = read (4))
805     {
806       val |= (unsigned char)ptr[0];
807       val |= (unsigned char)ptr[1] << 8;
808       val |= (unsigned char)ptr[2] << 16;
809       val |= (unsigned char)ptr[3] << 24;
810     }
811 
812   return val;
813 }
814 
815 /* Chars are unsigned and written as single bytes. */
816 
817 void
818 bytes_out::c (unsigned char v)
819 {
820   if (char *ptr = write (1))
821     *ptr = v;
822 }
823 
824 int
825 bytes_in::c ()
826 {
827   int v = 0;
828   if (const char *ptr = read (1))
829     v = (unsigned char)ptr[0];
830   return v;
831 }
832 
833 /* Ints fitting in 7 bits are written as a single byte.  Otherwise a
834    3-bit count of following big-endian bytes; 4 bits go in the first byte.  */
835 
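/* For example, with the routines below, 5 encodes as the single byte
   0x05, -3 as the single byte 0x7d, and 100 as the two bytes 0x80
   0x64.  */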
836 void
837 bytes_out::i (int v)
838 {
839   if (char *ptr = write (1))
840     {
841       if (v <= 0x3f && v >= -0x40)
842 	*ptr = v & 0x7f;
843       else
844 	{
845 	  unsigned bytes = 0;
846 	  int probe;
847 	  if (v >= 0)
848 	    for (probe = v >> 8; probe > 0x7; probe >>= 8)
849 	      bytes++;
850 	  else
851 	    for (probe = v >> 8; probe < -0x8; probe >>= 8)
852 	      bytes++;
853 	  *ptr = 0x80 | bytes << 4 | (probe & 0xf);
854 	  if ((ptr = write (++bytes)))
855 	    for (; bytes--; v >>= 8)
856 	      ptr[bytes] = v & 0xff;
857 	}
858     }
859 }
860 
861 int
862 bytes_in::i ()
863 {
864   int v = 0;
865   if (const char *ptr = read (1))
866     {
867       v = *ptr & 0xff;
868       if (v & 0x80)
869 	{
870 	  unsigned bytes = (v >> 4) & 0x7;
871 	  v &= 0xf;
872 	  if (v & 0x8)
873 	    v |= -1 ^ 0x7;
874 	  /* unsigned necessary due to left shifts of -ve values.  */
875 	  unsigned uv = unsigned (v);
876 	  if ((ptr = read (++bytes)))
877 	    while (bytes--)
878 	      uv = (uv << 8) | (*ptr++ & 0xff);
879 	  v = int (uv);
880 	}
881       else if (v & 0x40)
882 	v |= -1 ^ 0x3f;
883     }
884 
885   return v;
886 }
887 
888 void
889 bytes_out::u (unsigned v)
890 {
891   if (char *ptr = write (1))
892     {
893       if (v <= 0x7f)
894 	*ptr = v;
895       else
896 	{
897 	  unsigned bytes = 0;
898 	  unsigned probe;
899 	  for (probe = v >> 8; probe > 0xf; probe >>= 8)
900 	    bytes++;
901 	  *ptr = 0x80 | bytes << 4 | probe;
902 	  if ((ptr = write (++bytes)))
903 	    for (; bytes--; v >>= 8)
904 	      ptr[bytes] = v & 0xff;
905 	}
906     }
907 }
908 
909 unsigned
910 bytes_in::u ()
911 {
912   unsigned v = 0;
913 
914   if (const char *ptr = read (1))
915     {
916       v = *ptr & 0xff;
917       if (v & 0x80)
918 	{
919 	  unsigned bytes = (v >> 4) & 0x7;
920 	  v &= 0xf;
921 	  if ((ptr = read (++bytes)))
922 	    while (bytes--)
923 	      v = (v << 8) | (*ptr++ & 0xff);
924 	}
925     }
926 
927   return v;
928 }
929 
930 void
931 bytes_out::wi (HOST_WIDE_INT v)
932 {
933   if (char *ptr = write (1))
934     {
935       if (v <= 0x3f && v >= -0x40)
936 	*ptr = v & 0x7f;
937       else
938 	{
939 	  unsigned bytes = 0;
940 	  HOST_WIDE_INT probe;
941 	  if (v >= 0)
942 	    for (probe = v >> 8; probe > 0x7; probe >>= 8)
943 	      bytes++;
944 	  else
945 	    for (probe = v >> 8; probe < -0x8; probe >>= 8)
946 	      bytes++;
947 	  *ptr = 0x80 | bytes << 4 | (probe & 0xf);
948 	  if ((ptr = write (++bytes)))
949 	    for (; bytes--; v >>= 8)
950 	      ptr[bytes] = v & 0xff;
951 	}
952     }
953 }
954 
955 HOST_WIDE_INT
956 bytes_in::wi ()
957 {
958   HOST_WIDE_INT v = 0;
959   if (const char *ptr = read (1))
960     {
961       v = *ptr & 0xff;
962       if (v & 0x80)
963 	{
964 	  unsigned bytes = (v >> 4) & 0x7;
965 	  v &= 0xf;
966 	  if (v & 0x8)
967 	    v |= -1 ^ 0x7;
968 	  /* unsigned necessary due to left shifts of -ve values.  */
969 	  unsigned HOST_WIDE_INT uv = (unsigned HOST_WIDE_INT) v;
970 	  if ((ptr = read (++bytes)))
971 	    while (bytes--)
972 	      uv = (uv << 8) | (*ptr++ & 0xff);
973 	  v = (HOST_WIDE_INT) uv;
974 	}
975       else if (v & 0x40)
976 	v |= -1 ^ 0x3f;
977     }
978 
979   return v;
980 }
981 
982 /* Unsigned wide ints are just written as signed wide ints.  */
983 
984 inline void
985 bytes_out::wu (unsigned HOST_WIDE_INT v)
986 {
987   wi ((HOST_WIDE_INT) v);
988 }
989 
990 inline unsigned HOST_WIDE_INT
991 bytes_in::wu ()
992 {
993   return (unsigned HOST_WIDE_INT) wi ();
994 }
995 
996 /* size_t written as unsigned or unsigned wide int.  */
997 
998 inline void
999 bytes_out::z (size_t s)
1000 {
1001   if (sizeof (s) == sizeof (unsigned))
1002     u (s);
1003   else
1004     wu (s);
1005 }
1006 
1007 inline size_t
1008 bytes_in::z ()
1009 {
1010   if (sizeof (size_t) == sizeof (unsigned))
1011     return u ();
1012   else
1013     return wu ();
1014 }
1015 
1016 /* Buffer simply memcpied.  */
1017 void *
1018 bytes_out::buf (size_t len)
1019 {
1020   align (sizeof (void *) * 2);
1021   return write (len);
1022 }
1023 
1024 void
1025 bytes_out::buf (const void *src, size_t len)
1026 {
1027   if (void *ptr = buf (len))
1028     memcpy (ptr, src, len);
1029 }
1030 
1031 const void *
1032 bytes_in::buf (size_t len)
1033 {
1034   align (sizeof (void *) * 2);
1035   const char *ptr = read (len);
1036 
1037   return ptr;
1038 }
1039 
1040 /* Strings are written as a size_t length, followed by the buffer.
1041    Make sure there's a NUL terminator on read.  */
1042 
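/* For example, str ("hi", 2) writes the length 2 via z (), then buf ()
   pads to its alignment and emits the three bytes 'h', 'i', NUL.  */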
1043 void
1044 bytes_out::str (const char *string, size_t len)
1045 {
1046   z (len);
1047   if (len)
1048     {
1049       gcc_checking_assert (!string[len]);
1050       buf (string, len + 1);
1051     }
1052 }
1053 
1054 const char *
1055 bytes_in::str (size_t *len_p)
1056 {
1057   size_t len = z ();
1058 
1059   /* We're about to trust some user data.  */
1060   if (overrun)
1061     len = 0;
1062   if (len_p)
1063     *len_p = len;
1064   const char *str = NULL;
1065   if (len)
1066     {
1067       str = reinterpret_cast<const char *> (buf (len + 1));
1068       if (!str || str[len])
1069 	{
1070 	  set_overrun ();
1071 	  str = NULL;
1072 	}
1073     }
1074   return str ? str : "";
1075 }
1076 
1077 cpp_hashnode *
1078 bytes_in::cpp_node ()
1079 {
1080   size_t len;
1081   const char *s = str (&len);
1082   if (!len)
1083     return NULL;
1084   return ::cpp_node (get_identifier_with_length (s, len));
1085 }
1086 
1087 /* Format a string directly to the buffer, including a terminating
1088    NUL.  Intended for human consumption.  */
1089 
1090 void
1091 bytes_out::printf (const char *format, ...)
1092 {
1093   va_list args;
1094   /* Exercise buffer expansion.  */
1095   size_t len = EXPERIMENT (10, 500);
1096 
1097   while (char *ptr = write (len))
1098     {
1099       va_start (args, format);
1100       size_t actual = vsnprintf (ptr, len, format, args) + 1;
1101       va_end (args);
1102       if (actual <= len)
1103 	{
1104 	  unuse (len - actual);
1105 	  break;
1106 	}
1107       unuse (len);
1108       len = actual;
1109     }
1110 }
1111 
1112 void
1113 bytes_out::print_time (const char *kind, const tm *time, const char *tz)
1114 {
1115   printf ("%stime: %4u/%02u/%02u %02u:%02u:%02u %s",
1116 	  kind, time->tm_year + 1900, time->tm_mon + 1, time->tm_mday,
1117 	  time->tm_hour, time->tm_min, time->tm_sec, tz);
1118 }
1119 
1120 /* Encapsulated Lazy Records Of Named Declarations.
1121    Header: Stunningly Elf32_Ehdr-like
1122    Sections: Sectional data
1123      [1-N) : User data sections
1124      N .strtab  : strings, stunningly ELF STRTAB-like
1125    Index: Section table, stunningly ELF32_Shdr-like.   */
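
/* The result can be inspected with the usual binutils; for instance
   "readelf -S" lists the numbered sections alongside the named
   MOD_SNAME_PFX ones.  */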
1126 
1127 class elf {
1128 protected:
1129   /* Constants used within the format.  */
1130   enum private_constants {
1131     /* File kind. */
1132     ET_NONE = 0,
1133     EM_NONE = 0,
1134     OSABI_NONE = 0,
1135 
1136     /* File format. */
1137     EV_CURRENT = 1,
1138     CLASS32 = 1,
1139     DATA2LSB = 1,
1140     DATA2MSB = 2,
1141 
1142     /* Section numbering.  */
1143     SHN_UNDEF = 0,
1144     SHN_LORESERVE = 0xff00,
1145     SHN_XINDEX = 0xffff,
1146 
1147     /* Section types.  */
1148     SHT_NONE = 0,	/* No contents.  */
1149     SHT_PROGBITS = 1, /* Random bytes.  */
1150     SHT_STRTAB = 3,	/* A string table.  */
1151 
1152     /* Section flags.  */
1153     SHF_NONE = 0x00,	/* Nothing.  */
1154     SHF_STRINGS = 0x20,  /* NUL-Terminated strings.  */
1155 
1156     /* I really hope we do not get CMI files larger than 4GB.  */
1157     MY_CLASS = CLASS32,
1158     /* It is host endianness that is relevant.  */
1159     MY_ENDIAN = DATA2LSB
1160 #ifdef WORDS_BIGENDIAN
1161     ^ DATA2LSB ^ DATA2MSB
1162 #endif
1163   };
1164 
1165 public:
1166   /* Constants visible to users.  */
1167   enum public_constants {
1168     /* Special error codes.  Breaking layering a bit.  */
1169     E_BAD_DATA = -1,  /* Random unexpected data errors.  */
1170     E_BAD_LAZY = -2,  /* Badly ordered laziness.  */
1171     E_BAD_IMPORT = -3 /* A nested import failed.  */
1172   };
1173 
1174 protected:
1175   /* File identification.  On-disk representation.  */
1176   struct ident {
1177     uint8_t magic[4];	/* 0x7f, 'E', 'L', 'F' */
1178     uint8_t klass;	/* 4:CLASS32 */
1179     uint8_t data;	/* 5:DATA2[LM]SB */
1180     uint8_t version;	/* 6:EV_CURRENT  */
1181     uint8_t osabi;	/* 7:OSABI_NONE */
1182     uint8_t abiver;	/* 8: 0 */
1183     uint8_t pad[7];	/* 9-15 */
1184   };
1185   /* File header.  On-disk representation.  */
1186   struct header {
1187     struct ident ident;
1188     uint16_t type;	/* ET_NONE */
1189     uint16_t machine;	/* EM_NONE */
1190     uint32_t version;	/* EV_CURRENT */
1191     uint32_t entry;	/* 0 */
1192     uint32_t phoff;	/* 0 */
1193     uint32_t shoff;	/* Section Header Offset in file */
1194     uint32_t flags;
1195     uint16_t ehsize;	/* ELROND Header SIZE -- sizeof (header) */
1196     uint16_t phentsize; /* 0 */
1197     uint16_t phnum;	/* 0 */
1198     uint16_t shentsize; /* Section Header SIZE -- sizeof (section) */
1199     uint16_t shnum;	/* Section Header NUM */
1200     uint16_t shstrndx;	/* Section Header STRing iNDeX */
1201   };
1202   /* File section.  On-disk representation.  */
1203   struct section {
1204     uint32_t name;	/* String table offset.  */
1205     uint32_t type;	/* SHT_* */
1206     uint32_t flags;	/* SHF_* */
1207     uint32_t addr;	/* 0 */
1208     uint32_t offset;	/* OFFSET in file */
1209     uint32_t size;	/* SIZE of section */
1210     uint32_t link;	/* 0 */
1211     uint32_t info;	/* 0 */
1212     uint32_t addralign; /* 0 */
1213     uint32_t entsize;	/* ENTry SIZE, usually 0 */
1214   };
1215 
1216 protected:
1217   data hdr;	/* The header.  */
1218   data sectab; 	/* The section table.  */
1219   data strtab;  /* String table.  */
1220   int fd;   	/* File descriptor we're reading or writing.  */
1221   int err; 	/* Sticky error code.  */
1222 
1223 public:
1224   /* Construct from file descriptor FD.  E is errno if FD is invalid.  */
1225   elf (int fd, int e)
1226     :hdr (), sectab (), strtab (), fd (fd), err (fd >= 0 ? 0 : e)
1227   {}
1228   ~elf ()
1229   {
1230     gcc_checking_assert (fd < 0 && !hdr.buffer
1231 			 && !sectab.buffer && !strtab.buffer);
1232   }
1233 
1234 public:
1235   /* Return the error, if we have an error.  */
1236   int get_error () const
1237   {
1238     return err;
1239   }
1240   /* Set the error, unless it's already been set.  */
1241   void set_error (int e = E_BAD_DATA)
1242   {
1243     if (!err)
1244       err = e;
1245   }
1246   /* Get an error string.  */
1247   const char *get_error (const char *) const;
1248 
1249 public:
1250   /* Begin reading/writing file.  Return false on error.  */
1251   bool begin () const
1252   {
1253     return !get_error ();
1254   }
1255   /* Finish reading/writing file.  Return false on error.  */
1256   bool end ();
1257 };
1258 
1259 /* Return error string.  */
1260 
1261 const char *
1262 elf::get_error (const char *name) const
1263 {
1264   if (!name)
1265     return "Unknown CMI mapping";
1266 
1267   switch (err)
1268     {
1269     case 0:
1270       gcc_unreachable ();
1271     case E_BAD_DATA:
1272       return "Bad file data";
1273     case E_BAD_IMPORT:
1274       return "Bad import dependency";
1275     case E_BAD_LAZY:
1276       return "Bad lazy ordering";
1277     default:
1278       return xstrerror (err);
1279     }
1280 }
1281 
1282 /* Finish file.  Return true if there is no error.  */
1283 
1284 bool
1285 elf::end ()
1286 {
1287   /* Close the stream and free the section table.  */
1288   if (fd >= 0 && close (fd))
1289     set_error (errno);
1290   fd = -1;
1291 
1292   return !get_error ();
1293 }
1294 
1295 /* ELROND reader.  */
1296 
1297 class elf_in : public elf {
1298   typedef elf parent;
1299 
1300 private:
1301   /* For freezing & defrosting.  */
1302 #if !defined (HOST_LACKS_INODE_NUMBERS)
1303   dev_t device;
1304   ino_t inode;
1305 #endif
1306 
1307 public:
1308   elf_in (int fd, int e)
1309     :parent (fd, e)
1310   {
1311   }
1312   ~elf_in ()
1313   {
1314   }
1315 
1316 public:
1317   bool is_frozen () const
1318   {
1319     return fd < 0 && hdr.pos;
1320   }
1321   bool is_freezable () const
1322   {
1323     return fd >= 0 && hdr.pos;
1324   }
1325   void freeze ();
1326   bool defrost (const char *);
1327 
1328   /* If BYTES is in the mmapped area, allocate a new buffer for it.  */
1329   void preserve (bytes_in &bytes ATTRIBUTE_UNUSED)
1330   {
1331 #if MAPPED_READING
1332     if (hdr.buffer && bytes.buffer >= hdr.buffer
1333 	&& bytes.buffer < hdr.buffer + hdr.pos)
1334       {
1335 	char *buf = bytes.buffer;
1336 	bytes.buffer = data::simple_memory.grow (NULL, bytes.size);
1337 	memcpy (bytes.buffer, buf, bytes.size);
1338       }
1339 #endif
1340   }
1341   /* If BYTES is not in SELF's mmapped area, free it.  SELF might be
1342      NULL. */
1343   static void release (elf_in *self ATTRIBUTE_UNUSED, bytes_in &bytes)
1344   {
1345 #if MAPPED_READING
1346     if (!(self && self->hdr.buffer && bytes.buffer >= self->hdr.buffer
1347 	  && bytes.buffer < self->hdr.buffer + self->hdr.pos))
1348 #endif
1349       data::simple_memory.shrink (bytes.buffer);
1350     bytes.buffer = NULL;
1351     bytes.size = 0;
1352   }
1353 
1354 public:
1355   static void grow (data &data, unsigned needed)
1356   {
1357     gcc_checking_assert (!data.buffer);
1358 #if !MAPPED_READING
1359     data.buffer = XNEWVEC (char, needed);
1360 #endif
1361     data.size = needed;
1362   }
1363   static void shrink (data &data)
1364   {
1365 #if !MAPPED_READING
1366     XDELETEVEC (data.buffer);
1367 #endif
1368     data.buffer = NULL;
1369     data.size = 0;
1370   }
1371 
1372 public:
1373   const section *get_section (unsigned s) const
1374   {
1375     if (s * sizeof (section) < sectab.size)
1376       return reinterpret_cast<const section *>
1377 	(&sectab.buffer[s * sizeof (section)]);
1378     else
1379       return NULL;
1380   }
1381   unsigned get_section_limit () const
1382   {
1383     return sectab.size / sizeof (section);
1384   }
1385 
1386 protected:
1387   const char *read (data *, unsigned, unsigned);
1388 
1389 public:
1390   /* Read section by number.  */
1391   bool read (data *d, const section *s)
1392   {
1393     return s && read (d, s->offset, s->size);
1394   }
1395 
1396   /* Find section by name.  */
1397   unsigned find (const char *name);
1398   /* Find section by index.  */
1399   const section *find (unsigned snum, unsigned type = SHT_PROGBITS);
1400 
1401 public:
1402   /* Release the string table, when we're done with it.  */
1403   void release ()
1404   {
1405     shrink (strtab);
1406   }
1407 
1408 public:
1409   bool begin (location_t);
1410   bool end ()
1411   {
1412     release ();
1413 #if MAPPED_READING
1414     if (hdr.buffer)
1415       munmap (hdr.buffer, hdr.pos);
1416     hdr.buffer = NULL;
1417 #endif
1418     shrink (sectab);
1419 
1420     return parent::end ();
1421   }
1422 
1423 public:
1424   /* Return string name at OFFSET.  Checks OFFSET range.  Always
1425      returns non-NULL.  We know offset 0 is an empty string.  */
1426   const char *name (unsigned offset)
1427   {
1428     return &strtab.buffer[offset < strtab.size ? offset : 0];
1429   }
1430 };
1431 
1432 /* ELROND writer.  */
1433 
1434 class elf_out : public elf, public data::allocator {
1435   typedef elf parent;
1436   /* Desired section alignment on disk.  */
1437   static const int SECTION_ALIGN = 16;
1438 
1439 private:
1440   ptr_int_hash_map identtab;	/* Map of IDENTIFIERS to strtab offsets. */
1441   unsigned pos;			/* Write position in file.  */
1442 #if MAPPED_WRITING
1443   unsigned offset;		/* Offset of the mapping.  */
1444   unsigned extent;		/* Length of mapping.  */
1445   unsigned page_size;		/* System page size.  */
1446 #endif
1447 
1448 public:
1449   elf_out (int fd, int e)
1450     :parent (fd, e), identtab (500), pos (0)
1451   {
1452 #if MAPPED_WRITING
1453     offset = extent = 0;
1454     page_size = sysconf (_SC_PAGE_SIZE);
1455     if (page_size < SECTION_ALIGN)
1456       /* Something really strange.  */
1457       set_error (EINVAL);
1458 #endif
1459   }
1460   ~elf_out ()
1461   {
1462     data::simple_memory.shrink (hdr);
1463     data::simple_memory.shrink (sectab);
1464     data::simple_memory.shrink (strtab);
1465   }
1466 
1467 #if MAPPED_WRITING
1468 private:
1469   void create_mapping (unsigned ext, bool extending = true);
1470   void remove_mapping ();
1471 #endif
1472 
1473 protected:
1474   using allocator::grow;
1475   virtual char *grow (char *, unsigned needed);
1476 #if MAPPED_WRITING
1477   using allocator::shrink;
1478   virtual void shrink (char *);
1479 #endif
1480 
1481 public:
1482   unsigned get_section_limit () const
1483   {
1484     return sectab.pos / sizeof (section);
1485   }
1486 
1487 protected:
1488   unsigned add (unsigned type, unsigned name = 0,
1489 		unsigned off = 0, unsigned size = 0, unsigned flags = SHF_NONE);
1490   unsigned write (const data &);
1491 #if MAPPED_WRITING
1492   unsigned write (const bytes_out &);
1493 #endif
1494 
1495 public:
1496   /* IDENTIFIER to strtab offset.  */
1497   unsigned name (tree ident);
1498   /* String literal to strtab offset.  */
1499   unsigned name (const char *n);
1500   /* Qualified name of DECL to strtab offset.  */
1501   unsigned qualified_name (tree decl, bool is_defn);
1502 
1503 private:
1504   unsigned strtab_write (const char *s, unsigned l);
1505   void strtab_write (tree decl, int);
1506 
1507 public:
1508   /* Add a section with contents or strings.  */
1509   unsigned add (const bytes_out &, bool string_p, unsigned name);
1510 
1511 public:
1512   /* Begin and end writing.  */
1513   bool begin ();
1514   bool end ();
1515 };
1516 
1517 /* Begin reading section NAME (of type PROGBITS) from SOURCE.
1518    Data always checked for CRC.  */
1519 
1520 bool
1521 bytes_in::begin (location_t loc, elf_in *source, const char *name)
1522 {
1523   unsigned snum = source->find (name);
1524 
1525   return begin (loc, source, snum, name);
1526 }
1527 
1528 /* Begin reading section numbered SNUM with NAME (may be NULL).  */
1529 
1530 bool
1531 bytes_in::begin (location_t loc, elf_in *source, unsigned snum, const char *name)
1532 {
1533   if (!source->read (this, source->find (snum))
1534       || !size || !check_crc ())
1535     {
1536       source->set_error (elf::E_BAD_DATA);
1537       source->shrink (*this);
1538       if (name)
1539 	error_at (loc, "section %qs is missing or corrupted", name);
1540       else
1541 	error_at (loc, "section #%u is missing or corrupted", snum);
1542       return false;
1543     }
1544   pos = 4;
1545   return true;
1546 }
1547 
1548 /* Finish reading a section.  */
1549 
1550 bool
1551 bytes_in::end (elf_in *src)
1552 {
1553   if (more_p ())
1554     set_overrun ();
1555   if (overrun)
1556     src->set_error ();
1557 
1558   src->shrink (*this);
1559 
1560   return !overrun;
1561 }
1562 
1563 /* Begin writing buffer.  */
1564 
1565 void
1566 bytes_out::begin (bool need_crc)
1567 {
1568   if (need_crc)
1569     pos = 4;
1570   memory->grow (*this, 0, false);
1571 }
1572 
1573 /* Finish writing buffer.  Stream out to SINK as named section NAME.
1574    Return section number or 0 on failure.  If CRC_PTR is non-null, CRC
1575    the data.  Otherwise it is a string section.  */
1576 
1577 unsigned
1578 bytes_out::end (elf_out *sink, unsigned name, unsigned *crc_ptr)
1579 {
1580   lengths[3] += pos;
1581   spans[3]++;
1582 
1583   set_crc (crc_ptr);
1584   unsigned sec_num = sink->add (*this, !crc_ptr, name);
1585   memory->shrink (*this);
1586 
1587   return sec_num;
1588 }
1589 
1590 /* Close and open the file, without destroying it.  */
1591 
1592 void
1593 elf_in::freeze ()
1594 {
1595   gcc_checking_assert (!is_frozen ());
1596 #if MAPPED_READING
1597   if (munmap (hdr.buffer, hdr.pos) < 0)
1598     set_error (errno);
1599 #endif
1600   if (close (fd) < 0)
1601     set_error (errno);
1602   fd = -1;
1603 }
1604 
1605 bool
1606 elf_in::defrost (const char *name)
1607 {
1608   gcc_checking_assert (is_frozen ());
1609   struct stat stat;
1610 
1611   fd = open (name, O_RDONLY | O_CLOEXEC | O_BINARY);
1612   if (fd < 0 || fstat (fd, &stat) < 0)
1613     set_error (errno);
1614   else
1615     {
1616       bool ok = hdr.pos == unsigned (stat.st_size);
1617 #ifndef HOST_LACKS_INODE_NUMBERS
1618       if (device != stat.st_dev
1619 	  || inode != stat.st_ino)
1620 	ok = false;
1621 #endif
1622       if (!ok)
1623 	set_error (EMFILE);
1624 #if MAPPED_READING
1625       if (ok)
1626 	{
1627 	  char *mapping = reinterpret_cast<char *>
1628 	    (mmap (NULL, hdr.pos, PROT_READ, MAP_SHARED, fd, 0));
1629 	  if (mapping == MAP_FAILED)
1630 	  fail:
1631 	      set_error (errno);
1632 	  else
1633 	    {
1634 	      if (madvise (mapping, hdr.pos, MADV_RANDOM))
1635 		goto fail;
1636 
1637 	      /* These buffers are never NULL in this case.  */
1638 	      strtab.buffer = mapping + strtab.pos;
1639 	      sectab.buffer = mapping + sectab.pos;
1640 	      hdr.buffer = mapping;
1641 	    }
1642 	}
1643 #endif
1644     }
1645 
1646   return !get_error ();
1647 }
1648 
1649 /* Read LENGTH bytes at POS into DATA.  Return the buffer, or NULL on error.  */
1650 
1651 const char *
1652 elf_in::read (data *data, unsigned pos, unsigned length)
1653 {
1654 #if MAPPED_READING
1655   if (pos + length > hdr.pos)
1656     {
1657       set_error (EINVAL);
1658       return NULL;
1659     }
1660 #else
1661   if (pos != ~0u && lseek (fd, pos, SEEK_SET) < 0)
1662     {
1663       set_error (errno);
1664       return NULL;
1665     }
1666 #endif
1667   grow (*data, length);
1668 #if MAPPED_READING
1669   data->buffer = hdr.buffer + pos;
1670 #else
1671   if (::read (fd, data->buffer, data->size) != ssize_t (length))
1672     {
1673       set_error (errno);
1674       shrink (*data);
1675       return NULL;
1676     }
1677 #endif
1678 
1679   return data->buffer;
1680 }
1681 
1682 /* Read section SNUM of TYPE.  Return section pointer or NULL on error.  */
1683 
1684 const elf::section *
1685 elf_in::find (unsigned snum, unsigned type)
1686 {
1687   const section *sec = get_section (snum);
1688   if (!snum || !sec || sec->type != type)
1689     return NULL;
1690   return sec;
1691 }
1692 
1693 /* Find the section named SNAME.  Return its section number, or zero
1694    on failure.  */
1695 
1696 unsigned
1697 elf_in::find (const char *sname)
1698 {
1699   for (unsigned pos = sectab.size; pos -= sizeof (section); )
1700     {
1701       const section *sec
1702 	= reinterpret_cast<const section *> (&sectab.buffer[pos]);
1703 
1704       if (0 == strcmp (sname, name (sec->name)))
1705 	return pos / sizeof (section);
1706     }
1707 
1708   return 0;
1709 }
1710 
1711 /* Begin reading file.  Verify header.  Pull in section and string
1712    tables.  Return true on success.  */
1713 
1714 bool
1715 elf_in::begin (location_t loc)
1716 {
1717   if (!parent::begin ())
1718     return false;
1719 
1720   struct stat stat;
1721   unsigned size = 0;
1722   if (!fstat (fd, &stat))
1723     {
1724 #if !defined (HOST_LACKS_INODE_NUMBERS)
1725       device = stat.st_dev;
1726       inode = stat.st_ino;
1727 #endif
1728       /* Never generate files > 4GB, check we've not been given one.  */
1729       if (stat.st_size == unsigned (stat.st_size))
1730 	size = unsigned (stat.st_size);
1731     }
1732 
1733 #if MAPPED_READING
1734   /* MAP_SHARED so that the file is backing store.  If someone else
1735      concurrently writes it, they're wrong.  */
1736   void *mapping = mmap (NULL, size, PROT_READ, MAP_SHARED, fd, 0);
1737   if (mapping == MAP_FAILED)
1738     {
1739     fail:
1740       set_error (errno);
1741       return false;
1742     }
1743   /* We'll be hopping over this randomly.  Some systems declare the
1744      first parm as char *, and others declare it as void *.  */
1745   if (madvise (reinterpret_cast <char *> (mapping), size, MADV_RANDOM))
1746     goto fail;
1747 
1748   hdr.buffer = (char *)mapping;
1749 #else
1750   read (&hdr, 0, sizeof (header));
1751 #endif
1752   hdr.pos = size; /* Record size of the file.  */
1753 
1754   const header *h = reinterpret_cast<const header *> (hdr.buffer);
1755   if (!h)
1756     return false;
1757 
1758   if (h->ident.magic[0] != 0x7f
1759       || h->ident.magic[1] != 'E'
1760       || h->ident.magic[2] != 'L'
1761       || h->ident.magic[3] != 'F')
1762     {
1763       error_at (loc, "not Encapsulated Lazy Records of Named Declarations");
1764     failed:
1765       shrink (hdr);
1766       return false;
1767     }
1768 
1769   /* We expect a particular format -- the ELF is not intended to be
1770      distributable.  */
1771   if (h->ident.klass != MY_CLASS
1772       || h->ident.data != MY_ENDIAN
1773       || h->ident.version != EV_CURRENT
1774       || h->type != ET_NONE
1775       || h->machine != EM_NONE
1776       || h->ident.osabi != OSABI_NONE)
1777     {
1778       error_at (loc, "unexpected encapsulation format or type");
1779       goto failed;
1780     }
1781 
1782   int e = -1;
1783   if (!h->shoff || h->shentsize != sizeof (section))
1784     {
1785     malformed:
1786       set_error (e);
1787       error_at (loc, "encapsulation is malformed");
1788       goto failed;
1789     }
1790 
1791   unsigned strndx = h->shstrndx;
1792   unsigned shnum = h->shnum;
1793   if (shnum == SHN_XINDEX)
1794     {
1795       if (!read (&sectab, h->shoff, sizeof (section)))
1796 	{
1797 	section_table_fail:
1798 	  e = errno;
1799 	  goto malformed;
1800 	}
1801       shnum = get_section (0)->size;
1802       /* Freeing does mean we'll re-read it in the case we're not
1803 	 mapping, but this is going to be rare.  */
1804       shrink (sectab);
1805     }
1806 
1807   if (!shnum)
1808     goto malformed;
1809 
1810   if (!read (&sectab, h->shoff, shnum * sizeof (section)))
1811     goto section_table_fail;
1812 
1813   if (strndx == SHN_XINDEX)
1814     strndx = get_section (0)->link;
1815 
1816   if (!read (&strtab, find (strndx, SHT_STRTAB)))
1817     goto malformed;
1818 
1819   /* The string table should be at least one byte, with NUL chars
1820      at either end.  */
1821   if (!(strtab.size && !strtab.buffer[0]
1822 	&& !strtab.buffer[strtab.size - 1]))
1823     goto malformed;
1824 
1825 #if MAPPED_READING
1826   /* Record the offsets of the section and string tables.  */
1827   sectab.pos = h->shoff;
1828   strtab.pos = shnum * sizeof (section);
1829 #else
1830   shrink (hdr);
1831 #endif
1832 
1833   return true;
1834 }
1835 
1836 /* Create a new mapping.  */
1837 
1838 #if MAPPED_WRITING
1839 void
1840 elf_out::create_mapping (unsigned ext, bool extending)
1841 {
1842 #ifndef HAVE_POSIX_FALLOCATE
1843 #define posix_fallocate(fd,off,len) ftruncate (fd, off + len)
1844 #endif
1845   void *mapping = MAP_FAILED;
1846   if (extending && ext < 1024 * 1024)
1847     {
1848       if (!posix_fallocate (fd, offset, ext * 2))
1849 	mapping = mmap (NULL, ext * 2, PROT_READ | PROT_WRITE,
1850 			MAP_SHARED, fd, offset);
1851       if (mapping != MAP_FAILED)
1852 	ext *= 2;
1853     }
1854   if (mapping == MAP_FAILED)
1855     {
1856       if (!extending || !posix_fallocate (fd, offset, ext))
1857 	mapping = mmap (NULL, ext, PROT_READ | PROT_WRITE,
1858 			MAP_SHARED, fd, offset);
1859       if (mapping == MAP_FAILED)
1860 	{
1861 	  set_error (errno);
1862 	  mapping = NULL;
1863 	  ext = 0;
1864 	}
1865     }
1866 #undef posix_fallocate
1867   hdr.buffer = (char *)mapping;
1868   extent = ext;
1869 }
1870 #endif
1871 
1872 /* Flush out the current mapping.  */
1873 
1874 #if MAPPED_WRITING
1875 void
1876 elf_out::remove_mapping ()
1877 {
1878   if (hdr.buffer)
1879     {
1880       /* MS_ASYNC does the right thing with the removed mapping, including a
1881 	 subsequent overlapping remap.  */
1882       if (msync (hdr.buffer, extent, MS_ASYNC)
1883 	  || munmap (hdr.buffer, extent))
1884 	/* We're somewhat screwed at this point.  */
1885 	set_error (errno);
1886     }
1887 
1888   hdr.buffer = NULL;
1889 }
1890 #endif
1891 
1892 /* Grow a mapping of PTR to be NEEDED bytes long.  This gets
1893    interesting if the new size grows the EXTENT.  */
1894 
1895 char *
1896 elf_out::grow (char *data, unsigned needed)
1897 {
1898   if (!data)
1899     {
1900       /* First allocation, check we're aligned.  */
1901       gcc_checking_assert (!(pos & (SECTION_ALIGN - 1)));
1902 #if MAPPED_WRITING
1903       data = hdr.buffer + (pos - offset);
1904 #endif
1905     }
1906 
1907 #if MAPPED_WRITING
1908   unsigned off = data - hdr.buffer;
1909   if (off + needed > extent)
1910     {
1911       /* We need to grow the mapping.  */
1912       unsigned lwm = off & ~(page_size - 1);
1913       unsigned hwm = (off + needed + page_size - 1) & ~(page_size - 1);
1914 
1915       gcc_checking_assert (hwm > extent);
1916 
1917       remove_mapping ();
1918 
1919       offset += lwm;
1920       create_mapping (extent < hwm - lwm ? hwm - lwm : extent);
1921 
1922       data = hdr.buffer + (off - lwm);
1923     }
1924 #else
1925   data = allocator::grow (data, needed);
1926 #endif
1927 
1928   return data;
1929 }
1930 
1931 #if MAPPED_WRITING
1932 /* Shrinking is a NOP.  */
1933 void
1934 elf_out::shrink (char *)
1935 {
1936 }
1937 #endif
1938 
1939 /* Write S of length L to the strtab buffer.  Include the terminating
1940    NUL in L if it should be written.  */
1941 
1942 unsigned
1943 elf_out::strtab_write (const char *s, unsigned l)
1944 {
1945   if (strtab.pos + l > strtab.size)
1946     data::simple_memory.grow (strtab, strtab.pos + l, false);
1947   memcpy (strtab.buffer + strtab.pos, s, l);
1948   unsigned res = strtab.pos;
1949   strtab.pos += l;
1950   return res;
1951 }
1952 
1953 /* Write the qualified name of DECL.  INNER >0 if this is a definition,
1954    <0 if it is a qualifier of an outer name, 0 for a plain name.  */
1955 
1956 void
1957 elf_out::strtab_write (tree decl, int inner)
1958 {
1959   tree ctx = CP_DECL_CONTEXT (decl);
1960   if (TYPE_P (ctx))
1961     ctx = TYPE_NAME (ctx);
1962   if (ctx != global_namespace)
1963     strtab_write (ctx, -1);
1964 
1965   tree name = DECL_NAME (decl);
1966   if (!name)
1967     name = DECL_ASSEMBLER_NAME_RAW (decl);
1968   strtab_write (IDENTIFIER_POINTER (name), IDENTIFIER_LENGTH (name));
1969 
1970   if (inner)
1971     strtab_write (&"::{}"[inner+1], 2);
1972 }
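/* To illustrate the encoding above: writing the qualified name of
   N::f as a definition emits "N", "::", "f" and then "{}", so the
   strtab holds "N::f{}" (qualified_name appends the trailing NUL);
   with INNER of zero no suffix is written, giving plain "N::f".
   Nested contexts repeat the "::" step, so A::B::f becomes
   "A::B::f{}".  */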
1973 
1974 /* Map IDENTIFIER IDENT to strtab offset.  Inserts into strtab if not
1975    already there.  */
1976 
1977 unsigned
1978 elf_out::name (tree ident)
1979 {
1980   unsigned res = 0;
1981   if (ident)
1982     {
1983       bool existed;
1984       int *slot = &identtab.get_or_insert (ident, &existed);
1985       if (!existed)
1986 	*slot = strtab_write (IDENTIFIER_POINTER (ident),
1987 			      IDENTIFIER_LENGTH (ident) + 1);
1988       res = *slot;
1989     }
1990   return res;
1991 }
1992 
1993 /* Map LITERAL to strtab offset.  Does not detect duplicates and
1994    expects LITERAL to remain live until strtab is written out.  */
1995 
1996 unsigned
1997 elf_out::name (const char *literal)
1998 {
1999   return strtab_write (literal, strlen (literal) + 1);
2000 }
2001 
2002 /* Map a DECL's qualified name to strtab offset.  Does not detect
2003    duplicates.  */
2004 
2005 unsigned
2006 elf_out::qualified_name (tree decl, bool is_defn)
2007 {
2008   gcc_checking_assert (DECL_P (decl) && decl != global_namespace);
2009   unsigned result = strtab.pos;
2010 
2011   strtab_write (decl, is_defn);
2012   strtab_write ("", 1);
2013 
2014   return result;
2015 }
2016 
2017 /* Add section to file.  Return section number.  TYPE & NAME identify
2018    the section.  OFF and SIZE identify the file location of its
2019    data.  FLAGS contains additional info.  */
2020 
2021 unsigned
2022 elf_out::add (unsigned type, unsigned name, unsigned off, unsigned size,
2023 	      unsigned flags)
2024 {
2025   gcc_checking_assert (!(off & (SECTION_ALIGN - 1)));
2026   if (sectab.pos + sizeof (section) > sectab.size)
2027     data::simple_memory.grow (sectab, sectab.pos + sizeof (section), false);
2028   section *sec = reinterpret_cast<section *> (sectab.buffer + sectab.pos);
2029   memset (sec, 0, sizeof (section));
2030   sec->type = type;
2031   sec->flags = flags;
2032   sec->name = name;
2033   sec->offset = off;
2034   sec->size = size;
2035   if (flags & SHF_STRINGS)
2036     sec->entsize = 1;
2037 
2038   unsigned res = sectab.pos;
2039   sectab.pos += sizeof (section);
2040   return res / sizeof (section);
2041 }
2042 
2043 /* Pad to the next alignment boundary, then write BUFFER to disk.
2044    Return the position of the start of the write, or zero on failure.   */
2045 
2046 unsigned
2047 elf_out::write (const data &buffer)
2048 {
2049 #if MAPPED_WRITING
2050   /* HDR is always mapped.  */
2051   if (&buffer != &hdr)
2052     {
2053       bytes_out out (this);
2054       grow (out, buffer.pos, true);
2055       if (out.buffer)
2056 	memcpy (out.buffer, buffer.buffer, buffer.pos);
2057       shrink (out);
2058     }
2059   else
2060     /* We should have been aligned during the first allocation.  */
2061     gcc_checking_assert (!(pos & (SECTION_ALIGN - 1)));
2062 #else
2063   if (::write (fd, buffer.buffer, buffer.pos) != ssize_t (buffer.pos))
2064     {
2065       set_error (errno);
2066       return 0;
2067     }
2068 #endif
2069   unsigned res = pos;
2070   pos += buffer.pos;
2071 
2072   if (unsigned padding = -pos & (SECTION_ALIGN - 1))
2073     {
2074 #if !MAPPED_WRITING
2075       /* Align the section on disk, should help the necessary copies.
2076 	 fseeking to extend is non-portable.  */
2077       static char zero[SECTION_ALIGN];
2078       if (::write (fd, &zero, padding) != ssize_t (padding))
2079 	set_error (errno);
2080 #endif
2081       pos += padding;
2082     }
2083   return res;
2084 }
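/* The padding computation above is the usual power-of-two round-up.
   For example, assuming SECTION_ALIGN were 16, a POS of 70 gives
   -70 & 15 == 10, so ten zero bytes are written (or skipped when
   mapped) and the next section starts at offset 80.  */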
2085 
2086 /* Write a streaming buffer.  It must be using us as an allocator.  */
2087 
2088 #if MAPPED_WRITING
2089 unsigned
2090 elf_out::write (const bytes_out &buf)
2091 {
2092   gcc_checking_assert (buf.memory == this);
2093   /* A directly mapped buffer.  */
2094   gcc_checking_assert (buf.buffer - hdr.buffer >= 0
2095 		       && buf.buffer - hdr.buffer + buf.size <= extent);
2096   unsigned res = pos;
2097   pos += buf.pos;
2098 
2099   /* Align up.  We're not going to advance into the next page. */
2100   pos += -pos & (SECTION_ALIGN - 1);
2101 
2102   return res;
2103 }
2104 #endif
2105 
2106 /* Write data and add section.  STRING_P is true for a string
2107    section, false for PROGBITS.  NAME identifies the section (0 is the
2108    empty name).  DATA is the contents.  Return section number or 0 on
2109    failure (0 is the undef section).  */
2110 
2111 unsigned
2112 elf_out::add (const bytes_out &data, bool string_p, unsigned name)
2113 {
2114   unsigned off = write (data);
2115 
2116   return add (string_p ? SHT_STRTAB : SHT_PROGBITS, name,
2117 	      off, data.pos, string_p ? SHF_STRINGS : SHF_NONE);
2118 }
2119 
2120 /* Begin writing the file.  Initialize the section table and write an
2121    empty header.  Return false on failure.  */
2122 
2123 bool
2124 elf_out::begin ()
2125 {
2126   if (!parent::begin ())
2127     return false;
2128 
2129   /* Let the allocators pick a default.  */
2130   data::simple_memory.grow (strtab, 0, false);
2131   data::simple_memory.grow (sectab, 0, false);
2132 
2133   /* The string table starts with an empty string.  */
2134   name ("");
2135 
2136   /* Create the UNDEF section.  */
2137   add (SHT_NONE);
2138 
2139 #if MAPPED_WRITING
2140   /* Start a mapping.  */
2141   create_mapping (EXPERIMENT (page_size,
2142 			      (32767 + page_size) & ~(page_size - 1)));
2143   if (!hdr.buffer)
2144     return false;
2145 #endif
2146 
2147   /* Write an empty header.  */
2148   grow (hdr, sizeof (header), true);
2149   header *h = reinterpret_cast<header *> (hdr.buffer);
2150   memset (h, 0, sizeof (header));
2151   hdr.pos = hdr.size;
2152   write (hdr);
2153   return !get_error ();
2154 }
2155 
2156 /* Finish writing the file.  Write out the string & section tables.
2157    Fill in the header.  Return true on error.  */
2158 
2159 bool
2160 elf_out::end ()
2161 {
2162   if (fd >= 0)
2163     {
2164       /* Write the string table.  */
2165       unsigned strnam = name (".strtab");
2166       unsigned stroff = write (strtab);
2167       unsigned strndx = add (SHT_STRTAB, strnam, stroff, strtab.pos,
2168 			     SHF_STRINGS);
2169 
2170       /* Store escape values in section[0].  */
2171       if (strndx >= SHN_LORESERVE)
2172 	{
2173 	  reinterpret_cast<section *> (sectab.buffer)->link = strndx;
2174 	  strndx = SHN_XINDEX;
2175 	}
2176       unsigned shnum = sectab.pos / sizeof (section);
2177       if (shnum >= SHN_LORESERVE)
2178 	{
2179 	  reinterpret_cast<section *> (sectab.buffer)->size = shnum;
2180 	  shnum = SHN_XINDEX;
2181 	}
2182 
2183       unsigned shoff = write (sectab);
2184 
2185 #if MAPPED_WRITING
2186       if (offset)
2187 	{
2188 	  remove_mapping ();
2189 	  offset = 0;
2190 	  create_mapping ((sizeof (header) + page_size - 1) & ~(page_size - 1),
2191 			  false);
2192 	}
2193       unsigned length = pos;
2194 #else
2195       if (lseek (fd, 0, SEEK_SET) < 0)
2196 	set_error (errno);
2197 #endif
2198       /* Write header.  */
2199       if (!get_error ())
2200 	{
2201 	  /* Write the correct header now.  */
2202 	  header *h = reinterpret_cast<header *> (hdr.buffer);
2203 	  h->ident.magic[0] = 0x7f;
2204 	  h->ident.magic[1] = 'E';	/* Elrond */
2205 	  h->ident.magic[2] = 'L';	/* is an */
2206 	  h->ident.magic[3] = 'F';	/* elf.  */
2207 	  h->ident.klass = MY_CLASS;
2208 	  h->ident.data =  MY_ENDIAN;
2209 	  h->ident.version = EV_CURRENT;
2210 	  h->ident.osabi = OSABI_NONE;
2211 	  h->type = ET_NONE;
2212 	  h->machine = EM_NONE;
2213 	  h->version = EV_CURRENT;
2214 	  h->shoff = shoff;
2215 	  h->ehsize = sizeof (header);
2216 	  h->shentsize = sizeof (section);
2217 	  h->shnum = shnum;
2218 	  h->shstrndx = strndx;
2219 
2220 	  pos = 0;
2221 	  write (hdr);
2222 	}
2223 
2224 #if MAPPED_WRITING
2225       remove_mapping ();
2226       if (ftruncate (fd, length))
2227 	set_error (errno);
2228 #endif
2229     }
2230 
2231   data::simple_memory.shrink (sectab);
2232   data::simple_memory.shrink (strtab);
2233 
2234   return parent::end ();
2235 }
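/* The SHN_XINDEX escapes stored above mirror the reading side earlier
   in this file: when the header's shnum or shstrndx is SHN_XINDEX,
   the reader fetches section[0] and recovers the real section count
   from its size field and the string table index from its link
   field.  */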
2236 
2237 /********************************************************************/
2238 
2239 /* A dependency set.  This is used during stream out to determine the
2240    connectivity of the graph.  Every namespace-scope declaration that
2241    needs writing has a depset.  The depset is filled with the (depsets
2242    of) declarations within this module that it references.  For a
2243    declaration that'll generally be named types.  For definitions
2244    it'll also be declarations in the body.
2245 
2246    From that we can convert the graph to a DAG, via determining the
2247    Strongly Connected Clusters.  Each cluster is streamed
2248    independently, and thus we achieve lazy loading.
2249 
2250    Other decls that get a depset are namespaces themselves and
2251    unnameable declarations.   */
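/* A small worked example: if declaration A references B, B references
   C and C references B, then {B,C} forms one strongly connected
   cluster and {A} another, with {A}'s cluster depending on {B,C}.
   Each cluster is streamed as its own section, so a reader that only
   needs A can defer loading {B,C} until one of its members is
   actually required.  */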
2252 
2253 class depset {
2254 private:
2255   tree entity;  /* Entity, or containing namespace.  */
2256   uintptr_t discriminator;  /* Flags or identifier.  */
2257 
2258 public:
2259   /* The kinds of entity the depset could describe.  The ordering is
2260      significant, see entity_kind_name.  */
2261   enum entity_kind
2262   {
2263     EK_DECL,		/* A decl.  */
2264     EK_SPECIALIZATION,  /* A specialization.  */
2265     EK_PARTIAL,		/* A partial specialization.  */
2266     EK_USING,		/* A using declaration (at namespace scope).  */
2267     EK_NAMESPACE,	/* A namespace.  */
2268     EK_REDIRECT,	/* Redirect to a template_decl.  */
2269     EK_EXPLICIT_HWM,
2270     EK_BINDING = EK_EXPLICIT_HWM, /* Implicitly encoded.  */
2271     EK_FOR_BINDING,	/* A decl being inserted for a binding.  */
2272     EK_INNER_DECL,	/* A decl defined outside of its imported
2273 			   context.  */
2274     EK_DIRECT_HWM = EK_PARTIAL + 1,
2275 
2276     EK_BITS = 3		/* Only need to encode below EK_EXPLICIT_HWM.  */
2277   };
2278 
2279 private:
2280   /* Placement of bit fields in discriminator.  */
2281   enum disc_bits
2282   {
2283     DB_ZERO_BIT, /* Set to disambiguate identifier from flags  */
2284     DB_SPECIAL_BIT, /* First dep slot is special.  */
2285     DB_KIND_BIT, /* Kind of the entity.  */
2286     DB_KIND_BITS = EK_BITS,
2287     DB_DEFN_BIT = DB_KIND_BIT + DB_KIND_BITS,
2288     DB_IS_MEMBER_BIT,		/* Is an out-of-class member.  */
2289     DB_IS_INTERNAL_BIT,		/* It is an (erroneous)
2290 				   internal-linkage entity.  */
2291     DB_REFS_INTERNAL_BIT,	/* Refers to an internal-linkage
2292 				   entity. */
2293     DB_IMPORTED_BIT,		/* An imported entity.  */
2294     DB_UNREACHED_BIT,		/* A yet-to-be reached entity.  */
2295     DB_HIDDEN_BIT,		/* A hidden binding.  */
2296     /* The following bits are not independent, but enumerating them is
2297        awkward.  */
2298     DB_ALIAS_TMPL_INST_BIT,	/* An alias template instantiation. */
2299     DB_ALIAS_SPEC_BIT,		/* Specialization of an alias template
2300 				   (in both spec tables).  */
2301     DB_TYPE_SPEC_BIT,		/* Specialization in the type table.
2302 				   */
2303     DB_FRIEND_SPEC_BIT,		/* An instantiated template friend.  */
2304   };
2305 
2306 public:
2307   /* The first slot is special: for EK_SPECIALIZATION it is a
2308      spec_entry pointer.  It is not relevant for the SCC
2309      determination.  */
2310   vec<depset *> deps;  /* Depsets we reference.  */
2311 
2312 public:
2313   unsigned cluster; /* Strongly connected cluster, later entity number  */
2314   unsigned section; /* Section written to.  */
2315   /* During SCC construction, section is lowlink, until the depset is
2316      removed from the stack.  See Tarjan algorithm for details.  */
2317 
2318 private:
2319   /* Construction via factories.  Destruction via hash traits.  */
2320   depset (tree entity);
2321   ~depset ();
2322 
2323 public:
2324   static depset *make_binding (tree, tree);
2325   static depset *make_entity (tree, entity_kind, bool = false);
2326   /* Late setting a binding name -- /then/ insert into hash!  */
2327   inline void set_binding_name (tree name)
2328   {
2329     gcc_checking_assert (!get_name ());
2330     discriminator = reinterpret_cast<uintptr_t> (name);
2331   }
2332 
2333 private:
2334   template<unsigned I> void set_flag_bit ()
2335   {
2336     gcc_checking_assert (I < 2 || !is_binding ());
2337     discriminator |= 1u << I;
2338   }
2339   template<unsigned I> void clear_flag_bit ()
2340   {
2341     gcc_checking_assert (I < 2 || !is_binding ());
2342     discriminator &= ~(1u << I);
2343   }
2344   template<unsigned I> bool get_flag_bit () const
2345   {
2346     gcc_checking_assert (I < 2 || !is_binding ());
2347     return bool ((discriminator >> I) & 1);
2348   }
2349 
2350 public:
2351   bool is_binding () const
2352   {
2353     return !get_flag_bit<DB_ZERO_BIT> ();
2354   }
2355   entity_kind get_entity_kind () const
2356   {
2357     if (is_binding ())
2358       return EK_BINDING;
2359     return entity_kind ((discriminator >> DB_KIND_BIT) & ((1u << EK_BITS) - 1));
2360   }
2361   const char *entity_kind_name () const;
2362 
2363 public:
2364   bool has_defn () const
2365   {
2366     return get_flag_bit<DB_DEFN_BIT> ();
2367   }
2368 
2369 public:
2370   /* This class-member is defined here, but the class was imported.  */
2371   bool is_member () const
2372   {
2373     gcc_checking_assert (get_entity_kind () == EK_DECL);
2374     return get_flag_bit<DB_IS_MEMBER_BIT> ();
2375   }
2376 public:
2377   bool is_internal () const
2378   {
2379     return get_flag_bit<DB_IS_INTERNAL_BIT> ();
2380   }
2381   bool refs_internal () const
2382   {
2383     return get_flag_bit<DB_REFS_INTERNAL_BIT> ();
2384   }
2385   bool is_import () const
2386   {
2387     return get_flag_bit<DB_IMPORTED_BIT> ();
2388   }
2389   bool is_unreached () const
2390   {
2391     return get_flag_bit<DB_UNREACHED_BIT> ();
2392   }
2393   bool is_alias_tmpl_inst () const
2394   {
2395     return get_flag_bit<DB_ALIAS_TMPL_INST_BIT> ();
2396   }
2397   bool is_alias () const
2398   {
2399     return get_flag_bit<DB_ALIAS_SPEC_BIT> ();
2400   }
2401   bool is_hidden () const
2402   {
2403     return get_flag_bit<DB_HIDDEN_BIT> ();
2404   }
2405   bool is_type_spec () const
2406   {
2407     return get_flag_bit<DB_TYPE_SPEC_BIT> ();
2408   }
2409   bool is_friend_spec () const
2410   {
2411     return get_flag_bit<DB_FRIEND_SPEC_BIT> ();
2412   }
2413 
2414 public:
2415   /* We set these bits outside of depset.  */
2416   void set_hidden_binding ()
2417   {
2418     set_flag_bit<DB_HIDDEN_BIT> ();
2419   }
2420   void clear_hidden_binding ()
2421   {
2422     clear_flag_bit<DB_HIDDEN_BIT> ();
2423   }
2424 
2425 public:
2426   bool is_special () const
2427   {
2428     return get_flag_bit<DB_SPECIAL_BIT> ();
2429   }
2430   void set_special ()
2431   {
2432     set_flag_bit<DB_SPECIAL_BIT> ();
2433   }
2434 
2435 public:
2436   tree get_entity () const
2437   {
2438     return entity;
2439   }
2440   tree get_name () const
2441   {
2442     gcc_checking_assert (is_binding ());
2443     return reinterpret_cast <tree> (discriminator);
2444   }
2445 
2446 public:
2447   /* Traits for a hash table of pointers to bindings.  */
2448   struct traits {
2449     /* Each entry is a pointer to a depset. */
2450     typedef depset *value_type;
2451     /* We lookup by container:maybe-identifier pair.  */
2452     typedef std::pair<tree,tree> compare_type;
2453 
2454     static const bool empty_zero_p = true;
2455 
2456     /* hash and equality for compare_type.  */
2457     inline static hashval_t hash (const compare_type &p)
2458     {
2459       hashval_t h = pointer_hash<tree_node>::hash (p.first);
2460       if (p.second)
2461 	{
2462 	  hashval_t nh = IDENTIFIER_HASH_VALUE (p.second);
2463 	  h = iterative_hash_hashval_t (h, nh);
2464 	}
2465       return h;
2466     }
2467     inline static bool equal (const value_type b, const compare_type &p)
2468     {
2469       if (b->entity != p.first)
2470 	return false;
2471 
2472       if (p.second)
2473 	return b->discriminator == reinterpret_cast<uintptr_t> (p.second);
2474       else
2475 	return !b->is_binding ();
2476     }
2477 
2478     /* (re)hasher for a binding itself.  */
2479     inline static hashval_t hash (const value_type b)
2480     {
2481       hashval_t h = pointer_hash<tree_node>::hash (b->entity);
2482       if (b->is_binding ())
2483 	{
2484 	  hashval_t nh = IDENTIFIER_HASH_VALUE (b->get_name ());
2485 	  h = iterative_hash_hashval_t (h, nh);
2486 	}
2487       return h;
2488     }
2489 
2490     /* Empty via NULL.  */
2491     static inline void mark_empty (value_type &p) {p = NULL;}
2492     static inline bool is_empty (value_type p) {return !p;}
2493 
2494     /* Nothing is deletable.  Everything is insertable.  */
2495     static bool is_deleted (value_type) { return false; }
2496     static void mark_deleted (value_type) { gcc_unreachable (); }
2497 
2498     /* We own the entities in the hash table.  */
2499     static void remove (value_type p)
2500     {
2501       delete (p);
2502     }
2503   };
2504 
2505 public:
2506   class hash : public hash_table<traits> {
2507     typedef traits::compare_type key_t;
2508     typedef hash_table<traits> parent;
2509 
2510   public:
2511     vec<depset *> worklist;  /* Worklist of decls to walk.  */
2512     hash *chain;	     /* Original table.  */
2513     depset *current;         /* Current depset whose dependencies are
					being recorded.  */
2514     unsigned section;	     /* When writing out, the section.  */
2515     bool sneakoscope;        /* Detecting dark magic (of a voldemort).  */
2516     bool reached_unreached;  /* We reached an unreached entity.  */
2517 
2518   public:
2519     hash (size_t size, hash *c = NULL)
2520       : parent (size), chain (c), current (NULL), section (0),
2521 	sneakoscope (false), reached_unreached (false)
2522     {
2523       worklist.create (size);
2524     }
2525     ~hash ()
2526     {
2527       worklist.release ();
2528     }
2529 
2530   public:
2531     bool is_key_order () const
2532     {
2533       return chain != NULL;
2534     }
2535 
2536   private:
2537     depset **entity_slot (tree entity, bool = true);
2538     depset **binding_slot (tree ctx, tree name, bool = true);
2539     depset *maybe_add_declaration (tree decl);
2540 
2541   public:
2542     depset *find_dependency (tree entity);
2543     depset *find_binding (tree ctx, tree name);
2544     depset *make_dependency (tree decl, entity_kind);
2545     void add_dependency (depset *);
2546 
2547   public:
2548     void add_mergeable (depset *);
2549     depset *add_dependency (tree decl, entity_kind);
2550     void add_namespace_context (depset *, tree ns);
2551 
2552   private:
2553     static bool add_binding_entity (tree, WMB_Flags, void *);
2554 
2555   public:
2556     bool add_namespace_entities (tree ns, bitmap partitions);
2557     void add_specializations (bool decl_p);
2558     void add_partial_entities (vec<tree, va_gc> *);
2559     void add_class_entities (vec<tree, va_gc> *);
2560 
2561   public:
2562     void find_dependencies (module_state *);
2563     bool finalize_dependencies ();
2564     vec<depset *> connect ();
2565   };
2566 
2567 public:
2568   struct tarjan {
2569     vec<depset *> result;
2570     vec<depset *> stack;
2571     unsigned index;
2572 
2573     tarjan (unsigned size)
2574       : index (0)
2575     {
2576       result.create (size);
2577       stack.create (50);
2578     }
2579     ~tarjan ()
2580     {
2581       gcc_assert (!stack.length ());
2582       stack.release ();
2583     }
2584 
2585   public:
2586     void connect (depset *);
2587   };
2588 };
2589 
2590 inline
2591 depset::depset (tree entity)
2592   :entity (entity), discriminator (0), cluster (0), section (0)
2593 {
2594   deps.create (0);
2595 }
2596 
2597 inline
2598 depset::~depset ()
2599 {
2600   deps.release ();
2601 }
2602 
2603 const char *
2604 depset::entity_kind_name () const
2605 {
2606   /* Same order as entity_kind.  */
2607   static const char *const names[] =
2608     {"decl", "specialization", "partial", "using",
2609      "namespace", "redirect", "binding"};
2610   entity_kind kind = get_entity_kind ();
2611   gcc_checking_assert (kind < sizeof (names) / sizeof(names[0]));
2612   return names[kind];
2613 }
2614 
2615 /* Create a depset for a namespace binding NS::NAME.  */
2616 
2617 depset *depset::make_binding (tree ns, tree name)
2618 {
2619   depset *binding = new depset (ns);
2620 
2621   binding->discriminator = reinterpret_cast <uintptr_t> (name);
2622 
2623   return binding;
2624 }
2625 
2626 depset *depset::make_entity (tree entity, entity_kind ek, bool is_defn)
2627 {
2628   depset *r = new depset (entity);
2629 
2630   r->discriminator = ((1 << DB_ZERO_BIT)
2631 		      | (ek << DB_KIND_BIT)
2632 		      | is_defn << DB_DEFN_BIT);
2633 
2634   return r;
2635 }
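/* For illustration, make_entity (decl, EK_NAMESPACE, true) produces a
   discriminator of 1 | (4 << DB_KIND_BIT) | (1 << DB_DEFN_BIT), i.e.
   0x31: DB_ZERO_BIT distinguishes this flag word from a binding's
   identifier pointer (whose low bit is zero), the kind lives in the
   DB_KIND_BIT field, and DB_DEFN_BIT records that a definition is
   wanted.  */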
2636 
2637 class pending_key
2638 {
2639 public:
2640   tree ns;
2641   tree id;
2642 };
2643 
2644 template<>
2645 struct default_hash_traits<pending_key>
2646 {
2647   using value_type = pending_key;
2648 
2649   static const bool empty_zero_p = false;
2650   static hashval_t hash (const value_type &k)
2651   {
2652     hashval_t h = IDENTIFIER_HASH_VALUE (k.id);
2653     h = iterative_hash_hashval_t (DECL_UID (k.ns), h);
2654 
2655     return h;
2656   }
2657   static bool equal (const value_type &k, const value_type &l)
2658   {
2659     return k.ns == l.ns && k.id == l.id;
2660   }
2661   static void mark_empty (value_type &k)
2662   {
2663     k.ns = k.id = NULL_TREE;
2664   }
2665   static void mark_deleted (value_type &k)
2666   {
2667     k.ns = NULL_TREE;
2668     gcc_checking_assert (k.id);
2669   }
2670   static bool is_empty (const value_type &k)
2671   {
2672     return k.ns == NULL_TREE && k.id == NULL_TREE;
2673   }
2674   static bool is_deleted (const value_type &k)
2675   {
2676     return k.ns == NULL_TREE && k.id != NULL_TREE;
2677   }
2678   static void remove (value_type &)
2679   {
2680   }
2681 };
2682 
2683 typedef hash_map<pending_key, auto_vec<unsigned>> pending_map_t;
2684 
2685 /* Not-loaded entities that are keyed to a namespace-scope
2686    identifier.  See module_state::write_pendings for details.  */
2687 pending_map_t *pending_table;
2688 
2689 /* Decls that need some post processing once a batch of lazy loads has
2690    completed.  */
2691 vec<tree, va_heap, vl_embed> *post_load_decls;
2692 
2693 /* Some entities are attached to another entity for ODR purposes.
2694    For example, given 'inline auto var = []{};' at namespace scope,
2695    the lambda is attached to 'var' and follows its ODR-ness.  */
2696 typedef hash_map<tree, auto_vec<tree>> attached_map_t;
2697 static attached_map_t *attached_table;
2698 
2699 /********************************************************************/
2700 /* Tree streaming.   The tree streaming is very specific to the tree
2701    structures themselves.  A tag indicates the kind of tree being
2702    streamed.  -ve tags indicate backreferences to already-streamed
2703    trees.  Backreferences are auto-numbered.  */
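/* For example, the first time a particular node is streamed by value
   it is allocated the next back-reference number; if the same node is
   reached again in that stream it is emitted as that small negative
   tag rather than being written out twice, and the reader resolves it
   through its back_refs vector.  */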
2704 
2705 /* Tree tags.  */
2706 enum tree_tag {
2707   tt_null,		/* NULL_TREE.  */
2708   tt_fixed,		/* Fixed vector index.  */
2709 
2710   tt_node,		/* By-value node.  */
2711   tt_decl,		/* By-value mergeable decl.  */
2712   tt_tpl_parm,		/* Template parm.  */
2713 
2714   /* The ordering of the following 4 is relied upon in
2715      trees_out::tree_node.  */
2716   tt_id,  		/* Identifier node.  */
2717   tt_conv_id,		/* Conversion operator name.  */
2718   tt_anon_id,		/* Anonymous name.  */
2719   tt_lambda_id,		/* Lambda name.  */
2720 
2721   tt_typedef_type,	/* A (possibly implicit) typedefed type.  */
2722   tt_derived_type,	/* A type derived from another type.  */
2723   tt_variant_type,	/* A variant of another type.  */
2724 
2725   tt_tinfo_var,		/* Typeinfo object. */
2726   tt_tinfo_typedef,	/* Typeinfo typedef.  */
2727   tt_ptrmem_type,	/* Pointer to member type.  */
2728 
2729   tt_parm,		/* Function parameter or result.  */
2730   tt_enum_value,	/* An enum value.  */
2731   tt_enum_decl,		/* An enum decl.  */
2732   tt_data_member,	/* Data member/using-decl.  */
2733 
2734   tt_binfo,		/* A BINFO.  */
2735   tt_vtable,		/* A vtable.  */
2736   tt_thunk,		/* A thunk.  */
2737   tt_clone_ref,
2738 
2739   tt_entity,		/* An extra-cluster entity.  */
2740 
2741   tt_template,		/* The TEMPLATE_RESULT of a template.  */
2742 };
2743 
2744 enum walk_kind {
2745   WK_none,	/* No walk to do (a back- or fixed-ref happened).  */
2746   WK_normal,	/* Normal walk (by-name if possible).  */
2747 
2748   WK_value,	/* By-value walk.  */
2749 };
2750 
2751 enum merge_kind
2752 {
2753   MK_unique,	/* Known unique.  */
2754   MK_named,	/* Found by CTX, NAME + maybe_arg types etc.  */
2755   MK_field,	/* Found by CTX and index on TYPE_FIELDS  */
2756   MK_vtable,	/* Found by CTX and index on TYPE_VTABLES  */
2757   MK_as_base,	/* Found by CTX.  */
2758 
2759   MK_partial,
2760 
2761   MK_enum,	/* Found by CTX, & 1stMemberNAME.  */
2762   MK_attached,  /* Found by attachee & index.  */
2763 
2764   MK_friend_spec,  /* Like named, but has a tmpl & args too.  */
2765   MK_local_friend, /* Found by CTX, index.  */
2766 
2767   MK_indirect_lwm = MK_enum,
2768 
2769   /* Template specialization kinds below. These are all found via
2770      primary template and specialization args.  */
2771   MK_template_mask = 0x10,  /* A template specialization.  */
2772 
2773   MK_tmpl_decl_mask = 0x4, /* In decl table.  */
2774   MK_tmpl_alias_mask = 0x2, /* Also in type table  */
2775 
2776   MK_tmpl_tmpl_mask = 0x1, /* We want TEMPLATE_DECL.  */
2777 
2778   MK_type_spec = MK_template_mask,
2779   MK_decl_spec = MK_template_mask | MK_tmpl_decl_mask,
2780   MK_alias_spec = MK_decl_spec | MK_tmpl_alias_mask,
2781 
2782   MK_hwm = 0x20
2783 };
2784 /* This is more than a debugging array.  NULLs are used to determine
2785    an invalid merge_kind number.  */
2786 static char const *const merge_kind_name[MK_hwm] =
2787   {
2788     "unique", "named", "field", "vtable",	/* 0...3  */
2789     "asbase", "partial", "enum", "attached",	/* 4...7  */
2790 
2791     "friend spec", "local friend", NULL, NULL,  /* 8...11 */
2792     NULL, NULL, NULL, NULL,
2793 
2794     "type spec", "type tmpl spec",	/* 16,17 type (template).  */
2795     NULL, NULL,
2796 
2797     "decl spec", "decl tmpl spec",	/* 20,21 decl (template).  */
2798     "alias spec", "alias tmpl spec",	/* 22,23 alias (template). */
2799     NULL, NULL, NULL, NULL,
2800     NULL, NULL, NULL, NULL,
2801   };
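/* Purely as an illustration of the mask arithmetic (these asserts add
   no new requirements): an alias specialization lives in the decl
   table and also in the type table, and its TEMPLATE_DECL variant
   indexes the adjacent name in the array above.  */
static_assert (MK_alias_spec
	       == (MK_template_mask | MK_tmpl_decl_mask | MK_tmpl_alias_mask),
	       "alias spec combines the template, decl and alias masks");
static_assert ((MK_alias_spec | MK_tmpl_tmpl_mask) == 0x17,
	       "the TEMPLATE_DECL variant selects 'alias tmpl spec'");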
2802 
2803 /* Mergeable entity location data.  */
2804 struct merge_key {
2805   cp_ref_qualifier ref_q : 2;
2806   unsigned index;
2807 
2808   tree ret;  /* Return type, if appropriate.  */
2809   tree args; /* Arg types, if appropriate.  */
2810 
2811   tree constraints;  /* Constraints.  */
2812 
2813   merge_key ()
2814     :ref_q (REF_QUAL_NONE), index (0),
2815      ret (NULL_TREE), args (NULL_TREE),
2816      constraints (NULL_TREE)
2817   {
2818   }
2819 };
2820 
2821 struct duplicate_hash : nodel_ptr_hash<tree_node>
2822 {
2823 #if 0
2824   /* This breaks variadic bases in the xtreme_header tests.  Since ::equal is
2825      the default pointer_hash::equal, let's use the default hash as well.  */
2826   inline static hashval_t hash (value_type decl)
2827   {
2828     if (TREE_CODE (decl) == TREE_BINFO)
2829       decl = TYPE_NAME (BINFO_TYPE (decl));
2830     return hashval_t (DECL_UID (decl));
2831   }
2832 #endif
2833 };
2834 
2835 /* Hashmap of merged duplicates.  Usually decls, but can contain
2836    BINFOs.  */
2837 typedef hash_map<tree,uintptr_t,
2838 		 simple_hashmap_traits<duplicate_hash,uintptr_t> >
2839 duplicate_hash_map;
2840 
2841 /* Tree stream reader.  Note that reading a stream doesn't mark the
2842    read trees with TREE_VISITED.  Thus it's quite safe to have
2843    multiple concurrent readers.  Which is good, because lazy
2844    loading. */
2845 class trees_in : public bytes_in {
2846   typedef bytes_in parent;
2847 
2848 private:
2849   module_state *state;		/* Module being imported.  */
2850   vec<tree> back_refs;		/* Back references.  */
2851   duplicate_hash_map *duplicates;	/* Map from existings to duplicate.  */
2852   vec<tree> post_decls;		/* Decls to post process.  */
2853   unsigned unused;		/* Inhibit any interior TREE_USED
2854 				   marking.  */
2855 
2856 public:
2857   trees_in (module_state *);
2858   ~trees_in ();
2859 
2860 public:
2861   int insert (tree);
2862   tree back_ref (int);
2863 
2864 private:
2865   tree start (unsigned = 0);
2866 
2867 public:
2868   /* Needed for binfo writing  */
2869   bool core_bools (tree);
2870 
2871 private:
2872   /* Stream tree_core, lang_decl_specific and lang_type_specific
2873      bits.  */
2874   bool core_vals (tree);
2875   bool lang_type_bools (tree);
2876   bool lang_type_vals (tree);
2877   bool lang_decl_bools (tree);
2878   bool lang_decl_vals (tree);
2879   bool lang_vals (tree);
2880   bool tree_node_bools (tree);
2881   bool tree_node_vals (tree);
2882   tree tree_value ();
2883   tree decl_value ();
2884   tree tpl_parm_value ();
2885 
2886 private:
2887   tree chained_decls ();  /* Follow DECL_CHAIN.  */
2888   vec<tree, va_heap> *vec_chained_decls ();
2889   vec<tree, va_gc> *tree_vec (); /* vec of tree.  */
2890   vec<tree_pair_s, va_gc> *tree_pair_vec (); /* vec of tree_pair.  */
2891   tree tree_list (bool has_purpose);
2892 
2893 public:
2894   /* Read a tree node.  */
2895   tree tree_node (bool is_use = false);
2896 
2897 private:
2898   bool install_entity (tree decl);
2899   tree tpl_parms (unsigned &tpl_levels);
2900   bool tpl_parms_fini (tree decl, unsigned tpl_levels);
2901   bool tpl_header (tree decl, unsigned *tpl_levels);
2902   int fn_parms_init (tree);
2903   void fn_parms_fini (int tag, tree fn, tree existing, bool has_defn);
2904   unsigned add_indirect_tpl_parms (tree);
2905 public:
2906   bool add_indirects (tree);
2907 
2908 public:
2909   /* Serialize various definitions. */
2910   bool read_definition (tree decl);
2911 
2912 private:
2913   bool is_matching_decl (tree existing, tree decl, bool is_typedef);
2914   static bool install_implicit_member (tree decl);
2915   bool read_function_def (tree decl, tree maybe_template);
2916   bool read_var_def (tree decl, tree maybe_template);
2917   bool read_class_def (tree decl, tree maybe_template);
2918   bool read_enum_def (tree decl, tree maybe_template);
2919 
2920 public:
2921   tree decl_container ();
2922   tree key_mergeable (int tag, merge_kind, tree decl, tree inner, tree type,
2923 		      tree container, bool is_mod);
2924   unsigned binfo_mergeable (tree *);
2925 
2926 private:
2927   uintptr_t *find_duplicate (tree existing);
2928   void register_duplicate (tree decl, tree existing);
2929   /* Mark as an already diagnosed bad duplicate.  */
2930   void unmatched_duplicate (tree existing)
2931   {
2932     *find_duplicate (existing) |= 1;
2933   }
2934 
2935 public:
2936   bool is_duplicate (tree decl)
2937   {
2938     return find_duplicate (decl) != NULL;
2939   }
2940   tree maybe_duplicate (tree decl)
2941   {
2942     if (uintptr_t *dup = find_duplicate (decl))
2943       return reinterpret_cast<tree> (*dup & ~uintptr_t (1));
2944     return decl;
2945   }
2946   tree odr_duplicate (tree decl, bool has_defn);
2947 
2948 public:
2949   /* Return the next decl to postprocess, or NULL.  */
2950   tree post_process ()
2951   {
2952     return post_decls.length () ? post_decls.pop () : NULL_TREE;
2953   }
2954 private:
2955   /* Register DECL for postprocessing.  */
2956   void post_process (tree decl)
2957   {
2958     post_decls.safe_push (decl);
2959   }
2960 
2961 private:
2962   void assert_definition (tree, bool installing);
2963 };
2964 
2965 trees_in::trees_in (module_state *state)
2966   :parent (), state (state), unused (0)
2967 {
2968   duplicates = NULL;
2969   back_refs.create (500);
2970   post_decls.create (0);
2971 }
2972 
2973 trees_in::~trees_in ()
2974 {
2975   delete (duplicates);
2976   back_refs.release ();
2977   post_decls.release ();
2978 }
2979 
2980 /* Tree stream writer.  */
2981 class trees_out : public bytes_out {
2982   typedef bytes_out parent;
2983 
2984 private:
2985   module_state *state;		/* The module we are writing.  */
2986   ptr_int_hash_map tree_map; 	/* Trees to references */
2987   depset::hash *dep_hash;    	/* Dependency table.  */
2988   int ref_num;			/* Back reference number.  */
2989   unsigned section;
2990 #if CHECKING_P
2991   int importedness;		/* Checks that imports are not occurring
2992 				   inappropriately.  +ve imports ok,
2993 				   -ve imports not ok.  */
2994 #endif
2995 
2996 public:
2997   trees_out (allocator *, module_state *, depset::hash &deps, unsigned sec = 0);
2998   ~trees_out ();
2999 
3000 private:
3001   void mark_trees ();
3002   void unmark_trees ();
3003 
3004 public:
3005   /* Hey, let's ignore the well known STL iterator idiom.  */
3006   void begin ();
3007   unsigned end (elf_out *sink, unsigned name, unsigned *crc_ptr);
3008   void end ();
3009 
3010 public:
3011   enum tags
3012   {
3013     tag_backref = -1,	/* Upper bound on the backrefs.  */
3014     tag_value = 0,	/* Write by value.  */
3015     tag_fixed		/* Lower bound on the fixed trees.  */
3016   };
3017 
3018 public:
3019   bool is_key_order () const
3020   {
3021     return dep_hash->is_key_order ();
3022   }
3023 
3024 public:
3025   int insert (tree, walk_kind = WK_normal);
3026 
3027 private:
3028   void start (tree, bool = false);
3029 
3030 private:
3031   walk_kind ref_node (tree);
3032 public:
3033   int get_tag (tree);
3034   void set_importing (int i ATTRIBUTE_UNUSED)
3035   {
3036 #if CHECKING_P
3037     importedness = i;
3038 #endif
3039   }
3040 
3041 private:
3042   void core_bools (tree);
3043   void core_vals (tree);
3044   void lang_type_bools (tree);
3045   void lang_type_vals (tree);
3046   void lang_decl_bools (tree);
3047   void lang_decl_vals (tree);
3048   void lang_vals (tree);
3049   void tree_node_bools (tree);
3050   void tree_node_vals (tree);
3051 
3052 private:
3053   void chained_decls (tree);
3054   void vec_chained_decls (tree);
3055   void tree_vec (vec<tree, va_gc> *);
3056   void tree_pair_vec (vec<tree_pair_s, va_gc> *);
3057   void tree_list (tree, bool has_purpose);
3058 
3059 public:
3060   /* Mark a node for by-value walking.  */
3061   void mark_by_value (tree);
3062 
3063 public:
3064   void tree_node (tree);
3065 
3066 private:
3067   void install_entity (tree decl, depset *);
3068   void tpl_parms (tree parms, unsigned &tpl_levels);
3069   void tpl_parms_fini (tree decl, unsigned tpl_levels);
3070   void fn_parms_fini (tree) {}
3071   unsigned add_indirect_tpl_parms (tree);
3072 public:
3073   void add_indirects (tree);
3074   void fn_parms_init (tree);
3075   void tpl_header (tree decl, unsigned *tpl_levels);
3076 
3077 public:
3078   merge_kind get_merge_kind (tree decl, depset *maybe_dep);
3079   tree decl_container (tree decl);
3080   void key_mergeable (int tag, merge_kind, tree decl, tree inner,
3081 		      tree container, depset *maybe_dep);
3082   void binfo_mergeable (tree binfo);
3083 
3084 private:
3085   bool decl_node (tree, walk_kind ref);
3086   void type_node (tree);
3087   void tree_value (tree);
3088   void tpl_parm_value (tree);
3089 
3090 public:
3091   void decl_value (tree, depset *);
3092 
3093 public:
3094   /* Serialize various definitions. */
3095   void write_definition (tree decl);
3096   void mark_declaration (tree decl, bool do_defn);
3097 
3098 private:
3099   void mark_function_def (tree decl);
3100   void mark_var_def (tree decl);
3101   void mark_class_def (tree decl);
3102   void mark_enum_def (tree decl);
3103   void mark_class_member (tree decl, bool do_defn = true);
3104   void mark_binfos (tree type);
3105 
3106 private:
3107   void write_var_def (tree decl);
3108   void write_function_def (tree decl);
3109   void write_class_def (tree decl);
3110   void write_enum_def (tree decl);
3111 
3112 private:
3113   static void assert_definition (tree);
3114 
3115 public:
3116   static void instrument ();
3117 
3118 private:
3119   /* Tree instrumentation. */
3120   static unsigned tree_val_count;
3121   static unsigned decl_val_count;
3122   static unsigned back_ref_count;
3123   static unsigned null_count;
3124 };
3125 
3126 /* Instrumentation counters.  */
3127 unsigned trees_out::tree_val_count;
3128 unsigned trees_out::decl_val_count;
3129 unsigned trees_out::back_ref_count;
3130 unsigned trees_out::null_count;
3131 
3132 trees_out::trees_out (allocator *mem, module_state *state, depset::hash &deps,
3133 		      unsigned section)
3134   :parent (mem), state (state), tree_map (500),
3135    dep_hash (&deps), ref_num (0), section (section)
3136 {
3137 #if CHECKING_P
3138   importedness = 0;
3139 #endif
3140 }
3141 
3142 trees_out::~trees_out ()
3143 {
3144 }
3145 
3146 /********************************************************************/
3147 /* Location.  We're aware of the line-map concept and reproduce it
3148    here.  Each imported module allocates a contiguous span of ordinary
3149    maps, and of macro maps.  adhoc maps are serialized by contents,
3150    not pre-allocated.   The scattered linemaps of a module are
3151    coalesced when writing.  */
3152 
3153 
3154 /* I use half-open [first,second) ranges.  */
3155 typedef std::pair<unsigned,unsigned> range_t;
3156 
3157 /* A range of locations.  */
3158 typedef std::pair<location_t,location_t> loc_range_t;
3159 
3160 /* Spans of the line maps that are occupied by this TU.  I.e. not
3161    within imports.  Only extended when in an interface unit.
3162    Interval zero corresponds to the forced header linemap(s).  This
3163    is a singleton object.  */
3164 
3165 class loc_spans {
3166 public:
3167   /* An interval of line maps.  The line maps here represent a contiguous
3168      non-imported range.  */
3169   struct span {
3170     loc_range_t ordinary;	/* Ordinary map location range. */
3171     loc_range_t macro;		/* Macro map location range.  */
3172     int ordinary_delta;	/* Add to ordinary loc to get serialized loc.  */
3173     int macro_delta;	/* Likewise for macro loc.  */
3174   };
3175 
3176 private:
3177   vec<span> *spans;
3178 
3179 public:
3180   loc_spans ()
3181     /* Do not preallocate spans, as that causes
3182        --enable-detailed-mem-stats problems.  */
3183     : spans (nullptr)
3184   {
3185   }
3186   ~loc_spans ()
3187   {
3188     delete spans;
3189   }
3190 
3191 public:
3192   span &operator[] (unsigned ix)
3193   {
3194     return (*spans)[ix];
3195   }
3196   unsigned length () const
3197   {
3198     return spans->length ();
3199   }
3200 
3201 public:
3202   bool init_p () const
3203   {
3204     return spans != nullptr;
3205   }
3206   /* Initializer.  */
3207   void init (const line_maps *lmaps, const line_map_ordinary *map);
3208 
3209   /* Slightly skewed preprocessed files can cause us to miss an
3210      initialization in some places.  Fallback initializer.  */
3211   void maybe_init ()
3212   {
3213     if (!init_p ())
3214       init (line_table, nullptr);
3215   }
3216 
3217 public:
3218   enum {
3219     SPAN_RESERVED = 0,	/* Reserved (fixed) locations.  */
3220     SPAN_FIRST = 1,	/* LWM of locations to stream  */
3221     SPAN_MAIN = 2	/* Main file and onwards.  */
3222   };
3223 
3224 public:
3225   location_t main_start () const
3226   {
3227     return (*spans)[SPAN_MAIN].ordinary.first;
3228   }
3229 
3230 public:
3231   void open (location_t);
3232   void close ();
3233 
3234 public:
3235   /* Propagate imported linemaps to us, if needed.  */
3236   bool maybe_propagate (module_state *import, location_t loc);
3237 
3238 public:
3239   const span *ordinary (location_t);
3240   const span *macro (location_t);
3241 };
3242 
3243 static loc_spans spans;
3244 /* Indirection to allow bsearching imports by ordinary location.  */
3245 static vec<module_state *> *ool;
3246 
3247 /********************************************************************/
3248 /* Data needed by a module during the process of loading.  */
3249 struct GTY(()) slurping {
3250 
3251   /* Remap import's module numbering to our numbering.  Values are
3252      shifted by 1.  Bit0 encodes if the import is direct.  */
3253   vec<unsigned, va_heap, vl_embed> *
3254     GTY((skip)) remap;			/* Module owner remapping.  */
3255 
3256   elf_in *GTY((skip)) from;     	/* The elf loader.  */
3257 
3258   /* This map is only for header imports themselves -- the global
3259      headers bitmap holds it for the current TU.  */
3260   bitmap headers;	/* Transitive set of direct imports, including
3261 			   self.  Used for macro visibility and
3262 			   priority.  */
3263 
3264   /* These objects point into the mmapped area, unless we're not doing
3265      that, or we got frozen or closed.  In those cases they point to
3266      buffers we own.  */
3267   bytes_in macro_defs;	/* Macro definitions.  */
3268   bytes_in macro_tbl;	/* Macro table.  */
3269 
3270   /* Location remapping.  first->ordinary, second->macro.  */
3271   range_t GTY((skip)) loc_deltas;
3272 
3273   unsigned current;	/* Section currently being loaded.  */
3274   unsigned remaining;	/* Number of lazy sections yet to read.  */
3275   unsigned lru;		/* An LRU counter.  */
3276 
3277  public:
3278   slurping (elf_in *);
3279   ~slurping ();
3280 
3281  public:
3282   /* Close the ELF file, if it's open.  */
3283   void close ()
3284   {
3285     if (from)
3286       {
3287 	from->end ();
3288 	delete from;
3289 	from = NULL;
3290       }
3291   }
3292 
3293  public:
3294   void release_macros ();
3295 
3296  public:
3297   void alloc_remap (unsigned size)
3298   {
3299     gcc_assert (!remap);
3300     vec_safe_reserve (remap, size);
3301     for (unsigned ix = size; ix--;)
3302       remap->quick_push (0);
3303   }
3304   unsigned remap_module (unsigned owner)
3305   {
3306     if (owner < remap->length ())
3307       return (*remap)[owner] >> 1;
3308     return 0;
3309   }
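  /* Illustration of the remap encoding: an import remapped to module
     number 3 that was imported directly is stored as (3 << 1) | 1,
     i.e. 7; remap_module shifts the direct bit back out to recover
     3.  */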
3310 
3311  public:
3312   /* GC allocation.  But we must explicitly delete it.   */
3313   static void *operator new (size_t x)
3314   {
3315     return ggc_alloc_atomic (x);
3316   }
3317   static void operator delete (void *p)
3318   {
3319     ggc_free (p);
3320   }
3321 };
3322 
3323 slurping::slurping (elf_in *from)
3324   : remap (NULL), from (from),
3325     headers (BITMAP_GGC_ALLOC ()), macro_defs (), macro_tbl (),
3326     loc_deltas (0, 0),
3327     current (~0u), remaining (0), lru (0)
3328 {
3329 }
3330 
3331 slurping::~slurping ()
3332 {
3333   vec_free (remap);
3334   remap = NULL;
3335   release_macros ();
3336   close ();
3337 }
3338 
3339 void slurping::release_macros ()
3340 {
3341   if (macro_defs.size)
3342     elf_in::release (from, macro_defs);
3343   if (macro_tbl.size)
3344     elf_in::release (from, macro_tbl);
3345 }
3346 
3347 /* Information about location maps used during writing.  */
3348 
3349 struct location_map_info {
3350   range_t num_maps;
3351 
3352   unsigned max_range;
3353 };
3354 
3355 /* Flags for extensions that end up being streamed.  */
3356 
3357 enum streamed_extensions {
3358   SE_OPENMP = 1 << 0,
3359   SE_BITS = 1
3360 };
3361 
3362 /********************************************************************/
3363 struct module_state_config;
3364 
3365 /* Increasing levels of loadedness.  */
3366 enum module_loadedness {
3367   ML_NONE,		/* Not loaded.  */
3368   ML_CONFIG,		/* Config loaded.  */
3369   ML_PREPROCESSOR,	/* Preprocessor loaded.  */
3370   ML_LANGUAGE,		/* Language loaded.  */
3371 };
3372 
3373 /* Increasing levels of directness (toplevel) of import.  */
3374 enum module_directness {
3375   MD_NONE,  		/* Not direct.  */
3376   MD_PARTITION_DIRECT,	/* Direct import of a partition.  */
3377   MD_DIRECT,		/* Direct import.  */
3378   MD_PURVIEW_DIRECT,	/* direct import in purview.  */
3379 };
3380 
3381 /* State of a particular module. */
3382 
3383 class GTY((chain_next ("%h.parent"), for_user)) module_state {
3384  public:
3385   /* We always import & export ourselves.  */
3386   bitmap imports;	/* Transitive modules we're importing.  */
3387   bitmap exports;	/* Subset of that, that we're exporting.  */
3388 
3389   module_state *parent;
3390   tree name;		/* Name of the module.  */
3391 
3392   slurping *slurp;	/* Data for loading.  */
3393 
3394   const char *flatname;	/* Flatname of module.  */
3395   char *filename;	/* CMI Filename */
3396 
3397   /* Indices into the entity_ary.  */
3398   unsigned entity_lwm;
3399   unsigned entity_num;
3400 
3401   /* Location ranges for this module.  adhoc-locs are decomposed, so
3402      don't have a range.  */
3403   loc_range_t GTY((skip)) ordinary_locs;
3404   loc_range_t GTY((skip)) macro_locs;
3405 
3406   /* LOC is first set to the importing location.  When initially
3407      loaded it refers to a module loc whose parent is the importing
3408      location.  */
3409   location_t loc; 	/* Location referring to module itself.  */
3410   unsigned crc;		/* CRC we saw reading it in. */
3411 
3412   unsigned mod;		/* Module owner number.  */
3413   unsigned remap;	/* Remapping during writing.  */
3414 
3415   unsigned short subst;	/* Mangle subst if !0.  */
3416 
3417   /* How loaded this module is.  */
3418   enum module_loadedness loadedness : 2;
3419 
3420   bool module_p : 1;    /* /The/ module of this TU.  */
3421   bool header_p : 1;	/* Is a header unit.  */
3422   bool interface_p : 1; /* An interface.  */
3423   bool partition_p : 1; /* A partition.  */
3424 
3425   /* How directly this module is imported.  */
3426   enum module_directness directness : 2;
3427 
3428   bool exported_p : 1;	/* directness != MD_NONE && exported.  */
3429   bool cmi_noted_p : 1; /* We've told the user about the CMI, don't
3430 			   do it again  */
3431   bool call_init_p : 1; /* This module's global initializer needs
3432 			   calling.  */
3433   bool inform_cmi_p : 1; /* Inform of a read/write.  */
3434   bool visited_p : 1;    /* A walk-once flag. */
3435   /* Record extensions emitted or permitted.  */
3436   unsigned extensions : SE_BITS;
3437   /* 14 bits used, 2 bits remain  */
3438 
3439  public:
3440   module_state (tree name, module_state *, bool);
3441   ~module_state ();
3442 
3443  public:
3444   void release ()
3445   {
3446     imports = exports = NULL;
3447     slurped ();
3448   }
3449   void slurped ()
3450   {
3451     delete slurp;
3452     slurp = NULL;
3453   }
3454   elf_in *from () const
3455   {
3456     return slurp->from;
3457   }
3458 
3459  public:
3460   /* Kind of this module.  */
3461   bool is_module () const
3462   {
3463     return module_p;
3464   }
3465   bool is_header () const
3466   {
3467     return header_p;
3468   }
3469   bool is_interface () const
3470   {
3471     return interface_p;
3472   }
3473   bool is_partition () const
3474   {
3475     return partition_p;
3476   }
3477 
3478   /* How this module is used in the current TU.  */
3479   bool is_exported () const
3480   {
3481     return exported_p;
3482   }
3483   bool is_direct () const
3484   {
3485     return directness >= MD_DIRECT;
3486   }
3487   bool is_purview_direct () const
3488   {
3489     return directness == MD_PURVIEW_DIRECT;
3490   }
3491   bool is_partition_direct () const
3492   {
3493     return directness == MD_PARTITION_DIRECT;
3494   }
3495 
3496  public:
3497   /* Is this a real module?  */
3498   bool has_location () const
3499   {
3500     return loc != UNKNOWN_LOCATION;
3501   }
3502 
3503  public:
3504   bool check_not_purview (location_t loc);
3505 
3506  public:
3507   void mangle (bool include_partition);
3508 
3509  public:
3510   void set_import (module_state const *, bool is_export);
3511   void announce (const char *) const;
3512 
3513  public:
3514   /* Read and write module.  */
3515   void write (elf_out *to, cpp_reader *);
3516   bool read_initial (cpp_reader *);
3517   bool read_preprocessor (bool);
3518   bool read_language (bool);
3519 
3520  public:
3521   /* Read a section.  */
3522   bool load_section (unsigned snum, binding_slot *mslot);
3523   /* Lazily read a section.  */
3524   bool lazy_load (unsigned index, binding_slot *mslot);
3525 
3526  public:
3527   /* Juggle a limited number of file numbers.  */
3528   static void freeze_an_elf ();
3529   bool maybe_defrost ();
3530 
3531  public:
3532   void maybe_completed_reading ();
3533   bool check_read (bool outermost, bool ok);
3534 
3535  private:
3536   /* The README, for human consumption.  */
3537   void write_readme (elf_out *to, cpp_reader *,
3538 		     const char *dialect, unsigned extensions);
3539   void write_env (elf_out *to);
3540 
3541  private:
3542   /* Import tables. */
3543   void write_imports (bytes_out &cfg, bool direct);
3544   unsigned read_imports (bytes_in &cfg, cpp_reader *, line_maps *maps);
3545 
3546  private:
3547   void write_imports (elf_out *to, unsigned *crc_ptr);
3548   bool read_imports (cpp_reader *, line_maps *);
3549 
3550  private:
3551   void write_partitions (elf_out *to, unsigned, unsigned *crc_ptr);
3552   bool read_partitions (unsigned);
3553 
3554  private:
3555   void write_config (elf_out *to, struct module_state_config &, unsigned crc);
3556   bool read_config (struct module_state_config &);
3557   static void write_counts (elf_out *to, unsigned [], unsigned *crc_ptr);
3558   bool read_counts (unsigned []);
3559 
3560  public:
3561   void note_cmi_name ();
3562 
3563  private:
3564   static unsigned write_bindings (elf_out *to, vec<depset *> depsets,
3565 				  unsigned *crc_ptr);
3566   bool read_bindings (unsigned count, unsigned lwm, unsigned hwm);
3567 
3568   static void write_namespace (bytes_out &sec, depset *ns_dep);
3569   tree read_namespace (bytes_in &sec);
3570 
3571   void write_namespaces (elf_out *to, vec<depset *> spaces,
3572 			 unsigned, unsigned *crc_ptr);
3573   bool read_namespaces (unsigned);
3574 
3575   void intercluster_seed (trees_out &sec, unsigned index, depset *dep);
3576   unsigned write_cluster (elf_out *to, depset *depsets[], unsigned size,
3577 			  depset::hash &, unsigned *counts, unsigned *crc_ptr);
3578   bool read_cluster (unsigned snum);
3579 
3580  private:
3581   unsigned write_inits (elf_out *to, depset::hash &, unsigned *crc_ptr);
3582   bool read_inits (unsigned count);
3583 
3584  private:
3585   unsigned write_pendings (elf_out *to, vec<depset *> depsets,
3586 			   depset::hash &, unsigned *crc_ptr);
3587   bool read_pendings (unsigned count);
3588 
3589  private:
3590   void write_entities (elf_out *to, vec<depset *> depsets,
3591 		       unsigned count, unsigned *crc_ptr);
3592   bool read_entities (unsigned count, unsigned lwm, unsigned hwm);
3593 
3594  private:
3595   location_map_info write_prepare_maps (module_state_config *);
3596   bool read_prepare_maps (const module_state_config *);
3597 
3598   void write_ordinary_maps (elf_out *to, location_map_info &,
3599 			    module_state_config *, bool, unsigned *crc_ptr);
3600   bool read_ordinary_maps ();
3601   void write_macro_maps (elf_out *to, location_map_info &,
3602 			 module_state_config *, unsigned *crc_ptr);
3603   bool read_macro_maps ();
3604 
3605  private:
3606   void write_define (bytes_out &, const cpp_macro *, bool located = true);
3607   cpp_macro *read_define (bytes_in &, cpp_reader *, bool located = true) const;
3608   unsigned write_macros (elf_out *to, cpp_reader *, unsigned *crc_ptr);
3609   bool read_macros ();
3610   void install_macros ();
3611 
3612  public:
3613   void import_macros ();
3614 
3615  public:
3616   static void undef_macro (cpp_reader *, location_t, cpp_hashnode *);
3617   static cpp_macro *deferred_macro (cpp_reader *, location_t, cpp_hashnode *);
3618 
3619  public:
3620   static void write_location (bytes_out &, location_t);
3621   location_t read_location (bytes_in &) const;
3622 
3623  public:
3624   void set_flatname ();
3625   const char *get_flatname () const
3626   {
3627     return flatname;
3628   }
3629   location_t imported_from () const;
3630 
3631  public:
3632   void set_filename (const Cody::Packet &);
3633   bool do_import (cpp_reader *, bool outermost);
3634 };
3635 
3636 /* Hash module state by name.  This cannot be a member of
3637    module_state, because of GTY restrictions.  We never delete from
3638    the hash table, but ggc_ptr_hash doesn't support that
3639    simplification.  */
3640 
3641 struct module_state_hash : ggc_ptr_hash<module_state> {
3642   typedef std::pair<tree,uintptr_t> compare_type; /* {name,parent} */
3643 
3644   static inline hashval_t hash (const value_type m);
3645   static inline hashval_t hash (const compare_type &n);
3646   static inline bool equal (const value_type existing,
3647 			    const compare_type &candidate);
3648 };
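
/* Illustrative lookup sketch (not the actual code in this file): a
   module_state is found or created in the modules_hash keyed by
   {name, parent-pointer-with-partition-bit}, using the hashers
   above.  "name", "parent" and "partition" are hypothetical locals.

     module_state_hash::compare_type ct (name,
					 uintptr_t (parent) | partition);
     hashval_t hv = module_state_hash::hash (ct);
     module_state **slot
       = modules_hash->find_slot_with_hash (ct, hv, INSERT);
 */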
3649 
3650 module_state::module_state (tree name, module_state *parent, bool partition)
3651   : imports (BITMAP_GGC_ALLOC ()), exports (BITMAP_GGC_ALLOC ()),
3652     parent (parent), name (name), slurp (NULL),
3653     flatname (NULL), filename (NULL),
3654     entity_lwm (~0u >> 1), entity_num (0),
3655     ordinary_locs (0, 0), macro_locs (0, 0),
3656     loc (UNKNOWN_LOCATION),
3657     crc (0), mod (MODULE_UNKNOWN), remap (0), subst (0)
3658 {
3659   loadedness = ML_NONE;
3660 
3661   module_p = header_p = interface_p = partition_p = false;
3662 
3663   directness = MD_NONE;
3664   exported_p = false;
3665 
3666   cmi_noted_p = false;
3667   call_init_p = false;
3668 
3669   partition_p = partition;
3670 
3671   inform_cmi_p = false;
3672   visited_p = false;
3673 
3674   extensions = 0;
3675   if (name && TREE_CODE (name) == STRING_CST)
3676     {
3677       header_p = true;
3678 
3679       const char *string = TREE_STRING_POINTER (name);
3680       gcc_checking_assert (string[0] == '.'
3681 			   ? IS_DIR_SEPARATOR (string[1])
3682 			   : IS_ABSOLUTE_PATH (string));
3683     }
3684 
3685   gcc_checking_assert (!(parent && header_p));
3686 }
3687 
3688 module_state::~module_state ()
3689 {
3690   release ();
3691 }
3692 
3693 /* Hash module state.  */
3694 static hashval_t
3695 module_name_hash (const_tree name)
3696 {
3697   if (TREE_CODE (name) == STRING_CST)
3698     return htab_hash_string (TREE_STRING_POINTER (name));
3699   else
3700     return IDENTIFIER_HASH_VALUE (name);
3701 }
3702 
3703 hashval_t
3704 module_state_hash::hash (const value_type m)
3705 {
3706   hashval_t ph = pointer_hash<void>::hash
3707     (reinterpret_cast<void *> (reinterpret_cast<uintptr_t> (m->parent)
3708 			       | m->is_partition ()));
3709   hashval_t nh = module_name_hash (m->name);
3710   return iterative_hash_hashval_t (ph, nh);
3711 }
3712 
3713 /* Hash a name.  */
3714 hashval_t
3715 module_state_hash::hash (const compare_type &c)
3716 {
3717   hashval_t ph = pointer_hash<void>::hash (reinterpret_cast<void *> (c.second));
3718   hashval_t nh = module_name_hash (c.first);
3719 
3720   return iterative_hash_hashval_t (ph, nh);
3721 }
3722 
3723 bool
3724 module_state_hash::equal (const value_type existing,
3725 			  const compare_type &candidate)
3726 {
3727   uintptr_t ep = (reinterpret_cast<uintptr_t> (existing->parent)
3728 		  | existing->is_partition ());
3729   if (ep != candidate.second)
3730     return false;
3731 
3732   /* Identifier comparison is by pointer.  If the string_csts happen
3733      to be the same object, then they're equal too.  */
3734   if (existing->name == candidate.first)
3735     return true;
3736 
3737   /* If either is not a string cst, they can't be equal.  */
3738   if (TREE_CODE (candidate.first) != STRING_CST
3739       || TREE_CODE (existing->name) != STRING_CST)
3740     return false;
3741 
3742   /* String equality.  */
3743   if (TREE_STRING_LENGTH (existing->name)
3744       == TREE_STRING_LENGTH (candidate.first)
3745       && !memcmp (TREE_STRING_POINTER (existing->name),
3746 		  TREE_STRING_POINTER (candidate.first),
3747 		  TREE_STRING_LENGTH (existing->name)))
3748     return true;
3749 
3750   return false;
3751 }
3752 
3753 /********************************************************************/
3754 /* Global state */
3755 
3756 /* Mapper name.  */
3757 static const char *module_mapper_name;
3758 
3759 /* Deferred import queue (FIFO).  */
3760 static vec<module_state *, va_heap, vl_embed> *pending_imports;
3761 
3762 /* CMI repository path and workspace.  */
3763 static char *cmi_repo;
3764 static size_t cmi_repo_length;
3765 static char *cmi_path;
3766 static size_t cmi_path_alloc;
3767 
3768 /* Count of available and loaded clusters.  */
3769 static unsigned available_clusters;
3770 static unsigned loaded_clusters;
3771 
3772 /* What the current TU is.  */
3773 unsigned module_kind;
3774 
3775 /* Number of global init calls needed.  */
3776 unsigned num_init_calls_needed = 0;
3777 
3778 /* Global trees.  */
3779 static const std::pair<tree *, unsigned> global_tree_arys[] =
3780   {
3781     std::pair<tree *, unsigned> (sizetype_tab, stk_type_kind_last),
3782     std::pair<tree *, unsigned> (integer_types, itk_none),
3783     std::pair<tree *, unsigned> (global_trees, TI_MODULE_HWM),
3784     std::pair<tree *, unsigned> (c_global_trees, CTI_MODULE_HWM),
3785     std::pair<tree *, unsigned> (cp_global_trees, CPTI_MODULE_HWM),
3786     std::pair<tree *, unsigned> (NULL, 0)
3787   };
3788 static GTY(()) vec<tree, va_gc> *fixed_trees;
3789 static unsigned global_crc;
3790 
3791 /* Lazy loading can open many files concurrently, there are
3792    per-process limits on that.  We pay attention to the process limit,
3793    and attempt to increase it when we run out.  Otherwise we use an
3794    LRU scheme to figure out who to flush.  Note that if the import
3795    graph /depth/ exceeds lazy_limit, we'll exceed the limit.  */
3796 static unsigned lazy_lru;  /* LRU counter.  */
3797 static unsigned lazy_open; /* Number of open modules.  */
3798 static unsigned lazy_limit; /* Current limit of open modules.  */
3799 static unsigned lazy_hard_limit; /* Hard limit on open modules.  */
3800 /* Account for source, assembler and dump files & directory searches.
3801    We don't keep the source files open, so we don't have to account
3802    for #include depth.  I think dump files are opened and closed per
3803    pass, but I could be wrong.  */
3804 #define LAZY_HEADROOM 15 /* File descriptor headroom.  */
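
/* A minimal sketch (illustrative only, not the code used elsewhere in
   this file) of how a hard open-module limit could be derived from
   the process file-descriptor limit, assuming POSIX getrlimit and the
   LAZY_HEADROOM above:

     struct rlimit rl;
     if (!getrlimit (RLIMIT_NOFILE, &rl)
	 && rl.rlim_cur != RLIM_INFINITY
	 && rl.rlim_cur > LAZY_HEADROOM)
       lazy_hard_limit = unsigned (rl.rlim_cur) - LAZY_HEADROOM;
 */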
3805 
3806 /* Vector of module state.  Indexed by OWNER.  Has at least 2 slots.  */
3807 static GTY(()) vec<module_state *, va_gc> *modules;
3808 
3809 /* Hash of module state, findable by {name, parent}. */
3810 static GTY(()) hash_table<module_state_hash> *modules_hash;
3811 
3812 /* Map of imported entities.  We map DECL_UID to index of entity
3813    vector.  */
3814 typedef hash_map<unsigned/*UID*/, unsigned/*index*/,
3815 		 simple_hashmap_traits<int_hash<unsigned,0>, unsigned>
3816 		 > entity_map_t;
3817 static entity_map_t *entity_map;
3818 /* Doesn't need GTYing, because any tree referenced here is also
3819    findable via the symbol table, the specialization table, or the
3820    return type of a reachable function.  */
3821 static vec<binding_slot, va_heap, vl_embed> *entity_ary;
3822 
3823 /* Member entities of imported classes that are defined in this TU.
3824    These are where the entity's context is not from the current TU.
3825    We need to emit the definition (but not the enclosing class).
3826 
3827    We could find these by walking ALL the imported classes to which we
3828    could provide a member definition.  But that's expensive,
3829    especially when you consider lazy implicit member declarations,
3830    which could be ANY imported class.  */
3831 static GTY(()) vec<tree, va_gc> *class_members;
3832 
3833 /* The same problem exists for class template partial
3834    specializations.  Now that we have constraints, the invariant of
3835    expecting them in the instantiation table no longer holds.  One of
3836    the constrained partial specializations will be there, but the
3837    others not so much.  It's not even an unconstrained partial
3838    specialization in the table :(  so any partial template declaration
3839    is added to this list too.  */
3840 static GTY(()) vec<tree, va_gc> *partial_specializations;
3841 
3842 /********************************************************************/
3843 
3844 /* Our module mapper (created lazily).  */
3845 module_client *mapper;
3846 
3847 static module_client *make_mapper (location_t loc);
3848 inline module_client *get_mapper (location_t loc)
3849 {
3850   auto *res = mapper;
3851   if (!res)
3852     res = make_mapper (loc);
3853   return res;
3854 }
3855 
3856 /********************************************************************/
3857 static tree
3858 get_clone_target (tree decl)
3859 {
3860   tree target;
3861 
3862   if (TREE_CODE (decl) == TEMPLATE_DECL)
3863     {
3864       tree res_orig = DECL_CLONED_FUNCTION (DECL_TEMPLATE_RESULT (decl));
3865 
3866       target = DECL_TI_TEMPLATE (res_orig);
3867     }
3868   else
3869     target = DECL_CLONED_FUNCTION (decl);
3870 
3871   gcc_checking_assert (DECL_MAYBE_IN_CHARGE_CDTOR_P (target));
3872 
3873   return target;
3874 }
3875 
3876 /* Like FOR_EACH_CLONE, but will walk cloned templates.  */
3877 #define FOR_EVERY_CLONE(CLONE, FN)			\
3878   if (!DECL_MAYBE_IN_CHARGE_CDTOR_P (FN));		\
3879   else							\
3880     for (CLONE = DECL_CHAIN (FN);			\
3881 	 CLONE && DECL_CLONED_FUNCTION_P (CLONE);	\
3882 	 CLONE = DECL_CHAIN (CLONE))
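
/* Illustrative usage (a sketch; FN and note_clone are hypothetical):
   walk every clone of a maybe-in-charge ctor/dtor FN, including
   clones of cloned templates.

     tree clone;
     FOR_EVERY_CLONE (clone, fn)
       note_clone (clone);
 */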
3883 
3884 /* It'd be nice if USE_TEMPLATE were a field of template_info:
3885    (a) it'd solve the enum case dealt with below,
3886    (b) both class templates and decl templates would store this in the
3887    same place,
3888    (c) this function wouldn't need the by-ref arg, which is annoying.  */
3889 
3890 static tree
3891 node_template_info (tree decl, int &use)
3892 {
3893   tree ti = NULL_TREE;
3894   int use_tpl = -1;
3895   if (DECL_IMPLICIT_TYPEDEF_P (decl))
3896     {
3897       tree type = TREE_TYPE (decl);
3898 
3899       ti = TYPE_TEMPLATE_INFO (type);
3900       if (ti)
3901 	{
3902 	  if (TYPE_LANG_SPECIFIC (type))
3903 	    use_tpl = CLASSTYPE_USE_TEMPLATE (type);
3904 	  else
3905 	    {
3906 	      /* An enum, where we don't explicitly encode use_tpl.
3907 		 If the containing context (a type or a function) is
3908 		 an ({im,ex}plicit) instantiation, then this is too.
3909 		 If it's a partial or explicit specialization, then
3910 		 this is not.  */
3911 	      tree ctx = CP_DECL_CONTEXT (decl);
3912 	      if (TYPE_P (ctx))
3913 		ctx = TYPE_NAME (ctx);
3914 	      node_template_info (ctx, use);
3915 	      use_tpl = use != 2 ? use : 0;
3916 	    }
3917 	}
3918     }
3919   else if (DECL_LANG_SPECIFIC (decl)
3920 	   && (TREE_CODE (decl) == VAR_DECL
3921 	       || TREE_CODE (decl) == TYPE_DECL
3922 	       || TREE_CODE (decl) == FUNCTION_DECL
3923 	       || TREE_CODE (decl) == FIELD_DECL
3924 	       || TREE_CODE (decl) == TEMPLATE_DECL))
3925     {
3926       use_tpl = DECL_USE_TEMPLATE (decl);
3927       ti = DECL_TEMPLATE_INFO (decl);
3928     }
3929 
3930   use = use_tpl;
3931   return ti;
3932 }
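
/* Illustrative call (a sketch): USE receives the decl's
   DECL_USE_TEMPLATE / CLASSTYPE_USE_TEMPLATE value, or -1 when there
   is none, and the return value is the template_info, if any.

     int use;
     tree ti = node_template_info (decl, use);
 */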
3933 
3934 /* Find the index in entity_ary for an imported DECL.  It should
3935    always be there, but bugs can cause it to be missing, and that can
3936    crash the crash reporting -- let's not do that!  When streaming
3937    out we place entities from this module there too -- with negated
3938    indices.  */
3939 
3940 static unsigned
3941 import_entity_index (tree decl, bool null_ok = false)
3942 {
3943   if (unsigned *slot = entity_map->get (DECL_UID (decl)))
3944     return *slot;
3945 
3946   gcc_checking_assert (null_ok);
3947   return ~(~0u >> 1);
3948 }
3949 
3950 /* Find the module for an imported entity at INDEX in the entity ary.
3951    There must be one.  */
3952 
3953 static module_state *
3954 import_entity_module (unsigned index)
3955 {
3956   if (index > ~(~0u >> 1))
3957     /* This is an index for an exported entity.  */
3958     return (*modules)[0];
3959 
3960   /* Do not include the current TU (not an off-by-one error).  */
3961   unsigned pos = 1;
3962   unsigned len = modules->length () - pos;
3963   while (len)
3964     {
3965       unsigned half = len / 2;
3966       module_state *probe = (*modules)[pos + half];
3967       if (index < probe->entity_lwm)
3968 	len = half;
3969       else if (index < probe->entity_lwm + probe->entity_num)
3970 	return probe;
3971       else
3972 	{
3973 	  pos += half + 1;
3974 	  len = len - (half + 1);
3975 	}
3976     }
3977   gcc_unreachable ();
3978 }
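
/* Worked example (illustrative numbers): if modules[1] has
   entity_lwm 0, entity_num 10 and modules[2] has entity_lwm 10,
   entity_num 5, then flat index 12 binary-searches to modules[2]
   (10 <= 12 < 15).  Indices above ~(~0u >> 1) (top bit set) are the
   negated indices of this module's own entities, noted while
   streaming out; they resolve to slot zero, the current TU.  */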
3979 
3980 
3981 /********************************************************************/
3982 /* A dumping machinery.  */
3983 
3984 class dumper {
3985 public:
3986   enum {
3987     LOCATION = TDF_LINENO,  /* -lineno:Source location streaming.  */
3988     DEPEND = TDF_GRAPH,	/* -graph:Dependency graph construction.  */
3989     CLUSTER = TDF_BLOCKS,   /* -blocks:Clusters.  */
3990     TREE = TDF_UID, 	/* -uid:Tree streaming.  */
3991     MERGE = TDF_ALIAS,	/* -alias:Mergeable Entities.  */
3992     ELF = TDF_ASMNAME,	/* -asmname:Elf data.  */
3993     MACRO = TDF_VOPS	/* -vops:Macros.  */
3994   };
3995 
3996 private:
3997   struct impl {
3998     typedef vec<module_state *, va_heap, vl_embed> stack_t;
3999 
4000     FILE *stream;	/* Dump stream.  */
4001     unsigned indent; 	/* Local indentation.  */
4002     bool bol; 		/* Beginning of line.  */
4003     stack_t stack;	/* Trailing array of module_state.  */
4004 
4005     bool nested_name (tree);  /* Dump a name following DECL_CONTEXT.  */
4006   };
4007 
4008 public:
4009   /* The dumper.  */
4010   impl *dumps;
4011   dump_flags_t flags;
4012 
4013 public:
4014   /* Push/pop module state dumping.  */
4015   unsigned push (module_state *);
4016   void pop (unsigned);
4017 
4018 public:
4019   /* Change local indentation.  */
4020   void indent ()
4021   {
4022     if (dumps)
4023       dumps->indent++;
4024   }
4025   void outdent ()
4026   {
4027     if (dumps)
4028       {
4029 	gcc_checking_assert (dumps->indent);
4030 	dumps->indent--;
4031       }
4032   }
4033 
4034 public:
4035   /* Is dump enabled?  */
4036   bool operator () (int mask = 0)
4037   {
4038     if (!dumps || !dumps->stream)
4039       return false;
4040     if (mask && !(mask & flags))
4041       return false;
4042     return true;
4043   }
4044   /* Dump some information.  */
4045   bool operator () (const char *, ...);
4046 };
4047 
4048 /* The dumper.  */
4049 static dumper dump = {0, dump_flags_t (0)};
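
/* Illustrative usage (a sketch of the idiom used throughout this
   file; "state" and "snum" are hypothetical locals): push a module to
   scope the dump, guard the possibly expensive formatting with the
   cheap enabled check, then pop.

     unsigned n = dump.push (state);
     dump () && dump ("Reading %M section %u", state, snum);
     dump.pop (n);
 */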
4050 
4051 /* Push to dumping M.  Return previous indentation level.  */
4052 
4053 unsigned
4054 dumper::push (module_state *m)
4055 {
4056   FILE *stream = NULL;
4057   if (!dumps || !dumps->stack.length ())
4058     {
4059       stream = dump_begin (module_dump_id, &flags);
4060       if (!stream)
4061 	return 0;
4062     }
4063 
4064   if (!dumps || !dumps->stack.space (1))
4065     {
4066       /* Create or extend the dump implementor.  */
4067       unsigned current = dumps ? dumps->stack.length () : 0;
4068       unsigned count = current ? current * 2 : EXPERIMENT (1, 20);
4069       size_t alloc = (offsetof (impl, stack)
4070 		      + impl::stack_t::embedded_size (count));
4071       dumps = XRESIZEVAR (impl, dumps, alloc);
4072       dumps->stack.embedded_init (count, current);
4073     }
4074   if (stream)
4075     dumps->stream = stream;
4076 
4077   unsigned n = dumps->indent;
4078   dumps->indent = 0;
4079   dumps->bol = true;
4080   dumps->stack.quick_push (m);
4081   if (m)
4082     {
4083       module_state *from = NULL;
4084 
4085       if (dumps->stack.length () > 1)
4086 	from = dumps->stack[dumps->stack.length () - 2];
4087       else
4088 	dump ("");
4089       dump (from ? "Starting module %M (from %M)"
4090 	    : "Starting module %M", m, from);
4091     }
4092 
4093   return n;
4094 }
4095 
4096 /* Pop from dumping.  Restore indentation to N.  */
4097 
4098 void dumper::pop (unsigned n)
4099 {
4100   if (!dumps)
4101     return;
4102 
4103   gcc_checking_assert (dump () && !dumps->indent);
4104   if (module_state *m = dumps->stack[dumps->stack.length () - 1])
4105     {
4106       module_state *from = (dumps->stack.length () > 1
4107 			    ? dumps->stack[dumps->stack.length () - 2] : NULL);
4108       dump (from ? "Finishing module %M (returning to %M)"
4109 	    : "Finishing module %M", m, from);
4110     }
4111   dumps->stack.pop ();
4112   dumps->indent = n;
4113   if (!dumps->stack.length ())
4114     {
4115       dump_end (module_dump_id, dumps->stream);
4116       dumps->stream = NULL;
4117     }
4118 }
4119 
4120 /* Dump a nested name for arbitrary tree T.  Sometimes it won't have a
4121    name.  */
4122 
4123 bool
4124 dumper::impl::nested_name (tree t)
4125 {
4126   tree ti = NULL_TREE;
4127   int origin = -1;
4128   tree name = NULL_TREE;
4129 
4130   if (t && TREE_CODE (t) == TREE_BINFO)
4131     t = BINFO_TYPE (t);
4132 
4133   if (t && TYPE_P (t))
4134     t = TYPE_NAME (t);
4135 
4136   if (t && DECL_P (t))
4137     {
4138       if (t == global_namespace || DECL_TEMPLATE_PARM_P (t))
4139 	;
4140       else if (tree ctx = DECL_CONTEXT (t))
4141 	if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
4142 	    || nested_name (ctx))
4143 	  fputs ("::", stream);
4144 
4145       int use_tpl;
4146       ti = node_template_info (t, use_tpl);
4147       if (ti && TREE_CODE (TI_TEMPLATE (ti)) == TEMPLATE_DECL
4148 	  && (DECL_TEMPLATE_RESULT (TI_TEMPLATE (ti)) == t))
4149 	t = TI_TEMPLATE (ti);
4150       tree not_tmpl = t;
4151       if (TREE_CODE (t) == TEMPLATE_DECL)
4152 	{
4153 	  fputs ("template ", stream);
4154 	  not_tmpl = DECL_TEMPLATE_RESULT (t);
4155 	}
4156 
4157       if (not_tmpl
4158 	  && DECL_P (not_tmpl)
4159 	  && DECL_LANG_SPECIFIC (not_tmpl)
4160 	  && DECL_MODULE_IMPORT_P (not_tmpl))
4161 	{
4162 	  /* We need to be careful here, so as not to explode on
4163 	     inconsistent data -- we're probably debugging, because
4164 	     Something Is Wrong.  */
4165 	  unsigned index = import_entity_index (t, true);
4166 	  if (!(index & ~(~0u >> 1)))
4167 	    origin = import_entity_module (index)->mod;
4168 	  else if (index > ~(~0u >> 1))
4169 	    /* An imported partition member that we're emitting.  */
4170 	    origin = 0;
4171 	  else
4172 	    origin = -2;
4173 	}
4174 
4175       name = DECL_NAME (t) ? DECL_NAME (t)
4176 	: HAS_DECL_ASSEMBLER_NAME_P (t) ? DECL_ASSEMBLER_NAME_RAW (t)
4177 	: NULL_TREE;
4178     }
4179   else
4180     name = t;
4181 
4182   if (name)
4183     switch (TREE_CODE (name))
4184       {
4185       default:
4186 	fputs ("#unnamed#", stream);
4187 	break;
4188 
4189       case IDENTIFIER_NODE:
4190 	fwrite (IDENTIFIER_POINTER (name), 1, IDENTIFIER_LENGTH (name), stream);
4191 	break;
4192 
4193       case INTEGER_CST:
4194 	print_hex (wi::to_wide (name), stream);
4195 	break;
4196 
4197       case STRING_CST:
4198 	/* If TREE_TYPE is NULL, this is a raw string.  */
4199 	fwrite (TREE_STRING_POINTER (name), 1,
4200 		TREE_STRING_LENGTH (name) - (TREE_TYPE (name) != NULL_TREE),
4201 		stream);
4202 	break;
4203       }
4204   else
4205     fputs ("#null#", stream);
4206 
4207   if (origin >= 0)
4208     {
4209       const module_state *module = (*modules)[origin];
4210       fprintf (stream, "@%s:%d", !module ? "" : !module->name ? "(unnamed)"
4211 	       : module->get_flatname (), origin);
4212     }
4213   else if (origin == -2)
4214     fprintf (stream, "@???");
4215 
4216   if (ti)
4217     {
4218       tree args = INNERMOST_TEMPLATE_ARGS (TI_ARGS (ti));
4219       fputs ("<", stream);
4220       if (args)
4221 	for (int ix = 0; ix != TREE_VEC_LENGTH (args); ix++)
4222 	  {
4223 	    if (ix)
4224 	      fputs (",", stream);
4225 	    nested_name (TREE_VEC_ELT (args, ix));
4226 	  }
4227       fputs (">", stream);
4228     }
4229 
4230   return true;
4231 }
4232 
4233 /* Formatted dumping.  If FORMAT begins with '+', do not emit a
4234    trailing newline.  (Normally one is appended.)
4235    Escapes:
4236       %C - tree_code
4237       %I - identifier
4238       %M - module_state
4239       %N - name -- DECL_NAME
4240       %P - context:name pair
4241       %R - unsigned:unsigned ratio
4242       %S - symbol -- DECL_ASSEMBLER_NAME
4243       %U - long unsigned
4244       %V - version
4245       --- the following are printf-like, but without its flexibility
4246       %c - character
      %d - decimal int
4247       %p - pointer
4248       %s - string
4249       %u - unsigned int
4250       %x - hex int
4251 
4252   We do not implement the printf modifiers.  */
4253 
4254 bool
4255 dumper::operator () (const char *format, ...)
4256 {
4257   if (!(*this) ())
4258     return false;
4259 
4260   bool no_nl = format[0] == '+';
4261   format += no_nl;
4262 
4263   if (dumps->bol)
4264     {
4265       /* Module import indent.  */
4266       if (unsigned depth = dumps->stack.length () - 1)
4267 	{
4268 	  const char *prefix = ">>>>";
4269 	  fprintf (dumps->stream, (depth <= strlen (prefix)
4270 				   ? &prefix[strlen (prefix) - depth]
4271 				   : ">.%d.>"), depth);
4272 	}
4273 
4274       /* Local indent.  */
4275       if (unsigned indent = dumps->indent)
4276 	{
4277 	  const char *prefix = "      ";
4278 	  fprintf (dumps->stream, (indent <= strlen (prefix)
4279 				   ? &prefix[strlen (prefix) - indent]
4280 				   : "  .%d.  "), indent);
4281 	}
4282       dumps->bol = false;
4283     }
4284 
4285   va_list args;
4286   va_start (args, format);
4287   while (const char *esc = strchr (format, '%'))
4288     {
4289       fwrite (format, 1, (size_t)(esc - format), dumps->stream);
4290       format = ++esc;
4291       switch (*format++)
4292 	{
4293 	default:
4294 	  gcc_unreachable ();
4295 
4296 	case '%':
4297 	  fputc ('%', dumps->stream);
4298 	  break;
4299 
4300 	case 'C': /* Code */
4301 	  {
4302 	    tree_code code = (tree_code)va_arg (args, unsigned);
4303 	    fputs (get_tree_code_name (code), dumps->stream);
4304 	  }
4305 	  break;
4306 
4307 	case 'I': /* Identifier.  */
4308 	  {
4309 	    tree t = va_arg (args, tree);
4310 	    dumps->nested_name (t);
4311 	  }
4312 	  break;
4313 
4314 	case 'M': /* Module. */
4315 	  {
4316 	    const char *str = "(none)";
4317 	    if (module_state *m = va_arg (args, module_state *))
4318 	      {
4319 		if (!m->has_location ())
4320 		  str = "(detached)";
4321 		else
4322 		  str = m->get_flatname ();
4323 	      }
4324 	    fputs (str, dumps->stream);
4325 	  }
4326 	  break;
4327 
4328 	case 'N': /* Name.  */
4329 	  {
4330 	    tree t = va_arg (args, tree);
4331 	    while (t && TREE_CODE (t) == OVERLOAD)
4332 	      t = OVL_FUNCTION (t);
4333 	    fputc ('\'', dumps->stream);
4334 	    dumps->nested_name (t);
4335 	    fputc ('\'', dumps->stream);
4336 	  }
4337 	  break;
4338 
4339 	case 'P': /* Pair.  */
4340 	  {
4341 	    tree ctx = va_arg (args, tree);
4342 	    tree name = va_arg (args, tree);
4343 	    fputc ('\'', dumps->stream);
4344 	    dumps->nested_name (ctx);
4345 	    if (ctx && ctx != global_namespace)
4346 	      fputs ("::", dumps->stream);
4347 	    dumps->nested_name (name);
4348 	    fputc ('\'', dumps->stream);
4349 	  }
4350 	  break;
4351 
4352 	case 'R': /* Ratio */
4353 	  {
4354 	    unsigned a = va_arg (args, unsigned);
4355 	    unsigned b = va_arg (args, unsigned);
4356 	    fprintf (dumps->stream, "%.1f", (float) a / (b + !b));
4357 	  }
4358 	  break;
4359 
4360 	case 'S': /* Symbol name */
4361 	  {
4362 	    tree t = va_arg (args, tree);
4363 	    if (t && TYPE_P (t))
4364 	      t = TYPE_NAME (t);
4365 	    if (t && HAS_DECL_ASSEMBLER_NAME_P (t)
4366 		&& DECL_ASSEMBLER_NAME_SET_P (t))
4367 	      {
4368 		fputc ('(', dumps->stream);
4369 		fputs (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t)),
4370 		       dumps->stream);
4371 		fputc (')', dumps->stream);
4372 	      }
4373 	  }
4374 	  break;
4375 
4376 	case 'U': /* long unsigned.  */
4377 	  {
4378 	    unsigned long u = va_arg (args, unsigned long);
4379 	    fprintf (dumps->stream, "%lu", u);
4380 	  }
4381 	  break;
4382 
4383 	case 'V': /* Version.  */
4384 	  {
4385 	    unsigned v = va_arg (args, unsigned);
4386 	    verstr_t string;
4387 
4388 	    version2string (v, string);
4389 	    fputs (string, dumps->stream);
4390 	  }
4391 	  break;
4392 
4393 	case 'c': /* Character.  */
4394 	  {
4395 	    int c = va_arg (args, int);
4396 	    fputc (c, dumps->stream);
4397 	  }
4398 	  break;
4399 
4400 	case 'd': /* Decimal Int.  */
4401 	  {
4402 	    int d = va_arg (args, int);
4403 	    fprintf (dumps->stream, "%d", d);
4404 	  }
4405 	  break;
4406 
4407 	case 'p': /* Pointer. */
4408 	  {
4409 	    void *p = va_arg (args, void *);
4410 	    fprintf (dumps->stream, "%p", p);
4411 	  }
4412 	  break;
4413 
4414 	case 's': /* String. */
4415 	  {
4416 	    const char *s = va_arg (args, char *);
4417 	    gcc_checking_assert (s);
4418 	    fputs (s, dumps->stream);
4419 	  }
4420 	  break;
4421 
4422 	case 'u': /* Unsigned.  */
4423 	  {
4424 	    unsigned u = va_arg (args, unsigned);
4425 	    fprintf (dumps->stream, "%u", u);
4426 	  }
4427 	  break;
4428 
4429 	case 'x': /* Hex. */
4430 	  {
4431 	    unsigned x = va_arg (args, unsigned);
4432 	    fprintf (dumps->stream, "%x", x);
4433 	  }
4434 	  break;
4435 	}
4436     }
4437   fputs (format, dumps->stream);
4438   va_end (args);
4439   if (!no_nl)
4440     {
4441       dumps->bol = true;
4442       fputc ('\n', dumps->stream);
4443     }
4444   return true;
4445 }
4446 
4447 struct note_def_cache_hasher : ggc_cache_ptr_hash<tree_node>
4448 {
4449   static int keep_cache_entry (tree t)
4450   {
4451     if (!CHECKING_P)
4452       /* GTY is unfortunately not clever enough to conditionalize
4453 	 this.  */
4454       gcc_unreachable ();
4455 
4456     if (ggc_marked_p (t))
4457       return -1;
4458 
4459     unsigned n = dump.push (NULL);
4460     /* This might or might not be an error.  We should note its
4461        dropping either way.  */
4462     dump () && dump ("Dropping %N from note_defs table", t);
4463     dump.pop (n);
4464 
4465     return 0;
4466   }
4467 };
4468 
4469 /* We should stream each definition at most once.
4470    This needs to be a cache because there are cases where a definition
4471    ends up being not retained, and we need to drop those so we don't
4472    get confused if memory is reallocated.  */
4473 typedef hash_table<note_def_cache_hasher> note_defs_table_t;
4474 static GTY((cache)) note_defs_table_t *note_defs;
4475 
4476 void
4477 trees_in::assert_definition (tree decl ATTRIBUTE_UNUSED,
4478 			     bool installing ATTRIBUTE_UNUSED)
4479 {
4480 #if CHECKING_P
4481   tree *slot = note_defs->find_slot (decl, installing ? INSERT : NO_INSERT);
4482   tree not_tmpl = STRIP_TEMPLATE (decl);
4483   if (installing)
4484     {
4485       /* We must be inserting for the first time.  */
4486       gcc_assert (!*slot);
4487       *slot = decl;
4488     }
4489   else
4490     /* If this is not the mergeable entity, it should not be in the
4491        table.  If it is a non-global-module mergeable entity, it
4492        should be in the table.  Global module entities could have been
4493        defined textually in the current TU and so might or might not
4494        be present.  */
4495     gcc_assert (!is_duplicate (decl)
4496 		? !slot
4497 		: (slot
4498 		   || !DECL_LANG_SPECIFIC (not_tmpl)
4499 		   || !DECL_MODULE_PURVIEW_P (not_tmpl)
4500 		   || (!DECL_MODULE_IMPORT_P (not_tmpl)
4501 		       && header_module_p ())));
4502 
4503   if (not_tmpl != decl)
4504     gcc_assert (!note_defs->find_slot (not_tmpl, NO_INSERT));
4505 #endif
4506 }
4507 
4508 void
4509 trees_out::assert_definition (tree decl ATTRIBUTE_UNUSED)
4510 {
4511 #if CHECKING_P
4512   tree *slot = note_defs->find_slot (decl, INSERT);
4513   gcc_assert (!*slot);
4514   *slot = decl;
4515   if (TREE_CODE (decl) == TEMPLATE_DECL)
4516     gcc_assert (!note_defs->find_slot (DECL_TEMPLATE_RESULT (decl), NO_INSERT));
4517 #endif
4518 }
4519 
4520 /********************************************************************/
4521 static bool
4522 noisy_p ()
4523 {
4524   if (quiet_flag)
4525     return false;
4526 
4527   pp_needs_newline (global_dc->printer) = true;
4528   diagnostic_set_last_function (global_dc, (diagnostic_info *) NULL);
4529 
4530   return true;
4531 }
4532 
4533 /* Set the CMI repo.  Strip a trailing '/'; a repo of "." becomes NULL.  */
4534 
4535 static void
4536 set_cmi_repo (const char *r)
4537 {
4538   XDELETEVEC (cmi_repo);
4539   XDELETEVEC (cmi_path);
4540   cmi_path_alloc = 0;
4541 
4542   cmi_repo = NULL;
4543   cmi_repo_length = 0;
4544 
4545   if (!r || !r[0])
4546     return;
4547 
4548   size_t len = strlen (r);
4549   cmi_repo = XNEWVEC (char, len + 1);
4550   memcpy (cmi_repo, r, len + 1);
4551 
4552   if (len > 1 && IS_DIR_SEPARATOR (cmi_repo[len-1]))
4553     len--;
4554   if (len == 1 && cmi_repo[0] == '.')
4555     len--;
4556   cmi_repo[len] = 0;
4557   cmi_repo_length = len;
4558 }
4559 
4560 /* TO is a repo-relative name.  Provide one that we may use from where
4561    we are.  */
4562 
4563 static const char *
4564 maybe_add_cmi_prefix (const char *to, size_t *len_p = NULL)
4565 {
4566   size_t len = len_p || cmi_repo_length ? strlen (to) : 0;
4567 
4568   if (cmi_repo_length && !IS_ABSOLUTE_PATH (to))
4569     {
4570       if (cmi_path_alloc < cmi_repo_length + len + 2)
4571 	{
4572 	  XDELETEVEC (cmi_path);
4573 	  cmi_path_alloc = cmi_repo_length + len * 2 + 2;
4574 	  cmi_path = XNEWVEC (char, cmi_path_alloc);
4575 
4576 	  memcpy (cmi_path, cmi_repo, cmi_repo_length);
4577 	  cmi_path[cmi_repo_length] = DIR_SEPARATOR;
4578 	}
4579 
4580       memcpy (&cmi_path[cmi_repo_length + 1], to, len + 1);
4581       len += cmi_repo_length + 1;
4582       to = cmi_path;
4583     }
4584 
4585   if (len_p)
4586     *len_p = len;
4587 
4588   return to;
4589 }
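
/* Example (illustrative; the repo name and CMI name are assumed):
   with cmi_repo set to "gcm.cache", the repo-relative name "foo.gcm"
   yields "gcm.cache/foo.gcm", built in the reusable cmi_path buffer;
   an absolute name is returned unchanged.

     size_t len;
     const char *path = maybe_add_cmi_prefix ("foo.gcm", &len);
 */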
4590 
4591 /* Try and create the directories of PATH.  */
4592 
4593 static void
4594 create_dirs (char *path)
4595 {
4596   /* Try and create the missing directories.  */
4597   for (char *base = path; *base; base++)
4598     if (IS_DIR_SEPARATOR (*base))
4599       {
4600 	char sep = *base;
4601 	*base = 0;
4602 	int failed = mkdir (path, S_IRWXU | S_IRWXG | S_IRWXO);
4603 	dump () && dump ("Mkdir ('%s') errno:=%u", path, failed ? errno : 0);
4604 	*base = sep;
4605 	if (failed
4606 	    /* Maybe racing with another creator (of a *different*
4607 	       module).  */
4608 	    && errno != EEXIST)
4609 	  break;
4610       }
4611 }
4612 
4613 /* Given a CLASSTYPE_DECL_LIST VALUE get the template friend decl,
4614    if that's what this is.  */
4615 
4616 static tree
4617 friend_from_decl_list (tree frnd)
4618 {
4619   tree res = frnd;
4620 
4621   if (TREE_CODE (frnd) != TEMPLATE_DECL)
4622     {
4623       tree tmpl = NULL_TREE;
4624       if (TYPE_P (frnd))
4625 	{
4626 	  res = TYPE_NAME (frnd);
4627 	  if (CLASSTYPE_TEMPLATE_INFO (frnd))
4628 	    tmpl = CLASSTYPE_TI_TEMPLATE (frnd);
4629 	}
4630       else if (DECL_TEMPLATE_INFO (frnd))
4631 	{
4632 	  tmpl = DECL_TI_TEMPLATE (frnd);
4633 	  if (TREE_CODE (tmpl) != TEMPLATE_DECL)
4634 	    tmpl = NULL_TREE;
4635 	}
4636 
4637       if (tmpl && DECL_TEMPLATE_RESULT (tmpl) == res)
4638 	res = tmpl;
4639     }
4640 
4641   return res;
4642 }
4643 
4644 static tree
4645 find_enum_member (tree ctx, tree name)
4646 {
4647   for (tree values = TYPE_VALUES (ctx);
4648        values; values = TREE_CHAIN (values))
4649     if (DECL_NAME (TREE_VALUE (values)) == name)
4650       return TREE_VALUE (values);
4651 
4652   return NULL_TREE;
4653 }
4654 
4655 /********************************************************************/
4656 /* Instrumentation gathered writing bytes.  */
4657 
4658 void
4659 bytes_out::instrument ()
4660 {
4661   dump ("Wrote %u bytes in %u blocks", lengths[3], spans[3]);
4662   dump ("Wrote %u bits in %u bytes", lengths[0] + lengths[1], lengths[2]);
4663   for (unsigned ix = 0; ix < 2; ix++)
4664     dump ("  %u %s spans of %R bits", spans[ix],
4665 	  ix ? "one" : "zero", lengths[ix], spans[ix]);
4666   dump ("  %u blocks with %R bits padding", spans[2],
4667 	lengths[2] * 8 - (lengths[0] + lengths[1]), spans[2]);
4668 }
4669 
4670 /* Instrumentation gathered writing trees.  */
4671 void
4672 trees_out::instrument ()
4673 {
4674   if (dump (""))
4675     {
4676       bytes_out::instrument ();
4677       dump ("Wrote:");
4678       dump ("  %u decl trees", decl_val_count);
4679       dump ("  %u other trees", tree_val_count);
4680       dump ("  %u back references", back_ref_count);
4681       dump ("  %u null trees", null_count);
4682     }
4683 }
4684 
4685 /* Setup and teardown for a tree walk.  */
4686 
4687 void
4688 trees_out::begin ()
4689 {
4690   gcc_assert (!streaming_p () || !tree_map.elements ());
4691 
4692   mark_trees ();
4693   if (streaming_p ())
4694     parent::begin ();
4695 }
4696 
4697 unsigned
4698 trees_out::end (elf_out *sink, unsigned name, unsigned *crc_ptr)
4699 {
4700   gcc_checking_assert (streaming_p ());
4701 
4702   unmark_trees ();
4703   return parent::end (sink, name, crc_ptr);
4704 }
4705 
4706 void
4707 trees_out::end ()
4708 {
4709   gcc_assert (!streaming_p ());
4710 
4711   unmark_trees ();
4712   /* Do not parent::end -- we weren't streaming.  */
4713 }
4714 
4715 void
4716 trees_out::mark_trees ()
4717 {
4718   if (size_t size = tree_map.elements ())
4719     {
4720       /* This isn't our first rodeo, destroy and recreate the
4721 	 tree_map.  I'm a bad bad man.  Use the previous size as a
4722 	 guess for the next one (so not all bad).  */
4723       tree_map.~ptr_int_hash_map ();
4724       new (&tree_map) ptr_int_hash_map (size);
4725     }
4726 
4727   /* Install the fixed trees, with +ve references.  */
4728   unsigned limit = fixed_trees->length ();
4729   for (unsigned ix = 0; ix != limit; ix++)
4730     {
4731       tree val = (*fixed_trees)[ix];
4732       bool existed = tree_map.put (val, ix + tag_fixed);
4733       gcc_checking_assert (!TREE_VISITED (val) && !existed);
4734       TREE_VISITED (val) = true;
4735     }
4736 
4737   ref_num = 0;
4738 }
4739 
4740 /* Unmark the trees we encountered.  */
4741 
4742 void
4743 trees_out::unmark_trees ()
4744 {
4745   ptr_int_hash_map::iterator end (tree_map.end ());
4746   for (ptr_int_hash_map::iterator iter (tree_map.begin ()); iter != end; ++iter)
4747     {
4748       tree node = reinterpret_cast<tree> ((*iter).first);
4749       int ref = (*iter).second;
4750       /* We should have visited the node, and converted its mergeable
4751 	 reference to a regular reference.  */
4752       gcc_checking_assert (TREE_VISITED (node)
4753 			   && (ref <= tag_backref || ref >= tag_fixed));
4754       TREE_VISITED (node) = false;
4755     }
4756 }
4757 
4758 /* Mark DECL for by-value walking.  We do this by inserting it into
4759    the tree map with a reference of zero.  May be called multiple
4760    times on the same node.  */
4761 
4762 void
4763 trees_out::mark_by_value (tree decl)
4764 {
4765   gcc_checking_assert (DECL_P (decl)
4766 		       /* Enum consts are INTEGER_CSTS.  */
4767 		       || TREE_CODE (decl) == INTEGER_CST
4768 		       || TREE_CODE (decl) == TREE_BINFO);
4769 
4770   if (TREE_VISITED (decl))
4771     /* Must already be forced or fixed.  */
4772     gcc_checking_assert (*tree_map.get (decl) >= tag_value);
4773   else
4774     {
4775       bool existed = tree_map.put (decl, tag_value);
4776       gcc_checking_assert (!existed);
4777       TREE_VISITED (decl) = true;
4778     }
4779 }
4780 
4781 int
4782 trees_out::get_tag (tree t)
4783 {
4784   gcc_checking_assert (TREE_VISITED (t));
4785   return *tree_map.get (t);
4786 }
4787 
4788 /* Insert T into the map, return its tag number.  */
4789 
4790 int
4791 trees_out::insert (tree t, walk_kind walk)
4792 {
4793   gcc_checking_assert (walk != WK_normal || !TREE_VISITED (t));
4794   int tag = --ref_num;
4795   bool existed;
4796   int &slot = tree_map.get_or_insert (t, &existed);
4797   gcc_checking_assert (TREE_VISITED (t) == existed
4798 		       && (!existed
4799 			   || (walk == WK_value && slot == tag_value)));
4800   TREE_VISITED (t) = true;
4801   slot = tag;
4802 
4803   return tag;
4804 }
4805 
4806 /* Insert T into the backreference array.  Return its back reference
4807    number.  */
4808 
4809 int
4810 trees_in::insert (tree t)
4811 {
4812   gcc_checking_assert (t || get_overrun ());
4813   back_refs.safe_push (t);
4814   return -(int)back_refs.length ();
4815 }
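
/* Worked example (from the two insert functions above): the writer
   hands out back-reference tags -1, -2, -3, ... in encounter order
   via --ref_num, while the reader pushes each new tree onto
   back_refs and returns -(int)length, so tag -N on both sides names
   the Nth tree encountered in the stream.  */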
4816 
4817 /* A chained set of decls.  */
4818 
4819 void
4820 trees_out::chained_decls (tree decls)
4821 {
4822   for (; decls; decls = DECL_CHAIN (decls))
4823     {
4824       if (VAR_OR_FUNCTION_DECL_P (decls)
4825 	  && DECL_LOCAL_DECL_P (decls))
4826 	{
4827 	  /* Make sure this is the first encounter, and mark for
4828 	     walk-by-value.  */
4829 	  gcc_checking_assert (!TREE_VISITED (decls)
4830 			       && !DECL_TEMPLATE_INFO (decls));
4831 	  mark_by_value (decls);
4832 	}
4833       tree_node (decls);
4834     }
4835   tree_node (NULL_TREE);
4836 }
4837 
4838 tree
4839 trees_in::chained_decls ()
4840 {
4841   tree decls = NULL_TREE;
4842   for (tree *chain = &decls;;)
4843     if (tree decl = tree_node ())
4844       {
4845 	if (!DECL_P (decl) || DECL_CHAIN (decl))
4846 	  {
4847 	    set_overrun ();
4848 	    break;
4849 	  }
4850 	*chain = decl;
4851 	chain = &DECL_CHAIN (decl);
4852       }
4853     else
4854       break;
4855 
4856   return decls;
4857 }
4858 
4859 /* A vector of decls following DECL_CHAIN.  */
4860 
4861 void
4862 trees_out::vec_chained_decls (tree decls)
4863 {
4864   if (streaming_p ())
4865     {
4866       unsigned len = 0;
4867 
4868       for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
4869 	len++;
4870       u (len);
4871     }
4872 
4873   for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
4874     {
4875       if (DECL_IMPLICIT_TYPEDEF_P (decl)
4876 	  && TYPE_NAME (TREE_TYPE (decl)) != decl)
4877 	/* An anonymous struct with a typedef name.  An odd thing to
4878 	   write.  */
4879 	tree_node (NULL_TREE);
4880       else
4881 	tree_node (decl);
4882     }
4883 }
4884 
4885 vec<tree, va_heap> *
4886 trees_in::vec_chained_decls ()
4887 {
4888   vec<tree, va_heap> *v = NULL;
4889 
4890   if (unsigned len = u ())
4891     {
4892       vec_alloc (v, len);
4893 
4894       for (unsigned ix = 0; ix < len; ix++)
4895 	{
4896 	  tree decl = tree_node ();
4897 	  if (decl && !DECL_P (decl))
4898 	    {
4899 	      set_overrun ();
4900 	      break;
4901 	    }
4902 	  v->quick_push (decl);
4903 	}
4904 
4905       if (get_overrun ())
4906 	{
4907 	  vec_free (v);
4908 	  v = NULL;
4909 	}
4910     }
4911 
4912   return v;
4913 }
4914 
4915 /* A vector of trees.  */
4916 
4917 void
4918 trees_out::tree_vec (vec<tree, va_gc> *v)
4919 {
4920   unsigned len = vec_safe_length (v);
4921   if (streaming_p ())
4922     u (len);
4923   for (unsigned ix = 0; ix != len; ix++)
4924     tree_node ((*v)[ix]);
4925 }
4926 
4927 vec<tree, va_gc> *
4928 trees_in::tree_vec ()
4929 {
4930   vec<tree, va_gc> *v = NULL;
4931   if (unsigned len = u ())
4932     {
4933       vec_alloc (v, len);
4934       for (unsigned ix = 0; ix != len; ix++)
4935 	v->quick_push (tree_node ());
4936     }
4937   return v;
4938 }
4939 
4940 /* A vector of tree pairs.  */
4941 
4942 void
4943 trees_out::tree_pair_vec (vec<tree_pair_s, va_gc> *v)
4944 {
4945   unsigned len = vec_safe_length (v);
4946   if (streaming_p ())
4947     u (len);
4948   if (len)
4949     for (unsigned ix = 0; ix != len; ix++)
4950       {
4951 	tree_pair_s const &s = (*v)[ix];
4952 	tree_node (s.purpose);
4953 	tree_node (s.value);
4954       }
4955 }
4956 
4957 vec<tree_pair_s, va_gc> *
4958 trees_in::tree_pair_vec ()
4959 {
4960   vec<tree_pair_s, va_gc> *v = NULL;
4961   if (unsigned len = u ())
4962     {
4963       vec_alloc (v, len);
4964       for (unsigned ix = 0; ix != len; ix++)
4965 	{
4966 	  tree_pair_s s;
4967 	  s.purpose = tree_node ();
4968 	  s.value = tree_node ();
4969 	  v->quick_push (s);
4970 	}
4971     }
4972   return v;
4973 }
4974 
4975 void
4976 trees_out::tree_list (tree list, bool has_purpose)
4977 {
4978   for (; list; list = TREE_CHAIN (list))
4979     {
4980       gcc_checking_assert (TREE_VALUE (list));
4981       tree_node (TREE_VALUE (list));
4982       if (has_purpose)
4983 	tree_node (TREE_PURPOSE (list));
4984     }
4985   tree_node (NULL_TREE);
4986 }
4987 
4988 tree
4989 trees_in::tree_list (bool has_purpose)
4990 {
4991   tree res = NULL_TREE;
4992 
4993   for (tree *chain = &res; tree value = tree_node ();
4994        chain = &TREE_CHAIN (*chain))
4995     {
4996       tree purpose = has_purpose ? tree_node () : NULL_TREE;
4997       *chain = build_tree_list (purpose, value);
4998     }
4999 
5000   return res;
5001 }
5002 /* Start tree write.  Write information to allocate the receiving
5003    node.  */
5004 
5005 void
5006 trees_out::start (tree t, bool code_streamed)
5007 {
5008   if (TYPE_P (t))
5009     {
5010       enum tree_code code = TREE_CODE (t);
5011       gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
5012       /* All these types are TYPE_NON_COMMON.  */
5013       gcc_checking_assert (code == RECORD_TYPE
5014 			   || code == UNION_TYPE
5015 			   || code == ENUMERAL_TYPE
5016 			   || code == TEMPLATE_TYPE_PARM
5017 			   || code == TEMPLATE_TEMPLATE_PARM
5018 			   || code == BOUND_TEMPLATE_TEMPLATE_PARM);
5019     }
5020 
5021   if (!code_streamed)
5022     u (TREE_CODE (t));
5023 
5024   switch (TREE_CODE (t))
5025     {
5026     default:
5027       if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_vl_exp)
5028 	u (VL_EXP_OPERAND_LENGTH (t));
5029       break;
5030 
5031     case INTEGER_CST:
5032       u (TREE_INT_CST_NUNITS (t));
5033       u (TREE_INT_CST_EXT_NUNITS (t));
5034       u (TREE_INT_CST_OFFSET_NUNITS (t));
5035       break;
5036 
5037     case OMP_CLAUSE:
5038       state->extensions |= SE_OPENMP;
5039       u (OMP_CLAUSE_CODE (t));
5040       break;
5041 
5042     case STRING_CST:
5043       str (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
5044       break;
5045 
5046     case VECTOR_CST:
5047       u (VECTOR_CST_LOG2_NPATTERNS (t));
5048       u (VECTOR_CST_NELTS_PER_PATTERN (t));
5049       break;
5050 
5051     case TREE_BINFO:
5052       u (BINFO_N_BASE_BINFOS (t));
5053       break;
5054 
5055     case TREE_VEC:
5056       u (TREE_VEC_LENGTH (t));
5057       break;
5058 
5059     case FIXED_CST:
5060     case POLY_INT_CST:
5061       gcc_unreachable (); /* Not supported in C++.  */
5062       break;
5063 
5064     case IDENTIFIER_NODE:
5065     case SSA_NAME:
5066     case TARGET_MEM_REF:
5067     case TRANSLATION_UNIT_DECL:
5068       /* We shouldn't meet these.  */
5069       gcc_unreachable ();
5070       break;
5071     }
5072 }
5073 
5074 /* Start tree read.  Allocate the receiving node.  */
5075 
5076 tree
5077 trees_in::start (unsigned code)
5078 {
5079   tree t = NULL_TREE;
5080 
5081   if (!code)
5082     code = u ();
5083 
5084   switch (code)
5085     {
5086     default:
5087       if (code >= MAX_TREE_CODES)
5088 	{
5089 	fail:
5090 	  set_overrun ();
5091 	  return NULL_TREE;
5092 	}
5093       else if (TREE_CODE_CLASS (code) == tcc_vl_exp)
5094 	{
5095 	  unsigned ops = u ();
5096 	  t = build_vl_exp (tree_code (code), ops);
5097 	}
5098       else
5099 	t = make_node (tree_code (code));
5100       break;
5101 
5102     case INTEGER_CST:
5103       {
5104 	unsigned n = u ();
5105 	unsigned e = u ();
5106 	t = make_int_cst (n, e);
5107 	TREE_INT_CST_OFFSET_NUNITS (t) = u ();
5108       }
5109       break;
5110 
5111     case OMP_CLAUSE:
5112       {
5113 	if (!(state->extensions & SE_OPENMP))
5114 	  goto fail;
5115 
5116 	unsigned omp_code = u ();
5117 	t = build_omp_clause (UNKNOWN_LOCATION, omp_clause_code (omp_code));
5118       }
5119       break;
5120 
5121     case STRING_CST:
5122       {
5123 	size_t l;
5124 	const char *chars = str (&l);
5125 	t = build_string (l, chars);
5126       }
5127       break;
5128 
5129     case VECTOR_CST:
5130       {
5131 	unsigned log2_npats = u ();
5132 	unsigned elts_per = u ();
5133 	t = make_vector (log2_npats, elts_per);
5134       }
5135       break;
5136 
5137     case TREE_BINFO:
5138       t = make_tree_binfo (u ());
5139       break;
5140 
5141     case TREE_VEC:
5142       t = make_tree_vec (u ());
5143       break;
5144 
5145     case FIXED_CST:
5146     case IDENTIFIER_NODE:
5147     case POLY_INT_CST:
5148     case SSA_NAME:
5149     case TARGET_MEM_REF:
5150     case TRANSLATION_UNIT_DECL:
5151       goto fail;
5152     }
5153 
5154   return t;
5155 }
5156 
5157 /* The structure streamers access the raw fields, because the
5158    alternative of using the accessor macros can require using
5159    different accessors for the same underlying field, depending on the
5160    tree code.  That's both confusing and annoying.  */
5161 
5162 /* Read & write the core boolean flags.  */
5163 
5164 void
5165 trees_out::core_bools (tree t)
5166 {
5167 #define WB(X) (b (X))
5168   tree_code code = TREE_CODE (t);
5169 
5170   WB (t->base.side_effects_flag);
5171   WB (t->base.constant_flag);
5172   WB (t->base.addressable_flag);
5173   WB (t->base.volatile_flag);
5174   WB (t->base.readonly_flag);
5175   /* base.asm_written_flag is a property of the current TU's use of
5176      this decl.  */
5177   WB (t->base.nowarning_flag);
5178   /* base.visited read as zero (it's set for writer, because that's
5179      how we mark nodes).  */
5180   /* base.used_flag is not streamed.  Readers may set TREE_USED of
5181      decls they use.  */
5182   WB (t->base.nothrow_flag);
5183   WB (t->base.static_flag);
5184   if (TREE_CODE_CLASS (code) != tcc_type)
5185     /* This is TYPE_CACHED_VALUES_P for types.  */
5186     WB (t->base.public_flag);
5187   WB (t->base.private_flag);
5188   WB (t->base.protected_flag);
5189   WB (t->base.deprecated_flag);
5190   WB (t->base.default_def_flag);
5191 
5192   switch (code)
5193     {
5194     case CALL_EXPR:
5195     case INTEGER_CST:
5196     case SSA_NAME:
5197     case TARGET_MEM_REF:
5198     case TREE_VEC:
5199       /* These use different base.u fields.  */
5200       break;
5201 
5202     default:
5203       WB (t->base.u.bits.lang_flag_0);
5204       bool flag_1 = t->base.u.bits.lang_flag_1;
5205       if (!flag_1)
5206 	;
5207       else if (code == TEMPLATE_INFO)
5208 	/* This is TI_PENDING_TEMPLATE_FLAG, not relevant to reader.  */
5209 	flag_1 = false;
5210       else if (code == VAR_DECL)
5211 	{
5212 	  /* This is DECL_INITIALIZED_P.  */
5213 	  if (TREE_CODE (DECL_CONTEXT (t)) != FUNCTION_DECL)
5214 	    /* We'll set this when reading the definition.  */
5215 	    flag_1 = false;
5216 	}
5217       WB (flag_1);
5218       WB (t->base.u.bits.lang_flag_2);
5219       WB (t->base.u.bits.lang_flag_3);
5220       WB (t->base.u.bits.lang_flag_4);
5221       WB (t->base.u.bits.lang_flag_5);
5222       WB (t->base.u.bits.lang_flag_6);
5223       WB (t->base.u.bits.saturating_flag);
5224       WB (t->base.u.bits.unsigned_flag);
5225       WB (t->base.u.bits.packed_flag);
5226       WB (t->base.u.bits.user_align);
5227       WB (t->base.u.bits.nameless_flag);
5228       WB (t->base.u.bits.atomic_flag);
5229       break;
5230     }
5231 
5232   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
5233     {
5234       WB (t->type_common.no_force_blk_flag);
5235       WB (t->type_common.needs_constructing_flag);
5236       WB (t->type_common.transparent_aggr_flag);
5237       WB (t->type_common.restrict_flag);
5238       WB (t->type_common.string_flag);
5239       WB (t->type_common.lang_flag_0);
5240       WB (t->type_common.lang_flag_1);
5241       WB (t->type_common.lang_flag_2);
5242       WB (t->type_common.lang_flag_3);
5243       WB (t->type_common.lang_flag_4);
5244       WB (t->type_common.lang_flag_5);
5245       WB (t->type_common.lang_flag_6);
5246       WB (t->type_common.typeless_storage);
5247     }
5248 
5249   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
5250     {
5251       WB (t->decl_common.nonlocal_flag);
5252       WB (t->decl_common.virtual_flag);
5253       WB (t->decl_common.ignored_flag);
5254       WB (t->decl_common.abstract_flag);
5255       WB (t->decl_common.artificial_flag);
5256       WB (t->decl_common.preserve_flag);
5257       WB (t->decl_common.debug_expr_is_from);
5258       WB (t->decl_common.lang_flag_0);
5259       WB (t->decl_common.lang_flag_1);
5260       WB (t->decl_common.lang_flag_2);
5261       WB (t->decl_common.lang_flag_3);
5262       WB (t->decl_common.lang_flag_4);
5263       WB (t->decl_common.lang_flag_5);
5264       WB (t->decl_common.lang_flag_6);
5265       WB (t->decl_common.lang_flag_7);
5266       WB (t->decl_common.lang_flag_8);
5267       WB (t->decl_common.decl_flag_0);
5268 
5269       {
5270 	/* DECL_EXTERNAL -> decl_flag_1
5271 	     == it is defined elsewhere
5272 	   DECL_NOT_REALLY_EXTERN -> base.not_really_extern
5273 	     == that was a lie, it is here  */
5274 
5275 	bool is_external = t->decl_common.decl_flag_1;
5276 	if (!is_external)
5277 	  /* decl_flag_1 is DECL_EXTERNAL.  Things we emit here might
5278 	     well be external from the POV of an importer.  */
5279 	  // FIXME: Do we need to know if this is a TEMPLATE_RESULT --
5280 	  // a flag from the caller?
5281 	  switch (code)
5282 	    {
5283 	    default:
5284 	      break;
5285 
5286 	    case VAR_DECL:
5287 	      if (TREE_PUBLIC (t)
5288 		  && !DECL_VAR_DECLARED_INLINE_P (t))
5289 		is_external = true;
5290 	      break;
5291 
5292 	    case FUNCTION_DECL:
5293 	      if (TREE_PUBLIC (t)
5294 		  && !DECL_DECLARED_INLINE_P (t))
5295 		is_external = true;
5296 	      break;
5297 	    }
5298 	WB (is_external);
5299       }
5300 
5301       WB (t->decl_common.decl_flag_2);
5302       WB (t->decl_common.decl_flag_3);
5303       WB (t->decl_common.not_gimple_reg_flag);
5304       WB (t->decl_common.decl_by_reference_flag);
5305       WB (t->decl_common.decl_read_flag);
5306       WB (t->decl_common.decl_nonshareable_flag);
5307     }
5308 
5309   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
5310     {
5311       WB (t->decl_with_vis.defer_output);
5312       WB (t->decl_with_vis.hard_register);
5313       WB (t->decl_with_vis.common_flag);
5314       WB (t->decl_with_vis.in_text_section);
5315       WB (t->decl_with_vis.in_constant_pool);
5316       WB (t->decl_with_vis.dllimport_flag);
5317       WB (t->decl_with_vis.weak_flag);
5318       WB (t->decl_with_vis.seen_in_bind_expr);
5319       WB (t->decl_with_vis.comdat_flag);
5320       WB (t->decl_with_vis.visibility_specified);
5321       WB (t->decl_with_vis.init_priority_p);
5322       WB (t->decl_with_vis.shadowed_for_var_p);
5323       WB (t->decl_with_vis.cxx_constructor);
5324       WB (t->decl_with_vis.cxx_destructor);
5325       WB (t->decl_with_vis.final);
5326       WB (t->decl_with_vis.regdecl_flag);
5327     }
5328 
5329   if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
5330     {
5331       WB (t->function_decl.static_ctor_flag);
5332       WB (t->function_decl.static_dtor_flag);
5333       WB (t->function_decl.uninlinable);
5334       WB (t->function_decl.possibly_inlined);
5335       WB (t->function_decl.novops_flag);
5336       WB (t->function_decl.returns_twice_flag);
5337       WB (t->function_decl.malloc_flag);
5338       WB (t->function_decl.declared_inline_flag);
5339       WB (t->function_decl.no_inline_warning_flag);
5340       WB (t->function_decl.no_instrument_function_entry_exit);
5341       WB (t->function_decl.no_limit_stack);
5342       WB (t->function_decl.disregard_inline_limits);
5343       WB (t->function_decl.pure_flag);
5344       WB (t->function_decl.looping_const_or_pure_flag);
5345 
5346       WB (t->function_decl.has_debug_args_flag);
5347       WB (t->function_decl.versioned_function);
5348 
5349       /* decl_type is a (misnamed) 2 bit discriminator.	 */
5350       unsigned kind = t->function_decl.decl_type;
5351       WB ((kind >> 0) & 1);
5352       WB ((kind >> 1) & 1);
5353     }
5354 #undef WB
5355 }
5356 
5357 bool
5358 trees_in::core_bools (tree t)
5359 {
5360 #define RB(X) ((X) = b ())
5361   tree_code code = TREE_CODE (t);
5362 
5363   RB (t->base.side_effects_flag);
5364   RB (t->base.constant_flag);
5365   RB (t->base.addressable_flag);
5366   RB (t->base.volatile_flag);
5367   RB (t->base.readonly_flag);
5368   /* base.asm_written_flag is not streamed.  */
5369   RB (t->base.nowarning_flag);
5370   /* base.visited is not streamed.  */
5371   /* base.used_flag is not streamed.  */
5372   RB (t->base.nothrow_flag);
5373   RB (t->base.static_flag);
5374   if (TREE_CODE_CLASS (code) != tcc_type)
5375     RB (t->base.public_flag);
5376   RB (t->base.private_flag);
5377   RB (t->base.protected_flag);
5378   RB (t->base.deprecated_flag);
5379   RB (t->base.default_def_flag);
5380 
5381   switch (code)
5382     {
5383     case CALL_EXPR:
5384     case INTEGER_CST:
5385     case SSA_NAME:
5386     case TARGET_MEM_REF:
5387     case TREE_VEC:
5388       /* These use different base.u fields.  */
5389       break;
5390 
5391     default:
5392       RB (t->base.u.bits.lang_flag_0);
5393       RB (t->base.u.bits.lang_flag_1);
5394       RB (t->base.u.bits.lang_flag_2);
5395       RB (t->base.u.bits.lang_flag_3);
5396       RB (t->base.u.bits.lang_flag_4);
5397       RB (t->base.u.bits.lang_flag_5);
5398       RB (t->base.u.bits.lang_flag_6);
5399       RB (t->base.u.bits.saturating_flag);
5400       RB (t->base.u.bits.unsigned_flag);
5401       RB (t->base.u.bits.packed_flag);
5402       RB (t->base.u.bits.user_align);
5403       RB (t->base.u.bits.nameless_flag);
5404       RB (t->base.u.bits.atomic_flag);
5405       break;
5406     }
5407 
5408   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
5409     {
5410       RB (t->type_common.no_force_blk_flag);
5411       RB (t->type_common.needs_constructing_flag);
5412       RB (t->type_common.transparent_aggr_flag);
5413       RB (t->type_common.restrict_flag);
5414       RB (t->type_common.string_flag);
5415       RB (t->type_common.lang_flag_0);
5416       RB (t->type_common.lang_flag_1);
5417       RB (t->type_common.lang_flag_2);
5418       RB (t->type_common.lang_flag_3);
5419       RB (t->type_common.lang_flag_4);
5420       RB (t->type_common.lang_flag_5);
5421       RB (t->type_common.lang_flag_6);
5422       RB (t->type_common.typeless_storage);
5423     }
5424 
5425   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
5426     {
5427       RB (t->decl_common.nonlocal_flag);
5428       RB (t->decl_common.virtual_flag);
5429       RB (t->decl_common.ignored_flag);
5430       RB (t->decl_common.abstract_flag);
5431       RB (t->decl_common.artificial_flag);
5432       RB (t->decl_common.preserve_flag);
5433       RB (t->decl_common.debug_expr_is_from);
5434       RB (t->decl_common.lang_flag_0);
5435       RB (t->decl_common.lang_flag_1);
5436       RB (t->decl_common.lang_flag_2);
5437       RB (t->decl_common.lang_flag_3);
5438       RB (t->decl_common.lang_flag_4);
5439       RB (t->decl_common.lang_flag_5);
5440       RB (t->decl_common.lang_flag_6);
5441       RB (t->decl_common.lang_flag_7);
5442       RB (t->decl_common.lang_flag_8);
5443       RB (t->decl_common.decl_flag_0);
5444       RB (t->decl_common.decl_flag_1);
5445       RB (t->decl_common.decl_flag_2);
5446       RB (t->decl_common.decl_flag_3);
5447       RB (t->decl_common.not_gimple_reg_flag);
5448       RB (t->decl_common.decl_by_reference_flag);
5449       RB (t->decl_common.decl_read_flag);
5450       RB (t->decl_common.decl_nonshareable_flag);
5451     }
5452 
5453   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
5454     {
5455       RB (t->decl_with_vis.defer_output);
5456       RB (t->decl_with_vis.hard_register);
5457       RB (t->decl_with_vis.common_flag);
5458       RB (t->decl_with_vis.in_text_section);
5459       RB (t->decl_with_vis.in_constant_pool);
5460       RB (t->decl_with_vis.dllimport_flag);
5461       RB (t->decl_with_vis.weak_flag);
5462       RB (t->decl_with_vis.seen_in_bind_expr);
5463       RB (t->decl_with_vis.comdat_flag);
5464       RB (t->decl_with_vis.visibility_specified);
5465       RB (t->decl_with_vis.init_priority_p);
5466       RB (t->decl_with_vis.shadowed_for_var_p);
5467       RB (t->decl_with_vis.cxx_constructor);
5468       RB (t->decl_with_vis.cxx_destructor);
5469       RB (t->decl_with_vis.final);
5470       RB (t->decl_with_vis.regdecl_flag);
5471     }
5472 
5473   if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
5474     {
5475       RB (t->function_decl.static_ctor_flag);
5476       RB (t->function_decl.static_dtor_flag);
5477       RB (t->function_decl.uninlinable);
5478       RB (t->function_decl.possibly_inlined);
5479       RB (t->function_decl.novops_flag);
5480       RB (t->function_decl.returns_twice_flag);
5481       RB (t->function_decl.malloc_flag);
5482       RB (t->function_decl.declared_inline_flag);
5483       RB (t->function_decl.no_inline_warning_flag);
5484       RB (t->function_decl.no_instrument_function_entry_exit);
5485       RB (t->function_decl.no_limit_stack);
5486       RB (t->function_decl.disregard_inline_limits);
5487       RB (t->function_decl.pure_flag);
5488       RB (t->function_decl.looping_const_or_pure_flag);
5489 
5490       RB (t->function_decl.has_debug_args_flag);
5491       RB (t->function_decl.versioned_function);
5492 
5493       /* decl_type is a (misnamed) 2-bit discriminator.  */
5494       unsigned kind = 0;
5495       kind |= unsigned (b ()) << 0;
5496       kind |= unsigned (b ()) << 1;
5497       t->function_decl.decl_type = function_decl_type (kind);
5498     }
5499 #undef RB
5500   return !get_overrun ();
5501 }
5502 
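/* Write out the lang_decl bools of T.  Multi-bit fields such as
   u.base.use_template are streamed one bit at a time, low bit first,
   and several TU-local flags are deliberately not streamed; the
   reader must consume the bits in exactly the same order.  */
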
5503 void
5504 trees_out::lang_decl_bools (tree t)
5505 {
5506 #define WB(X) (b (X))
5507   const struct lang_decl *lang = DECL_LANG_SPECIFIC (t);
5508 
5509   WB (lang->u.base.language == lang_cplusplus);
5510   WB ((lang->u.base.use_template >> 0) & 1);
5511   WB ((lang->u.base.use_template >> 1) & 1);
5512   /* Do not write lang->u.base.not_really_extern, the importer will
5513      set it when reading the definition (if any).  */
5514   WB (lang->u.base.initialized_in_class);
5515   WB (lang->u.base.threadprivate_or_deleted_p);
5516   /* Do not write lang->u.base.anticipated_p, it is a property of the
5517      current TU.  */
5518   WB (lang->u.base.friend_or_tls);
5519   WB (lang->u.base.unknown_bound_p);
5520   /* Do not write lang->u.base.odr_used, the importer will recalculate
5521      it if it ODR-uses this decl.  */
5522   WB (lang->u.base.concept_p);
5523   WB (lang->u.base.var_declared_inline_p);
5524   WB (lang->u.base.dependent_init_p);
5525   /* When building a header unit, everything is marked as purview, but
5526      that's the GM purview, which is not what the importer will take it to mean.  */
5527   WB (lang->u.base.module_purview_p && !header_module_p ());
5528   if (VAR_OR_FUNCTION_DECL_P (t))
5529     WB (lang->u.base.module_attached_p);
5530   switch (lang->u.base.selector)
5531     {
5532     default:
5533       gcc_unreachable ();
5534 
5535     case lds_fn:  /* lang_decl_fn.  */
5536       WB (lang->u.fn.global_ctor_p);
5537       WB (lang->u.fn.global_dtor_p);
5538       WB (lang->u.fn.static_function);
5539       WB (lang->u.fn.pure_virtual);
5540       WB (lang->u.fn.defaulted_p);
5541       WB (lang->u.fn.has_in_charge_parm_p);
5542       WB (lang->u.fn.has_vtt_parm_p);
5543       /* There shouldn't be a pending inline at this point.  */
5544       gcc_assert (!lang->u.fn.pending_inline_p);
5545       WB (lang->u.fn.nonconverting);
5546       WB (lang->u.fn.thunk_p);
5547       WB (lang->u.fn.this_thunk_p);
5548       /* Do not stream lang->u.fn.hidden_friend_p, it is a property of
5549 	 the TU.  */
5550       WB (lang->u.fn.omp_declare_reduction_p);
5551       WB (lang->u.fn.has_dependent_explicit_spec_p);
5552       WB (lang->u.fn.immediate_fn_p);
5553       WB (lang->u.fn.maybe_deleted);
5554       goto lds_min;
5555 
5556     case lds_decomp:  /* lang_decl_decomp.  */
5557       /* No bools.  */
5558       goto lds_min;
5559 
5560     case lds_min:  /* lang_decl_min.  */
5561     lds_min:
5562       /* No bools.  */
5563       break;
5564 
5565     case lds_ns:  /* lang_decl_ns.  */
5566       /* No bools.  */
5567       break;
5568 
5569     case lds_parm:  /* lang_decl_parm.  */
5570       /* No bools.  */
5571       break;
5572     }
5573 #undef WB
5574 }
5575 
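/* Read in the lang_decl bools of T, mirroring
   trees_out::lang_decl_bools above.  Returns false on stream
   overrun.  */
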
5576 bool
5577 trees_in::lang_decl_bools (tree t)
5578 {
5579 #define RB(X) ((X) = b ())
5580   struct lang_decl *lang = DECL_LANG_SPECIFIC (t);
5581 
5582   lang->u.base.language = b () ? lang_cplusplus : lang_c;
5583   unsigned v;
5584   v = b () << 0;
5585   v |= b () << 1;
5586   lang->u.base.use_template = v;
5587   /* lang->u.base.not_really_extern is not streamed.  */
5588   RB (lang->u.base.initialized_in_class);
5589   RB (lang->u.base.threadprivate_or_deleted_p);
5590   /* lang->u.base.anticipated_p is not streamed.  */
5591   RB (lang->u.base.friend_or_tls);
5592   RB (lang->u.base.unknown_bound_p);
5593   /* lang->u.base.odr_used is not streamed.  */
5594   RB (lang->u.base.concept_p);
5595   RB (lang->u.base.var_declared_inline_p);
5596   RB (lang->u.base.dependent_init_p);
5597   RB (lang->u.base.module_purview_p);
5598   if (VAR_OR_FUNCTION_DECL_P (t))
5599     RB (lang->u.base.module_attached_p);
5600   switch (lang->u.base.selector)
5601     {
5602     default:
5603       gcc_unreachable ();
5604 
5605     case lds_fn:  /* lang_decl_fn.  */
5606       RB (lang->u.fn.global_ctor_p);
5607       RB (lang->u.fn.global_dtor_p);
5608       RB (lang->u.fn.static_function);
5609       RB (lang->u.fn.pure_virtual);
5610       RB (lang->u.fn.defaulted_p);
5611       RB (lang->u.fn.has_in_charge_parm_p);
5612       RB (lang->u.fn.has_vtt_parm_p);
5613       RB (lang->u.fn.nonconverting);
5614       RB (lang->u.fn.thunk_p);
5615       RB (lang->u.fn.this_thunk_p);
5616       /* lang->u.fn.hidden_friend_p is not streamed.  */
5617       RB (lang->u.fn.omp_declare_reduction_p);
5618       RB (lang->u.fn.has_dependent_explicit_spec_p);
5619       RB (lang->u.fn.immediate_fn_p);
5620       RB (lang->u.fn.maybe_deleted);
5621       goto lds_min;
5622 
5623     case lds_decomp:  /* lang_decl_decomp.  */
5624       /* No bools.  */
5625       goto lds_min;
5626 
5627     case lds_min:  /* lang_decl_min.  */
5628     lds_min:
5629       /* No bools.  */
5630       break;
5631 
5632     case lds_ns:  /* lang_decl_ns.  */
5633       /* No bools.  */
5634       break;
5635 
5636     case lds_parm:  /* lang_decl_parm.  */
5637       /* No bools.  */
5638       break;
5639     }
5640 #undef RB
5641   return !get_overrun ();
5642 }
5643 
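/* Write out the lang_type bools of T.  The 2-bit use_template and
   gets_delete fields are streamed bitwise; interface_only and
   interface_unknown are not streamed at all.  */
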
5644 void
5645 trees_out::lang_type_bools (tree t)
5646 {
5647 #define WB(X) (b (X))
5648   const struct lang_type *lang = TYPE_LANG_SPECIFIC (t);
5649 
5650   WB (lang->has_type_conversion);
5651   WB (lang->has_copy_ctor);
5652   WB (lang->has_default_ctor);
5653   WB (lang->const_needs_init);
5654   WB (lang->ref_needs_init);
5655   WB (lang->has_const_copy_assign);
5656   WB ((lang->use_template >> 0) & 1);
5657   WB ((lang->use_template >> 1) & 1);
5658 
5659   WB (lang->has_mutable);
5660   WB (lang->com_interface);
5661   WB (lang->non_pod_class);
5662   WB (lang->nearly_empty_p);
5663   WB (lang->user_align);
5664   WB (lang->has_copy_assign);
5665   WB (lang->has_new);
5666   WB (lang->has_array_new);
5667 
5668   WB ((lang->gets_delete >> 0) & 1);
5669   WB ((lang->gets_delete >> 1) & 1);
5670   // Interfaceness is recalculated upon reading.  May have to revisit?
5671   // How do dllexport and dllimport interact across a module?
5672   // lang->interface_only
5673   // lang->interface_unknown
5674   WB (lang->contains_empty_class_p);
5675   WB (lang->anon_aggr);
5676   WB (lang->non_zero_init);
5677   WB (lang->empty_p);
5678 
5679   WB (lang->vec_new_uses_cookie);
5680   WB (lang->declared_class);
5681   WB (lang->diamond_shaped);
5682   WB (lang->repeated_base);
5683   gcc_assert (!lang->being_defined);
5684   // lang->debug_requested
5685   WB (lang->fields_readonly);
5686   WB (lang->ptrmemfunc_flag);
5687 
5688   WB (lang->lazy_default_ctor);
5689   WB (lang->lazy_copy_ctor);
5690   WB (lang->lazy_copy_assign);
5691   WB (lang->lazy_destructor);
5692   WB (lang->has_const_copy_ctor);
5693   WB (lang->has_complex_copy_ctor);
5694   WB (lang->has_complex_copy_assign);
5695   WB (lang->non_aggregate);
5696 
5697   WB (lang->has_complex_dflt);
5698   WB (lang->has_list_ctor);
5699   WB (lang->non_std_layout);
5700   WB (lang->is_literal);
5701   WB (lang->lazy_move_ctor);
5702   WB (lang->lazy_move_assign);
5703   WB (lang->has_complex_move_ctor);
5704   WB (lang->has_complex_move_assign);
5705 
5706   WB (lang->has_constexpr_ctor);
5707   WB (lang->unique_obj_representations);
5708   WB (lang->unique_obj_representations_set);
5709 #undef WB
5710 }
5711 
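/* Read in the lang_type bools of T, mirroring
   trees_out::lang_type_bools above.  Returns false on stream
   overrun.  */
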
5712 bool
5713 trees_in::lang_type_bools (tree t)
5714 {
5715 #define RB(X) ((X) = b ())
5716   struct lang_type *lang = TYPE_LANG_SPECIFIC (t);
5717 
5718   RB (lang->has_type_conversion);
5719   RB (lang->has_copy_ctor);
5720   RB (lang->has_default_ctor);
5721   RB (lang->const_needs_init);
5722   RB (lang->ref_needs_init);
5723   RB (lang->has_const_copy_assign);
5724   unsigned v;
5725   v = b () << 0;
5726   v |= b () << 1;
5727   lang->use_template = v;
5728 
5729   RB (lang->has_mutable);
5730   RB (lang->com_interface);
5731   RB (lang->non_pod_class);
5732   RB (lang->nearly_empty_p);
5733   RB (lang->user_align);
5734   RB (lang->has_copy_assign);
5735   RB (lang->has_new);
5736   RB (lang->has_array_new);
5737 
5738   v = b () << 0;
5739   v |= b () << 1;
5740   lang->gets_delete = v;
5741   // lang->interface_only
5742   // lang->interface_unknown
5743   lang->interface_unknown = true; // Redetermine interface
5744   RB (lang->contains_empty_class_p);
5745   RB (lang->anon_aggr);
5746   RB (lang->non_zero_init);
5747   RB (lang->empty_p);
5748 
5749   RB (lang->vec_new_uses_cookie);
5750   RB (lang->declared_class);
5751   RB (lang->diamond_shaped);
5752   RB (lang->repeated_base);
5753   gcc_assert (!lang->being_defined);
5754   gcc_assert (!lang->debug_requested);
5755   RB (lang->fields_readonly);
5756   RB (lang->ptrmemfunc_flag);
5757 
5758   RB (lang->lazy_default_ctor);
5759   RB (lang->lazy_copy_ctor);
5760   RB (lang->lazy_copy_assign);
5761   RB (lang->lazy_destructor);
5762   RB (lang->has_const_copy_ctor);
5763   RB (lang->has_complex_copy_ctor);
5764   RB (lang->has_complex_copy_assign);
5765   RB (lang->non_aggregate);
5766 
5767   RB (lang->has_complex_dflt);
5768   RB (lang->has_list_ctor);
5769   RB (lang->non_std_layout);
5770   RB (lang->is_literal);
5771   RB (lang->lazy_move_ctor);
5772   RB (lang->lazy_move_assign);
5773   RB (lang->has_complex_move_ctor);
5774   RB (lang->has_complex_move_assign);
5775 
5776   RB (lang->has_constexpr_ctor);
5777   RB (lang->unique_obj_representations);
5778   RB (lang->unique_obj_representations_set);
5779 #undef RB
5780   return !get_overrun ();
5781 }
5782 
5783 /* Read & write the core values and pointers.  */
5784 
5785 void
5786 trees_out::core_vals (tree t)
5787 {
5788 #define WU(X) (u (X))
5789 #define WT(X) (tree_node (X))
5790   tree_code code = TREE_CODE (t);
5791 
5792   /* First by shape of the tree.  */
5793 
5794   if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
5795     {
5796       /* Write this early, for better log information.  */
5797       WT (t->decl_minimal.name);
5798       if (!DECL_TEMPLATE_PARM_P (t))
5799 	WT (t->decl_minimal.context);
5800 
5801       if (state)
5802 	state->write_location (*this, t->decl_minimal.locus);
5803     }
5804 
5805   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
5806     {
5807       /* The only types we write also have TYPE_NON_COMMON.  */
5808       gcc_checking_assert (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON));
5809 
5810       /* We only stream the main variant.  */
5811       gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
5812 
5813       /* Stream the name & context first, for better log information.  */
5814       WT (t->type_common.name);
5815       WT (t->type_common.context);
5816 
5817       /* By construction we want to make sure we have the canonical
5818 	 and main variants already in the type table, so emit them
5819 	 now.  */
5820       WT (t->type_common.main_variant);
5821 
5822       tree canonical = t->type_common.canonical;
5823       if (canonical && DECL_TEMPLATE_PARM_P (TYPE_NAME (t)))
5824 	/* We do not want to wander into different templates.
5825 	   Reconstructed on stream in.  */
5826 	canonical = t;
5827       WT (canonical);
5828 
5829       /* type_common.next_variant is internally manipulated.  */
5830       /* type_common.pointer_to, type_common.reference_to.  */
5831 
5832       if (streaming_p ())
5833 	{
5834 	  WU (t->type_common.precision);
5835 	  WU (t->type_common.contains_placeholder_bits);
5836 	  WU (t->type_common.mode);
5837 	  WU (t->type_common.align);
5838 	}
5839 
5840       if (!RECORD_OR_UNION_CODE_P (code))
5841 	{
5842 	  WT (t->type_common.size);
5843 	  WT (t->type_common.size_unit);
5844 	}
5845       WT (t->type_common.attributes);
5846 
5847       WT (t->type_common.common.chain); /* TYPE_STUB_DECL.  */
5848     }
5849 
5850   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
5851     {
5852       if (streaming_p ())
5853 	{
5854 	  WU (t->decl_common.mode);
5855 	  WU (t->decl_common.off_align);
5856 	  WU (t->decl_common.align);
5857 	}
5858 
5859       /* For templates these hold instantiation (partial and/or
5860 	 specialization) information.  */
5861       if (code != TEMPLATE_DECL)
5862 	{
5863 	  WT (t->decl_common.size);
5864 	  WT (t->decl_common.size_unit);
5865 	}
5866 
5867       WT (t->decl_common.attributes);
5868       // FIXME: Does this introduce cross-decl links?  For instance
5869       // from instantiation to the template.  If so, we'll need more
5870       // deduplication logic.  I think we'll need to walk the blocks
5871       // of the owning function_decl's abstract origin in tandem, to
5872       // generate the locating data needed?
5873       WT (t->decl_common.abstract_origin);
5874     }
5875 
5876   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
5877     {
5878       WT (t->decl_with_vis.assembler_name);
5879       if (streaming_p ())
5880 	WU (t->decl_with_vis.visibility);
5881     }
5882 
5883   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
5884     {
5885       /* Records and unions hold FIELDS, VFIELD & BINFO on these
5886 	 things.  */
5887       if (!RECORD_OR_UNION_CODE_P (code) && code != ENUMERAL_TYPE)
5888 	{
5889 	  // FIXME: These are from tpl_parm_value's 'type' writing.
5890 	  // Perhaps it should just be doing them directly?
5891 	  gcc_checking_assert (code == TEMPLATE_TYPE_PARM
5892 			       || code == TEMPLATE_TEMPLATE_PARM
5893 			       || code == BOUND_TEMPLATE_TEMPLATE_PARM);
5894 	  gcc_checking_assert (!TYPE_CACHED_VALUES_P (t));
5895 	  WT (t->type_non_common.values);
5896 	  WT (t->type_non_common.maxval);
5897 	  WT (t->type_non_common.minval);
5898 	}
5899 
5900       WT (t->type_non_common.lang_1);
5901     }
5902 
5903   if (CODE_CONTAINS_STRUCT (code, TS_EXP))
5904     {
5905       if (state)
5906 	state->write_location (*this, t->exp.locus);
5907 
5908       /* Walk in forward order, as (for instance) REQUIRES_EXPR has a
5909          bunch of unscoped parms on its first operand.  It's safer to
5910          create those in order.  */
5911       bool vl = TREE_CODE_CLASS (code) == tcc_vl_exp;
5912       for (unsigned limit = (vl ? VL_EXP_OPERAND_LENGTH (t)
5913 			     : TREE_OPERAND_LENGTH (t)),
5914 	     ix = unsigned (vl); ix != limit; ix++)
5915 	WT (TREE_OPERAND (t, ix));
5916     }
5917   else
5918     /* The CODE_CONTAINS tables were inaccurate when I started.  */
5919     gcc_checking_assert (TREE_CODE_CLASS (code) != tcc_expression
5920 			 && TREE_CODE_CLASS (code) != tcc_binary
5921 			 && TREE_CODE_CLASS (code) != tcc_unary
5922 			 && TREE_CODE_CLASS (code) != tcc_reference
5923 			 && TREE_CODE_CLASS (code) != tcc_comparison
5924 			 && TREE_CODE_CLASS (code) != tcc_statement
5925 			 && TREE_CODE_CLASS (code) != tcc_vl_exp);
5926 
5927   /* Then by CODE.  Special cases and/or 1:1 tree shape
5928      correspondence.  */
5929   switch (code)
5930     {
5931     default:
5932       break;
5933 
5934     case ARGUMENT_PACK_SELECT:  /* Transient during instantiation.  */
5935     case DEFERRED_PARSE:	/* Expanded upon completion of
5936 				   outermost class.  */
5937     case IDENTIFIER_NODE:	/* Streamed specially.  */
5938     case BINDING_VECTOR:		/* Only in namespace-scope symbol
5939 				   table.  */
5940     case SSA_NAME:
5941     case TRANSLATION_UNIT_DECL: /* There is only one, it is a
5942 				   global_tree.  */
5943     case USERDEF_LITERAL:  	/* Expanded during parsing.  */
5944       gcc_unreachable (); /* Should never meet.  */
5945 
5946       /* Constants.  */
5947     case COMPLEX_CST:
5948       WT (TREE_REALPART (t));
5949       WT (TREE_IMAGPART (t));
5950       break;
5951 
5952     case FIXED_CST:
5953       gcc_unreachable (); /* Not supported in C++.  */
5954 
5955     case INTEGER_CST:
5956       if (streaming_p ())
5957 	{
5958 	  unsigned num = TREE_INT_CST_EXT_NUNITS (t);
5959 	  for (unsigned ix = 0; ix != num; ix++)
5960 	    wu (TREE_INT_CST_ELT (t, ix));
5961 	}
5962       break;
5963 
5964     case POLY_INT_CST:
5965       gcc_unreachable (); /* Not supported in C++.  */
5966 
5967     case REAL_CST:
5968       if (streaming_p ())
5969 	buf (TREE_REAL_CST_PTR (t), sizeof (real_value));
5970       break;
5971 
5972     case STRING_CST:
5973       /* Streamed during start.  */
5974       break;
5975 
5976     case VECTOR_CST:
5977       for (unsigned ix = vector_cst_encoded_nelts (t); ix--;)
5978 	WT (VECTOR_CST_ENCODED_ELT (t, ix));
5979       break;
5980 
5981       /* Decls.  */
5982     case VAR_DECL:
5983       if (DECL_CONTEXT (t)
5984 	  && TREE_CODE (DECL_CONTEXT (t)) != FUNCTION_DECL)
5985 	break;
5986       /* FALLTHROUGH  */
5987 
5988     case RESULT_DECL:
5989     case PARM_DECL:
5990       if (DECL_HAS_VALUE_EXPR_P (t))
5991 	WT (DECL_VALUE_EXPR (t));
5992       /* FALLTHROUGH  */
5993 
5994     case CONST_DECL:
5995     case IMPORTED_DECL:
5996       WT (t->decl_common.initial);
5997       break;
5998 
5999     case FIELD_DECL:
6000       WT (t->field_decl.offset);
6001       WT (t->field_decl.bit_field_type);
6002       WT (t->field_decl.qualifier); /* bitfield unit.  */
6003       WT (t->field_decl.bit_offset);
6004       WT (t->field_decl.fcontext);
6005       WT (t->decl_common.initial);
6006       break;
6007 
6008     case LABEL_DECL:
6009       if (streaming_p ())
6010 	{
6011 	  WU (t->label_decl.label_decl_uid);
6012 	  WU (t->label_decl.eh_landing_pad_nr);
6013 	}
6014       break;
6015 
6016     case FUNCTION_DECL:
6017       if (streaming_p ())
6018 	{
6019 	  /* Builtins can be streamed by value when a header declares
6020 	     them.  */
6021 	  WU (DECL_BUILT_IN_CLASS (t));
6022 	  if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
6023 	    WU (DECL_UNCHECKED_FUNCTION_CODE (t));
6024 	}
6025 
6026       WT (t->function_decl.personality);
6027       WT (t->function_decl.function_specific_target);
6028       WT (t->function_decl.function_specific_optimization);
6029       WT (t->function_decl.vindex);
6030       break;
6031 
6032     case USING_DECL:
6033       /* USING_DECL_DECLS  */
6034       WT (t->decl_common.initial);
6035       /* FALLTHROUGH  */
6036 
6037     case TYPE_DECL:
6038       /* USING_DECL: USING_DECL_SCOPE  */
6039       /* TYPE_DECL: DECL_ORIGINAL_TYPE */
6040       WT (t->decl_non_common.result);
6041       break;
6042 
6043       /* Miscellaneous common nodes.  */
6044     case BLOCK:
6045       if (state)
6046 	{
6047 	  state->write_location (*this, t->block.locus);
6048 	  state->write_location (*this, t->block.end_locus);
6049 	}
6050 
6051       /* DECL_LOCAL_DECL_P decls are first encountered here and
6052          streamed by value.  */
6053       chained_decls (t->block.vars);
6054       /* nonlocalized_vars is a middle-end thing.  */
6055       WT (t->block.subblocks);
6056       WT (t->block.supercontext);
6057       // FIXME: As for decl's abstract_origin, does this introduce crosslinks?
6058       WT (t->block.abstract_origin);
6059       /* fragment_origin, fragment_chain are middle-end things.  */
6060       WT (t->block.chain);
6061       /* nonlocalized_vars, block_num & die are middle endy/debug
6062 	 things.  */
6063       break;
6064 
6065     case CALL_EXPR:
6066       if (streaming_p ())
6067 	WU (t->base.u.ifn);
6068       break;
6069 
6070     case CONSTRUCTOR:
6071       {
6072 	unsigned len = vec_safe_length (t->constructor.elts);
6073 	if (streaming_p ())
6074 	  WU (len);
6075 	if (len)
6076 	  for (unsigned ix = 0; ix != len; ix++)
6077 	    {
6078 	      const constructor_elt &elt = (*t->constructor.elts)[ix];
6079 
6080 	      WT (elt.index);
6081 	      WT (elt.value);
6082 	    }
6083       }
6084       break;
6085 
6086     case OMP_CLAUSE:
6087       {
6088 	/* The OMP clause code is serialized in start.  */
6089 	if (streaming_p ())
6090 	  WU (t->omp_clause.subcode.map_kind);
6091 	if (state)
6092 	  state->write_location (*this, t->omp_clause.locus);
6093 
6094 	unsigned len = omp_clause_num_ops[OMP_CLAUSE_CODE (t)];
6095 	for (unsigned ix = 0; ix != len; ix++)
6096 	  WT (t->omp_clause.ops[ix]);
6097       }
6098       break;
6099 
6100     case STATEMENT_LIST:
6101       for (tree_stmt_iterator iter = tsi_start (t);
6102 	   !tsi_end_p (iter); tsi_next (&iter))
6103 	if (tree stmt = tsi_stmt (iter))
6104 	  WT (stmt);
6105       WT (NULL_TREE);
6106       break;
6107 
6108     case OPTIMIZATION_NODE:
6109     case TARGET_OPTION_NODE:
6110       // FIXME: Our representation for these two nodes is a cache of
6111       // the resulting set of options.  Not a record of the options
6112       // that got changed by a particular attribute or pragma.  Should
6113       // we record that, or should we record the diff from the command
6114       // line options?  The latter seems the right behaviour, but is
6115       // (a) harder, and I guess could introduce strangeness if the
6116       // importer has set some incompatible set of optimization flags?
6117       gcc_unreachable ();
6118       break;
6119 
6120     case TREE_BINFO:
6121       {
6122 	WT (t->binfo.common.chain);
6123 	WT (t->binfo.offset);
6124 	WT (t->binfo.inheritance);
6125 	WT (t->binfo.vptr_field);
6126 
6127 	WT (t->binfo.vtable);
6128 	WT (t->binfo.virtuals);
6129 	WT (t->binfo.vtt_subvtt);
6130 	WT (t->binfo.vtt_vptr);
6131 
6132 	tree_vec (BINFO_BASE_ACCESSES (t));
6133 	unsigned num = vec_safe_length (BINFO_BASE_ACCESSES (t));
6134 	for (unsigned ix = 0; ix != num; ix++)
6135 	  WT (BINFO_BASE_BINFO (t, ix));
6136       }
6137       break;
6138 
6139     case TREE_LIST:
6140       WT (t->list.purpose);
6141       WT (t->list.value);
6142       WT (t->list.common.chain);
6143       break;
6144 
6145     case TREE_VEC:
6146       for (unsigned ix = TREE_VEC_LENGTH (t); ix--;)
6147 	WT (TREE_VEC_ELT (t, ix));
6148       /* We stash NON_DEFAULT_TEMPLATE_ARGS_COUNT on TREE_CHAIN!  */
6149       gcc_checking_assert (!t->type_common.common.chain
6150 			   || (TREE_CODE (t->type_common.common.chain)
6151 			       == INTEGER_CST));
6152       WT (t->type_common.common.chain);
6153       break;
6154 
6155       /* C++-specific nodes ...  */
6156     case BASELINK:
6157       WT (((lang_tree_node *)t)->baselink.binfo);
6158       WT (((lang_tree_node *)t)->baselink.functions);
6159       WT (((lang_tree_node *)t)->baselink.access_binfo);
6160       break;
6161 
6162     case CONSTRAINT_INFO:
6163       WT (((lang_tree_node *)t)->constraint_info.template_reqs);
6164       WT (((lang_tree_node *)t)->constraint_info.declarator_reqs);
6165       WT (((lang_tree_node *)t)->constraint_info.associated_constr);
6166       break;
6167 
6168     case DEFERRED_NOEXCEPT:
6169       WT (((lang_tree_node *)t)->deferred_noexcept.pattern);
6170       WT (((lang_tree_node *)t)->deferred_noexcept.args);
6171       break;
6172 
6173     case LAMBDA_EXPR:
6174       WT (((lang_tree_node *)t)->lambda_expression.capture_list);
6175       WT (((lang_tree_node *)t)->lambda_expression.this_capture);
6176       WT (((lang_tree_node *)t)->lambda_expression.extra_scope);
6177       /* pending_proxies is a parse-time thing.  */
6178       gcc_assert (!((lang_tree_node *)t)->lambda_expression.pending_proxies);
6179       if (state)
6180 	state->write_location
6181 	  (*this, ((lang_tree_node *)t)->lambda_expression.locus);
6182       if (streaming_p ())
6183 	{
6184 	  WU (((lang_tree_node *)t)->lambda_expression.default_capture_mode);
6185 	  WU (((lang_tree_node *)t)->lambda_expression.discriminator);
6186 	}
6187       break;
6188 
6189     case OVERLOAD:
6190       WT (((lang_tree_node *)t)->overload.function);
6191       WT (t->common.chain);
6192       break;
6193 
6194     case PTRMEM_CST:
6195       WT (((lang_tree_node *)t)->ptrmem.member);
6196       break;
6197 
6198     case STATIC_ASSERT:
6199       WT (((lang_tree_node *)t)->static_assertion.condition);
6200       WT (((lang_tree_node *)t)->static_assertion.message);
6201       if (state)
6202 	state->write_location
6203 	  (*this, ((lang_tree_node *)t)->static_assertion.location);
6204       break;
6205 
6206     case TEMPLATE_DECL:
6207       /* Streamed with the template_decl node itself.  */
6208       gcc_checking_assert
6209       	(TREE_VISITED (((lang_tree_node *)t)->template_decl.arguments));
6210       gcc_checking_assert
6211 	(TREE_VISITED (((lang_tree_node *)t)->template_decl.result)
6212 	 || dep_hash->find_dependency (t)->is_alias_tmpl_inst ());
6213       if (DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (t))
6214 	WT (DECL_CHAIN (t));
6215       break;
6216 
6217     case TEMPLATE_INFO:
6218       {
6219 	WT (((lang_tree_node *)t)->template_info.tmpl);
6220 	WT (((lang_tree_node *)t)->template_info.args);
6221 
6222 	const auto *ac = (((lang_tree_node *)t)
6223 			  ->template_info.deferred_access_checks);
6224 	unsigned len = vec_safe_length (ac);
6225 	if (streaming_p ())
6226 	  u (len);
6227 	if (len)
6228 	  {
6229 	    for (unsigned ix = 0; ix != len; ix++)
6230 	      {
6231 		const auto &m = (*ac)[ix];
6232 		WT (m.binfo);
6233 		WT (m.decl);
6234 		WT (m.diag_decl);
6235 		if (state)
6236 		  state->write_location (*this, m.loc);
6237 	      }
6238 	  }
6239       }
6240       break;
6241 
6242     case TEMPLATE_PARM_INDEX:
6243       if (streaming_p ())
6244 	{
6245 	  WU (((lang_tree_node *)t)->tpi.index);
6246 	  WU (((lang_tree_node *)t)->tpi.level);
6247 	  WU (((lang_tree_node *)t)->tpi.orig_level);
6248 	}
6249       WT (((lang_tree_node *)t)->tpi.decl);
6250       /* TEMPLATE_PARM_DESCENDANTS (AKA TREE_CHAIN) is an internal
6251 	 cache, do not stream.  */
6252       break;
6253 
6254     case TRAIT_EXPR:
6255       WT (((lang_tree_node *)t)->trait_expression.type1);
6256       WT (((lang_tree_node *)t)->trait_expression.type2);
6257       if (streaming_p ())
6258 	WU (((lang_tree_node *)t)->trait_expression.kind);
6259       break;
6260     }
6261 
6262   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
6263     {
6264       /* We want to stream the type of expression-like nodes /after/
6265          we've streamed the operands.  The type often contains (bits
6266          of the) types of the operands, and with things like decltype
6267          and noexcept in play, we really want to stream the decls
6268          defining the type before we try to stream the type on its
6269          own.  Otherwise we can find ourselves trying to read in a
6270          decl when we're already partially reading in a component of
6271          its type.  And that's bad.  */
6272       tree type = t->typed.type;
6273       unsigned prec = 0;
6274 
6275       switch (code)
6276 	{
6277 	default:
6278 	  break;
6279 
6280 	case TEMPLATE_DECL:
6281 	  /* We fill in the template's type separately.  */
6282 	  type = NULL_TREE;
6283 	  break;
6284 
6285 	case TYPE_DECL:
6286 	  if (DECL_ORIGINAL_TYPE (t) && t == TYPE_NAME (type))
6287 	    /* This is a typedef.  We set its type separately.  */
6288 	    type = NULL_TREE;
6289 	  break;
6290 
6291 	case ENUMERAL_TYPE:
6292 	  if (type && !ENUM_FIXED_UNDERLYING_TYPE_P (t))
6293 	    {
6294 	      /* Type is a restricted range integer type derived from the
6295 		 integer_types.  Find the right one.  */
6296 	      prec = TYPE_PRECISION (type);
6297 	      tree name = DECL_NAME (TYPE_NAME (type));
6298 
6299 	      for (unsigned itk = itk_none; itk--;)
6300 		if (integer_types[itk]
6301 		    && DECL_NAME (TYPE_NAME (integer_types[itk])) == name)
6302 		  {
6303 		    type = integer_types[itk];
6304 		    break;
6305 		  }
6306 	      gcc_assert (type != t->typed.type);
6307 	    }
6308 	  break;
6309 	}
6310 
6311       WT (type);
6312       if (prec && streaming_p ())
6313 	WU (prec);
6314     }
6315 
6316 #undef WT
6317 #undef WU
6318 }
6319 
6320 // Streaming in a reference to a decl can cause that decl to be
6321 // TREE_USED, which is the mark_used behaviour we need most of the
6322 // time.  The trees_in::unused counter can be incremented to inhibit this,
6323 // which is at least needed for vtables.
6324 
6325 bool
6326 trees_in::core_vals (tree t)
6327 {
6328 #define RU(X) ((X) = u ())
6329 #define RUC(T,X) ((X) = T (u ()))
6330 #define RT(X) ((X) = tree_node ())
6331 #define RTU(X) ((X) = tree_node (true))
6332   tree_code code = TREE_CODE (t);
6333 
6334   /* First by tree shape.  */
6335   if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
6336     {
6337       RT (t->decl_minimal.name);
6338       if (!DECL_TEMPLATE_PARM_P (t))
6339 	RT (t->decl_minimal.context);
6340 
6341       /* Don't zap the locus just yet; we don't record it correctly
6342 	 and would thus lose all location information.  */
6343       t->decl_minimal.locus = state->read_location (*this);
6344     }
6345 
6346   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
6347     {
6348       RT (t->type_common.name);
6349       RT (t->type_common.context);
6350 
6351       RT (t->type_common.main_variant);
6352       RT (t->type_common.canonical);
6353 
6354       /* type_common.next_variant is internally manipulated.  */
6355       /* type_common.pointer_to, type_common.reference_to.  */
6356 
6357       RU (t->type_common.precision);
6358       RU (t->type_common.contains_placeholder_bits);
6359       RUC (machine_mode, t->type_common.mode);
6360       RU (t->type_common.align);
6361 
6362       if (!RECORD_OR_UNION_CODE_P (code))
6363 	{
6364 	  RT (t->type_common.size);
6365 	  RT (t->type_common.size_unit);
6366 	}
6367       RT (t->type_common.attributes);
6368 
6369       RT (t->type_common.common.chain); /* TYPE_STUB_DECL.  */
6370     }
6371 
6372   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
6373     {
6374       RUC (machine_mode, t->decl_common.mode);
6375       RU (t->decl_common.off_align);
6376       RU (t->decl_common.align);
6377 
6378       if (code != TEMPLATE_DECL)
6379 	{
6380 	  RT (t->decl_common.size);
6381 	  RT (t->decl_common.size_unit);
6382 	}
6383 
6384       RT (t->decl_common.attributes);
6385       RT (t->decl_common.abstract_origin);
6386     }
6387 
6388   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
6389     {
6390       RT (t->decl_with_vis.assembler_name);
6391       RUC (symbol_visibility, t->decl_with_vis.visibility);
6392     }
6393 
6394   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
6395     {
6396       /* Records and unions hold FIELDS, VFIELD & BINFO on these
6397 	 things.  */
6398       if (!RECORD_OR_UNION_CODE_P (code) && code != ENUMERAL_TYPE)
6399 	{
6400 	  /* This is not clobbering TYPE_CACHED_VALUES, because this
6401 	     is a type that doesn't have any.  */
6402 	  gcc_checking_assert (!TYPE_CACHED_VALUES_P (t));
6403 	  RT (t->type_non_common.values);
6404 	  RT (t->type_non_common.maxval);
6405 	  RT (t->type_non_common.minval);
6406 	}
6407 
6408       RT (t->type_non_common.lang_1);
6409     }
6410 
6411   if (CODE_CONTAINS_STRUCT (code, TS_EXP))
6412     {
6413       t->exp.locus = state->read_location (*this);
6414 
6415       bool vl = TREE_CODE_CLASS (code) == tcc_vl_exp;
6416       for (unsigned limit = (vl ? VL_EXP_OPERAND_LENGTH (t)
6417 			     : TREE_OPERAND_LENGTH (t)),
6418 	     ix = unsigned (vl); ix != limit; ix++)
6419 	RTU (TREE_OPERAND (t, ix));
6420     }
6421 
6422   /* Then by CODE.  Special cases and/or 1:1 tree shape
6423      correspondence.  */
6424   switch (code)
6425     {
6426     default:
6427       break;
6428 
6429     case ARGUMENT_PACK_SELECT:
6430     case DEFERRED_PARSE:
6431     case IDENTIFIER_NODE:
6432     case BINDING_VECTOR:
6433     case SSA_NAME:
6434     case TRANSLATION_UNIT_DECL:
6435     case USERDEF_LITERAL:
6436       return false; /* Should never meet.  */
6437 
6438       /* Constants.  */
6439     case COMPLEX_CST:
6440       RT (TREE_REALPART (t));
6441       RT (TREE_IMAGPART (t));
6442       break;
6443 
6444     case FIXED_CST:
6445       /* Not supported in C++.  */
6446       return false;
6447 
6448     case INTEGER_CST:
6449       {
6450 	unsigned num = TREE_INT_CST_EXT_NUNITS (t);
6451 	for (unsigned ix = 0; ix != num; ix++)
6452 	  TREE_INT_CST_ELT (t, ix) = wu ();
6453       }
6454       break;
6455 
6456     case POLY_INT_CST:
6457       /* Not supported in C++.  */
6458       return false;
6459 
6460     case REAL_CST:
6461       if (const void *bytes = buf (sizeof (real_value)))
6462 	TREE_REAL_CST_PTR (t)
6463 	  = reinterpret_cast<real_value *> (memcpy (ggc_alloc<real_value> (),
6464 						    bytes, sizeof (real_value)));
6465       break;
6466 
6467     case STRING_CST:
6468       /* Streamed during start.  */
6469       break;
6470 
6471     case VECTOR_CST:
6472       for (unsigned ix = vector_cst_encoded_nelts (t); ix--;)
6473 	RT (VECTOR_CST_ENCODED_ELT (t, ix));
6474       break;
6475 
6476       /* Decls.  */
6477     case VAR_DECL:
6478       if (DECL_CONTEXT (t)
6479 	  && TREE_CODE (DECL_CONTEXT (t)) != FUNCTION_DECL)
6480 	break;
6481       /* FALLTHROUGH  */
6482 
6483     case RESULT_DECL:
6484     case PARM_DECL:
6485       if (DECL_HAS_VALUE_EXPR_P (t))
6486 	{
6487 	  /* The DECL_VALUE_EXPR hash table is a cache, so if we're
6488 	     reading a duplicate (which we end up discarding), the
6489 	     value expr will also be cleaned up at the next gc.  */
6490 	  tree val = tree_node ();
6491 	  SET_DECL_VALUE_EXPR (t, val);
6492 	}
6493       /* FALLTHROUGH  */
6494 
6495     case CONST_DECL:
6496     case IMPORTED_DECL:
6497       RT (t->decl_common.initial);
6498       break;
6499 
6500     case FIELD_DECL:
6501       RT (t->field_decl.offset);
6502       RT (t->field_decl.bit_field_type);
6503       RT (t->field_decl.qualifier);
6504       RT (t->field_decl.bit_offset);
6505       RT (t->field_decl.fcontext);
6506       RT (t->decl_common.initial);
6507       break;
6508 
6509     case LABEL_DECL:
6510       RU (t->label_decl.label_decl_uid);
6511       RU (t->label_decl.eh_landing_pad_nr);
6512       break;
6513 
6514     case FUNCTION_DECL:
6515       {
6516 	unsigned bltin = u ();
6517 	t->function_decl.built_in_class = built_in_class (bltin);
6518 	if (bltin != NOT_BUILT_IN)
6519 	  {
6520 	    bltin = u ();
6521 	    DECL_UNCHECKED_FUNCTION_CODE (t) = built_in_function (bltin);
6522 	  }
6523 
6524 	RT (t->function_decl.personality);
6525 	RT (t->function_decl.function_specific_target);
6526 	RT (t->function_decl.function_specific_optimization);
6527 	RT (t->function_decl.vindex);
6528       }
6529       break;
6530 
6531     case USING_DECL:
6532       /* USING_DECL_DECLS  */
6533       RT (t->decl_common.initial);
6534       /* FALLTHROUGH  */
6535 
6536     case TYPE_DECL:
6537       /* USING_DECL: USING_DECL_SCOPE  */
6538       /* TYPE_DECL: DECL_ORIGINAL_TYPE */
6539       RT (t->decl_non_common.result);
6540       break;
6541 
6542       /* Miscellaneous common nodes.  */
6543     case BLOCK:
6544       t->block.locus = state->read_location (*this);
6545       t->block.end_locus = state->read_location (*this);
6546       t->block.vars = chained_decls ();
6547       /* nonlocalized_vars is middle-end.  */
6548       RT (t->block.subblocks);
6549       RT (t->block.supercontext);
6550       RT (t->block.abstract_origin);
6551       /* fragment_origin, fragment_chain are middle-end.  */
6552       RT (t->block.chain);
6553       /* nonlocalized_vars, block_num, die are middle endy/debug
6554 	 things.  */
6555       break;
6556 
6557     case CALL_EXPR:
6558       RUC (internal_fn, t->base.u.ifn);
6559       break;
6560 
6561     case CONSTRUCTOR:
6562       if (unsigned len = u ())
6563 	{
6564 	  vec_alloc (t->constructor.elts, len);
6565 	  for (unsigned ix = 0; ix != len; ix++)
6566 	    {
6567 	      constructor_elt elt;
6568 
6569 	      RT (elt.index);
6570 	      RTU (elt.value);
6571 	      t->constructor.elts->quick_push (elt);
6572 	    }
6573 	}
6574       break;
6575 
6576     case OMP_CLAUSE:
6577       {
6578 	RU (t->omp_clause.subcode.map_kind);
6579 	t->omp_clause.locus = state->read_location (*this);
6580 
6581 	unsigned len = omp_clause_num_ops[OMP_CLAUSE_CODE (t)];
6582 	for (unsigned ix = 0; ix != len; ix++)
6583 	  RT (t->omp_clause.ops[ix]);
6584       }
6585       break;
6586 
6587     case STATEMENT_LIST:
6588       {
6589 	tree_stmt_iterator iter = tsi_start (t);
6590 	for (tree stmt; RT (stmt);)
6591 	  tsi_link_after (&iter, stmt, TSI_CONTINUE_LINKING);
6592       }
6593       break;
6594 
6595     case OPTIMIZATION_NODE:
6596     case TARGET_OPTION_NODE:
6597       /* Not yet implemented, see trees_out::core_vals.  */
6598       gcc_unreachable ();
6599       break;
6600 
6601     case TREE_BINFO:
6602       RT (t->binfo.common.chain);
6603       RT (t->binfo.offset);
6604       RT (t->binfo.inheritance);
6605       RT (t->binfo.vptr_field);
6606 
6607       /* Do not mark the vtables as USED in the address expressions
6608 	 here.  */
6609       unused++;
6610       RT (t->binfo.vtable);
6611       RT (t->binfo.virtuals);
6612       RT (t->binfo.vtt_subvtt);
6613       RT (t->binfo.vtt_vptr);
6614       unused--;
6615 
6616       BINFO_BASE_ACCESSES (t) = tree_vec ();
6617       if (!get_overrun ())
6618 	{
6619 	  unsigned num = vec_safe_length (BINFO_BASE_ACCESSES (t));
6620 	  for (unsigned ix = 0; ix != num; ix++)
6621 	    BINFO_BASE_APPEND (t, tree_node ());
6622 	}
6623       break;
6624 
6625     case TREE_LIST:
6626       RT (t->list.purpose);
6627       RT (t->list.value);
6628       RT (t->list.common.chain);
6629       break;
6630 
6631     case TREE_VEC:
6632       for (unsigned ix = TREE_VEC_LENGTH (t); ix--;)
6633 	RT (TREE_VEC_ELT (t, ix));
6634       RT (t->type_common.common.chain);
6635       break;
6636 
6637       /* C++-specific nodes ...  */
6638     case BASELINK:
6639       RT (((lang_tree_node *)t)->baselink.binfo);
6640       RTU (((lang_tree_node *)t)->baselink.functions);
6641       RT (((lang_tree_node *)t)->baselink.access_binfo);
6642       break;
6643 
6644     case CONSTRAINT_INFO:
6645       RT (((lang_tree_node *)t)->constraint_info.template_reqs);
6646       RT (((lang_tree_node *)t)->constraint_info.declarator_reqs);
6647       RT (((lang_tree_node *)t)->constraint_info.associated_constr);
6648       break;
6649 
6650     case DEFERRED_NOEXCEPT:
6651       RT (((lang_tree_node *)t)->deferred_noexcept.pattern);
6652       RT (((lang_tree_node *)t)->deferred_noexcept.args);
6653       break;
6654 
6655     case LAMBDA_EXPR:
6656       RT (((lang_tree_node *)t)->lambda_expression.capture_list);
6657       RT (((lang_tree_node *)t)->lambda_expression.this_capture);
6658       RT (((lang_tree_node *)t)->lambda_expression.extra_scope);
6659       /* lambda_expression.pending_proxies is NULL  */
6660       ((lang_tree_node *)t)->lambda_expression.locus
6661 	= state->read_location (*this);
6662       RUC (cp_lambda_default_capture_mode_type,
6663 	   ((lang_tree_node *)t)->lambda_expression.default_capture_mode);
6664       RU (((lang_tree_node *)t)->lambda_expression.discriminator);
6665       break;
6666 
6667     case OVERLOAD:
6668       RT (((lang_tree_node *)t)->overload.function);
6669       RT (t->common.chain);
6670       break;
6671 
6672     case PTRMEM_CST:
6673       RT (((lang_tree_node *)t)->ptrmem.member);
6674       break;
6675 
6676     case STATIC_ASSERT:
6677       RT (((lang_tree_node *)t)->static_assertion.condition);
6678       RT (((lang_tree_node *)t)->static_assertion.message);
6679       ((lang_tree_node *)t)->static_assertion.location
6680 	= state->read_location (*this);
6681       break;
6682 
6683     case TEMPLATE_DECL:
6684       /* Streamed when reading the raw template decl itself.  */
6685       gcc_assert (((lang_tree_node *)t)->template_decl.arguments);
6686       gcc_assert (((lang_tree_node *)t)->template_decl.result);
6687       if (DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (t))
6688 	RT (DECL_CHAIN (t));
6689       break;
6690 
6691     case TEMPLATE_INFO:
6692       RT (((lang_tree_node *)t)->template_info.tmpl);
6693       RT (((lang_tree_node *)t)->template_info.args);
6694       if (unsigned len = u ())
6695 	{
6696 	  auto &ac = (((lang_tree_node *)t)
6697 		      ->template_info.deferred_access_checks);
6698 	  vec_alloc (ac, len);
6699 	  for (unsigned ix = 0; ix != len; ix++)
6700 	    {
6701 	      deferred_access_check m;
6702 
6703 	      RT (m.binfo);
6704 	      RT (m.decl);
6705 	      RT (m.diag_decl);
6706 	      m.loc = state->read_location (*this);
6707 	      ac->quick_push (m);
6708 	    }
6709 	}
6710       break;
6711 
6712     case TEMPLATE_PARM_INDEX:
6713       RU (((lang_tree_node *)t)->tpi.index);
6714       RU (((lang_tree_node *)t)->tpi.level);
6715       RU (((lang_tree_node *)t)->tpi.orig_level);
6716       RT (((lang_tree_node *)t)->tpi.decl);
6717       break;
6718 
6719     case TRAIT_EXPR:
6720       RT (((lang_tree_node *)t)->trait_expression.type1);
6721       RT (((lang_tree_node *)t)->trait_expression.type2);
6722       RUC (cp_trait_kind, ((lang_tree_node *)t)->trait_expression.kind);
6723       break;
6724     }
6725 
6726   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
6727     {
6728       tree type = tree_node ();
6729 
6730       if (type && code == ENUMERAL_TYPE && !ENUM_FIXED_UNDERLYING_TYPE_P (t))
6731 	{
6732 	  unsigned precision = u ();
6733 
6734 	  type = build_distinct_type_copy (type);
6735 	  TYPE_PRECISION (type) = precision;
6736 	  set_min_and_max_values_for_integral_type (type, precision,
6737 						    TYPE_SIGN (type));
6738 	}
6739 
6740       if (code != TEMPLATE_DECL)
6741 	t->typed.type = type;
6742     }
6743 
6744 #undef RT
6745 #undef RTU
6746 #undef RU
#undef RUC
6747   return !get_overrun ();
6748 }
6749 
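/* Write out the lang_decl values of T.  Which union members are
   streamed depends on the lang_decl selector (fn, ns, parm, decomp
   or min).  */
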
6750 void
6751 trees_out::lang_decl_vals (tree t)
6752 {
6753   const struct lang_decl *lang = DECL_LANG_SPECIFIC (t);
6754 #define WU(X) (u (X))
6755 #define WT(X) (tree_node (X))
6756   /* Module index already written.  */
6757   switch (lang->u.base.selector)
6758     {
6759     default:
6760       gcc_unreachable ();
6761 
6762     case lds_fn:  /* lang_decl_fn.  */
6763       if (streaming_p ())
6764 	{
6765 	  if (DECL_NAME (t) && IDENTIFIER_OVL_OP_P (DECL_NAME (t)))
6766 	    WU (lang->u.fn.ovl_op_code);
6767 	}
6768 
6769       if (DECL_CLASS_SCOPE_P (t))
6770 	WT (lang->u.fn.context);
6771 
6772       if (lang->u.fn.thunk_p)
6773 	{
6774 	  /* The thunked-to function.  */
6775 	  WT (lang->u.fn.befriending_classes);
6776 	  if (streaming_p ())
6777 	    wi (lang->u.fn.u5.fixed_offset);
6778 	}
6779       else
6780 	WT (lang->u.fn.u5.cloned_function);
6781 
6782       if (FNDECL_USED_AUTO (t))
6783 	WT (lang->u.fn.u.saved_auto_return_type);
6784 
6785       goto lds_min;
6786 
6787     case lds_decomp:  /* lang_decl_decomp.  */
6788       WT (lang->u.decomp.base);
6789       goto lds_min;
6790 
6791     case lds_min:  /* lang_decl_min.  */
6792     lds_min:
6793       WT (lang->u.min.template_info);
6794       {
6795 	tree access = lang->u.min.access;
6796 
6797 	/* DECL_ACCESS needs to be maintained by the definition of the
6798 	   (derived) class that changes the access.  The other users
6799 	   of DECL_ACCESS need to write it here.  */
6800 	if (!DECL_THUNK_P (t)
6801 	    && (DECL_CONTEXT (t) && TYPE_P (DECL_CONTEXT (t))))
6802 	  access = NULL_TREE;
6803 
6804 	WT (access);
6805       }
6806       break;
6807 
6808     case lds_ns:  /* lang_decl_ns.  */
6809       break;
6810 
6811     case lds_parm:  /* lang_decl_parm.  */
6812       if (streaming_p ())
6813 	{
6814 	  WU (lang->u.parm.level);
6815 	  WU (lang->u.parm.index);
6816 	}
6817       break;
6818     }
6819 #undef WU
6820 #undef WT
6821 }
6822 
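/* Read in the lang_decl values of T, mirroring
   trees_out::lang_decl_vals above.  Returns false on stream
   overrun.  */
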
6823 bool
6824 trees_in::lang_decl_vals (tree t)
6825 {
6826   struct lang_decl *lang = DECL_LANG_SPECIFIC (t);
6827 #define RU(X) ((X) = u ())
6828 #define RT(X) ((X) = tree_node ())
6829 
6830   /* Module index already read.  */
6831   switch (lang->u.base.selector)
6832     {
6833     default:
6834       gcc_unreachable ();
6835 
6836     case lds_fn:  /* lang_decl_fn.  */
6837       if (DECL_NAME (t) && IDENTIFIER_OVL_OP_P (DECL_NAME (t)))
6838 	{
6839 	  unsigned code = u ();
6840 
6841 	  /* Check consistency.  */
6842 	  if (code >= OVL_OP_MAX
6843 	      || (ovl_op_info[IDENTIFIER_ASSIGN_OP_P (DECL_NAME (t))][code]
6844 		  .ovl_op_code) == OVL_OP_ERROR_MARK)
6845 	    set_overrun ();
6846 	  else
6847 	    lang->u.fn.ovl_op_code = code;
6848 	}
6849 
6850       if (DECL_CLASS_SCOPE_P (t))
6851 	RT (lang->u.fn.context);
6852 
6853       if (lang->u.fn.thunk_p)
6854 	{
6855 	  RT (lang->u.fn.befriending_classes);
6856 	  lang->u.fn.u5.fixed_offset = wi ();
6857 	}
6858       else
6859 	RT (lang->u.fn.u5.cloned_function);
6860 
6861       if (FNDECL_USED_AUTO (t))
6862 	RT (lang->u.fn.u.saved_auto_return_type);
6863       goto lds_min;
6864 
6865     case lds_decomp:  /* lang_decl_decomp.  */
6866       RT (lang->u.decomp.base);
6867       goto lds_min;
6868 
6869     case lds_min:  /* lang_decl_min.  */
6870     lds_min:
6871       RT (lang->u.min.template_info);
6872       RT (lang->u.min.access);
6873       break;
6874 
6875     case lds_ns:  /* lang_decl_ns.  */
6876       break;
6877 
6878     case lds_parm:  /* lang_decl_parm.  */
6879       RU (lang->u.parm.level);
6880       RU (lang->u.parm.index);
6881       break;
6882     }
6883 #undef RU
6884 #undef RT
6885   return !get_overrun ();
6886 }
6887 
6888 /* Most of the value contents of lang_type are streamed in
6889    define_class.  */
6890 
6891 void
6892 trees_out::lang_type_vals (tree t)
6893 {
6894   const struct lang_type *lang = TYPE_LANG_SPECIFIC (t);
6895 #define WU(X) (u (X))
6896 #define WT(X) (tree_node (X))
6897   if (streaming_p ())
6898     WU (lang->align);
6899 #undef WU
6900 #undef WT
6901 }
6902 
6903 bool
6904 trees_in::lang_type_vals (tree t)
6905 {
6906   struct lang_type *lang = TYPE_LANG_SPECIFIC (t);
6907 #define RU(X) ((X) = u ())
6908 #define RT(X) ((X) = tree_node ())
6909   RU (lang->align);
6910 #undef RU
6911 #undef RT
6912   return !get_overrun ();
6913 }
6914 
6915 /* Write out the bools of T, including whether it has LANG_SPECIFIC
6916    information, so the reader knows whether to allocate a
6917    lang-specific object.  */
6918 
6919 void
6920 trees_out::tree_node_bools (tree t)
6921 {
6922   gcc_checking_assert (streaming_p ());
6923 
6924   /* We should never stream a namespace.  */
6925   gcc_checking_assert (TREE_CODE (t) != NAMESPACE_DECL
6926 		       || DECL_NAMESPACE_ALIAS (t));
6927 
6928   core_bools (t);
6929 
6930   switch (TREE_CODE_CLASS (TREE_CODE (t)))
6931     {
6932     case tcc_declaration:
6933       {
6934 	bool specific = DECL_LANG_SPECIFIC (t) != NULL;
6935 	b (specific);
6936 	if (specific && VAR_P (t))
6937 	  b (DECL_DECOMPOSITION_P (t));
6938 	if (specific)
6939 	  lang_decl_bools (t);
6940       }
6941       break;
6942 
6943     case tcc_type:
6944       {
6945 	bool specific = (TYPE_MAIN_VARIANT (t) == t
6946 			 && TYPE_LANG_SPECIFIC (t) != NULL);
6947 	gcc_assert (TYPE_LANG_SPECIFIC (t)
6948 		    == TYPE_LANG_SPECIFIC (TYPE_MAIN_VARIANT (t)));
6949 
6950 	b (specific);
6951 	if (specific)
6952 	  lang_type_bools (t);
6953       }
6954       break;
6955 
6956     default:
6957       break;
6958     }
6959 
6960   bflush ();
6961 }
6962 
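/* Read in the bools of T.  If the writer flagged lang-specific
   information, allocate the lang_decl or lang_type object before
   reading its bools.  Returns false on error or overrun.  */
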
6963 bool
6964 trees_in::tree_node_bools (tree t)
6965 {
6966   bool ok = core_bools (t);
6967 
6968   if (ok)
6969     switch (TREE_CODE_CLASS (TREE_CODE (t)))
6970       {
6971       case tcc_declaration:
6972 	if (b ())
6973 	  {
6974 	    bool decomp = VAR_P (t) && b ();
6975 
6976 	    ok = maybe_add_lang_decl_raw (t, decomp);
6977 	    if (ok)
6978 	      ok = lang_decl_bools (t);
6979 	}
6980 	break;
6981 
6982       case tcc_type:
6983 	if (b ())
6984 	  {
6985 	    ok = maybe_add_lang_type_raw (t);
6986 	    if (ok)
6987 	      ok = lang_type_bools (t);
6988 	  }
6989 	break;
6990 
6991       default:
6992 	break;
6993       }
6994 
6995   bflush ();
6996   if (!ok || get_overrun ())
6997     return false;
6998 
6999   return true;
7000 }
7001 
7002 
7003 /* Write out the lang-specific values of node T.  */
7004 
7005 void
7006 trees_out::lang_vals (tree t)
7007 {
7008   switch (TREE_CODE_CLASS (TREE_CODE (t)))
7009     {
7010     case tcc_declaration:
7011       if (DECL_LANG_SPECIFIC (t))
7012 	lang_decl_vals (t);
7013       break;
7014 
7015     case tcc_type:
7016       if (TYPE_MAIN_VARIANT (t) == t && TYPE_LANG_SPECIFIC (t))
7017 	lang_type_vals (t);
7018       break;
7019 
7020     default:
7021       break;
7022     }
7023 }
7024 
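/* Read in the lang-specific values of node T.  A type variant
   without its own lang_type shares that of its main variant.  */
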
7025 bool
7026 trees_in::lang_vals (tree t)
7027 {
7028   bool ok = true;
7029 
7030   switch (TREE_CODE_CLASS (TREE_CODE (t)))
7031     {
7032     case tcc_declaration:
7033       if (DECL_LANG_SPECIFIC (t))
7034 	ok = lang_decl_vals (t);
7035       break;
7036 
7037     case tcc_type:
7038       if (TYPE_LANG_SPECIFIC (t))
7039 	ok = lang_type_vals (t);
7040       else
7041 	TYPE_LANG_SPECIFIC (t) = TYPE_LANG_SPECIFIC (TYPE_MAIN_VARIANT (t));
7042       break;
7043 
7044     default:
7045       break;
7046     }
7047 
7048   return ok;
7049 }
7050 
7051 /* Write out the value fields of node T.  */
7052 
7053 void
7054 trees_out::tree_node_vals (tree t)
7055 {
7056   core_vals (t);
7057   lang_vals (t);
7058 }
7059 
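/* Read in the value fields of node T.  */
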
7060 bool
7061 trees_in::tree_node_vals (tree t)
7062 {
7063   bool ok = core_vals (t);
7064   if (ok)
7065     ok = lang_vals (t);
7066 
7067   return ok;
7068 }
7069 
7070 
7071 /* If T is a back reference, fixed reference or NULL, write out its
7072    code and return WK_none.  Otherwise return WK_value if we must write
7073    by value, or WK_normal otherwise.  */
7074 
7075 walk_kind
7076 trees_out::ref_node (tree t)
7077 {
7078   if (!t)
7079     {
7080       if (streaming_p ())
7081 	{
7082 	  /* NULL_TREE -> tt_null.  */
7083 	  null_count++;
7084 	  i (tt_null);
7085 	}
7086       return WK_none;
7087     }
7088 
7089   if (!TREE_VISITED (t))
7090     return WK_normal;
7091 
7092   /* An already-visited tree.  It must be in the map.  */
7093   int val = get_tag (t);
7094 
7095   if (val == tag_value)
7096     /* An entry we should walk into.  */
7097     return WK_value;
7098 
7099   const char *kind;
7100 
7101   if (val <= tag_backref)
7102     {
7103       /* Back reference -> -ve number  */
7104       if (streaming_p ())
7105 	i (val);
7106       kind = "backref";
7107     }
7108   else if (val >= tag_fixed)
7109     {
7110       /* Fixed reference -> tt_fixed */
7111       val -= tag_fixed;
7112       if (streaming_p ())
7113 	i (tt_fixed), u (val);
7114       kind = "fixed";
7115     }
7116 
7117   if (streaming_p ())
7118     {
7119       back_ref_count++;
7120       dump (dumper::TREE)
7121 	&& dump ("Wrote %s:%d %C:%N%S", kind, val, TREE_CODE (t), t, t);
7122     }
7123   return WK_none;
7124 }
7125 
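/* Resolve a back reference.  TAG is negative; ~TAG indexes the
   back_refs vector.  An out-of-range tag, or one that yields a node
   with an invalid TREE_CODE, marks the stream as overrun.  */
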
7126 tree
7127 trees_in::back_ref (int tag)
7128 {
7129   tree res = NULL_TREE;
7130 
7131   if (tag < 0 && unsigned (~tag) < back_refs.length ())
7132     res = back_refs[~tag];
7133 
7134   if (!res
7135       /* Checking TREE_CODE is a dereference, so we know this is not a
7136 	 wild pointer.  Checking the code provides evidence we've not
7137 	 corrupted something.  */
7138       || TREE_CODE (res) >= MAX_TREE_CODES)
7139     set_overrun ();
7140   else
7141     dump (dumper::TREE) && dump ("Read backref:%d found %C:%N%S", tag,
7142 				 TREE_CODE (res), res, res);
7143   return res;
7144 }
7145 
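/* Insert the levels of template parameter list PARMS into the
   back-reference map, stopping at the first already-visited level
   (an enclosing template's parameters).  The number of levels
   inserted is streamed for the reader to mirror.  */
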
7146 unsigned
7147 trees_out::add_indirect_tpl_parms (tree parms)
7148 {
7149   unsigned len = 0;
7150   for (; parms; parms = TREE_CHAIN (parms), len++)
7151     {
7152       if (TREE_VISITED (parms))
7153 	break;
7154 
7155       int tag = insert (parms);
7156       if (streaming_p ())
7157 	dump (dumper::TREE)
7158 	  && dump ("Indirect:%d template's parameter %u %C:%N",
7159 		   tag, len, TREE_CODE (parms), parms);
7160     }
7161 
7162   if (streaming_p ())
7163     u (len);
7164 
7165   return len;
7166 }
7167 
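/* Read the level count streamed by the writer and insert that many
   levels of PARMS into the back-reference map.  */
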
7168 unsigned
7169 trees_in::add_indirect_tpl_parms (tree parms)
7170 {
7171   unsigned len = u ();
7172   for (unsigned ix = 0; ix != len; parms = TREE_CHAIN (parms), ix++)
7173     {
7174       int tag = insert (parms);
7175       dump (dumper::TREE)
7176 	&& dump ("Indirect:%d template's parameter %u %C:%N",
7177 		 tag, ix, TREE_CODE (parms), parms);
7178     }
7179 
7180   return len;
7181 }
7182 
7183 /* We've just found DECL by name.  Insert nodes that come with it, but
7184    cannot be found by name, so we'll not accidentally walk into them.  */
7185 
7186 void
7187 trees_out::add_indirects (tree decl)
7188 {
7189   unsigned count = 0;
7190 
7191   // FIXME:OPTIMIZATION We'll eventually want default fn parms of
7192   // templates and perhaps default template parms too.  The former can
7193   // be referenced from instantiations (as they are lazily
7194   // instantiated).  Also (deferred?) exception specifications of
7195   // templates.  See the note about PARM_DECLs in trees_out::decl_node.
7196   tree inner = decl;
7197   if (TREE_CODE (decl) == TEMPLATE_DECL)
7198     {
7199       count += add_indirect_tpl_parms (DECL_TEMPLATE_PARMS (decl));
7200 
7201       inner = DECL_TEMPLATE_RESULT (decl);
7202       int tag = insert (inner);
7203       if (streaming_p ())
7204 	dump (dumper::TREE)
7205 	  && dump ("Indirect:%d template's result %C:%N",
7206 		   tag, TREE_CODE (inner), inner);
7207       count++;
7208     }
7209 
7210   if (TREE_CODE (inner) == TYPE_DECL)
7211     {
7212       /* Make sure the type is in the map too.  Otherwise we get
7213 	 different RECORD_TYPEs for the same type, and things go
7214 	 south.  */
7215       tree type = TREE_TYPE (inner);
7216       gcc_checking_assert (DECL_ORIGINAL_TYPE (inner)
7217 			   || TYPE_NAME (type) == inner);
7218       int tag = insert (type);
7219       if (streaming_p ())
7220 	dump (dumper::TREE) && dump ("Indirect:%d decl's type %C:%N", tag,
7221 				     TREE_CODE (type), type);
7222       count++;
7223     }
7224 
7225   if (streaming_p ())
7226     {
7227       u (count);
7228       dump (dumper::TREE) && dump ("Inserted %u indirects", count);
7229     }
7230 }
7231 
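/* Read the indirect nodes accompanying DECL (a template's result, a
   type decl's type), mirroring trees_out::add_indirects.  Returns
   false if the count does not match what was streamed.  */
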
7232 bool
7233 trees_in::add_indirects (tree decl)
7234 {
7235   unsigned count = 0;
7236 
7237   tree inner = decl;
7238   if (TREE_CODE (inner) == TEMPLATE_DECL)
7239     {
7240       count += add_indirect_tpl_parms (DECL_TEMPLATE_PARMS (decl));
7241 
7242       inner = DECL_TEMPLATE_RESULT (decl);
7243       int tag = insert (inner);
7244       dump (dumper::TREE)
7245 	&& dump ("Indirect:%d templates's result %C:%N", tag,
7246 		 TREE_CODE (inner), inner);
7247       count++;
7248     }
7249 
7250   if (TREE_CODE (inner) == TYPE_DECL)
7251     {
7252       tree type = TREE_TYPE (inner);
7253       gcc_checking_assert (DECL_ORIGINAL_TYPE (inner)
7254 			   || TYPE_NAME (type) == inner);
7255       int tag = insert (type);
7256       dump (dumper::TREE)
7257 	&& dump ("Indirect:%d decl's type %C:%N", tag, TREE_CODE (type), type);
7258       count++;
7259     }
7260 
7261   dump (dumper::TREE) && dump ("Inserted %u indirects", count);
7262   return count == u ();
7263 }
7264 
7265 /* Stream a template parameter.  There are 4.5 kinds of parameter:
7266    a) Template - TEMPLATE_DECL->TYPE_DECL->TEMPLATE_TEMPLATE_PARM
7267    	TEMPLATE_TYPE_PARM_INDEX TPI
7268    b) Type - TYPE_DECL->TEMPLATE_TYPE_PARM TEMPLATE_TYPE_PARM_INDEX TPI
7269    c.1) NonTYPE - PARM_DECL DECL_INITIAL TPI We meet this first
7270    c.2) NonTYPE - CONST_DECL DECL_INITIAL Same TPI
7271    d) BoundTemplate - TYPE_DECL->BOUND_TEMPLATE_TEMPLATE_PARM
7272        TEMPLATE_TYPE_PARM_INDEX->TPI
7273        TEMPLATE_TEMPLATE_PARM_INFO->TEMPLATE_INFO
7274 
7275    All of these point to a TEMPLATE_PARM_INDEX, and the BoundTemplate case (d) also has a TEMPLATE_INFO.
7276 */
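/* A hypothetical declaration, purely for orientation:

     template<template<class> class TT,   // (a) Template
              typename T,                 // (b) Type
              int N>                      // (c) NonType
     struct S { TT<T> member; };          // TT<T> involves (d) BoundTemplate

   Whichever form we meet a parameter in, it leads back to the same
   TEMPLATE_PARM_INDEX.  */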
7277 
7278 void
7279 trees_out::tpl_parm_value (tree parm)
7280 {
7281   gcc_checking_assert (DECL_P (parm) && DECL_TEMPLATE_PARM_P (parm));
7282 
7283   int parm_tag = insert (parm);
7284   if (streaming_p ())
7285     {
7286       i (tt_tpl_parm);
7287       dump (dumper::TREE) && dump ("Writing template parm:%d %C:%N",
7288 				   parm_tag, TREE_CODE (parm), parm);
7289       start (parm);
7290       tree_node_bools (parm);
7291     }
7292 
7293   tree inner = parm;
7294   if (TREE_CODE (inner) == TEMPLATE_DECL)
7295     {
7296       inner = DECL_TEMPLATE_RESULT (inner);
7297       int inner_tag = insert (inner);
7298       if (streaming_p ())
7299 	{
7300 	  dump (dumper::TREE) && dump ("Writing inner template parm:%d %C:%N",
7301 				       inner_tag, TREE_CODE (inner), inner);
7302 	  start (inner);
7303 	  tree_node_bools (inner);
7304 	}
7305     }
7306 
7307   tree type = NULL_TREE;
7308   if (TREE_CODE (inner) == TYPE_DECL)
7309     {
7310       type = TREE_TYPE (inner);
7311       int type_tag = insert (type);
7312       if (streaming_p ())
7313 	{
7314 	  dump (dumper::TREE) && dump ("Writing template parm type:%d %C:%N",
7315 				       type_tag, TREE_CODE (type), type);
7316 	  start (type);
7317 	  tree_node_bools (type);
7318 	}
7319     }
7320 
7321   if (inner != parm)
7322     {
7323       /* This is a template-template parameter.  */
7324       unsigned tpl_levels = 0;
7325       tpl_header (parm, &tpl_levels);
7326       tpl_parms_fini (parm, tpl_levels);
7327     }
7328 
7329   tree_node_vals (parm);
7330   if (inner != parm)
7331     tree_node_vals (inner);
7332   if (type)
7333     {
7334       tree_node_vals (type);
7335       if (DECL_NAME (inner) == auto_identifier
7336 	  || DECL_NAME (inner) == decltype_auto_identifier)
7337 	{
7338 	  /* Placeholder auto.  */
7339 	  tree_node (DECL_INITIAL (inner));
7340 	  tree_node (DECL_SIZE_UNIT (inner));
7341 	}
7342     }
7343 
7344   if (streaming_p ())
7345     dump (dumper::TREE) && dump ("Wrote template parm:%d %C:%N",
7346 				 parm_tag, TREE_CODE (parm), parm);
7347 }
7348 
7349 tree
7350 trees_in::tpl_parm_value ()
7351 {
7352   tree parm = start ();
7353   if (!parm || !tree_node_bools (parm))
7354     return NULL_TREE;
7355 
7356   int parm_tag = insert (parm);
7357   dump (dumper::TREE) && dump ("Reading template parm:%d %C:%N",
7358 			       parm_tag, TREE_CODE (parm), parm);
7359 
7360   tree inner = parm;
7361   if (TREE_CODE (inner) == TEMPLATE_DECL)
7362     {
7363       inner = start ();
7364       if (!inner || !tree_node_bools (inner))
7365 	return NULL_TREE;
7366       int inner_tag = insert (inner);
7367       dump (dumper::TREE) && dump ("Reading inner template parm:%d %C:%N",
7368 				   inner_tag, TREE_CODE (inner), inner);
7369       DECL_TEMPLATE_RESULT (parm) = inner;
7370     }
7371 
7372   tree type = NULL_TREE;
7373   if (TREE_CODE (inner) == TYPE_DECL)
7374     {
7375       type = start ();
7376       if (!type || !tree_node_bools (type))
7377 	return NULL_TREE;
7378       int type_tag = insert (type);
7379       dump (dumper::TREE) && dump ("Reading template parm type:%d %C:%N",
7380 				   type_tag, TREE_CODE (type), type);
7381 
7382       TREE_TYPE (inner) = TREE_TYPE (parm) = type;
7383       TYPE_NAME (type) = parm;
7384     }
7385 
7386   if (inner != parm)
7387     {
7388       /* A template template parameter.  */
7389       unsigned tpl_levels = 0;
7390       tpl_header (parm, &tpl_levels);
7391       tpl_parms_fini (parm, tpl_levels);
7392     }
7393 
7394   tree_node_vals (parm);
7395   if (inner != parm)
7396     tree_node_vals (inner);
7397   if (type)
7398     {
7399       tree_node_vals (type);
7400       if (DECL_NAME (inner) == auto_identifier
7401 	  || DECL_NAME (inner) == decltype_auto_identifier)
7402 	{
7403 	  /* Placeholder auto.  */
7404 	  DECL_INITIAL (inner) = tree_node ();
7405 	  DECL_SIZE_UNIT (inner) = tree_node ();
7406 	}
7407       if (TYPE_CANONICAL (type))
7408 	{
7409 	  gcc_checking_assert (TYPE_CANONICAL (type) == type);
7410 	  TYPE_CANONICAL (type) = canonical_type_parameter (type);
7411 	}
7412     }
7413 
7414   dump (dumper::TREE) && dump ("Read template parm:%d %C:%N",
7415 			       parm_tag, TREE_CODE (parm), parm);
7416 
7417   return parm;
7418 }
7419 
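/* Write DECL's entity index (zero if DECL is not one of the entities we are
   emitting), so the reader can install the incoming decl as soon as it knows
   the decl is new; the matching read side is trees_in::install_entity.  */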
7420 void
7421 trees_out::install_entity (tree decl, depset *dep)
7422 {
7423   gcc_checking_assert (streaming_p ());
7424 
7425   /* Write the entity index, so we can insert it as soon as we
7426      know this is new.  */
7427   u (dep ? dep->cluster + 1 : 0);
7428   if (CHECKING_P && dep)
7429     {
7430       /* Add it to the entity map, such that we can tell it is
7431 	 part of us.  */
7432       bool existed;
7433       unsigned *slot = &entity_map->get_or_insert
7434 	(DECL_UID (decl), &existed);
7435       if (existed)
7436 	/* If it existed, it should match.  */
7437 	gcc_checking_assert (decl == (*entity_ary)[*slot]);
7438       *slot = ~dep->cluster;
7439     }
7440 }
7441 
7442 bool
7443 trees_in::install_entity (tree decl)
7444 {
7445   unsigned entity_index = u ();
7446   if (!entity_index)
7447     return false;
7448 
7449   if (entity_index > state->entity_num)
7450     {
7451       set_overrun ();
7452       return false;
7453     }
7454 
7455   /* Insert the real decl into the entity ary.  */
7456   unsigned ident = state->entity_lwm + entity_index - 1;
7457   (*entity_ary)[ident] = decl;
7458 
7459   /* And into the entity map, if it's not already there.  */
7460   tree not_tmpl = STRIP_TEMPLATE (decl);
7461   if (!DECL_LANG_SPECIFIC (not_tmpl)
7462       || !DECL_MODULE_ENTITY_P (not_tmpl))
7463     {
7464       retrofit_lang_decl (not_tmpl);
7465       DECL_MODULE_ENTITY_P (not_tmpl) = true;
7466 
7467       /* Insert into the entity hash (it cannot already be there).  */
7468       bool existed;
7469       unsigned &slot = entity_map->get_or_insert (DECL_UID (decl), &existed);
7470       gcc_checking_assert (!existed);
7471       slot = ident;
7472     }
7473 
7474   return true;
7475 }
7476 
7477 static bool has_definition (tree decl);
7478 
7479 /* DECL is a decl node that must be written by value.  DEP is the
7480    decl's depset.  */
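/* Reader's road map (informal, mirroring the code below rather than
   specifying a format): tt_decl marker, merge kind, the is-module and
   has-definition bools, DECL's bools, then headers for any TEMPLATE_DECL
   result and TYPE_DECL type, the container, the mergeable key, the tree
   values, specialization info and constraints, the entity index, attached
   entities, typedef/cdtor flags and possibly an inline definition.  */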
7481 
7482 void
7483 trees_out::decl_value (tree decl, depset *dep)
7484 {
7485   /* We should not be writing clones or template parms.  */
7486   gcc_checking_assert (DECL_P (decl)
7487 		       && !DECL_CLONED_FUNCTION_P (decl)
7488 		       && !DECL_TEMPLATE_PARM_P (decl));
7489 
7490   /* We should never be writing non-typedef ptrmemfuncs by value.  */
7491   gcc_checking_assert (TREE_CODE (decl) != TYPE_DECL
7492 		       || DECL_ORIGINAL_TYPE (decl)
7493 		       || !TYPE_PTRMEMFUNC_P (TREE_TYPE (decl)));
7494 
7495   merge_kind mk = get_merge_kind (decl, dep);
7496 
7497   if (CHECKING_P)
7498     {
7499       /* Never start in the middle of a template.  */
7500       int use_tpl = -1;
7501       if (tree ti = node_template_info (decl, use_tpl))
7502 	gcc_checking_assert (TREE_CODE (TI_TEMPLATE (ti)) == OVERLOAD
7503 			     || TREE_CODE (TI_TEMPLATE (ti)) == FIELD_DECL
7504 			     || (DECL_TEMPLATE_RESULT (TI_TEMPLATE (ti))
7505 				 != decl));
7506     }
7507 
7508   if (streaming_p ())
7509     {
7510       /* A new node -> tt_decl.  */
7511       decl_val_count++;
7512       i (tt_decl);
7513       u (mk);
7514       start (decl);
7515 
7516       if (mk != MK_unique)
7517 	{
7518 	  if (!(mk & MK_template_mask) && !state->is_header ())
7519 	    {
7520 	      /* Tell the importer whether this is a global module entity,
7521 		 or a module entity.  This bool merges into the next block
7522 		 of bools.  Sneaky.  */
7523 	      tree o = get_originating_module_decl (decl);
7524 	      bool is_mod = false;
7525 
7526 	      tree not_tmpl = STRIP_TEMPLATE (o);
7527 	      if (DECL_LANG_SPECIFIC (not_tmpl)
7528 		  && DECL_MODULE_PURVIEW_P (not_tmpl))
7529 		is_mod = true;
7530 
7531 	      b (is_mod);
7532 	    }
7533 	  b (dep && dep->has_defn ());
7534 	}
7535       tree_node_bools (decl);
7536     }
7537 
7538   int tag = insert (decl, WK_value);
7539   if (streaming_p ())
7540     dump (dumper::TREE)
7541       && dump ("Writing %s:%d %C:%N%S", merge_kind_name[mk], tag,
7542 	       TREE_CODE (decl), decl, decl);
7543 
7544   tree inner = decl;
7545   int inner_tag = 0;
7546   if (TREE_CODE (decl) == TEMPLATE_DECL)
7547     {
7548       inner = DECL_TEMPLATE_RESULT (decl);
7549       inner_tag = insert (inner, WK_value);
7550 
7551       if (streaming_p ())
7552 	{
7553 	  int code = TREE_CODE (inner);
7554 	  u (code);
7555 	  start (inner, true);
7556 	  tree_node_bools (inner);
7557 	  dump (dumper::TREE)
7558 	    && dump ("Writing %s:%d %C:%N%S", merge_kind_name[mk], inner_tag,
7559 		     TREE_CODE (inner), inner, inner);
7560 	}
7561     }
7562 
7563   tree type = NULL_TREE;
7564   int type_tag = 0;
7565   tree stub_decl = NULL_TREE;
7566   int stub_tag = 0;
7567   if (TREE_CODE (inner) == TYPE_DECL)
7568     {
7569       type = TREE_TYPE (inner);
7570       bool has_type = (type == TYPE_MAIN_VARIANT (type)
7571 		       && TYPE_NAME (type) == inner);
7572 
7573       if (streaming_p ())
7574 	u (has_type ? TREE_CODE (type) : 0);
7575 
7576       if (has_type)
7577 	{
7578 	  type_tag = insert (type, WK_value);
7579 	  if (streaming_p ())
7580 	    {
7581 	      start (type, true);
7582 	      tree_node_bools (type);
7583 	      dump (dumper::TREE)
7584 		&& dump ("Writing type:%d %C:%N", type_tag,
7585 			 TREE_CODE (type), type);
7586 	    }
7587 
7588 	  stub_decl = TYPE_STUB_DECL (type);
7589 	  bool has_stub = inner != stub_decl;
7590 	  if (streaming_p ())
7591 	    u (has_stub ? TREE_CODE (stub_decl) : 0);
7592 	  if (has_stub)
7593 	    {
7594 	      stub_tag = insert (stub_decl);
7595 	      if (streaming_p ())
7596 		{
7597 		  start (stub_decl, true);
7598 		  tree_node_bools (stub_decl);
7599 		  dump (dumper::TREE)
7600 		    && dump ("Writing stub_decl:%d %C:%N", stub_tag,
7601 			     TREE_CODE (stub_decl), stub_decl);
7602 		}
7603 	    }
7604 	  else
7605 	    stub_decl = NULL_TREE;
7606 	}
7607       else
7608 	/* Regular typedef.  */
7609 	type = NULL_TREE;
7610     }
7611 
7612   /* Stream the container, we want it correctly canonicalized before
7613      we start emitting keys for this decl.  */
7614   tree container = decl_container (decl);
7615 
7616   unsigned tpl_levels = 0;
7617   if (decl != inner)
7618     tpl_header (decl, &tpl_levels);
7619   if (TREE_CODE (inner) == FUNCTION_DECL)
7620     fn_parms_init (inner);
7621 
7622   /* Now write out the merging information, and then really
7623      install the tag values.  */
7624   key_mergeable (tag, mk, decl, inner, container, dep);
7625 
7626   if (streaming_p ())
7627     dump (dumper::MERGE)
7628       && dump ("Wrote:%d's %s merge key %C:%N", tag,
7629 	       merge_kind_name[mk], TREE_CODE (decl), decl);
7630 
7631   if (TREE_CODE (inner) == FUNCTION_DECL)
7632     fn_parms_fini (inner);
7633 
7634   if (!is_key_order ())
7635     tree_node_vals (decl);
7636 
7637   if (inner_tag)
7638     {
7639       if (!is_key_order ())
7640 	tree_node_vals (inner);
7641       tpl_parms_fini (decl, tpl_levels);
7642     }
7643 
7644   if (type && !is_key_order ())
7645     {
7646       tree_node_vals (type);
7647       if (stub_decl)
7648 	tree_node_vals (stub_decl);
7649     }
7650 
7651   if (!is_key_order ())
7652     {
7653       if (mk & MK_template_mask
7654 	  || mk == MK_partial
7655 	  || mk == MK_friend_spec)
7656 	{
7657 	  if (mk != MK_partial)
7658 	    {
7659 	      // FIXME: We should make use of the merge-key by
7660 	      // exposing it outside of key_mergeable.  But this gets
7661 	      // the job done.
7662 	      auto *entry = reinterpret_cast <spec_entry *> (dep->deps[0]);
7663 
7664 	      if (streaming_p ())
7665 		u (get_mergeable_specialization_flags (entry->tmpl, decl));
7666 	      tree_node (entry->tmpl);
7667 	      tree_node (entry->args);
7668 	    }
7669 	  else
7670 	    {
7671 	      tree_node (CLASSTYPE_TI_TEMPLATE (TREE_TYPE (inner)));
7672 	      tree_node (CLASSTYPE_TI_ARGS (TREE_TYPE (inner)));
7673 	    }
7674 	}
7675       tree_node (get_constraints (decl));
7676     }
7677 
7678   if (streaming_p ())
7679     {
7680       /* Do not stray outside this section.  */
7681       gcc_checking_assert (!dep || dep->section == dep_hash->section);
7682 
7683       /* Write the entity index, so we can insert it as soon as we
7684 	 know this is new.  */
7685       install_entity (decl, dep);
7686     }
7687 
7688   if (VAR_OR_FUNCTION_DECL_P (inner)
7689       && DECL_LANG_SPECIFIC (inner)
7690       && DECL_MODULE_ATTACHMENTS_P (inner)
7691       && !is_key_order ())
7692     {
7693       /* Stream the attached entities.  */
7694       auto *attach_vec = attached_table->get (inner);
7695       unsigned num = attach_vec->length ();
7696       if (streaming_p ())
7697 	u (num);
7698       for (unsigned ix = 0; ix != num; ix++)
7699 	{
7700 	  tree attached = (*attach_vec)[ix];
7701 	  tree_node (attached);
7702 	  if (streaming_p ())
7703 	    dump (dumper::MERGE)
7704 	      && dump ("Written %d[%u] attached decl %N", tag, ix, attached);
7705 	}
7706     }
7707 
7708   bool is_typedef = false;
7709   if (!type && TREE_CODE (inner) == TYPE_DECL)
7710     {
7711       tree t = TREE_TYPE (inner);
7712       unsigned tdef_flags = 0;
7713       if (DECL_ORIGINAL_TYPE (inner)
7714 	  && TYPE_NAME (TREE_TYPE (inner)) == inner)
7715 	{
7716 	  tdef_flags |= 1;
7717 	  if (TYPE_STRUCTURAL_EQUALITY_P (t)
7718 	      && TYPE_DEPENDENT_P_VALID (t)
7719 	      && TYPE_DEPENDENT_P (t))
7720 	    tdef_flags |= 2;
7721 	}
7722       if (streaming_p ())
7723 	u (tdef_flags);
7724 
7725       if (tdef_flags & 1)
7726 	{
7727 	  /* A typedef type.  */
7728 	  int type_tag = insert (t);
7729 	  if (streaming_p ())
7730 	    dump (dumper::TREE)
7731 	      && dump ("Cloned:%d %s %C:%N", type_tag,
7732 		       tdef_flags & 2 ? "depalias" : "typedef",
7733 		       TREE_CODE (t), t);
7734 
7735 	  is_typedef = true;
7736 	}
7737     }
7738 
7739   if (streaming_p () && DECL_MAYBE_IN_CHARGE_CDTOR_P (decl))
7740     {
7741       bool cloned_p
7742 	= (DECL_CHAIN (decl) && DECL_CLONED_FUNCTION_P (DECL_CHAIN (decl)));
7743       bool needs_vtt_parm_p
7744 	= (cloned_p && CLASSTYPE_VBASECLASSES (DECL_CONTEXT (decl)));
7745       bool omit_inherited_parms_p
7746 	= (cloned_p && DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (decl)
7747 	   && base_ctor_omit_inherited_parms (decl));
7748       unsigned flags = (int (cloned_p) << 0
7749 			| int (needs_vtt_parm_p) << 1
7750 			| int (omit_inherited_parms_p) << 2);
7751       u (flags);
7752       dump (dumper::TREE) && dump ("CDTOR %N is %scloned",
7753 				   decl, cloned_p ? "" : "not ");
7754     }
7755 
7756   if (streaming_p ())
7757     dump (dumper::TREE) && dump ("Written decl:%d %C:%N", tag,
7758 				 TREE_CODE (decl), decl);
7759 
7760   if (NAMESPACE_SCOPE_P (inner))
7761     gcc_checking_assert (!dep == (VAR_OR_FUNCTION_DECL_P (inner)
7762 				  && DECL_LOCAL_DECL_P (inner)));
7763   else if ((TREE_CODE (inner) == TYPE_DECL
7764 	    && !is_typedef
7765 	    && TYPE_NAME (TREE_TYPE (inner)) == inner)
7766 	   || TREE_CODE (inner) == FUNCTION_DECL)
7767     {
7768       bool write_defn = !dep && has_definition (decl);
7769       if (streaming_p ())
7770 	u (write_defn);
7771       if (write_defn)
7772 	write_definition (decl);
7773     }
7774 }
7775 
7776 tree
7777 trees_in::decl_value ()
7778 {
7779   int tag = 0;
7780   bool is_mod = false;
7781   bool has_defn = false;
7782   unsigned mk_u = u ();
7783   if (mk_u >= MK_hwm || !merge_kind_name[mk_u])
7784     {
7785       set_overrun ();
7786       return NULL_TREE;
7787     }
7788 
7789   unsigned saved_unused = unused;
7790   unused = 0;
7791 
7792   merge_kind mk = merge_kind (mk_u);
7793 
7794   tree decl = start ();
7795   if (decl)
7796     {
7797       if (mk != MK_unique)
7798 	{
7799 	  if (!(mk & MK_template_mask) && !state->is_header ())
7800 	    /* See note in trees_out about where this bool is sequenced.  */
7801 	    is_mod = b ();
7802 
7803 	  has_defn = b ();
7804 	}
7805 
7806       if (!tree_node_bools (decl))
7807 	decl = NULL_TREE;
7808     }
7809 
7810   /* Insert into map.  */
7811   tag = insert (decl);
7812   if (decl)
7813     dump (dumper::TREE)
7814       && dump ("Reading:%d %C", tag, TREE_CODE (decl));
7815 
7816   tree inner = decl;
7817   int inner_tag = 0;
7818   if (decl && TREE_CODE (decl) == TEMPLATE_DECL)
7819     {
7820       int code = u ();
7821       inner = start (code);
7822       if (inner && tree_node_bools (inner))
7823 	DECL_TEMPLATE_RESULT (decl) = inner;
7824       else
7825 	decl = NULL_TREE;
7826 
7827       inner_tag = insert (inner);
7828       if (decl)
7829 	dump (dumper::TREE)
7830 	  && dump ("Reading:%d %C", inner_tag, TREE_CODE (inner));
7831     }
7832 
7833   tree type = NULL_TREE;
7834   int type_tag = 0;
7835   tree stub_decl = NULL_TREE;
7836   int stub_tag = 0;
7837   if (decl && TREE_CODE (inner) == TYPE_DECL)
7838     {
7839       if (unsigned type_code = u ())
7840 	{
7841 	  type = start (type_code);
7842 	  if (type && tree_node_bools (type))
7843 	    {
7844 	      TREE_TYPE (inner) = type;
7845 	      TYPE_NAME (type) = inner;
7846 	    }
7847 	  else
7848 	    decl = NULL_TREE;
7849 
7850 	  type_tag = insert (type);
7851 	  if (decl)
7852 	    dump (dumper::TREE)
7853 	      && dump ("Reading type:%d %C", type_tag, TREE_CODE (type));
7854 
7855 	  if (unsigned stub_code = u ())
7856 	    {
7857 	      stub_decl = start (stub_code);
7858 	      if (stub_decl && tree_node_bools (stub_decl))
7859 		{
7860 		  TREE_TYPE (stub_decl) = type;
7861 		  TYPE_STUB_DECL (type) = stub_decl;
7862 		}
7863 	      else
7864 		decl = NULL_TREE;
7865 
7866 	      stub_tag = insert (stub_decl);
7867 	      if (decl)
7868 		dump (dumper::TREE)
7869 		  && dump ("Reading stub_decl:%d %C", stub_tag,
7870 			   TREE_CODE (stub_decl));
7871 	    }
7872 	}
7873     }
7874 
7875   if (!decl)
7876     {
7877     bail:
7878       if (inner_tag != 0)
7879 	back_refs[~inner_tag] = NULL_TREE;
7880       if (type_tag != 0)
7881 	back_refs[~type_tag] = NULL_TREE;
7882       if (stub_tag != 0)
7883 	back_refs[~stub_tag] = NULL_TREE;
7884       if (tag != 0)
7885 	back_refs[~tag] = NULL_TREE;
7886       set_overrun ();
7887       /* Bail.  */
7888       unused = saved_unused;
7889       return NULL_TREE;
7890     }
7891 
7892   /* Read the container, to ensure it's already been streamed in.  */
7893   tree container = decl_container ();
7894   unsigned tpl_levels = 0;
7895 
7896   /* Figure out if this decl is already known about.  */
7897   int parm_tag = 0;
7898 
7899   if (decl != inner)
7900     if (!tpl_header (decl, &tpl_levels))
7901       goto bail;
7902   if (TREE_CODE (inner) == FUNCTION_DECL)
7903     parm_tag = fn_parms_init (inner);
7904 
7905   tree existing = key_mergeable (tag, mk, decl, inner, type, container, is_mod);
7906   tree existing_inner = existing;
7907   if (existing)
7908     {
7909       if (existing == error_mark_node)
7910 	goto bail;
7911 
7912       if (TREE_CODE (STRIP_TEMPLATE (existing)) == TYPE_DECL)
7913 	{
7914 	  tree etype = TREE_TYPE (existing);
7915 	  if (TYPE_LANG_SPECIFIC (etype)
7916 	      && COMPLETE_TYPE_P (etype)
7917 	      && !CLASSTYPE_MEMBER_VEC (etype))
7918 	    /* Give it a member vec; we're likely going to be looking
7919 	       inside it.  */
7920 	    set_class_bindings (etype, -1);
7921 	}
7922 
7923       /* Install the existing decl into the back ref array.  */
7924       register_duplicate (decl, existing);
7925       back_refs[~tag] = existing;
7926       if (inner_tag != 0)
7927 	{
7928 	  existing_inner = DECL_TEMPLATE_RESULT (existing);
7929 	  back_refs[~inner_tag] = existing_inner;
7930 	}
7931 
7932       if (type_tag != 0)
7933 	{
7934 	  tree existing_type = TREE_TYPE (existing);
7935 	  back_refs[~type_tag] = existing_type;
7936 	  if (stub_tag != 0)
7937 	    back_refs[~stub_tag] = TYPE_STUB_DECL (existing_type);
7938 	}
7939     }
7940 
7941   if (parm_tag)
7942     fn_parms_fini (parm_tag, inner, existing_inner, has_defn);
7943 
7944   if (!tree_node_vals (decl))
7945     goto bail;
7946 
7947   if (inner_tag)
7948     {
7949       gcc_checking_assert (DECL_TEMPLATE_RESULT (decl) == inner);
7950 
7951       if (!tree_node_vals (inner))
7952 	goto bail;
7953 
7954       if (!tpl_parms_fini (decl, tpl_levels))
7955 	goto bail;
7956     }
7957 
7958   if (type && (!tree_node_vals (type)
7959 	       || (stub_decl && !tree_node_vals (stub_decl))))
7960     goto bail;
7961 
7962   spec_entry spec;
7963   unsigned spec_flags = 0;
7964   if (mk & MK_template_mask
7965       || mk == MK_partial
7966       || mk == MK_friend_spec)
7967     {
7968       if (mk == MK_partial)
7969 	spec_flags = 2;
7970       else
7971 	spec_flags = u ();
7972 
7973       spec.tmpl = tree_node ();
7974       spec.args = tree_node ();
7975     }
7976   /* Hold constraints on the spec field, for a short while.  */
7977   spec.spec = tree_node ();
7978 
7979   dump (dumper::TREE) && dump ("Read:%d %C:%N", tag, TREE_CODE (decl), decl);
7980 
7981   existing = back_refs[~tag];
7982   bool installed = install_entity (existing);
7983   bool is_new = existing == decl;
7984 
7985   if (VAR_OR_FUNCTION_DECL_P (inner)
7986       && DECL_LANG_SPECIFIC (inner)
7987       && DECL_MODULE_ATTACHMENTS_P (inner))
7988     {
7989       /* Read and maybe install the attached entities.  */
7990       bool existed;
7991       auto &set = attached_table->get_or_insert (STRIP_TEMPLATE (existing),
7992 						 &existed);
7993       unsigned num = u ();
7994       if (is_new == existed)
7995 	set_overrun ();
7996       if (is_new)
7997 	set.reserve (num);
7998       for (unsigned ix = 0; !get_overrun () && ix != num; ix++)
7999 	{
8000 	  tree attached = tree_node ();
8001 	  dump (dumper::MERGE)
8002 	    && dump ("Read %d[%u] %s attached decl %N", tag, ix,
8003 		     is_new ? "new" : "matched", attached);
8004 	  if (is_new)
8005 	    set.quick_push (attached);
8006 	  else if (set[ix] != attached)
8007 	    set_overrun ();
8008 	}
8009     }
8010 
8011   /* Regular typedefs will have a NULL TREE_TYPE at this point.  */
8012   unsigned tdef_flags = 0;
8013   bool is_typedef = false;
8014   if (!type && TREE_CODE (inner) == TYPE_DECL)
8015     {
8016       tdef_flags = u ();
8017       if (tdef_flags & 1)
8018 	is_typedef = true;
8019     }
8020 
8021   if (is_new)
8022     {
8023       /* A newly discovered node.  */
8024       if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8025 	/* Mark this identifier as naming a virtual function --
8026 	   lookup_overrides relies on this optimization.  */
8027 	IDENTIFIER_VIRTUAL_P (DECL_NAME (decl)) = true;
8028 
8029       if (installed)
8030 	{
8031 	  /* Mark the entity as imported.  */
8032 	  retrofit_lang_decl (inner);
8033 	  DECL_MODULE_IMPORT_P (inner) = true;
8034 	}
8035 
8036       if (spec.spec)
8037 	set_constraints (decl, spec.spec);
8038 
8039       if (TREE_CODE (decl) == INTEGER_CST && !TREE_OVERFLOW (decl))
8040 	{
8041 	  decl = cache_integer_cst (decl, true);
8042 	  back_refs[~tag] = decl;
8043 	}
8044 
8045       if (is_typedef)
8046 	{
8047 	  /* Frob it to be ready for cloning.  */
8048 	  TREE_TYPE (inner) = DECL_ORIGINAL_TYPE (inner);
8049 	  DECL_ORIGINAL_TYPE (inner) = NULL_TREE;
8050 	  set_underlying_type (inner);
8051 	  if (tdef_flags & 2)
8052 	    {
8053 	      /* Match instantiate_alias_template's handling.  */
8054 	      tree type = TREE_TYPE (inner);
8055 	      TYPE_DEPENDENT_P (type) = true;
8056 	      TYPE_DEPENDENT_P_VALID (type) = true;
8057 	      SET_TYPE_STRUCTURAL_EQUALITY (type);
8058 	    }
8059 	}
8060 
8061       if (inner_tag)
8062 	/* Set the TEMPLATE_DECL's type.  */
8063 	TREE_TYPE (decl) = TREE_TYPE (inner);
8064 
8065       if (mk & MK_template_mask
8066 	  || mk == MK_partial)
8067 	{
8068 	  /* Add to specialization tables now that constraints etc are
8069 	     added.  */
8070 	  bool is_type = mk == MK_partial || !(mk & MK_tmpl_decl_mask);
8071 
8072 	  spec.spec = is_type ? type : mk & MK_tmpl_tmpl_mask ? inner : decl;
8073 	  add_mergeable_specialization (!is_type,
8074 					!is_type && mk & MK_tmpl_alias_mask,
8075 					&spec, decl, spec_flags);
8076 	}
8077 
8078       if (NAMESPACE_SCOPE_P (decl)
8079 	  && (mk == MK_named || mk == MK_unique
8080 	      || mk == MK_enum || mk == MK_friend_spec)
8081 	  && !(VAR_OR_FUNCTION_DECL_P (decl) && DECL_LOCAL_DECL_P (decl)))
8082 	add_module_namespace_decl (CP_DECL_CONTEXT (decl), decl);
8083 
8084       if (DECL_ARTIFICIAL (decl)
8085 	  && TREE_CODE (decl) == FUNCTION_DECL
8086 	  && !DECL_TEMPLATE_INFO (decl)
8087 	  && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
8088 	  && TYPE_SIZE (DECL_CONTEXT (decl))
8089 	  && !DECL_THUNK_P (decl))
8090 	/* A new implicit member function, when the class is
8091 	   complete.  This means the importee declared it, and
8092 	   we must now add it to the class.  Note that implicit
8093 	   member fns of template instantiations do not themselves
8094 	   look like templates.  */
8095 	if (!install_implicit_member (inner))
8096 	  set_overrun ();
8097     }
8098   else
8099     {
8100       /* DECL is the to-be-discarded decl.  Its internal pointers will
8101 	 be to the EXISTING's structure.  Frob it to point to its
8102 	 own other structures, so loading its definition will alter
8103 	 it, and not the existing decl.  */
8104       dump (dumper::MERGE) && dump ("Deduping %N", existing);
8105 
8106       if (inner_tag)
8107 	DECL_TEMPLATE_RESULT (decl) = inner;
8108 
8109       if (type)
8110 	{
8111 	  /* Point at the to-be-discarded type & decl.  */
8112 	  TYPE_NAME (type) = inner;
8113 	  TREE_TYPE (inner) = type;
8114 
8115 	  TYPE_STUB_DECL (type) = stub_decl ? stub_decl : inner;
8116 	  if (stub_decl)
8117 	    TREE_TYPE (stub_decl) = type;
8118 	}
8119 
8120       if (inner_tag)
8121 	/* Set the TEMPLATE_DECL's type.  */
8122 	TREE_TYPE (decl) = TREE_TYPE (inner);
8123 
8124       if (!is_matching_decl (existing, decl, is_typedef))
8125 	unmatched_duplicate (existing);
8126 
8127       if (TREE_CODE (inner) == FUNCTION_DECL)
8128 	{
8129 	  tree e_inner = STRIP_TEMPLATE (existing);
8130 	  for (auto parm = DECL_ARGUMENTS (inner);
8131 	       parm; parm = DECL_CHAIN (parm))
8132 	    DECL_CONTEXT (parm) = e_inner;
8133 	}
8134 
8135       /* And our result is the existing node.  */
8136       decl = existing;
8137     }
8138 
8139   if (mk == MK_friend_spec)
8140     {
8141       tree e = match_mergeable_specialization (true, &spec);
8142       if (!e)
8143 	{
8144 	  spec.spec = inner;
8145 	  add_mergeable_specialization (true, false, &spec, decl, spec_flags);
8146 	}
8147       else if (e != existing)
8148 	set_overrun ();
8149     }
8150 
8151   if (is_typedef)
8152     {
8153       /* Insert the type into the array now.  */
8154       tag = insert (TREE_TYPE (decl));
8155       dump (dumper::TREE)
8156 	&& dump ("Cloned:%d typedef %C:%N",
8157 		 tag, TREE_CODE (TREE_TYPE (decl)), TREE_TYPE (decl));
8158     }
8159 
8160   unused = saved_unused;
8161 
8162   if (DECL_MAYBE_IN_CHARGE_CDTOR_P (decl))
8163     {
8164       unsigned flags = u ();
8165 
8166       if (is_new)
8167 	{
8168 	  bool cloned_p = flags & 1;
8169 	  dump (dumper::TREE) && dump ("CDTOR %N is %scloned",
8170 				       decl, cloned_p ? "" : "not ");
8171 	  if (cloned_p)
8172 	    build_cdtor_clones (decl, flags & 2, flags & 4,
8173 				/* Update the member vec, if there is
8174 				   one (we're in a different cluster
8175 				   to the class defn).  */
8176 				CLASSTYPE_MEMBER_VEC (DECL_CONTEXT (decl)));
8177 	}
8178     }
8179 
8180   if (!NAMESPACE_SCOPE_P (inner)
8181       && ((TREE_CODE (inner) == TYPE_DECL
8182 	   && !is_typedef
8183 	   && TYPE_NAME (TREE_TYPE (inner)) == inner)
8184 	  || TREE_CODE (inner) == FUNCTION_DECL)
8185       && u ())
8186     read_definition (decl);
8187 
8188   return decl;
8189 }
8190 
8191 /* DECL is an unnameable member of CTX.  Return a suitable identifying
8192    index.  */
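/* Sketch (hypothetical class, for illustration only):

     struct B { int f; };
     struct D : B { int a; union { int x; }; using B::f; };

   Only unnameable members are counted, in TYPE_FIELDS order -- here the
   anonymous union's FIELD_DECL and the USING_DECL for B::f -- so the named
   field `a' never shifts the index.  */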
8193 
8194 static unsigned
8195 get_field_ident (tree ctx, tree decl)
8196 {
8197   gcc_checking_assert (TREE_CODE (decl) == USING_DECL
8198 		       || !DECL_NAME (decl)
8199 		       || IDENTIFIER_ANON_P (DECL_NAME (decl)));
8200 
8201   unsigned ix = 0;
8202   for (tree fields = TYPE_FIELDS (ctx);
8203        fields; fields = DECL_CHAIN (fields))
8204     {
8205       if (fields == decl)
8206 	return ix;
8207 
8208       if (DECL_CONTEXT (fields) == ctx
8209 	  && (TREE_CODE (fields) == USING_DECL
8210 	      || (TREE_CODE (fields) == FIELD_DECL
8211 		  && (!DECL_NAME (fields)
8212 		      || IDENTIFIER_ANON_P (DECL_NAME (fields))))))
8213 	/* Count this field.  */
8214 	ix++;
8215     }
8216   gcc_unreachable ();
8217 }
8218 
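/* Inverse of get_field_ident: return the IX'th unnameable member (USING_DECL
   or unnamed/anonymous FIELD_DECL) of CTX, or NULL_TREE if there are fewer
   than IX + 1 such members.  */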
8219 static tree
8220 lookup_field_ident (tree ctx, unsigned ix)
8221 {
8222   for (tree fields = TYPE_FIELDS (ctx);
8223        fields; fields = DECL_CHAIN (fields))
8224     if (DECL_CONTEXT (fields) == ctx
8225 	&& (TREE_CODE (fields) == USING_DECL
8226 	    || (TREE_CODE (fields) == FIELD_DECL
8227 		&& (!DECL_NAME (fields)
8228 		    || IDENTIFIER_ANON_P (DECL_NAME (fields))))))
8229       if (!ix--)
8230 	return fields;
8231 
8232   return NULL_TREE;
8233 }
8234 
8235 /* Reference DECL.  REF indicates the walk kind we are performing.
8236    Return true if we should write this decl by value.  */
8237 
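/* Informal summary of the cases below (not a specification): decls that
   hang off something findable are written as that something plus a key --
   a PARM_DECL via its function, an enumerator via its ENUMERAL_TYPE and
   name, a member via its class and name (or field index), and similarly
   vtables, tinfo, thunks and clones; the result of a template is written
   via its TEMPLATE_DECL.  Whatever remains is either in the entity table,
   streamed as tt_entity with an import and index, or some internal entity
   written by value.  */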
8238 bool
8239 trees_out::decl_node (tree decl, walk_kind ref)
8240 {
8241   gcc_checking_assert (DECL_P (decl) && !DECL_TEMPLATE_PARM_P (decl)
8242 		       && DECL_CONTEXT (decl));
8243 
8244   if (ref == WK_value)
8245     {
8246       depset *dep = dep_hash->find_dependency (decl);
8247       decl_value (decl, dep);
8248       return false;
8249     }
8250 
8251   switch (TREE_CODE (decl))
8252     {
8253     default:
8254       break;
8255 
8256     case FUNCTION_DECL:
8257       gcc_checking_assert (!DECL_LOCAL_DECL_P (decl));
8258       break;
8259 
8260     case RESULT_DECL:
8261       /* Unlike PARM_DECLs, RESULT_DECLs are only generated and
8262          referenced when we're inside the function itself.  */
8263       return true;
8264 
8265     case PARM_DECL:
8266       {
8267 	if (streaming_p ())
8268 	  i (tt_parm);
8269 	tree_node (DECL_CONTEXT (decl));
8270 	if (streaming_p ())
8271 	  {
8272 	    /* That must have put this in the map.  */
8273 	    walk_kind ref = ref_node (decl);
8274 	    if (ref != WK_none)
8275 	      // FIXME:OPTIMIZATION We can wander into bits of the
8276 	      // template this was instantiated from.  For instance
8277 	      // deferred noexcept and default parms.  Currently we'll
8278 	      // end up cloning those bits of tree.  It would be nice
8279 	      // to reference those specific nodes.  I think we should put
8280 	      // those things in the map when we reference their
8281 	      // template by name.  See the note in add_indirects.
8282 	      return true;
8283 
8284 	    dump (dumper::TREE)
8285 	      && dump ("Wrote %s reference %N",
8286 		       TREE_CODE (decl) == PARM_DECL ? "parameter" : "result",
8287 		       decl);
8288 	  }
8289       }
8290       return false;
8291 
8292     case IMPORTED_DECL:
8293       /* This describes a USING_DECL to the middle end's debug machinery.  It
8294 	 originates from the Fortran FE, and has nothing to do with
8295 	 C++ modules.  */
8296       return true;
8297 
8298     case LABEL_DECL:
8299       return true;
8300 
8301     case CONST_DECL:
8302       {
8303 	/* If I end up cloning enum decls, implementing C++20 using
8304 	   E::v, this will need tweaking.   */
8305 	if (streaming_p ())
8306 	  i (tt_enum_decl);
8307 	tree ctx = DECL_CONTEXT (decl);
8308 	gcc_checking_assert (TREE_CODE (ctx) == ENUMERAL_TYPE);
8309 	tree_node (ctx);
8310 	tree_node (DECL_NAME (decl));
8311 
8312 	int tag = insert (decl);
8313 	if (streaming_p ())
8314 	  dump (dumper::TREE)
8315 	    && dump ("Wrote enum decl:%d %C:%N", tag, TREE_CODE (decl), decl);
8316 	return false;
8317       }
8318       break;
8319 
8320     case USING_DECL:
8321       if (TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
8322 	break;
8323       /* FALLTHROUGH  */
8324 
8325     case FIELD_DECL:
8326       {
8327 	if (streaming_p ())
8328 	  i (tt_data_member);
8329 
8330 	tree ctx = DECL_CONTEXT (decl);
8331 	tree_node (ctx);
8332 
8333 	tree name = NULL_TREE;
8334 
8335 	if (TREE_CODE (decl) == USING_DECL)
8336 	  ;
8337 	else
8338 	  {
8339 	    name = DECL_NAME (decl);
8340 	    if (name && IDENTIFIER_ANON_P (name))
8341 	      name = NULL_TREE;
8342 	  }
8343 
8344 	tree_node (name);
8345 	if (!name && streaming_p ())
8346 	  {
8347 	    unsigned ix = get_field_ident (ctx, decl);
8348 	    u (ix);
8349 	  }
8350 
8351 	int tag = insert (decl);
8352 	if (streaming_p ())
8353 	  dump (dumper::TREE)
8354 	    && dump ("Wrote member:%d %C:%N", tag, TREE_CODE (decl), decl);
8355 	return false;
8356       }
8357       break;
8358 
8359     case VAR_DECL:
8360       gcc_checking_assert (!DECL_LOCAL_DECL_P (decl));
8361       if (DECL_VTABLE_OR_VTT_P (decl))
8362 	{
8363 	  /* VTT or VTABLE, they are all on the vtables list.  */
8364 	  tree ctx = CP_DECL_CONTEXT (decl);
8365 	  tree vtable = CLASSTYPE_VTABLES (ctx);
8366 	  for (unsigned ix = 0; ; vtable = DECL_CHAIN (vtable), ix++)
8367 	    if (vtable == decl)
8368 	      {
8369 		gcc_checking_assert (DECL_VIRTUAL_P (decl));
8370 		if (streaming_p ())
8371 		  {
8372 		    u (tt_vtable);
8373 		    u (ix);
8374 		    dump (dumper::TREE)
8375 		      && dump ("Writing vtable %N[%u]", ctx, ix);
8376 		  }
8377 		tree_node (ctx);
8378 		return false;
8379 	      }
8380 	  gcc_unreachable ();
8381 	}
8382 
8383       if (DECL_TINFO_P (decl))
8384 	{
8385 	tinfo:
8386 	  /* A typeinfo, tt_tinfo_typedef or tt_tinfo_var.  */
8387 	  bool is_var = TREE_CODE (decl) == VAR_DECL;
8388 	  tree type = TREE_TYPE (decl);
8389 	  unsigned ix = get_pseudo_tinfo_index (type);
8390 	  if (streaming_p ())
8391 	    {
8392 	      i (is_var ? tt_tinfo_var : tt_tinfo_typedef);
8393 	      u (ix);
8394 	    }
8395 
8396 	  if (is_var)
8397 	    {
8398 	      /* We also need the type it is for and mangled name, so
8399 		 the reader doesn't need to complete the type (which
8400 		 would break section ordering).  The type it is for is
8401 		 stashed on the name's TREE_TYPE.  */
8402 	      tree name = DECL_NAME (decl);
8403 	      tree_node (name);
8404 	      type = TREE_TYPE (name);
8405 	      tree_node (type);
8406 	    }
8407 
8408 	  int tag = insert (decl);
8409 	  if (streaming_p ())
8410 	    dump (dumper::TREE)
8411 	      && dump ("Wrote tinfo_%s:%d %u %N", is_var ? "var" : "type",
8412 		       tag, ix, type);
8413 
8414 	  if (!is_var)
8415 	    {
8416 	      tag = insert (type);
8417 	      if (streaming_p ())
8418 		dump (dumper::TREE)
8419 		  && dump ("Wrote tinfo_type:%d %u %N", tag, ix, type);
8420 	    }
8421 	  return false;
8422 	}
8423       break;
8424 
8425     case TYPE_DECL:
8426       if (DECL_TINFO_P (decl))
8427 	goto tinfo;
8428       break;
8429     }
8430 
8431   if (DECL_THUNK_P (decl))
8432     {
8433       /* Thunks are similar to binfos -- write the thunked-to decl and
8434 	 then thunk-specific key info.  */
8435       if (streaming_p ())
8436 	{
8437 	  i (tt_thunk);
8438 	  i (THUNK_FIXED_OFFSET (decl));
8439 	}
8440 
8441       tree target = decl;
8442       while (DECL_THUNK_P (target))
8443 	target = THUNK_TARGET (target);
8444       tree_node (target);
8445       tree_node (THUNK_VIRTUAL_OFFSET (decl));
8446       int tag = insert (decl);
8447       if (streaming_p ())
8448 	dump (dumper::TREE)
8449 	  && dump ("Wrote:%d thunk %N to %N", tag, DECL_NAME (decl), target);
8450       return false;
8451     }
8452 
8453   if (DECL_CLONED_FUNCTION_P (decl))
8454     {
8455       tree target = get_clone_target (decl);
8456       if (streaming_p ())
8457 	i (tt_clone_ref);
8458 
8459       tree_node (target);
8460       tree_node (DECL_NAME (decl));
8461       int tag = insert (decl);
8462       if (streaming_p ())
8463 	dump (dumper::TREE)
8464 	  && dump ("Wrote:%d clone %N of %N", tag, DECL_NAME (decl), target);
8465       return false;
8466     }
8467 
8468   /* Everything left should be a thing that is in the entity table.
8469      Mostly things that can be defined outside of their (original
8470      declaration) context.  */
8471   gcc_checking_assert (TREE_CODE (decl) == TEMPLATE_DECL
8472 		       || TREE_CODE (decl) == VAR_DECL
8473 		       || TREE_CODE (decl) == FUNCTION_DECL
8474 		       || TREE_CODE (decl) == TYPE_DECL
8475 		       || TREE_CODE (decl) == USING_DECL
8476 		       || TREE_CODE (decl) == CONCEPT_DECL
8477 		       || TREE_CODE (decl) == NAMESPACE_DECL);
8478 
8479   int use_tpl = -1;
8480   tree ti = node_template_info (decl, use_tpl);
8481   tree tpl = NULL_TREE;
8482 
8483   /* If this is the TEMPLATE_DECL_RESULT of a TEMPLATE_DECL, get the
8484      TEMPLATE_DECL.  Note TI_TEMPLATE is not a TEMPLATE_DECL for
8485      (some) friends, so we need to check that.  */
8486   // FIXME: Should local friend template specializations be by value?
8487   // They don't get idents so we'll never know they're imported, but I
8488   // think we can only reach them from the TU that defines the
8489   // befriending class?
8490   if (ti && TREE_CODE (TI_TEMPLATE (ti)) == TEMPLATE_DECL
8491       && DECL_TEMPLATE_RESULT (TI_TEMPLATE (ti)) == decl)
8492     {
8493       tpl = TI_TEMPLATE (ti);
8494     partial_template:
8495       if (streaming_p ())
8496 	{
8497 	  i (tt_template);
8498 	  dump (dumper::TREE)
8499 	    && dump ("Writing implicit template %C:%N%S",
8500 		     TREE_CODE (tpl), tpl, tpl);
8501 	}
8502       tree_node (tpl);
8503 
8504       /* Streaming TPL caused us to visit DECL and maybe its type.  */
8505       gcc_checking_assert (TREE_VISITED (decl));
8506       if (DECL_IMPLICIT_TYPEDEF_P (decl))
8507 	gcc_checking_assert (TREE_VISITED (TREE_TYPE (decl)));
8508       return false;
8509     }
8510 
8511   tree ctx = CP_DECL_CONTEXT (decl);
8512   depset *dep = NULL;
8513   if (streaming_p ())
8514     dep = dep_hash->find_dependency (decl);
8515   else if (TREE_CODE (ctx) != FUNCTION_DECL
8516 	   || TREE_CODE (decl) == TEMPLATE_DECL
8517 	   || (dep_hash->sneakoscope && DECL_IMPLICIT_TYPEDEF_P (decl))
8518 	   || (DECL_LANG_SPECIFIC (decl)
8519 	       && DECL_MODULE_IMPORT_P (decl)))
8520     {
8521       auto kind = (TREE_CODE (decl) == NAMESPACE_DECL
8522 		   && !DECL_NAMESPACE_ALIAS (decl)
8523 		   ? depset::EK_NAMESPACE : depset::EK_DECL);
8524       dep = dep_hash->add_dependency (decl, kind);
8525     }
8526 
8527   if (!dep)
8528     {
8529       /* Some internal entity of context.  Do by value.  */
8530       decl_value (decl, NULL);
8531       return false;
8532     }
8533 
8534   if (dep->get_entity_kind () == depset::EK_REDIRECT)
8535     {
8536       /* The DECL_TEMPLATE_RESULT of a partial specialization.
8537 	 Write the partial specialization's template.  */
8538       depset *redirect = dep->deps[0];
8539       gcc_checking_assert (redirect->get_entity_kind () == depset::EK_PARTIAL);
8540       tpl = redirect->get_entity ();
8541       goto partial_template;
8542     }
8543 
8544   if (streaming_p ())
8545     {
8546       /* Locate the entity.  */
8547       unsigned index = dep->cluster;
8548       unsigned import = 0;
8549 
8550       if (dep->is_import ())
8551 	import = dep->section;
8552       else if (CHECKING_P)
8553 	/* It should be what we put there.  */
8554 	gcc_checking_assert (index == ~import_entity_index (decl));
8555 
8556 #if CHECKING_P
8557       gcc_assert (!import || importedness >= 0);
8558 #endif
8559       i (tt_entity);
8560       u (import);
8561       u (index);
8562     }
8563 
8564   int tag = insert (decl);
8565   if (streaming_p () && dump (dumper::TREE))
8566     {
8567       char const *kind = "import";
8568       module_state *from = (*modules)[0];
8569       if (dep->is_import ())
8570 	/* Rediscover the unremapped index.  */
8571 	from = import_entity_module (import_entity_index (decl));
8572       else
8573 	{
8574 	  tree o = get_originating_module_decl (decl);
8575 	  o = STRIP_TEMPLATE (o);
8576 	  kind = (DECL_LANG_SPECIFIC (o) && DECL_MODULE_PURVIEW_P (o)
8577 		  ? "purview" : "GMF");
8578 	}
8579       dump ("Wrote %s:%d %C:%N@%M", kind,
8580 	    tag, TREE_CODE (decl), decl, from);
8581     }
8582 
8583   add_indirects (decl);
8584 
8585   return false;
8586 }
8587 
8588 void
8589 trees_out::type_node (tree type)
8590 {
8591   gcc_assert (TYPE_P (type));
8592 
8593   tree root = (TYPE_NAME (type)
8594 	       ? TREE_TYPE (TYPE_NAME (type)) : TYPE_MAIN_VARIANT (type));
8595 
8596   if (type != root)
8597     {
8598       if (streaming_p ())
8599 	i (tt_variant_type);
8600       tree_node (root);
8601 
8602       int flags = -1;
8603 
8604       if (TREE_CODE (type) == FUNCTION_TYPE
8605 	  || TREE_CODE (type) == METHOD_TYPE)
8606 	{
8607 	  int quals = type_memfn_quals (type);
8608 	  int rquals = type_memfn_rqual (type);
8609 	  tree raises = TYPE_RAISES_EXCEPTIONS (type);
8610 	  bool late = TYPE_HAS_LATE_RETURN_TYPE (type);
8611 
8612 	  if (raises != TYPE_RAISES_EXCEPTIONS (root)
8613 	      || rquals != type_memfn_rqual (root)
8614 	      || quals != type_memfn_quals (root)
8615 	      || late != TYPE_HAS_LATE_RETURN_TYPE (root))
8616 	    flags = rquals | (int (late) << 2) | (quals << 3);
8617 	}
8618       else
8619 	{
8620 	  if (TYPE_USER_ALIGN (type))
8621 	    flags = TYPE_ALIGN_RAW (type);
8622 	}
8623 
8624       if (streaming_p ())
8625 	i (flags);
8626 
8627       if (flags < 0)
8628 	;
8629       else if (TREE_CODE (type) == FUNCTION_TYPE
8630 	       || TREE_CODE (type) == METHOD_TYPE)
8631 	{
8632 	  tree raises = TYPE_RAISES_EXCEPTIONS (type);
8633 	  if (raises == TYPE_RAISES_EXCEPTIONS (root))
8634 	    raises = error_mark_node;
8635 	  tree_node (raises);
8636 	}
8637 
8638       tree_node (TYPE_ATTRIBUTES (type));
8639 
8640       if (streaming_p ())
8641 	{
8642 	  /* Qualifiers.  */
8643 	  int rquals = cp_type_quals (root);
8644 	  int quals = cp_type_quals (type);
8645 	  if (quals == rquals)
8646 	    quals = -1;
8647 	  i (quals);
8648 	}
8649 
8650       if (ref_node (type) != WK_none)
8651 	{
8652 	  int tag = insert (type);
8653 	  if (streaming_p ())
8654 	    {
8655 	      i (0);
8656 	      dump (dumper::TREE)
8657 		&& dump ("Wrote:%d variant type %C", tag, TREE_CODE (type));
8658 	    }
8659 	}
8660       return;
8661     }
8662 
8663   if (tree name = TYPE_NAME (type))
8664     if ((TREE_CODE (name) == TYPE_DECL && DECL_ORIGINAL_TYPE (name))
8665 	|| DECL_TEMPLATE_PARM_P (name)
8666 	|| TREE_CODE (type) == RECORD_TYPE
8667 	|| TREE_CODE (type) == UNION_TYPE
8668 	|| TREE_CODE (type) == ENUMERAL_TYPE)
8669       {
8670 	/* We can meet template parms that we didn't meet in the
8671 	   tpl_parms walk, because we're referring to a derived type
8672 	   that was previously constructed from equivalent template
8673 	   parms. */
8674 	if (streaming_p ())
8675 	  {
8676 	    i (tt_typedef_type);
8677 	    dump (dumper::TREE)
8678 	      && dump ("Writing %stypedef %C:%N",
8679 		       DECL_IMPLICIT_TYPEDEF_P (name) ? "implicit " : "",
8680 		       TREE_CODE (name), name);
8681 	  }
8682 	tree_node (name);
8683 	if (streaming_p ())
8684 	  dump (dumper::TREE) && dump ("Wrote typedef %C:%N%S",
8685 				       TREE_CODE (name), name, name);
8686 	gcc_checking_assert (TREE_VISITED (type));
8687 	return;
8688       }
8689 
8690   if (TYPE_PTRMEMFUNC_P (type))
8691     {
8692       /* This is a distinct type node, masquerading as a structure. */
8693       tree fn_type = TYPE_PTRMEMFUNC_FN_TYPE (type);
8694       if (streaming_p ())
8695 	i (tt_ptrmem_type);
8696       tree_node (fn_type);
8697       int tag = insert (type);
8698       if (streaming_p ())
8699 	dump (dumper::TREE) && dump ("Written:%d ptrmem type", tag);
8700       return;
8701     }
8702 
8703   if (streaming_p ())
8704     {
8705       u (tt_derived_type);
8706       u (TREE_CODE (type));
8707     }
8708 
8709   tree_node (TREE_TYPE (type));
8710   switch (TREE_CODE (type))
8711     {
8712     default:
8713       /* We should never meet a type here that is indescribable in
8714 	 terms of other types.  */
8715       gcc_unreachable ();
8716 
8717     case ARRAY_TYPE:
8718       tree_node (TYPE_DOMAIN (type));
8719       if (streaming_p ())
8720 	/* Dependent arrays are constructed with TYPE_DEPENDENT_P
8721 	   already set.  */
8722 	u (TYPE_DEPENDENT_P (type));
8723       break;
8724 
8725     case COMPLEX_TYPE:
8726       /* No additional data.  */
8727       break;
8728 
8729     case BOOLEAN_TYPE:
8730       /* A non-standard boolean type.  */
8731       if (streaming_p ())
8732 	u (TYPE_PRECISION (type));
8733       break;
8734 
8735     case INTEGER_TYPE:
8736       if (TREE_TYPE (type))
8737 	{
8738 	  /* A range type (representing an array domain).  */
8739 	  tree_node (TYPE_MIN_VALUE (type));
8740 	  tree_node (TYPE_MAX_VALUE (type));
8741 	}
8742       else
8743 	{
8744 	  /* A new integral type (representing a bitfield).  */
8745 	  if (streaming_p ())
8746 	    {
8747 	      unsigned prec = TYPE_PRECISION (type);
8748 	      bool unsigned_p = TYPE_UNSIGNED (type);
8749 
8750 	      u ((prec << 1) | unsigned_p);
8751 	    }
8752 	}
8753       break;
8754 
8755     case METHOD_TYPE:
8756     case FUNCTION_TYPE:
8757       {
8758 	gcc_checking_assert (type_memfn_rqual (type) == REF_QUAL_NONE);
8759 
8760 	tree arg_types = TYPE_ARG_TYPES (type);
8761 	if (TREE_CODE (type) == METHOD_TYPE)
8762 	  {
8763 	    tree_node (TREE_TYPE (TREE_VALUE (arg_types)));
8764 	    arg_types = TREE_CHAIN (arg_types);
8765 	  }
8766 	tree_node (arg_types);
8767       }
8768       break;
8769 
8770     case OFFSET_TYPE:
8771       tree_node (TYPE_OFFSET_BASETYPE (type));
8772       break;
8773 
8774     case POINTER_TYPE:
8775       /* No additional data.  */
8776       break;
8777 
8778     case REFERENCE_TYPE:
8779       if (streaming_p ())
8780 	u (TYPE_REF_IS_RVALUE (type));
8781       break;
8782 
8783     case DECLTYPE_TYPE:
8784     case TYPEOF_TYPE:
8785     case UNDERLYING_TYPE:
8786       tree_node (TYPE_VALUES_RAW (type));
8787       if (TREE_CODE (type) == DECLTYPE_TYPE)
8788 	/* We stash a whole bunch of things into decltype's
8789 	   flags.  */
8790 	if (streaming_p ())
8791 	  tree_node_bools (type);
8792       break;
8793 
8794     case TYPE_ARGUMENT_PACK:
8795       /* No additional data.  */
8796       break;
8797 
8798     case TYPE_PACK_EXPANSION:
8799       if (streaming_p ())
8800 	u (PACK_EXPANSION_LOCAL_P (type));
8801       tree_node (PACK_EXPANSION_PARAMETER_PACKS (type));
8802       break;
8803 
8804     case TYPENAME_TYPE:
8805       {
8806 	tree_node (TYPE_CONTEXT (type));
8807 	tree_node (DECL_NAME (TYPE_NAME (type)));
8808 	tree_node (TYPENAME_TYPE_FULLNAME (type));
8809 	if (streaming_p ())
8810 	  {
8811 	    enum tag_types tag_type = none_type;
8812 	    if (TYPENAME_IS_ENUM_P (type))
8813 	      tag_type = enum_type;
8814 	    else if (TYPENAME_IS_CLASS_P (type))
8815 	      tag_type = class_type;
8816 	    u (int (tag_type));
8817 	  }
8818 	}
8819       break;
8820 
8821     case UNBOUND_CLASS_TEMPLATE:
8822       {
8823 	tree decl = TYPE_NAME (type);
8824 	tree_node (DECL_CONTEXT (decl));
8825 	tree_node (DECL_NAME (decl));
8826 	tree_node (DECL_TEMPLATE_PARMS (decl));
8827       }
8828       break;
8829 
8830     case VECTOR_TYPE:
8831       if (streaming_p ())
8832 	{
8833 	  poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (type);
8834 	  /* to_constant asserts that only coeff[0] is of interest.  */
8835 	  wu (static_cast<unsigned HOST_WIDE_INT> (nunits.to_constant ()));
8836 	}
8837       break;
8838     }
8839 
8840   /* We may have met the type during emitting the above.  */
8841   if (ref_node (type) != WK_none)
8842     {
8843       int tag = insert (type);
8844       if (streaming_p ())
8845 	{
8846 	  i (0);
8847 	  dump (dumper::TREE)
8848 	    && dump ("Wrote:%d derived type %C", tag, TREE_CODE (type));
8849 	}
8850     }
8851 
8852   return;
8853 }
8854 
8855 /* T is (mostly*) a non-mergeable node that must be written by value.
8856    The mergeable case is a BINFO, which is as-if a DECL.  */
8857 
8858 void
8859 trees_out::tree_value (tree t)
8860 {
8861   /* We should never be writing a type by value.  tree_type should
8862      have streamed it, or we're going via its TYPE_DECL.  */
8863   gcc_checking_assert (!TYPE_P (t));
8864 
8865   if (DECL_P (t))
8866     /* No template, type, var or function, except anonymous
8867        non-context vars.  */
8868     gcc_checking_assert ((TREE_CODE (t) != TEMPLATE_DECL
8869 			  && TREE_CODE (t) != TYPE_DECL
8870 			  && (TREE_CODE (t) != VAR_DECL
8871 			      || (!DECL_NAME (t) && !DECL_CONTEXT (t)))
8872 			  && TREE_CODE (t) != FUNCTION_DECL));
8873 
8874   if (streaming_p ())
8875     {
8876       /* A new node -> tt_node.  */
8877       tree_val_count++;
8878       i (tt_node);
8879       start (t);
8880       tree_node_bools (t);
8881     }
8882 
8883   if  (TREE_CODE (t) == TREE_BINFO)
8884     /* Binfos are decl-like and need merging information.  */
8885     binfo_mergeable (t);
8886 
8887   int tag = insert (t, WK_value);
8888   if (streaming_p ())
8889     dump (dumper::TREE)
8890       && dump ("Writing tree:%d %C:%N", tag, TREE_CODE (t), t);
8891 
8892   tree_node_vals (t);
8893 
8894   if (streaming_p ())
8895     dump (dumper::TREE) && dump ("Written tree:%d %C:%N", tag, TREE_CODE (t), t);
8896 }
8897 
8898 tree
8899 trees_in::tree_value ()
8900 {
8901   tree t = start ();
8902   if (!t || !tree_node_bools (t))
8903     return NULL_TREE;
8904 
8905   tree existing = t;
8906   if (TREE_CODE (t) == TREE_BINFO)
8907     {
8908       tree type;
8909       unsigned ix = binfo_mergeable (&type);
8910       if (TYPE_BINFO (type))
8911 	{
8912 	  /* We already have a definition, this must be a duplicate.  */
8913 	  dump (dumper::MERGE)
8914 	    && dump ("Deduping binfo %N[%u]", type, ix);
8915 	  existing = TYPE_BINFO (type);
8916 	  while (existing && ix--)
8917 	    existing = TREE_CHAIN (existing);
8918 	  if (existing)
8919 	    register_duplicate (t, existing);
8920 	  else
8921 	    /* Error, mismatch -- diagnose in read_class_def's
8922 	       checking.  */
8923 	    existing = t;
8924 	}
8925     }
8926 
8927   /* Insert into map.  */
8928   int tag = insert (existing);
8929   dump (dumper::TREE)
8930     && dump ("Reading tree:%d %C", tag, TREE_CODE (t));
8931 
8932   if (!tree_node_vals (t))
8933     {
8934       back_refs[~tag] = NULL_TREE;
8935       set_overrun ();
8936       /* Bail.  */
8937       return NULL_TREE;
8938     }
8939 
8940   dump (dumper::TREE) && dump ("Read tree:%d %C:%N", tag, TREE_CODE (t), t);
8941 
8942   if (TREE_CODE (existing) == INTEGER_CST && !TREE_OVERFLOW (existing))
8943     {
8944       existing = cache_integer_cst (t, true);
8945       back_refs[~tag] = existing;
8946     }
8947 
8948   return existing;
8949 }
8950 
8951 /* Stream out tree node T.  We automatically create local back
8952    references, which is essentially a single-pass Lisp
8953    self-referential structure pretty-printer.  */
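/* A minimal sketch of the scheme (made-up tag values): the first time a
   node is walked it is written by value and recorded under the next
   negative tag, say -42; any later occurrence streams just that tag, and
   the reader resolves it through back_refs[~tag].  Well-known global trees
   are instead referenced as tt_fixed plus an index into fixed_trees.  */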
8954 
8955 void
8956 trees_out::tree_node (tree t)
8957 {
8958   dump.indent ();
8959   walk_kind ref = ref_node (t);
8960   if (ref == WK_none)
8961     goto done;
8962 
8963   if (ref != WK_normal)
8964     goto skip_normal;
8965 
8966   if (TREE_CODE (t) == IDENTIFIER_NODE)
8967     {
8968       /* An identifier node -> tt_id, tt_conv_id, tt_anon_id, tt_lambda_id.  */
8969       int code = tt_id;
8970       if (IDENTIFIER_ANON_P (t))
8971 	code = IDENTIFIER_LAMBDA_P (t) ? tt_lambda_id : tt_anon_id;
8972       else if (IDENTIFIER_CONV_OP_P (t))
8973 	code = tt_conv_id;
8974 
8975       if (streaming_p ())
8976 	i (code);
8977 
8978       if (code == tt_conv_id)
8979 	{
8980 	  tree type = TREE_TYPE (t);
8981 	  gcc_checking_assert (type || t == conv_op_identifier);
8982 	  tree_node (type);
8983 	}
8984       else if (code == tt_id && streaming_p ())
8985 	str (IDENTIFIER_POINTER (t), IDENTIFIER_LENGTH (t));
8986 
8987       int tag = insert (t);
8988       if (streaming_p ())
8989 	{
8990 	  /* We know the ordering of the 4 id tags.  */
8991 	  static const char *const kinds[] =
8992 	    {"", "conv_op ", "anon ", "lambda "};
8993 	  dump (dumper::TREE)
8994 	    && dump ("Written:%d %sidentifier:%N", tag,
8995 		     kinds[code - tt_id],
8996 		     code == tt_conv_id ? TREE_TYPE (t) : t);
8997 	}
8998       goto done;
8999     }
9000 
9001   if (TREE_CODE (t) == TREE_BINFO)
9002     {
9003       /* A BINFO -> tt_binfo.
9004 	 We must do this by reference.  We stream the binfo tree
9005 	 itself when streaming its owning RECORD_TYPE.  That we got
9006 	 here means the dominating type is not in this SCC.  */
9007       if (streaming_p ())
9008 	i (tt_binfo);
9009       binfo_mergeable (t);
9010       gcc_checking_assert (!TREE_VISITED (t));
9011       int tag = insert (t);
9012       if (streaming_p ())
9013 	dump (dumper::TREE) && dump ("Inserting binfo:%d %N", tag, t);
9014       goto done;
9015     }
9016 
9017   if (TREE_CODE (t) == INTEGER_CST
9018       && !TREE_OVERFLOW (t)
9019       && TREE_CODE (TREE_TYPE (t)) == ENUMERAL_TYPE)
9020     {
9021       /* An integral constant of enumeral type.  See if it matches one
9022 	 of the enumeration values.  */
9023       for (tree values = TYPE_VALUES (TREE_TYPE (t));
9024 	   values; values = TREE_CHAIN (values))
9025 	{
9026 	  tree decl = TREE_VALUE (values);
9027 	  if (tree_int_cst_equal (DECL_INITIAL (decl), t))
9028 	    {
9029 	      if (streaming_p ())
9030 		u (tt_enum_value);
9031 	      tree_node (decl);
9032 	      dump (dumper::TREE) && dump ("Written enum value %N", decl);
9033 	      goto done;
9034 	    }
9035 	}
9036       /* It didn't match.  We'll write it as an explicit INTEGER_CST
9037 	 node.  */
9038     }
9039 
9040   if (TYPE_P (t))
9041     {
9042       type_node (t);
9043       goto done;
9044     }
9045 
9046   if (DECL_P (t))
9047     {
9048       if (DECL_TEMPLATE_PARM_P (t))
9049 	{
9050 	  tpl_parm_value (t);
9051 	  goto done;
9052 	}
9053 
9054       if (!DECL_CONTEXT (t))
9055 	{
9056 	  /* There are a few cases of decls with no context.  We'll write
9057 	     these by value, but first assert they are cases we expect.  */
9058 	  gcc_checking_assert (ref == WK_normal);
9059 	  switch (TREE_CODE (t))
9060 	    {
9061 	    default: gcc_unreachable ();
9062 
9063 	    case LABEL_DECL:
9064 	      /* CASE_LABEL_EXPRs contain uncontexted LABEL_DECLs.  */
9065 	      gcc_checking_assert (!DECL_NAME (t));
9066 	      break;
9067 
9068 	    case VAR_DECL:
9069 	      /* AGGR_INIT_EXPRs cons up anonymous uncontexted VAR_DECLs.  */
9070 	      gcc_checking_assert (!DECL_NAME (t)
9071 				   && DECL_ARTIFICIAL (t));
9072 	      break;
9073 
9074 	    case PARM_DECL:
9075 	      /* REQUIRES_EXPRs have a tree list of uncontexted
9076 		 PARM_DECLS.  It'd be nice if they had a
9077 		 distinguishing flag to double check.  */
9078 	      break;
9079 	    }
9080 	  goto by_value;
9081 	}
9082     }
9083 
9084  skip_normal:
9085   if (DECL_P (t) && !decl_node (t, ref))
9086     goto done;
9087 
9088   /* Otherwise by value */
9089  by_value:
9090   tree_value (t);
9091 
9092  done:
9093   /* And, breathe out.  */
9094   dump.outdent ();
9095 }
9096 
9097 /* Stream in a tree node.  */
9098 
9099 tree
9100 trees_in::tree_node (bool is_use)
9101 {
9102   if (get_overrun ())
9103     return NULL_TREE;
9104 
9105   dump.indent ();
9106   int tag = i ();
9107   tree res = NULL_TREE;
9108   switch (tag)
9109     {
9110     default:
9111       /* backref, pull it out of the map.  */
9112       res = back_ref (tag);
9113       break;
9114 
9115     case tt_null:
9116       /* NULL_TREE.  */
9117       break;
9118 
9119     case tt_fixed:
9120       /* A fixed ref, find it in the fixed_ref array.   */
9121       {
9122 	unsigned fix = u ();
9123 	if (fix < (*fixed_trees).length ())
9124 	  {
9125 	    res = (*fixed_trees)[fix];
9126 	    dump (dumper::TREE) && dump ("Read fixed:%u %C:%N%S", fix,
9127 					 TREE_CODE (res), res, res);
9128 	  }
9129 
9130 	if (!res)
9131 	  set_overrun ();
9132       }
9133       break;
9134 
9135     case tt_parm:
9136       {
9137 	tree fn = tree_node ();
9138 	if (fn && TREE_CODE (fn) == FUNCTION_DECL)
9139 	  res = tree_node ();
9140 	if (res)
9141 	  dump (dumper::TREE)
9142 	    && dump ("Read %s reference %N",
9143 		     TREE_CODE (res) == PARM_DECL ? "parameter" : "result",
9144 		     res);
9145       }
9146       break;
9147 
9148     case tt_node:
9149       /* A new node.  Stream it in.  */
9150       res = tree_value ();
9151       break;
9152 
9153     case tt_decl:
9154       /* A new decl.  Stream it in.  */
9155       res = decl_value ();
9156       break;
9157 
9158     case tt_tpl_parm:
9159       /* A template parameter.  Stream it in.  */
9160       res = tpl_parm_value ();
9161       break;
9162 
9163     case tt_id:
9164       /* An identifier node.  */
9165       {
9166 	size_t l;
9167 	const char *chars = str (&l);
9168 	res = get_identifier_with_length (chars, l);
9169 	int tag = insert (res);
9170 	dump (dumper::TREE)
9171 	  && dump ("Read identifier:%d %N", tag, res);
9172       }
9173       break;
9174 
9175     case tt_conv_id:
9176       /* A conversion operator.  Get the type and recreate the
9177 	 identifier.  */
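      /* For example (illustrative only), given

	   struct S { operator int () const; };

	 the operator's name is keyed by its destination type (int here);
	 we stream that type and rebuild the identifier via
	 make_conv_op_name on read-in.  */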
9178       {
9179 	tree type = tree_node ();
9180 	if (!get_overrun ())
9181 	  {
9182 	    res = type ? make_conv_op_name (type) : conv_op_identifier;
9183 	    int tag = insert (res);
9184 	    dump (dumper::TREE)
9185 	      && dump ("Created conv_op:%d %S for %N", tag, res, type);
9186 	  }
9187       }
9188       break;
9189 
9190     case tt_anon_id:
9191     case tt_lambda_id:
9192       /* An anonymous or lambda id.  */
9193       {
9194 	res = make_anon_name ();
9195 	if (tag == tt_lambda_id)
9196 	  IDENTIFIER_LAMBDA_P (res) = true;
9197 	int tag = insert (res);
9198 	dump (dumper::TREE)
9199 	  && dump ("Read %s identifier:%d %N",
9200 		   IDENTIFIER_LAMBDA_P (res) ? "lambda" : "anon", tag, res);
9201       }
9202       break;
9203 
9204     case tt_typedef_type:
9205       res = tree_node ();
9206       if (res)
9207 	{
9208 	  dump (dumper::TREE)
9209 	    && dump ("Read %stypedef %C:%N",
9210 		     DECL_IMPLICIT_TYPEDEF_P (res) ? "implicit " : "",
9211 		     TREE_CODE (res), res);
9212 	  res = TREE_TYPE (res);
9213 	}
9214       break;
9215 
9216     case tt_derived_type:
9217       /* A type derived from some other type.  */
9218       {
9219 	enum tree_code code = tree_code (u ());
9220 	res = tree_node ();
9221 
9222 	switch (code)
9223 	  {
9224 	  default:
9225 	    set_overrun ();
9226 	    break;
9227 
9228 	  case ARRAY_TYPE:
9229 	    {
9230 	      tree domain = tree_node ();
9231 	      int dep = u ();
9232 	      if (!get_overrun ())
9233 		res = build_cplus_array_type (res, domain, dep);
9234 	    }
9235 	    break;
9236 
9237 	  case COMPLEX_TYPE:
9238 	    if (!get_overrun ())
9239 	      res = build_complex_type (res);
9240 	    break;
9241 
9242 	  case BOOLEAN_TYPE:
9243 	    {
9244 	      unsigned precision = u ();
9245 	      if (!get_overrun ())
9246 		res = build_nonstandard_boolean_type (precision);
9247 	    }
9248 	    break;
9249 
9250 	  case INTEGER_TYPE:
9251 	    if (res)
9252 	      {
9253 		/* A range type (representing an array domain).  */
9254 		tree min = tree_node ();
9255 		tree max = tree_node ();
9256 
9257 		if (!get_overrun ())
9258 		  res = build_range_type (res, min, max);
9259 	      }
9260 	    else
9261 	      {
9262 		/* A new integral type (representing a bitfield).  */
9263 		unsigned enc = u ();
9264 		if (!get_overrun ())
9265 		  res = build_nonstandard_integer_type (enc >> 1, enc & 1);
9266 	      }
9267 	    break;
9268 
9269 	  case FUNCTION_TYPE:
9270 	  case METHOD_TYPE:
9271 	    {
9272 	      tree klass =  code == METHOD_TYPE ? tree_node () : NULL_TREE;
9273 	      tree args = tree_node ();
9274 	      if (!get_overrun ())
9275 		{
9276 		  if (klass)
9277 		    res = build_method_type_directly (klass, res, args);
9278 		  else
9279 		    res = build_function_type (res, args);
9280 		}
9281 	    }
9282 	    break;
9283 
9284 	  case OFFSET_TYPE:
9285 	    {
9286 	      tree base = tree_node ();
9287 	      if (!get_overrun ())
9288 		res = build_offset_type (base, res);
9289 	    }
9290 	    break;
9291 
9292 	  case POINTER_TYPE:
9293 	    if (!get_overrun ())
9294 	      res = build_pointer_type (res);
9295 	    break;
9296 
9297 	  case REFERENCE_TYPE:
9298 	    {
9299 	      bool rval = bool (u ());
9300 	      if (!get_overrun ())
9301 		res = cp_build_reference_type (res, rval);
9302 	    }
9303 	    break;
9304 
9305 	  case DECLTYPE_TYPE:
9306 	  case TYPEOF_TYPE:
9307 	  case UNDERLYING_TYPE:
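	    /* E.g. (illustrative only) the return type of

		 template<typename T> auto next (T t) -> decltype (t + 1);

	       is a DECLTYPE_TYPE: we stream its underlying expression and
	       rebuild the type with structural equality on read-in.  */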
9308 	    {
9309 	      tree expr = tree_node ();
9310 	      if (!get_overrun ())
9311 		{
9312 		  res = cxx_make_type (code);
9313 		  TYPE_VALUES_RAW (res) = expr;
9314 		  if (code == DECLTYPE_TYPE)
9315 		    tree_node_bools (res);
9316 		  SET_TYPE_STRUCTURAL_EQUALITY (res);
9317 		}
9318 	    }
9319 	    break;
9320 
9321 	  case TYPE_ARGUMENT_PACK:
9322 	    if (!get_overrun ())
9323 	      {
9324 		tree pack = cxx_make_type (TYPE_ARGUMENT_PACK);
9325 		SET_ARGUMENT_PACK_ARGS (pack, res);
9326 		res = pack;
9327 	      }
9328 	    break;
9329 
9330 	  case TYPE_PACK_EXPANSION:
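	    /* E.g. (illustrative only) in

		 template<typename... Ts> void f (Ts... args);

	       the parameter type Ts... is a TYPE_PACK_EXPANSION whose
	       pattern is the parameter pack Ts.  */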
9331 	    {
9332 	      bool local = u ();
9333 	      tree param_packs = tree_node ();
9334 	      if (!get_overrun ())
9335 		{
9336 		  tree expn = cxx_make_type (TYPE_PACK_EXPANSION);
9337 		  SET_TYPE_STRUCTURAL_EQUALITY (expn);
9338 		  SET_PACK_EXPANSION_PATTERN (expn, res);
9339 		  PACK_EXPANSION_PARAMETER_PACKS (expn) = param_packs;
9340 		  PACK_EXPANSION_LOCAL_P (expn) = local;
9341 		  res = expn;
9342 		}
9343 	    }
9344 	    break;
9345 
9346 	  case TYPENAME_TYPE:
9347 	    {
9348 	      tree ctx = tree_node ();
9349 	      tree name = tree_node ();
9350 	      tree fullname = tree_node ();
9351 	      enum tag_types tag_type = tag_types (u ());
9352 
9353 	      if (!get_overrun ())
9354 		res = build_typename_type (ctx, name, fullname, tag_type);
9355 	    }
9356 	    break;
9357 
9358 	  case UNBOUND_CLASS_TEMPLATE:
9359 	    {
9360 	      tree ctx = tree_node ();
9361 	      tree name = tree_node ();
9362 	      tree parms = tree_node ();
9363 
9364 	      if (!get_overrun ())
9365 		res = make_unbound_class_template_raw (ctx, name, parms);
9366 	    }
9367 	    break;
9368 
9369 	  case VECTOR_TYPE:
9370 	    {
9371 	      unsigned HOST_WIDE_INT nunits = wu ();
9372 	      if (!get_overrun ())
9373 		res = build_vector_type (res, static_cast<poly_int64> (nunits));
9374 	    }
9375 	    break;
9376 	  }
9377 
9378 	int tag = i ();
9379 	if (!tag)
9380 	  {
9381 	    tag = insert (res);
9382 	    if (res)
9383 	      dump (dumper::TREE)
9384 		&& dump ("Created:%d derived type %C", tag, code);
9385 	  }
9386 	else
9387 	  res = back_ref (tag);
9388       }
9389       break;
9390 
9391     case tt_variant_type:
9392       /* Variant of some type.  */
9393       {
9394 	res = tree_node ();
9395 	int flags = i ();
9396 	if (get_overrun ())
9397 	  ;
9398 	else if (flags < 0)
9399 	  /* No change.  */;
9400 	else if (TREE_CODE (res) == FUNCTION_TYPE
9401 		 || TREE_CODE (res) == METHOD_TYPE)
9402 	  {
9403 	    cp_ref_qualifier rqual = cp_ref_qualifier (flags & 3);
9404 	    bool late = (flags >> 2) & 1;
9405 	    cp_cv_quals quals = cp_cv_quals (flags >> 3);
9406 
9407 	    tree raises = tree_node ();
9408 	    if (raises == error_mark_node)
9409 	      raises = TYPE_RAISES_EXCEPTIONS (res);
9410 
9411 	    res = build_cp_fntype_variant (res, rqual, raises, late);
9412 	    if (TREE_CODE (res) == FUNCTION_TYPE)
9413 	      res = apply_memfn_quals (res, quals, rqual);
9414 	  }
9415 	else
9416 	  {
9417 	    res = build_aligned_type (res, (1u << flags) >> 1);
9418 	    TYPE_USER_ALIGN (res) = true;
9419 	  }
9420 
9421 	if (tree attribs = tree_node ())
9422 	  res = cp_build_type_attribute_variant (res, attribs);
9423 
9424 	int quals = i ();
9425 	if (quals >= 0 && !get_overrun ())
9426 	  res = cp_build_qualified_type (res, quals);
9427 
9428 	int tag = i ();
9429 	if (!tag)
9430 	  {
9431 	    tag = insert (res);
9432 	    if (res)
9433 	      dump (dumper::TREE)
9434 		&& dump ("Created:%d variant type %C", tag, TREE_CODE (res));
9435 	  }
9436 	else
9437 	  res = back_ref (tag);
9438       }
9439       break;
9440 
9441     case tt_tinfo_var:
9442     case tt_tinfo_typedef:
9443       /* A tinfo var or typedef.  */
9444       {
9445 	bool is_var = tag == tt_tinfo_var;
9446 	unsigned ix = u ();
9447 	tree type = NULL_TREE;
9448 
9449 	if (is_var)
9450 	  {
9451 	    tree name = tree_node ();
9452 	    type = tree_node ();
9453 
9454 	    if (!get_overrun ())
9455 	      res = get_tinfo_decl_direct (type, name, int (ix));
9456 	  }
9457 	else
9458 	  {
9459 	    if (!get_overrun ())
9460 	      {
9461 		type = get_pseudo_tinfo_type (ix);
9462 		res = TYPE_NAME (type);
9463 	      }
9464 	  }
9465 	if (res)
9466 	  {
9467 	    int tag = insert (res);
9468 	    dump (dumper::TREE)
9469 	      && dump ("Created tinfo_%s:%d %S:%u for %N",
9470 		       is_var ? "var" : "decl", tag, res, ix, type);
9471 	    if (!is_var)
9472 	      {
9473 		tag = insert (type);
9474 		dump (dumper::TREE)
9475 		  && dump ("Created tinfo_type:%d %u %N", tag, ix, type);
9476 	      }
9477 	  }
9478       }
9479       break;
9480 
9481     case tt_ptrmem_type:
9482       /* A pointer to member function.  */
9483       {
9484 	tree type = tree_node ();
9485 	if (type && TREE_CODE (type) == POINTER_TYPE
9486 	    && TREE_CODE (TREE_TYPE (type)) == METHOD_TYPE)
9487 	  {
9488 	    res = build_ptrmemfunc_type (type);
9489 	    int tag = insert (res);
9490 	    dump (dumper::TREE) && dump ("Created:%d ptrmem type", tag);
9491 	  }
9492 	else
9493 	  set_overrun ();
9494       }
9495       break;
9496 
9497     case tt_enum_value:
9498       /* An enum const value.  */
9499       {
9500 	if (tree decl = tree_node ())
9501 	  {
9502 	    dump (dumper::TREE) && dump ("Read enum value %N", decl);
9503 	    res = DECL_INITIAL (decl);
9504 	  }
9505 
9506 	if (!res)
9507 	  set_overrun ();
9508       }
9509       break;
9510 
9511     case tt_enum_decl:
9512       /* An enum decl.  */
9513       {
9514 	tree ctx = tree_node ();
9515 	tree name = tree_node ();
9516 
9517 	if (!get_overrun ()
9518 	    && TREE_CODE (ctx) == ENUMERAL_TYPE)
9519 	  res = find_enum_member (ctx, name);
9520 
9521 	if (!res)
9522 	  set_overrun ();
9523 	else
9524 	  {
9525 	    int tag = insert (res);
9526 	    dump (dumper::TREE)
9527 	      && dump ("Read enum decl:%d %C:%N", tag, TREE_CODE (res), res);
9528 	  }
9529       }
9530       break;
9531 
9532     case tt_data_member:
9533       /* A data member.  */
9534       {
9535 	tree ctx = tree_node ();
9536 	tree name = tree_node ();
9537 
9538 	if (!get_overrun ()
9539 	    && RECORD_OR_UNION_TYPE_P (ctx))
9540 	  {
9541 	    if (name)
9542 	      res = lookup_class_binding (ctx, name);
9543 	    else
9544 	      res = lookup_field_ident (ctx, u ());
9545 
9546 	    if (!res
9547 		|| TREE_CODE (res) != FIELD_DECL
9548 		|| DECL_CONTEXT (res) != ctx)
9549 	      res = NULL_TREE;
9550 	  }
9551 
9552 	if (!res)
9553 	  set_overrun ();
9554 	else
9555 	  {
9556 	    int tag = insert (res);
9557 	    dump (dumper::TREE)
9558 	      && dump ("Read member:%d %C:%N", tag, TREE_CODE (res), res);
9559 	  }
9560       }
9561       break;
9562 
9563     case tt_binfo:
9564       /* A BINFO.  Walk the tree of the dominating type.  */
9565       {
9566 	tree type;
9567 	unsigned ix = binfo_mergeable (&type);
9568 	if (type)
9569 	  {
9570 	    res = TYPE_BINFO (type);
9571 	    for (; ix && res; res = TREE_CHAIN (res))
9572 	      ix--;
9573 	    if (!res)
9574 	      set_overrun ();
9575 	  }
9576 
9577 	if (get_overrun ())
9578 	  break;
9579 
9580 	/* Insert binfo into backreferences.  */
9581 	tag = insert (res);
9582 	dump (dumper::TREE) && dump ("Read binfo:%d %N", tag, res);
9583       }
9584       break;
9585 
9586     case tt_vtable:
9587       {
9588 	unsigned ix = u ();
9589 	tree ctx = tree_node ();
9590 	dump (dumper::TREE) && dump ("Reading vtable %N[%u]", ctx, ix);
9591 	if (TREE_CODE (ctx) == RECORD_TYPE && TYPE_LANG_SPECIFIC (ctx))
9592 	  for (res = CLASSTYPE_VTABLES (ctx); res; res = DECL_CHAIN (res))
9593 	    if (!ix--)
9594 	      break;
9595 	if (!res)
9596 	  set_overrun ();
9597       }
9598       break;
9599 
9600     case tt_thunk:
9601       {
9602 	int fixed = i ();
9603 	tree target = tree_node ();
9604 	tree virt = tree_node ();
9605 
9606 	for (tree thunk = DECL_THUNKS (target);
9607 	     thunk; thunk = DECL_CHAIN (thunk))
9608 	  if (THUNK_FIXED_OFFSET (thunk) == fixed
9609 	      && !THUNK_VIRTUAL_OFFSET (thunk) == !virt
9610 	      && (!virt
9611 		  || tree_int_cst_equal (virt, THUNK_VIRTUAL_OFFSET (thunk))))
9612 	    {
9613 	      res = thunk;
9614 	      break;
9615 	    }
9616 
9617 	int tag = insert (res);
9618 	if (res)
9619 	  dump (dumper::TREE)
9620 	    && dump ("Read:%d thunk %N to %N", tag, DECL_NAME (res), target);
9621 	else
9622 	  set_overrun ();
9623       }
9624       break;
9625 
9626     case tt_clone_ref:
9627       {
9628 	tree target = tree_node ();
9629 	tree name = tree_node ();
9630 
9631 	if (DECL_P (target) && DECL_MAYBE_IN_CHARGE_CDTOR_P (target))
9632 	  {
9633 	    tree clone;
9634 	    FOR_EVERY_CLONE (clone, target)
9635 	      if (DECL_NAME (clone) == name)
9636 		{
9637 		  res = clone;
9638 		  break;
9639 		}
9640 	  }
9641 
9642 	if (!res)
9643 	  set_overrun ();
9644 	int tag = insert (res);
9645 	if (res)
9646 	  dump (dumper::TREE)
9647 	    && dump ("Read:%d clone %N of %N", tag, DECL_NAME (res), target);
9648 	else
9649 	  set_overrun ();
9650        }
9651       break;
9652 
9653     case tt_entity:
9654       /* Index into the entity table.  Perhaps not loaded yet!  */
9655       {
9656 	unsigned origin = state->slurp->remap_module (u ());
9657 	unsigned ident = u ();
9658 	module_state *from = (*modules)[origin];
9659 
9660 	if (!origin || ident >= from->entity_num)
9661 	  set_overrun ();
9662 	if (!get_overrun ())
9663 	  {
9664 	    binding_slot *slot = &(*entity_ary)[from->entity_lwm + ident];
9665 	    if (slot->is_lazy ())
9666 	      if (!from->lazy_load (ident, slot))
9667 		set_overrun ();
9668 	    res = *slot;
9669 	  }
9670 
9671 	if (res)
9672 	  {
9673 	    const char *kind = (origin != state->mod ? "Imported" : "Named");
9674 	    int tag = insert (res);
9675 	    dump (dumper::TREE)
9676 	      && dump ("%s:%d %C:%N@%M", kind, tag, TREE_CODE (res),
9677 		       res, (*modules)[origin]);
9678 
9679 	    if (!add_indirects (res))
9680 	      {
9681 		set_overrun ();
9682 		res = NULL_TREE;
9683 	      }
9684 	  }
9685       }
9686       break;
9687 
9688     case tt_template:
9689       /* A template.  */
9690       if (tree tpl = tree_node ())
9691 	{
9692 	  res = DECL_TEMPLATE_RESULT (tpl);
9693 	  dump (dumper::TREE)
9694 	    && dump ("Read template %C:%N", TREE_CODE (res), res);
9695 	}
9696       break;
9697     }
9698 
9699   if (is_use && !unused && res && DECL_P (res) && !TREE_USED (res))
9700     {
9701       /* Mark the decl used, much as mark_used would -- we cannot call
9702 	 mark_used in the middle of streaming; we only need a subset
9703 	 of its functionality.  */
9704       TREE_USED (res) = true;
9705 
9706       /* And for structured bindings also the underlying decl.  */
9707       if (DECL_DECOMPOSITION_P (res) && DECL_DECOMP_BASE (res))
9708 	TREE_USED (DECL_DECOMP_BASE (res)) = true;
9709 
9710       if (DECL_CLONED_FUNCTION_P (res))
9711 	TREE_USED (DECL_CLONED_FUNCTION (res)) = true;
9712     }
9713 
9714   dump.outdent ();
9715   return res;
9716 }
9717 
9718 void
9719 trees_out::tpl_parms (tree parms, unsigned &tpl_levels)
9720 {
9721   if (!parms)
9722     return;
9723 
9724   if (TREE_VISITED (parms))
9725     {
9726       ref_node (parms);
9727       return;
9728     }
9729 
9730   tpl_parms (TREE_CHAIN (parms), tpl_levels);
9731 
9732   tree vec = TREE_VALUE (parms);
9733   unsigned len = TREE_VEC_LENGTH (vec);
9734   /* Depth.  */
9735   int tag = insert (parms);
9736   if (streaming_p ())
9737     {
9738       i (len + 1);
9739       dump (dumper::TREE)
9740 	&& dump ("Writing template parms:%d level:%N length:%d",
9741 		 tag, TREE_PURPOSE (parms), len);
9742     }
9743   tree_node (TREE_PURPOSE (parms));
9744 
9745   for (unsigned ix = 0; ix != len; ix++)
9746     {
9747       tree parm = TREE_VEC_ELT (vec, ix);
9748       tree decl = TREE_VALUE (parm);
9749 
9750       gcc_checking_assert (DECL_TEMPLATE_PARM_P (decl));
9751       if (CHECKING_P)
9752 	switch (TREE_CODE (decl))
9753 	  {
9754 	  default: gcc_unreachable ();
9755 
9756 	  case TEMPLATE_DECL:
9757 	    gcc_assert ((TREE_CODE (TREE_TYPE (decl)) == TEMPLATE_TEMPLATE_PARM)
9758 			&& (TREE_CODE (DECL_TEMPLATE_RESULT (decl)) == TYPE_DECL)
9759 			&& (TYPE_NAME (TREE_TYPE (decl)) == decl));
9760 	    break;
9761 
9762 	  case TYPE_DECL:
9763 	    gcc_assert ((TREE_CODE (TREE_TYPE (decl)) == TEMPLATE_TYPE_PARM)
9764 			&& (TYPE_NAME (TREE_TYPE (decl)) == decl));
9765 	    break;
9766 
9767 	  case PARM_DECL:
9768 	    gcc_assert ((TREE_CODE (DECL_INITIAL (decl)) == TEMPLATE_PARM_INDEX)
9769 			&& (TREE_CODE (TEMPLATE_PARM_DECL (DECL_INITIAL (decl)))
9770 			    == CONST_DECL)
9771 			&& (DECL_TEMPLATE_PARM_P
9772 			    (TEMPLATE_PARM_DECL (DECL_INITIAL (decl)))));
9773 	    break;
9774 	  }
9775 
9776       tree_node (decl);
9777       tree_node (TEMPLATE_PARM_CONSTRAINTS (parm));
9778     }
9779 
9780   tpl_levels++;
9781 }
9782 
9783 tree
9784 trees_in::tpl_parms (unsigned &tpl_levels)
9785 {
9786   tree parms = NULL_TREE;
9787 
9788   while (int len = i ())
9789     {
9790       if (len < 0)
9791 	{
9792 	  parms = back_ref (len);
9793 	  continue;
9794 	}
9795 
9796       len -= 1;
9797       parms = tree_cons (NULL_TREE, NULL_TREE, parms);
9798       int tag = insert (parms);
9799       TREE_PURPOSE (parms) = tree_node ();
9800 
9801       dump (dumper::TREE)
9802 	&& dump ("Reading template parms:%d level:%N length:%d",
9803 		 tag, TREE_PURPOSE (parms), len);
9804 
9805       tree vec = make_tree_vec (len);
9806       for (int ix = 0; ix != len; ix++)
9807 	{
9808 	  tree decl = tree_node ();
9809 	  if (!decl)
9810 	    return NULL_TREE;
9811 
9812 	  tree parm = build_tree_list (NULL, decl);
9813 	  TEMPLATE_PARM_CONSTRAINTS (parm) = tree_node ();
9814 
9815 	  TREE_VEC_ELT (vec, ix) = parm;
9816 	}
9817 
9818       TREE_VALUE (parms) = vec;
9819       tpl_levels++;
9820     }
9821 
9822   return parms;
9823 }
9824 
9825 void
9826 trees_out::tpl_parms_fini (tree tmpl, unsigned tpl_levels)
9827 {
9828   for (tree parms = DECL_TEMPLATE_PARMS (tmpl);
9829        tpl_levels--; parms = TREE_CHAIN (parms))
9830     {
9831       tree vec = TREE_VALUE (parms);
9832 
9833       tree_node (TREE_TYPE (vec));
9834       tree dflt = error_mark_node;
9835       for (unsigned ix = TREE_VEC_LENGTH (vec); ix--;)
9836 	{
9837 	  tree parm = TREE_VEC_ELT (vec, ix);
9838 	  if (dflt)
9839 	    {
9840 	      dflt = TREE_PURPOSE (parm);
9841 	      tree_node (dflt);
9842 	    }
9843 
9844 	  if (streaming_p ())
9845 	    {
9846 	      tree decl = TREE_VALUE (parm);
9847 	      if (TREE_CODE (decl) == TEMPLATE_DECL)
9848 		{
9849 		  tree ctx = DECL_CONTEXT (decl);
9850 		  tree inner = DECL_TEMPLATE_RESULT (decl);
9851 		  tree tpi = (TREE_CODE (inner) == TYPE_DECL
9852 			      ? TEMPLATE_TYPE_PARM_INDEX (TREE_TYPE (decl))
9853 			      : DECL_INITIAL (inner));
9854 		  bool original = (TEMPLATE_PARM_LEVEL (tpi)
9855 				   == TEMPLATE_PARM_ORIG_LEVEL (tpi));
9856 		  /* Original template template parms have a context
9857 		     of their owning template.  Reduced ones do not.  */
9858 		  gcc_checking_assert (original ? ctx == tmpl : !ctx);
9859 		}
9860 	    }
9861 	}
9862     }
9863 }
9864 
9865 bool
9866 trees_in::tpl_parms_fini (tree tmpl, unsigned tpl_levels)
9867 {
9868   for (tree parms = DECL_TEMPLATE_PARMS (tmpl);
9869        tpl_levels--; parms = TREE_CHAIN (parms))
9870     {
9871       tree vec = TREE_VALUE (parms);
9872       tree dflt = error_mark_node;
9873 
9874       TREE_TYPE (vec) = tree_node ();
9875       for (unsigned ix = TREE_VEC_LENGTH (vec); ix--;)
9876 	{
9877 	  tree parm = TREE_VEC_ELT (vec, ix);
9878 	  if (dflt)
9879 	    {
9880 	      dflt = tree_node ();
9881 	      if (get_overrun ())
9882 		return false;
9883 	      TREE_PURPOSE (parm) = dflt;
9884 	    }
9885 
9886 	  tree decl = TREE_VALUE (parm);
9887 	  if (TREE_CODE (decl) == TEMPLATE_DECL)
9888 	    {
9889 	      tree inner = DECL_TEMPLATE_RESULT (decl);
9890 	      tree tpi = (TREE_CODE (inner) == TYPE_DECL
9891 			  ? TEMPLATE_TYPE_PARM_INDEX (TREE_TYPE (decl))
9892 			  : DECL_INITIAL (inner));
9893 	      bool original = (TEMPLATE_PARM_LEVEL (tpi)
9894 			       == TEMPLATE_PARM_ORIG_LEVEL (tpi));
9895 	      /* Original template template parms have a context
9896 		 of their owning template.  Reduced ones do not.  */
9897 	      if (original)
9898 		DECL_CONTEXT (decl) = tmpl;
9899 	    }
9900 	}
9901     }
9902   return true;
9903 }
9904 
9905 /* PARMS is a LIST, one node per template parameter level.
9906    Its TREE_VALUE is a TREE_VEC of the parm info for that level.
9907    Each element of that vec is a TREE_LIST whose TREE_VALUE is the
9908    parameter's PARM_DECL, TYPE_DECL or TEMPLATE_DECL, and whose
9909    TREE_PURPOSE is its default value.  */
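/* For instance (illustrative only), for

     template<typename T, int N = 4> struct array;

   there is a single level whose TREE_VEC has two elements: one for the
   type parameter T (no default) and one for the non-type parameter N,
   whose TREE_PURPOSE is the default argument 4.  */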
9910 
9911 void
9912 trees_out::tpl_header (tree tpl, unsigned *tpl_levels)
9913 {
9914   tree parms = DECL_TEMPLATE_PARMS (tpl);
9915   tpl_parms (parms, *tpl_levels);
9916 
9917   /* Mark end.  */
9918   if (streaming_p ())
9919     u (0);
9920 
9921   if (*tpl_levels)
9922     tree_node (TEMPLATE_PARMS_CONSTRAINTS (parms));
9923 }
9924 
9925 bool
9926 trees_in::tpl_header (tree tpl, unsigned *tpl_levels)
9927 {
9928   tree parms = tpl_parms (*tpl_levels);
9929   if (!parms)
9930     return false;
9931 
9932   DECL_TEMPLATE_PARMS (tpl) = parms;
9933 
9934   if (*tpl_levels)
9935     TEMPLATE_PARMS_CONSTRAINTS (parms) = tree_node ();
9936 
9937   return true;
9938 }
9939 
9940 /* Stream skeleton parm nodes, with their flags, type & parm indices.
9941    All the parms will have consecutive tags.  */
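/* For example (illustrative only), for

     int add (int a, int b);

   the two PARM_DECLs are first written as skeletons (start + bools),
   getting consecutive back-reference tags, and only then are their
   values streamed; fn_parms_init on the read side mirrors that
   two-pass order.  */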
9942 
9943 void
9944 trees_out::fn_parms_init (tree fn)
9945 {
9946   /* First init them.  */
9947   int base_tag = ref_num - 1;
9948   int ix = 0;
9949   for (tree parm = DECL_ARGUMENTS (fn);
9950        parm; parm = DECL_CHAIN (parm), ix++)
9951     {
9952       if (streaming_p ())
9953 	{
9954 	  start (parm);
9955 	  tree_node_bools (parm);
9956 	}
9957       int tag = insert (parm);
9958       gcc_checking_assert (base_tag - ix == tag);
9959     }
9960   /* Mark the end.  */
9961   if (streaming_p ())
9962     u (0);
9963 
9964   /* Now stream their contents.  */
9965   ix = 0;
9966   for (tree parm = DECL_ARGUMENTS (fn);
9967        parm; parm = DECL_CHAIN (parm), ix++)
9968     {
9969       if (streaming_p ())
9970 	dump (dumper::TREE)
9971 	  && dump ("Writing parm:%d %u (%N) of %N",
9972 		   base_tag - ix, ix, parm, fn);
9973       tree_node_vals (parm);
9974     }
9975 }
9976 
9977 /* Build skeleton parm nodes, read their flags, type & parm indices.  */
9978 
9979 int
9980 trees_in::fn_parms_init (tree fn)
9981 {
9982   int base_tag = ~(int)back_refs.length ();
9983 
9984   tree *parm_ptr = &DECL_ARGUMENTS (fn);
9985   int ix = 0;
9986   for (; int code = u (); ix++)
9987     {
9988       tree parm = start (code);
9989       if (!tree_node_bools (parm))
9990 	return 0;
9991 
9992       int tag = insert (parm);
9993       gcc_checking_assert (base_tag - ix == tag);
9994       *parm_ptr = parm;
9995       parm_ptr = &DECL_CHAIN (parm);
9996     }
9997 
9998   ix = 0;
9999   for (tree parm = DECL_ARGUMENTS (fn);
10000        parm; parm = DECL_CHAIN (parm), ix++)
10001     {
10002       dump (dumper::TREE)
10003 	&& dump ("Reading parm:%d %u (%N) of %N",
10004 		 base_tag - ix, ix, parm, fn);
10005       if (!tree_node_vals (parm))
10006 	return 0;
10007     }
10008 
10009   return base_tag;
10010 }
10011 
10012 /* Read the remaining parm node data.  Replace with existing (if
10013    non-null) in the map.  */
10014 
10015 void
10016 trees_in::fn_parms_fini (int tag, tree fn, tree existing, bool is_defn)
10017 {
10018   tree existing_parm = existing ? DECL_ARGUMENTS (existing) : NULL_TREE;
10019   tree parms = DECL_ARGUMENTS (fn);
10020   unsigned ix = 0;
10021   for (tree parm = parms; parm; parm = DECL_CHAIN (parm), ix++)
10022     {
10023       if (existing_parm)
10024 	{
10025 	  if (is_defn && !DECL_SAVED_TREE (existing))
10026 	    {
10027 	      /* If we're about to become the definition, set the
10028 		 names of the parms from us.  */
10029 	      DECL_NAME (existing_parm) = DECL_NAME (parm);
10030 	      DECL_SOURCE_LOCATION (existing_parm) = DECL_SOURCE_LOCATION (parm);
10031 	    }
10032 
10033 	  back_refs[~tag] = existing_parm;
10034 	  existing_parm = DECL_CHAIN (existing_parm);
10035 	}
10036       tag--;
10037     }
10038 }
10039 
10040 /* DEP is the depset of some decl we're streaming by value.  Determine
10041    the merging behaviour.  */
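/* Roughly (illustrative summary): a namespace-scope 'int f (int);'
   merges by name plus signature (MK_named), an unnamed field or a
   vtable merges by position within its class (MK_field, MK_vtable),
   and a partial specialization such as 'template<typename T> struct
   X<T *>' merges via its template and argument list (MK_partial).  */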
10042 
10043 merge_kind
10044 trees_out::get_merge_kind (tree decl, depset *dep)
10045 {
10046   if (!dep)
10047     {
10048       if (VAR_OR_FUNCTION_DECL_P (decl))
10049 	{
10050 	  /* Any var or function with template info should have DEP.  */
10051 	  gcc_checking_assert (!DECL_LANG_SPECIFIC (decl)
10052 			       || !DECL_TEMPLATE_INFO (decl));
10053 	  if (DECL_LOCAL_DECL_P (decl))
10054 	    return MK_unique;
10055 	}
10056 
10057       /* Either unique, or some member of a class that cannot have an
10058 	 out-of-class definition.  For instance a FIELD_DECL.  */
10059       tree ctx = CP_DECL_CONTEXT (decl);
10060       if (TREE_CODE (ctx) == FUNCTION_DECL)
10061 	{
10062 	  /* USING_DECLs cannot have DECL_TEMPLATE_INFO -- and this
10063 	     assert is not permission to give them one.  */
10064 	  gcc_checking_assert (TREE_CODE (decl) == USING_DECL
10065 			       || !DECL_LANG_SPECIFIC (decl)
10066 			       || !DECL_TEMPLATE_INFO (decl));
10067 
10068 	  return MK_unique;
10069 	}
10070 
10071       if (TREE_CODE (decl) == TEMPLATE_DECL
10072 	  && DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (decl))
10073 	return MK_local_friend;
10074 
10075       gcc_checking_assert (TYPE_P (ctx));
10076       if (TREE_CODE (decl) == USING_DECL)
10077 	return MK_field;
10078 
10079       if (TREE_CODE (decl) == FIELD_DECL)
10080 	{
10081 	  if (DECL_NAME (decl))
10082 	    {
10083 	      /* Anonymous FIELD_DECLs have a NULL name.  */
10084 	      gcc_checking_assert (!IDENTIFIER_ANON_P (DECL_NAME (decl)));
10085 	      return MK_named;
10086 	    }
10087 
10088 	  if (!DECL_NAME (decl)
10089 	      && !RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl))
10090 	      && !DECL_BIT_FIELD_REPRESENTATIVE (decl))
10091 	    {
10092 	      /* The underlying storage unit for a bitfield.  We do not
10093 		 need to dedup it, because it's only reachable through
10094 		 the bitfields it represents.  And those are deduped.  */
10095 	      // FIXME: Is that assertion correct -- do we ever fish it
10096 	      // out and put it in an expr?
10097 	      gcc_checking_assert ((TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
10098 				    ? TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
10099 				    : TREE_CODE (TREE_TYPE (decl)))
10100 				   == INTEGER_TYPE);
10101 	      return MK_unique;
10102 	    }
10103 
10104 	  return MK_field;
10105 	}
10106 
10107       if (TREE_CODE (decl) == CONST_DECL)
10108 	return MK_named;
10109 
10110       if (TREE_CODE (decl) == VAR_DECL
10111 	  && DECL_VTABLE_OR_VTT_P (decl))
10112 	return MK_vtable;
10113 
10114       if (DECL_THUNK_P (decl))
10115 	/* Thunks are unique-enough, because they're only referenced
10116 	   from the vtable.  And that's either new (so we want the
10117 	   thunks), or it's a duplicate (so it will be dropped).  */
10118 	return MK_unique;
10119 
10120       /* There should be no other cases.  */
10121       gcc_unreachable ();
10122     }
10123 
10124   gcc_checking_assert (TREE_CODE (decl) != FIELD_DECL
10125 		       && TREE_CODE (decl) != USING_DECL
10126 		       && TREE_CODE (decl) != CONST_DECL);
10127 
10128   if (is_key_order ())
10129     {
10130       /* When doing the mergeability graph, there's an indirection to
10131 	 the actual depset.  */
10132       gcc_assert (dep->is_special ());
10133       dep = dep->deps[0];
10134     }
10135 
10136   gcc_checking_assert (decl == dep->get_entity ());
10137 
10138   merge_kind mk = MK_named;
10139   switch (dep->get_entity_kind ())
10140     {
10141     default:
10142       gcc_unreachable ();
10143 
10144     case depset::EK_PARTIAL:
10145       mk = MK_partial;
10146       break;
10147 
10148     case depset::EK_DECL:
10149       {
10150 	tree ctx = CP_DECL_CONTEXT (decl);
10151 
10152 	switch (TREE_CODE (ctx))
10153 	  {
10154 	  default:
10155 	    gcc_unreachable ();
10156 
10157 	  case FUNCTION_DECL:
10158 	    // FIXME: This can occur for (a) voldemorty TYPE_DECLS
10159 	    // (which are returned from a function), or (b)
10160 	    // block-scope class definitions in template functions.
10161 	    // These are as unique as the containing function.  While
10162 	    // on read-back we can discover if the CTX was a
10163 	    // duplicate, we don't have a mechanism to get from the
10164 	    // existing CTX to the existing version of this decl.
10165 	    gcc_checking_assert
10166 	      (DECL_IMPLICIT_TYPEDEF_P (STRIP_TEMPLATE (decl)));
10167 
10168 	    mk = MK_unique;
10169 	    break;
10170 
10171 	  case RECORD_TYPE:
10172 	  case UNION_TYPE:
10173 	    if (DECL_NAME (decl) == as_base_identifier)
10174 	      mk = MK_as_base;
10175 	    else if (IDENTIFIER_ANON_P (DECL_NAME (decl)))
10176 	      mk = MK_field;
10177 	    break;
10178 
10179 	  case NAMESPACE_DECL:
10180 	    if (DECL_IMPLICIT_TYPEDEF_P (STRIP_TEMPLATE (decl))
10181 		&& LAMBDA_TYPE_P (TREE_TYPE (decl)))
10182 	      if (tree scope
10183 		  = LAMBDA_EXPR_EXTRA_SCOPE (CLASSTYPE_LAMBDA_EXPR
10184 					     (TREE_TYPE (decl))))
10185 		if (TREE_CODE (scope) == VAR_DECL
10186 		    && DECL_MODULE_ATTACHMENTS_P (scope))
10187 		  {
10188 		    mk = MK_attached;
10189 		    break;
10190 		  }
10191 
10192 	    if (TREE_CODE (decl) == TEMPLATE_DECL
10193 		&& DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (decl))
10194 	      mk = MK_local_friend;
10195 	    else if (IDENTIFIER_ANON_P (DECL_NAME (decl)))
10196 	      {
10197 		if (DECL_IMPLICIT_TYPEDEF_P (decl)
10198 		    && UNSCOPED_ENUM_P (TREE_TYPE (decl))
10199 		    && TYPE_VALUES (TREE_TYPE (decl)))
10200 		  /* Keyed by first enum value, and underlying type.  */
10201 		  mk = MK_enum;
10202 		else
10203 		  /* No way to merge it, it is an ODR land-mine.  */
10204 		  mk = MK_unique;
10205 	      }
10206 	  }
10207       }
10208       break;
10209 
10210     case depset::EK_SPECIALIZATION:
10211       {
10212 	gcc_checking_assert (dep->is_special ());
10213 
10214 	if (TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
10215 	  /* Block-scope classes of templates are themselves
10216 	     templates.  */
10217 	  gcc_checking_assert (DECL_IMPLICIT_TYPEDEF_P (decl));
10218 
10219 	if (dep->is_friend_spec ())
10220 	  mk = MK_friend_spec;
10221 	else if (dep->is_type_spec ())
10222 	  mk = MK_type_spec;
10223 	else if (dep->is_alias ())
10224 	  mk = MK_alias_spec;
10225 	else
10226 	  mk = MK_decl_spec;
10227 
10228 	if (TREE_CODE (decl) == TEMPLATE_DECL)
10229 	  {
10230 	    spec_entry *entry = reinterpret_cast <spec_entry *> (dep->deps[0]);
10231 	    if (TREE_CODE (entry->spec) != TEMPLATE_DECL)
10232 	      mk = merge_kind (mk | MK_tmpl_tmpl_mask);
10233 	  }
10234       }
10235       break;
10236     }
10237 
10238   return mk;
10239 }
10240 
10241 
10242 /* The container of DECL -- not necessarily its context!  */
10243 
10244 tree
10245 trees_out::decl_container (tree decl)
10246 {
10247   int use_tpl;
10248   tree tpl = NULL_TREE;
10249   if (tree template_info = node_template_info (decl, use_tpl))
10250     tpl = TI_TEMPLATE (template_info);
10251   if (tpl == decl)
10252     tpl = nullptr;
10253 
10254   /* Stream the template we're instantiated from.  */
10255   tree_node (tpl);
10256 
10257   tree container = NULL_TREE;
10258   if (TREE_CODE (decl) == TEMPLATE_DECL
10259       && DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (decl))
10260     container = DECL_CHAIN (decl);
10261   else
10262     container = CP_DECL_CONTEXT (decl);
10263 
10264   if (TYPE_P (container))
10265     container = TYPE_NAME (container);
10266 
10267   tree_node (container);
10268 
10269   return container;
10270 }
10271 
10272 tree
10273 trees_in::decl_container ()
10274 {
10275   /* The maybe-template.  */
10276   (void)tree_node ();
10277 
10278   tree container = tree_node ();
10279 
10280   return container;
10281 }
10282 
10283 /* Write out key information about a mergeable DEP.  Does not write
10284    the contents of DEP itself.  The context has already been
10285    written.  The container has already been streamed.  */
10286 
10287 void
10288 trees_out::key_mergeable (int tag, merge_kind mk, tree decl, tree inner,
10289 			  tree container, depset *dep)
10290 {
10291   if (dep && is_key_order ())
10292     {
10293       gcc_checking_assert (dep->is_special ());
10294       dep = dep->deps[0];
10295     }
10296 
10297   if (streaming_p ())
10298     dump (dumper::MERGE)
10299       && dump ("Writing:%d's %s merge key (%s) %C:%N", tag, merge_kind_name[mk],
10300 	       dep ? dep->entity_kind_name () : "contained",
10301 	       TREE_CODE (decl), decl);
10302 
10303   /* Now write the locating information. */
10304   if (mk & MK_template_mask)
10305     {
10306       /* Specializations are located via their originating template,
10307 	 and the set of template args they specialize.  */
10308       gcc_checking_assert (dep && dep->is_special ());
10309       spec_entry *entry = reinterpret_cast <spec_entry *> (dep->deps[0]);
10310 
10311       tree_node (entry->tmpl);
10312       tree_node (entry->args);
10313       if (mk & MK_tmpl_decl_mask)
10314 	if (flag_concepts && TREE_CODE (inner) == VAR_DECL)
10315 	  {
10316 	    /* Variable template partial specializations might need
10317 	       constraints (see spec_hasher::equal).  It's simpler to
10318 	       write NULL when we don't need them.  */
10319 	    tree constraints = NULL_TREE;
10320 
10321 	    if (uses_template_parms (entry->args))
10322 	      constraints = get_constraints (inner);
10323 	    tree_node (constraints);
10324 	  }
10325 
10326       if (CHECKING_P)
10327 	{
10328 	  /* Make sure we can locate the decl.  */
10329 	  tree existing = match_mergeable_specialization
10330 	    (bool (mk & MK_tmpl_decl_mask), entry);
10331 
10332 	  gcc_assert (existing);
10333 	  if (mk & MK_tmpl_decl_mask)
10334 	    {
10335 	      if (mk & MK_tmpl_alias_mask)
10336 		/* It should be in both tables.  */
10337 		gcc_checking_assert
10338 		  (same_type_p (match_mergeable_specialization (false, entry),
10339 				TREE_TYPE (existing)));
10340 	      if (mk & MK_tmpl_tmpl_mask)
10341 		existing = DECL_TI_TEMPLATE (existing);
10342 	    }
10343 	  else
10344 	    {
10345 	      if (mk & MK_tmpl_tmpl_mask)
10346 		existing = CLASSTYPE_TI_TEMPLATE (existing);
10347 	      else
10348 		existing = TYPE_NAME (existing);
10349 	    }
10350 
10351 	  /* The walkabout should have found ourselves.  */
10352 	  gcc_checking_assert (TREE_CODE (decl) == TYPE_DECL
10353 			       ? same_type_p (TREE_TYPE (decl),
10354 					      TREE_TYPE (existing))
10355 			       : existing == decl);
10356 	}
10357     }
10358   else if (mk != MK_unique)
10359     {
10360       merge_key key;
10361       tree name = DECL_NAME (decl);
10362 
10363       switch (mk)
10364 	{
10365 	default:
10366 	  gcc_unreachable ();
10367 
10368 	case MK_named:
10369 	case MK_friend_spec:
10370 	  if (IDENTIFIER_CONV_OP_P (name))
10371 	    name = conv_op_identifier;
10372 
10373 	  if (TREE_CODE (inner) == FUNCTION_DECL)
10374 	    {
10375 	      /* Functions are distinguished by parameter types.  */
10376 	      tree fn_type = TREE_TYPE (inner);
10377 
10378 	      key.ref_q = type_memfn_rqual (fn_type);
10379 	      key.args = TYPE_ARG_TYPES (fn_type);
10380 
10381 	      if (tree reqs = get_constraints (inner))
10382 		{
10383 		  if (cxx_dialect < cxx20)
10384 		    reqs = CI_ASSOCIATED_CONSTRAINTS (reqs);
10385 		  else
10386 		    reqs = CI_DECLARATOR_REQS (reqs);
10387 		  key.constraints = reqs;
10388 		}
10389 
10390 	      if (IDENTIFIER_CONV_OP_P (name)
10391 		  || (decl != inner
10392 		      && !(name == fun_identifier
10393 			   /* In case the user names something _FUN  */
10394 			   && LAMBDA_TYPE_P (DECL_CONTEXT (inner)))))
10395 		/* A function template or conversion operator also needs
10396 		   the return type.  Except for the _FUN thunk of a
10397 		   generic lambda, which has a recursive decltype'd
10398 		   return type.  */
10399 		// FIXME: What if the return type is a voldemort?
10400 		key.ret = fndecl_declared_return_type (inner);
10401 	    }
10402 	  break;
10403 
10404 	case MK_field:
10405 	  {
10406 	    unsigned ix = 0;
10407 	    if (TREE_CODE (inner) != FIELD_DECL)
10408 	      name = NULL_TREE;
10409 	    else
10410 	      gcc_checking_assert (!name || !IDENTIFIER_ANON_P (name));
10411 
10412 	    for (tree field = TYPE_FIELDS (TREE_TYPE (container));
10413 		 ; field = DECL_CHAIN (field))
10414 	      {
10415 		tree finner = STRIP_TEMPLATE (field);
10416 		if (TREE_CODE (finner) == TREE_CODE (inner))
10417 		  {
10418 		    if (finner == inner)
10419 		      break;
10420 		    ix++;
10421 		  }
10422 	      }
10423 	    key.index = ix;
10424 	  }
10425 	  break;
10426 
10427 	case MK_vtable:
10428 	  {
10429 	    tree vtable = CLASSTYPE_VTABLES (TREE_TYPE (container));
10430 	    for (unsigned ix = 0; ; vtable = DECL_CHAIN (vtable), ix++)
10431 	      if (vtable == decl)
10432 		{
10433 		  key.index = ix;
10434 		  break;
10435 		}
10436 	    name = NULL_TREE;
10437 	  }
10438 	  break;
10439 
10440 	case MK_as_base:
10441 	  gcc_checking_assert
10442 	    (decl == TYPE_NAME (CLASSTYPE_AS_BASE (TREE_TYPE (container))));
10443 	  break;
10444 
10445 	case MK_local_friend:
10446 	  {
10447 	    /* Find by index on the class's DECL_LIST  */
10448 	    unsigned ix = 0;
10449 	    for (tree decls = CLASSTYPE_DECL_LIST (TREE_CHAIN (decl));
10450 		 decls; decls = TREE_CHAIN (decls))
10451 	      if (!TREE_PURPOSE (decls))
10452 		{
10453 		  tree frnd = friend_from_decl_list (TREE_VALUE (decls));
10454 		  if (frnd == decl)
10455 		    break;
10456 		  ix++;
10457 		}
10458 	    key.index = ix;
10459 	    name = NULL_TREE;
10460 	  }
10461 	  break;
10462 
10463 	case MK_enum:
10464 	  {
10465 	    /* Anonymous enums are located by their first identifier,
10466 	       and underlying type.  */
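	    /* E.g. (illustrative only) a namespace-scope

		 enum { FIRST = 1, SECOND };

	       is keyed by the identifier FIRST together with the
	       TYPE_DECL of its underlying type.  */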
10467 	    tree type = TREE_TYPE (decl);
10468 
10469 	    gcc_checking_assert (UNSCOPED_ENUM_P (type));
10470 	    /* Using the type name drops the bit precision we might
10471 	       have been using on the enum.  */
10472 	    key.ret = TYPE_NAME (ENUM_UNDERLYING_TYPE (type));
10473 	    if (tree values = TYPE_VALUES (type))
10474 	      name = DECL_NAME (TREE_VALUE (values));
10475 	  }
10476 	  break;
10477 
10478 	case MK_attached:
10479 	  {
10480 	    gcc_checking_assert (LAMBDA_TYPE_P (TREE_TYPE (inner)));
10481 	    tree scope = LAMBDA_EXPR_EXTRA_SCOPE (CLASSTYPE_LAMBDA_EXPR
10482 						  (TREE_TYPE (inner)));
10483 	    gcc_checking_assert (TREE_CODE (scope) == VAR_DECL);
10484 	    auto *root = attached_table->get (scope);
10485 	    unsigned ix = root->length ();
10486 	    /* If we don't find it, we'll write a really big number
10487 	       that the reader will ignore.  */
10488 	    while (ix--)
10489 	      if ((*root)[ix] == inner)
10490 		break;
10491 
10492 	    /* Use the attached-to decl as the 'name'.  */
10493 	    name = scope;
10494 	    key.index = ix;
10495 	  }
10496 	  break;
10497 
10498 	case MK_partial:
10499 	  {
10500 	    key.constraints = get_constraints (inner);
10501 	    key.ret = CLASSTYPE_TI_TEMPLATE (TREE_TYPE (inner));
10502 	    key.args = CLASSTYPE_TI_ARGS (TREE_TYPE (inner));
10503 	  }
10504 	  break;
10505 	}
10506 
10507       tree_node (name);
10508       if (streaming_p ())
10509 	{
10510 	  unsigned code = (key.ref_q << 0) | (key.index << 2);
10511 	  u (code);
10512 	}
10513 
10514       if (mk == MK_enum)
10515 	tree_node (key.ret);
10516       else if (mk == MK_partial
10517 	       || (mk == MK_named && inner
10518 		   && TREE_CODE (inner) == FUNCTION_DECL))
10519 	{
10520 	  tree_node (key.ret);
10521 	  tree arg = key.args;
10522 	  if (mk == MK_named)
10523 	    while (arg && arg != void_list_node)
10524 	      {
10525 		tree_node (TREE_VALUE (arg));
10526 		arg = TREE_CHAIN (arg);
10527 	      }
10528 	  tree_node (arg);
10529 	  tree_node (key.constraints);
10530 	}
10531     }
10532 }
10533 
10534 /* DECL is a new declaration that may be duplicated in OVL.  Use RET &
10535    ARGS to find its clone, or NULL.  If DECL's DECL_NAME is NULL, this
10536    has been found by a proxy.  It will be an enum type located by its
10537    first member.
10538 
10539    We're conservative with matches, so ambiguous decls will be
10540    registered as different, then lead to a lookup error if the two
10541    modules are both visible.  Perhaps we want to do something similar
10542    to duplicate decls to get ODR errors on loading?  We already have
10543    some special casing for namespaces.  */
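/* For instance (illustrative only), a global-module declaration

     int f (int);

   arriving from a second module matches an existing 'int f (int)' here
   by return type, ref-qualifier, parameter types and constraints, so
   the two bindings name one entity rather than an overload pair.  */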
10544 
10545 static tree
10546 check_mergeable_decl (merge_kind mk, tree decl, tree ovl, merge_key const &key)
10547 {
10548   tree found = NULL_TREE;
10549   for (ovl_iterator iter (ovl); !found && iter; ++iter)
10550     {
10551       tree match = *iter;
10552 
10553       tree d_inner = decl;
10554       tree m_inner = match;
10555 
10556     again:
10557       if (TREE_CODE (d_inner) != TREE_CODE (m_inner))
10558 	{
10559 	  if (TREE_CODE (match) == NAMESPACE_DECL
10560 	      && !DECL_NAMESPACE_ALIAS (match))
10561 	    /* Namespaces are never overloaded.  */
10562 	    found = match;
10563 
10564 	  continue;
10565 	}
10566 
10567       switch (TREE_CODE (d_inner))
10568 	{
10569 	case TEMPLATE_DECL:
10570 	  if (template_heads_equivalent_p (d_inner, m_inner))
10571 	    {
10572 	      d_inner = DECL_TEMPLATE_RESULT (d_inner);
10573 	      m_inner = DECL_TEMPLATE_RESULT (m_inner);
10574 	      if (d_inner == error_mark_node
10575 		  && TYPE_DECL_ALIAS_P (m_inner))
10576 		{
10577 		  found = match;
10578 		  break;
10579 		}
10580 	      goto again;
10581 	    }
10582 	  break;
10583 
10584 	case FUNCTION_DECL:
10585 	  if (tree m_type = TREE_TYPE (m_inner))
10586 	    if ((!key.ret
10587 		 || same_type_p (key.ret, fndecl_declared_return_type (m_inner)))
10588 		&& type_memfn_rqual (m_type) == key.ref_q
10589 		&& compparms (key.args, TYPE_ARG_TYPES (m_type))
10590 		/* Reject if old is a "C" builtin and new is not "C".
10591 		   Matches decls_match behaviour.  */
10592 		&& (!DECL_IS_UNDECLARED_BUILTIN (m_inner)
10593 		    || !DECL_EXTERN_C_P (m_inner)
10594 		    || DECL_EXTERN_C_P (d_inner)))
10595 	      {
10596 		tree m_reqs = get_constraints (m_inner);
10597 		if (m_reqs)
10598 		  {
10599 		    if (cxx_dialect < cxx20)
10600 		      m_reqs = CI_ASSOCIATED_CONSTRAINTS (m_reqs);
10601 		    else
10602 		      m_reqs = CI_DECLARATOR_REQS (m_reqs);
10603 		  }
10604 
10605 		if (cp_tree_equal (key.constraints, m_reqs))
10606 		  found = match;
10607 	      }
10608 	  break;
10609 
10610 	case TYPE_DECL:
10611 	  if (DECL_IMPLICIT_TYPEDEF_P (d_inner)
10612 	      == DECL_IMPLICIT_TYPEDEF_P (m_inner))
10613 	    {
10614 	      if (!IDENTIFIER_ANON_P (DECL_NAME (m_inner)))
10615 		return match;
10616 	      else if (mk == MK_enum
10617 		       && (TYPE_NAME (ENUM_UNDERLYING_TYPE (TREE_TYPE (m_inner)))
10618 			   == key.ret))
10619 		found = match;
10620 	    }
10621 	  break;
10622 
10623 	default:
10624 	  found = match;
10625 	  break;
10626 	}
10627     }
10628 
10629   return found;
10630 }
10631 
10632 /* DECL, INNER & TYPE are a skeleton set of nodes for a decl.  Only
10633    the bools have been filled in.  Read its merging key and merge it.
10634    Returns the existing decl if there is one.  */
10635 
10636 tree
10637 trees_in::key_mergeable (int tag, merge_kind mk, tree decl, tree inner,
10638 			 tree type, tree container, bool is_mod)
10639 {
10640   const char *kind = "new";
10641   tree existing = NULL_TREE;
10642 
10643   if (mk & MK_template_mask)
10644     {
10645       // FIXME: We could stream the specialization hash?
10646       spec_entry spec;
10647       spec.tmpl = tree_node ();
10648       spec.args = tree_node ();
10649 
10650       if (get_overrun ())
10651 	return error_mark_node;
10652 
10653       DECL_NAME (decl) = DECL_NAME (spec.tmpl);
10654       DECL_CONTEXT (decl) = DECL_CONTEXT (spec.tmpl);
10655       DECL_NAME (inner) = DECL_NAME (decl);
10656       DECL_CONTEXT (inner) = DECL_CONTEXT (decl);
10657 
10658       tree constr = NULL_TREE;
10659       bool is_decl = mk & MK_tmpl_decl_mask;
10660       if (is_decl)
10661 	{
10662 	  if (flag_concepts && TREE_CODE (inner) == VAR_DECL)
10663 	    {
10664 	      constr = tree_node ();
10665 	      if (constr)
10666 		set_constraints (inner, constr);
10667 	    }
10668 	  spec.spec = (mk & MK_tmpl_tmpl_mask) ? inner : decl;
10669 	}
10670       else
10671 	spec.spec = type;
10672       existing = match_mergeable_specialization (is_decl, &spec);
10673       if (constr)
10674 	/* We'll add these back later, if this is the new decl.  */
10675 	remove_constraints (inner);
10676 
10677       if (!existing)
10678 	; /* We'll add to the table once read.  */
10679       else if (mk & MK_tmpl_decl_mask)
10680 	{
10681 	  /* A declaration specialization.  */
10682 	  if (mk & MK_tmpl_tmpl_mask)
10683 	    existing = DECL_TI_TEMPLATE (existing);
10684 	}
10685       else
10686 	{
10687 	  /* A type specialization.  */
10688 	  if (mk & MK_tmpl_tmpl_mask)
10689 	    existing = CLASSTYPE_TI_TEMPLATE (existing);
10690 	  else
10691 	    existing = TYPE_NAME (existing);
10692 	}
10693     }
10694   else if (mk == MK_unique)
10695     kind = "unique";
10696   else
10697     {
10698       tree name = tree_node ();
10699 
10700       merge_key key;
10701       unsigned code = u ();
10702       key.ref_q = cp_ref_qualifier ((code >> 0) & 3);
10703       key.index = code >> 2;
10704 
10705       if (mk == MK_enum)
10706 	key.ret = tree_node ();
10707       else if (mk == MK_partial
10708 	       || ((mk == MK_named || mk == MK_friend_spec)
10709 		   && TREE_CODE (inner) == FUNCTION_DECL))
10710 	{
10711 	  key.ret = tree_node ();
10712 	  tree arg, *arg_ptr = &key.args;
10713 	  while ((arg = tree_node ())
10714 		 && arg != void_list_node
10715 		 && mk != MK_partial)
10716 	    {
10717 	      *arg_ptr = tree_cons (NULL_TREE, arg, NULL_TREE);
10718 	      arg_ptr = &TREE_CHAIN (*arg_ptr);
10719 	    }
10720 	  *arg_ptr = arg;
10721 	  key.constraints = tree_node ();
10722 	}
10723 
10724       if (get_overrun ())
10725 	return error_mark_node;
10726 
10727       if (mk < MK_indirect_lwm)
10728 	{
10729 	  DECL_NAME (decl) = name;
10730 	  DECL_CONTEXT (decl) = FROB_CONTEXT (container);
10731 	}
10732       DECL_NAME (inner) = DECL_NAME (decl);
10733       DECL_CONTEXT (inner) = DECL_CONTEXT (decl);
10734 
10735       if (mk == MK_partial)
10736 	{
10737 	  for (tree spec = DECL_TEMPLATE_SPECIALIZATIONS (key.ret);
10738 	       spec; spec = TREE_CHAIN (spec))
10739 	    {
10740 	      tree tmpl = TREE_VALUE (spec);
10741 	      if (template_args_equal (key.args,
10742 				       CLASSTYPE_TI_ARGS (TREE_TYPE (tmpl)))
10743 		  && cp_tree_equal (key.constraints,
10744 				    get_constraints
10745 				    (DECL_TEMPLATE_RESULT (tmpl))))
10746 		{
10747 		  existing = tmpl;
10748 		  break;
10749 		}
10750 	    }
10751 	}
10752       else
10753 	switch (TREE_CODE (container))
10754 	  {
10755 	  default:
10756 	    gcc_unreachable ();
10757 
10758 	  case NAMESPACE_DECL:
10759 	    if (mk == MK_attached)
10760 	      {
10761 		if (DECL_LANG_SPECIFIC (name)
10762 		    && VAR_OR_FUNCTION_DECL_P (name)
10763 		    && DECL_MODULE_ATTACHMENTS_P (name))
10764 		  if (auto *set = attached_table->get (name))
10765 		    if (key.index < set->length ())
10766 		      {
10767 			existing = (*set)[key.index];
10768 			if (existing)
10769 			  {
10770 			    gcc_checking_assert
10771 			      (DECL_IMPLICIT_TYPEDEF_P (existing));
10772 			    if (inner != decl)
10773 			      existing
10774 				= CLASSTYPE_TI_TEMPLATE (TREE_TYPE (existing));
10775 			  }
10776 		      }
10777 	      }
10778 	    else if (is_mod && !(state->is_module () || state->is_partition ()))
10779 	      kind = "unique";
10780 	    else
10781 	      {
10782 		gcc_checking_assert (mk == MK_named || mk == MK_enum);
10783 		tree mvec;
10784 		tree *vslot = mergeable_namespace_slots (container, name,
10785 							 !is_mod, &mvec);
10786 		existing = check_mergeable_decl (mk, decl, *vslot, key);
10787 		if (!existing)
10788 		  add_mergeable_namespace_entity (vslot, decl);
10789 		else
10790 		  {
10791 		    /* Note that we now have duplicates to deal with in
10792 		       name lookup.  */
10793 		    if (is_mod)
10794 		      BINDING_VECTOR_PARTITION_DUPS_P (mvec) = true;
10795 		    else
10796 		      BINDING_VECTOR_GLOBAL_DUPS_P (mvec) = true;
10797 		  }
10798 	      }
10799 	    break;
10800 
10801 	  case FUNCTION_DECL:
10802 	    // FIXME: What about a voldemort? how do we find what it
10803 	    // duplicates? Do we have to number vmorts relative to
10804 	    // their containing function?  But how would that work
10805 	    // when matching an in-TU declaration?
10806 	    kind = "unique";
10807 	    break;
10808 
10809 	  case TYPE_DECL:
10810 	    if (is_mod && !(state->is_module () || state->is_partition ())
10811 		/* Implicit member functions can come from
10812 		   anywhere.  */
10813 		&& !(DECL_ARTIFICIAL (decl)
10814 		     && TREE_CODE (decl) == FUNCTION_DECL
10815 		     && !DECL_THUNK_P (decl)))
10816 	      kind = "unique";
10817 	    else
10818 	      {
10819 		tree ctx = TREE_TYPE (container);
10820 
10821 		/* For some reason templated enumeral types are not marked
10822 		   as COMPLETE_TYPE_P, even though they have members.
10823 		   This may well be a bug elsewhere.  */
10824 		if (TREE_CODE (ctx) == ENUMERAL_TYPE)
10825 		  existing = find_enum_member (ctx, name);
10826 		else if (COMPLETE_TYPE_P (ctx))
10827 		  {
10828 		    switch (mk)
10829 		      {
10830 		      default:
10831 			gcc_unreachable ();
10832 
10833 		      case MK_named:
10834 			existing = lookup_class_binding (ctx, name);
10835 			if (existing)
10836 			  {
10837 			    tree inner = decl;
10838 			    if (TREE_CODE (inner) == TEMPLATE_DECL
10839 				&& !DECL_MEMBER_TEMPLATE_P (inner))
10840 			      inner = DECL_TEMPLATE_RESULT (inner);
10841 
10842 			    existing = check_mergeable_decl
10843 			      (mk, inner, existing, key);
10844 
10845 			    if (!existing && DECL_ALIAS_TEMPLATE_P (decl))
10846 			      {} // FIXME: Insert into specialization
10847 			    // tables, we'll need the arguments for that!
10848 			  }
10849 			break;
10850 
10851 		      case MK_field:
10852 			{
10853 			  unsigned ix = key.index;
10854 			  for (tree field = TYPE_FIELDS (ctx);
10855 			       field; field = DECL_CHAIN (field))
10856 			    {
10857 			      tree finner = STRIP_TEMPLATE (field);
10858 			      if (TREE_CODE (finner) == TREE_CODE (inner))
10859 				if (!ix--)
10860 				  {
10861 				    existing = field;
10862 				    break;
10863 				  }
10864 			    }
10865 			}
10866 			break;
10867 
10868 		      case MK_vtable:
10869 			{
10870 			  unsigned ix = key.index;
10871 			  for (tree vtable = CLASSTYPE_VTABLES (ctx);
10872 			       vtable; vtable = DECL_CHAIN (vtable))
10873 			    if (!ix--)
10874 			      {
10875 				existing = vtable;
10876 				break;
10877 			      }
10878 			}
10879 			break;
10880 
10881 		      case MK_as_base:
10882 			{
10883 			  tree as_base = CLASSTYPE_AS_BASE (ctx);
10884 			  if (as_base && as_base != ctx)
10885 			    existing = TYPE_NAME (as_base);
10886 			}
10887 			break;
10888 
10889 		      case MK_local_friend:
10890 			{
10891 			  unsigned ix = key.index;
10892 			  for (tree decls = CLASSTYPE_DECL_LIST (ctx);
10893 			       decls; decls = TREE_CHAIN (decls))
10894 			    if (!TREE_PURPOSE (decls) && !ix--)
10895 			      {
10896 				existing
10897 				  = friend_from_decl_list (TREE_VALUE (decls));
10898 				break;
10899 			      }
10900 			}
10901 			break;
10902 		      }
10903 
10904 		    if (existing && mk < MK_indirect_lwm && mk != MK_partial
10905 			&& TREE_CODE (decl) == TEMPLATE_DECL
10906 			&& !DECL_MEMBER_TEMPLATE_P (decl))
10907 		      {
10908 			tree ti;
10909 			if (DECL_IMPLICIT_TYPEDEF_P (existing))
10910 			  ti = TYPE_TEMPLATE_INFO (TREE_TYPE (existing));
10911 			else
10912 			  ti = DECL_TEMPLATE_INFO (existing);
10913 			existing = TI_TEMPLATE (ti);
10914 		      }
10915 		  }
10916 	      }
10917 	  }
10918     }
10919 
10920   dump (dumper::MERGE)
10921     && dump ("Read:%d's %s merge key (%s) %C:%N", tag, merge_kind_name[mk],
10922 	     existing ? "matched" : kind, TREE_CODE (decl), decl);
10923 
10924   return existing;
10925 }
10926 
10927 void
10928 trees_out::binfo_mergeable (tree binfo)
10929 {
10930   tree dom = binfo;
10931   while (tree parent = BINFO_INHERITANCE_CHAIN (dom))
10932     dom = parent;
10933   tree type = BINFO_TYPE (dom);
10934   gcc_checking_assert (TYPE_BINFO (type) == dom);
10935   tree_node (type);
10936   if (streaming_p ())
10937     {
10938       unsigned ix = 0;
10939       for (; dom != binfo; dom = TREE_CHAIN (dom))
10940 	ix++;
10941       u (ix);
10942     }
10943 }
10944 
10945 unsigned
10946 trees_in::binfo_mergeable (tree *type)
10947 {
10948   *type = tree_node ();
10949   return u ();
10950 }
10951 
10952 /* DECL is a just streamed mergeable decl that should match EXISTING.  Check
10953    it does and issue an appropriate diagnostic if not.  Merge any
10954    bits from DECL to EXISTING.  This is stricter matching than
10955    decls_match, because we can rely on ODR-sameness, and we cannot use
10956    decls_match because it can cause instantiations of constraints.  */
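/* For example (illustrative only), importing a header unit that
   declares

     extern long x;

   into a TU that has already declared 'extern int x;' reaches the
   mismatch path below and emits the "conflicting global module
   declaration" diagnostic.  */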
10957 
10958 bool
10959 trees_in::is_matching_decl (tree existing, tree decl, bool is_typedef)
10960 {
10961   // FIXME: We should probably do some duplicate decl-like stuff here
10962   // (beware, default parms should be the same?)  Can we just call
10963   // duplicate_decls and teach it how to handle the module-specific
10964   // permitted/required duplications?
10965 
10966   // We know at this point that the decls have matched by key, so we
10967   // can elide some of the checking
10968   gcc_checking_assert (TREE_CODE (existing) == TREE_CODE (decl));
10969 
10970   tree d_inner = decl;
10971   tree e_inner = existing;
10972   if (TREE_CODE (decl) == TEMPLATE_DECL)
10973     {
10974       d_inner = DECL_TEMPLATE_RESULT (d_inner);
10975       e_inner = DECL_TEMPLATE_RESULT (e_inner);
10976       gcc_checking_assert (TREE_CODE (e_inner) == TREE_CODE (d_inner));
10977     }
10978 
10979   if (TREE_CODE (d_inner) == FUNCTION_DECL)
10980     {
10981       tree e_ret = fndecl_declared_return_type (existing);
10982       tree d_ret = fndecl_declared_return_type (decl);
10983 
10984       if (decl != d_inner && DECL_NAME (d_inner) == fun_identifier
10985 	  && LAMBDA_TYPE_P (DECL_CONTEXT (d_inner)))
10986 	/* This has a recursive type that will compare different.  */;
10987       else if (!same_type_p (d_ret, e_ret))
10988 	goto mismatch;
10989 
10990       tree e_type = TREE_TYPE (e_inner);
10991       tree d_type = TREE_TYPE (d_inner);
10992 
10993       if (DECL_EXTERN_C_P (d_inner) != DECL_EXTERN_C_P (e_inner))
10994 	goto mismatch;
10995 
10996       for (tree e_args = TYPE_ARG_TYPES (e_type),
10997 	     d_args = TYPE_ARG_TYPES (d_type);
10998 	   e_args != d_args && (e_args || d_args);
10999 	   e_args = TREE_CHAIN (e_args), d_args = TREE_CHAIN (d_args))
11000 	{
11001 	  if (!(e_args && d_args))
11002 	    goto mismatch;
11003 
11004 	  if (!same_type_p (TREE_VALUE (d_args), TREE_VALUE (e_args)))
11005 	    goto mismatch;
11006 
11007 	  // FIXME: Check default values
11008 	}
11009 
11010       /* If EXISTING has an undeduced or uninstantiated exception
11011 	 specification, but DECL does not, propagate the exception
11012 	 specification.  Otherwise we end up asserting or trying to
11013 	 instantiate it in the middle of loading.   */
11014       tree e_spec = TYPE_RAISES_EXCEPTIONS (e_type);
11015       tree d_spec = TYPE_RAISES_EXCEPTIONS (d_type);
11016       if (DEFERRED_NOEXCEPT_SPEC_P (e_spec))
11017 	{
11018 	  if (!DEFERRED_NOEXCEPT_SPEC_P (d_spec)
11019 	      || (UNEVALUATED_NOEXCEPT_SPEC_P (e_spec)
11020 		  && !UNEVALUATED_NOEXCEPT_SPEC_P (d_spec)))
11021 	    {
11022 	      dump (dumper::MERGE)
11023 		&& dump ("Propagating instantiated noexcept to %N", existing);
11024 	      TREE_TYPE (existing) = d_type;
11025 
11026 	      /* Propagate to existing clones.  */
11027 	      tree clone;
11028 	      FOR_EACH_CLONE (clone, existing)
11029 		{
11030 		  if (TREE_TYPE (clone) == e_type)
11031 		    TREE_TYPE (clone) = d_type;
11032 		  else
11033 		    TREE_TYPE (clone)
11034 		      = build_exception_variant (TREE_TYPE (clone), d_spec);
11035 		}
11036 	    }
11037 	}
11038       else if (!DEFERRED_NOEXCEPT_SPEC_P (d_spec)
11039 	       && !comp_except_specs (d_spec, e_spec, ce_type))
11040 	goto mismatch;
11041     }
11042   else if (is_typedef)
11043     {
11044       if (!DECL_ORIGINAL_TYPE (e_inner)
11045 	  || !same_type_p (DECL_ORIGINAL_TYPE (d_inner),
11046 			   DECL_ORIGINAL_TYPE (e_inner)))
11047 	goto mismatch;
11048     }
11049   /* Using cp_tree_equal because we can meet TYPE_ARGUMENT_PACKs
11050      here. I suspect the entities that directly do that are things
11051      that shouldn't go to duplicate_decls (FIELD_DECLs etc).   */
11052   else if (!cp_tree_equal (TREE_TYPE (decl), TREE_TYPE (existing)))
11053     {
11054     mismatch:
11055       if (DECL_IS_UNDECLARED_BUILTIN (existing))
11056 	/* Just like duplicate_decls, presume the user knows what
11057 	   they're doing in overriding a builtin.  */
11058 	TREE_TYPE (existing) = TREE_TYPE (decl);
11059       else
11060 	{
11061 	  // FIXME:QOI Might be template specialization from a module,
11062 	  // not necessarily global module
11063 	  error_at (DECL_SOURCE_LOCATION (decl),
11064 		    "conflicting global module declaration %#qD", decl);
11065 	  inform (DECL_SOURCE_LOCATION (existing),
11066 		  "existing declaration %#qD", existing);
11067 	  return false;
11068 	}
11069     }
11070 
11071   if (DECL_IS_UNDECLARED_BUILTIN (existing)
11072       && !DECL_IS_UNDECLARED_BUILTIN (decl))
11073     {
11074       /* We're matching a builtin that the user has yet to declare.
11075 	 We are the one!  This is very much duplicate-decl
11076 	 shenanigans. */
11077       DECL_SOURCE_LOCATION (existing) = DECL_SOURCE_LOCATION (decl);
11078       if (TREE_CODE (decl) != TYPE_DECL)
11079 	{
11080 	  /* Propagate exceptions etc.  */
11081 	  TREE_TYPE (existing) = TREE_TYPE (decl);
11082 	  TREE_NOTHROW (existing) = TREE_NOTHROW (decl);
11083 	}
11084       /* This is actually an import! */
11085       DECL_MODULE_IMPORT_P (existing) = true;
11086 
11087       /* Yay, sliced!  */
11088       existing->base = decl->base;
11089 
11090       if (TREE_CODE (decl) == FUNCTION_DECL)
11091 	{
11092 	  /* Ew :(  */
11093 	  memcpy (&existing->decl_common.size,
11094 		  &decl->decl_common.size,
11095 		  (offsetof (tree_decl_common, pt_uid)
11096 		   - offsetof (tree_decl_common, size)));
11097 	  auto bltin_class = DECL_BUILT_IN_CLASS (decl);
11098 	  existing->function_decl.built_in_class = bltin_class;
11099 	  auto fncode = DECL_UNCHECKED_FUNCTION_CODE (decl);
11100 	  DECL_UNCHECKED_FUNCTION_CODE (existing) = fncode;
11101 	  if (existing->function_decl.built_in_class == BUILT_IN_NORMAL)
11102 	    {
11103 	      if (builtin_decl_explicit_p (built_in_function (fncode)))
11104 		switch (fncode)
11105 		  {
11106 		  case BUILT_IN_STPCPY:
11107 		    set_builtin_decl_implicit_p
11108 		      (built_in_function (fncode), true);
11109 		    break;
11110 		  default:
11111 		    set_builtin_decl_declared_p
11112 		      (built_in_function (fncode), true);
11113 		    break;
11114 		  }
11115 	      copy_attributes_to_builtin (decl);
11116 	    }
11117 	}
11118     }
11119 
11120   if (VAR_OR_FUNCTION_DECL_P (decl)
11121       && DECL_TEMPLATE_INSTANTIATED (decl))
11122     /* Don't instantiate again!  */
11123     DECL_TEMPLATE_INSTANTIATED (existing) = true;
11124 
11125   if (TREE_CODE (d_inner) == FUNCTION_DECL
11126       && DECL_DECLARED_INLINE_P (d_inner))
11127     DECL_DECLARED_INLINE_P (e_inner) = true;
11128   if (!DECL_EXTERNAL (d_inner))
11129     DECL_EXTERNAL (e_inner) = false;
11130 
11131   // FIXME: Check default tmpl and fn parms here
11132 
11133   return true;
11134 }
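
/* As a hedged example of the mismatch path above: if a global-module
   declaration reaches this TU twice with different types, say

     extern int limit;    // seen textually
     extern long limit;   // streamed in from an import

   the merge keys match but the types do not, so the "conflicting
   global module declaration" / "existing declaration" pair of
   diagnostics fires.  */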
11135 
11136 /* FN is an implicit member function that we've discovered is new to
11137    the class.  Add it to the TYPE_FIELDS chain and the method vector.
11138    Reset the appropriate classtype lazy flag.   */
11139 
11140 bool
11141 trees_in::install_implicit_member (tree fn)
11142 {
11143   tree ctx = DECL_CONTEXT (fn);
11144   tree name = DECL_NAME (fn);
11145   /* We know these are synthesized, so the set of expected prototypes
11146      is quite restricted.  We're not validating correctness, just
11147      distinguishing between the small set of possibilities.  */
11148   tree parm_type = TREE_VALUE (FUNCTION_FIRST_USER_PARMTYPE (fn));
11149   if (IDENTIFIER_CTOR_P (name))
11150     {
11151       if (CLASSTYPE_LAZY_DEFAULT_CTOR (ctx)
11152 	  && VOID_TYPE_P (parm_type))
11153 	CLASSTYPE_LAZY_DEFAULT_CTOR (ctx) = false;
11154       else if (!TYPE_REF_P (parm_type))
11155 	return false;
11156       else if (CLASSTYPE_LAZY_COPY_CTOR (ctx)
11157 	       && !TYPE_REF_IS_RVALUE (parm_type))
11158 	CLASSTYPE_LAZY_COPY_CTOR (ctx) = false;
11159       else if (CLASSTYPE_LAZY_MOVE_CTOR (ctx))
11160 	CLASSTYPE_LAZY_MOVE_CTOR (ctx) = false;
11161       else
11162 	return false;
11163     }
11164   else if (IDENTIFIER_DTOR_P (name))
11165     {
11166       if (CLASSTYPE_LAZY_DESTRUCTOR (ctx))
11167 	CLASSTYPE_LAZY_DESTRUCTOR (ctx) = false;
11168       else
11169 	return false;
11170       if (DECL_VIRTUAL_P (fn))
11171 	/* A virtual dtor should have been created when the class
11172 	   became complete.  */
11173 	return false;
11174     }
11175   else if (name == assign_op_identifier)
11176     {
11177       if (!TYPE_REF_P (parm_type))
11178 	return false;
11179       else if (CLASSTYPE_LAZY_COPY_ASSIGN (ctx)
11180 	       && !TYPE_REF_IS_RVALUE (parm_type))
11181 	CLASSTYPE_LAZY_COPY_ASSIGN (ctx) = false;
11182       else if (CLASSTYPE_LAZY_MOVE_ASSIGN (ctx))
11183 	CLASSTYPE_LAZY_MOVE_ASSIGN (ctx) = false;
11184       else
11185 	return false;
11186     }
11187   else
11188     return false;
11189 
11190   dump (dumper::MERGE) && dump ("Adding implicit member %N", fn);
11191 
11192   DECL_CHAIN (fn) = TYPE_FIELDS (ctx);
11193   TYPE_FIELDS (ctx) = fn;
11194 
11195   add_method (ctx, fn, false);
11196 
11197   /* Propagate TYPE_FIELDS.  */
11198   fixup_type_variants (ctx);
11199 
11200   return true;
11201 }
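
/* Sketch of when the above triggers (module and member names are
   illustrative):

     // widget interface
     export module widget;
     export struct Widget { int i; };   // implicit members still lazy

     // store interface
     export module store;
     import widget;
     export inline Widget copy (Widget const &w) { return w; }
     // lazily declares Widget's copy constructor in store's TU

   store's CMI thus contains a member of an imported class; an
   importer of store that reads it finds the copy constructor is new
   to Widget and splices it in here, clearing CLASSTYPE_LAZY_COPY_CTOR.  */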
11202 
11203 /* Return true if DECL has a definition that would be interesting to
11204    write out.  */
11205 
11206 static bool
11207 has_definition (tree decl)
11208 {
11209   bool is_tmpl = TREE_CODE (decl) == TEMPLATE_DECL;
11210   if (is_tmpl)
11211     decl = DECL_TEMPLATE_RESULT (decl);
11212 
11213   switch (TREE_CODE (decl))
11214     {
11215     default:
11216       break;
11217 
11218     case FUNCTION_DECL:
11219       if (!DECL_SAVED_TREE (decl))
11220 	/* Not defined.  */
11221 	break;
11222 
11223       if (DECL_DECLARED_INLINE_P (decl))
11224 	return true;
11225 
11226       if (DECL_THIS_STATIC (decl)
11227 	  && (header_module_p ()
11228 	      || (!DECL_LANG_SPECIFIC (decl) || !DECL_MODULE_PURVIEW_P (decl))))
11229 	/* GM static function.  */
11230 	return true;
11231 
11232       if (DECL_TEMPLATE_INFO (decl))
11233 	{
11234 	  int use_tpl = DECL_USE_TEMPLATE (decl);
11235 
11236 	  // FIXME: Partial specializations have definitions too.
11237 	  if (use_tpl < 2)
11238 	    return true;
11239 	}
11240       break;
11241 
11242     case TYPE_DECL:
11243       {
11244 	tree type = TREE_TYPE (decl);
11245 	if (type == TYPE_MAIN_VARIANT (type)
11246 	    && decl == TYPE_NAME (type)
11247 	    && (TREE_CODE (type) == ENUMERAL_TYPE
11248 		? TYPE_VALUES (type) : TYPE_FIELDS (type)))
11249 	  return true;
11250       }
11251       break;
11252 
11253     case VAR_DECL:
11254       if (DECL_LANG_SPECIFIC (decl)
11255 	  && DECL_TEMPLATE_INFO (decl)
11256 	  && DECL_USE_TEMPLATE (decl) < 2)
11257 	return DECL_INITIAL (decl);
11258       else
11259 	{
11260 	  if (!DECL_INITIALIZED_P (decl))
11261 	    return false;
11262 
11263 	  if (header_module_p ()
11264 	      || (!DECL_LANG_SPECIFIC (decl) || !DECL_MODULE_PURVIEW_P (decl)))
11265 	    /* GM static variable.  */
11266 	    return true;
11267 
11268 	  if (!TREE_CONSTANT (decl))
11269 	    return false;
11270 
11271 	  return true;
11272 	}
11273       break;
11274 
11275     case CONCEPT_DECL:
11276       if (DECL_INITIAL (decl))
11277 	return true;
11278 
11279       break;
11280     }
11281 
11282   return false;
11283 }
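
/* Rough examples of the above tests (a sketch, not exhaustive):

     inline int f () { return 0; }   // inline fn: body is interesting
     enum E { A };                   // TYPE_VALUES present: interesting
     extern int x;                   // not initialized: nothing to write

   and in a header unit even an internal-linkage variable's
   initializer counts as a definition to stream.  */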
11284 
11285 uintptr_t *
11286 trees_in::find_duplicate (tree existing)
11287 {
11288   if (!duplicates)
11289     return NULL;
11290 
11291   return duplicates->get (existing);
11292 }
11293 
11294 /* We're starting to read a duplicate DECL.  EXISTING is the already
11295    known node.  */
11296 
11297 void
11298 trees_in::register_duplicate (tree decl, tree existing)
11299 {
11300   if (!duplicates)
11301     duplicates = new duplicate_hash_map (40);
11302 
11303   bool existed;
11304   uintptr_t &slot = duplicates->get_or_insert (existing, &existed);
11305   gcc_checking_assert (!existed);
11306   slot = reinterpret_cast<uintptr_t> (decl);
11307 }
11308 
11309 /* We've read a definition of MAYBE_EXISTING.  If not a duplicate,
11310    return MAYBE_EXISTING (into which the definition should be
11311    installed).  Otherwise return NULL if already known bad, or the
11312    duplicate we read (for ODR checking, or extracting additional merge
11313    information).  */
11314 
11315 tree
11316 trees_in::odr_duplicate (tree maybe_existing, bool has_defn)
11317 {
11318   tree res = NULL_TREE;
11319 
11320   if (uintptr_t *dup = find_duplicate (maybe_existing))
11321     {
11322       if (!(*dup & 1))
11323 	res = reinterpret_cast<tree> (*dup);
11324     }
11325   else
11326     res = maybe_existing;
11327 
11328   assert_definition (maybe_existing, res && !has_defn);
11329 
11330   // FIXME: We probably need to return the template, so that the
11331   // template header can be checked?
11332   return res ? STRIP_TEMPLATE (res) : NULL_TREE;
11333 }
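
/* The definition readers below use this in roughly this shape:

     tree dup = odr_duplicate (maybe_template, DECL_SAVED_TREE (decl));
     bool installing = dup && !DECL_SAVED_TREE (decl);
     if (installing)
       ... install the streamed bits as the first definition ...
     else if (dup)
       ... ODR-compare the streamed bits against what we already have ...
 */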
11334 
11335 /* The following writer functions rely on the current behaviour of
11336    depset::hash::add_dependency making the decl and defn depset nodes
11337    depend on eachother.  That way we don't have to worry about seeding
11338    depend on each other.  That way we don't have to worry about seeding
11339    the tree map with named decls that cannot be looked up by name (i.e.
11340    be in the same cluster, which is what we want.  */
11341 
11342 void
11343 trees_out::write_function_def (tree decl)
11344 {
11345   tree_node (DECL_RESULT (decl));
11346   tree_node (DECL_INITIAL (decl));
11347   tree_node (DECL_SAVED_TREE (decl));
11348   tree_node (DECL_FRIEND_CONTEXT (decl));
11349 
11350   constexpr_fundef *cexpr = retrieve_constexpr_fundef (decl);
11351   int tag = 0;
11352   if (cexpr)
11353     {
11354       if (cexpr->result == error_mark_node)
11355 	/* We'll stream the RESULT_DECL naturally during the
11356 	   serialization.  We never need to fish it back again, so
11357 	   that's ok.  */
11358 	tag = 0;
11359       else
11360 	tag = insert (cexpr->result);
11361     }
11362   if (streaming_p ())
11363     {
11364       i (tag);
11365       if (tag)
11366 	dump (dumper::TREE)
11367 	  && dump ("Constexpr:%d result %N", tag, cexpr->result);
11368     }
11369   if (tag)
11370     {
11371       unsigned ix = 0;
11372       for (tree parm = cexpr->parms; parm; parm = DECL_CHAIN (parm), ix++)
11373 	{
11374 	  tag = insert (parm);
11375 	  if (streaming_p ())
11376 	    dump (dumper::TREE)
11377 	      && dump ("Constexpr:%d parm:%u %N", tag, ix, parm);
11378 	}
11379       tree_node (cexpr->body);
11380     }
11381 
11382   if (streaming_p ())
11383     {
11384       unsigned flags = 0;
11385 
11386       if (DECL_NOT_REALLY_EXTERN (decl))
11387 	flags |= 1;
11388 
11389       u (flags);
11390     }
11391 }
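
/* For instance, for a constexpr function such as

     constexpr int sq (int i) { return i * i; }

   the constexpr machinery keeps its own copies of the result and
   parameter decls; the tags inserted above let read_function_def
   rebuild an equivalent constexpr_fundef whose body refers to the
   copies it creates.  (A sketch of intent, not of the exact stream
   layout.)  */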
11392 
11393 void
11394 trees_out::mark_function_def (tree)
11395 {
11396 }
11397 
11398 bool
11399 trees_in::read_function_def (tree decl, tree maybe_template)
11400 {
11401   dump () && dump ("Reading function definition %N", decl);
11402   tree result = tree_node ();
11403   tree initial = tree_node ();
11404   tree saved = tree_node ();
11405   tree context = tree_node ();
11406   constexpr_fundef cexpr;
11407 
11408   tree maybe_dup = odr_duplicate (maybe_template, DECL_SAVED_TREE (decl));
11409   bool installing = maybe_dup && !DECL_SAVED_TREE (decl);
11410 
11411   if (int wtag = i ())
11412     {
11413       int tag = 1;
11414       cexpr.result = error_mark_node;
11415 
11416       cexpr.result = copy_decl (result);
11417       tag = insert (cexpr.result);
11418 
11419       if (wtag != tag)
11420 	set_overrun ();
11421       dump (dumper::TREE)
11422 	&& dump ("Constexpr:%d result %N", tag, cexpr.result);
11423 
11424       cexpr.parms = NULL_TREE;
11425       tree *chain = &cexpr.parms;
11426       unsigned ix = 0;
11427       for (tree parm = DECL_ARGUMENTS (maybe_dup ? maybe_dup : decl);
11428 	   parm; parm = DECL_CHAIN (parm), ix++)
11429 	{
11430 	  tree p = copy_decl (parm);
11431 	  tag = insert (p);
11432 	  dump (dumper::TREE)
11433 	    && dump ("Constexpr:%d parm:%u %N", tag, ix, p);
11434 	  *chain = p;
11435 	  chain = &DECL_CHAIN (p);
11436 	}
11437       cexpr.body = tree_node ();
11438       cexpr.decl = decl;
11439     }
11440   else
11441     cexpr.decl = NULL_TREE;
11442 
11443   unsigned flags = u ();
11444 
11445   if (get_overrun ())
11446     return false;
11447 
11448   if (installing)
11449     {
11450       DECL_NOT_REALLY_EXTERN (decl) = flags & 1;
11451       DECL_RESULT (decl) = result;
11452       DECL_INITIAL (decl) = initial;
11453       DECL_SAVED_TREE (decl) = saved;
11454       if (maybe_dup)
11455 	DECL_ARGUMENTS (decl) = DECL_ARGUMENTS (maybe_dup);
11456 
11457       if (context)
11458 	SET_DECL_FRIEND_CONTEXT (decl, context);
11459       if (cexpr.decl)
11460 	register_constexpr_fundef (cexpr);
11461       post_process (maybe_template);
11462     }
11463   else if (maybe_dup)
11464     {
11465       // FIXME:QOI Check matching defn
11466     }
11467 
11468   return true;
11469 }
11470 
11471 /* Also for CONCEPT_DECLs.  */
11472 
11473 void
11474 trees_out::write_var_def (tree decl)
11475 {
11476   tree init = DECL_INITIAL (decl);
11477   tree_node (init);
11478   if (!init)
11479     {
11480       tree dyn_init = NULL_TREE;
11481 
11482       if (DECL_NONTRIVIALLY_INITIALIZED_P (decl))
11483 	{
11484 	  dyn_init = value_member (decl,
11485 				   CP_DECL_THREAD_LOCAL_P (decl)
11486 				   ? tls_aggregates : static_aggregates);
11487 	  gcc_checking_assert (dyn_init);
11488 	  /* Mark it so write_inits knows this is needed.  */
11489 	  TREE_LANG_FLAG_0 (dyn_init) = true;
11490 	  dyn_init = TREE_PURPOSE (dyn_init);
11491 	}
11492       tree_node (dyn_init);
11493     }
11494 }
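
/* Example of the dynamic-initializer path (a sketch):

     export module m;
     export int x = f ();   // not constant-initializable

   Here x's DECL_INITIAL is null and the initializer lives on the
   static_aggregates list, so we stream that expression instead and
   flag its TREE_LIST node so write_inits knows an initialization
   function is needed.  */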
11495 
11496 void
11497 trees_out::mark_var_def (tree)
11498 {
11499 }
11500 
11501 bool
11502 trees_in::read_var_def (tree decl, tree maybe_template)
11503 {
11504   /* Do not mark the virtual table entries as used.  */
11505   bool vtable = TREE_CODE (decl) == VAR_DECL && DECL_VTABLE_OR_VTT_P (decl);
11506   unused += vtable;
11507   tree init = tree_node ();
11508   tree dyn_init = init ? NULL_TREE : tree_node ();
11509   unused -= vtable;
11510 
11511   if (get_overrun ())
11512     return false;
11513 
11514   bool initialized = (VAR_P (decl) ? bool (DECL_INITIALIZED_P (decl))
11515 		      : bool (DECL_INITIAL (decl)));
11516   tree maybe_dup = odr_duplicate (maybe_template, initialized);
11517   bool installing = maybe_dup && !initialized;
11518   if (installing)
11519     {
11520       if (DECL_EXTERNAL (decl))
11521 	DECL_NOT_REALLY_EXTERN (decl) = true;
11522       if (VAR_P (decl))
11523 	{
11524 	  DECL_INITIALIZED_P (decl) = true;
11525 	  if (maybe_dup && DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (maybe_dup))
11526 	    DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl) = true;
11527 	}
11528       DECL_INITIAL (decl) = init;
11529       if (!dyn_init)
11530 	;
11531       else if (CP_DECL_THREAD_LOCAL_P (decl))
11532 	tls_aggregates = tree_cons (dyn_init, decl, tls_aggregates);
11533       else
11534 	static_aggregates = tree_cons (dyn_init, decl, static_aggregates);
11535     }
11536   else if (maybe_dup)
11537     {
11538       // FIXME:QOI Check matching defn
11539     }
11540 
11541   return true;
11542 }
11543 
11544 /* If MEMBER doesn't have an independent life outside the class,
11545    return it (or its TEMPLATE_DECL).  Otherwise NULL.  */
11546 
11547 static tree
11548 member_owned_by_class (tree member)
11549 {
11550   gcc_assert (DECL_P (member));
11551 
11552   /* Clones are owned by their origin.  */
11553   if (DECL_CLONED_FUNCTION_P (member))
11554     return NULL;
11555 
11556   if (TREE_CODE (member) == FIELD_DECL)
11557     /* FIELD_DECLS can have template info in some cases.  We always
11558        want the FIELD_DECL though, as there's never a TEMPLATE_DECL
11559        wrapping them.  */
11560     return member;
11561 
11562   int use_tpl = -1;
11563   if (tree ti = node_template_info (member, use_tpl))
11564     {
11565       // FIXME: Don't bail on things that CANNOT have their own
11566       // template header.  No, make sure they're in the same cluster.
11567       if (use_tpl > 0)
11568 	return NULL_TREE;
11569 
11570       if (DECL_TEMPLATE_RESULT (TI_TEMPLATE (ti)) == member)
11571 	member = TI_TEMPLATE (ti);
11572     }
11573   return member;
11574 }
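
/* Sketch of the distinction (illustrative; the real test is the
   use_tpl value above):

     struct X
     {
       int m;                            // FIELD_DECL: owned by X
       template<typename T> void f (T);  // member template: its
                                         // TEMPLATE_DECL is owned by X
     };
     template<> void X::f (int);         // specialization: independent
 */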
11575 
11576 void
11577 trees_out::write_class_def (tree defn)
11578 {
11579   gcc_assert (DECL_P (defn));
11580   if (streaming_p ())
11581     dump () && dump ("Writing class definition %N", defn);
11582 
11583   tree type = TREE_TYPE (defn);
11584   tree_node (TYPE_SIZE (type));
11585   tree_node (TYPE_SIZE_UNIT (type));
11586   tree_node (TYPE_VFIELD (type));
11587   tree_node (TYPE_BINFO (type));
11588 
11589   vec_chained_decls (TYPE_FIELDS (type));
11590 
11591   /* Every class but __as_base has a type-specific.  */
11592   gcc_checking_assert (!TYPE_LANG_SPECIFIC (type) == IS_FAKE_BASE_TYPE (type));
11593 
11594   if (TYPE_LANG_SPECIFIC (type))
11595     {
11596       {
11597 	vec<tree, va_gc> *v = CLASSTYPE_MEMBER_VEC (type);
11598 	if (!v)
11599 	  {
11600 	    gcc_checking_assert (!streaming_p ());
11601 	    /* Force a class vector.  */
11602 	    v = set_class_bindings (type, -1);
11603 	    gcc_checking_assert (v);
11604 	  }
11605 
11606 	unsigned len = v->length ();
11607 	if (streaming_p ())
11608 	  u (len);
11609 	for (unsigned ix = 0; ix != len; ix++)
11610 	  {
11611 	    tree m = (*v)[ix];
11612 	    if (TREE_CODE (m) == TYPE_DECL
11613 		&& DECL_ARTIFICIAL (m)
11614 		&& TYPE_STUB_DECL (TREE_TYPE (m)) == m)
11615 	      /* This is a using-decl for a type, or an anonymous
11616 		 struct (maybe with a typedef name).  Write the type.  */
11617 	      m = TREE_TYPE (m);
11618 	    tree_node (m);
11619 	  }
11620       }
11621       tree_node (CLASSTYPE_LAMBDA_EXPR (type));
11622 
11623       /* TYPE_CONTAINS_VPTR_P looks at the vbase vector, which the
11624 	 reader won't know at this point.  */
11625       int has_vptr = TYPE_CONTAINS_VPTR_P (type);
11626 
11627       if (streaming_p ())
11628 	{
11629 	  unsigned nvbases = vec_safe_length (CLASSTYPE_VBASECLASSES (type));
11630 	  u (nvbases);
11631 	  i (has_vptr);
11632 	}
11633 
11634       if (has_vptr)
11635 	{
11636 	  tree_vec (CLASSTYPE_PURE_VIRTUALS (type));
11637 	  tree_pair_vec (CLASSTYPE_VCALL_INDICES (type));
11638 	  tree_node (CLASSTYPE_KEY_METHOD (type));
11639 	}
11640     }
11641 
11642   if (TYPE_LANG_SPECIFIC (type))
11643     {
11644       tree_node (CLASSTYPE_PRIMARY_BINFO (type));
11645 
11646       tree as_base = CLASSTYPE_AS_BASE (type);
11647       if (as_base)
11648 	as_base = TYPE_NAME (as_base);
11649       tree_node (as_base);
11650 
11651       /* Write the vtables.  */
11652       tree vtables = CLASSTYPE_VTABLES (type);
11653       vec_chained_decls (vtables);
11654       for (; vtables; vtables = TREE_CHAIN (vtables))
11655 	write_definition (vtables);
11656 
11657       /* Write the friend classes.  */
11658       tree_list (CLASSTYPE_FRIEND_CLASSES (type), false);
11659 
11660       /* Write the friend functions.  */
11661       for (tree friends = DECL_FRIENDLIST (defn);
11662 	   friends; friends = TREE_CHAIN (friends))
11663 	{
11664 	  /* Name of these friends.  */
11665 	  tree_node (TREE_PURPOSE (friends));
11666 	  tree_list (TREE_VALUE (friends), false);
11667 	}
11668       /* End of friend fns.  */
11669       tree_node (NULL_TREE);
11670 
11671       /* Write the decl list.  */
11672       tree_list (CLASSTYPE_DECL_LIST (type), true);
11673 
11674       if (TYPE_CONTAINS_VPTR_P (type))
11675 	{
11676 	  /* Write the thunks.  */
11677 	  for (tree decls = TYPE_FIELDS (type);
11678 	       decls; decls = DECL_CHAIN (decls))
11679 	    if (TREE_CODE (decls) == FUNCTION_DECL
11680 		&& DECL_VIRTUAL_P (decls)
11681 		&& DECL_THUNKS (decls))
11682 	      {
11683 		tree_node (decls);
11684 		/* Thunks are always unique, so chaining is ok.  */
11685 		chained_decls (DECL_THUNKS (decls));
11686 	      }
11687 	  tree_node (NULL_TREE);
11688 	}
11689     }
11690 }
11691 
11692 void
11693 trees_out::mark_class_member (tree member, bool do_defn)
11694 {
11695   gcc_assert (DECL_P (member));
11696 
11697   member = member_owned_by_class (member);
11698   if (member)
11699     mark_declaration (member, do_defn && has_definition (member));
11700 }
11701 
11702 void
11703 trees_out::mark_class_def (tree defn)
11704 {
11705   gcc_assert (DECL_P (defn));
11706   tree type = TREE_TYPE (defn);
11707   /* Mark the class members that are not type-decls and cannot have
11708      independent definitions.  */
11709   for (tree member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
11710     if (TREE_CODE (member) == FIELD_DECL
11711 	|| TREE_CODE (member) == USING_DECL
11712 	/* A cloned enum-decl from 'using enum unrelated;'   */
11713 	|| (TREE_CODE (member) == CONST_DECL
11714 	    && DECL_CONTEXT (member) == type))
11715       {
11716 	mark_class_member (member);
11717 	if (TREE_CODE (member) == FIELD_DECL)
11718 	  if (tree repr = DECL_BIT_FIELD_REPRESENTATIVE (member))
11719 	    mark_declaration (repr, false);
11720       }
11721 
11722   /* Mark the binfo hierarchy.  */
11723   for (tree child = TYPE_BINFO (type); child; child = TREE_CHAIN (child))
11724     mark_by_value (child);
11725 
11726   if (TYPE_LANG_SPECIFIC (type))
11727     {
11728       for (tree vtable = CLASSTYPE_VTABLES (type);
11729 	   vtable; vtable = TREE_CHAIN (vtable))
11730 	mark_declaration (vtable, true);
11731 
11732       if (TYPE_CONTAINS_VPTR_P (type))
11733 	/* Mark the thunks, they belong to the class definition,
11734 	   /not/ the thunked-to function.  */
11735 	for (tree decls = TYPE_FIELDS (type);
11736 	     decls; decls = DECL_CHAIN (decls))
11737 	  if (TREE_CODE (decls) == FUNCTION_DECL)
11738 	    for (tree thunks = DECL_THUNKS (decls);
11739 		 thunks; thunks = DECL_CHAIN (thunks))
11740 	      mark_declaration (thunks, false);
11741     }
11742 }
11743 
11744 /* Nop sorting, needed for resorting the member vec.  */
11745 
11746 static void
11747 nop (void *, void *)
11748 {
11749 }
11750 
11751 bool
11752 trees_in::read_class_def (tree defn, tree maybe_template)
11753 {
11754   gcc_assert (DECL_P (defn));
11755   dump () && dump ("Reading class definition %N", defn);
11756   tree type = TREE_TYPE (defn);
11757   tree size = tree_node ();
11758   tree size_unit = tree_node ();
11759   tree vfield = tree_node ();
11760   tree binfo = tree_node ();
11761   vec<tree, va_gc> *vbase_vec = NULL;
11762   vec<tree, va_gc> *member_vec = NULL;
11763   vec<tree, va_gc> *pure_virts = NULL;
11764   vec<tree_pair_s, va_gc> *vcall_indices = NULL;
11765   tree key_method = NULL_TREE;
11766   tree lambda = NULL_TREE;
11767 
11768   /* Read the fields.  */
11769   vec<tree, va_heap> *fields = vec_chained_decls ();
11770 
11771   if (TYPE_LANG_SPECIFIC (type))
11772     {
11773       if (unsigned len = u ())
11774 	{
11775 	  vec_alloc (member_vec, len);
11776 	  for (unsigned ix = 0; ix != len; ix++)
11777 	    {
11778 	      tree m = tree_node ();
11779 	      if (get_overrun ())
11780 		break;
11781 	      if (TYPE_P (m))
11782 		m = TYPE_STUB_DECL (m);
11783 	      member_vec->quick_push (m);
11784 	    }
11785 	}
11786       lambda = tree_node ();
11787 
11788       if (!get_overrun ())
11789 	{
11790 	  unsigned nvbases = u ();
11791 	  if (nvbases)
11792 	    {
11793 	      vec_alloc (vbase_vec, nvbases);
11794 	      for (tree child = binfo; child; child = TREE_CHAIN (child))
11795 		if (BINFO_VIRTUAL_P (child))
11796 		  vbase_vec->quick_push (child);
11797 	    }
11798 	}
11799 
11800       if (!get_overrun ())
11801 	{
11802 	  int has_vptr = i ();
11803 	  if (has_vptr)
11804 	    {
11805 	      pure_virts = tree_vec ();
11806 	      vcall_indices = tree_pair_vec ();
11807 	      key_method = tree_node ();
11808 	    }
11809 	}
11810     }
11811 
11812   tree maybe_dup = odr_duplicate (maybe_template, TYPE_SIZE (type));
11813   bool installing = maybe_dup && !TYPE_SIZE (type);
11814   if (installing)
11815     {
11816       if (DECL_EXTERNAL (defn) && TYPE_LANG_SPECIFIC (type))
11817 	{
11818 	  /* We don't deal with not-really-extern, because, for a
11819 	     module you want the import to be the interface, and for a
11820 	     header-unit, you're doing it wrong.  */
11821 	  CLASSTYPE_INTERFACE_UNKNOWN (type) = false;
11822 	  CLASSTYPE_INTERFACE_ONLY (type) = true;
11823 	}
11824 
11825       if (maybe_dup != defn)
11826 	{
11827 	  // FIXME: This is needed on other defns too, almost
11828 	  // duplicate-decl like?  See is_matching_decl too.
11829 	  /* Copy flags from the duplicate.  */
11830 	  tree type_dup = TREE_TYPE (maybe_dup);
11831 
11832 	  /* Core pieces.  */
11833 	  TYPE_MODE_RAW (type) = TYPE_MODE_RAW (type_dup);
11834 	  SET_DECL_MODE (defn, DECL_MODE (maybe_dup));
11835 	  TREE_ADDRESSABLE (type) = TREE_ADDRESSABLE (type_dup);
11836 	  DECL_SIZE (defn) = DECL_SIZE (maybe_dup);
11837 	  DECL_SIZE_UNIT (defn) = DECL_SIZE_UNIT (maybe_dup);
11838 	  DECL_ALIGN_RAW (defn) = DECL_ALIGN_RAW (maybe_dup);
11839 	  DECL_WARN_IF_NOT_ALIGN_RAW (defn)
11840 	    = DECL_WARN_IF_NOT_ALIGN_RAW (maybe_dup);
11841 	  DECL_USER_ALIGN (defn) = DECL_USER_ALIGN (maybe_dup);
11842 
11843 	  /* C++ pieces.  */
11844 	  TYPE_POLYMORPHIC_P (type) = TYPE_POLYMORPHIC_P (type_dup);
11845 	  TYPE_HAS_USER_CONSTRUCTOR (type)
11846 	    = TYPE_HAS_USER_CONSTRUCTOR (type_dup);
11847 	  TYPE_HAS_NONTRIVIAL_DESTRUCTOR (type)
11848 	    = TYPE_HAS_NONTRIVIAL_DESTRUCTOR (type_dup);
11849 
11850 	  if (auto ls = TYPE_LANG_SPECIFIC (type_dup))
11851 	    {
11852 	      if (TYPE_LANG_SPECIFIC (type))
11853 		{
11854 		  CLASSTYPE_BEFRIENDING_CLASSES (type_dup)
11855 		    = CLASSTYPE_BEFRIENDING_CLASSES (type);
11856 		  CLASSTYPE_TYPEINFO_VAR (type_dup)
11857 		    = CLASSTYPE_TYPEINFO_VAR (type);
11858 		}
11859 	      for (tree v = type; v; v = TYPE_NEXT_VARIANT (v))
11860 		TYPE_LANG_SPECIFIC (v) = ls;
11861 	    }
11862 	}
11863 
11864       TYPE_SIZE (type) = size;
11865       TYPE_SIZE_UNIT (type) = size_unit;
11866 
11867       if (fields)
11868 	{
11869 	  tree *chain = &TYPE_FIELDS (type);
11870 	  unsigned len = fields->length ();
11871 	  for (unsigned ix = 0; ix != len; ix++)
11872 	    {
11873 	      tree decl = (*fields)[ix];
11874 
11875 	      if (!decl)
11876 		{
11877 		  /* An anonymous struct with typedef name.  */
11878 		  tree tdef = (*fields)[ix+1];
11879 		  decl = TYPE_STUB_DECL (TREE_TYPE (tdef));
11880 		  gcc_checking_assert (IDENTIFIER_ANON_P (DECL_NAME (decl))
11881 				       && decl != tdef);
11882 		}
11883 
11884 	      gcc_checking_assert (!*chain == !DECL_CLONED_FUNCTION_P (decl));
11885 	      *chain = decl;
11886 	      chain = &DECL_CHAIN (decl);
11887 
11888 	      if (TREE_CODE (decl) == USING_DECL
11889 		  && TREE_CODE (USING_DECL_SCOPE (decl)) == RECORD_TYPE)
11890 		{
11891 		  /* Reconstruct DECL_ACCESS.  */
11892 		  tree decls = USING_DECL_DECLS (decl);
11893 		  tree access = declared_access (decl);
11894 
11895 		  for (ovl_iterator iter (decls); iter; ++iter)
11896 		    {
11897 		      tree d = *iter;
11898 
11899 		      retrofit_lang_decl (d);
11900 		      tree list = DECL_ACCESS (d);
11901 
11902 		      if (!purpose_member (type, list))
11903 			DECL_ACCESS (d) = tree_cons (type, access, list);
11904 		    }
11905 		}
11906 	    }
11907 	}
11908 
11909       TYPE_VFIELD (type) = vfield;
11910       TYPE_BINFO (type) = binfo;
11911 
11912       if (TYPE_LANG_SPECIFIC (type))
11913 	{
11914 	  CLASSTYPE_LAMBDA_EXPR (type) = lambda;
11915 
11916 	  CLASSTYPE_MEMBER_VEC (type) = member_vec;
11917 	  CLASSTYPE_PURE_VIRTUALS (type) = pure_virts;
11918 	  CLASSTYPE_VCALL_INDICES (type) = vcall_indices;
11919 
11920 	  CLASSTYPE_KEY_METHOD (type) = key_method;
11921 
11922 	  CLASSTYPE_VBASECLASSES (type) = vbase_vec;
11923 
11924 	  /* Resort the member vector.  */
11925 	  resort_type_member_vec (member_vec, NULL, nop, NULL);
11926 	}
11927     }
11928   else if (maybe_dup)
11929     {
11930       // FIXME:QOI Check matching defn
11931     }
11932 
11933   if (TYPE_LANG_SPECIFIC (type))
11934     {
11935       tree primary = tree_node ();
11936       tree as_base = tree_node ();
11937 
11938       if (as_base)
11939 	as_base = TREE_TYPE (as_base);
11940 
11941       /* Read the vtables.  */
11942       vec<tree, va_heap> *vtables = vec_chained_decls ();
11943       if (vtables)
11944 	{
11945 	  unsigned len = vtables->length ();
11946 	  for (unsigned ix = 0; ix != len; ix++)
11947 	    {
11948 	      tree vtable = (*vtables)[ix];
11949 	      read_var_def (vtable, vtable);
11950 	    }
11951 	}
11952 
11953       tree friend_classes = tree_list (false);
11954       tree friend_functions = NULL_TREE;
11955       for (tree *chain = &friend_functions;
11956 	   tree name = tree_node (); chain = &TREE_CHAIN (*chain))
11957 	{
11958 	  tree val = tree_list (false);
11959 	  *chain = build_tree_list (name, val);
11960 	}
11961       tree decl_list = tree_list (true);
11962 
11963       if (installing)
11964 	{
11965 	  CLASSTYPE_PRIMARY_BINFO (type) = primary;
11966 	  CLASSTYPE_AS_BASE (type) = as_base;
11967 
11968 	  if (vtables)
11969 	    {
11970 	      if (!CLASSTYPE_KEY_METHOD (type)
11971 		  /* Sneaky user may have defined it inline
11972 		     out-of-class.  */
11973 		  || DECL_DECLARED_INLINE_P (CLASSTYPE_KEY_METHOD (type)))
11974 		vec_safe_push (keyed_classes, type);
11975 	      unsigned len = vtables->length ();
11976 	      tree *chain = &CLASSTYPE_VTABLES (type);
11977 	      for (unsigned ix = 0; ix != len; ix++)
11978 		{
11979 		  tree vtable = (*vtables)[ix];
11980 		  gcc_checking_assert (!*chain);
11981 		  *chain = vtable;
11982 		  chain = &DECL_CHAIN (vtable);
11983 		}
11984 	    }
11985 	  CLASSTYPE_FRIEND_CLASSES (type) = friend_classes;
11986 	  DECL_FRIENDLIST (defn) = friend_functions;
11987 	  CLASSTYPE_DECL_LIST (type) = decl_list;
11988 
11989 	  for (; friend_classes; friend_classes = TREE_CHAIN (friend_classes))
11990 	    {
11991 	      tree f = TREE_VALUE (friend_classes);
11992 
11993 	      if (TYPE_P (f))
11994 		{
11995 		  CLASSTYPE_BEFRIENDING_CLASSES (f)
11996 		    = tree_cons (NULL_TREE, type,
11997 				 CLASSTYPE_BEFRIENDING_CLASSES (f));
11998 		  dump () && dump ("Class %N befriending %C:%N",
11999 				   type, TREE_CODE (f), f);
12000 		}
12001 	    }
12002 
12003 	  for (; friend_functions;
12004 	       friend_functions = TREE_CHAIN (friend_functions))
12005 	    for (tree friend_decls = TREE_VALUE (friend_functions);
12006 		 friend_decls; friend_decls = TREE_CHAIN (friend_decls))
12007 	      {
12008 		tree f = TREE_VALUE (friend_decls);
12009 
12010 		DECL_BEFRIENDING_CLASSES (f)
12011 		  = tree_cons (NULL_TREE, type, DECL_BEFRIENDING_CLASSES (f));
12012 		dump () && dump ("Class %N befriending %C:%N",
12013 				 type, TREE_CODE (f), f);
12014 	      }
12015 	}
12016 
12017       if (TYPE_CONTAINS_VPTR_P (type))
12018 	/* Read and install the thunks.  */
12019 	while (tree vfunc = tree_node ())
12020 	  {
12021 	    tree thunks = chained_decls ();
12022 	    if (installing)
12023 	      SET_DECL_THUNKS (vfunc, thunks);
12024 	  }
12025 
12026       vec_free (vtables);
12027     }
12028 
12029   /* Propagate to all variants.  */
12030   if (installing)
12031     fixup_type_variants (type);
12032 
12033   /* IS_FAKE_BASE_TYPE is inaccurate at this point, because if this is
12034      the fake base, we've not hooked it into the containing class's
12035      data structure yet.  Fortunately it has a unique name.  */
12036   if (installing
12037       && DECL_NAME (defn) != as_base_identifier
12038       && (!CLASSTYPE_TEMPLATE_INFO (type)
12039 	  || !uses_template_parms (TI_ARGS (CLASSTYPE_TEMPLATE_INFO (type)))))
12040     /* Emit debug info.  It'd be nice to know if the interface TU
12041        already emitted this.  */
12042     rest_of_type_compilation (type, !LOCAL_CLASS_P (type));
12043 
12044   vec_free (fields);
12045 
12046   return !get_overrun ();
12047 }
12048 
12049 void
12050 trees_out::write_enum_def (tree decl)
12051 {
12052   tree type = TREE_TYPE (decl);
12053 
12054   tree_node (TYPE_VALUES (type));
12055   tree_node (TYPE_MIN_VALUE (type));
12056   tree_node (TYPE_MAX_VALUE (type));
12057 }
12058 
12059 void
12060 trees_out::mark_enum_def (tree decl)
12061 {
12062   tree type = TREE_TYPE (decl);
12063 
12064   for (tree values = TYPE_VALUES (type); values; values = TREE_CHAIN (values))
12065     {
12066       tree cst = TREE_VALUE (values);
12067       mark_by_value (cst);
12068       /* We must mark the init to avoid circularity in tt_enum_int.  */
12069       if (tree init = DECL_INITIAL (cst))
12070 	if (TREE_CODE (init) == INTEGER_CST)
12071 	  mark_by_value (init);
12072     }
12073 }
12074 
12075 bool
12076 trees_in::read_enum_def (tree defn, tree maybe_template)
12077 {
12078   tree type = TREE_TYPE (defn);
12079   tree values = tree_node ();
12080   tree min = tree_node ();
12081   tree max = tree_node ();
12082 
12083   if (get_overrun ())
12084     return false;
12085 
12086   tree maybe_dup = odr_duplicate (maybe_template, TYPE_VALUES (type));
12087   bool installing = maybe_dup && !TYPE_VALUES (type);
12088 
12089   if (installing)
12090     {
12091       TYPE_VALUES (type) = values;
12092       TYPE_MIN_VALUE (type) = min;
12093       TYPE_MAX_VALUE (type) = max;
12094 
12095       rest_of_type_compilation (type, DECL_NAMESPACE_SCOPE_P (defn));
12096     }
12097   else if (maybe_dup)
12098     {
12099       tree known = TYPE_VALUES (type);
12100       for (; known && values;
12101 	   known = TREE_CHAIN (known), values = TREE_CHAIN (values))
12102 	{
12103 	  tree known_decl = TREE_VALUE (known);
12104 	  tree new_decl = TREE_VALUE (values);
12105 
12106 	  if (DECL_NAME (known_decl) != DECL_NAME (new_decl))
12107 	    goto bad;
12108 
12109 	  new_decl = maybe_duplicate (new_decl);
12110 
12111 	  if (!cp_tree_equal (DECL_INITIAL (known_decl),
12112 			      DECL_INITIAL (new_decl)))
12113 	    goto bad;
12114 	}
12115 
12116       if (known || values)
12117 	goto bad;
12118 
12119       if (!cp_tree_equal (TYPE_MIN_VALUE (type), min)
12120 	  || !cp_tree_equal (TYPE_MAX_VALUE (type), max))
12121 	{
12122 	bad:;
12123 	  error_at (DECL_SOURCE_LOCATION (maybe_dup),
12124 		    "definition of %qD does not match", maybe_dup);
12125 	  inform (DECL_SOURCE_LOCATION (defn),
12126 		  "existing definition %qD", defn);
12127 
12128 	  tree known_decl = NULL_TREE, new_decl = NULL_TREE;
12129 
12130 	  if (known)
12131 	    known_decl = TREE_VALUE (known);
12132 	  if (values)
12133 	    new_decl = maybe_duplicate (TREE_VALUE (values));
12134 
12135 	  if (known_decl && new_decl)
12136 	    {
12137 	      inform (DECL_SOURCE_LOCATION (new_decl),
12138 		      "... this enumerator %qD", new_decl);
12139 	      inform (DECL_SOURCE_LOCATION (known_decl),
12140 		      "enumerator %qD does not match ...", known_decl);
12141 	    }
12142 	  else if (known_decl || new_decl)
12143 	    {
12144 	      tree extra = known_decl ? known_decl : new_decl;
12145 	      inform (DECL_SOURCE_LOCATION (extra),
12146 		      "additional enumerators beginning with %qD", extra);
12147 	    }
12148 	  else
12149 	    inform (DECL_SOURCE_LOCATION (maybe_dup),
12150 		    "enumeration range differs");
12151 
12152 	  /* Mark it bad.  */
12153 	  unmatched_duplicate (maybe_template);
12154 	}
12155     }
12156 
12157   return true;
12158 }
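
/* E.g. (a sketch), a header unit defining

     enum E { A = 1, B = 2 };

   read into a TU whose own textual definition was

     enum E { A = 1, B = 3 };

   reports via the diagnostics above that enumerator B does not match.  */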
12159 
12160 /* Write out the body of DECL.  See above circularity note.  */
12161 
12162 void
12163 trees_out::write_definition (tree decl)
12164 {
12165   if (streaming_p ())
12166     {
12167       assert_definition (decl);
12168       dump ()
12169 	&& dump ("Writing definition %C:%N", TREE_CODE (decl), decl);
12170     }
12171   else
12172     dump (dumper::DEPEND)
12173       && dump ("Depending definition %C:%N", TREE_CODE (decl), decl);
12174 
12175  again:
12176   switch (TREE_CODE (decl))
12177     {
12178     default:
12179       gcc_unreachable ();
12180 
12181     case TEMPLATE_DECL:
12182       decl = DECL_TEMPLATE_RESULT (decl);
12183       goto again;
12184 
12185     case FUNCTION_DECL:
12186       write_function_def (decl);
12187       break;
12188 
12189     case TYPE_DECL:
12190       {
12191 	tree type = TREE_TYPE (decl);
12192 	gcc_assert (TYPE_MAIN_VARIANT (type) == type
12193 		    && TYPE_NAME (type) == decl);
12194 	if (TREE_CODE (type) == ENUMERAL_TYPE)
12195 	  write_enum_def (decl);
12196 	else
12197 	  write_class_def (decl);
12198       }
12199       break;
12200 
12201     case VAR_DECL:
12202     case CONCEPT_DECL:
12203       write_var_def (decl);
12204       break;
12205     }
12206 }
12207 
12208 /* Mark a declaration for by-value walking.  If DO_DEFN is true, mark
12209    its body too.  */
12210 
12211 void
12212 trees_out::mark_declaration (tree decl, bool do_defn)
12213 {
12214   mark_by_value (decl);
12215 
12216   if (TREE_CODE (decl) == TEMPLATE_DECL)
12217     decl = DECL_TEMPLATE_RESULT (decl);
12218 
12219   if (!do_defn)
12220     return;
12221 
12222   switch (TREE_CODE (decl))
12223     {
12224     default:
12225       gcc_unreachable ();
12226 
12227     case FUNCTION_DECL:
12228       mark_function_def (decl);
12229       break;
12230 
12231     case TYPE_DECL:
12232       {
12233 	tree type = TREE_TYPE (decl);
12234 	gcc_assert (TYPE_MAIN_VARIANT (type) == type
12235 		    && TYPE_NAME (type) == decl);
12236 	if (TREE_CODE (type) == ENUMERAL_TYPE)
12237 	  mark_enum_def (decl);
12238 	else
12239 	  mark_class_def (decl);
12240       }
12241       break;
12242 
12243     case VAR_DECL:
12244     case CONCEPT_DECL:
12245       mark_var_def (decl);
12246       break;
12247     }
12248 }
12249 
12250 /* Read in the body of DECL.  See above circularity note.  */
12251 
12252 bool
12253 trees_in::read_definition (tree decl)
12254 {
12255   dump () && dump ("Reading definition %C %N", TREE_CODE (decl), decl);
12256 
12257   tree maybe_template = decl;
12258 
12259  again:
12260   switch (TREE_CODE (decl))
12261     {
12262     default:
12263       break;
12264 
12265     case TEMPLATE_DECL:
12266       decl = DECL_TEMPLATE_RESULT (decl);
12267       goto again;
12268 
12269     case FUNCTION_DECL:
12270       return read_function_def (decl, maybe_template);
12271 
12272     case TYPE_DECL:
12273       {
12274 	tree type = TREE_TYPE (decl);
12275 	gcc_assert (TYPE_MAIN_VARIANT (type) == type
12276 		    && TYPE_NAME (type) == decl);
12277 	if (TREE_CODE (type) == ENUMERAL_TYPE)
12278 	  return read_enum_def (decl, maybe_template);
12279 	else
12280 	  return read_class_def (decl, maybe_template);
12281       }
12282       break;
12283 
12284     case VAR_DECL:
12285     case CONCEPT_DECL:
12286       return read_var_def (decl, maybe_template);
12287     }
12288 
12289   return false;
12290 }
12291 
12292 /* Look up and maybe insert a slot for the depset for KEY.  */
12293 
12294 depset **
12295 depset::hash::entity_slot (tree entity, bool insert)
12296 {
12297   traits::compare_type key (entity, NULL);
12298   depset **slot = find_slot_with_hash (key, traits::hash (key),
12299 				       insert ? INSERT : NO_INSERT);
12300 
12301   return slot;
12302 }
12303 
12304 depset **
12305 depset::hash::binding_slot (tree ctx, tree name, bool insert)
12306 {
12307   traits::compare_type key (ctx, name);
12308   depset **slot = find_slot_with_hash (key, traits::hash (key),
12309 				       insert ? INSERT : NO_INSERT);
12310 
12311   return slot;
12312 }
12313 
12314 depset *
12315 depset::hash::find_dependency (tree decl)
12316 {
12317   depset **slot = entity_slot (decl, false);
12318 
12319   return slot ? *slot : NULL;
12320 }
12321 
12322 depset *
12323 depset::hash::find_binding (tree ctx, tree name)
12324 {
12325   depset **slot = binding_slot (ctx, name, false);
12326 
12327   return slot ? *slot : NULL;
12328 }
12329 
12330 /* DECL is a newly discovered dependency.  Create the depset, if it
12331    doesn't already exist.  Add it to the worklist if so.
12332 
12333    DECL will be an OVL_USING_P OVERLOAD, if it's from a binding that's
12334    a using decl.
12335 
12336    We do not have to worry about adding the same dependency more than
12337    once.  First, it's harmless; second, the TREE_VISITED marking
12338    prevents us from wanting to do it anyway.  */
12339 
12340 depset *
12341 depset::hash::make_dependency (tree decl, entity_kind ek)
12342 {
12343   /* Make sure we're being told consistent information.  */
12344   gcc_checking_assert ((ek == EK_NAMESPACE)
12345 		       == (TREE_CODE (decl) == NAMESPACE_DECL
12346 			   && !DECL_NAMESPACE_ALIAS (decl)));
12347   gcc_checking_assert (ek != EK_BINDING && ek != EK_REDIRECT);
12348   gcc_checking_assert (TREE_CODE (decl) != FIELD_DECL
12349 		       && (TREE_CODE (decl) != USING_DECL
12350 			   || TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL));
12351   gcc_checking_assert (!is_key_order ());
12352   if (ek == EK_USING)
12353     gcc_checking_assert (TREE_CODE (decl) == OVERLOAD);
12354 
12355   if (TREE_CODE (decl) == TEMPLATE_DECL)
12356     /* The template should have copied these from its result decl.  */
12357     gcc_checking_assert (DECL_MODULE_EXPORT_P (decl)
12358 			 == DECL_MODULE_EXPORT_P (DECL_TEMPLATE_RESULT (decl)));
12359 
12360   depset **slot = entity_slot (decl, true);
12361   depset *dep = *slot;
12362   bool for_binding = ek == EK_FOR_BINDING;
12363 
12364   if (!dep)
12365     {
12366       if (DECL_IMPLICIT_TYPEDEF_P (decl)
12367 	  /* ... not an enum, for instance.  */
12368 	  && RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl))
12369 	  && TYPE_LANG_SPECIFIC (TREE_TYPE (decl))
12370 	  && CLASSTYPE_USE_TEMPLATE (TREE_TYPE (decl)) == 2)
12371 	{
12372 	  /* A partial or explicit specialization. Partial
12373 	     specializations might not be in the hash table, because
12374 	     there can be multiple differently-constrained variants.
12375 
12376 	     template<typename T> class silly;
12377 	     template<typename T> requires true class silly {};
12378 
12379 	     We need to find them, insert their TEMPLATE_DECL in the
12380 	     dep_hash, and then convert the dep we just found into a
12381 	     redirect.  */
12382 
12383 	  tree ti = TYPE_TEMPLATE_INFO (TREE_TYPE (decl));
12384 	  tree tmpl = TI_TEMPLATE (ti);
12385 	  tree partial = NULL_TREE;
12386 	  for (tree spec = DECL_TEMPLATE_SPECIALIZATIONS (tmpl);
12387 	       spec; spec = TREE_CHAIN (spec))
12388 	    if (DECL_TEMPLATE_RESULT (TREE_VALUE (spec)) == decl)
12389 	      {
12390 		partial = TREE_VALUE (spec);
12391 		break;
12392 	      }
12393 
12394 	  if (partial)
12395 	    {
12396 	      /* Eagerly create an empty redirect.  The following
12397 	         make_dependency call could cause hash reallocation,
12398 	         and invalidate slot's value.  */
12399 	      depset *redirect = make_entity (decl, EK_REDIRECT);
12400 
12401 	      /* Redirects are never reached -- always snap to their target.  */
12402 	      redirect->set_flag_bit<DB_UNREACHED_BIT> ();
12403 
12404 	      *slot = redirect;
12405 
12406 	      depset *tmpl_dep = make_dependency (partial, EK_PARTIAL);
12407 	      gcc_checking_assert (tmpl_dep->get_entity_kind () == EK_PARTIAL);
12408 
12409 	      redirect->deps.safe_push (tmpl_dep);
12410 
12411 	      return redirect;
12412 	    }
12413 	}
12414 
12415       bool has_def = ek != EK_USING && has_definition (decl);
12416       if (ek > EK_BINDING)
12417 	ek = EK_DECL;
12418 
12419       /* The only OVERLOADS we should see are USING decls from
12420 	 bindings.  */
12421       *slot = dep = make_entity (decl, ek, has_def);
12422 
12423       if (TREE_CODE (decl) == TEMPLATE_DECL)
12424 	{
12425 	  if (DECL_ALIAS_TEMPLATE_P (decl) && DECL_TEMPLATE_INFO (decl))
12426 	    dep->set_flag_bit<DB_ALIAS_TMPL_INST_BIT> ();
12427 	  else if (CHECKING_P)
12428 	    /* The template_result should otherwise not be in the
12429 	       table, or be an empty redirect (created above).  */
12430 	    if (auto *eslot = entity_slot (DECL_TEMPLATE_RESULT (decl), false))
12431 	      gcc_checking_assert ((*eslot)->get_entity_kind () == EK_REDIRECT
12432 				   && !(*eslot)->deps.length ());
12433 	}
12434 
12435       if (ek != EK_USING)
12436 	{
12437 	  tree not_tmpl = STRIP_TEMPLATE (decl);
12438 
12439 	  if (DECL_LANG_SPECIFIC (not_tmpl)
12440 	      && DECL_MODULE_IMPORT_P (not_tmpl))
12441 	    {
12442 	      /* Store the module number and index in cluster/section,
12443 		 so we don't have to look them up again.  */
12444 	      unsigned index = import_entity_index (decl);
12445 	      module_state *from = import_entity_module (index);
12446 	      /* Remap will be zero for imports from partitions, which
12447 		 we want to treat as-if declared in this TU.  */
12448 	      if (from->remap)
12449 		{
12450 		  dep->cluster = index - from->entity_lwm;
12451 		  dep->section = from->remap;
12452 		  dep->set_flag_bit<DB_IMPORTED_BIT> ();
12453 		}
12454 	    }
12455 
12456 	  if (ek == EK_DECL
12457 	      && !dep->is_import ()
12458 	      && TREE_CODE (CP_DECL_CONTEXT (decl)) == NAMESPACE_DECL
12459 	      && !(TREE_CODE (decl) == TEMPLATE_DECL
12460 		   && DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (decl)))
12461 	    {
12462 	      tree ctx = CP_DECL_CONTEXT (decl);
12463 
12464 	      if (!TREE_PUBLIC (ctx))
12465 		/* Member of internal namespace.  */
12466 		dep->set_flag_bit<DB_IS_INTERNAL_BIT> ();
12467 	      else if (VAR_OR_FUNCTION_DECL_P (not_tmpl)
12468 		       && DECL_THIS_STATIC (not_tmpl))
12469 		{
12470 		  /* An internal decl.  This is ok in a GM entity.  */
12471 		  if (!(header_module_p ()
12472 			|| !DECL_LANG_SPECIFIC (not_tmpl)
12473 			|| !DECL_MODULE_PURVIEW_P (not_tmpl)))
12474 		    dep->set_flag_bit<DB_IS_INTERNAL_BIT> ();
12475 		}
12476 	    }
12477 	}
12478 
12479       if (!dep->is_import ())
12480 	worklist.safe_push (dep);
12481     }
12482 
12483   dump (dumper::DEPEND)
12484     && dump ("%s on %s %C:%N found",
12485 	     ek == EK_REDIRECT ? "Redirect"
12486 	     : for_binding ? "Binding" : "Dependency",
12487 	     dep->entity_kind_name (), TREE_CODE (decl), decl);
12488 
12489   return dep;
12490 }
12491 
12492 /* DEP is a newly discovered dependency.  Append it to current's
12493    depset.  */
12494 
12495 void
12496 depset::hash::add_dependency (depset *dep)
12497 {
12498   gcc_checking_assert (current && !is_key_order ());
12499   current->deps.safe_push (dep);
12500 
12501   if (dep->is_internal () && !current->is_internal ())
12502     current->set_flag_bit<DB_REFS_INTERNAL_BIT> ();
12503 
12504   if (current->get_entity_kind () == EK_USING
12505       && DECL_IMPLICIT_TYPEDEF_P (dep->get_entity ())
12506       && TREE_CODE (TREE_TYPE (dep->get_entity ())) == ENUMERAL_TYPE)
12507     {
12508       /* CURRENT is an unwrapped using-decl and DEP's entity is an enum's
12509 	 implicit typedef.  Is CURRENT a member of the enum?  */
12510       tree c_decl = OVL_FUNCTION (current->get_entity ());
12511 
12512       if (TREE_CODE (c_decl) == CONST_DECL
12513 	  && (current->deps[0]->get_entity ()
12514 	      == CP_DECL_CONTEXT (dep->get_entity ())))
12515 	/* Make DECL depend on CURRENT.  */
12516 	dep->deps.safe_push (current);
12517     }
12518 
12519   if (dep->is_unreached ())
12520     {
12521       /* The dependency is reachable now.  */
12522       reached_unreached = true;
12523       dep->clear_flag_bit<DB_UNREACHED_BIT> ();
12524       dump (dumper::DEPEND)
12525 	&& dump ("Reaching unreached %s %C:%N", dep->entity_kind_name (),
12526 		 TREE_CODE (dep->get_entity ()), dep->get_entity ());
12527     }
12528 }
12529 
12530 depset *
12531 depset::hash::add_dependency (tree decl, entity_kind ek)
12532 {
12533   depset *dep;
12534 
12535   if (is_key_order ())
12536     {
12537       dep = find_dependency (decl);
12538       if (dep)
12539 	{
12540 	  current->deps.safe_push (dep);
12541 	  dump (dumper::MERGE)
12542 	    && dump ("Key dependency on %s %C:%N found",
12543 		     dep->entity_kind_name (), TREE_CODE (decl), decl);
12544 	}
12545       else
12546 	{
12547 	  /* It's not a mergeable decl, look for it in the original
12548 	     table.  */
12549 	  dep = chain->find_dependency (decl);
12550 	  gcc_checking_assert (dep);
12551 	}
12552     }
12553   else
12554     {
12555       dep = make_dependency (decl, ek);
12556       if (dep->get_entity_kind () != EK_REDIRECT)
12557 	add_dependency (dep);
12558     }
12559 
12560   return dep;
12561 }
12562 
12563 void
12564 depset::hash::add_namespace_context (depset *dep, tree ns)
12565 {
12566   depset *ns_dep = make_dependency (ns, depset::EK_NAMESPACE);
12567   dep->deps.safe_push (ns_dep);
12568 
12569   /* Mark it as special if imported, so we don't walk its connections when
12570      SCCing.  */
12571   if (!dep->is_binding () && ns_dep->is_import ())
12572     dep->set_special ();
12573 }
12574 
12575 struct add_binding_data
12576 {
12577   tree ns;
12578   bitmap partitions;
12579   depset *binding;
12580   depset::hash *hash;
12581   bool met_namespace;
12582 };
12583 
12584 /* Return true if we are exported, or contain something that is.  */
12585 
12586 bool
12587 depset::hash::add_binding_entity (tree decl, WMB_Flags flags, void *data_)
12588 {
12589   auto data = static_cast <add_binding_data *> (data_);
12590 
12591   if (!(TREE_CODE (decl) == NAMESPACE_DECL && !DECL_NAMESPACE_ALIAS (decl)))
12592     {
12593       tree inner = decl;
12594 
12595       if (TREE_CODE (inner) == CONST_DECL
12596 	  && TREE_CODE (DECL_CONTEXT (inner)) == ENUMERAL_TYPE)
12597 	inner = TYPE_NAME (DECL_CONTEXT (inner));
12598       else if (TREE_CODE (inner) == TEMPLATE_DECL)
12599 	inner = DECL_TEMPLATE_RESULT (inner);
12600 
12601       if (!DECL_LANG_SPECIFIC (inner) || !DECL_MODULE_PURVIEW_P (inner))
12602 	/* Ignore global module fragment entities.  */
12603 	return false;
12604 
12605       if (VAR_OR_FUNCTION_DECL_P (inner)
12606 	  && DECL_THIS_STATIC (inner))
12607 	{
12608 	  if (!header_module_p ())
12609 	    /* Ignore internal-linkage entities.  */
12610 	    return false;
12611 	}
12612 
12613       if ((TREE_CODE (decl) == VAR_DECL
12614 	   || TREE_CODE (decl) == TYPE_DECL)
12615 	  && DECL_TINFO_P (decl))
12616 	/* Ignore TINFO things.  */
12617 	return false;
12618 
12619       if (!(flags & WMB_Using) && CP_DECL_CONTEXT (decl) != data->ns)
12620 	{
12621 	  /* A using that lost its wrapper or an unscoped enum
12622 	     constant.  */
12623 	  flags = WMB_Flags (flags | WMB_Using);
12624 	  if (DECL_MODULE_EXPORT_P (TREE_CODE (decl) == CONST_DECL
12625 				    ? TYPE_NAME (TREE_TYPE (decl))
12626 				    : STRIP_TEMPLATE (decl)))
12627 	    flags = WMB_Flags (flags | WMB_Export);
12628 	}
12629 
12630       if (!data->binding)
12631 	/* No binding to check.  */;
12632       else if (flags & WMB_Using)
12633 	{
12634 	  /* Look in the binding to see if we already have this
12635 	     using.  */
12636 	  for (unsigned ix = data->binding->deps.length (); --ix;)
12637 	    {
12638 	      depset *d = data->binding->deps[ix];
12639 	      if (d->get_entity_kind () == EK_USING
12640 		  && OVL_FUNCTION (d->get_entity ()) == decl)
12641 		{
12642 		  if (!(flags & WMB_Hidden))
12643 		    d->clear_hidden_binding ();
12644 		  if (flags & WMB_Export)
12645 		    OVL_EXPORT_P (d->get_entity ()) = true;
12646 		  return bool (flags & WMB_Export);
12647 		}
12648 	    }
12649 	}
12650       else if (flags & WMB_Dups)
12651 	{
12652 	  /* Look in the binding to see if we already have this decl.  */
12653 	  for (unsigned ix = data->binding->deps.length (); --ix;)
12654 	    {
12655 	      depset *d = data->binding->deps[ix];
12656 	      if (d->get_entity () == decl)
12657 		{
12658 		  if (!(flags & WMB_Hidden))
12659 		    d->clear_hidden_binding ();
12660 		  return false;
12661 		}
12662 	    }
12663 	}
12664 
12665       /* We're adding something.  */
12666       if (!data->binding)
12667 	{
12668 	  data->binding = make_binding (data->ns, DECL_NAME (decl));
12669 	  data->hash->add_namespace_context (data->binding, data->ns);
12670 
12671 	  depset **slot = data->hash->binding_slot (data->ns,
12672 						    DECL_NAME (decl), true);
12673 	  gcc_checking_assert (!*slot);
12674 	  *slot = data->binding;
12675 	}
12676 
12677       /* Make sure nobody left a tree visited lying about.  */
12678       gcc_checking_assert (!TREE_VISITED (decl));
12679 
12680       if (flags & WMB_Using)
12681 	{
12682 	  decl = ovl_make (decl, NULL_TREE);
12683 	  if (flags & WMB_Export)
12684 	    OVL_EXPORT_P (decl) = true;
12685 	}
12686 
12687       depset *dep = data->hash->make_dependency
12688 	(decl, flags & WMB_Using ? EK_USING : EK_FOR_BINDING);
12689       if (flags & WMB_Hidden)
12690 	dep->set_hidden_binding ();
12691       data->binding->deps.safe_push (dep);
12692       /* Binding and contents are mutually dependent.  */
12693       dep->deps.safe_push (data->binding);
12694 
12695       return (flags & WMB_Using
12696 	      ? flags & WMB_Export : DECL_MODULE_EXPORT_P (decl));
12697     }
12698   else if (DECL_NAME (decl) && !data->met_namespace)
12699     {
12700       /* Namespace, walk exactly once.  */
12701       gcc_checking_assert (TREE_PUBLIC (decl));
12702       data->met_namespace = true;
12703       if (data->hash->add_namespace_entities (decl, data->partitions))
12704 	{
12705 	  /* It contains an exported thing, so it is exported.  */
12706 	  gcc_checking_assert (DECL_MODULE_PURVIEW_P (decl));
12707 	  DECL_MODULE_EXPORT_P (decl) = true;
12708 	}
12709 
12710       if (DECL_MODULE_PURVIEW_P (decl))
12711 	{
12712 	  data->hash->make_dependency (decl, depset::EK_NAMESPACE);
12713 
12714 	  return DECL_MODULE_EXPORT_P (decl);
12715 	}
12716     }
12717 
12718   return false;
12719 }
12720 
12721 /* Recursively find all the namespace bindings of NS.  Add a depset
12722    for every binding that contains an export or module-linkage entity.
12723    Add a defining depset for every such decl for which we need to write
12724    a definition.  Such defining depsets depend on the binding depset.
12725    Returns true if we contain something exported.  */
12726 
12727 bool
12728 depset::hash::add_namespace_entities (tree ns, bitmap partitions)
12729 {
12730   dump () && dump ("Looking for writables in %N", ns);
12731   dump.indent ();
12732 
12733   unsigned count = 0;
12734   add_binding_data data;
12735   data.ns = ns;
12736   data.partitions = partitions;
12737   data.hash = this;
12738 
12739   hash_table<named_decl_hash>::iterator end
12740     (DECL_NAMESPACE_BINDINGS (ns)->end ());
12741   for (hash_table<named_decl_hash>::iterator iter
12742 	 (DECL_NAMESPACE_BINDINGS (ns)->begin ()); iter != end; ++iter)
12743     {
12744       data.binding = nullptr;
12745       data.met_namespace = false;
12746       if (walk_module_binding (*iter, partitions, add_binding_entity, &data))
12747 	count++;
12748     }
12749 
12750   if (count)
12751     dump () && dump ("Found %u entries", count);
12752   dump.outdent ();
12753 
12754   return count != 0;
12755 }
12756 
12757 void
12758 depset::hash::add_partial_entities (vec<tree, va_gc> *partial_classes)
12759 {
12760   for (unsigned ix = 0; ix != partial_classes->length (); ix++)
12761     {
12762       tree inner = (*partial_classes)[ix];
12763 
12764       depset *dep = make_dependency (inner, depset::EK_DECL);
12765 
12766       if (dep->get_entity_kind () == depset::EK_REDIRECT)
12767 	/* We should have recorded the template as a partial
12768 	   specialization.  */
12769 	gcc_checking_assert (dep->deps[0]->get_entity_kind ()
12770 			     == depset::EK_PARTIAL);
12771       else
12772 	/* It was an explicit specialization, not a partial one.  */
12773 	gcc_checking_assert (dep->get_entity_kind ()
12774 			     == depset::EK_SPECIALIZATION);
12775     }
12776 }
12777 
12778 /* Add the members of imported classes that we defined in this TU.
12779    This will also include lazily created implicit member function
12780    declarations.  (All others will be definitions.)  */
12781 
12782 void
12783 depset::hash::add_class_entities (vec<tree, va_gc> *class_members)
12784 {
12785   for (unsigned ix = 0; ix != class_members->length (); ix++)
12786     {
12787       tree defn = (*class_members)[ix];
12788       depset *dep = make_dependency (defn, EK_INNER_DECL);
12789 
12790       if (dep->get_entity_kind () == EK_REDIRECT)
12791 	dep = dep->deps[0];
12792 
12793       /* Only non-instantiations need marking as members.  */
12794       if (dep->get_entity_kind () == EK_DECL)
12795 	dep->set_flag_bit <DB_IS_MEMBER_BIT> ();
12796     }
12797 }
12798 
12799 /* We add the partial & explicit specializations, and the explicit
12800    instantiations.  */
12801 
12802 static void
12803 specialization_add (bool decl_p, spec_entry *entry, void *data_)
12804 {
12805   vec<spec_entry *> *data = reinterpret_cast <vec<spec_entry *> *> (data_);
12806 
12807   if (!decl_p)
12808     {
12809       /* We exclusively use decls to locate things.  Make sure there's
12810 	 no mismatch between the two specialization tables we keep.
12811 	 pt.c optimizes instantiation lookup using a complicated
12812 	 heuristic.  We don't attempt to replicate that algorithm, but
12813 	 observe its behaviour and reproduce it upon read back.  */
12814 
12815        gcc_checking_assert (DECL_ALIAS_TEMPLATE_P (entry->tmpl)
12816 			   || TREE_CODE (entry->spec) == ENUMERAL_TYPE
12817 			   || DECL_CLASS_TEMPLATE_P (entry->tmpl));
12818 
12819        /* Only alias templates can appear in both tables (and
12820 	  if they're in the type table they must also be in the decl
12821 	  table).  */
12822        gcc_checking_assert
12823 	 (!match_mergeable_specialization (true, entry)
12824 	  == !DECL_ALIAS_TEMPLATE_P (entry->tmpl));
12825     }
12826   else if (VAR_OR_FUNCTION_DECL_P (entry->spec))
12827     gcc_checking_assert (!DECL_LOCAL_DECL_P (entry->spec));
12828 
12829   data->safe_push (entry);
12830 }
12831 
12832 /* Arbitrary stable comparison.  */
12833 
12834 static int
12835 specialization_cmp (const void *a_, const void *b_)
12836 {
12837   const spec_entry *ea = *reinterpret_cast<const spec_entry *const *> (a_);
12838   const spec_entry *eb = *reinterpret_cast<const spec_entry *const *> (b_);
12839 
12840   if (ea == eb)
12841     return 0;
12842 
12843   tree a = ea->spec;
12844   tree b = eb->spec;
12845   if (TYPE_P (a))
12846     {
12847       a = TYPE_NAME (a);
12848       b = TYPE_NAME (b);
12849     }
12850 
12851   if (a == b)
12852     /* This can happen with friend specializations.  Just order by
12853        entry address.  See note in depset_cmp.  */
12854     return ea < eb ? -1 : +1;
12855 
12856   return DECL_UID (a) < DECL_UID (b) ? -1 : +1;
12857 }
12858 
12859 /* We add all kinds of specializations.  Implicit specializations
12860    should only be streamed and walked if they are reachable from
12861    elsewhere.  Hence the UNREACHED flag.  This is making the
12862    assumption that it is cheaper to reinstantiate them on demand
12863    elsewhere, rather than stream them in when we instantiate their
12864    general template.  Also, if we do stream them, we can only do that
12865    if they are not internal (which they can become if they themselves
12866    touch an internal entity?).  */
12867 
12868 void
12869 depset::hash::add_specializations (bool decl_p)
12870 {
12871   vec<spec_entry *> data;
12872   data.create (100);
12873   walk_specializations (decl_p, specialization_add, &data);
12874   data.qsort (specialization_cmp);
12875   while (data.length ())
12876     {
12877       spec_entry *entry = data.pop ();
12878       tree spec = entry->spec;
12879       int use_tpl = 0;
12880       bool is_alias = false;
12881       bool is_friend = false;
12882 
12883       if (decl_p && DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (entry->tmpl))
12884 	/* A friend of a template.  This is keyed to the
12885 	   instantiation.  */
12886 	is_friend = true;
12887 
12888       if (!decl_p && DECL_ALIAS_TEMPLATE_P (entry->tmpl))
12889 	{
12890 	  spec = TYPE_NAME (spec);
12891 	  is_alias = true;
12892 	}
12893 
12894       if (decl_p || is_alias)
12895 	{
12896 	  if (tree ti = DECL_TEMPLATE_INFO (spec))
12897 	    {
12898 	      tree tmpl = TI_TEMPLATE (ti);
12899 
12900 	      use_tpl = DECL_USE_TEMPLATE (spec);
12901 	      if (spec == DECL_TEMPLATE_RESULT (tmpl))
12902 		{
12903 		  spec = tmpl;
12904 		  gcc_checking_assert (DECL_USE_TEMPLATE (spec) == use_tpl);
12905 		}
12906 	      else if (is_friend)
12907 		{
12908 		  if (TI_TEMPLATE (ti) != entry->tmpl
12909 		      || !template_args_equal (TI_ARGS (ti), entry->tmpl))
12910 		    goto template_friend;
12911 		}
12912 	    }
12913 	  else
12914 	    {
12915 	    template_friend:;
12916 	      gcc_checking_assert (is_friend);
12917 	      /* This is a friend of a template class, but not the one
12918 		 that generated entry->spec itself (i.e. it's an
12919 		 equivalent clone).  We do not need to record
12920 		 this.  */
12921 	      continue;
12922 	    }
12923 	}
12924       else
12925 	{
12926 	  if (TREE_CODE (spec) == ENUMERAL_TYPE)
12927 	    {
12928 	      tree ctx = DECL_CONTEXT (TYPE_NAME (spec));
12929 
12930 	      if (TYPE_P (ctx))
12931 		use_tpl = CLASSTYPE_USE_TEMPLATE (ctx);
12932 	      else
12933 		use_tpl = DECL_USE_TEMPLATE (ctx);
12934 	    }
12935 	  else
12936 	    use_tpl = CLASSTYPE_USE_TEMPLATE (spec);
12937 
12938 	  tree ti = TYPE_TEMPLATE_INFO (spec);
12939 	  tree tmpl = TI_TEMPLATE (ti);
12940 
12941 	  spec = TYPE_NAME (spec);
12942 	  if (spec == DECL_TEMPLATE_RESULT (tmpl))
12943 	    {
12944 	      spec = tmpl;
12945 	      use_tpl = DECL_USE_TEMPLATE (spec);
12946 	    }
12947 	}
12948 
12949       bool needs_reaching = false;
12950       if (use_tpl == 1)
12951 	/* Implicit instantiations only walked if we reach them.  */
12952 	needs_reaching = true;
12953       else if (!DECL_LANG_SPECIFIC (spec)
12954 	       || !DECL_MODULE_PURVIEW_P (spec))
12955 	/* Likewise, GMF explicit or partial specializations.  */
12956 	needs_reaching = true;
12957 
12958 #if false && CHECKING_P
12959       /* The instantiation isn't always on
12960 	 DECL_TEMPLATE_INSTANTIATIONS, */
12961       // FIXME: we probably need to remember this information?
12962       /* Verify the specialization is on the
12963 	 DECL_TEMPLATE_INSTANTIATIONS of the template.  */
12964       for (tree cons = DECL_TEMPLATE_INSTANTIATIONS (entry->tmpl);
12965 	   cons; cons = TREE_CHAIN (cons))
12966 	if (TREE_VALUE (cons) == entry->spec)
12967 	  {
12968 	    gcc_assert (entry->args == TREE_PURPOSE (cons));
12969 	    goto have_spec;
12970 	  }
12971       gcc_unreachable ();
12972     have_spec:;
12973 #endif
12974 
12975       /* Make sure nobody left a tree visited lying about.  */
12976       gcc_checking_assert (!TREE_VISITED (spec));
12977       depset *dep = make_dependency (spec, depset::EK_SPECIALIZATION);
12978       if (dep->is_special ())
12979 	{
12980 	  /* An already located specialization, this must be the TYPE
12981 	     corresponding to an alias_decl we found in the decl
12982 	     table.  */
12983 	  spec_entry *other = reinterpret_cast <spec_entry *> (dep->deps[0]);
12984 	  gcc_checking_assert (!decl_p && is_alias && !dep->is_type_spec ());
12985 	  gcc_checking_assert (other->tmpl == entry->tmpl
12986 			       && template_args_equal (other->args, entry->args)
12987 			       && TREE_TYPE (other->spec) == entry->spec);
12988 	  dep->set_flag_bit<DB_ALIAS_SPEC_BIT> ();
12989 	}
12990       else
12991 	{
12992 	  gcc_checking_assert (decl_p || !is_alias);
12993 	  if (dep->get_entity_kind () == depset::EK_REDIRECT)
12994 	    dep = dep->deps[0];
12995 	  else if (dep->get_entity_kind () == depset::EK_SPECIALIZATION)
12996 	    {
12997 	      dep->set_special ();
12998 	      dep->deps.safe_push (reinterpret_cast<depset *> (entry));
12999 	      if (!decl_p)
13000 		dep->set_flag_bit<DB_TYPE_SPEC_BIT> ();
13001 	    }
13002 
13003 	  if (needs_reaching)
13004 	    dep->set_flag_bit<DB_UNREACHED_BIT> ();
13005 	  if (is_friend)
13006 	    dep->set_flag_bit<DB_FRIEND_SPEC_BIT> ();
13007 	}
13008     }
13009   data.release ();
13010 }
13011 
13012 /* Add a depset into the mergeable hash.  */
13013 
13014 void
13015 depset::hash::add_mergeable (depset *mergeable)
13016 {
13017   gcc_checking_assert (is_key_order ());
13018   entity_kind ek = mergeable->get_entity_kind ();
13019   tree decl = mergeable->get_entity ();
13020   gcc_checking_assert (ek < EK_DIRECT_HWM);
13021 
13022   depset **slot = entity_slot (decl, true);
13023   gcc_checking_assert (!*slot);
13024   depset *dep = make_entity (decl, ek);
13025   *slot = dep;
13026 
13027   worklist.safe_push (dep);
13028 
13029   /* So we can locate the mergeable depset this depset refers to,
13030      mark the first dep.  */
13031   dep->set_special ();
13032   dep->deps.safe_push (mergeable);
13033 }
13034 
13035 /* Find the innermost-namespace scope of DECL, and that
13036    namespace-scope decl.  */
13037 
13038 tree
13039 find_pending_key (tree decl, tree *decl_p = nullptr)
13040 {
13041   tree ns = decl;
13042   do
13043     {
13044       decl = ns;
13045       ns = CP_DECL_CONTEXT (ns);
13046       if (TYPE_P (ns))
13047 	ns = TYPE_NAME (ns);
13048     }
13049   while (TREE_CODE (ns) != NAMESPACE_DECL);
13050 
13051   if (decl_p)
13052     *decl_p = decl;
13053 
13054   return ns;
13055 }
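
/* Illustrative note (not in the original source): for a member such as

     namespace N { struct C { void f (); }; }

   find_pending_key on C::f walks CP_DECL_CONTEXT from 'f' to the class
   'C' (via its TYPE_NAME) and stops at the enclosing namespace, so it
   returns 'N' and, if requested, sets *DECL_P to the TYPE_DECL for 'C'.  */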
13056 
13057 /* Iteratively find dependencies.  During the walk we may find more
13058    entries on the same binding that need walking.  */
13059 
13060 void
13061 depset::hash::find_dependencies (module_state *module)
13062 {
13063   trees_out walker (NULL, module, *this);
13064   vec<depset *> unreached;
13065   unreached.create (worklist.length ());
13066 
13067   for (;;)
13068     {
13069       reached_unreached = false;
13070       while (worklist.length ())
13071 	{
13072 	  depset *item = worklist.pop ();
13073 
13074 	  gcc_checking_assert (!item->is_binding ());
13075 	  if (item->is_unreached ())
13076 	    unreached.quick_push (item);
13077 	  else
13078 	    {
13079 	      current = item;
13080 	      tree decl = current->get_entity ();
13081 	      dump (is_key_order () ? dumper::MERGE : dumper::DEPEND)
13082 		&& dump ("Dependencies of %s %C:%N",
13083 			 is_key_order () ? "key-order"
13084 			 : current->entity_kind_name (), TREE_CODE (decl), decl);
13085 	      dump.indent ();
13086 	      walker.begin ();
13087 	      if (current->get_entity_kind () == EK_USING)
13088 		walker.tree_node (OVL_FUNCTION (decl));
13089 	      else if (TREE_VISITED (decl))
13090 		/* A global tree.  */;
13091 	      else if (item->get_entity_kind () == EK_NAMESPACE)
13092 		add_namespace_context (current, CP_DECL_CONTEXT (decl));
13093 	      else
13094 		{
13095 		  walker.mark_declaration (decl, current->has_defn ());
13096 
13097 		  if (!walker.is_key_order ()
13098 		      && (item->get_entity_kind () == EK_SPECIALIZATION
13099 			  || item->get_entity_kind () == EK_PARTIAL
13100 			  || (item->get_entity_kind () == EK_DECL
13101 			      && item->is_member ())))
13102 		    {
13103 		      tree ns = find_pending_key (decl, nullptr);
13104 		      add_namespace_context (item, ns);
13105 		    }
13106 
13107 		  // FIXME: Perhaps p1815 makes this redundant? Or at
13108 		  // least simplifies it.  Voldemort types are only
13109 		  // ever emittable when the containing (inline) function
13110 		  // definition is emitted?
13111 		  /* Turn the Sneakoscope on when depending the decl.  */
13112 		  sneakoscope = true;
13113 		  walker.decl_value (decl, current);
13114 		  sneakoscope = false;
13115 		  if (current->has_defn ())
13116 		    walker.write_definition (decl);
13117 		}
13118 	      walker.end ();
13119 
13120 	      if (!walker.is_key_order ()
13121 		  && TREE_CODE (decl) == TEMPLATE_DECL
13122 		  && !DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (decl))
13123 		/* Mark all the explicit & partial specializations as
13124 		   reachable.  */
13125 		for (tree cons = DECL_TEMPLATE_INSTANTIATIONS (decl);
13126 		     cons; cons = TREE_CHAIN (cons))
13127 		  {
13128 		    tree spec = TREE_VALUE (cons);
13129 		    if (TYPE_P (spec))
13130 		      spec = TYPE_NAME (spec);
13131 		    int use_tpl;
13132 		    node_template_info (spec, use_tpl);
13133 		    if (use_tpl & 2)
13134 		      {
13135 			depset *spec_dep = find_dependency (spec);
13136 			if (spec_dep->get_entity_kind () == EK_REDIRECT)
13137 			  spec_dep = spec_dep->deps[0];
13138 			if (spec_dep->is_unreached ())
13139 			  {
13140 			    reached_unreached = true;
13141 			    spec_dep->clear_flag_bit<DB_UNREACHED_BIT> ();
13142 			    dump (dumper::DEPEND)
13143 			      && dump ("Reaching unreached specialization"
13144 				       " %C:%N", TREE_CODE (spec), spec);
13145 			  }
13146 		      }
13147 		  }
13148 
13149 	      dump.outdent ();
13150 	      current = NULL;
13151 	    }
13152 	}
13153 
13154       if (!reached_unreached)
13155 	break;
13156 
13157       /* It's possible that we reached the unreached before we
13158 	 processed it in the above loop, so we'll be doing this an
13159 	 extra time.  However, to avoid that we have to do some
13160 	 bit shuffling that also involves a scan of the list.
13161 	 Swings & roundabouts I guess.  */
13162       std::swap (worklist, unreached);
13163     }
13164 
13165   unreached.release ();
13166 }
13167 
13168 /* Compare two entries of a single binding.  TYPE_DECL before
13169    non-exported before exported.  */
13170 
13171 static int
13172 binding_cmp (const void *a_, const void *b_)
13173 {
13174   depset *a = *(depset *const *)a_;
13175   depset *b = *(depset *const *)b_;
13176 
13177   tree a_ent = a->get_entity ();
13178   tree b_ent = b->get_entity ();
13179   gcc_checking_assert (a_ent != b_ent
13180 		       && !a->is_binding ()
13181 		       && !b->is_binding ());
13182 
13183   /* Implicit typedefs come first.  */
13184   bool a_implicit = DECL_IMPLICIT_TYPEDEF_P (a_ent);
13185   bool b_implicit = DECL_IMPLICIT_TYPEDEF_P (b_ent);
13186   if (a_implicit || b_implicit)
13187     {
13188       /* A binding with two implicit type decls?  That's unpossible!  */
13189       gcc_checking_assert (!(a_implicit && b_implicit));
13190       return a_implicit ? -1 : +1;  /* Implicit first.  */
13191     }
13192 
13193   /* Hidden before non-hidden.  */
13194   bool a_hidden = a->is_hidden ();
13195   bool b_hidden = b->is_hidden ();
13196   if (a_hidden != b_hidden)
13197     return a_hidden ? -1 : +1;
13198 
13199   bool a_using = a->get_entity_kind () == depset::EK_USING;
13200   bool a_export;
13201   if (a_using)
13202     {
13203       a_export = OVL_EXPORT_P (a_ent);
13204       a_ent = OVL_FUNCTION (a_ent);
13205     }
13206   else
13207     a_export = DECL_MODULE_EXPORT_P (TREE_CODE (a_ent) == CONST_DECL
13208 				     ? TYPE_NAME (TREE_TYPE (a_ent))
13209 				     : STRIP_TEMPLATE (a_ent));
13210 
13211   bool b_using = b->get_entity_kind () == depset::EK_USING;
13212   bool b_export;
13213   if (b_using)
13214     {
13215       b_export = OVL_EXPORT_P (b_ent);
13216       b_ent = OVL_FUNCTION (b_ent);
13217     }
13218   else
13219     b_export = DECL_MODULE_EXPORT_P (TREE_CODE (b_ent) == CONST_DECL
13220 				     ? TYPE_NAME (TREE_TYPE (b_ent))
13221 				     : STRIP_TEMPLATE (b_ent));
13222 
13223   /* Non-exports before exports.  */
13224   if (a_export != b_export)
13225     return a_export ? +1 : -1;
13226 
13227   /* At this point we don't care, but want a stable sort.  */
13228 
13229   if (a_using != b_using)
13230     /* using first.  */
13231     return a_using? -1 : +1;
13232 
13233   return DECL_UID (a_ent) < DECL_UID (b_ent) ? -1 : +1;
13234 }
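
/* Illustrative note (not in the original source): for a binding that
   holds an implicit class typedef, a hidden friend, a plain declaration
   and an exported declaration, the comparator above yields the order
     implicit typedef, hidden friend, non-exported, exported,
   with usings preceding non-usings and DECL_UID breaking remaining
   ties.  */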
13235 
13236 /* Sort the bindings, issue errors about bad internal refs.  */
13237 
13238 bool
13239 depset::hash::finalize_dependencies ()
13240 {
13241   bool ok = true;
13242   depset::hash::iterator end (this->end ());
13243   for (depset::hash::iterator iter (begin ()); iter != end; ++iter)
13244     {
13245       depset *dep = *iter;
13246       if (dep->is_binding ())
13247 	{
13248 	  /* Keep the containing namespace dep first.  */
13249 	  gcc_checking_assert (dep->deps.length () > 1
13250 			       && (dep->deps[0]->get_entity_kind ()
13251 				   == EK_NAMESPACE)
13252 			       && (dep->deps[0]->get_entity ()
13253 				   == dep->get_entity ()));
13254 	  if (dep->deps.length () > 2)
13255 	    gcc_qsort (&dep->deps[1], dep->deps.length () - 1,
13256 		       sizeof (dep->deps[1]), binding_cmp);
13257 	}
13258       else if (dep->refs_internal ())
13259 	{
13260 	  for (unsigned ix = dep->deps.length (); ix--;)
13261 	    {
13262 	      depset *rdep = dep->deps[ix];
13263 	      if (rdep->is_internal ())
13264 		{
13265 		  // FIXME:QOI Better location information?  We're
13266 		  // losing, so it doesn't matter about efficiency
13267 		  tree decl = dep->get_entity ();
13268 		  error_at (DECL_SOURCE_LOCATION (decl),
13269 			    "%q#D references internal linkage entity %q#D",
13270 			    decl, rdep->get_entity ());
13271 		  break;
13272 		}
13273 	    }
13274 	  ok = false;
13275 	}
13276     }
13277 
13278   return ok;
13279 }
13280 
13281 /* Core of TARJAN's algorithm to find Strongly Connected Components
13282    within a graph.  See https://en.wikipedia.org/wiki/
13283    Tarjan%27s_strongly_connected_components_algorithm for details.
13284 
13285    We use depset::section as lowlink.  Completed nodes have
13286    depset::cluster containing the cluster number, with the top
13287    bit set.
13288 
13289    A useful property is that the output vector is a reverse
13290    topological sort of the resulting DAG.  In our case that means
13291    dependent SCCs are found before their dependers.  We make use of
13292    that property.  */
13293 
13294 void
13295 depset::tarjan::connect (depset *v)
13296 {
13297   gcc_checking_assert (v->is_binding ()
13298 		       || !(v->is_unreached () || v->is_import ()));
13299 
13300   v->cluster = v->section = ++index;
13301   stack.safe_push (v);
13302 
13303   /* Walk all our dependencies, ignoring a first marked slot.  */
13304   for (unsigned ix = v->is_special (); ix != v->deps.length (); ix++)
13305     {
13306       depset *dep = v->deps[ix];
13307 
13308       if (dep->is_binding () || !dep->is_import ())
13309 	{
13310 	  unsigned lwm = dep->cluster;
13311 
13312 	  if (!dep->cluster)
13313 	    {
13314 	      /* A new node.  Connect it.  */
13315 	      connect (dep);
13316 	      lwm = dep->section;
13317 	    }
13318 
13319 	  if (dep->section && v->section > lwm)
13320 	    v->section = lwm;
13321 	}
13322     }
13323 
13324   if (v->section == v->cluster)
13325     {
13326       /* Root of a new SCC.  Push all the members onto the result list. */
13327       unsigned num = v->cluster;
13328       depset *p;
13329       do
13330 	{
13331 	  p = stack.pop ();
13332 	  p->cluster = num;
13333 	  p->section = 0;
13334 	  result.quick_push (p);
13335 	}
13336       while (p != v);
13337     }
13338 }
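
/* Illustrative, self-contained sketch (not part of the original source):
   the same algorithm over a plain adjacency list, with hypothetical
   names, to show the shape of the recursion above without the depset
   machinery.  As in connect (), a node roots a new SCC when its lowlink
   equals its own index, and the SCC's members are popped off an explicit
   stack.  */
#if 0
#include <vector>

struct toy_tarjan
{
  const std::vector<std::vector<int>> &adj;   /* Adjacency list.  */
  std::vector<int> index, low, comp;	      /* 0 == unvisited.  */
  std::vector<int> stk;
  std::vector<char> on_stack;
  int counter = 0, sccs = 0;

  toy_tarjan (const std::vector<std::vector<int>> &g)
    : adj (g), index (g.size ()), low (g.size ()), comp (g.size (), -1),
      on_stack (g.size ()) {}

  void connect (int v)
  {
    index[v] = low[v] = ++counter;
    stk.push_back (v);
    on_stack[v] = true;

    for (int w : adj[v])
      if (!index[w])
	{
	  /* Tree edge: recurse, then propagate the child's lowlink.  */
	  connect (w);
	  if (low[w] < low[v])
	    low[v] = low[w];
	}
      else if (on_stack[w] && index[w] < low[v])
	/* Edge back into the current stack.  */
	low[v] = index[w];

    if (low[v] == index[v])
      {
	/* Root of an SCC: pop its members.  */
	int w;
	do
	  {
	    w = stk.back ();
	    stk.pop_back ();
	    on_stack[w] = false;
	    comp[w] = sccs;
	  }
	while (w != v);
	sccs++;
      }
  }
};
#endif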
13339 
13340 /* Compare two depsets.  The specific ordering is unimportant, we're
13341    just trying to get consistency.  */
13342 
13343 static int
13344 depset_cmp (const void *a_, const void *b_)
13345 {
13346   depset *a = *(depset *const *)a_;
13347   depset *b = *(depset *const *)b_;
13348 
13349   depset::entity_kind a_kind = a->get_entity_kind ();
13350   depset::entity_kind b_kind = b->get_entity_kind ();
13351 
13352   if  (a_kind != b_kind)
13353     /* Different entity kinds, order by that.  */
13354     return a_kind < b_kind ? -1 : +1;
13355 
13356   tree a_decl = a->get_entity ();
13357   tree b_decl = b->get_entity ();
13358   if (a_kind == depset::EK_USING)
13359     {
13360       /* If one is a using, the other must be too.  */
13361       a_decl = OVL_FUNCTION (a_decl);
13362       b_decl = OVL_FUNCTION (b_decl);
13363     }
13364 
13365   if (a_decl != b_decl)
13366     /* Different entities, order by their UID.  */
13367     return DECL_UID (a_decl) < DECL_UID (b_decl) ? -1 : +1;
13368 
13369   if (a_kind == depset::EK_BINDING)
13370     {
13371       /* Both are bindings.  Order by identifier hash.  */
13372       gcc_checking_assert (a->get_name () != b->get_name ());
13373       return (IDENTIFIER_HASH_VALUE (a->get_name ())
13374 	      < IDENTIFIER_HASH_VALUE (b->get_name ())
13375 	      ? -1 : +1);
13376     }
13377 
13378   /* They are the same decl.  This can happen with two using decls
13379      pointing to the same target.  The best we can aim for is
13380      consistently telling qsort how to order them.  Hopefully we'll
13381      never have to debug a case that depends on this.  Oh, who am I
13382      kidding?  Good luck.  */
13383   gcc_checking_assert (a_kind == depset::EK_USING);
13384 
13385   /* Order by depset address.  Not the best, but it is something.  */
13386   return a < b ? -1 : +1;
13387 }
13388 
13389 /* Sort the clusters in SCC such that those that depend on one another
13390    are placed later.   */
13391 
13392 // FIXME: I am not convinced this is needed and, if needed,
13393 // sufficient.  We emit the decls in this order but that emission
13394 // could walk into later decls (from the body of the decl, or default
13395 // arg-like things).  Why doesn't that walk do the right thing?  And
13396 // if it DTRT why do we need to sort here -- won't things naturally
13397 // work?  I think part of the issue is that when we're going to refer
13398 // to an entity by name, and that entity is in the same cluster as us,
13399 // we need to actually walk that entity, if we've not already walked
13400 // it.
13401 static void
13402 sort_cluster (depset::hash *original, depset *scc[], unsigned size)
13403 {
13404   depset::hash table (size, original);
13405 
13406   dump.indent ();
13407 
13408   /* Place bindings last, usings before that.  It's not strictly
13409      necessary, but it does make things neater.  Says Mr OCD.  */
13410   unsigned bind_lwm = size;
13411   unsigned use_lwm = size;
13412   for (unsigned ix = 0; ix != use_lwm;)
13413     {
13414       depset *dep = scc[ix];
13415       switch (dep->get_entity_kind ())
13416 	{
13417 	case depset::EK_BINDING:
13418 	  /* Move to end.  No increment.  Notice this could be moving
13419 	     a using decl, which we'll then move again.  */
13420 	  if (--bind_lwm != ix)
13421 	    {
13422 	      scc[ix] = scc[bind_lwm];
13423 	      scc[bind_lwm] = dep;
13424 	    }
13425 	  if (use_lwm > bind_lwm)
13426 	    {
13427 	      use_lwm--;
13428 	      break;
13429 	    }
13430 	  /* We must have copied a using, so move it too.  */
13431 	  dep = scc[ix];
13432 	  gcc_checking_assert (dep->get_entity_kind () == depset::EK_USING);
13433 	  /* FALLTHROUGH  */
13434 
13435 	case depset::EK_USING:
13436 	  if (--use_lwm != ix)
13437 	    {
13438 	      scc[ix] = scc[use_lwm];
13439 	      scc[use_lwm] = dep;
13440 	    }
13441 	  break;
13442 
13443 	case depset::EK_DECL:
13444 	case depset::EK_SPECIALIZATION:
13445 	case depset::EK_PARTIAL:
13446 	  table.add_mergeable (dep);
13447 	  ix++;
13448 	  break;
13449 
13450 	default:
13451 	  gcc_unreachable ();
13452 	}
13453     }
13454 
13455   gcc_checking_assert (use_lwm <= bind_lwm);
13456   dump (dumper::MERGE) && dump ("Ordering %u/%u depsets", use_lwm, size);
13457 
13458   table.find_dependencies (nullptr);
13459 
13460   vec<depset *> order = table.connect ();
13461   gcc_checking_assert (order.length () == use_lwm);
13462 
13463   /* Now rewrite entries [0,lwm), in the dependency order we
13464      discovered.  Usually each entity is in its own cluster.  Rarely,
13465      we can get multi-entity clusters, in which case all but one must
13466      only be reached from within the cluster.  This happens for
13467      something like:
13468 
13469      template<typename T>
13470      auto Foo (const T &arg) -> TPL<decltype (arg)>;
13471 
13472      The instantiation of TPL will be in the specialization table, and
13473      refer to Foo via arg.  But we can only get to that specialization
13474      from Foo's declaration, so we only need to treat Foo as mergeable
13475      (We'll do structural comparison of TPL<decltype (arg)>).
13476 
13477      Finding the single cluster entry dep is very tricky and
13478      expensive.  Let's just not do that.  It's harmless in this case
13479      anyway. */
13480   unsigned pos = 0;
13481   unsigned cluster = ~0u;
13482   for (unsigned ix = 0; ix != order.length (); ix++)
13483     {
13484       gcc_checking_assert (order[ix]->is_special ());
13485       depset *dep = order[ix]->deps[0];
13486       scc[pos++] = dep;
13487       dump (dumper::MERGE)
13488 	&& dump ("Mergeable %u is %N%s", ix, dep->get_entity (),
13489 		 order[ix]->cluster == cluster ? " (tight)" : "");
13490       cluster = order[ix]->cluster;
13491     }
13492 
13493   gcc_checking_assert (pos == use_lwm);
13494 
13495   order.release ();
13496   dump (dumper::MERGE) && dump ("Ordered %u keys", pos);
13497   dump.outdent ();
13498 }
13499 
13500 /* Reduce graph to SCCS clusters.  SCCS will be populated with the
13501    depsets in dependency order.  Each depset's CLUSTER field contains
13502    its cluster number.  Each SCC has a unique cluster number, and its
13503    members are contiguous in SCCS.  Cluster numbers are otherwise arbitrary.  */
13504 
13505 vec<depset *>
13506 depset::hash::connect ()
13507 {
13508   tarjan connector (size ());
13509   vec<depset *> deps;
13510   deps.create (size ());
13511   iterator end (this->end ());
13512   for (iterator iter (begin ()); iter != end; ++iter)
13513     {
13514       depset *item = *iter;
13515 
13516       entity_kind kind = item->get_entity_kind ();
13517       if (kind == EK_BINDING
13518 	  || !(kind == EK_REDIRECT
13519 	       || item->is_unreached ()
13520 	       || item->is_import ()))
13521 	deps.quick_push (item);
13522     }
13523 
13524   /* Iteration over the hash table is in an unspecified order.  While
13525      that has advantages, it causes two problems.  Firstly, repeatable
13526      builds become tricky.  Secondly, it is hard to create testcases
13527      that check dependencies are correct, by making sure a bad ordering
13528      would happen if they were wrong.  */
13529   deps.qsort (depset_cmp);
13530 
13531   while (deps.length ())
13532     {
13533       depset *v = deps.pop ();
13534       dump (dumper::CLUSTER) &&
13535 	(v->is_binding ()
13536 	 ? dump ("Connecting binding %P", v->get_entity (), v->get_name ())
13537 	 : dump ("Connecting %s %s %C:%N",
13538 		 is_key_order () ? "key-order"
13539 		 : !v->has_defn () ? "declaration" : "definition",
13540 		 v->entity_kind_name (), TREE_CODE (v->get_entity ()),
13541 		 v->get_entity ()));
13542       if (!v->cluster)
13543 	connector.connect (v);
13544     }
13545 
13546   deps.release ();
13547   return connector.result;
13548 }
13549 
13550 /* Initialize location spans.  */
13551 
13552 void
13553 loc_spans::init (const line_maps *lmaps, const line_map_ordinary *map)
13554 {
13555   gcc_checking_assert (!init_p ());
13556   spans = new vec<span> ();
13557   spans->reserve (20);
13558 
13559   span interval;
13560   interval.ordinary.first = 0;
13561   interval.macro.second = MAX_LOCATION_T + 1;
13562   interval.ordinary_delta = interval.macro_delta = 0;
13563 
13564   /* A span for reserved fixed locs.  */
13565   interval.ordinary.second
13566     = MAP_START_LOCATION (LINEMAPS_ORDINARY_MAP_AT (line_table, 0));
13567   interval.macro.first = interval.macro.second;
13568   dump (dumper::LOCATION)
13569     && dump ("Fixed span %u ordinary:[%u,%u) macro:[%u,%u)", spans->length (),
13570 	     interval.ordinary.first, interval.ordinary.second,
13571 	     interval.macro.first, interval.macro.second);
13572   spans->quick_push (interval);
13573 
13574   /* A span for command line & forced headers.  */
13575   interval.ordinary.first = interval.ordinary.second;
13576   interval.macro.second = interval.macro.first;
13577   if (map)
13578     {
13579       interval.ordinary.second = map->start_location;
13580       interval.macro.first = LINEMAPS_MACRO_LOWEST_LOCATION (lmaps);
13581     }
13582   dump (dumper::LOCATION)
13583     && dump ("Pre span %u ordinary:[%u,%u) macro:[%u,%u)", spans->length (),
13584 	     interval.ordinary.first, interval.ordinary.second,
13585 	     interval.macro.first, interval.macro.second);
13586   spans->quick_push (interval);
13587 
13588   /* Start an interval for the main file.  */
13589   interval.ordinary.first = interval.ordinary.second;
13590   interval.macro.second = interval.macro.first;
13591   dump (dumper::LOCATION)
13592     && dump ("Main span %u ordinary:[%u,*) macro:[*,%u)", spans->length (),
13593 	     interval.ordinary.first, interval.macro.second);
13594   spans->quick_push (interval);
13595 }
13596 
13597 /* Reopen the span, if we want the about-to-be-inserted set of maps to
13598    be propagated in our own location table.  I.e. we are the primary
13599    interface and we're importing a partition.  */
13600 
13601 bool
13602 loc_spans::maybe_propagate (module_state *import, location_t hwm)
13603 {
13604   bool opened = (module_interface_p () && !module_partition_p ()
13605 		 && import->is_partition ());
13606   if (opened)
13607     open (hwm);
13608   return opened;
13609 }
13610 
13611 /* Open a new linemap interval.  The just-created ordinary map is the
13612    first map of the interval.  */
13613 
13614 void
13615 loc_spans::open (location_t hwm)
13616 {
13617   span interval;
13618   interval.ordinary.first = interval.ordinary.second = hwm;
13619   interval.macro.first = interval.macro.second
13620     = LINEMAPS_MACRO_LOWEST_LOCATION (line_table);
13621   interval.ordinary_delta = interval.macro_delta = 0;
13622   dump (dumper::LOCATION)
13623     && dump ("Opening span %u ordinary:[%u,... macro:...,%u)",
13624 	     spans->length (), interval.ordinary.first,
13625 	     interval.macro.second);
13626   if (spans->length ())
13627     {
13628       /* No overlapping!  */
13629       auto &last = spans->last ();
13630       gcc_checking_assert (interval.ordinary.first >= last.ordinary.second);
13631       gcc_checking_assert (interval.macro.second <= last.macro.first);
13632     }
13633   spans->safe_push (interval);
13634 }
13635 
13636 /* Close out the current linemap interval.  The last maps are within
13637    the interval.  */
13638 
13639 void
13640 loc_spans::close ()
13641 {
13642   span &interval = spans->last ();
13643 
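  /* Illustrative note (not in the original source): the computation
     below advances past the highest ordinary location to the next
     boundary that is a multiple of 1 << default_range_bits, the granule
     in which ordinary locations (with their range-encoding low bits) are
     handed out, so the span ends on a granule boundary.  */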
13644   interval.ordinary.second
13645     = ((line_table->highest_location + (1 << line_table->default_range_bits))
13646        & ~((1u << line_table->default_range_bits) - 1));
13647   interval.macro.first = LINEMAPS_MACRO_LOWEST_LOCATION (line_table);
13648   dump (dumper::LOCATION)
13649     && dump ("Closing span %u ordinary:[%u,%u) macro:[%u,%u)",
13650 	     spans->length () - 1,
13651 	     interval.ordinary.first,interval.ordinary.second,
13652 	     interval.macro.first, interval.macro.second);
13653 }
13654 
13655 /* Given an ordinary location LOC, return the lmap_interval it resides
13656    in.  NULL if it is not in an interval.  */
13657 
13658 const loc_spans::span *
13659 loc_spans::ordinary (location_t loc)
13660 {
13661   unsigned len = spans->length ();
13662   unsigned pos = 0;
13663   while (len)
13664     {
13665       unsigned half = len / 2;
13666       const span &probe = (*spans)[pos + half];
13667       if (loc < probe.ordinary.first)
13668 	len = half;
13669       else if (loc < probe.ordinary.second)
13670 	return &probe;
13671       else
13672 	{
13673 	  pos += half + 1;
13674 	  len = len - (half + 1);
13675 	}
13676     }
13677   return NULL;
13678 }
13679 
13680 /* Likewise, given a macro location LOC, return the lmap interval it
13681    resides in.   */
13682 
13683 const loc_spans::span *
13684 loc_spans::macro (location_t loc)
13685 {
13686   unsigned len = spans->length ();
13687   unsigned pos = 0;
13688   while (len)
13689     {
13690       unsigned half = len / 2;
13691       const span &probe = (*spans)[pos + half];
13692       if (loc >= probe.macro.second)
13693 	len = half;
13694       else if (loc >= probe.macro.first)
13695 	return &probe;
13696       else
13697 	{
13698 	  pos += half + 1;
13699 	  len = len - (half + 1);
13700 	}
13701     }
13702   return NULL;
13703 }
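
/* Illustrative note (not in the original source): both lookups above are
   plain binary searches over the sorted span vector.  Ordinary locations
   grow upwards, so later spans cover higher ordinary ranges; macro
   locations are allocated downwards from the top of the location space,
   so later spans cover *lower* macro ranges, which is why the
   comparisons in macro () are reversed relative to ordinary ().  */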
13704 
13705 /* Return the ordinary location closest to FROM.  */
13706 
13707 static location_t
13708 ordinary_loc_of (line_maps *lmaps, location_t from)
13709 {
13710   while (!IS_ORDINARY_LOC (from))
13711     {
13712       if (IS_ADHOC_LOC (from))
13713 	from = get_location_from_adhoc_loc (lmaps, from);
13714       if (from >= LINEMAPS_MACRO_LOWEST_LOCATION (lmaps))
13715 	{
13716 	  /* Find the ordinary location nearest FROM.  */
13717 	  const line_map *map = linemap_lookup (lmaps, from);
13718 	  const line_map_macro *mac_map = linemap_check_macro (map);
13719 	  from = MACRO_MAP_EXPANSION_POINT_LOCATION (mac_map);
13720 	}
13721     }
13722   return from;
13723 }
13724 
13725 static module_state **
13726 get_module_slot (tree name, module_state *parent, bool partition, bool insert)
13727 {
13728   module_state_hash::compare_type ct (name, uintptr_t (parent) | partition);
13729   hashval_t hv = module_state_hash::hash (ct);
13730 
13731   return modules_hash->find_slot_with_hash (ct, hv, insert ? INSERT : NO_INSERT);
13732 }
13733 
13734 static module_state *
13735 get_primary (module_state *parent)
13736 {
13737   while (parent->is_partition ())
13738     parent = parent->parent;
13739 
13740   if (!parent->name)
13741     // Implementation unit has null name
13742     parent = parent->parent;
13743 
13744   return parent;
13745 }
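
/* Illustrative note (not in the original source): for a partition such
   as foo:bits this walks up to the named module foo; for an
   implementation unit, whose module_state has a null name, it steps up
   once more to the primary interface it implements.  */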
13746 
13747 /* Find or create module NAME & PARENT in the hash table.  */
13748 
13749 module_state *
13750 get_module (tree name, module_state *parent, bool partition)
13751 {
13752   if (partition)
13753     {
13754       if (!parent)
13755 	parent = get_primary ((*modules)[0]);
13756 
13757       if (!parent->is_partition () && !parent->flatname)
13758 	parent->set_flatname ();
13759     }
13760 
13761   module_state **slot = get_module_slot (name, parent, partition, true);
13762   module_state *state = *slot;
13763   if (!state)
13764     {
13765       state = (new (ggc_alloc<module_state> ())
13766 	       module_state (name, parent, partition));
13767       *slot = state;
13768     }
13769   return state;
13770 }
13771 
13772 /* Process string name PTR into a module_state.  */
13773 
13774 static module_state *
13775 get_module (const char *ptr)
13776 {
13777   if (ptr[0] == '.' ? IS_DIR_SEPARATOR (ptr[1]) : IS_ABSOLUTE_PATH (ptr))
13778     /* A header name.  */
13779     return get_module (build_string (strlen (ptr), ptr));
13780 
13781   bool partition = false;
13782   module_state *mod = NULL;
13783 
13784   for (const char *probe = ptr;; probe++)
13785     if (!*probe || *probe == '.' || *probe == ':')
13786       {
13787 	if (probe == ptr)
13788 	  return NULL;
13789 
13790 	mod = get_module (get_identifier_with_length (ptr, probe - ptr),
13791 			  mod, partition);
13792 	ptr = probe;
13793 	if (*ptr == ':')
13794 	  {
13795 	    if (partition)
13796 	      return NULL;
13797 	    partition = true;
13798 	  }
13799 
13800 	if (!*ptr++)
13801 	  break;
13802       }
13803     else if (!(ISALPHA (*probe) || *probe == '_'
13804 	       || (probe != ptr && ISDIGIT (*probe))))
13805       return NULL;
13806 
13807   return mod;
13808 }
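
/* Illustrative note (not in the original source): the parser above
   accepts dotted names with an optional partition, e.g. "foo", "foo.bar"
   and "foo.bar:baz" (partition baz of module foo.bar), rejecting a
   second ':', an empty component, or a component starting with a digit.
   A string that looks like a path ("./x.h" or an absolute path) is
   treated as a header-unit name instead.  */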
13809 
13810 /* Create a new mapper connecting to OPTION.  */
13811 
13812 module_client *
13813 make_mapper (location_t loc)
13814 {
13815   timevar_start (TV_MODULE_MAPPER);
13816   const char *option = module_mapper_name;
13817   if (!option)
13818     option = getenv ("CXX_MODULE_MAPPER");
13819 
13820   mapper = module_client::open_module_client
13821     (loc, option, &set_cmi_repo,
13822      (save_decoded_options[0].opt_index == OPT_SPECIAL_program_name)
13823      && save_decoded_options[0].arg != progname
13824      ? save_decoded_options[0].arg : nullptr);
13825 
13826   timevar_stop (TV_MODULE_MAPPER);
13827 
13828   return mapper;
13829 }
13830 
13831 static unsigned lazy_snum;
13832 
13833 static bool
13834 recursive_lazy (unsigned snum = ~0u)
13835 {
13836   if (lazy_snum)
13837     {
13838       error_at (input_location, "recursive lazy load");
13839       return true;
13840     }
13841 
13842   lazy_snum = snum;
13843   return false;
13844 }
13845 
13846 /* If THIS is the current purview, issue an import error and return false.  */
13847 
13848 bool
13849 module_state::check_not_purview (location_t from)
13850 {
13851   module_state *imp = (*modules)[0];
13852   if (imp && !imp->name)
13853     imp = imp->parent;
13854   if (imp == this)
13855     {
13856       /* Cannot import the current module.  */
13857       error_at (from, "cannot import module in its own purview");
13858       inform (loc, "module %qs declared here", get_flatname ());
13859       return false;
13860     }
13861   return true;
13862 }
13863 
13864 /* Module name substitutions.  */
13865 static vec<module_state *,va_heap> substs;
13866 
13867 void
13868 module_state::mangle (bool include_partition)
13869 {
13870   if (subst)
13871     mangle_module_substitution (subst - 1);
13872   else
13873     {
13874       if (parent)
13875 	parent->mangle (include_partition);
13876       if (include_partition || !is_partition ())
13877 	{
13878 	  char p = 0;
13879 	  // Partitions are significant for global initializer functions
13880 	  if (is_partition () && !parent->is_partition ())
13881 	    p = 'P';
13882 	  substs.safe_push (this);
13883 	  subst = substs.length ();
13884 	  mangle_identifier (p, name);
13885 	}
13886     }
13887 }
13888 
13889 void
13890 mangle_module (int mod, bool include_partition)
13891 {
13892   module_state *imp = (*modules)[mod];
13893 
13894   if (!imp->name)
13895     /* Set when importing the primary module interface.  */
13896     imp = imp->parent;
13897 
13898   imp->mangle (include_partition);
13899 }
13900 
13901 /* Clean up substitutions.  */
13902 void
13903 mangle_module_fini ()
13904 {
13905   while (substs.length ())
13906     substs.pop ()->subst = 0;
13907 }
13908 
13909 /* Announce WHAT about the module.  */
13910 
13911 void
13912 module_state::announce (const char *what) const
13913 {
13914   if (noisy_p ())
13915     {
13916       fprintf (stderr, " %s:%s", what, get_flatname ());
13917       fflush (stderr);
13918     }
13919 }
13920 
13921 /* A human-readable README section.  The contents of this section do
13922    not contribute to the CRC, so the contents can change per
13923    compilation.  That allows us to embed CWD, hostname, build time and
13924    what not.  It is a STRTAB that may be extracted with:
13925      readelf -pgnu.c++.README $(module).gcm */
13926 
13927 void
13928 module_state::write_readme (elf_out *to, cpp_reader *reader,
13929 			    const char *dialect, unsigned extensions)
13930 {
13931   bytes_out readme (to);
13932 
13933   readme.begin (false);
13934 
13935   readme.printf ("GNU C++ %smodule%s%s",
13936 		 is_header () ? "header " : is_partition () ? "" : "primary ",
13937 		 is_header () ? ""
13938 		 : is_interface () ? " interface" : " implementation",
13939 		 is_partition () ? " partition" : "");
13940 
13941   /* Compiler's version.  */
13942   readme.printf ("compiler: %s", version_string);
13943 
13944   /* Module format version.  */
13945   verstr_t string;
13946   version2string (MODULE_VERSION, string);
13947   readme.printf ("version: %s", string);
13948 
13949   /* Module information.  */
13950   readme.printf ("module: %s", get_flatname ());
13951   readme.printf ("source: %s", main_input_filename);
13952   readme.printf ("dialect: %s", dialect);
13953   if (extensions)
13954     readme.printf ("extensions: %s",
13955 		   extensions & SE_OPENMP ? "-fopenmp" : "");
13956 
13957   /* The following fields could be expected to change between
13958      otherwise identical compilations.  Consider a distributed build
13959      system.  We should have a way of overriding that.  */
13960   if (char *cwd = getcwd (NULL, 0))
13961     {
13962       readme.printf ("cwd: %s", cwd);
13963       free (cwd);
13964     }
13965   readme.printf ("repository: %s", cmi_repo ? cmi_repo : ".");
13966 #if NETWORKING
13967   {
13968     char hostname[64];
13969     if (!gethostname (hostname, sizeof (hostname)))
13970       readme.printf ("host: %s", hostname);
13971   }
13972 #endif
13973   {
13974     /* This of course will change!  */
13975     time_t stampy;
13976     auto kind = cpp_get_date (reader, &stampy);
13977     if (kind != CPP_time_kind::UNKNOWN)
13978       {
13979 	struct tm *time;
13980 
13981 	time = gmtime (&stampy);
13982 	readme.print_time ("build", time, "UTC");
13983 
13984 	if (kind == CPP_time_kind::DYNAMIC)
13985 	  {
13986 	    time = localtime (&stampy);
13987 	    readme.print_time ("local", time,
13988 #if defined (__USE_MISC) || defined (__USE_BSD) /* Is there a better way?  */
13989 			       time->tm_zone
13990 #else
13991 			       ""
13992 #endif
13993 			       );
13994 	  }
13995       }
13996   }
13997 
13998   /* Its direct imports.  */
13999   for (unsigned ix = 1; ix < modules->length (); ix++)
14000     {
14001       module_state *state = (*modules)[ix];
14002 
14003       if (state->is_direct ())
14004 	readme.printf ("%s: %s %s", state->exported_p ? "export" : "import",
14005 		       state->get_flatname (), state->filename);
14006     }
14007 
14008   readme.end (to, to->name (MOD_SNAME_PFX ".README"), NULL);
14009 }
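
/* Illustrative sketch (not in the original source, all values
   hypothetical) of the kind of text the README section might contain
   once extracted with the readelf command mentioned above:

     GNU C++ primary module interface
     compiler: 11.1.0 ...
     version: ...
     module: hello
     source: hello.cc
     dialect: C++20/coroutines
     cwd: /tmp/build
     repository: gcm.cache
     import: other /path/to/other.gcm

   Each line corresponds to one of the printf calls above.  */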
14010 
14011 /* Sort environment var names in reverse order.  */
14012 
14013 static int
14014 env_var_cmp (const void *a_, const void *b_)
14015 {
14016   const unsigned char *a = *(const unsigned char *const *)a_;
14017   const unsigned char *b = *(const unsigned char *const *)b_;
14018 
14019   for (unsigned ix = 0; ; ix++)
14020     {
14021       bool a_end = !a[ix] || a[ix] == '=';
14022       if (a[ix] == b[ix])
14023 	{
14024 	  if (a_end)
14025 	    break;
14026 	}
14027       else
14028 	{
14029 	  bool b_end = !b[ix] || b[ix] == '=';
14030 
14031 	  if (!a_end && !b_end)
14032 	    return a[ix] < b[ix] ? +1 : -1;
14033 	  if (a_end && b_end)
14034 	    break;
14035 	  return a_end ? +1 : -1;
14036 	}
14037     }
14038 
14039   return 0;
14040 }
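
/* Illustrative note (not in the original source): the comparator treats
   '=' (or a NUL) as the end of the variable name and orders names in
   descending order, so for example "PATH=..." sorts before "HOME=..."
   in write_env's output.  */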
14041 
14042 /* Write the environment. It is a STRTAB that may be extracted with:
14043      readelf -pgnu.c++.ENV $(module).gcm */
14044 
14045 void
14046 module_state::write_env (elf_out *to)
14047 {
14048   vec<const char *> vars;
14049   vars.create (20);
14050 
14051   extern char **environ;
14052   while (const char *var = environ[vars.length ()])
14053     vars.safe_push (var);
14054   vars.qsort (env_var_cmp);
14055 
14056   bytes_out env (to);
14057   env.begin (false);
14058   while (vars.length ())
14059     env.printf ("%s", vars.pop ());
14060   env.end (to, to->name (MOD_SNAME_PFX ".ENV"), NULL);
14061 
14062   vars.release ();
14063 }
14064 
14065 /* Write the direct or indirect imports.
14066    u:N
14067    {
14068      u:index
14069      s:name
14070      u32:crc
14071      s:filename (direct)
14072      u:exported (direct)
14073    } imports[N]
14074  */
14075 
14076 void
14077 module_state::write_imports (bytes_out &sec, bool direct)
14078 {
14079   unsigned count = 0;
14080 
14081   for (unsigned ix = 1; ix < modules->length (); ix++)
14082     {
14083       module_state *imp = (*modules)[ix];
14084 
14085       if (imp->remap && imp->is_direct () == direct)
14086 	count++;
14087     }
14088 
14089   gcc_assert (!direct || count);
14090 
14091   sec.u (count);
14092   for (unsigned ix = 1; ix < modules->length (); ix++)
14093     {
14094       module_state *imp = (*modules)[ix];
14095 
14096       if (imp->remap && imp->is_direct () == direct)
14097 	{
14098 	  dump () && dump ("Writing %simport:%u->%u %M (crc=%x)",
14099 			   !direct ? "indirect "
14100 			   : imp->exported_p ? "exported " : "",
14101 			   ix, imp->remap, imp, imp->crc);
14102 	  sec.u (imp->remap);
14103 	  sec.str (imp->get_flatname ());
14104 	  sec.u32 (imp->crc);
14105 	  if (direct)
14106 	    {
14107 	      write_location (sec, imp->imported_from ());
14108 	      sec.str (imp->filename);
14109 	      int exportedness = 0;
14110 	      if (imp->exported_p)
14111 		exportedness = +1;
14112 	      else if (!imp->is_purview_direct ())
14113 		exportedness = -1;
14114 	      sec.i (exportedness);
14115 	    }
14116 	}
14117     }
14118 }
14119 
14120 /* READER, LMAPS  != NULL == direct imports,
14121    == NULL == indirect imports.  */
14122 
14123 unsigned
14124 module_state::read_imports (bytes_in &sec, cpp_reader *reader, line_maps *lmaps)
14125 {
14126   unsigned count = sec.u ();
14127   unsigned loaded = 0;
14128 
14129   while (count--)
14130     {
14131       unsigned ix = sec.u ();
14132       if (ix >= slurp->remap->length () || !ix || (*slurp->remap)[ix])
14133 	{
14134 	  sec.set_overrun ();
14135 	  break;
14136 	}
14137 
14138       const char *name = sec.str (NULL);
14139       module_state *imp = get_module (name);
14140       unsigned crc = sec.u32 ();
14141       int exportedness = 0;
14142 
14143       /* If the import is a partition, it must be the same primary
14144 	 module as this TU.  */
14145       if (imp && imp->is_partition () &&
14146 	  (!named_module_p ()
14147 	   || (get_primary ((*modules)[0]) != get_primary (imp))))
14148 	imp = NULL;
14149 
14150       if (!imp)
14151 	sec.set_overrun ();
14152       if (sec.get_overrun ())
14153 	break;
14154 
14155       if (lmaps)
14156 	{
14157 	  /* A direct import, maybe load it.  */
14158 	  location_t floc = read_location (sec);
14159 	  const char *fname = sec.str (NULL);
14160 	  exportedness = sec.i ();
14161 
14162 	  if (sec.get_overrun ())
14163 	    break;
14164 
14165 	  if (!imp->check_not_purview (loc))
14166 	    continue;
14167 
14168 	  if (imp->loadedness == ML_NONE)
14169 	    {
14170 	      imp->loc = floc;
14171 	      imp->crc = crc;
14172 	      if (!imp->get_flatname ())
14173 		imp->set_flatname ();
14174 
14175 	      unsigned n = dump.push (imp);
14176 
14177 	      if (!imp->filename && fname)
14178 		imp->filename = xstrdup (fname);
14179 
14180 	      if (imp->is_partition ())
14181 		dump () && dump ("Importing elided partition %M", imp);
14182 
14183 	      if (!imp->do_import (reader, false))
14184 		imp = NULL;
14185 	      dump.pop (n);
14186 	      if (!imp)
14187 		continue;
14188 	    }
14189 
14190 	  if (is_partition ())
14191 	    {
14192 	      if (!imp->is_direct ())
14193 		imp->directness = MD_PARTITION_DIRECT;
14194 	      if (exportedness > 0)
14195 		imp->exported_p = true;
14196 	    }
14197 	}
14198       else
14199 	{
14200 	  /* An indirect import, find it, it should already be here.  */
14201 	  if (imp->loadedness == ML_NONE)
14202 	    {
14203 	      error_at (loc, "indirect import %qs is not already loaded", name);
14204 	      continue;
14205 	    }
14206 	}
14207 
14208       if (imp->crc != crc)
14209 	error_at (loc, "import %qs has CRC mismatch", imp->get_flatname ());
14210 
14211       (*slurp->remap)[ix] = (imp->mod << 1) | (lmaps != NULL);
14212 
14213       if (lmaps && exportedness >= 0)
14214 	set_import (imp, bool (exportedness));
14215       dump () && dump ("Found %simport:%u %M->%u", !lmaps ? "indirect "
14216 		       : exportedness > 0 ? "exported "
14217 		       : exportedness < 0 ? "gmf" : "", ix, imp,
14218 		       imp->mod);
14219       loaded++;
14220     }
14221 
14222   return loaded;
14223 }
14224 
14225 /* Write the import table to MOD_SNAME_PFX.imp.  */
14226 
14227 void
14228 module_state::write_imports (elf_out *to, unsigned *crc_ptr)
14229 {
14230   dump () && dump ("Writing imports");
14231   dump.indent ();
14232 
14233   bytes_out sec (to);
14234   sec.begin ();
14235 
14236   write_imports (sec, true);
14237   write_imports (sec, false);
14238 
14239   sec.end (to, to->name (MOD_SNAME_PFX ".imp"), crc_ptr);
14240   dump.outdent ();
14241 }
14242 
14243 bool
14244 module_state::read_imports (cpp_reader *reader, line_maps *lmaps)
14245 {
14246   bytes_in sec;
14247 
14248   if (!sec.begin (loc, from (), MOD_SNAME_PFX ".imp"))
14249     return false;
14250 
14251   dump () && dump ("Reading %u imports", slurp->remap->length () - 1);
14252   dump.indent ();
14253 
14254   /* Read the imports.  */
14255   unsigned direct = read_imports (sec, reader, lmaps);
14256   unsigned indirect = read_imports (sec, NULL, NULL);
14257   if (direct + indirect + 1 != slurp->remap->length ())
14258     from ()->set_error (elf::E_BAD_IMPORT);
14259 
14260   dump.outdent ();
14261   if (!sec.end (from ()))
14262     return false;
14263   return true;
14264 }
14265 
14266 /* We're the primary module interface, but have partitions.  Document
14267    them so that non-partition module implementation units know which
14268    have already been loaded.  */
14269 
14270 void
14271 module_state::write_partitions (elf_out *to, unsigned count, unsigned *crc_ptr)
14272 {
14273   dump () && dump ("Writing %u elided partitions", count);
14274   dump.indent ();
14275 
14276   bytes_out sec (to);
14277   sec.begin ();
14278 
14279   for (unsigned ix = 1; ix != modules->length (); ix++)
14280     {
14281       module_state *imp = (*modules)[ix];
14282       if (imp->is_partition ())
14283 	{
14284 	  dump () && dump ("Writing elided partition %M (crc=%x)",
14285 			   imp, imp->crc);
14286 	  sec.str (imp->get_flatname ());
14287 	  sec.u32 (imp->crc);
14288 	  write_location (sec, imp->is_direct ()
14289 			  ? imp->imported_from () : UNKNOWN_LOCATION);
14290 	  sec.str (imp->filename);
14291 	}
14292     }
14293 
14294   sec.end (to, to->name (MOD_SNAME_PFX ".prt"), crc_ptr);
14295   dump.outdent ();
14296 }
14297 
14298 bool
14299 module_state::read_partitions (unsigned count)
14300 {
14301   bytes_in sec;
14302   if (!sec.begin (loc, from (), MOD_SNAME_PFX ".prt"))
14303     return false;
14304 
14305   dump () && dump ("Reading %u elided partitions", count);
14306   dump.indent ();
14307 
14308   while (count--)
14309     {
14310       const char *name = sec.str (NULL);
14311       unsigned crc = sec.u32 ();
14312       location_t floc = read_location (sec);
14313       const char *fname = sec.str (NULL);
14314 
14315       if (sec.get_overrun ())
14316 	break;
14317 
14318       dump () && dump ("Reading elided partition %s (crc=%x)", name, crc);
14319 
14320       module_state *imp = get_module (name);
14321       if (!imp	/* Partition should be ...  */
14322 	  || !imp->is_partition () /* a partition ...  */
14323 	  || imp->loadedness != ML_NONE  /* that is not yet loaded ...  */
14324 	  || get_primary (imp) != this) /* whose primary is this.  */
14325 	{
14326 	  sec.set_overrun ();
14327 	  break;
14328 	}
14329 
14330       if (!imp->has_location ())
14331 	imp->loc = floc;
14332       imp->crc = crc;
14333       if (!imp->filename && fname[0])
14334 	imp->filename = xstrdup (fname);
14335     }
14336 
14337   dump.outdent ();
14338   if (!sec.end (from ()))
14339     return false;
14340   return true;
14341 }
14342 
14343 /* Counter indices.  */
14344 enum module_state_counts
14345 {
14346   MSC_sec_lwm,
14347   MSC_sec_hwm,
14348   MSC_pendings,
14349   MSC_entities,
14350   MSC_namespaces,
14351   MSC_bindings,
14352   MSC_macros,
14353   MSC_inits,
14354   MSC_HWM
14355 };
14356 
14357 /* Data for config reading and writing.  */
14358 struct module_state_config {
14359   const char *dialect_str;
14360   unsigned num_imports;
14361   unsigned num_partitions;
14362   unsigned num_entities;
14363   unsigned ordinary_locs;
14364   unsigned macro_locs;
14365   unsigned ordinary_loc_align;
14366 
14367 public:
14368   module_state_config ()
14369     :dialect_str (get_dialect ()),
14370      num_imports (0), num_partitions (0), num_entities (0),
14371      ordinary_locs (0), macro_locs (0), ordinary_loc_align (0)
14372   {
14373   }
14374 
14375   static void release ()
14376   {
14377     XDELETEVEC (dialect);
14378     dialect = NULL;
14379   }
14380 
14381 private:
14382   static const char *get_dialect ();
14383   static char *dialect;
14384 };
14385 
14386 char *module_state_config::dialect;
14387 
14388 /* Generate a string of the significant compilation options.
14389    Generally assume the user knows what they're doing, in the same way
14390    that object files can be mixed.  */
14391 
14392 const char *
14393 module_state_config::get_dialect ()
14394 {
14395   if (!dialect)
14396     dialect = concat (get_cxx_dialect_name (cxx_dialect),
14397 		      /* C++ implies these, only show if disabled.  */
14398 		      flag_exceptions ? "" : "/no-exceptions",
14399 		      flag_rtti ? "" : "/no-rtti",
14400 		      flag_new_inheriting_ctors ? "" : "/old-inheriting-ctors",
14401 		      /* C++ 20 implies concepts.  */
14402 		      cxx_dialect < cxx20 && flag_concepts ? "/concepts" : "",
14403 		      flag_coroutines ? "/coroutines" : "",
14404 		      flag_module_implicit_inline ? "/implicit-inline" : "",
14405 		      NULL);
14406 
14407   return dialect;
14408 }
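
/* Illustrative sketch only: how a dialect string like the above can be
   assembled and compared.  A stand-alone approximation using
   std::string; the flag names are hypothetical and the place where an
   importer would compare strings is not shown here.

     #include <string>

     struct dialect_flags
     {
       bool exceptions, rtti, pre20_concepts, coroutines, implicit_inline;
     };

     static std::string
     make_dialect (const char *base, const dialect_flags &f)
     {
       std::string d = base;              // e.g. "C++20"
       if (!f.exceptions)
         d += "/no-exceptions";           // only note deviations
       if (!f.rtti)
         d += "/no-rtti";
       if (f.pre20_concepts)
         d += "/concepts";
       if (f.coroutines)
         d += "/coroutines";
       if (f.implicit_inline)
         d += "/implicit-inline";
       return d;
     }

   Because only deviations from the defaults are appended, compatible
   compilations produce identical strings, so consistency can be
   checked with a single string comparison.  */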
14409 
14410 /* Contents of a cluster.  */
14411 enum cluster_tag {
14412   ct_decl,	/* A decl.  */
14413   ct_defn,	/* A definition.  */
14414   ct_bind,	/* A binding.  */
14415   ct_hwm
14416 };
14417 
14418 /* Binding modifiers.  */
14419 enum ct_bind_flags
14420 {
14421   cbf_export = 0x1,	/* An exported decl.  */
14422   cbf_hidden = 0x2,	/* A hidden (friend) decl.  */
14423   cbf_using = 0x4,	/* A using decl.  */
14424   cbf_wrapped = 0x8,  	/* ... that is wrapped.  */
14425 };
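
/* Illustrative sketch only: the shape of a ct_bind record and why its
   (flags, decl) pairs are written in reverse (see write_cluster and
   read_cluster below).  Integers stand in for streamed tree nodes; the
   container-based "stream" is hypothetical.

     #include <cstddef>
     #include <vector>

     struct bind_entry { int flags; int decl; };

     // Writer: emit the pairs from last to first, then a -1 terminator.
     static void
     write_binding (std::vector<int> &stream,
                    const std::vector<bind_entry> &decls)
     {
       for (std::size_t ix = decls.size (); ix--;)
         {
           stream.push_back (decls[ix].flags);
           stream.push_back (decls[ix].decl);
         }
       stream.push_back (-1);
     }

     // Reader: prepend each decl as it arrives (standing in for
     // ovl_make), so the chain ends up back in the writer's original
     // dependency order.
     static std::vector<bind_entry>
     read_binding (const std::vector<int> &stream, std::size_t &pos)
     {
       std::vector<bind_entry> out;
       while (stream[pos] >= 0)
         {
           bind_entry e = { stream[pos], stream[pos + 1] };
           pos += 2;
           out.insert (out.begin (), e);
         }
       pos++;                             // skip the terminator
       return out;
     }

   In the real reader the prepending is done by ovl_make onto an
   OVERLOAD chain; writing in reverse also means the reader sees the
   exported decls first, which (per the comment in write_cluster) keeps
   building the overload chain cheap.  */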
14426 
14427 /* DEP belongs to a different cluster, seed it to prevent an
14428    unfortunately timed duplicate import.  */
14429 // FIXME: QOI.  For inter-cluster references we could pick just one
14430 // entity from an earlier cluster.  Even better, track dependencies
14431 // between earlier clusters.
14432 
14433 void
14434 module_state::intercluster_seed (trees_out &sec, unsigned index_hwm, depset *dep)
14435 {
14436   if (dep->is_import ()
14437       || dep->cluster < index_hwm)
14438     {
14439       tree ent = dep->get_entity ();
14440       if (!TREE_VISITED (ent))
14441 	{
14442 	  sec.tree_node (ent);
14443 	  dump (dumper::CLUSTER)
14444 	    && dump ("Seeded %s %N",
14445 		     dep->is_import () ? "import" : "intercluster", ent);
14446 	}
14447     }
14448 }
14449 
14450 /* Write the cluster of depsets in SCC[0-SIZE).
14451    dep->section -> section number
14452    dep->cluster -> entity number
14453  */
14454 
14455 unsigned
14456 module_state::write_cluster (elf_out *to, depset *scc[], unsigned size,
14457 			     depset::hash &table, unsigned *counts,
14458 			     unsigned *crc_ptr)
14459 {
14460   dump () && dump ("Writing section:%u %u depsets", table.section, size);
14461   dump.indent ();
14462 
14463   trees_out sec (to, this, table, table.section);
14464   sec.begin ();
14465   unsigned index_lwm = counts[MSC_entities];
14466 
14467   /* Determine entity numbers, mark for writing.   */
14468   dump (dumper::CLUSTER) && dump ("Cluster members:") && (dump.indent (), true);
14469   for (unsigned ix = 0; ix != size; ix++)
14470     {
14471       depset *b = scc[ix];
14472 
14473       switch (b->get_entity_kind ())
14474 	{
14475 	default:
14476 	  gcc_unreachable ();
14477 
14478 	case depset::EK_BINDING:
14479 	  {
14480 	    dump (dumper::CLUSTER)
14481 	      && dump ("[%u]=%s %P", ix, b->entity_kind_name (),
14482 		       b->get_entity (), b->get_name ());
14483 	    depset *ns_dep = b->deps[0];
14484 	    gcc_checking_assert (ns_dep->get_entity_kind ()
14485 				 == depset::EK_NAMESPACE
14486 				 && ns_dep->get_entity () == b->get_entity ());
14487 	    for (unsigned jx = b->deps.length (); --jx;)
14488 	      {
14489 		depset *dep = b->deps[jx];
14490 		// We could be declaring something that is also a
14491 		// (merged) import
14492 		gcc_checking_assert (dep->is_import ()
14493 				     || TREE_VISITED (dep->get_entity ())
14494 				     || (dep->get_entity_kind ()
14495 					 == depset::EK_USING));
14496 	      }
14497 	  }
14498 	  break;
14499 
14500 	case depset::EK_DECL:
14501 	case depset::EK_SPECIALIZATION:
14502 	case depset::EK_PARTIAL:
14503 	  b->cluster = counts[MSC_entities]++;
14504 	  sec.mark_declaration (b->get_entity (), b->has_defn ());
14505 	  /* FALLTHROUGH  */
14506 
14507 	case depset::EK_USING:
14508 	  gcc_checking_assert (!b->is_import ()
14509 			       && !b->is_unreached ());
14510 	  dump (dumper::CLUSTER)
14511 	    && dump ("[%u]=%s %s %N", ix, b->entity_kind_name (),
14512 		     b->has_defn () ? "definition" : "declaration",
14513 		     b->get_entity ());
14514 	  break;
14515 	}
14516     }
14517   dump (dumper::CLUSTER) && (dump.outdent (), true);
14518 
14519   /* Ensure every out-of-cluster decl is referenced before we start
14520      streaming.  We must do both imports *and* earlier clusters,
14521      because the latter could reach into the former and cause a
14522      duplicate loop.   */
14523   sec.set_importing (+1);
14524   for (unsigned ix = 0; ix != size; ix++)
14525     {
14526       depset *b = scc[ix];
14527       for (unsigned jx = (b->get_entity_kind () == depset::EK_BINDING
14528 			  || b->is_special ()) ? 1 : 0;
14529 	   jx != b->deps.length (); jx++)
14530 	{
14531 	  depset *dep = b->deps[jx];
14532 
14533 	  if (dep->is_binding ())
14534 	    {
14535 	      for (unsigned ix = dep->deps.length (); --ix;)
14536 		{
14537 		  depset *bind = dep->deps[ix];
14538 		  if (bind->get_entity_kind () == depset::EK_USING)
14539 		    bind = bind->deps[1];
14540 
14541 		  intercluster_seed (sec, index_lwm, bind);
14542 		}
14543 	      /* Also check the namespace itself.  */
14544 	      dep = dep->deps[0];
14545 	    }
14546 
14547 	  intercluster_seed (sec, index_lwm, dep);
14548 	}
14549     }
14550   sec.tree_node (NULL_TREE);
14551   /* We're done importing now.  */
14552   sec.set_importing (-1);
14553 
14554   /* Write non-definitions.  */
14555   for (unsigned ix = 0; ix != size; ix++)
14556     {
14557       depset *b = scc[ix];
14558       tree decl = b->get_entity ();
14559       switch (b->get_entity_kind ())
14560 	{
14561 	default:
14562 	  gcc_unreachable ();
14563 	  break;
14564 
14565 	case depset::EK_BINDING:
14566 	  {
14567 	    gcc_assert (TREE_CODE (decl) == NAMESPACE_DECL);
14568 	    dump () && dump ("Depset:%u binding %C:%P", ix, TREE_CODE (decl),
14569 			     decl, b->get_name ());
14570 	    sec.u (ct_bind);
14571 	    sec.tree_node (decl);
14572 	    sec.tree_node (b->get_name ());
14573 
14574 	    /* Write in reverse order, so reading will see the exports
14575 	       first, thus building the overload chain will be
14576 	       optimized.  */
14577 	    for (unsigned jx = b->deps.length (); --jx;)
14578 	      {
14579 		depset *dep = b->deps[jx];
14580 		tree bound = dep->get_entity ();
14581 		unsigned flags = 0;
14582 		if (dep->get_entity_kind () == depset::EK_USING)
14583 		  {
14584 		    tree ovl = bound;
14585 		    bound = OVL_FUNCTION (bound);
14586 		    if (!(TREE_CODE (bound) == CONST_DECL
14587 			  && UNSCOPED_ENUM_P (TREE_TYPE (bound))
14588 			  && decl == TYPE_NAME (TREE_TYPE (bound))))
14589 		      {
14590 			/* An unscoped enumerator in its enumeration's
14591 			   scope is not a using.  */
14592 			flags |= cbf_using;
14593 			if (OVL_USING_P (ovl))
14594 			  flags |= cbf_wrapped;
14595 		      }
14596 		    if (OVL_EXPORT_P (ovl))
14597 		      flags |= cbf_export;
14598 		  }
14599 		else
14600 		  {
14601 		    /* An implicit typedef must be at position one.  */
14602 		    gcc_assert (!DECL_IMPLICIT_TYPEDEF_P (bound) || jx == 1);
14603 		    if (dep->is_hidden ())
14604 		      flags |= cbf_hidden;
14605 		    else if (DECL_MODULE_EXPORT_P (STRIP_TEMPLATE (bound)))
14606 		      flags |= cbf_export;
14607 		  }
14608 
14609 		gcc_checking_assert (DECL_P (bound));
14610 
14611 		sec.i (flags);
14612 		sec.tree_node (bound);
14613 	      }
14614 
14615 	    /* Terminate the list.  */
14616 	    sec.i (-1);
14617 	  }
14618 	  break;
14619 
14620 	case depset::EK_USING:
14621 	  dump () && dump ("Depset:%u %s %C:%N", ix, b->entity_kind_name (),
14622 			   TREE_CODE (decl), decl);
14623 	  break;
14624 
14625 	case depset::EK_SPECIALIZATION:
14626 	case depset::EK_PARTIAL:
14627 	case depset::EK_DECL:
14628 	  dump () && dump ("Depset:%u %s entity:%u %C:%N", ix,
14629 			   b->entity_kind_name (), b->cluster,
14630 			   TREE_CODE (decl), decl);
14631 
14632 	  sec.u (ct_decl);
14633 	  sec.tree_node (decl);
14634 
14635 	  dump () && dump ("Wrote declaration entity:%u %C:%N",
14636 			   b->cluster, TREE_CODE (decl), decl);
14637 	  break;
14638 	}
14639     }
14640 
14641   depset *namer = NULL;
14642 
14643   /* Write out definitions  */
14644   for (unsigned ix = 0; ix != size; ix++)
14645     {
14646       depset *b = scc[ix];
14647       tree decl = b->get_entity ();
14648       switch (b->get_entity_kind ())
14649 	{
14650 	default:
14651 	  break;
14652 
14653 	case depset::EK_SPECIALIZATION:
14654 	case depset::EK_PARTIAL:
14655 	case depset::EK_DECL:
14656 	  if (!namer)
14657 	    namer = b;
14658 
14659 	  if (b->has_defn ())
14660 	    {
14661 	      sec.u (ct_defn);
14662 	      sec.tree_node (decl);
14663 	      dump () && dump ("Writing definition %N", decl);
14664 	      sec.write_definition (decl);
14665 
14666 	      if (!namer->has_defn ())
14667 		namer = b;
14668 	    }
14669 	  break;
14670 	}
14671     }
14672 
14673   /* We don't find the section by name.  Use depset's decl's name for
14674      human friendliness.  */
14675   unsigned name = 0;
14676   tree naming_decl = NULL_TREE;
14677   if (namer)
14678     {
14679       naming_decl = namer->get_entity ();
14680       if (namer->get_entity_kind () == depset::EK_USING)
14681 	/* This unfortunately names the section from the target of the
14682 	   using decl.  But the name is only a guide, so Do Not Care.  */
14683 	naming_decl = OVL_FUNCTION (naming_decl);
14684       if (DECL_IMPLICIT_TYPEDEF_P (naming_decl))
14685 	/* Lose any anonymousness.  */
14686 	naming_decl = TYPE_NAME (TREE_TYPE (naming_decl));
14687       name = to->qualified_name (naming_decl, namer->has_defn ());
14688     }
14689 
14690   unsigned bytes = sec.pos;
14691   unsigned snum = sec.end (to, name, crc_ptr);
14692 
14693   for (unsigned ix = size; ix--;)
14694     gcc_checking_assert (scc[ix]->section == snum);
14695 
14696   dump.outdent ();
14697   dump () && dump ("Wrote section:%u named-by:%N", table.section, naming_decl);
14698 
14699   return bytes;
14700 }
14701 
14702 /* Read a cluster from section SNUM.  */
14703 
14704 bool
14705 module_state::read_cluster (unsigned snum)
14706 {
14707   trees_in sec (this);
14708 
14709   if (!sec.begin (loc, from (), snum))
14710     return false;
14711 
14712   dump () && dump ("Reading section:%u", snum);
14713   dump.indent ();
14714 
14715   /* We care about structural equality.  */
14716   comparing_dependent_aliases++;
14717 
14718   /* First seed the imports.  */
14719   while (tree import = sec.tree_node ())
14720     dump (dumper::CLUSTER) && dump ("Seeded import %N", import);
14721 
14722   while (!sec.get_overrun () && sec.more_p ())
14723     {
14724       unsigned ct = sec.u ();
14725       switch (ct)
14726 	{
14727 	default:
14728 	  sec.set_overrun ();
14729 	  break;
14730 
14731 	case ct_bind:
14732 	  /* A set of namespace bindings.  */
14733 	  {
14734 	    tree ns = sec.tree_node ();
14735 	    tree name = sec.tree_node ();
14736 	    tree decls = NULL_TREE;
14737 	    tree visible = NULL_TREE;
14738 	    tree type = NULL_TREE;
14739 	    bool dedup = false;
14740 
14741 	    /* We rely on the bindings being in the reverse order of
14742 	       the resulting overload set.  */
14743 	    for (;;)
14744 	      {
14745 		int flags = sec.i ();
14746 		if (flags < 0)
14747 		  break;
14748 
14749 		if ((flags & cbf_hidden)
14750 		    && (flags & (cbf_using | cbf_export)))
14751 		  sec.set_overrun ();
14752 
14753 		tree decl = sec.tree_node ();
14754 		if (sec.get_overrun ())
14755 		  break;
14756 
14757 		if (decls && TREE_CODE (decl) == TYPE_DECL)
14758 		  {
14759 		    /* Stat hack.  */
14760 		    if (type || !DECL_IMPLICIT_TYPEDEF_P (decl))
14761 		      sec.set_overrun ();
14762 		    type = decl;
14763 		  }
14764 		else
14765 		  {
14766 		    if (decls
14767 			|| (flags & (cbf_hidden | cbf_wrapped))
14768 			|| DECL_FUNCTION_TEMPLATE_P (decl))
14769 		      {
14770 			decls = ovl_make (decl, decls);
14771 			if (flags & cbf_using)
14772 			  {
14773 			    dedup = true;
14774 			    OVL_USING_P (decls) = true;
14775 			    if (flags & cbf_export)
14776 			      OVL_EXPORT_P (decls) = true;
14777 			  }
14778 
14779 			if (flags & cbf_hidden)
14780 			  OVL_HIDDEN_P (decls) = true;
14781 			else if (dedup)
14782 			  OVL_DEDUP_P (decls) = true;
14783 		      }
14784 		    else
14785 		      decls = decl;
14786 
14787 		    if (flags & cbf_export
14788 			|| (!(flags & cbf_hidden)
14789 			    && (is_module () || is_partition ())))
14790 		      visible = decls;
14791 		  }
14792 	      }
14793 
14794 	    if (!decls)
14795 	      sec.set_overrun ();
14796 
14797 	    if (sec.get_overrun ())
14798 	      break; /* Bail.  */
14799 
14800 	    dump () && dump ("Binding of %P", ns, name);
14801 	    if (!set_module_binding (ns, name, mod,
14802 				     is_header () ? -1
14803 				     : is_module () || is_partition () ? 1
14804 				     : 0,
14805 				     decls, type, visible))
14806 	      sec.set_overrun ();
14807 	  }
14808 	  break;
14809 
14810 	case ct_decl:
14811 	  /* A decl.  */
14812 	  {
14813 	    tree decl = sec.tree_node ();
14814 	    dump () && dump ("Read declaration of %N", decl);
14815 	  }
14816 	  break;
14817 
14818 	case ct_defn:
14819 	  {
14820 	    tree decl = sec.tree_node ();
14821 	    dump () && dump ("Reading definition of %N", decl);
14822 	    sec.read_definition (decl);
14823 	  }
14824 	  break;
14825 	}
14826     }
14827 
14828   /* When lazy loading is in effect, we can be in the middle of
14829      parsing or instantiating a function.  Save it away.
14830      push_function_context does too much work.   */
14831   tree old_cfd = current_function_decl;
14832   struct function *old_cfun = cfun;
14833   while (tree decl = sec.post_process ())
14834     {
14835       bool abstract = false;
14836       if (TREE_CODE (decl) == TEMPLATE_DECL)
14837 	{
14838 	  abstract = true;
14839 	  decl = DECL_TEMPLATE_RESULT (decl);
14840 	}
14841 
14842       current_function_decl = decl;
14843       allocate_struct_function (decl, abstract);
14844       cfun->language = ggc_cleared_alloc<language_function> ();
14845       cfun->language->base.x_stmt_tree.stmts_are_full_exprs_p = 1;
14846 
14847       if (abstract)
14848 	;
14849       else if (DECL_ABSTRACT_P (decl))
14850 	vec_safe_push (post_load_decls, decl);
14851       else
14852 	{
14853 	  bool aggr = aggregate_value_p (DECL_RESULT (decl), decl);
14854 #ifdef PCC_STATIC_STRUCT_RETURN
14855 	  cfun->returns_pcc_struct = aggr;
14856 #endif
14857 	  cfun->returns_struct = aggr;
14858 
14859 	  if (DECL_COMDAT (decl))
14860 	    // FIXME: Comdat grouping?
14861 	    comdat_linkage (decl);
14862 	  note_vague_linkage_fn (decl);
14863 	  cgraph_node::finalize_function (decl, true);
14864 	}
14865 
14866     }
14867   /* Look, function.c's interface to cfun does too much for us, we
14868      just need to restore the old value.  I do not want to go
14869      redesigning that API right now.  */
14870 #undef cfun
14871   cfun = old_cfun;
14872   current_function_decl = old_cfd;
14873   comparing_dependent_aliases--;
14874 
14875   dump.outdent ();
14876   dump () && dump ("Read section:%u", snum);
14877 
14878   loaded_clusters++;
14879 
14880   if (!sec.end (from ()))
14881     return false;
14882 
14883   return true;
14884 }
14885 
14886 void
14887 module_state::write_namespace (bytes_out &sec, depset *dep)
14888 {
14889   unsigned ns_num = dep->cluster;
14890   unsigned ns_import = 0;
14891 
14892   if (dep->is_import ())
14893     ns_import = dep->section;
14894   else if (dep->get_entity () != global_namespace)
14895     ns_num++;
14896 
14897   sec.u (ns_import);
14898   sec.u (ns_num);
14899 }
14900 
14901 tree
14902 module_state::read_namespace (bytes_in &sec)
14903 {
14904   unsigned ns_import = sec.u ();
14905   unsigned ns_num = sec.u ();
14906   tree ns = NULL_TREE;
14907 
14908   if (ns_import || ns_num)
14909     {
14910       if (!ns_import)
14911 	ns_num--;
14912 
14913       if (unsigned origin = slurp->remap_module (ns_import))
14914 	{
14915 	  module_state *from = (*modules)[origin];
14916 	  if (ns_num < from->entity_num)
14917 	    {
14918 	      binding_slot &slot = (*entity_ary)[from->entity_lwm + ns_num];
14919 
14920 	      if (!slot.is_lazy ())
14921 		ns = slot;
14922 	    }
14923 	}
14924       else
14925 	sec.set_overrun ();
14926     }
14927   else
14928     ns = global_namespace;
14929 
14930   return ns;
14931 }
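
/* Illustrative sketch only: the (import, number) pair used by
   write_namespace/read_namespace above, with integers standing in for
   the actual namespace decls.  (0,0) denotes the global namespace; a
   namespace local to this module biases its entity number by one so
   that 0 stays free for that case.

     #include <utility>

     struct ns_ref { unsigned import; unsigned num; };

     static ns_ref
     encode_ns (bool is_import, unsigned import_id,
                bool is_global, unsigned entity_num)
     {
       if (is_import)
         return { import_id, entity_num };  // other module + its index
       if (is_global)
         return { 0, 0 };                   // the global namespace
       return { 0, entity_num + 1 };        // local namespace, biased
     }

     // Returns {is_global, entity_num}.
     static std::pair<bool, unsigned>
     decode_ns (ns_ref r)
     {
       if (!r.import && !r.num)
         return { true, 0 };
       return { false, r.import ? r.num : r.num - 1 };
     }

   The real reader also remaps the module number via slurp->remap_module
   and then indexes that module's span of the entity array.  */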
14932 
14933 /* SPACES is a sorted vector of namespaces.  Write out the namespaces
14934    to MOD_SNAME_PFX.nms section.   */
14935 
14936 void
14937 module_state::write_namespaces (elf_out *to, vec<depset *> spaces,
14938 				unsigned num, unsigned *crc_p)
14939 {
14940   dump () && dump ("Writing namespaces");
14941   dump.indent ();
14942 
14943   bytes_out sec (to);
14944   sec.begin ();
14945 
14946   for (unsigned ix = 0; ix != num; ix++)
14947     {
14948       depset *b = spaces[ix];
14949       tree ns = b->get_entity ();
14950 
14951       gcc_checking_assert (TREE_CODE (ns) == NAMESPACE_DECL);
14952       /* P1815 may have something to say about this.  */
14953       gcc_checking_assert (TREE_PUBLIC (ns));
14954 
14955       unsigned flags = 0;
14956       if (TREE_PUBLIC (ns))
14957 	flags |= 1;
14958       if (DECL_NAMESPACE_INLINE_P (ns))
14959 	flags |= 2;
14960       if (DECL_MODULE_PURVIEW_P (ns))
14961 	flags |= 4;
14962       if (DECL_MODULE_EXPORT_P (ns))
14963 	flags |= 8;
14964 
14965       dump () && dump ("Writing namespace:%u %N%s%s%s%s",
14966 		       b->cluster, ns,
14967 		       flags & 1 ? ", public" : "",
14968 		       flags & 2 ? ", inline" : "",
14969 		       flags & 4 ? ", purview" : "",
14970 		       flags & 8 ? ", export" : "");
14971       sec.u (b->cluster);
14972       sec.u (to->name (DECL_NAME (ns)));
14973       write_namespace (sec, b->deps[0]);
14974 
14975       sec.u (flags);
14976       write_location (sec, DECL_SOURCE_LOCATION (ns));
14977     }
14978 
14979   sec.end (to, to->name (MOD_SNAME_PFX ".nms"), crc_p);
14980   dump.outdent ();
14981 }
14982 
14983 /* Read the namespace hierarchy from MOD_SNAME_PFX.nms.  Install the
14984    namespaces into the entity array.  */
14985 
14986 bool
14987 module_state::read_namespaces (unsigned num)
14988 {
14989   bytes_in sec;
14990 
14991   if (!sec.begin (loc, from (), MOD_SNAME_PFX ".nms"))
14992     return false;
14993 
14994   dump () && dump ("Reading namespaces");
14995   dump.indent ();
14996 
14997   for (unsigned ix = 0; ix != num; ix++)
14998     {
14999       unsigned entity_index = sec.u ();
15000       unsigned name = sec.u ();
15001 
15002       tree parent = read_namespace (sec);
15003 
15004       /* See comment in write_namespace about why not bits.  */
15005       unsigned flags = sec.u ();
15006       location_t src_loc = read_location (sec);
15007 
15008       if (entity_index >= entity_num
15009 	  || !parent
15010 	  || (flags & 0xc) == 0x8)
15011 	sec.set_overrun ();
15012       if (sec.get_overrun ())
15013 	break;
15014 
15015       tree id = name ? get_identifier (from ()->name (name)) : NULL_TREE;
15016 
15017       dump () && dump ("Read namespace:%u %P%s%s%s%s",
15018 		       entity_index, parent, id,
15019 		       flags & 1 ? ", public" : "",
15020 		       flags & 2 ? ", inline" : "",
15021 		       flags & 4 ? ", purview" : "",
15022 		       flags & 8 ? ", export" : "");
15023       bool visible_p = ((flags & 8)
15024 			|| ((flags & 1)
15025 			    && (flags & 4)
15026 			    && (is_partition () || is_module ())));
15027       tree inner = add_imported_namespace (parent, id, src_loc, mod,
15028 					   bool (flags & 2), visible_p);
15029       if (!inner)
15030 	{
15031 	  sec.set_overrun ();
15032 	  break;
15033 	}
15034 
15035       if (is_partition ())
15036 	{
15037 	  if (flags & 4)
15038 	    DECL_MODULE_PURVIEW_P (inner) = true;
15039 	  if (flags & 8)
15040 	    DECL_MODULE_EXPORT_P (inner) = true;
15041 	}
15042 
15043       /* Install the namespace.  */
15044       (*entity_ary)[entity_lwm + entity_index] = inner;
15045       if (DECL_MODULE_IMPORT_P (inner))
15046 	{
15047 	  bool existed;
15048 	  unsigned *slot = &entity_map->get_or_insert
15049 	    (DECL_UID (inner), &existed);
15050 	  if (existed)
15051 	    /* If it existed, it should match.  */
15052 	    gcc_checking_assert (inner == (*entity_ary)[*slot]);
15053 	  else
15054 	    *slot = entity_lwm + entity_index;
15055 	}
15056     }
15057   dump.outdent ();
15058   if (!sec.end (from ()))
15059     return false;
15060   return true;
15061 }
15062 
15063 /* Write the binding table to MOD_SNAME_PFX.bnd.  */
15064 
15065 unsigned
15066 module_state::write_bindings (elf_out *to, vec<depset *> sccs, unsigned *crc_p)
15067 {
15068   dump () && dump ("Writing binding table");
15069   dump.indent ();
15070 
15071   unsigned num = 0;
15072   bytes_out sec (to);
15073   sec.begin ();
15074 
15075   for (unsigned ix = 0; ix != sccs.length (); ix++)
15076     {
15077       depset *b = sccs[ix];
15078       if (b->is_binding ())
15079 	{
15080 	  tree ns = b->get_entity ();
15081 	  dump () && dump ("Bindings %P section:%u", ns, b->get_name (),
15082 			   b->section);
15083 	  sec.u (to->name (b->get_name ()));
15084 	  write_namespace (sec, b->deps[0]);
15085 	  sec.u (b->section);
15086 	  num++;
15087 	}
15088     }
15089 
15090   sec.end (to, to->name (MOD_SNAME_PFX ".bnd"), crc_p);
15091   dump.outdent ();
15092 
15093   return num;
15094 }
15095 
15096 /* Read the binding table from MOD_SNAME_PFX.bnd.  */
15097 
15098 bool
15099 module_state::read_bindings (unsigned num, unsigned lwm, unsigned hwm)
15100 {
15101   bytes_in sec;
15102 
15103   if (!sec.begin (loc, from (), MOD_SNAME_PFX ".bnd"))
15104     return false;
15105 
15106   dump () && dump ("Reading binding table");
15107   dump.indent ();
15108   for (; !sec.get_overrun () && num--;)
15109     {
15110       const char *name = from ()->name (sec.u ());
15111       tree ns = read_namespace (sec);
15112       unsigned snum = sec.u ();
15113 
15114       if (!ns || !name || (snum - lwm) >= (hwm - lwm))
15115 	sec.set_overrun ();
15116       if (!sec.get_overrun ())
15117 	{
15118 	  tree id = get_identifier (name);
15119 	  dump () && dump ("Bindings %P section:%u", ns, id, snum);
15120 	  if (mod && !import_module_binding (ns, id, mod, snum))
15121 	    break;
15122 	}
15123     }
15124 
15125   dump.outdent ();
15126   if (!sec.end (from ()))
15127     return false;
15128   return true;
15129 }
15130 
15131 /* Write the entity table to MOD_SNAME_PFX.ent
15132 
15133    Each entry is a section number.  */
15134 
15135 void
15136 module_state::write_entities (elf_out *to, vec<depset *> depsets,
15137 			      unsigned count, unsigned *crc_p)
15138 {
15139   dump () && dump ("Writing entities");
15140   dump.indent ();
15141 
15142   bytes_out sec (to);
15143   sec.begin ();
15144 
15145   unsigned current = 0;
15146   for (unsigned ix = 0; ix < depsets.length (); ix++)
15147     {
15148       depset *d = depsets[ix];
15149 
15150       switch (d->get_entity_kind ())
15151 	{
15152 	default:
15153 	  break;
15154 
15155 	case depset::EK_NAMESPACE:
15156 	  if (!d->is_import () && d->get_entity () != global_namespace)
15157 	    {
15158 	      gcc_checking_assert (d->cluster == current);
15159 	      current++;
15160 	      sec.u (0);
15161 	    }
15162 	  break;
15163 
15164 	case depset::EK_DECL:
15165 	case depset::EK_SPECIALIZATION:
15166 	case depset::EK_PARTIAL:
15167 	  gcc_checking_assert (!d->is_unreached ()
15168 			       && !d->is_import ()
15169 			       && d->cluster == current
15170 			       && d->section);
15171 	  current++;
15172 	  sec.u (d->section);
15173 	  break;
15174 	}
15175     }
15176   gcc_assert (count == current);
15177   sec.end (to, to->name (MOD_SNAME_PFX ".ent"), crc_p);
15178   dump.outdent ();
15179 }
15180 
15181 bool
15182 module_state::read_entities (unsigned count, unsigned lwm, unsigned hwm)
15183 {
15184   trees_in sec (this);
15185 
15186   if (!sec.begin (loc, from (), MOD_SNAME_PFX ".ent"))
15187     return false;
15188 
15189   dump () && dump ("Reading entities");
15190   dump.indent ();
15191 
15192   for (binding_slot *slot = entity_ary->begin () + entity_lwm; count--; slot++)
15193     {
15194       unsigned snum = sec.u ();
15195       if (snum && (snum - lwm) >= (hwm - lwm))
15196 	sec.set_overrun ();
15197       if (sec.get_overrun ())
15198 	break;
15199 
15200       if (snum)
15201 	slot->set_lazy (snum << 2);
15202     }
15203 
15204   dump.outdent ();
15205   if (!sec.end (from ()))
15206     return false;
15207   return true;
15208 }
15209 
15210 /* Write the pending table to MOD_SNAME_PFX.pnd
15211 
15212    The pending table holds information about clusters that need to be
15213    loaded because they contain information about something that is not
15214    found by namespace-scope lookup.
15215 
15216    The three cases are:
15217 
15218    (a) Template (maybe-partial) specializations that we have
15219    instantiated or defined.  When an importer needs to instantiate
15220    that template, they /must have/ the partial, explicit & extern
15221    specializations available.  If they have the other specializations
15222    available, they'll have less work to do.  Thus, when we're about to
15223    instantiate FOO, we have to be able to ask 'are there any
15224    specializations of FOO in our imports?'.
15225 
15226    (b) (Maybe-implicit) member functions definitions.  A class could
15227    be defined in one header, and an inline member defined in a
15228    different header (this occurs in the STL).  Similarly, like the
15229    specialization case, an implicit member function could have been
15230    'instantiated' in one module, and it'd be nice to not have to
15231    reinstantiate it in another.
15232 
15233    (c) A member class completed elsewhere.  A member class could be
15234    declared in one header and defined in another.  We need to know to
15235    load the class definition before looking in it.  This turns out to
15236    be a specific case of #b, so we can treat these the same.  But it
15237    does highlight an issue -- there could be an intermediate import
15238    between the outermost containing namespace-scope class and the
15239    innermost being-defined member class.  This is actually possible
15240    with all of these cases, so be aware -- we're not just talking of
15241    one level of import to get to the innermost namespace.
15242 
15243    This gets complicated fast; it took me multiple attempts to even
15244    get something remotely working.  Partially because I focussed on
15245    optimizing what I think turns out to be a smaller problem, given
15246    the known need to do the more general case *anyway*.  I document
15247    the smaller problem, because it does appear to be the natural way
15248    to do it.  It's a trap!
15249 
15250    **** THE TRAP
15251 
15252    Let's refer to the primary template or the containing class as the
15253    KEY.  And the specialization or member as the PENDING-ENTITY.  (To
15254    avoid having to say those mouthfuls all the time.)
15255 
15256    In either case, we have an entity and we need some way of mapping
15257    that to a set of entities that need to be loaded before we can
15258    proceed with whatever processing of the entity we were going to do.
15259 
15260    We need to link the key to the pending-entity in some way.  Given a
15261    key, tell me the pending-entities I need to have loaded.  However
15262    we tie the key to the pending-entity must not rely on the key being
15263    loaded -- that'd defeat the lazy loading scheme.
15264 
15265    As the key will have a known entity number (either because we
15266    imported it, or because we're writing it out too), we can
15267    generate a map of key-indices to pending-entities.  The
15268    pending-entity indices will be into our span of the entity table,
15269    and thus allow them to be lazily loaded.  The key index will be
15270    into another slot of the entity table.  Notice that this checking
15271    could be expensive; we don't want to iterate over a bunch of
15272    pending-entity indices (across multiple imports) every time we're
15273    about to do the thing with the key.  We need to quickly determine
15274    'definitely nothing needed'.
15275 
15276    That's almost good enough, except that key indices are not unique
15277    in a couple of cases :( Specifically the Global Module or a module
15278    partition can result in multiple modules assigning an entity index
15279    for the key.  The decl-merging on loading will detect that, so we
15280    only have one key loaded, and in the entity hash it'll indicate the
15281    entity index of the first load, which might be different to how we
15282    know it.  Notice this is restricted to GM entities or this-module
15283    entities.  Foreign imports cannot have this.
15284 
15285    We can simply resolve this in one direction: from how this module
15286    referred to the key to how the importer knows it.  Look in the
15287    entity table slot that we nominate, maybe lazy load it, and then
15288    lookup the resultant entity in the entity hash to learn how the
15289    importer knows it.
15290 
15291    But we need to go in the other direction :( Given the key, find all
15292    the index-aliases of that key.  We can partially solve that by
15293    adding an alias hash table.  Whenever we load a merged decl, add or
15294    augment a mapping from the entity (or its entity-index) to the
15295    newly-discovered index.  Then when we look for pending entities of
15296    a key, we also iterate over the aliases this mapping provides.
15297 
15298    But that requires the alias to be loaded.  And that's not
15299    necessarily true.
15300 
15301    *** THE SIMPLER WAY
15302 
15303    The remaining fixed thing we have is the innermost namespace
15304    containing the ultimate namespace-scope container of the key and
15305    the name of that container (which might be the key itself).  I.e. a
15306    namespace-decl/identifier/module tuple.  Let's call this the
15307    top-key.  We'll discover that the module is not important here,
15308    because of cross-module possibilities mentioned in case #c above.
15309    We can't markup namespace-binding slots.  The best we can do is
15310    mark the binding vector with 'there's something here', and have
15311    another map from namespace/identifier pairs to a vector of pending
15312    entity indices.
15313 
15314    Maintain a pending-entity map.  This is keyed by top-key, and
15315    maps to a vector of pending-entity indices.  On the binding vector
15316    have flags saying whether the pending-name-entity map has contents.
15317    (We might want to further extend the key to be GM-vs-Partition and
15318    specialization-vs-member, but let's not get ahead of ourselves.)
15319 
15320    For every key-like entity, find the outermost namespace-scope
15321    name.  Use that to look up in the pending-entity map and then make
15322    sure the specified entities are loaded.
15323 
15324    An optimization might be to have a flag in each key-entity saying
15325    that its top key might be in the entity table.  It's not clear to
15326    me how to set that flag cheaply -- cheaper than just looking.
15327 
15328    FIXME: It'd be nice to have a bit in decls to tell us whether to
15329    even try this.  We can have a 'already done' flag, that we set when
15330    we've done KLASS's lazy pendings.  When we import a module that
15331    registers pendings on the same top-key as KLASS we need to clear
15332    the flag.  A recursive walk of the top-key clearing the bit will
15333    suffice.  Plus we only need to recurse on classes that have the bit
15334    set.  (That means we need to set the bit on parents of KLASS here,
15335    don't forget.)  However, first: correctness, second: efficiency.  */
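
/* Illustrative sketch only: the shape of the pending-entity map
   described above, using std containers with integers standing in for
   the namespace decl and identifier; the function and type names are
   hypothetical.

     #include <map>
     #include <utility>
     #include <vector>

     // Top-key: innermost namespace and the name of the outermost
     // namespace-scope container (which might be the key itself).
     using top_key = std::pair<int, int>;   // (namespace, identifier)

     // For each top-key, the entity indices whose clusters must be
     // loaded before doing anything interesting with that key.
     using pending_map = std::map<top_key, std::vector<unsigned>>;

     // Importer side: before instantiating or completing something
     // keyed by (ns, id), drain any pending entities first.
     static void
     load_pendings (pending_map &pendings, top_key key,
                    void (*lazy_load) (unsigned entity_index))
     {
       auto it = pendings.find (key);
       if (it == pendings.end ())
         return;                    // quick 'nothing needed' answer
       for (unsigned index : it->second)
         lazy_load (index);
       pendings.erase (it);
     }

   In the real thing the table is a hash map, the read-in indices are
   offset by the importing module's entity_lwm, and the binding vector
   carries a flag so the common 'definitely nothing needed' case never
   touches the map at all.  */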
15336 
15337 unsigned
15338 module_state::write_pendings (elf_out *to, vec<depset *> depsets,
15339 			      depset::hash &table, unsigned *crc_p)
15340 {
15341   dump () && dump ("Writing pending-entities");
15342   dump.indent ();
15343 
15344   trees_out sec (to, this, table);
15345   sec.begin ();
15346 
15347   unsigned count = 0;
15348   tree cache_ns = NULL_TREE;
15349   tree cache_id = NULL_TREE;
15350   unsigned cache_section = ~0;
15351   for (unsigned ix = 0; ix < depsets.length (); ix++)
15352     {
15353       depset *d = depsets[ix];
15354 
15355       if (d->is_binding ())
15356 	continue;
15357 
15358       if (d->is_import ())
15359 	continue;
15360 
15361       if (!(d->get_entity_kind () == depset::EK_SPECIALIZATION
15362 	    || d->get_entity_kind () == depset::EK_PARTIAL
15363 	    || (d->get_entity_kind () == depset::EK_DECL && d->is_member ())))
15364 	continue;
15365 
15366       tree key_decl = nullptr;
15367       tree key_ns = find_pending_key (d->get_entity (), &key_decl);
15368       tree key_name = DECL_NAME (key_decl);
15369 
15370       if (IDENTIFIER_ANON_P (key_name))
15371 	{
15372 	  gcc_checking_assert (IDENTIFIER_LAMBDA_P (key_name));
15373 	  if (tree attached = LAMBDA_TYPE_EXTRA_SCOPE (TREE_TYPE (key_decl)))
15374 	    key_name = DECL_NAME (attached);
15375 	  else
15376 	    {
15377 	      /* There's nothing to attach it to.  Must
15378 		 always reinstantiate.  */
15379 	      dump ()
15380 		&& dump ("Unattached lambda %N[%u] section:%u",
15381 			 d->get_entity_kind () == depset::EK_DECL
15382 			 ? "Member" : "Specialization", d->get_entity (),
15383 			 d->cluster, d->section);
15384 	      continue;
15385 	    }
15386 	}
15387 
15388       char const *also = "";
15389       if (d->section == cache_section
15390 	  && key_ns == cache_ns
15391 	  && key_name == cache_id)
15392 	/* Same section & key as previous, no need to repeat ourselves.  */
15393 	also = "also ";
15394       else
15395 	{
15396 	  cache_ns = key_ns;
15397 	  cache_id = key_name;
15398 	  cache_section = d->section;
15399 	  gcc_checking_assert (table.find_dependency (cache_ns));
15400 	  sec.tree_node (cache_ns);
15401 	  sec.tree_node (cache_id);
15402 	  sec.u (d->cluster);
15403 	  count++;
15404 	}
15405       dump () && dump ("Pending %s %N entity:%u section:%u %skeyed to %P",
15406 		       d->get_entity_kind () == depset::EK_DECL
15407 		       ? "member" : "specialization", d->get_entity (),
15408 		       d->cluster, cache_section, also, cache_ns, cache_id);
15409       }
15410   sec.end (to, to->name (MOD_SNAME_PFX ".pnd"), crc_p);
15411   dump.outdent ();
15412 
15413   return count;
15414 }
15415 
15416 bool
15417 module_state::read_pendings (unsigned count)
15418 {
15419   trees_in sec (this);
15420 
15421   if (!sec.begin (loc, from (), MOD_SNAME_PFX ".pnd"))
15422     return false;
15423 
15424   dump () && dump ("Reading %u pendings", count);
15425   dump.indent ();
15426 
15427   for (unsigned ix = 0; ix != count; ix++)
15428     {
15429       pending_key key;
15430       unsigned index;
15431 
15432       key.ns = sec.tree_node ();
15433       key.id = sec.tree_node ();
15434       index = sec.u ();
15435 
15436       if (!key.ns || !key.id
15437 	  || !(TREE_CODE (key.ns) == NAMESPACE_DECL
15438 	       && !DECL_NAMESPACE_ALIAS (key.ns))
15439 	  || !identifier_p (key.id)
15440 	  || index >= entity_num)
15441 	sec.set_overrun ();
15442 
15443       if (sec.get_overrun ())
15444 	break;
15445 
15446       dump () && dump ("Pending:%u keyed to %P", index, key.ns, key.id);
15447 
15448       index += entity_lwm;
15449       auto &vec = pending_table->get_or_insert (key);
15450       vec.safe_push (index);
15451     }
15452 
15453   dump.outdent ();
15454   if (!sec.end (from ()))
15455     return false;
15456   return true;
15457 }
15458 
15459 /* Read & write locations.  */
15460 enum loc_kind {
15461   LK_ORDINARY,
15462   LK_MACRO,
15463   LK_IMPORT_ORDINARY,
15464   LK_IMPORT_MACRO,
15465   LK_ADHOC,
15466   LK_RESERVED,
15467 };
15468 
15469 static const module_state *
15470 module_for_ordinary_loc (location_t loc)
15471 {
15472   unsigned pos = 0;
15473   unsigned len = ool->length () - pos;
15474 
15475   while (len)
15476     {
15477       unsigned half = len / 2;
15478       module_state *probe = (*ool)[pos + half];
15479       if (loc < probe->ordinary_locs.first)
15480 	len = half;
15481       else if (loc < probe->ordinary_locs.second)
15482 	return probe;
15483       else
15484 	{
15485 	  pos += half + 1;
15486 	  len = len - (half + 1);
15487 	}
15488     }
15489 
15490   return nullptr;
15491 }
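
/* Illustrative sketch only: the binary-search pattern used above, over
   disjoint half-open ranges [first,second) sorted by their start.
   Plain types stand in for module_state and location_t.

     #include <vector>

     struct range { unsigned first, second; };   // [first, second)

     static int
     owner_of (const std::vector<range> &ranges, unsigned loc)
     {
       unsigned pos = 0, len = ranges.size ();
       while (len)
         {
           unsigned half = len / 2;
           const range &probe = ranges[pos + half];
           if (loc < probe.first)
             len = half;                  // strictly before the probe
           else if (loc < probe.second)
             return int (pos + half);     // inside the probe
           else
             {                            // after: discard left half
               pos += half + 1;
               len -= half + 1;
             }
         }
       return -1;                         // not covered by any range
     }

   module_for_macro_loc below is the mirror image: macro locations are
   allocated downwards, so its comparisons are reversed.  */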
15492 
15493 static const module_state *
15494 module_for_macro_loc (location_t loc)
15495 {
15496   unsigned pos = 1;
15497   unsigned len = modules->length () - pos;
15498 
15499   while (len)
15500     {
15501       unsigned half = len / 2;
15502       module_state *probe = (*modules)[pos + half];
15503       if (loc >= probe->macro_locs.second)
15504 	len = half;
15505       else if (loc >= probe->macro_locs.first)
15506 	return probe;
15507       else
15508 	{
15509 	  pos += half + 1;
15510 	  len = len - (half + 1);
15511 	}
15512     }
15513 
15514   return NULL;
15515 }
15516 
15517 location_t
15518 module_state::imported_from () const
15519 {
15520   location_t from = loc;
15521   line_map_ordinary const *fmap
15522     = linemap_check_ordinary (linemap_lookup (line_table, from));
15523 
15524   if (MAP_MODULE_P (fmap))
15525     from = linemap_included_from (fmap);
15526 
15527   return from;
15528 }
15529 
15530 /* If we're not streaming, record that we need location LOC.
15531    Otherwise stream it.  */
15532 
15533 void
15534 module_state::write_location (bytes_out &sec, location_t loc)
15535 {
15536   if (!sec.streaming_p ())
15537     /* This is where we should note we use this location.  See comment
15538        about write_ordinary_maps.  */
15539     return;
15540 
15541   if (loc < RESERVED_LOCATION_COUNT)
15542     {
15543       dump (dumper::LOCATION) && dump ("Reserved location %u", unsigned (loc));
15544       sec.u (LK_RESERVED + loc);
15545     }
15546   else if (IS_ADHOC_LOC (loc))
15547     {
15548       dump (dumper::LOCATION) && dump ("Adhoc location");
15549       sec.u (LK_ADHOC);
15550       location_t locus = get_location_from_adhoc_loc (line_table, loc);
15551       write_location (sec, locus);
15552       source_range range = get_range_from_loc (line_table, loc);
15553       if (range.m_start == locus)
15554 	/* Compress.  */
15555 	range.m_start = UNKNOWN_LOCATION;
15556       write_location (sec, range.m_start);
15557       write_location (sec, range.m_finish);
15558     }
15559   else if (loc >= LINEMAPS_MACRO_LOWEST_LOCATION (line_table))
15560     {
15561       if (const loc_spans::span *span = spans.macro (loc))
15562 	{
15563 	  unsigned off = MAX_LOCATION_T - loc;
15564 
15565 	  off -= span->macro_delta;
15566 
15567 	  sec.u (LK_MACRO);
15568 	  sec.u (off);
15569 	  dump (dumper::LOCATION)
15570 	    && dump ("Macro location %u output %u", loc, off);
15571 	}
15572       else if (const module_state *import = module_for_macro_loc (loc))
15573 	{
15574 	  unsigned off = import->macro_locs.second - loc - 1;
15575 	  sec.u (LK_IMPORT_MACRO);
15576 	  sec.u (import->remap);
15577 	  sec.u (off);
15578 	  dump (dumper::LOCATION)
15579 	    && dump ("Imported macro location %u output %u:%u",
15580 		     loc, import->remap, off);
15581 	}
15582       else
15583 	gcc_unreachable ();
15584     }
15585   else if (IS_ORDINARY_LOC (loc))
15586     {
15587       if (const loc_spans::span *span = spans.ordinary (loc))
15588 	{
15589 	  unsigned off = loc;
15590 
15591 	  off += span->ordinary_delta;
15592 	  sec.u (LK_ORDINARY);
15593 	  sec.u (off);
15594 
15595 	  dump (dumper::LOCATION)
15596 	    && dump ("Ordinary location %u output %u", loc, off);
15597 	}
15598       else if (const module_state *import = module_for_ordinary_loc (loc))
15599 	{
15600 	  unsigned off = loc - import->ordinary_locs.first;
15601 	  sec.u (LK_IMPORT_ORDINARY);
15602 	  sec.u (import->remap);
15603 	  sec.u (off);
15604 	  dump (dumper::LOCATION)
15605 	    && dump ("Imported ordinary location %u output %u:%u",
15606 		     loc, import->remap, off);
15607 	}
15608       else
15609 	gcc_unreachable ();
15610     }
15611   else
15612     gcc_unreachable ();
15613 }
15614 
15615 location_t
15616 module_state::read_location (bytes_in &sec) const
15617 {
15618   location_t locus = UNKNOWN_LOCATION;
15619   unsigned kind = sec.u ();
15620   switch (kind)
15621      {
15622     default:
15623       {
15624 	if (kind < LK_RESERVED + RESERVED_LOCATION_COUNT)
15625 	  locus = location_t (kind - LK_RESERVED);
15626 	else
15627 	  sec.set_overrun ();
15628 	dump (dumper::LOCATION)
15629 	  && dump ("Reserved location %u", unsigned (locus));
15630       }
15631       break;
15632 
15633      case LK_ADHOC:
15634       {
15635 	dump (dumper::LOCATION) && dump ("Adhoc location");
15636 	locus = read_location (sec);
15637 	source_range range;
15638 	range.m_start = read_location (sec);
15639 	if (range.m_start == UNKNOWN_LOCATION)
15640 	  range.m_start = locus;
15641 	range.m_finish = read_location (sec);
15642 	if (locus != loc && range.m_start != loc && range.m_finish != loc)
15643 	  locus = get_combined_adhoc_loc (line_table, locus, range, NULL);
15644       }
15645       break;
15646 
15647     case LK_MACRO:
15648       {
15649 	unsigned off = sec.u ();
15650 
15651 	if (macro_locs.first)
15652 	  {
15653 	    location_t adjusted = MAX_LOCATION_T - off;
15654 	    adjusted -= slurp->loc_deltas.second;
15655 	    if (adjusted < macro_locs.first)
15656 	      sec.set_overrun ();
15657 	    else if (adjusted < macro_locs.second)
15658 	      locus = adjusted;
15659 	    else
15660 	      sec.set_overrun ();
15661 	  }
15662 	else
15663 	  locus = loc;
15664 	dump (dumper::LOCATION)
15665 	  && dump ("Macro %u becoming %u", off, locus);
15666       }
15667       break;
15668 
15669     case LK_ORDINARY:
15670       {
15671 	unsigned off = sec.u ();
15672 	if (ordinary_locs.second)
15673 	  {
15674 	    location_t adjusted = off;
15675 
15676 	    adjusted += slurp->loc_deltas.first;
15677 	    if (adjusted >= ordinary_locs.second)
15678 	      sec.set_overrun ();
15679 	    else if (adjusted >= ordinary_locs.first)
15680 	      locus = adjusted;
15681 	    else if (adjusted < spans.main_start ())
15682 	      locus = off;
15683 	  }
15684 	else
15685 	  locus = loc;
15686 
15687 	dump (dumper::LOCATION)
15688 	  && dump ("Ordinary location %u becoming %u", off, locus);
15689       }
15690       break;
15691 
15692      case LK_IMPORT_MACRO:
15693      case LK_IMPORT_ORDINARY:
15694        {
15695 	 unsigned mod = sec.u ();
15696 	 unsigned off = sec.u ();
15697 	 const module_state *import = NULL;
15698 
15699 	 if (!mod && !slurp->remap)
15700 	   /* This is an early read of a partition location during the
15701 	      read of our ordinary location map.  */
15702 	   import = this;
15703 	 else
15704 	   {
15705 	     mod = slurp->remap_module (mod);
15706 	     if (!mod)
15707 	       sec.set_overrun ();
15708 	     else
15709 	       import = (*modules)[mod];
15710 	   }
15711 
15712 	 if (import)
15713 	   {
15714 	     if (kind == LK_IMPORT_MACRO)
15715 	       {
15716 		 if (!import->macro_locs.first)
15717 		   locus = import->loc;
15718 		 else if (off < import->macro_locs.second - import->macro_locs.first)
15719 		   locus = import->macro_locs.second - off - 1;
15720 		 else
15721 		   sec.set_overrun ();
15722 	       }
15723 	     else
15724 	       {
15725 		 if (!import->ordinary_locs.second)
15726 		   locus = import->loc;
15727 		 else if (off < (import->ordinary_locs.second
15728 			    - import->ordinary_locs.first))
15729 		   locus = import->ordinary_locs.first + off;
15730 		 else
15731 		   sec.set_overrun ();
15732 	       }
15733 	   }
15734        }
15735        break;
15736     }
15737 
15738   return locus;
15739 }
15740 
15741 /* Prepare the span adjustments.  */
15742 
15743 // FIXME:QOI I do not prune the unreachable locations.  Modules with
15744 // textually-large GMFs could well cause us to run out of locations.
15745 // Regular single-file modules could also be affected.  We should
15746 // determine which locations we need to represent, so that we do not
15747 // grab more locations than necessary.  An example is in
15748 // write_macro_maps where we work around macro expansions that are not
15749 // covering any locations -- the macro expands to nothing.  Perhaps we
15750 // should decompose locations so that we can have a more graceful
15751 // degradation upon running out?
15752 
15753 location_map_info
15754 module_state::write_prepare_maps (module_state_config *)
15755 {
15756   dump () && dump ("Preparing locations");
15757   dump.indent ();
15758 
15759   dump () && dump ("Reserved locations [%u,%u) macro [%u,%u)",
15760 		   spans[loc_spans::SPAN_RESERVED].ordinary.first,
15761 		   spans[loc_spans::SPAN_RESERVED].ordinary.second,
15762 		   spans[loc_spans::SPAN_RESERVED].macro.first,
15763 		   spans[loc_spans::SPAN_RESERVED].macro.second);
15764 
15765   location_map_info info;
15766 
15767   info.num_maps.first = info.num_maps.second = 0;
15768 
15769   /* Figure the alignment of ordinary location spans.  */
15770   unsigned max_range = 0;
15771   for (unsigned ix = loc_spans::SPAN_FIRST; ix != spans.length (); ix++)
15772     {
15773       loc_spans::span &span = spans[ix];
15774 
15775       if (span.ordinary.first != span.ordinary.second)
15776 	{
15777 	  line_map_ordinary const *omap
15778 	    = linemap_check_ordinary (linemap_lookup (line_table,
15779 						      span.ordinary.first));
15780 
15781 	  /* We should exactly match up.  */
15782 	  gcc_checking_assert (MAP_START_LOCATION (omap) == span.ordinary.first);
15783 
15784 	  line_map_ordinary const *fmap = omap;
15785 	  for (; MAP_START_LOCATION (omap) < span.ordinary.second; omap++)
15786 	    {
15787 	      /* We should never find a module linemap in an interval.  */
15788 	      gcc_checking_assert (!MAP_MODULE_P (omap));
15789 
15790 	      if (max_range < omap->m_range_bits)
15791 		max_range = omap->m_range_bits;
15792 	    }
15793 
15794 	  info.num_maps.first += omap - fmap;
15795 	}
15796 
15797       if (span.macro.first != span.macro.second)
15798 	{
15799 	  /* Iterate over the span's macros, to elide the empty
15800 	     expansions.  */
15801 	  unsigned count = 0;
15802 	  for (unsigned macro
15803 		 = linemap_lookup_macro_index (line_table,
15804 					       span.macro.second - 1);
15805 	       macro < LINEMAPS_MACRO_USED (line_table);
15806 	       macro++)
15807 	    {
15808 	      line_map_macro const *mmap
15809 		= LINEMAPS_MACRO_MAP_AT (line_table, macro);
15810 	      if (MAP_START_LOCATION (mmap) < span.macro.first)
15811 		/* Fallen out of the span.  */
15812 		break;
15813 
15814 	      if (mmap->n_tokens)
15815 		count++;
15816 	    }
15817 	  dump (dumper::LOCATION) && dump ("Span:%u %u macro maps", ix, count);
15818 	  info.num_maps.second += count;
15819 	}
15820     }
15821 
15822   /* Adjust the maps.  Ordinary ones ascend, and we must maintain
15823      alignment.  Macro ones descend, but are unaligned.  */
15824   location_t ord_off = spans[loc_spans::SPAN_FIRST].ordinary.first;
15825   location_t mac_off = spans[loc_spans::SPAN_FIRST].macro.second;
15826   location_t range_mask = (1u << max_range) - 1;
15827 
15828   dump () && dump ("Ordinary maps range bits:%u, preserve:%x, zero:%u",
15829 		   max_range, ord_off & range_mask, ord_off & ~range_mask);
15830 
15831   for (unsigned ix = loc_spans::SPAN_FIRST; ix != spans.length (); ix++)
15832     {
15833       loc_spans::span &span = spans[ix];
15834 
15835       span.macro_delta = mac_off - span.macro.second;
15836       mac_off -= span.macro.second - span.macro.first;
15837       dump () && dump ("Macro span:%u [%u,%u):%u->%d(%u)", ix,
15838 		       span.macro.first, span.macro.second,
15839 		       span.macro.second - span.macro.first,
15840 		       span.macro_delta, span.macro.first + span.macro_delta);
15841 
15842       line_map_ordinary const *omap
15843 	= linemap_check_ordinary (linemap_lookup (line_table,
15844 						      span.ordinary.first));
15845       location_t base = MAP_START_LOCATION (omap);
15846 
15847       /* Preserve the low MAX_RANGE bits of base by incrementing ORD_OFF.  */
15848       unsigned low_bits = base & range_mask;
15849       if ((ord_off & range_mask) > low_bits)
15850 	low_bits += range_mask + 1;
15851       ord_off = (ord_off & ~range_mask) + low_bits;
15852       span.ordinary_delta = ord_off - base;
15853 
15854       for (; MAP_START_LOCATION (omap) < span.ordinary.second; omap++)
15855 	{
15856 	  location_t start_loc = MAP_START_LOCATION (omap);
15857 	  unsigned to = start_loc + span.ordinary_delta;
15858 	  location_t end_loc = MAP_START_LOCATION (omap + 1);
15859 
15860 	  dump () && dump ("Ordinary span:%u [%u,%u):%u->%d(%u)",
15861 			   ix, start_loc,
15862 			   end_loc, end_loc - start_loc,
15863 			   span.ordinary_delta, to);
15864 
15865 	  /* There should be no change in the low order bits.  */
15866 	  gcc_checking_assert (((start_loc ^ to) & range_mask) == 0);
15867 	}
15868 
15869       /* The ending serialized value.  */
15870       ord_off = span.ordinary.second + span.ordinary_delta;
15871     }
15872 
15873   dump () && dump ("Ordinary:%u maps hwm:%u macro:%u maps lwm:%u ",
15874 		   info.num_maps.first, ord_off,
15875 		   info.num_maps.second, mac_off);
15876 
15877   dump.outdent ();
15878 
15879   info.max_range = max_range;
15880 
15881   return info;
15882 }
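
/* Illustrative sketch only: the alignment step above computes the
   smallest output offset that is not less than the current write
   position and that preserves the low range bits of the span's base,
   so the column/range encoding of each location survives the shift.
   The function name here is hypothetical.

     #include <cassert>

     // Smallest value >= cur whose low bits (under mask) equal those
     // of base.  mask is (1u << max_range) - 1.
     static unsigned
     align_to_base (unsigned cur, unsigned base, unsigned mask)
     {
       unsigned low = base & mask;
       if ((cur & mask) > low)
         low += mask + 1;                 // bump to the next block
       return (cur & ~mask) + low;
     }

     // E.g. with max_range = 5 (mask = 31):
     //   align_to_base (1000, 37, 31) == 1029   (1029 & 31 == 37 & 31)
     //   align_to_base (1029, 37, 31) == 1029   (already aligned)
     static void
     check ()
     {
       assert (align_to_base (1000, 37, 31) == 1029);
       assert (align_to_base (1029, 37, 31) == 1029);
     }

   The per-span delta is then the aligned offset minus the span's base;
   since that delta is a multiple of 1 << max_range, every map start in
   the span keeps its low range bits, which is what the assertion above
   checks.  */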
15883 
15884 bool
15885 module_state::read_prepare_maps (const module_state_config *cfg)
15886 {
15887   location_t ordinary = line_table->highest_location + 1;
15888   ordinary = ((ordinary + (1u << cfg->ordinary_loc_align))
15889 	      & ~((1u << cfg->ordinary_loc_align) - 1));
15890   ordinary += cfg->ordinary_locs;
15891 
15892   location_t macro = LINEMAPS_MACRO_LOWEST_LOCATION (line_table);
15893   macro -= cfg->macro_locs;
15894 
15895   if (ordinary < LINE_MAP_MAX_LOCATION_WITH_COLS
15896       && macro >= LINE_MAP_MAX_LOCATION)
15897     /* OK, we have enough locations.  */
15898     return true;
15899 
15900   ordinary_locs.first = ordinary_locs.second = 0;
15901   macro_locs.first = macro_locs.second = 0;
15902 
15903   static bool informed = false;
15904   if (!informed)
15905     {
15906       /* Just give the notice once.  */
15907       informed = true;
15908       inform (loc, "unable to represent further imported source locations");
15909     }
15910 
15911   return false;
15912 }
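
/* Illustrative sketch only: the reservation check performed above, with
   plain unsigned arithmetic standing in for location_t; ord_limit and
   mac_limit stand in for the line-table limits used by the real code.

     static bool
     can_reserve (unsigned ord_hwm, unsigned ord_align, unsigned ord_count,
                  unsigned mac_lwm, unsigned mac_count,
                  unsigned ord_limit, unsigned mac_limit)
     {
       // Round the next free ordinary location up to the writer's
       // alignment, then add the locations the import needs.
       unsigned align = 1u << ord_align;
       unsigned ordinary = (ord_hwm + 1 + align) & ~(align - 1);
       ordinary += ord_count;

       // Macro locations are carved downwards from the top.
       unsigned macro = mac_lwm - mac_count;

       return ordinary < ord_limit && macro >= mac_limit;
     }

   On failure the real code zeroes this module's location ranges and
   emits the "unable to represent further imported source locations"
   note once per compilation.  */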
15913 
15914 /* Write the location maps.  This also determines the shifts for the
15915    location spans.  */
15916 
15917 void
15918 module_state::write_ordinary_maps (elf_out *to, location_map_info &info,
15919 				   module_state_config *cfg, bool has_partitions,
15920 				   unsigned *crc_p)
15921 {
15922   dump () && dump ("Writing ordinary location maps");
15923   dump.indent ();
15924 
15925   vec<const char *> filenames;
15926   filenames.create (20);
15927 
15928   /* Determine the unique filenames.  */
15929   // FIXME:QOI We should find the set of filenames when working out
15930   // which locations we actually need.  See write_prepare_maps.
15931   for (unsigned ix = loc_spans::SPAN_FIRST; ix != spans.length (); ix++)
15932     {
15933       loc_spans::span &span = spans[ix];
15934       line_map_ordinary const *omap
15935 	= linemap_check_ordinary (linemap_lookup (line_table,
15936 						  span.ordinary.first));
15937 
15938       /* We should exactly match up.  */
15939       gcc_checking_assert (MAP_START_LOCATION (omap) == span.ordinary.first);
15940 
15941       for (; MAP_START_LOCATION (omap) < span.ordinary.second; omap++)
15942 	{
15943 	  const char *fname = ORDINARY_MAP_FILE_NAME (omap);
15944 
15945 	  /* We should never find a module linemap in an interval.  */
15946 	  gcc_checking_assert (!MAP_MODULE_P (omap));
15947 
15948 	  /* We expect very few filenames, so just an array.
15949 	     (Not true when headers are still in play :()  */
15950 	  for (unsigned jx = filenames.length (); jx--;)
15951 	    {
15952 	      const char *name = filenames[jx];
15953 	      if (0 == strcmp (name, fname))
15954 		{
15955 		  /* Reset the linemap's name, because for things like
15956 		     preprocessed input we could have multiple
15957 		     instances of the same name, and we'd rather not
15958 		     percolate that.  */
15959 		  const_cast<line_map_ordinary *> (omap)->to_file = name;
15960 		  fname = NULL;
15961 		  break;
15962 		}
15963 	    }
15964 	  if (fname)
15965 	    filenames.safe_push (fname);
15966 	}
15967     }
15968 
15969   bytes_out sec (to);
15970   sec.begin ();
15971 
15972   /* Write the filenames.  */
15973   unsigned len = filenames.length ();
15974   sec.u (len);
15975   dump () && dump ("%u source file names", len);
15976   for (unsigned ix = 0; ix != len; ix++)
15977     {
15978       const char *fname = filenames[ix];
15979       dump (dumper::LOCATION) && dump ("Source file[%u]=%s", ix, fname);
15980       sec.str (fname);
15981     }
15982 
15983   location_t offset = spans[loc_spans::SPAN_FIRST].ordinary.first;
15984   location_t range_mask = (1u << info.max_range) - 1;
15985 
15986   dump () && dump ("Ordinary maps:%u, range bits:%u, preserve:%x, zero:%u",
15987 		   info.num_maps.first, info.max_range, offset & range_mask,
15988 		   offset & ~range_mask);
15989   sec.u (info.num_maps.first);	/* Num maps.  */
15990   sec.u (info.max_range);		/* Maximum range bits  */
15991   sec.u (offset & range_mask);	/* Bits to preserve.  */
15992   sec.u (offset & ~range_mask);
15993 
15994   for (unsigned ix = loc_spans::SPAN_FIRST; ix != spans.length (); ix++)
15995     {
15996       loc_spans::span &span = spans[ix];
15997       line_map_ordinary const *omap
15998 	= linemap_check_ordinary (linemap_lookup (line_table,
15999 						  span.ordinary.first));
16000       for (; MAP_START_LOCATION (omap) < span.ordinary.second; omap++)
16001 	{
16002 	  location_t start_loc = MAP_START_LOCATION (omap);
16003 	  unsigned to = start_loc + span.ordinary_delta;
16004 
16005 	  dump (dumper::LOCATION)
16006 	    && dump ("Span:%u ordinary [%u,%u)->%u", ix, start_loc,
16007 		     MAP_START_LOCATION (omap + 1), to);
16008 
16009 	  /* There should be no change in the low order bits.  */
16010 	  gcc_checking_assert (((start_loc ^ to) & range_mask) == 0);
16011 	  sec.u (to);
16012 
16013 	  /* Making accessors just for here seems excessive.  */
16014 	  sec.u (omap->reason);
16015 	  sec.u (omap->sysp);
16016 	  sec.u (omap->m_range_bits);
16017 	  sec.u (omap->m_column_and_range_bits - omap->m_range_bits);
16018 
16019 	  const char *fname = ORDINARY_MAP_FILE_NAME (omap);
16020 	  for (unsigned ix = 0; ix != filenames.length (); ix++)
16021 	    if (filenames[ix] == fname)
16022 	      {
16023 		sec.u (ix);
16024 		break;
16025 	      }
16026 	  sec.u (ORDINARY_MAP_STARTING_LINE_NUMBER (omap));
16027 
16028 	  /* Write the included from location, which means reading it
16029 	     while reading in the ordinary maps.  So we'd better not
16030 	     be getting ahead of ourselves.  */
16031 	  location_t from = linemap_included_from (omap);
16032 	  gcc_checking_assert (from < MAP_START_LOCATION (omap));
16033 	  if (from != UNKNOWN_LOCATION && has_partitions)
16034 	    {
16035 	      /* A partition's span will have a from pointing at a
16036 		 MODULE_INC.  Find that map's from.  */
16037 	      line_map_ordinary const *fmap
16038 		= linemap_check_ordinary (linemap_lookup (line_table, from));
16039 	      if (MAP_MODULE_P (fmap))
16040 		from = linemap_included_from (fmap);
16041 	    }
16042 	  write_location (sec, from);
16043 	}
16044       /* The ending serialized value.  */
16045       offset = MAP_START_LOCATION (omap) + span.ordinary_delta;
16046     }
16047   dump () && dump ("Ordinary location hwm:%u", offset);
16048   sec.u (offset);
16049 
16050   // Record number of locations and alignment.
16051   cfg->ordinary_loc_align = info.max_range;
16052   cfg->ordinary_locs = offset;
16053 
16054   filenames.release ();
16055 
16056   sec.end (to, to->name (MOD_SNAME_PFX ".olm"), crc_p);
16057   dump.outdent ();
16058 }
16059 
16060 void
module_state::write_macro_maps (elf_out *to, location_map_info &info,
				module_state_config *cfg, unsigned *crc_p)
16063 {
16064   dump () && dump ("Writing macro location maps");
16065   dump.indent ();
16066 
16067   bytes_out sec (to);
16068   sec.begin ();
16069 
16070   dump () && dump ("Macro maps:%u", info.num_maps.second);
16071   sec.u (info.num_maps.second);
16072 
16073   location_t offset = spans[loc_spans::SPAN_FIRST].macro.second;
16074   sec.u (offset);
16075 
16076   unsigned macro_num = 0;
16077   for (unsigned ix = loc_spans::SPAN_FIRST; ix != spans.length (); ix++)
16078     {
16079       loc_spans::span &span = spans[ix];
16080       if (span.macro.first == span.macro.second)
16081 	/* Empty span.  */
16082 	continue;
16083 
16084       for (unsigned macro
16085 	     = linemap_lookup_macro_index (line_table, span.macro.second - 1);
16086 	   macro < LINEMAPS_MACRO_USED (line_table);
16087 	   macro++)
16088 	{
16089 	  line_map_macro const *mmap
16090 	    = LINEMAPS_MACRO_MAP_AT (line_table, macro);
16091 	  location_t start_loc = MAP_START_LOCATION (mmap);
16092 	  if (start_loc < span.macro.first)
16093 	    /* Fallen out of the span.  */
16094 	    break;
16095 
16096 	  if (!mmap->n_tokens)
16097 	    /* Empty expansion.  */
16098 	    continue;
16099 
16100 	  sec.u (offset);
16101 	  sec.u (mmap->n_tokens);
16102 	  sec.cpp_node (mmap->macro);
16103 	  write_location (sec, mmap->expansion);
16104 	  const location_t *locs = mmap->macro_locations;
16105 	  /* There are lots of identical runs.  */
16106 	  location_t prev = UNKNOWN_LOCATION;
16107 	  unsigned count = 0;
16108 	  unsigned runs = 0;
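	  /* The encoding walks the 2 * n_tokens location slots from the
	     highest index down, emitting the length of the previous run
	     followed by the location starting each new run, and a final
	     run length after the loop.  read_macro_maps replays the
	     slots in the same order.  */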
16109 	  for (unsigned jx = mmap->n_tokens * 2; jx--;)
16110 	    {
16111 	      location_t tok_loc = locs[jx];
16112 	      if (tok_loc == prev)
16113 		{
16114 		  count++;
16115 		  continue;
16116 		}
16117 	      runs++;
16118 	      sec.u (count);
16119 	      count = 1;
16120 	      prev = tok_loc;
16121 	      write_location (sec, tok_loc);
16122 	    }
16123 	  sec.u (count);
16124 	  dump (dumper::LOCATION)
16125 	    && dump ("Span:%u macro:%u %I %u/%u*2 locations [%u,%u)->%u",
16126 		     ix, macro_num, identifier (mmap->macro),
16127 		     runs, mmap->n_tokens,
16128 		     start_loc, start_loc + mmap->n_tokens,
16129 		     start_loc + span.macro_delta);
16130 	  macro_num++;
16131 	  offset -= mmap->n_tokens;
16132 	  gcc_checking_assert (offset == start_loc + span.macro_delta);
16133 	}
16134     }
16135   dump () && dump ("Macro location lwm:%u", offset);
16136   sec.u (offset);
16137   gcc_assert (macro_num == info.num_maps.second);
16138 
16139   cfg->macro_locs = MAX_LOCATION_T + 1 - offset;
16140 
16141   sec.end (to, to->name (MOD_SNAME_PFX ".mlm"), crc_p);
16142   dump.outdent ();
16143 }
16144 
16145 bool
module_state::read_ordinary_maps ()
16147 {
16148   bytes_in sec;
16149 
16150   if (!sec.begin (loc, from (), MOD_SNAME_PFX ".olm"))
16151     return false;
16152   dump () && dump ("Reading ordinary location maps");
16153   dump.indent ();
16154 
16155   /* Read the filename table.  */
16156   unsigned len = sec.u ();
16157   dump () && dump ("%u source file names", len);
16158   vec<const char *> filenames;
16159   filenames.create (len);
16160   for (unsigned ix = 0; ix != len; ix++)
16161     {
16162       size_t l;
16163       const char *buf = sec.str (&l);
16164       char *fname = XNEWVEC (char, l + 1);
16165       memcpy (fname, buf, l + 1);
16166       dump (dumper::LOCATION) && dump ("Source file[%u]=%s", ix, fname);
16167       /* We leak these names into the line-map table.  But it
16168 	 doesn't own them.  */
16169       filenames.quick_push (fname);
16170     }
16171 
16172   unsigned num_ordinary = sec.u ();
16173   unsigned max_range = sec.u ();
16174   unsigned low_bits = sec.u ();
16175   location_t zero = sec.u ();
16176   location_t range_mask = (1u << max_range) - 1;
16177 
16178   dump () && dump ("Ordinary maps:%u, range bits:%u, preserve:%x, zero:%u",
16179 		   num_ordinary, max_range, low_bits, zero);
16180 
16181   location_t offset = line_table->highest_location + 1;
16182   /* Ensure offset doesn't go backwards at the start.  */
16183   if ((offset & range_mask) > low_bits)
16184     offset += range_mask + 1;
16185   offset = (offset & ~range_mask);
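  /* For example (illustrative numbers only): with max_range of 5 the
     mask is 0x1f; if the next free location is 0x127 but the low bits
     to preserve are 0x03, the check above bumps us to 0x147 and the
     masking leaves 0x140, so remapped locations keep their original
     low-order range bits.  */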
16186 
16187   bool propagated = spans.maybe_propagate (this, offset + low_bits);
16188 
16189   line_map_ordinary *maps = static_cast<line_map_ordinary *>
16190     (line_map_new_raw (line_table, false, num_ordinary));
16191 
16192   location_t lwm = offset;
16193   slurp->loc_deltas.first = offset - zero;
16194   ordinary_locs.first = zero + low_bits + slurp->loc_deltas.first;
16195   dump () && dump ("Ordinary loc delta %d", slurp->loc_deltas.first);
16196 
16197   for (unsigned ix = 0; ix != num_ordinary && !sec.get_overrun (); ix++)
16198     {
16199       line_map_ordinary *map = &maps[ix];
16200       unsigned hwm = sec.u ();
16201 
16202       /* Record the current HWM so that the below read_location is
16203 	 ok.  */
16204       ordinary_locs.second = hwm + slurp->loc_deltas.first;
16205       map->start_location = hwm + (offset - zero);
16206       if (map->start_location < lwm)
16207 	sec.set_overrun ();
16208       lwm = map->start_location;
16209       dump (dumper::LOCATION) && dump ("Map:%u %u->%u", ix, hwm, lwm);
16210       map->reason = lc_reason (sec.u ());
16211       map->sysp = sec.u ();
16212       map->m_range_bits = sec.u ();
16213       map->m_column_and_range_bits = map->m_range_bits + sec.u ();
16214 
16215       unsigned fnum = sec.u ();
16216       map->to_file = (fnum < filenames.length () ? filenames[fnum] : "");
16217       map->to_line = sec.u ();
16218 
16219       /* Root the outermost map at our location.  */
16220       location_t from = read_location (sec);
16221       map->included_from = from != UNKNOWN_LOCATION ? from : loc;
16222     }
16223 
16224   location_t hwm = sec.u ();
16225   ordinary_locs.second = hwm + slurp->loc_deltas.first;
16226 
16227   /* highest_location is the one handed out, not the next one to
16228      hand out.  */
16229   line_table->highest_location = ordinary_locs.second - 1;
16230 
16231   if (line_table->highest_location >= LINE_MAP_MAX_LOCATION_WITH_COLS)
16232     /* We shouldn't run out of locations, as we checked before
16233        starting.  */
16234     sec.set_overrun ();
16235   dump () && dump ("Ordinary location hwm:%u", ordinary_locs.second);
16236 
16237   if (propagated)
16238     spans.close ();
16239 
16240   filenames.release ();
16241 
16242   dump.outdent ();
16243   if (!sec.end (from ()))
16244     return false;
16245 
16246   return true;
16247 }
16248 
16249 bool
module_state::read_macro_maps ()
16251 {
16252   bytes_in sec;
16253 
16254   if (!sec.begin (loc, from (), MOD_SNAME_PFX ".mlm"))
16255     return false;
16256   dump () && dump ("Reading macro location maps");
16257   dump.indent ();
16258 
16259   unsigned num_macros = sec.u ();
16260   location_t zero = sec.u ();
16261   dump () && dump ("Macro maps:%u zero:%u", num_macros, zero);
16262 
16263   bool propagated = spans.maybe_propagate (this,
16264 					   line_table->highest_location + 1);
16265 
16266   location_t offset = LINEMAPS_MACRO_LOWEST_LOCATION (line_table);
16267   slurp->loc_deltas.second = zero - offset;
16268   macro_locs.second = zero - slurp->loc_deltas.second;
16269   dump () && dump ("Macro loc delta %d", slurp->loc_deltas.second);
16270 
16271   for (unsigned ix = 0; ix != num_macros && !sec.get_overrun (); ix++)
16272     {
16273       unsigned lwm = sec.u ();
16274       /* Record the current LWM so that the below read_location is
16275 	 ok.  */
16276       macro_locs.first = lwm - slurp->loc_deltas.second;
16277 
16278       unsigned n_tokens = sec.u ();
16279       cpp_hashnode *node = sec.cpp_node ();
16280       location_t exp_loc = read_location (sec);
16281 
16282       const line_map_macro *macro
16283 	= linemap_enter_macro (line_table, node, exp_loc, n_tokens);
16284       if (!macro)
16285 	/* We shouldn't run out of locations, as we checked that we
16286 	   had enough before starting.  */
16287 	break;
16288 
16289       location_t *locs = macro->macro_locations;
16290       location_t tok_loc = UNKNOWN_LOCATION;
16291       unsigned count = sec.u ();
16292       unsigned runs = 0;
16293       for (unsigned jx = macro->n_tokens * 2; jx-- && !sec.get_overrun ();)
16294 	{
16295 	  while (!count-- && !sec.get_overrun ())
16296 	    {
16297 	      runs++;
16298 	      tok_loc = read_location (sec);
16299 	      count = sec.u ();
16300 	    }
16301 	  locs[jx] = tok_loc;
16302 	}
16303       if (count)
16304 	sec.set_overrun ();
16305       dump (dumper::LOCATION)
16306 	&& dump ("Macro:%u %I %u/%u*2 locations [%u,%u)",
16307 		 ix, identifier (node), runs, n_tokens,
16308 		 MAP_START_LOCATION (macro),
16309 		 MAP_START_LOCATION (macro) + n_tokens);
16310     }
16311   location_t lwm = sec.u ();
16312   macro_locs.first = lwm - slurp->loc_deltas.second;
16313 
16314   dump () && dump ("Macro location lwm:%u", macro_locs.first);
16315 
16316   if (propagated)
16317     spans.close ();
16318 
16319   dump.outdent ();
16320   if (!sec.end (from ()))
16321     return false;
16322 
16323   return true;
16324 }
16325 
16326 /* Serialize the definition of MACRO.  */
16327 
16328 void
module_state::write_define (bytes_out &sec, const cpp_macro *macro, bool located)
16330 {
16331   sec.u (macro->count);
16332 
16333   sec.b (macro->fun_like);
16334   sec.b (macro->variadic);
16335   sec.b (macro->syshdr);
16336   sec.bflush ();
16337 
16338   if (located)
16339     write_location (sec, macro->line);
16340   if (macro->fun_like)
16341     {
16342       sec.u (macro->paramc);
16343       const cpp_hashnode *const *parms = macro->parm.params;
16344       for (unsigned ix = 0; ix != macro->paramc; ix++)
16345 	sec.cpp_node (parms[ix]);
16346     }
16347 
16348   unsigned len = 0;
16349   for (unsigned ix = 0; ix != macro->count; ix++)
16350     {
16351       const cpp_token *token = &macro->exp.tokens[ix];
16352       if (located)
16353 	write_location (sec, token->src_loc);
16354       sec.u (token->type);
16355       sec.u (token->flags);
16356       switch (cpp_token_val_index (token))
16357 	{
16358 	default:
16359 	  gcc_unreachable ();
16360 
16361 	case CPP_TOKEN_FLD_ARG_NO:
16362 	  /* An argument reference.  */
16363 	  sec.u (token->val.macro_arg.arg_no);
16364 	  sec.cpp_node (token->val.macro_arg.spelling);
16365 	  break;
16366 
16367 	case CPP_TOKEN_FLD_NODE:
16368 	  /* An identifier.  */
16369 	  sec.cpp_node (token->val.node.node);
16370 	  if (token->val.node.spelling == token->val.node.node)
	    /* The spelling will usually be the same, so optimize
	       that.  */
16373 	    sec.str (NULL, 0);
16374 	  else
16375 	    sec.cpp_node (token->val.node.spelling);
16376 	  break;
16377 
16378 	case CPP_TOKEN_FLD_NONE:
16379 	  break;
16380 
16381 	case CPP_TOKEN_FLD_STR:
	  /* A string, number or comment.  Not always NUL terminated;
	     we stream them out as a single concatenation with embedded
	     NULs, as that's a safe default.  */
16385 	  len += token->val.str.len + 1;
16386 	  sec.u (token->val.str.len);
16387 	  break;
16388 
16389 	case CPP_TOKEN_FLD_SOURCE:
16390 	case CPP_TOKEN_FLD_TOKEN_NO:
16391 	case CPP_TOKEN_FLD_PRAGMA:
16392 	  /* These do not occur inside a macro itself.  */
16393 	  gcc_unreachable ();
16394 	}
16395     }
16396 
16397   if (len)
16398     {
16399       char *ptr = reinterpret_cast<char *> (sec.buf (len));
16400       len = 0;
16401       for (unsigned ix = 0; ix != macro->count; ix++)
16402 	{
16403 	  const cpp_token *token = &macro->exp.tokens[ix];
16404 	  if (cpp_token_val_index (token) == CPP_TOKEN_FLD_STR)
16405 	    {
16406 	      memcpy (ptr + len, token->val.str.text,
16407 		      token->val.str.len);
16408 	      len += token->val.str.len;
16409 	      ptr[len++] = 0;
16410 	    }
16411 	}
16412     }
16413 }
16414 
16415 /* Read a macro definition.  */
16416 
16417 cpp_macro *
module_state::read_define (bytes_in &sec, cpp_reader *reader, bool located) const
16419 {
16420   unsigned count = sec.u ();
  /* We rely on knowing cpp_reader's hash table is ident_hash, and
     its subobject allocator is stringpool_ggc_alloc, which is just a
     wrapper for ggc_alloc_atomic.  */
16424   cpp_macro *macro
16425     = (cpp_macro *)ggc_alloc_atomic (sizeof (cpp_macro)
16426 				     + sizeof (cpp_token) * (count - !!count));
16427   memset (macro, 0, sizeof (cpp_macro) + sizeof (cpp_token) * (count - !!count));
16428 
16429   macro->count = count;
16430   macro->kind = cmk_macro;
16431   macro->imported_p = true;
16432 
16433   macro->fun_like = sec.b ();
16434   macro->variadic = sec.b ();
16435   macro->syshdr = sec.b ();
16436   sec.bflush ();
16437 
16438   macro->line = located ? read_location (sec) : loc;
16439 
16440   if (macro->fun_like)
16441     {
16442       unsigned paramc = sec.u ();
16443       cpp_hashnode **params
16444 	= (cpp_hashnode **)ggc_alloc_atomic (sizeof (cpp_hashnode *) * paramc);
16445       macro->paramc = paramc;
16446       macro->parm.params = params;
16447       for (unsigned ix = 0; ix != paramc; ix++)
16448 	params[ix] = sec.cpp_node ();
16449     }
16450 
16451   unsigned len = 0;
16452   for (unsigned ix = 0; ix != count && !sec.get_overrun (); ix++)
16453     {
16454       cpp_token *token = &macro->exp.tokens[ix];
16455       token->src_loc = located ? read_location (sec) : loc;
16456       token->type = cpp_ttype (sec.u ());
16457       token->flags = sec.u ();
16458       switch (cpp_token_val_index (token))
16459 	{
16460 	default:
16461 	  sec.set_overrun ();
16462 	  break;
16463 
16464 	case CPP_TOKEN_FLD_ARG_NO:
16465 	  /* An argument reference.  */
16466 	  {
16467 	    unsigned arg_no = sec.u ();
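	    /* Argument numbers are 1-based, so the wrapping subtraction
	       below also rejects a streamed value of zero.  */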
16468 	    if (arg_no - 1 >= macro->paramc)
16469 	      sec.set_overrun ();
16470 	    token->val.macro_arg.arg_no = arg_no;
16471 	    token->val.macro_arg.spelling = sec.cpp_node ();
16472 	  }
16473 	  break;
16474 
16475 	case CPP_TOKEN_FLD_NODE:
16476 	  /* An identifier.  */
16477 	  token->val.node.node = sec.cpp_node ();
16478 	  token->val.node.spelling = sec.cpp_node ();
16479 	  if (!token->val.node.spelling)
16480 	    token->val.node.spelling = token->val.node.node;
16481 	  break;
16482 
16483 	case CPP_TOKEN_FLD_NONE:
16484 	  break;
16485 
16486 	case CPP_TOKEN_FLD_STR:
16487 	  /* A string, number or comment.  */
16488 	  token->val.str.len = sec.u ();
16489 	  len += token->val.str.len + 1;
16490 	  break;
16491 	}
16492     }
16493 
16494   if (len)
16495     if (const char *ptr = reinterpret_cast<const char *> (sec.buf (len)))
16496       {
16497 	/* There should be a final NUL.  */
16498 	if (ptr[len-1])
16499 	  sec.set_overrun ();
16500 	/* cpp_alloc_token_string will add a final NUL.  */
16501 	const unsigned char *buf
16502 	  = cpp_alloc_token_string (reader, (const unsigned char *)ptr, len - 1);
16503 	len = 0;
16504 	for (unsigned ix = 0; ix != count && !sec.get_overrun (); ix++)
16505 	  {
16506 	    cpp_token *token = &macro->exp.tokens[ix];
16507 	    if (cpp_token_val_index (token) == CPP_TOKEN_FLD_STR)
16508 	      {
16509 		token->val.str.text = buf + len;
16510 		len += token->val.str.len;
16511 		if (buf[len++])
16512 		  sec.set_overrun ();
16513 	      }
16514 	  }
16515       }
16516 
16517   if (sec.get_overrun ())
16518     return NULL;
16519   return macro;
16520 }
16521 
16522 /* Exported macro data.  */
16523 struct GTY(()) macro_export {
16524   cpp_macro *def;
16525   location_t undef_loc;
16526 
  macro_export ()
16528     :def (NULL), undef_loc (UNKNOWN_LOCATION)
16529   {
16530   }
16531 };
16532 
16533 /* Imported macro data.  */
16534 class macro_import {
16535 public:
16536   struct slot {
16537 #if defined (WORDS_BIGENDIAN) && SIZEOF_VOID_P == 8
16538     int offset;
16539 #endif
16540     /* We need to ensure we don't use the LSB for representation, as
16541        that's the union discriminator below.  */
16542     unsigned bits;
16543 
16544 #if !(defined (WORDS_BIGENDIAN) && SIZEOF_VOID_P == 8)
16545     int offset;
16546 #endif
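    /* The conditional layout above keeps BITS overlapping the
       low-order bytes of macro_import's pointer member, so testing
       the low bits really does discriminate the union below.  */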
16547 
16548   public:
16549     enum Layout {
16550       L_DEF = 1,
16551       L_UNDEF = 2,
16552       L_BOTH = 3,
16553       L_MODULE_SHIFT = 2
16554     };
16555 
16556   public:
16557     /* Not a regular ctor, because we put it in a union, and that's
16558        not allowed in C++ 98.  */
    static slot ctor (unsigned module, unsigned defness)
16560     {
16561       gcc_checking_assert (defness);
16562       slot s;
16563       s.bits = defness | (module << L_MODULE_SHIFT);
16564       s.offset = -1;
16565       return s;
16566     }
16567 
16568   public:
    unsigned get_defness () const
16570     {
16571       return bits & L_BOTH;
16572     }
    unsigned get_module () const
16574     {
16575       return bits >> L_MODULE_SHIFT;
16576     }
    void become_undef ()
16578     {
16579       bits &= ~unsigned (L_DEF);
16580       bits |= unsigned (L_UNDEF);
16581     }
16582   };
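  /* Thus, for example, a slot for module 5 recording both a #define
     and an #undef has bits == (5 << L_MODULE_SHIFT) | L_BOTH.  Its
     offset indexes macro_exports for module zero, or the module's
     macro definition section otherwise.  */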
16583 
16584 private:
16585   typedef vec<slot, va_heap, vl_embed> ary_t;
16586   union either {
16587     /* Discriminated by bits 0|1 != 0.  The expected case is that
16588        there will be exactly one slot per macro, hence the effort of
16589        packing that.  */
16590     ary_t *ary;
16591     slot single;
16592   } u;
16593 
16594 public:
  macro_import ()
16596   {
16597     u.ary = NULL;
16598   }
16599 
16600 private:
  bool single_p () const
16602   {
16603     return u.single.bits & slot::L_BOTH;
16604   }
  bool occupied_p () const
16606   {
16607     return u.ary != NULL;
16608   }
16609 
16610 public:
  unsigned length () const
16612   {
16613     gcc_checking_assert (occupied_p ());
16614     return single_p () ? 1 : u.ary->length ();
16615   }
  slot &operator[] (unsigned ix)
16617   {
16618     gcc_checking_assert (occupied_p ());
16619     if (single_p ())
16620       {
16621 	gcc_checking_assert (!ix);
16622 	return u.single;
16623       }
16624     else
16625       return (*u.ary)[ix];
16626   }
16627 
16628 public:
16629   slot &exported ();
16630   slot &append (unsigned module, unsigned defness);
16631 };
16632 
/* Append a new slot for import MODULE with DEFNESS.  If we're an
   empty set, initialize us.  */
16635 
16636 macro_import::slot &
macro_import::append (unsigned module, unsigned defness)
16638 {
16639   if (!occupied_p ())
16640     {
16641       u.single = slot::ctor (module, defness);
16642       return u.single;
16643     }
16644   else
16645     {
16646       bool single = single_p ();
16647       ary_t *m = single ? NULL : u.ary;
16648       vec_safe_reserve (m, 1 + single);
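      /* Reserve room for the new slot, plus the existing packed slot
	 if we are converting from the single representation.  */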
16649       if (single)
16650 	m->quick_push (u.single);
16651       u.ary = m;
16652       return *u.ary->quick_push (slot::ctor (module, defness));
16653     }
16654 }
16655 
16656 /* We're going to export something.  Make sure the first import slot
16657    is us.  */
16658 
16659 macro_import::slot &
macro_import::exported ()
16661 {
16662   if (occupied_p () && !(*this)[0].get_module ())
16663     {
16664       slot &res = (*this)[0];
16665       res.bits |= slot::L_DEF;
16666       return res;
16667     }
16668 
16669   slot *a = &append (0, slot::L_DEF);
16670   if (!single_p ())
16671     {
16672       slot &f = (*this)[0];
16673       std::swap (f, *a);
16674       a = &f;
16675     }
16676   return *a;
16677 }
16678 
/* The imported (& exported) macros.  cpp_hashnode's deferred field
   indexes this array (offset by 1, so zero means 'not present').  */
16681 
16682 static vec<macro_import, va_heap, vl_embed> *macro_imports;
16683 
16684 /* The exported macros.  A macro_import slot's zeroth element's offset
16685    indexes this array.  If the zeroth slot is not for module zero,
16686    there is no export.  */
16687 
16688 static GTY(()) vec<macro_export, va_gc> *macro_exports;
16689 
16690 /* The reachable set of header imports from this TU.  */
16691 
16692 static GTY(()) bitmap headers;
16693 
16694 /* Get the (possibly empty) macro imports for NODE.  */
16695 
16696 static macro_import &
get_macro_imports (cpp_hashnode *node)
16698 {
16699   if (node->deferred)
16700     return (*macro_imports)[node->deferred - 1];
16701 
16702   vec_safe_reserve (macro_imports, 1);
16703   node->deferred = macro_imports->length () + 1;
16704   return *vec_safe_push (macro_imports, macro_import ());
16705 }
16706 
/* Get (creating if necessary) the macro export for SLOT.  */
16708 
16709 static macro_export &
get_macro_export (macro_import::slot &slot)
16711 {
16712   if (slot.offset >= 0)
16713     return (*macro_exports)[slot.offset];
16714 
16715   vec_safe_reserve (macro_exports, 1);
16716   slot.offset = macro_exports->length ();
16717   return *macro_exports->quick_push (macro_export ());
16718 }
16719 
16720 /* If NODE is an exportable macro, add it to the export set.  */
16721 
16722 static int
maybe_add_macro (cpp_reader *, cpp_hashnode *node, void *data_)
16724 {
16725   bool exporting = false;
16726 
16727   if (cpp_user_macro_p (node))
16728     if (cpp_macro *macro = node->value.macro)
      /* Ignore imported, builtin, command-line and forced-header macros.  */
16730       if (!macro->imported_p
16731 	  && !macro->lazy && macro->line >= spans.main_start ())
16732 	{
16733 	  gcc_checking_assert (macro->kind == cmk_macro);
16734 	  /* I don't want to deal with this corner case, that I suspect is
16735 	     a devil's advocate reading of the standard.  */
16736 	  gcc_checking_assert (!macro->extra_tokens);
16737 
16738 	  macro_import::slot &slot = get_macro_imports (node).exported ();
16739 	  macro_export &exp = get_macro_export (slot);
16740 	  exp.def = macro;
16741 	  exporting = true;
16742 	}
16743 
16744   if (!exporting && node->deferred)
16745     {
16746       macro_import &imports = (*macro_imports)[node->deferred - 1];
16747       macro_import::slot &slot = imports[0];
16748       if (!slot.get_module ())
16749 	{
16750 	  gcc_checking_assert (slot.get_defness ());
16751 	  exporting = true;
16752 	}
16753     }
16754 
16755   if (exporting)
16756     static_cast<vec<cpp_hashnode *> *> (data_)->safe_push (node);
16757 
16758   return 1; /* Don't stop.  */
16759 }
16760 
16761 /* Order cpp_hashnodes A_ and B_ by their exported macro locations.  */
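/* The comparison yields a descending location order; write_macros then
   walks the sorted array from the back, so definitions are streamed in
   ascending source-location order.  */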
16762 
16763 static int
macro_loc_cmp (const void *a_, const void *b_)
16765 {
16766   const cpp_hashnode *node_a = *(const cpp_hashnode *const *)a_;
16767   macro_import &import_a = (*macro_imports)[node_a->deferred - 1];
16768   const macro_export &export_a = (*macro_exports)[import_a[0].offset];
16769   location_t loc_a = export_a.def ? export_a.def->line : export_a.undef_loc;
16770 
16771   const cpp_hashnode *node_b = *(const cpp_hashnode *const *)b_;
16772   macro_import &import_b = (*macro_imports)[node_b->deferred - 1];
16773   const macro_export &export_b = (*macro_exports)[import_b[0].offset];
16774   location_t loc_b = export_b.def ? export_b.def->line : export_b.undef_loc;
16775 
16776   if (loc_a < loc_b)
16777     return +1;
16778   else if (loc_a > loc_b)
16779     return -1;
16780   else
16781     return 0;
16782 }
16783 
16784 /* Write out the exported defines.  This is two sections, one
16785    containing the definitions, the other a table of node names.  */
16786 
16787 unsigned
module_state::write_macros (elf_out *to, cpp_reader *reader, unsigned *crc_p)
16789 {
16790   dump () && dump ("Writing macros");
16791   dump.indent ();
16792 
16793   vec<cpp_hashnode *> macros;
16794   macros.create (100);
16795   cpp_forall_identifiers (reader, maybe_add_macro, &macros);
16796 
16797   dump (dumper::MACRO) && dump ("No more than %u macros", macros.length ());
16798 
16799   macros.qsort (macro_loc_cmp);
16800 
16801   /* Write the defs */
16802   bytes_out sec (to);
16803   sec.begin ();
16804 
16805   unsigned count = 0;
16806   for (unsigned ix = macros.length (); ix--;)
16807     {
16808       cpp_hashnode *node = macros[ix];
16809       macro_import::slot &slot = (*macro_imports)[node->deferred - 1][0];
16810       gcc_assert (!slot.get_module () && slot.get_defness ());
16811 
16812       macro_export &mac = (*macro_exports)[slot.offset];
16813       gcc_assert (!!(slot.get_defness () & macro_import::slot::L_UNDEF)
16814 		  == (mac.undef_loc != UNKNOWN_LOCATION)
16815 		  && !!(slot.get_defness () & macro_import::slot::L_DEF)
16816 		  == (mac.def != NULL));
16817 
16818       if (IDENTIFIER_KEYWORD_P (identifier (node)))
16819 	{
16820 	  warning_at (mac.def->line, 0,
16821 		      "not exporting %<#define %E%> as it is a keyword",
16822 		      identifier (node));
16823 	  slot.offset = 0;
16824 	  continue;
16825 	}
16826 
16827       count++;
16828       slot.offset = sec.pos;
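      /* Remember where this definition lands in the section; the name
	 table written below records this offset so readers can seek
	 straight to it.  */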
16829       dump (dumper::MACRO)
16830 	&& dump ("Writing macro %s%s%s %I at %u",
16831 		 slot.get_defness () & macro_import::slot::L_UNDEF
16832 		 ? "#undef" : "",
16833 		 slot.get_defness () == macro_import::slot::L_BOTH
16834 		 ? " & " : "",
16835 		 slot.get_defness () & macro_import::slot::L_DEF
16836 		 ? "#define" : "",
16837 		 identifier (node), slot.offset);
16838       if (mac.undef_loc != UNKNOWN_LOCATION)
16839 	write_location (sec, mac.undef_loc);
16840       if (mac.def)
16841 	write_define (sec, mac.def);
16842     }
16843   sec.end (to, to->name (MOD_SNAME_PFX ".def"), crc_p);
16844 
16845   if (count)
16846     {
16847       /* Write the table.  */
16848       bytes_out sec (to);
16849       sec.begin ();
16850       sec.u (count);
16851 
16852       for (unsigned ix = macros.length (); ix--;)
16853 	{
16854 	  const cpp_hashnode *node = macros[ix];
16855 	  macro_import::slot &slot = (*macro_imports)[node->deferred - 1][0];
16856 
16857 	  if (slot.offset)
16858 	    {
16859 	      sec.cpp_node (node);
16860 	      sec.u (slot.get_defness ());
16861 	      sec.u (slot.offset);
16862 	    }
16863 	}
16864       sec.end (to, to->name (MOD_SNAME_PFX ".mac"), crc_p);
16865     }
16866 
16867   macros.release ();
16868   dump.outdent ();
16869   return count;
16870 }
16871 
16872 bool
module_state::read_macros ()
16874 {
16875   /* Get the def section.  */
16876   if (!slurp->macro_defs.begin (loc, from (), MOD_SNAME_PFX ".def"))
16877     return false;
16878 
16879   /* Get the tbl section, if there are defs. */
16880   if (slurp->macro_defs.more_p ()
16881       && !slurp->macro_tbl.begin (loc, from (), MOD_SNAME_PFX ".mac"))
16882     return false;
16883 
16884   return true;
16885 }
16886 
16887 /* Install the macro name table.  */
16888 
16889 void
module_state::install_macros ()
16891 {
16892   bytes_in &sec = slurp->macro_tbl;
16893   if (!sec.size)
16894     return;
16895 
16896   dump () && dump ("Reading macro table %M", this);
16897   dump.indent ();
16898 
16899   unsigned count = sec.u ();
16900   dump () && dump ("%u macros", count);
16901   while (count--)
16902     {
16903       cpp_hashnode *node = sec.cpp_node ();
16904       macro_import &imp = get_macro_imports (node);
16905       unsigned flags = sec.u () & macro_import::slot::L_BOTH;
16906       if (!flags)
16907 	sec.set_overrun ();
16908 
16909       if (sec.get_overrun ())
16910 	break;
16911 
16912       macro_import::slot &slot = imp.append (mod, flags);
16913       slot.offset = sec.u ();
16914 
16915       dump (dumper::MACRO)
16916 	&& dump ("Read %s macro %s%s%s %I at %u",
16917 		 imp.length () > 1 ? "add" : "new",
16918 		 flags & macro_import::slot::L_UNDEF ? "#undef" : "",
16919 		 flags == macro_import::slot::L_BOTH ? " & " : "",
16920 		 flags & macro_import::slot::L_DEF ? "#define" : "",
16921 		 identifier (node), slot.offset);
16922 
16923       /* We'll leak an imported definition's TOKEN_FLD_STR's data
16924 	 here.  But that only happens when we've had to resolve the
16925 	 deferred macro before this import -- why are you doing
16926 	 that?  */
16927       if (cpp_macro *cur = cpp_set_deferred_macro (node))
16928 	if (!cur->imported_p)
16929 	  {
16930 	    macro_import::slot &slot = imp.exported ();
16931 	    macro_export &exp = get_macro_export (slot);
16932 	    exp.def = cur;
16933 	    dump (dumper::MACRO)
16934 	      && dump ("Saving current #define %I", identifier (node));
16935 	  }
16936     }
16937 
16938   /* We're now done with the table.  */
16939   elf_in::release (slurp->from, sec);
16940 
16941   dump.outdent ();
16942 }
16943 
16944 /* Import the transitive macros.  */
16945 
16946 void
module_state::import_macros ()
16948 {
16949   bitmap_ior_into (headers, slurp->headers);
16950 
16951   bitmap_iterator bititer;
16952   unsigned bitnum;
16953   EXECUTE_IF_SET_IN_BITMAP (slurp->headers, 0, bitnum, bititer)
16954     (*modules)[bitnum]->install_macros ();
16955 }
16956 
16957 /* NODE is being undefined at LOC.  Record it in the export table, if
16958    necessary.  */
16959 
16960 void
module_state::undef_macro (cpp_reader *, location_t loc, cpp_hashnode *node)
16962 {
16963   if (!node->deferred)
16964     /* The macro is not imported, so our undef is irrelevant.  */
16965     return;
16966 
16967   unsigned n = dump.push (NULL);
16968 
16969   macro_import::slot &slot = (*macro_imports)[node->deferred - 1].exported ();
16970   macro_export &exp = get_macro_export (slot);
16971 
16972   exp.undef_loc = loc;
16973   slot.become_undef ();
16974   exp.def = NULL;
16975 
16976   dump (dumper::MACRO) && dump ("Recording macro #undef %I", identifier (node));
16977 
16978   dump.pop (n);
16979 }
16980 
/* NODE is a deferred macro node.  Determine the definition and return
   it, or NULL if undefined.  May issue diagnostics.

   This can leak memory when merging declarations -- the string
   contents (TOKEN_FLD_STR) of each definition are allocated on an
   unreclaimable cpp obstack.  Only one will win.  However, I do not
   expect this to be common -- mostly macros have a single point of
   definition.  Perhaps we could restore the obstack to its position
   after the first imported definition (if that wins)?  The macros
   themselves are GC'd.  */
16991 
16992 cpp_macro *
module_state::deferred_macro (cpp_reader *reader, location_t loc,
			      cpp_hashnode *node)
16995 {
16996   macro_import &imports = (*macro_imports)[node->deferred - 1];
16997 
16998   unsigned n = dump.push (NULL);
16999   dump (dumper::MACRO) && dump ("Deferred macro %I", identifier (node));
17000 
17001   bitmap visible (BITMAP_GGC_ALLOC ());
17002 
17003   if (!((imports[0].get_defness () & macro_import::slot::L_UNDEF)
17004 	&& !imports[0].get_module ()))
17005     {
17006       /* Calculate the set of visible header imports.  */
17007       bitmap_copy (visible, headers);
17008       for (unsigned ix = imports.length (); ix--;)
17009 	{
17010 	  const macro_import::slot &slot = imports[ix];
17011 	  unsigned mod = slot.get_module ();
17012 	  if ((slot.get_defness () & macro_import::slot::L_UNDEF)
17013 	      && bitmap_bit_p (visible, mod))
17014 	    {
17015 	      bitmap arg = mod ? (*modules)[mod]->slurp->headers : headers;
17016 	      bitmap_and_compl_into (visible, arg);
17017 	      bitmap_set_bit (visible, mod);
17018 	    }
17019 	}
17020     }
17021   bitmap_set_bit (visible, 0);
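  /* The current TU (module zero) is always visible.  */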
17022 
17023   /* Now find the macros that are still visible.  */
17024   bool failed = false;
17025   cpp_macro *def = NULL;
17026   vec<macro_export> defs;
17027   defs.create (imports.length ());
17028   for (unsigned ix = imports.length (); ix--;)
17029     {
17030       const macro_import::slot &slot = imports[ix];
17031       unsigned mod = slot.get_module ();
17032       if (bitmap_bit_p (visible, mod))
17033 	{
17034 	  macro_export *pushed = NULL;
17035 	  if (mod)
17036 	    {
17037 	      const module_state *imp = (*modules)[mod];
17038 	      bytes_in &sec = imp->slurp->macro_defs;
17039 	      if (!sec.get_overrun ())
17040 		{
17041 		  dump (dumper::MACRO)
17042 		    && dump ("Reading macro %s%s%s %I module %M at %u",
17043 			     slot.get_defness () & macro_import::slot::L_UNDEF
17044 			     ? "#undef" : "",
17045 			     slot.get_defness () == macro_import::slot::L_BOTH
17046 			     ? " & " : "",
17047 			     slot.get_defness () & macro_import::slot::L_DEF
17048 			     ? "#define" : "",
17049 			     identifier (node), imp, slot.offset);
17050 		  sec.random_access (slot.offset);
17051 
17052 		  macro_export exp;
17053 		  if (slot.get_defness () & macro_import::slot::L_UNDEF)
17054 		    exp.undef_loc = imp->read_location (sec);
17055 		  if (slot.get_defness () & macro_import::slot::L_DEF)
17056 		    exp.def = imp->read_define (sec, reader);
17057 		  if (sec.get_overrun ())
17058 		    error_at (loc, "macro definitions of %qE corrupted",
17059 			      imp->name);
17060 		  else
17061 		    pushed = defs.quick_push (exp);
17062 		}
17063 	    }
17064 	  else
17065 	    pushed = defs.quick_push ((*macro_exports)[slot.offset]);
17066 	  if (pushed && pushed->def)
17067 	    {
17068 	      if (!def)
17069 		def = pushed->def;
17070 	      else if (cpp_compare_macros (def, pushed->def))
17071 		failed = true;
17072 	    }
17073 	}
17074     }
17075 
17076   if (failed)
17077     {
17078       /* If LOC is the first loc, this is the end of file check, which
17079 	 is a warning.  */
17080       if (loc == MAP_START_LOCATION (LINEMAPS_ORDINARY_MAP_AT (line_table, 0)))
17081 	warning_at (loc, OPT_Winvalid_imported_macros,
17082 		    "inconsistent imported macro definition %qE",
17083 		    identifier (node));
17084       else
17085 	error_at (loc, "inconsistent imported macro definition %qE",
17086 		  identifier (node));
17087       for (unsigned ix = defs.length (); ix--;)
17088 	{
17089 	  macro_export &exp = defs[ix];
17090 	  if (exp.undef_loc)
17091 	    inform (exp.undef_loc, "%<#undef %E%>", identifier (node));
17092 	  if (exp.def)
17093 	    inform (exp.def->line, "%<#define %s%>",
17094 		    cpp_macro_definition (reader, node, exp.def));
17095 	}
17096       def = NULL;
17097     }
17098 
17099   defs.release ();
17100 
17101   dump.pop (n);
17102 
17103   return def;
17104 }
17105 
17106 /* Stream the static aggregates.  Sadly some headers (ahem:
17107    iostream) contain static vars, and rely on them to run global
17108    ctors.  */
17109 unsigned
module_state::write_inits (elf_out *to, depset::hash &table, unsigned *crc_ptr)
17111 {
17112   if (!static_aggregates && !tls_aggregates)
17113     return 0;
17114 
17115   dump () && dump ("Writing initializers");
17116   dump.indent ();
17117 
17118   static_aggregates = nreverse (static_aggregates);
17119   tls_aggregates = nreverse (tls_aggregates);
17120 
17121   unsigned count = 0;
17122   trees_out sec (to, this, table, ~0u);
17123   sec.begin ();
17124 
17125   tree list = static_aggregates;
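  /* Pass zero walks the static aggregates, pass one the TLS
     aggregates -- LIST is switched at the bottom of the loop.  */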
17126   for (int passes = 0; passes != 2; passes++)
17127     {
17128       for (tree init = list; init; init = TREE_CHAIN (init), count++)
17129 	if (TREE_LANG_FLAG_0 (init))
17130 	  {
17131 	    tree decl = TREE_VALUE (init);
17132 
17133 	    dump ("Initializer:%u for %N", count, decl);
17134 	    sec.tree_node (decl);
17135 	  }
17136 
17137       list = tls_aggregates;
17138     }
17139 
17140   sec.end (to, to->name (MOD_SNAME_PFX ".ini"), crc_ptr);
17141   dump.outdent ();
17142 
17143   return count;
17144 }
17145 
/* We have to defer some post-load processing until we've completed
   reading, because that processing can itself cause more reading.  */
17148 
17149 static void
post_load_processing ()
17151 {
17152   /* We mustn't cause a GC, our caller should have arranged for that
17153      not to happen.  */
17154   gcc_checking_assert (function_depth);
17155 
17156   if (!post_load_decls)
17157     return;
17158 
17159   tree old_cfd = current_function_decl;
17160   struct function *old_cfun = cfun;
17161   while (post_load_decls->length ())
17162     {
17163       tree decl = post_load_decls->pop ();
17164 
17165       dump () && dump ("Post-load processing of %N", decl);
17166 
17167       gcc_checking_assert (DECL_ABSTRACT_P (decl));
17168       /* Cloning can cause loading -- specifically operator delete for
17169 	 the deleting dtor.  */
17170       maybe_clone_body (decl);
17171     }
17172 
17173   cfun = old_cfun;
17174   current_function_decl = old_cfd;
17175 }
17176 
17177 bool
module_state::read_inits (unsigned count)
17179 {
17180   trees_in sec (this);
17181   if (!sec.begin (loc, from (), from ()->find (MOD_SNAME_PFX ".ini")))
17182     return false;
17183   dump () && dump ("Reading %u initializers", count);
17184   dump.indent ();
17185 
17186   lazy_snum = ~0u;
17187   for (unsigned ix = 0; ix != count; ix++)
17188     {
17189       /* Merely referencing the decl causes its initializer to be read
17190 	 and added to the correct list.  */
17191       tree decl = sec.tree_node ();
17192 
17193       if (sec.get_overrun ())
17194 	break;
17195       if (decl)
17196 	dump ("Initializer:%u for %N", count, decl);
17197     }
17198   lazy_snum = 0;
17199   post_load_processing ();
17200   dump.outdent ();
17201   if (!sec.end (from ()))
17202     return false;
17203   return true;
17204 }
17205 
17206 void
module_state::write_counts (elf_out *to, unsigned counts[MSC_HWM],
			    unsigned *crc_ptr)
17209 {
17210   bytes_out cfg (to);
17211 
17212   cfg.begin ();
17213 
17214   for (unsigned ix = MSC_HWM; ix--;)
17215     cfg.u (counts[ix]);
17216 
17217   if (dump ())
17218     {
17219       dump ("Cluster sections are [%u,%u)",
17220 	    counts[MSC_sec_lwm], counts[MSC_sec_hwm]);
17221       dump ("Bindings %u", counts[MSC_bindings]);
17222       dump ("Pendings %u", counts[MSC_pendings]);
17223       dump ("Entities %u", counts[MSC_entities]);
17224       dump ("Namespaces %u", counts[MSC_namespaces]);
17225       dump ("Macros %u", counts[MSC_macros]);
17226       dump ("Initializers %u", counts[MSC_inits]);
17227     }
17228 
17229   cfg.end (to, to->name (MOD_SNAME_PFX ".cnt"), crc_ptr);
17230 }
17231 
17232 bool
module_state::read_counts (unsigned counts[MSC_HWM])
17234 {
17235   bytes_in cfg;
17236 
17237   if (!cfg.begin (loc, from (), MOD_SNAME_PFX ".cnt"))
17238     return false;
17239 
17240   for (unsigned ix = MSC_HWM; ix--;)
17241     counts[ix] = cfg.u ();
17242 
17243   if (dump ())
17244     {
17245       dump ("Declaration sections are [%u,%u)",
17246 	    counts[MSC_sec_lwm], counts[MSC_sec_hwm]);
17247       dump ("Bindings %u", counts[MSC_bindings]);
17248       dump ("Pendings %u", counts[MSC_pendings]);
17249       dump ("Entities %u", counts[MSC_entities]);
17250       dump ("Namespaces %u", counts[MSC_namespaces]);
17251       dump ("Macros %u", counts[MSC_macros]);
17252       dump ("Initializers %u", counts[MSC_inits]);
17253     }
17254 
17255   return cfg.end (from ());
17256 }
17257 
17258 /* Tool configuration:  MOD_SNAME_PFX .config
17259 
17260    This is data that confirms current state (or fails).  */
17261 
17262 void
module_state::write_config (elf_out *to, module_state_config &config,
			    unsigned inner_crc)
17265 {
17266   bytes_out cfg (to);
17267 
17268   cfg.begin ();
17269 
17270   /* Write version and inner crc as u32 values, for easier
17271      debug inspection.  */
17272   dump () && dump ("Writing version=%V, inner_crc=%x",
17273 		   MODULE_VERSION, inner_crc);
17274   cfg.u32 (unsigned (MODULE_VERSION));
17275   cfg.u32 (inner_crc);
17276 
17277   cfg.u (to->name (is_header () ? "" : get_flatname ()));
17278 
17279   /* Configuration. */
17280   dump () && dump ("Writing target='%s', host='%s'",
17281 		   TARGET_MACHINE, HOST_MACHINE);
17282   unsigned target = to->name (TARGET_MACHINE);
17283   unsigned host = (!strcmp (TARGET_MACHINE, HOST_MACHINE)
17284 		   ? target : to->name (HOST_MACHINE));
17285   cfg.u (target);
17286   cfg.u (host);
17287 
17288   cfg.str (config.dialect_str);
17289   cfg.u (extensions);
17290 
17291   /* Global tree information.  We write the globals crc separately,
17292      rather than mix it directly into the overall crc, as it is used
17293      to ensure data match between instances of the compiler, not
17294      integrity of the file.  */
17295   dump () && dump ("Writing globals=%u, crc=%x",
17296 		   fixed_trees->length (), global_crc);
17297   cfg.u (fixed_trees->length ());
17298   cfg.u32 (global_crc);
17299 
17300   if (is_partition ())
17301     cfg.u (is_interface ());
17302 
17303   cfg.u (config.num_imports);
17304   cfg.u (config.num_partitions);
17305   cfg.u (config.num_entities);
17306 
17307   cfg.u (config.ordinary_locs);
17308   cfg.u (config.macro_locs);
17309   cfg.u (config.ordinary_loc_align);
17310 
17311   /* Now generate CRC, we'll have incorporated the inner CRC because
17312      of its serialization above.  */
17313   cfg.end (to, to->name (MOD_SNAME_PFX ".cfg"), &crc);
17314   dump () && dump ("Writing CRC=%x", crc);
17315 }
17316 
17317 void
module_state::note_cmi_name ()
17319 {
17320   if (!cmi_noted_p && filename)
17321     {
17322       cmi_noted_p = true;
17323       inform (loc, "compiled module file is %qs",
17324 	      maybe_add_cmi_prefix (filename));
17325     }
17326 }
17327 
17328 bool
module_state::read_config (module_state_config &config)
17330 {
17331   bytes_in cfg;
17332 
17333   if (!cfg.begin (loc, from (), MOD_SNAME_PFX ".cfg"))
17334     return false;
17335 
17336   /* Check version.  */
17337   unsigned my_ver = MODULE_VERSION;
17338   unsigned their_ver = cfg.u32 ();
17339   dump () && dump  (my_ver == their_ver ? "Version %V"
17340 		    : "Expecting %V found %V", my_ver, their_ver);
17341   if (their_ver != my_ver)
17342     {
17343       /* The compiler versions differ.  Close enough? */
17344       verstr_t my_string, their_string;
17345 
17346       version2string (my_ver, my_string);
17347       version2string (their_ver, their_string);
17348 
17349       /* Reject when either is non-experimental or when experimental
17350 	 major versions differ.  */
17351       bool reject_p = ((!IS_EXPERIMENTAL (my_ver)
17352 			|| !IS_EXPERIMENTAL (their_ver)
17353 			|| MODULE_MAJOR (my_ver) != MODULE_MAJOR (their_ver))
17354 		       /* The 'I know what I'm doing' switch.  */
17355 		       && !flag_module_version_ignore);
17356       bool inform_p = true;
17357       if (reject_p)
17358 	{
17359 	  cfg.set_overrun ();
17360 	  error_at (loc, "compiled module is %sversion %s",
17361 		    IS_EXPERIMENTAL (their_ver) ? "experimental " : "",
17362 		    their_string);
17363 	}
17364       else
17365 	inform_p = warning_at (loc, 0, "compiled module is %sversion %s",
17366 			     IS_EXPERIMENTAL (their_ver) ? "experimental " : "",
17367 			     their_string);
17368 
17369       if (inform_p)
17370 	{
17371 	  inform (loc, "compiler is %sversion %s%s%s",
17372 		  IS_EXPERIMENTAL (my_ver) ? "experimental " : "",
17373 		  my_string,
17374 		  reject_p ? "" : flag_module_version_ignore
17375 		  ? ", be it on your own head!" : ", close enough?",
17376 		  reject_p ? "" : " \xc2\xaf\\_(\xe3\x83\x84)_/\xc2\xaf");
17377 	  note_cmi_name ();
17378 	}
17379 
17380       if (reject_p)
17381 	goto done;
17382     }
17383 
17384   /*  We wrote the inner crc merely to merge it, so simply read it
17385       back and forget it.  */
17386   cfg.u32 ();
17387 
17388   /* Check module name.  */
17389   {
17390     const char *their_name = from ()->name (cfg.u ());
17391     const char *our_name = "";
17392 
17393     if (!is_header ())
17394       our_name = get_flatname ();
17395 
17396     /* Header units can be aliased, so name checking is
17397        inappropriate.  */
17398     if (0 != strcmp (their_name, our_name))
17399       {
17400 	error_at (loc,
17401 		  their_name[0] && our_name[0] ? G_("module %qs found")
17402 		  : their_name[0]
17403 		  ? G_("header module expected, module %qs found")
17404 		  : G_("module %qs expected, header module found"),
17405 		  their_name[0] ? their_name : our_name);
17406 	cfg.set_overrun ();
17407 	goto done;
17408       }
17409   }
17410 
17411   /* Check the CRC after the above sanity checks, so that the user is
17412      clued in.  */
17413   {
17414     unsigned e_crc = crc;
17415     crc = cfg.get_crc ();
17416     dump () && dump ("Reading CRC=%x", crc);
17417     if (!is_direct () && crc != e_crc)
17418       {
17419 	error_at (loc, "module %qs CRC mismatch", get_flatname ());
17420 	cfg.set_overrun ();
17421 	goto done;
17422       }
17423   }
17424 
17425   /* Check target & host.  */
17426   {
17427     const char *their_target = from ()->name (cfg.u ());
17428     const char *their_host = from ()->name (cfg.u ());
17429     dump () && dump ("Read target='%s', host='%s'", their_target, their_host);
17430     if (strcmp (their_target, TARGET_MACHINE)
17431 	|| strcmp (their_host, HOST_MACHINE))
17432       {
	error_at (loc, "target & host is %qs:%qs, expected %qs:%qs",
		  their_target, their_host, TARGET_MACHINE, HOST_MACHINE);
17435 	cfg.set_overrun ();
17436 	goto done;
17437       }
17438   }
17439 
17440   /* Check compilation dialect.  This must match.  */
17441   {
17442     const char *their_dialect = cfg.str ();
17443     if (strcmp (their_dialect, config.dialect_str))
17444       {
17445 	error_at (loc, "language dialect differs %qs, expected %qs",
17446 		  their_dialect, config.dialect_str);
17447 	cfg.set_overrun ();
17448 	goto done;
17449       }
17450   }
17451 
17452   /* Check for extensions.  If they set any, we must have them set
17453      too.  */
17454   {
17455     unsigned ext = cfg.u ();
17456     unsigned allowed = (flag_openmp ? SE_OPENMP : 0);
17457 
17458     if (unsigned bad = ext & ~allowed)
17459       {
17460 	if (bad & SE_OPENMP)
17461 	  error_at (loc, "module contains OpenMP, use %<-fopenmp%> to enable");
17462 	cfg.set_overrun ();
17463 	goto done;
17464       }
17465     extensions = ext;
17466   }
17467 
17468   /* Check global trees.  */
17469   {
17470     unsigned their_fixed_length = cfg.u ();
17471     unsigned their_fixed_crc = cfg.u32 ();
17472     dump () && dump ("Read globals=%u, crc=%x",
17473 		     their_fixed_length, their_fixed_crc);
17474     if (!flag_preprocess_only
17475 	&& (their_fixed_length != fixed_trees->length ()
17476 	    || their_fixed_crc != global_crc))
17477       {
17478 	error_at (loc, "fixed tree mismatch");
17479 	cfg.set_overrun ();
17480 	goto done;
17481       }
17482   }
17483 
17484   /* All non-partitions are interfaces.  */
17485   interface_p = !is_partition () || cfg.u ();
17486 
17487   config.num_imports = cfg.u ();
17488   config.num_partitions = cfg.u ();
17489   config.num_entities = cfg.u ();
17490 
17491   config.ordinary_locs = cfg.u ();
17492   config.macro_locs = cfg.u ();
17493   config.ordinary_loc_align = cfg.u ();
17494 
17495  done:
17496   return cfg.end (from ());
17497 }
17498 
17499 /* Comparator for ordering the Ordered Ordinary Location array.  */
17500 
17501 static int
ool_cmp (const void *a_, const void *b_)
17503 {
17504   auto *a = *static_cast<const module_state *const *> (a_);
17505   auto *b = *static_cast<const module_state *const *> (b_);
17506   if (a == b)
17507     return 0;
17508   else if (a->ordinary_locs.first < b->ordinary_locs.second)
17509     return -1;
17510   else
17511     return +1;
17512 }
17513 
17514 /* Use ELROND format to record the following sections:
17515      qualified-names	    : binding value(s)
17516      MOD_SNAME_PFX.README   : human readable, strings
17517      MOD_SNAME_PFX.ENV      : environment strings, strings
17518      MOD_SNAME_PFX.nms 	    : namespace hierarchy
17519      MOD_SNAME_PFX.bnd      : binding table
17520      MOD_SNAME_PFX.spc      : specialization table
17521      MOD_SNAME_PFX.imp      : import table
17522      MOD_SNAME_PFX.ent      : entity table
17523      MOD_SNAME_PFX.prt      : partitions table
17524      MOD_SNAME_PFX.olm      : ordinary line maps
17525      MOD_SNAME_PFX.mlm      : macro line maps
17526      MOD_SNAME_PFX.def      : macro definitions
17527      MOD_SNAME_PFX.mac      : macro index
17528      MOD_SNAME_PFX.ini      : inits
17529      MOD_SNAME_PFX.cnt      : counts
17530      MOD_SNAME_PFX.cfg      : config data
17531 */
17532 
17533 void
module_state::write (elf_out *to, cpp_reader *reader)
17535 {
17536   /* Figure out remapped module numbers, which might elide
17537      partitions.  */
17538   bitmap partitions = NULL;
17539   if (!is_header () && !is_partition ())
17540     partitions = BITMAP_GGC_ALLOC ();
17541 
17542   unsigned mod_hwm = 1;
17543   for (unsigned ix = 1; ix != modules->length (); ix++)
17544     {
17545       module_state *imp = (*modules)[ix];
17546 
17547       /* Promote any non-partition direct import from a partition, unless
17548 	 we're a partition.  */
17549       if (!is_partition () && !imp->is_partition ()
17550 	  && imp->is_partition_direct ())
17551 	imp->directness = MD_PURVIEW_DIRECT;
17552 
17553       /* Write any import that is not a partition, unless we're a
17554 	 partition.  */
17555       if (!partitions || !imp->is_partition ())
17556 	imp->remap = mod_hwm++;
17557       else
17558 	{
17559 	  dump () && dump ("Partition %M %u", imp, ix);
17560 	  bitmap_set_bit (partitions, ix);
17561 	  imp->remap = 0;
17562 	  /* All interface partitions must be exported.  */
17563 	  if (imp->is_interface () && !bitmap_bit_p (exports, imp->mod))
17564 	    {
17565 	      error_at (imp->loc, "interface partition is not exported");
17566 	      bitmap_set_bit (exports, imp->mod);
17567 	    }
17568 
17569 	  /* All the partition entities should have been loaded when
17570 	     loading the partition.  */
17571 	  if (CHECKING_P)
17572 	    for (unsigned jx = 0; jx != imp->entity_num; jx++)
17573 	      {
17574 		binding_slot *slot = &(*entity_ary)[imp->entity_lwm + jx];
17575 		gcc_checking_assert (!slot->is_lazy ());
17576 	      }
17577 	}
17578     }
17579 
17580   if (partitions && bitmap_empty_p (partitions))
17581     /* No partitions present.  */
17582     partitions = nullptr;
17583 
17584   /* Find the set of decls we must write out.  */
17585   depset::hash table (DECL_NAMESPACE_BINDINGS (global_namespace)->size () * 8);
17586   /* Add the specializations before the writables, so that we can
17587      detect injected friend specializations.  */
17588   table.add_specializations (true);
17589   table.add_specializations (false);
17590   if (partial_specializations)
17591     {
17592       table.add_partial_entities (partial_specializations);
17593       partial_specializations = NULL;
17594     }
17595   table.add_namespace_entities (global_namespace, partitions);
17596   if (class_members)
17597     {
17598       table.add_class_entities (class_members);
17599       class_members = NULL;
17600     }
17601 
17602   /* Now join everything up.  */
17603   table.find_dependencies (this);
17604 
17605   if (!table.finalize_dependencies ())
17606     {
17607       to->set_error ();
17608       return;
17609     }
17610 
17611 #if CHECKING_P
17612   /* We're done verifying at-most once reading, reset to verify
17613      at-most once writing.  */
17614   note_defs = note_defs_table_t::create_ggc (1000);
17615 #endif
17616 
  /* Determine Strongly Connected Components.  */
17618   vec<depset *> sccs = table.connect ();
17619 
17620   vec_alloc (ool, modules->length ());
17621   for (unsigned ix = modules->length (); --ix;)
17622     {
17623       auto *import = (*modules)[ix];
17624       if (import->loadedness > ML_NONE
17625 	  && !(partitions && bitmap_bit_p (partitions, import->mod)))
17626 	ool->quick_push (import);
17627     }
17628   ool->qsort (ool_cmp);
17629 
17630   unsigned crc = 0;
17631   module_state_config config;
17632   location_map_info map_info = write_prepare_maps (&config);
17633   unsigned counts[MSC_HWM];
17634 
17635   config.num_imports = mod_hwm;
17636   config.num_partitions = modules->length () - mod_hwm;
17637   memset (counts, 0, sizeof (counts));
17638 
17639   /* depset::cluster is the cluster number,
17640      depset::section is unspecified scratch value.
17641 
17642      The following loops make use of the tarjan property that
17643      dependencies will be earlier in the SCCS array.  */
17644 
17645   /* This first loop determines the number of depsets in each SCC, and
17646      also the number of namespaces we're dealing with.  During the
17647      loop, the meaning of a couple of depset fields now change:
17648 
17649      depset::cluster -> size_of cluster, if first of cluster & !namespace
17650      depset::section -> section number of cluster (if !namespace). */
17651 
17652   unsigned n_spaces = 0;
17653   counts[MSC_sec_lwm] = counts[MSC_sec_hwm] = to->get_section_limit ();
17654   for (unsigned size, ix = 0; ix < sccs.length (); ix += size)
17655     {
17656       depset **base = &sccs[ix];
17657 
17658       if (base[0]->get_entity_kind () == depset::EK_NAMESPACE)
17659 	{
17660 	  n_spaces++;
17661 	  size = 1;
17662 	}
17663       else
17664 	{
17665 	  /* Count the members in this cluster.  */
17666 	  for (size = 1; ix + size < sccs.length (); size++)
17667 	    if (base[size]->cluster != base[0]->cluster)
17668 	      break;
17669 
17670 	  for (unsigned jx = 0; jx != size; jx++)
17671 	    {
17672 	      /* Set the section number.  */
17673 	      base[jx]->cluster = ~(~0u >> 1); /* A bad value.  */
17674 	      base[jx]->section = counts[MSC_sec_hwm];
17675 	    }
17676 
17677 	  /* Save the size in the first member's cluster slot.  */
17678 	  base[0]->cluster = size;
17679 
17680 	  counts[MSC_sec_hwm]++;
17681 	}
17682     }
17683 
17684   /* Write the clusters.  Namespace decls are put in the spaces array.
17685      The meaning of depset::cluster changes to provide the
17686      unnamed-decl count of the depset's decl (and remains zero for
17687      non-decls and non-unnamed).  */
17688   unsigned bytes = 0;
17689   vec<depset *> spaces;
17690   spaces.create (n_spaces);
17691 
17692   for (unsigned size, ix = 0; ix < sccs.length (); ix += size)
17693     {
17694       depset **base = &sccs[ix];
17695 
17696       if (base[0]->get_entity_kind () == depset::EK_NAMESPACE)
17697 	{
17698 	  tree decl = base[0]->get_entity ();
17699 	  if (decl == global_namespace)
17700 	    base[0]->cluster = 0;
17701 	  else if (!base[0]->is_import ())
17702 	    {
17703 	      base[0]->cluster = counts[MSC_entities]++;
17704 	      spaces.quick_push (base[0]);
17705 	      counts[MSC_namespaces]++;
17706 	      if (CHECKING_P)
17707 		{
17708 		  /* Add it to the entity map, such that we can tell it is
17709 		     part of us.  */
17710 		  bool existed;
17711 		  unsigned *slot = &entity_map->get_or_insert
17712 		    (DECL_UID (decl), &existed);
17713 		  if (existed)
17714 		    /* It must have come from a partition.  */
17715 		    gcc_checking_assert
17716 		      (import_entity_module (*slot)->is_partition ());
17717 		  *slot = ~base[0]->cluster;
17718 		}
17719 	      dump (dumper::CLUSTER) && dump ("Cluster namespace %N", decl);
17720 	    }
17721 	  size = 1;
17722 	}
17723       else
17724 	{
17725 	  size = base[0]->cluster;
17726 
17727 	  /* Cluster is now used to number entities.  */
17728 	  base[0]->cluster = ~(~0u >> 1); /* A bad value.  */
17729 
17730 	  sort_cluster (&table, base, size);
17731 
17732 	  /* Record the section for consistency checking during stream
17733 	     out -- we don't want to start writing decls in different
17734 	     sections.  */
17735 	  table.section = base[0]->section;
17736 	  bytes += write_cluster (to, base, size, table, counts, &crc);
17737 	  table.section = 0;
17738 	}
17739     }
17740 
17741   /* depset::cluster - entity number (on entities)
17742      depset::section - cluster number  */
17743   /* We'd better have written as many sections and found as many
17744      namespaces as we predicted.  */
17745   gcc_assert (counts[MSC_sec_hwm] == to->get_section_limit ()
17746 	      && spaces.length () == counts[MSC_namespaces]);
17747 
17748   /* Write the entities.  This is skipped if we contain only
17749      namespaces, or nothing at all.  */
17750   config.num_entities = counts[MSC_entities];
17751   if (counts[MSC_entities])
17752     write_entities (to, sccs, counts[MSC_entities], &crc);
17753 
17754   /* Write the namespaces.  */
17755   if (counts[MSC_namespaces])
17756     write_namespaces (to, spaces, counts[MSC_namespaces], &crc);
17757 
17758   /* Write the bindings themselves.  */
17759   counts[MSC_bindings] = write_bindings (to, sccs, &crc);
17760 
17761   /* Write the unnamed.  */
17762   counts[MSC_pendings] = write_pendings (to, sccs, table, &crc);
17763 
17764   /* Write the import table.  */
17765   if (config.num_imports > 1)
17766     write_imports (to, &crc);
17767 
17768   /* Write elided partition table.  */
17769   if (config.num_partitions)
17770     write_partitions (to, config.num_partitions, &crc);
17771 
17772   /* Write the line maps.  */
17773   write_ordinary_maps (to, map_info, &config, config.num_partitions, &crc);
17774   write_macro_maps (to, map_info, &config, &crc);
17775 
17776   if (is_header ())
17777     {
17778       counts[MSC_macros] = write_macros (to, reader, &crc);
17779       counts[MSC_inits] = write_inits (to, table, &crc);
17780     }
17781 
17782   unsigned clusters = counts[MSC_sec_hwm] - counts[MSC_sec_lwm];
17783   dump () && dump ("Wrote %u clusters, average %u bytes/cluster",
17784 		   clusters, (bytes + clusters / 2) / (clusters + !clusters));
17785 
17786   write_counts (to, counts, &crc);
17787 
17788   /* And finish up.  */
17789   write_config (to, config, crc);
17790 
17791   spaces.release ();
17792   sccs.release ();
17793 
17794   vec_free (ool);
17795 
17796   /* Human-readable info.  */
17797   write_readme (to, reader, config.dialect_str, extensions);
17798 
17799   // FIXME:QOI:  Have a command line switch to control more detailed
17800   // information (which might leak data you do not want to leak).
17801   // Perhaps (some of) the write_readme contents should also be
17802   // so-controlled.
17803   if (false)
17804     write_env (to);
17805 
17806   trees_out::instrument ();
17807   dump () && dump ("Wrote %u sections", to->get_section_limit ());
17808 }
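
/* For reference, the CMI written above is laid out roughly as: the
   cluster sections (one per SCC), then the entity, namespace,
   binding and pending tables, the import and elided-partition
   tables, the ordinary and macro location maps, the macro and init
   sections (header units only), and finally the counts, config and
   readme sections.  The precise encoding of each piece is defined
   by its write_* routine above; this list is only a reading aid.  */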
17809 
17810 /* Initial read of a CMI.  Checks config, loads up imports and line
17811    maps.  */
17812 
17813 bool
17814 module_state::read_initial (cpp_reader *reader)
17815 {
17816   module_state_config config;
17817   bool ok = true;
17818 
17819   if (ok && !from ()->begin (loc))
17820     ok = false;
17821 
17822   if (ok && !read_config (config))
17823     ok = false;
17824 
17825   bool have_locs = ok && read_prepare_maps (&config);
17826 
17827   /* Ordinary maps before the imports.  */
17828   if (have_locs && !read_ordinary_maps ())
17829     ok = false;
17830 
17831   /* Allocate the REMAP vector.  */
17832   slurp->alloc_remap (config.num_imports);
17833 
17834   if (ok)
17835     {
17836       /* Read the import table.  Decrement current to stop this CMI
17837 	 from being evicted during the import. */
17838       slurp->current--;
17839       if (config.num_imports > 1 && !read_imports (reader, line_table))
17840 	ok = false;
17841       slurp->current++;
17842     }
17843 
17844   /* Read the elided partition table, if we're the primary partition.  */
17845   if (ok && config.num_partitions && is_module ()
17846       && !read_partitions (config.num_partitions))
17847     ok = false;
17848 
17849   /* Determine the module's number.  */
17850   gcc_checking_assert (mod == MODULE_UNKNOWN);
17851   gcc_checking_assert (this != (*modules)[0]);
17852 
17853   {
17854     /* Allocate space in the entities array now -- that array must be
17855        monotonically in step with the modules array.  */
17856     entity_lwm = vec_safe_length (entity_ary);
17857     entity_num = config.num_entities;
17858     gcc_checking_assert (modules->length () == 1
17859 			 || modules->last ()->entity_lwm <= entity_lwm);
17860     vec_safe_reserve (entity_ary, config.num_entities);
17861 
17862     binding_slot slot;
17863     slot.u.binding = NULL_TREE;
17864     for (unsigned count = config.num_entities; count--;)
17865       entity_ary->quick_push (slot);
17866   }
17867 
17868   /* We'll run out of other resources before we run out of module
17869      indices.  */
17870   mod = modules->length ();
17871   vec_safe_push (modules, this);
17872 
17873   /* We always import and export ourselves. */
17874   bitmap_set_bit (imports, mod);
17875   bitmap_set_bit (exports, mod);
17876 
17877   if (ok)
17878     (*slurp->remap)[0] = mod << 1;
17879   dump () && dump ("Assigning %M module number %u", this, mod);
17880 
17881   /* We should not have been frozen during the importing done by
17882      read_config.  */
17883   gcc_assert (!from ()->is_frozen ());
17884 
17885   /* Macro maps after the imports.  */
17886   if (ok && have_locs && !read_macro_maps ())
17887     ok = false;
17888 
17889   gcc_assert (slurp->current == ~0u);
17890   return ok;
17891 }
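
/* A sketch of how the remap vector initialized above is consumed by
   read_preprocessor and read_language below: slot zero is ourselves,
   stored as MOD << 1, and the slots those loops treat as direct
   imports have their low bit set, with the global module number in
   the remaining bits:

     unsigned map = (*slurp->remap)[ix];
     if (map & 1)
       {
	 // A direct import; map >> 1 is its global module number.
	 module_state *import = (*modules)[map >> 1];
       }

   (Illustrative only; the real loops are below.)  */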
17892 
17893 /* Read a preprocessor state.  */
17894 
17895 bool
17896 module_state::read_preprocessor (bool outermost)
17897 {
17898   gcc_checking_assert (is_header () && slurp
17899 		       && slurp->remap_module (0) == mod);
17900 
17901   if (loadedness == ML_PREPROCESSOR)
17902     return !(from () && from ()->get_error ());
17903 
17904   bool ok = true;
17905 
17906   /* Read direct header imports.  */
17907   unsigned len = slurp->remap->length ();
17908   for (unsigned ix = 1; ok && ix != len; ix++)
17909     {
17910       unsigned map = (*slurp->remap)[ix];
17911       if (map & 1)
17912 	{
17913 	  module_state *import = (*modules)[map >> 1];
17914 	  if (import->is_header ())
17915 	    {
17916 	      ok = import->read_preprocessor (false);
17917 	      bitmap_ior_into (slurp->headers, import->slurp->headers);
17918 	    }
17919 	}
17920     }
17921 
17922   /* Record as a direct header.  */
17923   if (ok)
17924     bitmap_set_bit (slurp->headers, mod);
17925 
17926   if (ok && !read_macros ())
17927     ok = false;
17928 
17929   loadedness = ML_PREPROCESSOR;
17930   announce ("macros");
17931 
17932   if (flag_preprocess_only)
17933     /* We're done with the string table.  */
17934     from ()->release ();
17935 
17936   return check_read (outermost, ok);
17937 }
17938 
17939 /* Read language state.  */
17940 
17941 bool
17942 module_state::read_language (bool outermost)
17943 {
17944   gcc_checking_assert (!lazy_snum);
17945 
17946   if (loadedness == ML_LANGUAGE)
17947     return !(slurp && from () && from ()->get_error ());
17948 
17949   gcc_checking_assert (slurp && slurp->current == ~0u
17950 		       && slurp->remap_module (0) == mod);
17951 
17952   bool ok = true;
17953 
17954   /* Read direct imports.  */
17955   unsigned len = slurp->remap->length ();
17956   for (unsigned ix = 1; ok && ix != len; ix++)
17957     {
17958       unsigned map = (*slurp->remap)[ix];
17959       if (map & 1)
17960 	{
17961 	  module_state *import = (*modules)[map >> 1];
17962 	  if (!import->read_language (false))
17963 	    ok = false;
17964 	}
17965     }
17966 
17967   unsigned counts[MSC_HWM];
17968 
17969   if (ok && !read_counts (counts))
17970     ok = false;
17971 
17972   function_depth++; /* Prevent unexpected GCs.  */
17973 
17974   if (counts[MSC_entities] != entity_num)
17975     ok = false;
17976   if (ok && counts[MSC_entities]
17977       && !read_entities (counts[MSC_entities],
17978 			 counts[MSC_sec_lwm], counts[MSC_sec_hwm]))
17979     ok = false;
17980 
17981   /* Read the namespace hierarchy. */
17982   if (ok && counts[MSC_namespaces]
17983       && !read_namespaces (counts[MSC_namespaces]))
17984     ok = false;
17985 
17986   if (ok && !read_bindings (counts[MSC_bindings],
17987 			    counts[MSC_sec_lwm], counts[MSC_sec_hwm]))
17988     ok = false;
17989 
17990   /* And unnamed.  */
17991   if (ok && counts[MSC_pendings] && !read_pendings (counts[MSC_pendings]))
17992     ok = false;
17993 
17994   if (ok)
17995     {
17996       slurp->remaining = counts[MSC_sec_hwm] - counts[MSC_sec_lwm];
17997       available_clusters += counts[MSC_sec_hwm] - counts[MSC_sec_lwm];
17998     }
17999 
18000   if (!flag_module_lazy
18001       || (is_partition ()
18002 	  && module_interface_p ()
18003 	  && !module_partition_p ()))
18004     {
18005       /* Read the sections in forward order, so that dependencies are read
18006 	 first.  See note about tarjan_connect.  */
18007       ggc_collect ();
18008 
18009       lazy_snum = ~0u;
18010 
18011       unsigned hwm = counts[MSC_sec_hwm];
18012       for (unsigned ix = counts[MSC_sec_lwm]; ok && ix != hwm; ix++)
18013 	if (!load_section (ix, NULL))
18014 	  {
18015 	    ok = false;
18016 	    break;
18017 	  }
18018       lazy_snum = 0;
18019       post_load_processing ();
18020 
18021       ggc_collect ();
18022 
18023       if (ok && CHECKING_P)
18024 	for (unsigned ix = 0; ix != entity_num; ix++)
18025 	  gcc_assert (!(*entity_ary)[ix + entity_lwm].is_lazy ());
18026     }
18027 
18028   // If the import is a header-unit, we need to register initializers
18029   // of any static objects it contains (looking at you _Ioinit).
18030   // Notice, the ordering of these initializers will be that of a
18031   // dynamic initializer at this point in the current TU.  (Other
18032   // instances of these objects in other TUs will be initialized as
18033   // part of that TU's global initializers.)
18034   if (ok && counts[MSC_inits] && !read_inits (counts[MSC_inits]))
18035     ok = false;
18036 
18037   function_depth--;
18038 
18039   announce (flag_module_lazy ? "lazy" : "imported");
18040   loadedness = ML_LANGUAGE;
18041 
18042   gcc_assert (slurp->current == ~0u);
18043 
18044   /* We're done with the string table.  */
18045   from ()->release ();
18046 
18047   return check_read (outermost, ok);
18048 }
18049 
18050 bool
18051 module_state::maybe_defrost ()
18052 {
18053   bool ok = true;
18054   if (from ()->is_frozen ())
18055     {
18056       if (lazy_open >= lazy_limit)
18057 	freeze_an_elf ();
18058       dump () && dump ("Defrosting '%s'", filename);
18059       ok = from ()->defrost (maybe_add_cmi_prefix (filename));
18060       lazy_open++;
18061     }
18062 
18063   return ok;
18064 }
18065 
18066 /* Load section SNUM, dealing with laziness.  It doesn't matter if we
18067    have multiple concurrent loads, because we do not use TREE_VISITED
18068    when reading back in.  */
18069 
18070 bool
18071 module_state::load_section (unsigned snum, binding_slot *mslot)
18072 {
18073   if (from ()->get_error ())
18074     return false;
18075 
18076   if (snum >= slurp->current)
18077     from ()->set_error (elf::E_BAD_LAZY);
18078   else if (maybe_defrost ())
18079     {
18080       unsigned old_current = slurp->current;
18081       slurp->current = snum;
18082       slurp->lru = 0;  /* Do not swap out.  */
18083       slurp->remaining--;
18084       read_cluster (snum);
18085       slurp->lru = ++lazy_lru;
18086       slurp->current = old_current;
18087     }
18088 
18089   if (mslot && mslot->is_lazy ())
18090     {
18091       /* Oops, the section didn't set this slot.  */
18092       from ()->set_error (elf::E_BAD_DATA);
18093       *mslot = NULL_TREE;
18094     }
18095 
18096   bool ok = !from ()->get_error ();
18097   if (!ok)
18098     {
18099       error_at (loc, "failed to read compiled module cluster %u: %s",
18100 		snum, from ()->get_error (filename));
18101       note_cmi_name ();
18102     }
18103 
18104   maybe_completed_reading ();
18105 
18106   return ok;
18107 }
18108 
18109 void
18110 module_state::maybe_completed_reading ()
18111 {
18112   if (loadedness == ML_LANGUAGE && slurp->current == ~0u && !slurp->remaining)
18113     {
18114       lazy_open--;
18115       /* We no longer need the macros, all tokenizing has been done.  */
18116       slurp->release_macros ();
18117 
18118       from ()->end ();
18119       slurp->close ();
18120       slurped ();
18121     }
18122 }
18123 
18124 /* After a reading operation, make sure things are still ok.  If not,
18125    emit an error and clean up.  */
18126 
18127 bool
18128 module_state::check_read (bool outermost, bool ok)
18129 {
18130   gcc_checking_assert (!outermost || slurp->current == ~0u);
18131 
18132   if (!ok)
18133     from ()->set_error ();
18134 
18135   if (int e = from ()->get_error ())
18136     {
18137       error_at (loc, "failed to read compiled module: %s",
18138 		from ()->get_error (filename));
18139       note_cmi_name ();
18140 
18141       if (e == EMFILE
18142 	  || e == ENFILE
18143 #if MAPPED_READING
18144 	  || e == ENOMEM
18145 #endif
18146 	  || false)
18147 	inform (loc, "consider using %<-fno-module-lazy%>,"
18148 		" increasing %<-param-lazy-modules=%u%> value,"
18149 		" or increasing the per-process file descriptor limit",
18150 		param_lazy_modules);
18151       else if (e == ENOENT)
18152 	inform (loc, "imports must be built before being imported");
18153 
18154       if (outermost)
18155 	fatal_error (loc, "returning to the gate for a mechanical issue");
18156 
18157       ok = false;
18158     }
18159 
18160   maybe_completed_reading ();
18161 
18162   return ok;
18163 }
18164 
18165 /* Return the name of module IX (the flat name, including dots),
18166    or NULL if there is no suitable name.  */
18167 
18168 char const *
18169 module_name (unsigned ix, bool header_ok)
18170 {
18171   if (modules)
18172     {
18173       module_state *imp = (*modules)[ix];
18174 
18175       if (ix && !imp->name)
18176 	imp = imp->parent;
18177 
18178       if (header_ok || !imp->is_header ())
18179 	return imp->get_flatname ();
18180     }
18181 
18182   return NULL;
18183 }
18184 
18185 /* Return the bitmap describing what modules are imported.  Remember,
18186    we always import ourselves.  */
18187 
18188 bitmap
18189 get_import_bitmap ()
18190 {
18191   return (*modules)[0]->imports;
18192 }
18193 
18194 /* Return the visible imports and path of instantiation for an
18195    instantiation at TINST.  If TINST is nullptr, we're not in an
18196    instantiation, and thus will return the visible imports of the
18197    current TU (and NULL *PATH_MAP_P).   We cache the information on
18198    the tinst level itself.  */
18199 
18200 static bitmap
18201 path_of_instantiation (tinst_level *tinst, bitmap *path_map_p)
18202 {
18203   gcc_checking_assert (modules_p ());
18204 
18205   if (!tinst)
18206     {
18207       /* Not inside an instantiation, just the regular case.  */
18208       *path_map_p = nullptr;
18209       return get_import_bitmap ();
18210     }
18211 
18212   if (!tinst->path)
18213     {
18214       /* Calculate.  */
18215       bitmap visible = path_of_instantiation (tinst->next, path_map_p);
18216       bitmap path_map = *path_map_p;
18217 
18218       if (!path_map)
18219 	{
18220 	  path_map = BITMAP_GGC_ALLOC ();
18221 	  bitmap_set_bit (path_map, 0);
18222 	}
18223 
18224       tree decl = tinst->tldcl;
18225       if (TREE_CODE (decl) == TREE_LIST)
18226 	decl = TREE_PURPOSE (decl);
18227       if (TYPE_P (decl))
18228 	decl = TYPE_NAME (decl);
18229 
18230       if (unsigned mod = get_originating_module (decl))
18231 	if (!bitmap_bit_p (path_map, mod))
18232 	  {
18233 	    /* This is brand new information!  */
18234 	    bitmap new_path = BITMAP_GGC_ALLOC ();
18235 	    bitmap_copy (new_path, path_map);
18236 	    bitmap_set_bit (new_path, mod);
18237 	    path_map = new_path;
18238 
18239 	    bitmap imports = (*modules)[mod]->imports;
18240 	    if (bitmap_intersect_compl_p (imports, visible))
18241 	      {
18242 		/* IMPORTS contains additional modules to VISIBLE.  */
18243 		bitmap new_visible = BITMAP_GGC_ALLOC ();
18244 
18245 		bitmap_ior (new_visible, visible, imports);
18246 		visible = new_visible;
18247 	      }
18248 	  }
18249 
18250       tinst->path = path_map;
18251       tinst->visible = visible;
18252     }
18253 
18254   *path_map_p = tinst->path;
18255   return tinst->visible;
18256 }
18257 
18258 /* Return the bitmap describing what modules are visible along the
18259    path of instantiation.  If we're not an instantiation, this will be
18260    the visible imports of the TU.  *PATH_MAP_P is filled in with the
18261    modules owning the instantiation path -- we see the module-linkage
18262    entities of those modules.  */
18263 
18264 bitmap
18265 visible_instantiation_path (bitmap *path_map_p)
18266 {
18267   if (!modules_p ())
18268     return NULL;
18269 
18270   return path_of_instantiation (current_instantiation (), path_map_p);
18271 }
18272 
18273 /* We've just directly imported IMPORT.  Update our import/export
18274    bitmaps.  IS_EXPORT is true if we're re-exporting IMPORT.  */
18275 
18276 void
18277 module_state::set_import (module_state const *import, bool is_export)
18278 {
18279   gcc_checking_assert (this != import);
18280 
18281   /* We see IMPORT's exports (which includes IMPORT).  If IMPORT is
18282      the primary interface or a partition we'll see its imports.  */
18283   bitmap_ior_into (imports, import->is_module () || import->is_partition ()
18284 		   ? import->imports : import->exports);
18285 
18286   if (is_export)
18287     /* We'll export IMPORT's exports.  */
18288     bitmap_ior_into (exports, import->exports);
18289 }
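
/* For example, if IMPORT's exports bitmap is {IMPORT, C}, importing
   it ORs {IMPORT, C} into our imports (or, when IMPORT is a primary
   interface or partition, its full import set); re-exporting
   additionally ORs IMPORT's exports into our own exports.  */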
18290 
18291 /* Return the declaring entity of DECL.  That is the decl determining
18292    how to decorate DECL with module information.  Returns NULL_TREE if
18293    it's the global module.  */
18294 
18295 tree
18296 get_originating_module_decl (tree decl)
18297 {
18298   /* An enumeration constant.  */
18299   if (TREE_CODE (decl) == CONST_DECL
18300       && DECL_CONTEXT (decl)
18301       && (TREE_CODE (DECL_CONTEXT (decl)) == ENUMERAL_TYPE))
18302     decl = TYPE_NAME (DECL_CONTEXT (decl));
18303   else if (TREE_CODE (decl) == FIELD_DECL
18304 	   || TREE_CODE (decl) == USING_DECL)
18305     {
18306       decl = DECL_CONTEXT (decl);
18307       if (TREE_CODE (decl) != FUNCTION_DECL)
18308 	decl = TYPE_NAME (decl);
18309     }
18310 
18311   gcc_checking_assert (TREE_CODE (decl) == TEMPLATE_DECL
18312 		       || TREE_CODE (decl) == FUNCTION_DECL
18313 		       || TREE_CODE (decl) == TYPE_DECL
18314 		       || TREE_CODE (decl) == VAR_DECL
18315 		       || TREE_CODE (decl) == CONCEPT_DECL
18316 		       || TREE_CODE (decl) == NAMESPACE_DECL);
18317 
18318   for (;;)
18319     {
18320       /* Uninstantiated template friends are owned by the befriending
18321 	 class -- not their context.  */
18322       if (TREE_CODE (decl) == TEMPLATE_DECL
18323 	  && DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (decl))
18324 	decl = TYPE_NAME (DECL_CHAIN (decl));
18325 
18326       int use;
18327       if (tree ti = node_template_info (decl, use))
18328 	{
18329 	  decl = TI_TEMPLATE (ti);
18330 	  if (TREE_CODE (decl) != TEMPLATE_DECL)
18331 	    {
18332 	      /* A friend template specialization.  */
18333 	      gcc_checking_assert (OVL_P (decl));
18334 	      return global_namespace;
18335 	    }
18336 	}
18337       else
18338 	{
18339 	  tree ctx = CP_DECL_CONTEXT (decl);
18340 	  if (TREE_CODE (ctx) == NAMESPACE_DECL)
18341 	    break;
18342 
18343 	  if (TYPE_P (ctx))
18344 	    {
18345 	      ctx = TYPE_NAME (ctx);
18346 	      if (!ctx)
18347 		{
18348 		  /* Some kind of internal type.  */
18349 		  gcc_checking_assert (DECL_ARTIFICIAL (decl));
18350 		  return global_namespace;
18351 		}
18352 	    }
18353 	  decl = ctx;
18354 	}
18355     }
18356 
18357   return decl;
18358 }
18359 
18360 int
18361 get_originating_module (tree decl, bool for_mangle)
18362 {
18363   tree owner = get_originating_module_decl (decl);
18364   tree not_tmpl = STRIP_TEMPLATE (owner);
18365 
18366   if (!DECL_LANG_SPECIFIC (not_tmpl))
18367     return for_mangle ? -1 : 0;
18368 
18369   if (for_mangle
18370       && (DECL_MODULE_EXPORT_P (owner) || !DECL_MODULE_PURVIEW_P (not_tmpl)))
18371     return -1;
18372 
18373   if (!DECL_MODULE_IMPORT_P (not_tmpl))
18374     return 0;
18375 
18376   return get_importing_module (owner);
18377 }
18378 
18379 unsigned
18380 get_importing_module (tree decl, bool flexible)
18381 {
18382   unsigned index = import_entity_index (decl, flexible);
18383   if (index == ~(~0u >> 1))
18384     return -1;
18385   module_state *module = import_entity_module (index);
18386 
18387   return module->mod;
18388 }
18389 
18390 /* Return whether it is permissible to redeclare DECL.  */
18391 
18392 bool
18393 module_may_redeclare (tree decl)
18394 {
18395   module_state *me = (*modules)[0];
18396   module_state *them = me;
18397   tree not_tmpl = STRIP_TEMPLATE (decl);
18398   if (DECL_LANG_SPECIFIC (not_tmpl) && DECL_MODULE_IMPORT_P (not_tmpl))
18399     {
18400       /* We can be given the TEMPLATE_RESULT.  We want the
18401 	 TEMPLATE_DECL.  */
18402       int use_tpl = -1;
18403       if (tree ti = node_template_info (decl, use_tpl))
18404 	{
18405 	  tree tmpl = TI_TEMPLATE (ti);
18406 	  if (use_tpl == 2)
18407 	    {
18408 	      /* A partial specialization.  Find that specialization's
18409 		 template_decl.  */
18410 	      for (tree list = DECL_TEMPLATE_SPECIALIZATIONS (tmpl);
18411 		   list; list = TREE_CHAIN (list))
18412 		if (DECL_TEMPLATE_RESULT (TREE_VALUE (list)) == decl)
18413 		  {
18414 		    decl = TREE_VALUE (list);
18415 		    break;
18416 		  }
18417 	    }
18418 	  else if (DECL_TEMPLATE_RESULT (tmpl) == decl)
18419 	    decl = tmpl;
18420 	}
18421       unsigned index = import_entity_index (decl);
18422       them = import_entity_module (index);
18423     }
18424 
18425   if (them->is_header ())
18426     {
18427       if (!header_module_p ())
18428 	return !module_purview_p ();
18429 
18430       if (DECL_SOURCE_LOCATION (decl) == BUILTINS_LOCATION)
18431 	/* This is a builtin, being declared in a header unit.  We
18432 	   now need to mark it as an export.  */
18433 	DECL_MODULE_EXPORT_P (decl) = true;
18434 
18435       /* If it came from a header, it's in the global module.  */
18436       return true;
18437     }
18438 
18439   if (me == them)
18440     return ((DECL_LANG_SPECIFIC (not_tmpl) && DECL_MODULE_PURVIEW_P (not_tmpl))
18441 	    == module_purview_p ());
18442 
18443   if (!me->name)
18444     me = me->parent;
18445 
18446   /* We can't have found a GMF entity from a named module.  */
18447   gcc_checking_assert (DECL_LANG_SPECIFIC (not_tmpl)
18448 		       && DECL_MODULE_PURVIEW_P (not_tmpl));
18449 
18450   return me && get_primary (them) == get_primary (me);
18451 }
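
/* For example: an entity from a header unit may be redeclared in
   another TU's global module fragment; an entity declared in this
   TU may be redeclared only with matching purview-ness; and an
   entity imported from a named module may be redeclared only by
   another module unit of the same primary module (a partition or
   the implementation unit).  */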
18452 
18453 /* DECL is being created by this TU.  Record that it came from here.
18454    We record module purview, so we can see whether a partial or explicit
18455    specialization needs to be written out, even though its purviewness
18456    comes from the most general template.  */
18457 
18458 void
18459 set_instantiating_module (tree decl)
18460 {
18461   gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
18462 	      || TREE_CODE (decl) == VAR_DECL
18463 	      || TREE_CODE (decl) == TYPE_DECL
18464 	      || TREE_CODE (decl) == CONCEPT_DECL
18465 	      || TREE_CODE (decl) == TEMPLATE_DECL
18466 	      || (TREE_CODE (decl) == NAMESPACE_DECL
18467 		  && DECL_NAMESPACE_ALIAS (decl)));
18468 
18469   if (!modules_p ())
18470     return;
18471 
18472   decl = STRIP_TEMPLATE (decl);
18473 
18474   if (!DECL_LANG_SPECIFIC (decl) && module_purview_p ())
18475     retrofit_lang_decl (decl);
18476 
18477   if (DECL_LANG_SPECIFIC (decl))
18478     {
18479       DECL_MODULE_PURVIEW_P (decl) = module_purview_p ();
18480       /* If this was imported, we'll still be in the entity_hash.  */
18481       DECL_MODULE_IMPORT_P (decl) = false;
18482     }
18483 }
18484 
18485 /* If DECL is a class member whose class is not defined in this TU
18486    (it was imported), remember this decl.  */
18487 
18488 void
18489 set_defining_module (tree decl)
18490 {
18491   gcc_checking_assert (!DECL_LANG_SPECIFIC (decl)
18492 		       || !DECL_MODULE_IMPORT_P (decl));
18493 
18494   if (module_has_cmi_p ())
18495     {
18496       tree ctx = DECL_CONTEXT (decl);
18497       if (ctx
18498 	  && (TREE_CODE (ctx) == RECORD_TYPE || TREE_CODE (ctx) == UNION_TYPE)
18499 	  && DECL_LANG_SPECIFIC (TYPE_NAME (ctx))
18500 	  && DECL_MODULE_IMPORT_P (TYPE_NAME (ctx)))
18501 	{
18502 	  /* This entity's context is from an import.  We may need to
18503 	     record this entity to make sure we emit it in the CMI.
18504 	     Template specializations are in the template hash tables,
18505 	     so we don't need to record them here as well.  */
18506 	  int use_tpl = -1;
18507 	  tree ti = node_template_info (decl, use_tpl);
18508 	  if (use_tpl <= 0)
18509 	    {
18510 	      if (ti)
18511 		{
18512 		  gcc_checking_assert (!use_tpl);
18513 		  /* Get to the TEMPLATE_DECL.  */
18514 		  decl = TI_TEMPLATE (ti);
18515 		}
18516 
18517 	      /* Record it on the class_members list.  */
18518 	      vec_safe_push (class_members, decl);
18519 	    }
18520 	}
18521       else if (DECL_IMPLICIT_TYPEDEF_P (decl)
18522 	       && CLASSTYPE_TEMPLATE_SPECIALIZATION (TREE_TYPE (decl)))
18523 	/* This is a partial or explicit specialization.  */
18524 	vec_safe_push (partial_specializations, decl);
18525     }
18526 }
18527 
18528 void
18529 set_originating_module (tree decl, bool friend_p ATTRIBUTE_UNUSED)
18530 {
18531   set_instantiating_module (decl);
18532 
18533   if (TREE_CODE (CP_DECL_CONTEXT (decl)) != NAMESPACE_DECL)
18534     return;
18535 
18536   gcc_checking_assert (friend_p || decl == get_originating_module_decl (decl));
18537 
18538   if (!module_exporting_p ())
18539     return;
18540 
18541   // FIXME: Check ill-formed linkage
18542   DECL_MODULE_EXPORT_P (decl) = true;
18543 }
18544 
18545 /* DECL is attached to CTX for ODR purposes.  */
18546 
18547 void
18548 maybe_attach_decl (tree ctx, tree decl)
18549 {
18550   if (!modules_p ())
18551     return;
18552 
18553   // FIXME: For now just deal with lambdas attached to var decls.
18554   // This might be sufficient?
18555   if (TREE_CODE (ctx) != VAR_DECL)
18556     return;
18557 
18558   gcc_checking_assert (DECL_NAMESPACE_SCOPE_P (ctx));
18559 
18560   if (!attached_table)
18561     attached_table = new attached_map_t (EXPERIMENT (1, 400));
18562 
18563   auto &vec = attached_table->get_or_insert (ctx);
18564   if (!vec.length ())
18565     {
18566       retrofit_lang_decl (ctx);
18567       DECL_MODULE_ATTACHMENTS_P (ctx) = true;
18568     }
18569   vec.safe_push (decl);
18570 }
18571 
18572 /* Create the flat name string.  It is simplest to have it handy.  */
18573 
18574 void
18575 module_state::set_flatname ()
18576 {
18577   gcc_checking_assert (!flatname);
18578   if (parent)
18579     {
18580       auto_vec<tree,5> ids;
18581       size_t len = 0;
18582       char const *primary = NULL;
18583       size_t pfx_len = 0;
18584 
18585       for (module_state *probe = this;
18586 	   probe;
18587 	   probe = probe->parent)
18588 	if (is_partition () && !probe->is_partition ())
18589 	  {
18590 	    primary = probe->get_flatname ();
18591 	    pfx_len = strlen (primary);
18592 	    break;
18593 	  }
18594 	else
18595 	  {
18596 	    ids.safe_push (probe->name);
18597 	    len += IDENTIFIER_LENGTH (probe->name) + 1;
18598 	  }
18599 
18600       char *flat = XNEWVEC (char, pfx_len + len + is_partition ());
18601       flatname = flat;
18602 
18603       if (primary)
18604 	{
18605 	  memcpy (flat, primary, pfx_len);
18606 	  flat += pfx_len;
18607 	  *flat++ = ':';
18608 	}
18609 
18610       for (unsigned len = 0; ids.length ();)
18611 	{
18612 	  if (len)
18613 	    flat[len++] = '.';
18614 	  tree elt = ids.pop ();
18615 	  unsigned l = IDENTIFIER_LENGTH (elt);
18616 	  memcpy (flat + len, IDENTIFIER_POINTER (elt), l + 1);
18617 	  len += l;
18618 	}
18619     }
18620   else if (is_header ())
18621     flatname = TREE_STRING_POINTER (name);
18622   else
18623     flatname = IDENTIFIER_POINTER (name);
18624 }
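
/* So, for example, the flat name of module foo.bar is "foo.bar", a
   partition part of it gets "foo.bar:part", and a header unit's flat
   name is simply its path string (an absolute path or one beginning
   with "./").  */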
18625 
18626 /* Read the CMI file for a module.  */
18627 
18628 bool
18629 module_state::do_import (cpp_reader *reader, bool outermost)
18630 {
18631   gcc_assert (global_namespace == current_scope () && loadedness == ML_NONE);
18632 
18633   loc = linemap_module_loc (line_table, loc, get_flatname ());
18634 
18635   if (lazy_open >= lazy_limit)
18636     freeze_an_elf ();
18637 
18638   int fd = -1;
18639   int e = ENOENT;
18640   if (filename)
18641     {
18642       const char *file = maybe_add_cmi_prefix (filename);
18643       dump () && dump ("CMI is %s", file);
18644       if (note_module_cmi_yes || inform_cmi_p)
18645 	inform (loc, "reading CMI %qs", file);
18646       fd = open (file, O_RDONLY | O_CLOEXEC | O_BINARY);
18647       e = errno;
18648     }
18649 
18650   gcc_checking_assert (!slurp);
18651   slurp = new slurping (new elf_in (fd, e));
18652 
18653   bool ok = true;
18654   if (!from ()->get_error ())
18655     {
18656       announce ("importing");
18657       loadedness = ML_CONFIG;
18658       lazy_open++;
18659       ok = read_initial (reader);
18660       slurp->lru = ++lazy_lru;
18661     }
18662 
18663   gcc_assert (slurp->current == ~0u);
18664 
18665   return check_read (outermost, ok);
18666 }
18667 
18668 /* Attempt to increase the file descriptor limit.  */
18669 
18670 static bool
18671 try_increase_lazy (unsigned want)
18672 {
18673   gcc_checking_assert (lazy_open >= lazy_limit);
18674 
18675   /* If we're increasing, saturate at hard limit.  */
18676   if (want > lazy_hard_limit && lazy_limit < lazy_hard_limit)
18677     want = lazy_hard_limit;
18678 
18679 #if HAVE_SETRLIMIT
18680   if ((!lazy_limit || !param_lazy_modules)
18681       && lazy_hard_limit
18682       && want <= lazy_hard_limit)
18683     {
18684       struct rlimit rlimit;
18685       rlimit.rlim_cur = want + LAZY_HEADROOM;
18686       rlimit.rlim_max = lazy_hard_limit + LAZY_HEADROOM;
18687       if (!setrlimit (RLIMIT_NOFILE, &rlimit))
18688 	lazy_limit = want;
18689     }
18690 #endif
18691 
18692   return lazy_open < lazy_limit;
18693 }
18694 
18695 /* Pick a victim module to freeze its reader.  */
18696 
18697 void
18698 module_state::freeze_an_elf ()
18699 {
18700   if (try_increase_lazy (lazy_open * 2))
18701     return;
18702 
18703   module_state *victim = NULL;
18704   for (unsigned ix = modules->length (); ix--;)
18705     {
18706       module_state *candidate = (*modules)[ix];
18707       if (candidate && candidate->slurp && candidate->slurp->lru
18708 	  && candidate->from ()->is_freezable ()
18709 	  && (!victim || victim->slurp->lru > candidate->slurp->lru))
18710 	victim = candidate;
18711     }
18712 
18713   if (victim)
18714     {
18715       dump () && dump ("Freezing '%s'", victim->filename);
18716       if (victim->slurp->macro_defs.size)
18717 	/* Save the macro definitions to a buffer.  */
18718 	victim->from ()->preserve (victim->slurp->macro_defs);
18719       if (victim->slurp->macro_tbl.size)
18720 	/* Save the macro table to a buffer.  */
18721 	victim->from ()->preserve (victim->slurp->macro_tbl);
18722       victim->from ()->freeze ();
18723       lazy_open--;
18724     }
18725   else
18726     dump () && dump ("No module available for freezing");
18727 }
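
/* In other words: lazy_open counts the CMIs currently open.  Once it
   reaches lazy_limit we first try to raise the limit (up to the
   RLIMIT_NOFILE-derived lazy_hard_limit), and failing that freeze
   the least-recently-used ELF, preserving any macro buffers it still
   needs; maybe_defrost above reopens it on demand.  */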
18728 
18729 /* Load the lazy slot *MSLOT, INDEX'th slot of the module.  */
18730 
18731 bool
18732 module_state::lazy_load (unsigned index, binding_slot *mslot)
18733 {
18734   unsigned n = dump.push (this);
18735 
18736   gcc_checking_assert (function_depth);
18737 
18738   unsigned cookie = mslot->get_lazy ();
18739   unsigned snum = cookie >> 2;
18740   dump () && dump ("Loading entity %M[%u] section:%u", this, index, snum);
18741 
18742   bool ok = load_section (snum, mslot);
18743 
18744   dump.pop (n);
18745 
18746   return ok;
18747 }
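
/* Note the entity cookie decoded above keeps the section number in
   its upper bits (hence the >> 2), whereas the cookie consumed by
   lazy_load_binding below is already a plain section number.  */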
18748 
18749 /* Load MOD's binding for NS::ID into *MSLOT.  *MSLOT contains the
18750    lazy cookie.  */
18752 
18753 void
18754 lazy_load_binding (unsigned mod, tree ns, tree id, binding_slot *mslot)
18755 {
18756   int count = errorcount + warningcount;
18757 
18758   timevar_start (TV_MODULE_IMPORT);
18759 
18760   /* Stop GC happening, even in outermost loads (because our caller
18761      could well be building up a lookup set).  */
18762   function_depth++;
18763 
18764   gcc_checking_assert (mod);
18765   module_state *module = (*modules)[mod];
18766   unsigned n = dump.push (module);
18767 
18768   unsigned snum = mslot->get_lazy ();
18769   dump () && dump ("Lazily binding %P@%N section:%u", ns, id,
18770 		   module->name, snum);
18771 
18772   bool ok = !recursive_lazy (snum);
18773   if (ok)
18774     {
18775       ok = module->load_section (snum, mslot);
18776       lazy_snum = 0;
18777       post_load_processing ();
18778     }
18779 
18780   dump.pop (n);
18781 
18782   function_depth--;
18783 
18784   timevar_stop (TV_MODULE_IMPORT);
18785 
18786   if (!ok)
18787     fatal_error (input_location,
18788 		 module->is_header ()
18789 		 ? G_("failed to load binding %<%E%s%E%>")
18790 		 : G_("failed to load binding %<%E%s%E@%s%>"),
18791 		 ns, &"::"[ns == global_namespace ? 2 : 0], id,
18792 		 module->get_flatname ());
18793 
18794   if (count != errorcount + warningcount)
18795     inform (input_location,
18796 	    module->is_header ()
18797 	    ? G_("during load of binding %<%E%s%E%>")
18798 	    : G_("during load of binding %<%E%s%E@%s%>"),
18799 	    ns, &"::"[ns == global_namespace ? 2 : 0], id,
18800 	    module->get_flatname ());
18801 }
18802 
18803 /* Load any pending entities keyed to the top-key of DECL.  */
18804 
18805 void
18806 lazy_load_pendings (tree decl)
18807 {
18808   tree key_decl;
18809   pending_key key;
18810   key.ns = find_pending_key (decl, &key_decl);
18811   key.id = DECL_NAME (key_decl);
18812 
18813   auto *pending_vec = pending_table ? pending_table->get (key) : nullptr;
18814   if (!pending_vec)
18815     return;
18816 
18817   int count = errorcount + warningcount;
18818 
18819   timevar_start (TV_MODULE_IMPORT);
18820   bool ok = !recursive_lazy ();
18821   if (ok)
18822     {
18823       function_depth++; /* Prevent GC */
18824       unsigned n = dump.push (NULL);
18825       dump () && dump ("Reading %u pending entities keyed to %P",
18826 		       pending_vec->length (), key.ns, key.id);
18827       for (unsigned ix = pending_vec->length (); ix--;)
18828 	{
18829 	  unsigned index = (*pending_vec)[ix];
18830 	  binding_slot *slot = &(*entity_ary)[index];
18831 
18832 	  if (slot->is_lazy ())
18833 	    {
18834 	      module_state *import = import_entity_module (index);
18835 	      if (!import->lazy_load (index - import->entity_lwm, slot))
18836 		ok = false;
18837 	    }
18838 	  else if (dump ())
18839 	    {
18840 	      module_state *import = import_entity_module (index);
18841 	      dump () && dump ("Entity %M[%u] already loaded",
18842 			       import, index - import->entity_lwm);
18843 	    }
18844 	}
18845 
18846       pending_table->remove (key);
18847       dump.pop (n);
18848       lazy_snum = 0;
18849       post_load_processing ();
18850       function_depth--;
18851     }
18852 
18853   timevar_stop (TV_MODULE_IMPORT);
18854 
18855   if (!ok)
18856     fatal_error (input_location, "failed to load pendings for %<%E%s%E%>",
18857 		 key.ns, &"::"[key.ns == global_namespace ? 2 : 0], key.id);
18858 
18859   if (count != errorcount + warningcount)
18860     inform (input_location, "during load of pendings for %<%E%s%E%>",
18861 	    key.ns, &"::"[key.ns == global_namespace ? 2 : 0], key.id);
18862 }
18863 
18864 static void
18865 direct_import (module_state *import, cpp_reader *reader)
18866 {
18867   timevar_start (TV_MODULE_IMPORT);
18868   unsigned n = dump.push (import);
18869 
18870   gcc_checking_assert (import->is_direct () && import->has_location ());
18871   if (import->loadedness == ML_NONE)
18872     if (!import->do_import (reader, true))
18873       gcc_unreachable ();
18874 
18875   if (import->loadedness < ML_LANGUAGE)
18876     {
18877       if (!attached_table)
18878 	attached_table = new attached_map_t (EXPERIMENT (1, 400));
18879       import->read_language (true);
18880     }
18881 
18882   (*modules)[0]->set_import (import, import->exported_p);
18883 
18884   dump.pop (n);
18885   timevar_stop (TV_MODULE_IMPORT);
18886 }
18887 
18888 /* Import module IMPORT.  */
18889 
18890 void
18891 import_module (module_state *import, location_t from_loc, bool exporting_p,
18892 	       tree, cpp_reader *reader)
18893 {
18894   if (!import->check_not_purview (from_loc))
18895     return;
18896 
18897   if (!import->is_header () && current_lang_depth ())
18898     /* Only header units should appear inside language
18899        specifications.  The std doesn't specify this, but I think
18900        that's an error in resolving US 033, because language linkage
18901        is also our escape clause to getting things into the global
18902        module, so we don't want to confuse things by having to think
18903        about whether 'extern "C++" { import foo; }' puts foo's
18904        contents into the global module all of a sudden.  */
18905     warning (0, "import of named module %qs inside language-linkage block",
18906 	     import->get_flatname ());
18907 
18908   if (exporting_p || module_exporting_p ())
18909     import->exported_p = true;
18910 
18911   if (import->loadedness != ML_NONE)
18912     {
18913       from_loc = ordinary_loc_of (line_table, from_loc);
18914       linemap_module_reparent (line_table, import->loc, from_loc);
18915     }
18916   gcc_checking_assert (!import->module_p);
18917   gcc_checking_assert (import->is_direct () && import->has_location ());
18918 
18919   direct_import (import, reader);
18920 }
18921 
18922 /* Declare the current module to be MODULE.  EXPORTING_P is
18923    true if this TU is the exporting module unit.  */
18924 
18925 void
18926 declare_module (module_state *module, location_t from_loc, bool exporting_p,
18927 		tree, cpp_reader *reader)
18928 {
18929   gcc_assert (global_namespace == current_scope ());
18930 
18931   module_state *current = (*modules)[0];
18932   if (module_purview_p () || module->loadedness > ML_CONFIG)
18933     {
18934       error_at (from_loc, module_purview_p ()
18935 		? G_("module already declared")
18936 		: G_("module already imported"));
18937       if (module_purview_p ())
18938 	module = current;
18939       inform (module->loc, module_purview_p ()
18940 	      ? G_("module %qs declared here")
18941 	      : G_("module %qs imported here"),
18942 	      module->get_flatname ());
18943       return;
18944     }
18945 
18946   gcc_checking_assert (module->module_p);
18947   gcc_checking_assert (module->is_direct () && module->has_location ());
18948 
18949   /* Yer a module, 'arry.  */
18950   module_kind &= ~MK_GLOBAL;
18951   module_kind |= MK_MODULE;
18952 
18953   if (module->is_partition () || exporting_p)
18954     {
18955       gcc_checking_assert (module->get_flatname ());
18956 
18957       if (module->is_partition ())
18958 	module_kind |= MK_PARTITION;
18959 
18960       if (exporting_p)
18961 	{
18962 	  module->interface_p = true;
18963 	  module_kind |= MK_INTERFACE;
18964 	}
18965 
18966       if (module->is_header ())
18967 	module_kind |= MK_GLOBAL | MK_EXPORTING;
18968 
18969       /* Copy the importing information we may have already done.  We
18970 	 do not need to separate out the imports that only happen in
18971 	 the GMF, in spite of what the literal wording of the std
18972 	 might imply.  See p2191, the core list had a discussion
18973 	 where the module implementors agreed that the GMF of a named
18974 	 module is invisible to importers.  */
18975       module->imports = current->imports;
18976 
18977       module->mod = 0;
18978       (*modules)[0] = module;
18979     }
18980   else
18981     {
18982       module->interface_p = true;
18983       current->parent = module; /* So mangler knows module identity. */
18984       direct_import (module, reader);
18985     }
18986 }
18987 
18988 /* +1, we're the primary or a partition.  Therefore emitting a
18989    globally-callable idempotent initializer function.
18990    -1, we have direct imports.  Therefore emitting calls to their
18991    initializers.  */
18992 
18993 int
18994 module_initializer_kind ()
18995 {
18996   int result = 0;
18997 
18998   if (module_has_cmi_p () && !header_module_p ())
18999     result = +1;
19000   else if (num_init_calls_needed)
19001     result = -1;
19002 
19003   return result;
19004 }
19005 
19006 /* Emit calls to each direct import's global initializer.  Including
19007    direct imports of directly imported header units.  The initializers
19008    of (static) entities in header units will be called by their
19009    importing modules (for the instance contained within that), or by
19010    the current TU (for the instances we've brought in).  Of course
19011    such header unit behaviour is evil, but iostream went through that
19012    door some time ago.  */
19013 
19014 void
19015 module_add_import_initializers ()
19016 {
19017   unsigned calls = 0;
19018   if (modules)
19019     {
19020       tree fntype = build_function_type (void_type_node, void_list_node);
19021       releasing_vec args;  // There are no args
19022 
19023       for (unsigned ix = modules->length (); --ix;)
19024 	{
19025 	  module_state *import = (*modules)[ix];
19026 	  if (import->call_init_p)
19027 	    {
19028 	      tree name = mangle_module_global_init (ix);
19029 	      tree fndecl = build_lang_decl (FUNCTION_DECL, name, fntype);
19030 
19031 	      DECL_CONTEXT (fndecl) = FROB_CONTEXT (global_namespace);
19032 	      SET_DECL_ASSEMBLER_NAME (fndecl, name);
19033 	      TREE_PUBLIC (fndecl) = true;
19034 	      determine_visibility (fndecl);
19035 
19036 	      tree call = cp_build_function_call_vec (fndecl, &args,
19037 						      tf_warning_or_error);
19038 	      finish_expr_stmt (call);
19039 
19040 	      calls++;
19041 	    }
19042 	}
19043     }
19044 
19045   gcc_checking_assert (calls == num_init_calls_needed);
19046 }
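
/* A sketch of what the above arranges for a direct import "foo" that
   needs a call: a public FUNCTION_DECL whose assembler name comes
   from mangle_module_global_init (something along the lines of
   _ZGIW3foo -- illustrative only), called with no arguments from
   this TU's own global initializer.  */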
19047 
19048 /* NAME & LEN are a preprocessed header name, possibly including the
19049    surrounding "" or <> characters.  Return the raw string name of the
19050    module to which it refers.  This will be an absolute path, or begin
19051    with ./, so it is immediately distinguishable from a (non-header
19052    unit) module name.  If READER is non-null, ask the preprocessor to
19053    locate the header to which it refers using the appropriate include
19054    path.  Note that we never do \ processing of the string, as that
19055    matches the preprocessor's behaviour.  */
19056 
19057 static const char *
19058 canonicalize_header_name (cpp_reader *reader, location_t loc, bool unquoted,
19059 			  const char *str, size_t &len_r)
19060 {
19061   size_t len = len_r;
19062   static char *buf = 0;
19063   static size_t alloc = 0;
19064 
19065   if (!unquoted)
19066     {
19067       gcc_checking_assert (len >= 2
19068 			   && ((reader && str[0] == '<' && str[len-1] == '>')
19069 			       || (str[0] == '"' && str[len-1] == '"')));
19070       str += 1;
19071       len -= 2;
19072     }
19073 
19074   if (reader)
19075     {
19076       gcc_assert (!unquoted);
19077 
19078       if (len >= alloc)
19079 	{
19080 	  alloc = len + 1;
19081 	  buf = XRESIZEVEC (char, buf, alloc);
19082 	}
19083       memcpy (buf, str, len);
19084       buf[len] = 0;
19085 
19086       if (const char *hdr
19087 	  = cpp_probe_header_unit (reader, buf, str[-1] == '<', loc))
19088 	{
19089 	  len = strlen (hdr);
19090 	  str = hdr;
19091 	}
19092       else
19093 	str = buf;
19094     }
19095 
19096   if (!(str[0] == '.' ? IS_DIR_SEPARATOR (str[1]) : IS_ABSOLUTE_PATH (str)))
19097     {
19098       /* Prepend './'  */
19099       if (len + 3 > alloc)
19100 	{
19101 	  alloc = len + 3;
19102 	  buf = XRESIZEVEC (char, buf, alloc);
19103 	}
19104 
19105       buf[0] = '.';
19106       buf[1] = DIR_SEPARATOR;
19107       memmove (buf + 2, str, len);
19108       len += 2;
19109       buf[len] = 0;
19110       str = buf;
19111     }
19112 
19113   len_r = len;
19114   return str;
19115 }
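
/* For example: when READER is given, a bracketed or quoted name is
   probed via cpp_probe_header_unit and comes back as the path the
   include search found; any remaining name that is neither absolute
   nor already "./"-relative gets "./" prepended (so a hypothetical
   "util.h" becomes "./util.h"), keeping header-unit names
   distinguishable from named-module names.  */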
19116 
19117 /* Set the CMI name from a cody packet.  Issue an error if
19118    ill-formed.  */
19119 
19120 void module_state::set_filename (const Cody::Packet &packet)
19121 {
19122   gcc_checking_assert (!filename);
19123   if (packet.GetCode () == Cody::Client::PC_PATHNAME)
19124     filename = xstrdup (packet.GetString ().c_str ());
19125   else
19126     {
19127       gcc_checking_assert (packet.GetCode () == Cody::Client::PC_ERROR);
19128       error_at (loc, "unknown Compiled Module Interface: %s",
19129 		packet.GetString ().c_str ());
19130     }
19131 }
19132 
19133 /* Figure out whether to treat PATH as an include or an import.  */
19134 
19135 static char *
19136 maybe_translate_include (cpp_reader *reader, line_maps *lmaps, location_t loc,
19137 			 const char *path)
19138 {
19139   if (!modules_p ())
19140     {
19141       /* Turn off.  */
19142       cpp_get_callbacks (reader)->translate_include = NULL;
19143       return nullptr;
19144     }
19145 
19146   if (!spans.init_p ())
19147     /* Before the main file, don't divert.  */
19148     return nullptr;
19149 
19150   dump.push (NULL);
19151 
19152   dump () && dump ("Checking include translation '%s'", path);
19153   auto *mapper = get_mapper (cpp_main_loc (reader));
19154 
19155   size_t len = strlen (path);
19156   path = canonicalize_header_name (NULL, loc, true, path, len);
19157   auto packet = mapper->IncludeTranslate (path, Cody::Flags::None, len);
19158   int xlate = false;
19159   if (packet.GetCode () == Cody::Client::PC_BOOL)
19160     xlate = -int (packet.GetInteger ());
19161   else if (packet.GetCode () == Cody::Client::PC_PATHNAME)
19162     {
19163       /* Record the CMI name for when we do the import.  */
19164       module_state *import = get_module (build_string (len, path));
19165       import->set_filename (packet);
19166       xlate = +1;
19167     }
19168   else
19169     {
19170       gcc_checking_assert (packet.GetCode () == Cody::Client::PC_ERROR);
19171       error_at (loc, "cannot determine %<#include%> translation of %s: %s",
19172 		path, packet.GetString ().c_str ());
19173     }
19174 
19175   bool note = false;
19176   if (note_include_translate_yes && xlate > 1)
19177     note = true;
19178   else if (note_include_translate_no && xlate == 0)
19179     note = true;
19180   else if (note_includes)
19181     /* We do not expect the note_includes vector to be large, so O(N)
19182        iteration.  */
19183     for (unsigned ix = note_includes->length (); !note && ix--;)
19184       if (!strcmp ((*note_includes)[ix], path))
19185 	note = true;
19186 
19187   if (note)
19188     inform (loc, xlate
19189 	    ? G_("include %qs translated to import")
19190 	    : G_("include %qs processed textually") , path);
19191 
19192   dump () && dump (xlate ? "Translating include to import"
19193 		   : "Keeping include as include");
19194   dump.pop (0);
19195 
19196   if (!(xlate > 0))
19197     return nullptr;
19198 
19199   /* Create the translation text.  */
19200   loc = ordinary_loc_of (lmaps, loc);
19201   const line_map_ordinary *map
19202     = linemap_check_ordinary (linemap_lookup (lmaps, loc));
19203   unsigned col = SOURCE_COLUMN (map, loc);
19204   col -= (col != 0); /* Columns are 1-based.  */
19205 
19206   unsigned alloc = len + col + 60;
19207   char *res = XNEWVEC (char, alloc);
19208 
19209   strcpy (res, "__import");
19210   unsigned actual = 8;
19211   if (col > actual)
19212     {
19213       /* Pad out so the filename appears at the same position.  */
19214       memset (res + actual, ' ', col - actual);
19215       actual = col;
19216     }
19217   /* No need to encode characters, that's not how header names are
19218      handled.  */
19219   actual += snprintf (res + actual, alloc - actual,
19220 		      "\"%s\" [[__translated]];\n", path);
19221   gcc_checking_assert (actual < alloc);
19222 
19223   /* cpplib will delete the buffer.  */
19224   return res;
19225 }
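
/* For illustration, translating an include of "./foo.h" whose
   header-name token started at column 12 produces buffer text along
   the lines of

     __import   "./foo.h" [[__translated]];

   padded with spaces so the quoted name stays at (roughly) its
   original column, which cpplib then tokenizes in place of the
   #include.  */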
19226 
19227 static void
19228 begin_header_unit (cpp_reader *reader)
19229 {
19230   /* Set the module header name from the main_input_filename.  */
19231   const char *main = main_input_filename;
19232   size_t len = strlen (main);
19233   main = canonicalize_header_name (NULL, 0, true, main, len);
19234   module_state *module = get_module (build_string (len, main));
19235 
19236   preprocess_module (module, cpp_main_loc (reader), false, false, true, reader);
19237 }
19238 
19239 /* We've just properly entered the main source file.  I.e. after the
19240    command line, builtins and forced headers.  Record the line map and
19241    location of this map.  Note we may be called more than once.  The
19242    first call sticks.  */
19243 
19244 void
19245 module_begin_main_file (cpp_reader *reader, line_maps *lmaps,
19246 		       const line_map_ordinary *map)
19247 {
19248   gcc_checking_assert (lmaps == line_table);
19249   if (modules_p () && !spans.init_p ())
19250     {
19251       unsigned n = dump.push (NULL);
19252       spans.init (lmaps, map);
19253       dump.pop (n);
19254       if (flag_header_unit && !cpp_get_options (reader)->preprocessed)
19255 	{
19256 	  /* Tell the preprocessor this is an include file.  */
19257 	  cpp_retrofit_as_include (reader);
19258 	  begin_header_unit (reader);
19259 	}
19260     }
19261 }
19262 
19263 /* Process the pending_import queue, making sure we know the
19264    filenames.   */
19265 
19266 static void
19267 name_pending_imports (cpp_reader *reader)
19268 {
19269   auto *mapper = get_mapper (cpp_main_loc (reader));
19270 
19271   if (!vec_safe_length (pending_imports))
19272     /* Not doing anything.  */
19273     return;
19274 
19275   timevar_start (TV_MODULE_MAPPER);
19276 
19277   auto n = dump.push (NULL);
19278   dump () && dump ("Resolving direct import names");
19279   bool want_deps = (bool (mapper->get_flags () & Cody::Flags::NameOnly)
19280 		    || cpp_get_deps (reader));
19281   bool any = false;
19282 
19283   for (unsigned ix = 0; ix != pending_imports->length (); ix++)
19284     {
19285       module_state *module = (*pending_imports)[ix];
19286       gcc_checking_assert (module->is_direct ());
19287       if (!module->filename && !module->visited_p)
19288 	{
19289 	  bool export_p = (module->module_p
19290 			   && (module->is_partition () || module->exported_p));
19291 
19292 	  Cody::Flags flags = Cody::Flags::None;
19293 	  if (flag_preprocess_only
19294 	      && !(module->is_header () && !export_p))
19295 	    {
19296 	      if (!want_deps)
19297 		continue;
19298 	      flags = Cody::Flags::NameOnly;
19299 	    }
19300 
19301 	  if (!any)
19302 	    {
19303 	      any = true;
19304 	      mapper->Cork ();
19305 	    }
19306 	  if (export_p)
19307 	    mapper->ModuleExport (module->get_flatname (), flags);
19308 	  else
19309 	    mapper->ModuleImport (module->get_flatname (), flags);
19310 	  module->visited_p = true;
19311 	}
19312     }
19313 
19314   if (any)
19315     {
19316       auto response = mapper->Uncork ();
19317       auto r_iter = response.begin ();
19318       for (unsigned ix = 0; ix != pending_imports->length (); ix++)
19319 	{
19320 	  module_state *module = (*pending_imports)[ix];
19321 	  if (module->visited_p)
19322 	    {
19323 	      module->visited_p = false;
19324 	      gcc_checking_assert (!module->filename);
19325 
19326 	      module->set_filename (*r_iter);
19327 	      ++r_iter;
19328 	    }
19329 	}
19330     }
19331 
19332   dump.pop (n);
19333 
19334   timevar_stop (TV_MODULE_MAPPER);
19335 }
19336 
19337 /* We've just lexed a module-specific control line for MODULE.  Mark
19338    the module as a direct import, and possibly load up its macro
19339    state.  Returns the primary module, if this is a module
19340    declaration.  */
19341 /* Perhaps we should offer a preprocessing mode where we read the
19342    directives from the header unit, rather than require the header's
19343    CMI.  */
19344 
19345 module_state *
19346 preprocess_module (module_state *module, location_t from_loc,
19347 		   bool in_purview, bool is_import, bool is_export,
19348 		   cpp_reader *reader)
19349 {
19350   if (!is_import)
19351     {
19352       if (module->loc)
19353 	/* It's already been mentioned, so ignore its module-ness.  */
19354 	is_import = true;
19355       else
19356 	{
19357 	  /* Record it is the module.  */
19358 	  module->module_p = true;
19359 	  if (is_export)
19360 	    {
19361 	      module->exported_p = true;
19362 	      module->interface_p = true;
19363 	    }
19364 	}
19365     }
19366 
19367   if (module->directness < MD_DIRECT + in_purview)
19368     {
19369       /* Mark as a direct import.  */
19370       module->directness = module_directness (MD_DIRECT + in_purview);
19371 
19372       /* Set the location to be most informative for users.  */
19373       from_loc = ordinary_loc_of (line_table, from_loc);
19374       if (module->loadedness != ML_NONE)
19375 	linemap_module_reparent (line_table, module->loc, from_loc);
19376       else
19377 	{
19378 	  module->loc = from_loc;
19379 	  if (!module->flatname)
19380 	    module->set_flatname ();
19381 	}
19382     }
19383 
19384   auto desired = ML_CONFIG;
19385   if (is_import
19386       && module->is_header ()
19387       && (!cpp_get_options (reader)->preprocessed
19388 	  || cpp_get_options (reader)->directives_only))
19389     /* We need preprocessor state now.  */
19390     desired = ML_PREPROCESSOR;
19391 
19392   if (!is_import || module->loadedness < desired)
19393     {
19394       vec_safe_push (pending_imports, module);
19395 
19396       if (desired == ML_PREPROCESSOR)
19397 	{
19398 	  unsigned n = dump.push (NULL);
19399 
19400 	  dump () && dump ("Reading %M preprocessor state", module);
19401 	  name_pending_imports (reader);
19402 
19403 	  /* Preserve the state of the line-map.  */
19404 	  unsigned pre_hwm = LINEMAPS_ORDINARY_USED (line_table);
19405 
19406 	  /* We only need to close the span if we're going to emit a
19407 	     CMI.  But that's a little tricky -- our token scanner
19408 	     would need to be smarter -- and this isn't much state.
19409 	     Remember, we've not parsed anything at this point, so
19410 	     our module state flags are inadequate.  */
19411 	  spans.maybe_init ();
19412 	  spans.close ();
19413 
19414 	  timevar_start (TV_MODULE_IMPORT);
19415 
19416 	  /* Load the config of each pending import -- we must assign
19417 	     module numbers monotonically.  */
19418 	  for (unsigned ix = 0; ix != pending_imports->length (); ix++)
19419 	    {
19420 	      auto *import = (*pending_imports)[ix];
19421 	      if (!(import->module_p
19422 		    && (import->is_partition () || import->exported_p))
19423 		  && import->loadedness == ML_NONE
19424 		  && (import->is_header () || !flag_preprocess_only))
19425 		{
19426 		  unsigned n = dump.push (import);
19427 		  import->do_import (reader, true);
19428 		  dump.pop (n);
19429 		}
19430 	    }
19431 	  vec_free (pending_imports);
19432 
19433 	  /* Restore the line-map state.  */
19434 	  spans.open (linemap_module_restore (line_table, pre_hwm));
19435 
19436 	  /* Now read the preprocessor state of this particular
19437 	     import.  */
19438 	  if (module->loadedness == ML_CONFIG
19439 	      && module->read_preprocessor (true))
19440 	    module->import_macros ();
19441 
19442 	  timevar_stop (TV_MODULE_IMPORT);
19443 
19444 	  dump.pop (n);
19445 	}
19446     }
19447 
19448   return is_import ? NULL : get_primary (module);
19449 }
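
/* Illustrative walk-through of the above: given an interface unit

     export module quux;
     import "config.h";   // header unit
     import bar;          // named module

   the header-unit import wants ML_PREPROCESSOR state, so we
   batch-name the pending imports via the mapper, load each one's
   config (keeping module numbers monotonic), and read "config.h"'s
   preprocessor state so its macros are visible for the rest of
   phase 4.  The import of bar only needs ML_CONFIG at this point;
   it is merely queued, named at the end of preprocessing, and fully
   imported when the parser reaches the import-declaration.  */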
19450 
19451 /* We've completed phase-4 translation.  Emit any dependency
19452    information for the not-yet-loaded direct imports, and fill in
19453    their file names.  We'll have already loaded up the direct header
19454    unit wavefront.  */
19455 
19456 void
19457 preprocessed_module (cpp_reader *reader)
19458 {
19459   unsigned n = dump.push (NULL);
19460 
19461   dump () && dump ("Completed phase-4 (tokenization) processing");
19462 
19463   name_pending_imports (reader);
19464   vec_free (pending_imports);
19465 
19466   spans.maybe_init ();
19467   spans.close ();
19468 
19469   using iterator = hash_table<module_state_hash>::iterator;
19470   if (mkdeps *deps = cpp_get_deps (reader))
19471     {
19472       /* Walk the module hash, informing the dependency machinery.  */
19473       iterator end = modules_hash->end ();
19474       for (iterator iter = modules_hash->begin (); iter != end; ++iter)
19475 	{
19476 	  module_state *module = *iter;
19477 
19478 	  if (module->is_direct ())
19479 	    {
19480 	      if (module->is_module ()
19481 		  && (module->is_interface () || module->is_partition ()))
19482 		deps_add_module_target (deps, module->get_flatname (),
19483 					maybe_add_cmi_prefix (module->filename),
19484 					module->is_header ());
19485 	      else
19486 		deps_add_module_dep (deps, module->get_flatname ());
19487 	    }
19488 	}
19489     }
19490 
19491   if (flag_header_unit && !flag_preprocess_only)
19492     {
19493       /* Find the main module -- remember, it's not yet in the module
19494 	 array.  */
19495       iterator end = modules_hash->end ();
19496       for (iterator iter = modules_hash->begin (); iter != end; ++iter)
19497 	{
19498 	  module_state *module = *iter;
19499 	  if (module->is_module ())
19500 	    {
19501 	      declare_module (module, cpp_main_loc (reader), true, NULL, reader);
19502 	      break;
19503 	    }
19504 	}
19505     }
19506 
19507   dump.pop (n);
19508 }
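
/* For illustration only: with -MD, a TU quux.cc that declares
   'export module quux;' and imports bar ends up with make fragments
   of roughly this shape (the exact spelling, including the .c++m
   convention and the CMI directory, comes from the dependency
   machinery and the mapper, not from here):

     quux.c++m: gcm.cache/quux.gcm      # deps_add_module_target
     quux.o: bar.c++m                   # deps_add_module_dep  */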
19509 
19510 /* VAL is a global tree, add it to the global vec if it is
19511    interesting.  Add some of its targets, if they too are
19512    interesting.  We do not add identifiers, as they can be re-found
19513    via the identifier hash table.  There is a cost to the number of
19514    global trees.  */
19515 
19516 static int
19517 maybe_add_global (tree val, unsigned &crc)
19518 {
19519   int v = 0;
19520 
19521   if (val && !(identifier_p (val) || TREE_VISITED (val)))
19522     {
19523       TREE_VISITED (val) = true;
19524       crc = crc32_unsigned (crc, fixed_trees->length ());
19525       vec_safe_push (fixed_trees, val);
19526       v++;
19527 
19528       if (CODE_CONTAINS_STRUCT (TREE_CODE (val), TS_TYPED))
19529 	v += maybe_add_global (TREE_TYPE (val), crc);
19530       if (CODE_CONTAINS_STRUCT (TREE_CODE (val), TS_TYPE_COMMON))
19531 	v += maybe_add_global (TYPE_NAME (val), crc);
19532     }
19533 
19534   return v;
19535 }
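
/* Note that the crc computed above is both content- and
   order-sensitive: each interesting tree mixes in the index at which
   it lands, and init_modules finally folds in the total count.  A
   reader whose fixed-tree table differs from the writer's in
   membership or layout is thus meant to fail the CMI configuration
   check rather than silently mis-resolve fixed-tree references.  */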
19536 
19537 /* Initialize module state.  Create the hash table, determine the
19538    global trees.  Create the module for current TU.  */
19539 
19540 void
19541 init_modules (cpp_reader *reader)
19542 {
19543   /* PCH should not be reachable because of lang-specs, but the
19544      user could have overridden that.  */
19545   if (pch_file)
19546     fatal_error (input_location,
19547 		 "C++ modules are incompatible with precompiled headers");
19548 
19549   if (cpp_get_options (reader)->traditional)
19550     fatal_error (input_location,
19551 		 "C++ modules are incompatible with traditional preprocessing");
19552 
19553   if (flag_preprocess_only)
19554     {
19555       cpp_options *cpp_opts = cpp_get_options (reader);
19556       if (flag_no_output
19557 	  || (cpp_opts->deps.style != DEPS_NONE
19558 	      && !cpp_opts->deps.need_preprocessor_output))
19559 	{
19560 	  warning (0, flag_dump_macros == 'M'
19561 		   ? G_("macro debug output may be incomplete with modules")
19562 		   : G_("module dependencies require preprocessing"));
19563 	  if (cpp_opts->deps.style != DEPS_NONE)
19564 	    inform (input_location, "you should use the %<-%s%> option",
19565 		    cpp_opts->deps.style == DEPS_SYSTEM ? "MD" : "MMD");
19566 	}
19567     }
19568 
19569   /* :: is always exported.  */
19570   DECL_MODULE_EXPORT_P (global_namespace) = true;
19571 
19572   modules_hash = hash_table<module_state_hash>::create_ggc (31);
19573   vec_safe_reserve (modules, 20);
19574 
19575   /* Create module for current TU.  */
19576   module_state *current
19577     = new (ggc_alloc<module_state> ()) module_state (NULL_TREE, NULL, false);
19578   current->mod = 0;
19579   bitmap_set_bit (current->imports, 0);
19580   modules->quick_push (current);
19581 
19582   gcc_checking_assert (!fixed_trees);
19583 
19584   headers = BITMAP_GGC_ALLOC ();
19585 
19586   if (note_includes)
19587     /* Canonicalize header names.  */
19588     for (unsigned ix = 0; ix != note_includes->length (); ix++)
19589       {
19590 	const char *hdr = (*note_includes)[ix];
19591 	size_t len = strlen (hdr);
19592 
19593 	bool system = hdr[0] == '<';
19594 	bool user = hdr[0] == '"';
19595 	bool delimed = system || user;
19596 
19597 	if (len <= (delimed ? 2 : 0)
19598 	    || (delimed && hdr[len-1] != (system ? '>' : '"')))
19599 	  error ("invalid header name %qs", hdr);
19600 
19601 	hdr = canonicalize_header_name (delimed ? reader : NULL,
19602 					0, !delimed, hdr, len);
19603 	char *path = XNEWVEC (char, len + 1);
19604 	memcpy (path, hdr, len);
19605 	path[len] = 0;
19606 
19607 	(*note_includes)[ix] = path;
19608       }
19609 
19610   if (note_cmis)
19611     /* Canonicalize & mark module names.  */
19612     for (unsigned ix = 0; ix != note_cmis->length (); ix++)
19613       {
19614 	const char *name = (*note_cmis)[ix];
19615 	size_t len = strlen (name);
19616 
19617 	bool is_system = name[0] == '<';
19618 	bool is_user = name[0] == '"';
19619 	bool is_pathname = false;
19620 	if (!(is_system || is_user))
19621 	  for (unsigned ix = len; !is_pathname && ix--;)
19622 	    is_pathname = IS_DIR_SEPARATOR (name[ix]);
19623 	if (is_system || is_user || is_pathname)
19624 	  {
19625 	    if (len <= (is_pathname ? 0 : 2)
19626 		|| (!is_pathname && name[len-1] != (is_system ? '>' : '"')))
19627 	      {
19628 		error ("invalid header name %qs", name);
19629 		continue;
19630 	      }
19631 	    else
19632 	      name = canonicalize_header_name (is_pathname ? nullptr : reader,
19633 					       0, is_pathname, name, len);
19634 	  }
19635 	if (auto module = get_module (name))
19636 	  module->inform_cmi_p = 1;
19637 	else
19638 	  error ("invalid module name %qs", name);
19639       }
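
  /* Thus -flang-info-module-cmi accepts a module name, a quoted or
     bracketed header name, or a header path (anything containing a
     directory separator); the header forms are canonicalized just
     like the include-translation notes above.  */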
19640 
19641   dump.push (NULL);
19642 
19643   /* Determine lazy handle bound.  */
19644   {
19645     unsigned limit = 1000;
19646 #if HAVE_GETRLIMIT
19647     struct rlimit rlimit;
19648     if (!getrlimit (RLIMIT_NOFILE, &rlimit))
19649       {
19650 	lazy_hard_limit = (rlimit.rlim_max < 1000000
19651 			   ? unsigned (rlimit.rlim_max) : 1000000);
19652 	lazy_hard_limit = (lazy_hard_limit > LAZY_HEADROOM
19653 			   ? lazy_hard_limit - LAZY_HEADROOM : 0);
19654 	if (rlimit.rlim_cur < limit)
19655 	  limit = unsigned (rlimit.rlim_cur);
19656       }
19657 #endif
19658     limit = limit > LAZY_HEADROOM ? limit - LAZY_HEADROOM : 1;
19659 
19660     if (unsigned parm = param_lazy_modules)
19661       {
19662 	if (parm <= limit || !lazy_hard_limit || !try_increase_lazy (parm))
19663 	  lazy_limit = parm;
19664       }
19665     else
19666       lazy_limit = limit;
19667   }
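
  /* Worked example with illustrative numbers: an RLIMIT_NOFILE of
     { rlim_cur = 1024, rlim_max = 4096 } gives a lazy_hard_limit of
     4096 minus LAZY_HEADROOM and a default lazy_limit of
     min (1024, 1000) minus LAZY_HEADROOM; an explicit
     param_lazy_modules overrides lazy_limit, first trying to raise
     the descriptor limit when the request exceeds the default.  */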
19668 
19669   if (dump ())
19670     {
19671       verstr_t ver;
19672       version2string (MODULE_VERSION, ver);
19673       dump ("Source: %s", main_input_filename);
19674       dump ("Compiler: %s", version_string);
19675       dump ("Modules: %s", ver);
19676       dump ("Checking: %s",
19677 #if CHECKING_P
19678 	    "checking"
19679 #elif ENABLE_ASSERT_CHECKING
19680 	    "asserting"
19681 #else
19682 	    "release"
19683 #endif
19684 	    );
19685       dump ("Compiled by: "
19686 #ifdef __GNUC__
19687 	    "GCC %d.%d, %s", __GNUC__, __GNUC_MINOR__,
19688 #ifdef __OPTIMIZE__
19689 	    "optimizing"
19690 #else
19691 	    "not optimizing"
19692 #endif
19693 #else
19694 	    "not GCC"
19695 #endif
19696 	    );
19697       dump ("Reading: %s", MAPPED_READING ? "mmap" : "fileio");
19698       dump ("Writing: %s", MAPPED_WRITING ? "mmap" : "fileio");
19699       dump ("Lazy limit: %u", lazy_limit);
19700       dump ("Lazy hard limit: %u", lazy_hard_limit);
19701       dump ("");
19702     }
19703 
19704   /* Construct the global tree array.  This is an array of unique
19705      global trees (& types).  Do this now, rather than lazily, as
19706      some global trees are lazily created and we don't want that to
19707      mess with our syndrome of fixed trees.  */
19708   unsigned crc = 0;
19709   vec_alloc (fixed_trees, 200);
19710 
19711   dump () && dump ("+Creating globals");
19712   /* Insert the TRANSLATION_UNIT_DECL.  */
19713   TREE_VISITED (DECL_CONTEXT (global_namespace)) = true;
19714   fixed_trees->quick_push (DECL_CONTEXT (global_namespace));
19715   for (unsigned jx = 0; global_tree_arys[jx].first; jx++)
19716     {
19717       const tree *ptr = global_tree_arys[jx].first;
19718       unsigned limit = global_tree_arys[jx].second;
19719 
19720       for (unsigned ix = 0; ix != limit; ix++, ptr++)
19721 	{
19722 	  !(ix & 31) && dump ("") && dump ("+\t%u:%u:", jx, ix);
19723 	  unsigned v = maybe_add_global (*ptr, crc);
19724 	  dump () && dump ("+%u", v);
19725 	}
19726     }
19727   global_crc = crc32_unsigned (crc, fixed_trees->length ());
19728   dump ("") && dump ("Created %u unique globals, crc=%x",
19729 		     fixed_trees->length (), global_crc);
19730   for (unsigned ix = fixed_trees->length (); ix--;)
19731     TREE_VISITED ((*fixed_trees)[ix]) = false;
19732 
19733   dump.pop (0);
19734 
19735   if (!flag_module_lazy)
19736     /* Get the mapper now, if we're not being lazy.  */
19737     get_mapper (cpp_main_loc (reader));
19738 
19739   if (!flag_preprocess_only)
19740     {
19741       pending_table = new pending_map_t (EXPERIMENT (1, 400));
19742       entity_map = new entity_map_t (EXPERIMENT (1, 400));
19743       vec_safe_reserve (entity_ary, EXPERIMENT (1, 400));
19744     }
19745 
19746 #if CHECKING_P
19747   note_defs = note_defs_table_t::create_ggc (1000);
19748 #endif
19749 
19750   if (flag_header_unit && cpp_get_options (reader)->preprocessed)
19751     begin_header_unit (reader);
19752 
19753   /* Collect here to make sure things are tagged correctly (when
19754      aggressively GC'd).  */
19755   ggc_collect ();
19756 }
19757 
19758 /* If NODE is a deferred macro, load it.  */
19759 
19760 static int
19761 load_macros (cpp_reader *reader, cpp_hashnode *node, void *)
19762 {
19763   location_t main_loc
19764     = MAP_START_LOCATION (LINEMAPS_ORDINARY_MAP_AT (line_table, 0));
19765 
19766   if (cpp_user_macro_p (node)
19767       && !node->value.macro)
19768     {
19769       cpp_macro *macro = cpp_get_deferred_macro (reader, node, main_loc);
19770       dump () && dump ("Loaded macro #%s %I",
19771 		       macro ? "define" : "undef", identifier (node));
19772     }
19773 
19774   return 1;
19775 }
19776 
19777 /* At the end of tokenizing, we no longer need the macro tables of
19778    imports.  But the user might have requested some checking.  */
19779 
19780 void
19781 maybe_check_all_macros (cpp_reader *reader)
19782 {
19783   if (!warn_imported_macros)
19784     return;
19785 
19786   /* Force loading of any remaining deferred macros.  This will
19787      produce diagnostics if they are ill-formed.  */
19788   unsigned n = dump.push (NULL);
19789   cpp_forall_identifiers (reader, load_macros, NULL);
19790   dump.pop (n);
19791 }
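
/* For example, if header units "a.h" and "b.h" are both imported and
   provide differing definitions of FOO, forcing the remaining
   deferred macros above lets cpp_get_deferred_macro emit its
   mismatch diagnostic even though FOO is never used; with lazy
   loading alone the problem would only surface at a use of FOO.  */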
19792 
19793 /* Write the CMI, if we're a module interface.  */
19794 
19795 void
19796 finish_module_processing (cpp_reader *reader)
19797 {
19798   if (header_module_p ())
19799     module_kind &= ~MK_EXPORTING;
19800 
19801   if (!modules || !(*modules)[0]->name)
19802     {
19803       if (flag_module_only)
19804 	warning (0, "%<-fmodule-only%> used for non-interface");
19805     }
19806   else if (!flag_syntax_only)
19807     {
19808       int fd = -1;
19809       int e = ENOENT;
19810 
19811       timevar_start (TV_MODULE_EXPORT);
19812 
19813       /* Force a valid but empty line map at the end.  This simplifies
19814 	 the line table preparation and writing logic.  */
19815       linemap_add (line_table, LC_ENTER, false, "", 0);
19816 
19817       /* We write to a tmpname, and then atomically rename.  */
19818       const char *path = NULL;
19819       char *tmp_name = NULL;
19820       module_state *state = (*modules)[0];
19821 
19822       unsigned n = dump.push (state);
19823       state->announce ("creating");
19824       if (state->filename)
19825 	{
19826 	  size_t len = 0;
19827 	  path = maybe_add_cmi_prefix (state->filename, &len);
19828 	  tmp_name = XNEWVEC (char, len + 3);
19829 	  memcpy (tmp_name, path, len);
19830 	  strcpy (&tmp_name[len], "~");
19831 
19832 	  if (!errorcount)
19833 	    for (unsigned again = 2; ; again--)
19834 	      {
19835 		fd = open (tmp_name,
19836 			   O_RDWR | O_CREAT | O_TRUNC | O_CLOEXEC | O_BINARY,
19837 			   S_IRUSR|S_IWUSR|S_IRGRP|S_IWGRP|S_IROTH|S_IWOTH);
19838 		e = errno;
19839 		if (fd >= 0 || !again || e != ENOENT)
19840 		  break;
19841 		create_dirs (tmp_name);
19842 	      }
19843 	  if (note_module_cmi_yes || state->inform_cmi_p)
19844 	    inform (state->loc, "writing CMI %qs", path);
19845 	  dump () && dump ("CMI is %s", path);
19846 	}
19847 
19848       if (errorcount)
19849 	warning_at (state->loc, 0, "not writing module %qs due to errors",
19850 		    state->get_flatname ());
19851       else
19852 	{
19853 	  elf_out to (fd, e);
19854 	  if (to.begin ())
19855 	    {
19856 	      auto loc = input_location;
19857 	      /* So crashes finger-point the module decl.  */
19858 	      input_location = state->loc;
19859 	      state->write (&to, reader);
19860 	      input_location = loc;
19861 	    }
19862 	  if (to.end ())
19863 	    {
19864 	      /* Some OSes do not replace NEWNAME if it already
19865 		 exists.  This leaves a race condition in erroneous
19866 		 concurrent builds.  */
19867 	      unlink (path);
19868 	      if (rename (tmp_name, path))
19869 		{
19870 		  dump () && dump ("Rename ('%s','%s') errno=%u", tmp_name, path, errno);
19871 		  to.set_error (errno);
19872 		}
19873 	    }
19874 
19875 	  if (to.get_error ())
19876 	    {
19877 	      error_at (state->loc, "failed to write compiled module: %s",
19878 			to.get_error (state->filename));
19879 	      state->note_cmi_name ();
19880 	    }
19881 	}
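
      /* The dance above -- stream into "foo.gcm~", then rename it
	 over "foo.gcm" -- means a consumer never sees a partially
	 written CMI: it finds the old file, no file, or the complete
	 new one.  The explicit unlink is only for systems whose
	 rename does not replace an existing target.  */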
19882 
19883       if (!errorcount)
19884 	{
19885 	  auto *mapper = get_mapper (cpp_main_loc (reader));
19886 
19887 	  mapper->ModuleCompiled (state->get_flatname ());
19888 	}
19889       else if (path)
19890 	{
19891 	  /* We failed, attempt to erase all evidence we even tried.  */
19892 	  unlink (tmp_name);
19893 	  unlink (path);
19894 	  XDELETEVEC (tmp_name);
19895 	}
19896 
19897       dump.pop (n);
19898       timevar_stop (TV_MODULE_EXPORT);
19899 
19900       ggc_collect ();
19901     }
19902 
19903   if (modules)
19904     {
19905       unsigned n = dump.push (NULL);
19906       dump () && dump ("Imported %u modules", modules->length () - 1);
19907       dump () && dump ("Containing %u clusters", available_clusters);
19908       dump () && dump ("Loaded %u clusters (%u%%)", loaded_clusters,
19909 		       (loaded_clusters * 100 + available_clusters / 2) /
19910 		       (available_clusters + !available_clusters));
19911       dump.pop (n);
19912     }
19913 
19914   if (modules && !header_module_p ())
19915     {
19916       /* Determine call_init_p.  We need the same bitmap allocation
19917          scheme as for the imports member.  */
19918       function_depth++; /* Disable GC.  */
19919       bitmap indirect_imports (BITMAP_GGC_ALLOC ());
19920 
19921       /* Because indirect imports are before their direct import, and
19922 	 we're scanning the array backwards, we only need one pass!  */
19923       for (unsigned ix = modules->length (); --ix;)
19924 	{
19925 	  module_state *import = (*modules)[ix];
19926 
19927 	  if (!import->is_header ()
19928 	      && !bitmap_bit_p (indirect_imports, ix))
19929 	    {
19930 	      /* Everything this imports is therefore indirectly
19931 		 imported.  */
19932 	      bitmap_ior_into (indirect_imports, import->imports);
19933 	      /* We don't have to worry about the self-import bit,
19934 		 because of the single pass.  */
19935 
19936 	      import->call_init_p = true;
19937 	      num_init_calls_needed++;
19938 	    }
19939 	}
19940       function_depth--;
19941     }
19942 }
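
/* Worked example of the single backward pass above: for a modules
   array [<TU>, A, B, C] where C imports B and B imports A (so, by
   construction, each import's dependencies precede it), scanning
   ix = 3, 2, 1 gives:

     ix=3 (C): not yet indirectly imported -> call_init_p, OR in {A,B}
     ix=2 (B): bit already set -> skipped
     ix=1 (A): bit already set -> skipped

   i.e. we only call the initializers of the outermost imports;
   everything they import is initialized transitively by those
   initializers.  */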
19943 
19944 void
19945 fini_modules ()
19946 {
19947   /* We're done with the macro tables now.  */
19948   vec_free (macro_exports);
19949   vec_free (macro_imports);
19950   headers = NULL;
19951 
19952   /* We're now done with everything but the module names.  */
19953   set_cmi_repo (NULL);
19954   if (mapper)
19955     {
19956       timevar_start (TV_MODULE_MAPPER);
19957       module_client::close_module_client (0, mapper);
19958       mapper = nullptr;
19959       timevar_stop (TV_MODULE_MAPPER);
19960     }
19961   module_state_config::release ();
19962 
19963 #if CHECKING_P
19964   note_defs = NULL;
19965 #endif
19966 
19967   if (modules)
19968     for (unsigned ix = modules->length (); --ix;)
19969       if (module_state *state = (*modules)[ix])
19970 	state->release ();
19971 
19972   /* No need to lookup modules anymore.  */
19973   modules_hash = NULL;
19974 
19975   /* Or entity array.  We still need the entity map to find import numbers.  */
19976   vec_free (entity_ary);
19977   entity_ary = NULL;
19978 
19979   /* Or remember any pending entities.  */
19980   delete pending_table;
19981   pending_table = NULL;
19982 
19983   /* Or any attachments -- Let it go!  */
19984   delete attached_table;
19985   attached_table = NULL;
19986 
19987   /* Allow a GC, we've possibly made much data unreachable.  */
19988   ggc_collect ();
19989 }
19990 
19991 /* If CODE is a module option, handle it & return true.  Otherwise
19992    return false.  For unknown reasons I cannot get the option
19993    generation machinery to have -fmodule-mapper or -fmodule-header
19994    set a string-type option variable.  */
19995 
19996 bool
19997 handle_module_option (unsigned code, const char *str, int)
19998 {
19999   auto hdr = CMS_header;
20000 
20001   switch (opt_code (code))
20002     {
20003     case OPT_fmodule_mapper_:
20004       module_mapper_name = str;
20005       return true;
20006 
20007     case OPT_fmodule_header_:
20008       {
20009 	if (!strcmp (str, "user"))
20010 	  hdr = CMS_user;
20011 	else if (!strcmp (str, "system"))
20012 	  hdr = CMS_system;
20013 	else
20014 	  error ("unknown header kind %qs", str);
20015       }
20016       /* Fallthrough.  */
20017 
20018     case OPT_fmodule_header:
20019       flag_header_unit = hdr;
20020       flag_modules = 1;
20021       return true;
20022 
20023     case OPT_flang_info_include_translate_:
20024       vec_safe_push (note_includes, str);
20025       return true;
20026 
20027     case OPT_flang_info_module_cmi_:
20028       vec_safe_push (note_cmis, str);
20029       return true;
20030 
20031     default:
20032       return false;
20033     }
20034 }
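
/* For reference, the cases above correspond to command lines such as
   the following (illustrative; see the invoke documentation for the
   full spellings):

     g++ -fmodules-ts -fmodule-mapper='|mapper-prog' quux.cc
     g++ -fmodules-ts -fmodule-header=user -x c++-header config.h
     g++ -fmodules-ts -flang-info-include-translate='<vector>' quux.cc
     g++ -fmodules-ts -flang-info-module-cmi=quux quux.cc  */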
20035 
20036 /* Set preprocessor callbacks and options for modules.  */
20037 
20038 void
20039 module_preprocess_options (cpp_reader *reader)
20040 {
20041   gcc_checking_assert (!lang_hooks.preprocess_undef);
20042   if (modules_p ())
20043     {
20044       auto *cb = cpp_get_callbacks (reader);
20045 
20046       cb->translate_include = maybe_translate_include;
20047       cb->user_deferred_macro = module_state::deferred_macro;
20048       if (flag_header_unit)
20049 	{
20050 	  /* If the preprocessor hook is already in use, that
20051 	     implementation will call the undef langhook.  */
20052 	  if (cb->undef)
20053 	    lang_hooks.preprocess_undef = module_state::undef_macro;
20054 	  else
20055 	    cb->undef = module_state::undef_macro;
20056 	}
20057       auto *opt = cpp_get_options (reader);
20058       opt->module_directives = true;
20059       opt->main_search = cpp_main_search (flag_header_unit);
20060     }
20061 }
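
/* With those callbacks installed, a textual

     #include "config.h"

   may be rewritten by maybe_translate_include into the equivalent of

     import "config.h";

   when a CMI for that header is known, and macros from imported
   header units are instantiated lazily through the deferred-macro
   callback rather than eagerly at import time.  */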
20062 
20063 #include "gt-cp-module.h"
20064