// Copyright (c) 2013-2016 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

// This file is NOT intended for use by clients, except in generated code.
//
// This file defines low-level, non-type-safe classes for traversing the Cap'n Proto memory layout
// (which is also its wire format).  Code generated by the Cap'n Proto compiler uses these classes,
// as do other parts of the Cap'n Proto library which provide a higher-level interface for
// dynamic introspection.

#pragma once

#include <kj/common.h>
#include <kj/memory.h>
#include "common.h"
#include "blob.h"
#include "endian.h"
#include <kj/windows-sanity.h>  // work-around macro conflict with `VOID`

CAPNP_BEGIN_HEADER

#if (defined(__mips__) || defined(__hppa__)) && !defined(CAPNP_CANONICALIZE_NAN)
#define CAPNP_CANONICALIZE_NAN 1
// Explicitly detect NaNs and canonicalize them to the quiet NaN value as would be returned by
// __builtin_nan("") on systems implementing the IEEE-754 recommended (but not required) NaN
// signalling/quiet differentiation (such as x86).  Unfortunately, some architectures -- in
// particular, MIPS -- represent quiet vs. signalling NaNs differently than the rest of the world.
// Canonicalizing them makes output consistent (which is important!), but hurts performance
// slightly.
//
// Note that trying to convert MIPS NaNs to standard NaNs without losing data doesn't work.
// Signaling vs. quiet is indicated by a bit, with the meaning being the opposite on MIPS vs.
// everyone else.  It would be great if we could just flip that bit, but we can't, because if the
// significand is all-zero, then the value is infinity rather than NaN.  This means that on most
// machines, where the bit indicates quietness, there is one more quiet NaN value than signalling
// NaN value, whereas on MIPS there is one more sNaN than qNaN, and thus there is no isomorphic
// mapping that properly preserves quietness.  Instead of doing something hacky, we just give up
// and blow away NaN payloads, because no one uses them anyway.
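//
// To illustrate the effect (a sketch only; the actual logic lives in the mask<float>() and
// mask<double>() specializations further down in this file): with canonicalization enabled, any
// NaN input -- signalling or quiet, whatever its payload or sign -- is encoded as the standard
// quiet-NaN bit pattern before the default-value XOR is applied:
//
//   mask<float>(std::nanf("0x1234"), 0);   // == 0x7fc00000u             (payload dropped)
//   mask<double>(-std::nan(""), 0);        // == 0x7ff8000000000000ull   (sign dropped too)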
#endif

namespace capnp {

class ClientHook;

namespace _ {  // private

class PointerBuilder;
class PointerReader;
class StructBuilder;
class StructReader;
class ListBuilder;
class ListReader;
class OrphanBuilder;
struct WirePointer;
struct WireHelpers;
class SegmentReader;
class SegmentBuilder;
class Arena;
class BuilderArena;

// =============================================================================

#if CAPNP_DEBUG_TYPES
typedef kj::UnitRatio<kj::Bounded<64, uint>, BitLabel, ElementLabel> BitsPerElementTableType;
#else
typedef uint BitsPerElementTableType;
#endif

static constexpr BitsPerElementTableType BITS_PER_ELEMENT_TABLE[8] = {
  bounded< 0>() * BITS / ELEMENTS,
  bounded< 1>() * BITS / ELEMENTS,
  bounded< 8>() * BITS / ELEMENTS,
  bounded<16>() * BITS / ELEMENTS,
  bounded<32>() * BITS / ELEMENTS,
  bounded<64>() * BITS / ELEMENTS,
  bounded< 0>() * BITS / ELEMENTS,
  bounded< 0>() * BITS / ELEMENTS
};

inline KJ_CONSTEXPR() BitsPerElementTableType dataBitsPerElement(ElementSize size) {
  return _::BITS_PER_ELEMENT_TABLE[static_cast<int>(size)];
}

inline constexpr PointersPerElementN<1> pointersPerElement(ElementSize size) {
  return size == ElementSize::POINTER
      ? PointersPerElementN<1>(ONE * POINTERS / ELEMENTS)
      : PointersPerElementN<1>(ZERO * POINTERS / ELEMENTS);
}

static constexpr BitsPerElementTableType BITS_PER_ELEMENT_INCLUDING_PONITERS_TABLE[8] = {
  bounded< 0>() * BITS / ELEMENTS,
  bounded< 1>() * BITS / ELEMENTS,
  bounded< 8>() * BITS / ELEMENTS,
  bounded<16>() * BITS / ELEMENTS,
  bounded<32>() * BITS / ELEMENTS,
  bounded<64>() * BITS / ELEMENTS,
  bounded<64>() * BITS / ELEMENTS,
  bounded< 0>() * BITS / ELEMENTS
};

inline KJ_CONSTEXPR() BitsPerElementTableType bitsPerElementIncludingPointers(ElementSize size) {
  return _::BITS_PER_ELEMENT_INCLUDING_PONITERS_TABLE[static_cast<int>(size)];
}

template <size_t size> struct ElementSizeForByteSize;
template <> struct ElementSizeForByteSize<1> { static constexpr ElementSize value = ElementSize::BYTE; };
template <> struct ElementSizeForByteSize<2> { static constexpr ElementSize value = ElementSize::TWO_BYTES; };
template <> struct ElementSizeForByteSize<4> { static constexpr ElementSize value = ElementSize::FOUR_BYTES; };
template <> struct ElementSizeForByteSize<8> { static constexpr ElementSize value = ElementSize::EIGHT_BYTES; };

template <typename T> struct ElementSizeForType {
  static constexpr ElementSize value =
      // Primitive types that aren't special-cased below can be determined from sizeof().
      CAPNP_KIND(T) == Kind::PRIMITIVE ? ElementSizeForByteSize<sizeof(T)>::value :
      CAPNP_KIND(T) == Kind::ENUM ? ElementSize::TWO_BYTES :
      CAPNP_KIND(T) == Kind::STRUCT ? ElementSize::INLINE_COMPOSITE :

      // Everything else is a pointer.
      ElementSize::POINTER;
};

// Void and bool are special.
template <> struct ElementSizeForType<Void> { static constexpr ElementSize value = ElementSize::VOID; };
template <> struct ElementSizeForType<bool> { static constexpr ElementSize value = ElementSize::BIT; };

// Lists and blobs are pointers, not structs.
template <typename T, Kind K> struct ElementSizeForType<List<T, K>> {
  static constexpr ElementSize value = ElementSize::POINTER;
};
template <> struct ElementSizeForType<Text> {
  static constexpr ElementSize value = ElementSize::POINTER;
};
template <> struct ElementSizeForType<Data> {
  static constexpr ElementSize value = ElementSize::POINTER;
};

template <typename T>
inline constexpr ElementSize elementSizeForType() {
  return ElementSizeForType<T>::value;
}

struct MessageSizeCounts {
  WordCountN<61, uint64_t> wordCount;  // 2^64 bytes
  uint capCount;

  MessageSizeCounts& operator+=(const MessageSizeCounts& other) {
    // OK to truncate unchecked because this class is used to count actual stuff in memory, and
    // we couldn't possibly have anywhere near 2^61 words.
    wordCount = assumeBits<61>(wordCount + other.wordCount);
    capCount += other.capCount;
    return *this;
  }

  void addWords(WordCountN<61, uint64_t> other) {
    wordCount = assumeBits<61>(wordCount + other);
  }

  MessageSize asPublic() {
    return MessageSize { unbound(wordCount / WORDS), capCount };
  }
};
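// Hypothetical usage sketch (`rootReader` and `otherReader` are made-up StructReaders; the real
// callers are the totalSize()/targetSize() implementations further down):
//
//   MessageSizeCounts counts = rootReader.totalSize();
//   counts += otherReader.totalSize();           // sums word and cap counts
//   MessageSize publicSize = counts.asPublic();  // plain { uint64_t wordCount; uint capCount; }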

// =============================================================================

template <int wordCount>
union AlignedData {
  // Useful for declaring static constant data blobs as an array of bytes, but forcing those
  // bytes to be word-aligned.

  uint8_t bytes[wordCount * sizeof(word)];
  word words[wordCount];
};
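// Usage sketch (the name and byte values are arbitrary examples, not real generated data): embed
// a small constant message as bytes while guaranteeing word alignment, then view it as words.
//
//   static const AlignedData<2> EXAMPLE_BLOB = {{
//     0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,   // first word, spelled out byte-by-byte
//     0x08, 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01,   // second word
//   }};
//   const word* EXAMPLE_WORDS = EXAMPLE_BLOB.words;     // guaranteed word-aligned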

struct StructSize {
  StructDataWordCount data;
  StructPointerCount pointers;

  inline constexpr WordCountN<17> total() const { return data + pointers * WORDS_PER_POINTER; }

  StructSize() = default;
  inline constexpr StructSize(StructDataWordCount data, StructPointerCount pointers)
      : data(data), pointers(pointers) {}
};

template <typename T, typename CapnpPrivate = typename T::_capnpPrivate>
inline constexpr StructSize structSize() {
  return StructSize(bounded(CapnpPrivate::dataWordSize) * WORDS,
                    bounded(CapnpPrivate::pointerCount) * POINTERS);
}

template <typename T, typename CapnpPrivate = typename T::_capnpPrivate,
          typename = kj::EnableIf<CAPNP_KIND(T) == Kind::STRUCT>>
inline constexpr StructSize minStructSizeForElement() {
  // If T is a struct, return its struct size. Otherwise return the minimum struct size big enough
  // to hold a T.

  return StructSize(bounded(CapnpPrivate::dataWordSize) * WORDS,
                    bounded(CapnpPrivate::pointerCount) * POINTERS);
}

template <typename T, typename = kj::EnableIf<CAPNP_KIND(T) != Kind::STRUCT>>
inline constexpr StructSize minStructSizeForElement() {
  // If T is a struct, return its struct size. Otherwise return the minimum struct size big enough
  // to hold a T.

  return StructSize(
      dataBitsPerElement(elementSizeForType<T>()) * ELEMENTS > ZERO * BITS
          ? StructDataWordCount(ONE * WORDS) : StructDataWordCount(ZERO * WORDS),
      pointersPerElement(elementSizeForType<T>()) * ELEMENTS);
}
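// Worked examples of the non-struct overload above, following directly from the element size
// tables: a list of uint32_t can always be upgraded to a list of structs with one data word and
// no pointers, while a list of Text (a pointer type) needs no data words but one pointer.
//
//   minStructSizeForElement<uint32_t>();  // == StructSize(1 word,  0 pointers)
//   minStructSizeForElement<Text>();      // == StructSize(0 words, 1 pointer)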

// -------------------------------------------------------------------
// Masking of default values

template <typename T, Kind kind = CAPNP_KIND(T)> struct Mask_;
template <typename T> struct Mask_<T, Kind::PRIMITIVE> { typedef T Type; };
template <typename T> struct Mask_<T, Kind::ENUM> { typedef uint16_t Type; };
template <> struct Mask_<float, Kind::PRIMITIVE> { typedef uint32_t Type; };
template <> struct Mask_<double, Kind::PRIMITIVE> { typedef uint64_t Type; };

template <typename T> struct Mask_<T, Kind::OTHER> {
  // Union discriminants end up here.
  static_assert(sizeof(T) == 2, "Don't know how to mask this type.");
  typedef uint16_t Type;
};

template <typename T>
using Mask = typename Mask_<T>::Type;

template <typename T>
KJ_ALWAYS_INLINE(Mask<T> mask(T value, Mask<T> mask));
template <typename T>
KJ_ALWAYS_INLINE(T unmask(Mask<T> value, Mask<T> mask));

template <typename T>
inline Mask<T> mask(T value, Mask<T> mask) {
  return static_cast<Mask<T> >(value) ^ mask;
}

template <>
inline uint32_t mask<float>(float value, uint32_t mask) {
#if CAPNP_CANONICALIZE_NAN
  if (value != value) {
    return 0x7fc00000u ^ mask;
  }
#endif

  uint32_t i;
  static_assert(sizeof(i) == sizeof(value), "float is not 32 bits?");
  memcpy(&i, &value, sizeof(value));
  return i ^ mask;
}

template <>
inline uint64_t mask<double>(double value, uint64_t mask) {
#if CAPNP_CANONICALIZE_NAN
  if (value != value) {
    return 0x7ff8000000000000ull ^ mask;
  }
#endif

  uint64_t i;
  static_assert(sizeof(i) == sizeof(value), "double is not 64 bits?");
  memcpy(&i, &value, sizeof(value));
  return i ^ mask;
}

template <typename T>
inline T unmask(Mask<T> value, Mask<T> mask) {
  return static_cast<T>(value ^ mask);
}

template <>
inline float unmask<float>(uint32_t value, uint32_t mask) {
  value ^= mask;
  float result;
  static_assert(sizeof(result) == sizeof(value), "float is not 32 bits?");
  memcpy(&result, &value, sizeof(value));
  return result;
}

template <>
inline double unmask<double>(uint64_t value, uint64_t mask) {
  value ^= mask;
  double result;
  static_assert(sizeof(result) == sizeof(value), "double is not 64 bits?");
  memcpy(&result, &value, sizeof(value));
  return result;
}
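// Worked example of the XOR-masking scheme (numbers chosen arbitrarily): suppose a uint16 field
// has a declared default of 123.  The accessors pass 123 as the mask, so the wire stores
// value ^ 123, and a field left at its default remains all-zero, which packs well:
//
//   mask<uint16_t>(456, 123);   // == (456 ^ 123), the value actually written to the wire
//   mask<uint16_t>(123, 123);   // == 0: writing the default writes nothing but zeros
//   unmask<uint16_t>(0, 123);   // == 123: reading an untouched field yields the default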

// -------------------------------------------------------------------

class CapTableReader {
public:
  virtual kj::Maybe<kj::Own<ClientHook>> extractCap(uint index) = 0;
  // Extract the capability at the given index.  If the index is invalid, returns null.
};

class CapTableBuilder: public CapTableReader {
public:
  virtual uint injectCap(kj::Own<ClientHook>&& cap) = 0;
  // Add the capability to the message and return its index.  If the same ClientHook is injected
  // twice, this may return the same index both times, but in this case dropCap() needs to be
  // called an equal number of times to actually remove the cap.

  virtual void dropCap(uint index) = 0;
  // Remove a capability injected earlier.  Called when the pointer is overwritten or zero'd out.
};
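// Sketch of CapTableBuilder's refcounting contract (`capA` and `capB` are hypothetical
// kj::Own<ClientHook>s referring to the same underlying capability):
//
//   uint i = capTable.injectCap(kj::mv(capA));  // say this returns index 2
//   uint j = capTable.injectCap(kj::mv(capB));  // same hook again -- may also return 2
//   capTable.dropCap(i);                        // still referenced once...
//   capTable.dropCap(j);                        // ...now actually removed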

// -------------------------------------------------------------------

class PointerBuilder: public kj::DisallowConstCopy {
  // Represents a single pointer, usually embedded in a struct or a list.

public:
  inline PointerBuilder(): segment(nullptr), capTable(nullptr), pointer(nullptr) {}

  static inline PointerBuilder getRoot(
      SegmentBuilder* segment, CapTableBuilder* capTable, word* location);
  // Get a PointerBuilder representing a message root located in the given segment at the given
  // location.

  inline bool isNull() { return getPointerType() == PointerType::NULL_; }
  PointerType getPointerType() const;

  StructBuilder getStruct(StructSize size, const word* defaultValue);
  ListBuilder getList(ElementSize elementSize, const word* defaultValue);
  ListBuilder getStructList(StructSize elementSize, const word* defaultValue);
  ListBuilder getListAnySize(const word* defaultValue);
  template <typename T> typename T::Builder getBlob(
      const void* defaultValue, ByteCount defaultSize);
#if !CAPNP_LITE
  kj::Own<ClientHook> getCapability();
#endif  // !CAPNP_LITE
  // Get methods:  Get the value.  If it is null, initialize it to a copy of the default value.
  // The default value is encoded as an "unchecked message" for structs, lists, and objects, or a
  // simple byte array for blobs.

  StructBuilder initStruct(StructSize size);
  ListBuilder initList(ElementSize elementSize, ElementCount elementCount);
  ListBuilder initStructList(ElementCount elementCount, StructSize size);
  template <typename T> typename T::Builder initBlob(ByteCount size);
  // Init methods:  Initialize the pointer to a newly-allocated object, discarding the existing
  // object.

  void setStruct(const StructReader& value, bool canonical = false);
  void setList(const ListReader& value, bool canonical = false);
  template <typename T> void setBlob(typename T::Reader value);
#if !CAPNP_LITE
  void setCapability(kj::Own<ClientHook>&& cap);
#endif  // !CAPNP_LITE
  // Set methods:  Initialize the pointer to a newly-allocated copy of the given value, discarding
  // the existing object.

  void adopt(OrphanBuilder&& orphan);
  // Set the pointer to point at the given orphaned value.

  OrphanBuilder disown();
  // Set the pointer to null and return its previous value as an orphan.

  void clear();
  // Clear the pointer to null, discarding its previous value.

  void transferFrom(PointerBuilder other);
  // Equivalent to `adopt(other.disown())`.

  void copyFrom(PointerReader other, bool canonical = false);
  // Equivalent to `set(other.get())`.
  // If you set the canonical flag, it will attempt to lay the target out
  // canonically, provided enough space is available.

  PointerReader asReader() const;

  BuilderArena* getArena() const;
  // Get the arena containing this pointer.

  CapTableBuilder* getCapTable();
  // Gets the capability context in which this object is operating.

  PointerBuilder imbue(CapTableBuilder* capTable);
  // Return a copy of this builder except using the given capability context.

private:
  SegmentBuilder* segment;     // Memory segment in which the pointer resides.
  CapTableBuilder* capTable;   // Table of capability indexes.
  WirePointer* pointer;        // Pointer to the pointer.

  inline PointerBuilder(SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* pointer)
      : segment(segment), capTable(capTable), pointer(pointer) {}

  friend class StructBuilder;
  friend class ListBuilder;
  friend class OrphanBuilder;
};
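// Rough usage sketch of PointerBuilder (the segment, capTable, location, and sizes below are
// invented for illustration; the real callers are the generated accessors and the dynamic API):
//
//   PointerBuilder root = PointerBuilder::getRoot(segment, capTable, rootLocation);
//   StructBuilder s = root.initStruct(StructSize(bounded<2>() * WORDS, bounded<1>() * POINTERS));
//   PointerBuilder field = s.getPointerField(bounded<0>() * POINTERS);
//   Text::Builder name = field.initBlob<Text>(bounded<5>() * BYTES);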

class PointerReader {
public:
  inline PointerReader()
      : segment(nullptr), capTable(nullptr), pointer(nullptr), nestingLimit(0x7fffffff) {}

  static PointerReader getRoot(SegmentReader* segment, CapTableReader* capTable,
                               const word* location, int nestingLimit);
  // Get a PointerReader representing a message root located in the given segment at the given
  // location.

  static inline PointerReader getRootUnchecked(const word* location);
  // Get a PointerReader for an unchecked message.

  MessageSizeCounts targetSize() const;
  // Return the total size of the target object and everything to which it points.  Does not count
  // far pointer overhead.  This is useful for deciding how much space is needed to copy the object
  // into a flat array.  However, the caller is advised NOT to treat this value as secure.  Instead,
  // use the result as a hint for allocating the first segment, do the copy, and then throw an
  // exception if it overruns.

  inline bool isNull() const { return getPointerType() == PointerType::NULL_; }
  PointerType getPointerType() const;

  StructReader getStruct(const word* defaultValue) const;
  ListReader getList(ElementSize expectedElementSize, const word* defaultValue) const;
  ListReader getListAnySize(const word* defaultValue) const;
  template <typename T>
  typename T::Reader getBlob(const void* defaultValue, ByteCount defaultSize) const;
#if !CAPNP_LITE
  kj::Own<ClientHook> getCapability() const;
#endif  // !CAPNP_LITE
  // Get methods:  Get the value.  If it is null, return the default value instead.
  // The default value is encoded as an "unchecked message" for structs, lists, and objects, or a
  // simple byte array for blobs.

  const word* getUnchecked() const;
  // If this is an unchecked message, get a word* pointing at the location of the pointer.  This
  // word* can actually be passed to readUnchecked() to read the designated sub-object later.  If
  // this isn't an unchecked message, throws an exception.

  kj::Maybe<Arena&> getArena() const;
  // Get the arena containing this pointer.

  CapTableReader* getCapTable();
  // Gets the capability context in which this object is operating.

  PointerReader imbue(CapTableReader* capTable) const;
  // Return a copy of this reader except using the given capability context.

  bool isCanonical(const word **readHead);
  // Validate this pointer's canonicity, subject to the conditions:
  // * All data to the left of readHead has been read thus far (for pointer
  //   ordering)
  // * All pointers in preorder have already been checked
  // * This pointer is in the first and only segment of the message

private:
  SegmentReader* segment;      // Memory segment in which the pointer resides.
  CapTableReader* capTable;    // Table of capability indexes.
  const WirePointer* pointer;  // Pointer to the pointer.  null = treat as null pointer.

  int nestingLimit;
  // Limits the depth of message structures to guard against stack-overflow-based DoS attacks.
  // Once this reaches zero, further pointers will be pruned.

  inline PointerReader(SegmentReader* segment, CapTableReader* capTable,
                       const WirePointer* pointer, int nestingLimit)
      : segment(segment), capTable(capTable), pointer(pointer), nestingLimit(nestingLimit) {}

  friend class StructReader;
  friend class ListReader;
  friend class PointerBuilder;
  friend class OrphanBuilder;
};
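// The targetSize() caveat above, in practice (a sketch with a hypothetical `rootPointer`, not a
// prescription):
//
//   MessageSizeCounts hint = rootPointer.targetSize();
//   // Allocate the destination's first segment based on `hint`, perform the copy, and throw if
//   // the copy overruns the hint -- per the note above, the hint must not be treated as secure.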

// -------------------------------------------------------------------

class StructBuilder: public kj::DisallowConstCopy {
public:
  inline StructBuilder(): segment(nullptr), capTable(nullptr), data(nullptr), pointers(nullptr) {}

  inline word* getLocation() { return reinterpret_cast<word*>(data); }
  // Get the object's location.  Only valid for independently-allocated objects (i.e. not list
  // elements).

  inline StructDataBitCount getDataSectionSize() const { return dataSize; }
  inline StructPointerCount getPointerSectionSize() const { return pointerCount; }
  inline kj::ArrayPtr<byte> getDataSectionAsBlob();
  inline _::ListBuilder getPointerSectionAsList();

  template <typename T>
  KJ_ALWAYS_INLINE(bool hasDataField(StructDataOffset offset));
  // Return true if the field is set to something other than its default value.

  template <typename T>
  KJ_ALWAYS_INLINE(T getDataField(StructDataOffset offset));
  // Gets the data field value of the given type at the given offset.  The offset is measured in
  // multiples of the field size, determined by the type.

  template <typename T>
  KJ_ALWAYS_INLINE(T getDataField(StructDataOffset offset, Mask<T> mask));
  // Like getDataField() but applies the given XOR mask to the data on load.  Used for reading
  // fields with non-zero default values.

  template <typename T>
  KJ_ALWAYS_INLINE(void setDataField(StructDataOffset offset, kj::NoInfer<T> value));
  // Sets the data field value at the given offset.

  template <typename T>
  KJ_ALWAYS_INLINE(void setDataField(StructDataOffset offset,
                                     kj::NoInfer<T> value, Mask<T> mask));
  // Like setDataField() but applies the given XOR mask before storing.  Used for writing fields
  // with non-zero default values.

  KJ_ALWAYS_INLINE(PointerBuilder getPointerField(StructPointerOffset ptrIndex));
  // Get a builder for a pointer field given the index within the pointer section.

  void clearAll();
  // Clear all pointers and data.

  void transferContentFrom(StructBuilder other);
  // Adopt all pointers from `other`, and also copy all data.  If `other`'s sections are larger
  // than this, the extra data is not transferred, meaning there is a risk of data loss when
  // transferring from messages built with future versions of the protocol.

  void copyContentFrom(StructReader other);
  // Copy content from `other`.  If `other`'s sections are larger than this, the extra data is not
  // copied, meaning there is a risk of data loss when copying from messages built with future
  // versions of the protocol.

  StructReader asReader() const;
  // Gets a StructReader pointing at the same memory.

  BuilderArena* getArena();
  // Gets the arena in which this object is allocated.

  CapTableBuilder* getCapTable();
  // Gets the capability context in which this object is operating.

  StructBuilder imbue(CapTableBuilder* capTable);
  // Return a copy of this builder except using the given capability context.

private:
  SegmentBuilder* segment;     // Memory segment in which the struct resides.
  CapTableBuilder* capTable;   // Table of capability indexes.
  void* data;                  // Pointer to the encoded data.
  WirePointer* pointers;   // Pointer to the encoded pointers.

  StructDataBitCount dataSize;
  // Size of data section.  We use a bit count rather than a word count to more easily handle the
  // case of struct lists encoded with less than a word per element.

  StructPointerCount pointerCount;  // Size of the pointer section.

  inline StructBuilder(SegmentBuilder* segment, CapTableBuilder* capTable,
                       void* data, WirePointer* pointers,
                       StructDataBitCount dataSize, StructPointerCount pointerCount)
      : segment(segment), capTable(capTable), data(data), pointers(pointers),
        dataSize(dataSize), pointerCount(pointerCount) {}

  friend class ListBuilder;
  friend struct WireHelpers;
  friend class OrphanBuilder;
};
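// Field-access sketch in the style of the generated accessors (`builder`, the offsets, and the
// default mask are invented for illustration):
//
//   // uint32 field at data offset 1 (bits [32, 64)) with a declared default of 7:
//   builder.setDataField<uint32_t>(bounded<1>() * ELEMENTS, 42, 7);            // stores 42 ^ 7
//   uint32_t v = builder.getDataField<uint32_t>(bounded<1>() * ELEMENTS, 7);   // == 42
//
//   // bool field stored at bit 19 of the data section:
//   builder.setDataField<bool>(bounded<19>() * ELEMENTS, true);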

class StructReader {
public:
  inline StructReader()
      : segment(nullptr), capTable(nullptr), data(nullptr), pointers(nullptr),
        dataSize(ZERO * BITS), pointerCount(ZERO * POINTERS), nestingLimit(0x7fffffff) {}
  inline StructReader(kj::ArrayPtr<const word> data)
      : segment(nullptr), capTable(nullptr), data(data.begin()), pointers(nullptr),
        dataSize(assumeBits<STRUCT_DATA_WORD_COUNT_BITS>(data.size()) * WORDS * BITS_PER_WORD),
        pointerCount(ZERO * POINTERS), nestingLimit(0x7fffffff) {}

  const void* getLocation() const { return data; }

  inline StructDataBitCount getDataSectionSize() const { return dataSize; }
  inline StructPointerCount getPointerSectionSize() const { return pointerCount; }
  inline kj::ArrayPtr<const byte> getDataSectionAsBlob() const;
  inline _::ListReader getPointerSectionAsList() const;

  kj::Array<word> canonicalize();

  template <typename T>
  KJ_ALWAYS_INLINE(bool hasDataField(StructDataOffset offset) const);
  // Return true if the field is set to something other than its default value.

  template <typename T>
  KJ_ALWAYS_INLINE(T getDataField(StructDataOffset offset) const);
  // Get the data field value of the given type at the given offset.  The offset is measured in
  // multiples of the field size, determined by the type.  Returns zero if the offset is past the
  // end of the struct's data section.

  template <typename T>
  KJ_ALWAYS_INLINE(T getDataField(StructDataOffset offset, Mask<T> mask) const);
  // Like getDataField(offset), but applies the given XOR mask to the result.  Used for reading
  // fields with non-zero default values.

  KJ_ALWAYS_INLINE(PointerReader getPointerField(StructPointerOffset ptrIndex) const);
  // Get a reader for a pointer field given the index within the pointer section.  If the index
  // is out-of-bounds, returns a null pointer.

  MessageSizeCounts totalSize() const;
  // Return the total size of the struct and everything to which it points.  Does not count far
  // pointer overhead.  This is useful for deciding how much space is needed to copy the struct
  // into a flat array.

  CapTableReader* getCapTable();
  // Gets the capability context in which this object is operating.

  StructReader imbue(CapTableReader* capTable) const;
  // Return a copy of this reader except using the given capability context.

  bool isCanonical(const word **readHead, const word **ptrHead,
                   bool *dataTrunc, bool *ptrTrunc);
  // Validate this pointer's canonicity, subject to the conditions:
  // * All data to the left of readHead has been read thus far (for pointer
  //   ordering)
  // * All pointers in preorder have already been checked
  // * This pointer is in the first and only segment of the message
  //
  // If this function returns false, the struct is non-canonical. If it
  // returns true, then:
  // * If it is a composite in a list, it is canonical if at least one struct
  //   in the list outputs dataTrunc = 1, and at least one outputs ptrTrunc = 1
  // * If it is derived from a struct pointer, it is canonical if
  //   dataTrunc = 1 AND ptrTrunc = 1

private:
  SegmentReader* segment;    // Memory segment in which the struct resides.
  CapTableReader* capTable;  // Table of capability indexes.

  const void* data;
  const WirePointer* pointers;

  StructDataBitCount dataSize;
  // Size of data section.  We use a bit count rather than a word count to more easily handle the
  // case of struct lists encoded with less than a word per element.

  StructPointerCount pointerCount;  // Size of the pointer section.

  int nestingLimit;
  // Limits the depth of message structures to guard against stack-overflow-based DoS attacks.
  // Once this reaches zero, further pointers will be pruned.
  // TODO(perf):  Limit to 16 bits for better packing?

  inline StructReader(SegmentReader* segment, CapTableReader* capTable,
                      const void* data, const WirePointer* pointers,
                      StructDataBitCount dataSize, StructPointerCount pointerCount,
                      int nestingLimit)
      : segment(segment), capTable(capTable), data(data), pointers(pointers),
        dataSize(dataSize), pointerCount(pointerCount),
        nestingLimit(nestingLimit) {}

  friend class ListReader;
  friend class StructBuilder;
  friend struct WireHelpers;
};

// -------------------------------------------------------------------

class ListBuilder: public kj::DisallowConstCopy {
public:
  inline explicit ListBuilder(ElementSize elementSize)
      : segment(nullptr), capTable(nullptr), ptr(nullptr), elementCount(ZERO * ELEMENTS),
        step(ZERO * BITS / ELEMENTS), structDataSize(ZERO * BITS),
        structPointerCount(ZERO * POINTERS), elementSize(elementSize) {}

  inline word* getLocation() {
    // Get the object's location.

    if (elementSize == ElementSize::INLINE_COMPOSITE && ptr != nullptr) {
      return reinterpret_cast<word*>(ptr) - POINTER_SIZE_IN_WORDS;
    } else {
      return reinterpret_cast<word*>(ptr);
    }
  }

  inline ElementSize getElementSize() const { return elementSize; }

  inline ListElementCount size() const;
  // The number of elements in the list.

  Text::Builder asText();
  Data::Builder asData();
  // Reinterpret the list as a blob.  Throws an exception if the elements are not byte-sized.

  template <typename T>
  KJ_ALWAYS_INLINE(T getDataElement(ElementCount index));
  // Get the element of the given type at the given index.

  template <typename T>
  KJ_ALWAYS_INLINE(void setDataElement(ElementCount index, kj::NoInfer<T> value));
  // Set the element at the given index.

  KJ_ALWAYS_INLINE(PointerBuilder getPointerElement(ElementCount index));

  StructBuilder getStructElement(ElementCount index);

  ListReader asReader() const;
  // Get a ListReader pointing at the same memory.

  BuilderArena* getArena();
  // Gets the arena in which this object is allocated.

  CapTableBuilder* getCapTable();
  // Gets the capability context in which this object is operating.

  ListBuilder imbue(CapTableBuilder* capTable);
  // Return a copy of this builder except using the given capability context.

private:
  SegmentBuilder* segment;    // Memory segment in which the list resides.
  CapTableBuilder* capTable;  // Table of capability indexes.

  byte* ptr;  // Pointer to list content.

  ListElementCount elementCount;  // Number of elements in the list.

  BitsPerElementN<23> step;
  // The distance between elements. The maximum value occurs when a struct contains 2^16-1 data
  // words and 2^16-1 pointers, i.e. 2^17 - 2 words, or 2^23 - 128 bits.

  StructDataBitCount structDataSize;
  StructPointerCount structPointerCount;
  // The struct properties to use when interpreting the elements as structs.  All lists can be
  // interpreted as struct lists, so these are always filled in.

  ElementSize elementSize;
  // The element size as an ElementSize. This is only really needed to disambiguate INLINE_COMPOSITE
  // from other types when the overall size is exactly zero or one words.

  inline ListBuilder(SegmentBuilder* segment, CapTableBuilder* capTable, void* ptr,
                     BitsPerElementN<23> step, ListElementCount size,
                     StructDataBitCount structDataSize, StructPointerCount structPointerCount,
                     ElementSize elementSize)
      : segment(segment), capTable(capTable), ptr(reinterpret_cast<byte*>(ptr)),
        elementCount(size), step(step), structDataSize(structDataSize),
        structPointerCount(structPointerCount), elementSize(elementSize) {}

  friend class StructBuilder;
  friend struct WireHelpers;
  friend class OrphanBuilder;
};
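// Worked example of the `step` bookkeeping above: for an INLINE_COMPOSITE list whose element
// struct has 2 data words and 1 pointer, consecutive elements are 3 words apart, so
//
//   step               == 3 * 64 == 192 bits / element
//   structDataSize     == 128 bits
//   structPointerCount == 1 pointer
//
// and element i's data starts at ptr + i * 192 / 8 bytes, which is exactly the arithmetic that
// getDataElement() and friends perform near the end of this file.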

class ListReader {
public:
  inline explicit ListReader(ElementSize elementSize)
      : segment(nullptr), capTable(nullptr), ptr(nullptr), elementCount(ZERO * ELEMENTS),
        step(ZERO * BITS / ELEMENTS), structDataSize(ZERO * BITS),
        structPointerCount(ZERO * POINTERS), elementSize(elementSize), nestingLimit(0x7fffffff) {}

  inline ListElementCount size() const;
  // The number of elements in the list.

  inline ElementSize getElementSize() const { return elementSize; }

  Text::Reader asText();
  Data::Reader asData();
  // Reinterpret the list as a blob.  Throws an exception if the elements are not byte-sized.

  kj::ArrayPtr<const byte> asRawBytes() const;

  template <typename T>
  KJ_ALWAYS_INLINE(T getDataElement(ElementCount index) const);
  // Get the element of the given type at the given index.

  KJ_ALWAYS_INLINE(PointerReader getPointerElement(ElementCount index) const);

  StructReader getStructElement(ElementCount index) const;

  MessageSizeCounts totalSize() const;
  // Like StructReader::totalSize(). Note that for struct lists, the size includes the list tag.

  CapTableReader* getCapTable();
  // Gets the capability context in which this object is operating.

  ListReader imbue(CapTableReader* capTable) const;
  // Return a copy of this reader except using the given capability context.

  bool isCanonical(const word **readHead, const WirePointer* ref);
  // Validate this pointer's canonicity, subject to the conditions:
  // * All data to the left of readHead has been read thus far (for pointer
  //   ordering)
  // * All pointers in preorder have already been checked
  // * This pointer is in the first and only segment of the message

private:
  SegmentReader* segment;    // Memory segment in which the list resides.
  CapTableReader* capTable;  // Table of capability indexes.

  const byte* ptr;  // Pointer to list content.

  ListElementCount elementCount;  // Number of elements in the list.

  BitsPerElementN<23> step;
  // The distance between elements. The maximum value occurs when a struct contains 2^16-1 data
  // words and 2^16-1 pointers, i.e. 2^17 - 2 words, or 2^23 - 128 bits.

  StructDataBitCount structDataSize;
  StructPointerCount structPointerCount;
  // The struct properties to use when interpreting the elements as structs.  All lists can be
  // interpreted as struct lists, so these are always filled in.

  ElementSize elementSize;
  // The element size as an ElementSize. This is only really needed to disambiguate INLINE_COMPOSITE
  // from other types when the overall size is exactly zero or one words.

  int nestingLimit;
  // Limits the depth of message structures to guard against stack-overflow-based DoS attacks.
  // Once this reaches zero, further pointers will be pruned.

  inline ListReader(SegmentReader* segment, CapTableReader* capTable, const void* ptr,
                    ListElementCount elementCount, BitsPerElementN<23> step,
                    StructDataBitCount structDataSize, StructPointerCount structPointerCount,
                    ElementSize elementSize, int nestingLimit)
      : segment(segment), capTable(capTable), ptr(reinterpret_cast<const byte*>(ptr)),
        elementCount(elementCount), step(step), structDataSize(structDataSize),
        structPointerCount(structPointerCount), elementSize(elementSize),
        nestingLimit(nestingLimit) {}

  friend class StructReader;
  friend class ListBuilder;
  friend struct WireHelpers;
  friend class OrphanBuilder;
};

// -------------------------------------------------------------------

class OrphanBuilder {
public:
  inline OrphanBuilder(): segment(nullptr), capTable(nullptr), location(nullptr) {
    memset(&tag, 0, sizeof(tag));
  }
  OrphanBuilder(const OrphanBuilder& other) = delete;
  inline OrphanBuilder(OrphanBuilder&& other) noexcept;
  inline ~OrphanBuilder() noexcept(false);

  static OrphanBuilder initStruct(BuilderArena* arena, CapTableBuilder* capTable, StructSize size);
  static OrphanBuilder initList(BuilderArena* arena, CapTableBuilder* capTable,
                                ElementCount elementCount, ElementSize elementSize);
  static OrphanBuilder initStructList(BuilderArena* arena, CapTableBuilder* capTable,
                                      ElementCount elementCount, StructSize elementSize);
  static OrphanBuilder initText(BuilderArena* arena, CapTableBuilder* capTable, ByteCount size);
  static OrphanBuilder initData(BuilderArena* arena, CapTableBuilder* capTable, ByteCount size);

  static OrphanBuilder copy(BuilderArena* arena, CapTableBuilder* capTable, StructReader copyFrom);
  static OrphanBuilder copy(BuilderArena* arena, CapTableBuilder* capTable, ListReader copyFrom);
  static OrphanBuilder copy(BuilderArena* arena, CapTableBuilder* capTable, PointerReader copyFrom);
  static OrphanBuilder copy(BuilderArena* arena, CapTableBuilder* capTable, Text::Reader copyFrom);
  static OrphanBuilder copy(BuilderArena* arena, CapTableBuilder* capTable, Data::Reader copyFrom);
#if !CAPNP_LITE
  static OrphanBuilder copy(BuilderArena* arena, CapTableBuilder* capTable,
                            kj::Own<ClientHook> copyFrom);
#endif  // !CAPNP_LITE

  static OrphanBuilder concat(BuilderArena* arena, CapTableBuilder* capTable,
                              ElementSize expectedElementSize, StructSize expectedStructSize,
                              kj::ArrayPtr<const ListReader> lists);

  static OrphanBuilder referenceExternalData(BuilderArena* arena, Data::Reader data);

  OrphanBuilder& operator=(const OrphanBuilder& other) = delete;
  inline OrphanBuilder& operator=(OrphanBuilder&& other);

  inline bool operator==(decltype(nullptr)) const { return location == nullptr; }
  inline bool operator!=(decltype(nullptr)) const { return location != nullptr; }

  StructBuilder asStruct(StructSize size);
  // Interpret as a struct, or throw an exception if not a struct.

  ListBuilder asList(ElementSize elementSize);
  // Interpret as a list, or throw an exception if not a list.  elementSize cannot be
  // INLINE_COMPOSITE -- use asStructList() instead.

  ListBuilder asStructList(StructSize elementSize);
  // Interpret as a struct list, or throw an exception if not a list.

  ListBuilder asListAnySize();
  // For AnyList.

  Text::Builder asText();
  Data::Builder asData();
  // Interpret as a blob, or throw an exception if not a blob.

  StructReader asStructReader(StructSize size) const;
  ListReader asListReader(ElementSize elementSize) const;
  ListReader asListReaderAnySize() const;
#if !CAPNP_LITE
  kj::Own<ClientHook> asCapability() const;
#endif  // !CAPNP_LITE
  Text::Reader asTextReader() const;
  Data::Reader asDataReader() const;

  bool truncate(ElementCount size, bool isText) KJ_WARN_UNUSED_RESULT;
  // Resize the orphan list to the given size. Returns false if the list is currently empty but
  // the requested size is non-zero, in which case the caller will need to allocate a new list.

  void truncate(ElementCount size, ElementSize elementSize);
  void truncate(ElementCount size, StructSize elementSize);
  void truncateText(ElementCount size);
  // Versions of truncate() that know how to allocate a new list if needed.

private:
  static_assert(ONE * POINTERS * WORDS_PER_POINTER == ONE * WORDS,
                "This struct assumes a pointer is one word.");
  word tag;
  // Contains an encoded WirePointer representing this object.  WirePointer is defined in
  // layout.c++, but fits in a word.
  //
  // This may be a FAR pointer.  Even in that case, `location` points to the eventual destination
  // of that far pointer.  The reason we keep the far pointer around rather than just making `tag`
  // represent the final destination is because if the eventual adopter of the pointer is not in
  // the target's segment then it may be useful to reuse the far pointer landing pad.
  //
  // If `tag` is not a far pointer, its offset is garbage; only `location` points to the actual
  // target.

  SegmentBuilder* segment;
  // Segment in which the object resides.

  CapTableBuilder* capTable;
  // Table of capability indexes.

  word* location;
  // Pointer to the object, or nullptr if the pointer is null.  For capabilities, we make this
  // 0x1 just so that it is non-null for operator==, but it is never used.

  inline OrphanBuilder(const void* tagPtr, SegmentBuilder* segment,
                       CapTableBuilder* capTable, word* location)
      : segment(segment), capTable(capTable), location(location) {
    memcpy(&tag, tagPtr, sizeof(tag));
  }

  inline WirePointer* tagAsPtr() { return reinterpret_cast<WirePointer*>(&tag); }
  inline const WirePointer* tagAsPtr() const { return reinterpret_cast<const WirePointer*>(&tag); }

  void euthanize();
  // Erase the target object, zeroing it out and possibly reclaiming the memory.  Called when
  // the OrphanBuilder is being destroyed or overwritten and it is non-null.

  friend struct WireHelpers;
};
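// Typical orphan lifecycle, sketched with hypothetical PointerBuilders (user code sees the
// higher-level Orphan<T> and Orphanage wrappers rather than this class):
//
//   OrphanBuilder orphan = somePointer.disown();  // detach; `somePointer` becomes null
//   // ...the object lives on, owned by `orphan`...
//   otherPointer.adopt(kj::mv(orphan));           // re-attach elsewhere without copying
//   // Had `orphan` been destroyed instead, euthanize() would have zeroed out the object.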

// =======================================================================================
// Internal implementation details...

// These are defined in the source file.
template <> typename Text::Builder PointerBuilder::initBlob<Text>(ByteCount size);
template <> void PointerBuilder::setBlob<Text>(typename Text::Reader value);
template <> typename Text::Builder PointerBuilder::getBlob<Text>(
    const void* defaultValue, ByteCount defaultSize);
template <> typename Text::Reader PointerReader::getBlob<Text>(
    const void* defaultValue, ByteCount defaultSize) const;

template <> typename Data::Builder PointerBuilder::initBlob<Data>(ByteCount size);
template <> void PointerBuilder::setBlob<Data>(typename Data::Reader value);
template <> typename Data::Builder PointerBuilder::getBlob<Data>(
    const void* defaultValue, ByteCount defaultSize);
template <> typename Data::Reader PointerReader::getBlob<Data>(
    const void* defaultValue, ByteCount defaultSize) const;

inline PointerBuilder PointerBuilder::getRoot(
    SegmentBuilder* segment, CapTableBuilder* capTable, word* location) {
  return PointerBuilder(segment, capTable, reinterpret_cast<WirePointer*>(location));
}

inline PointerReader PointerReader::getRootUnchecked(const word* location) {
  return PointerReader(nullptr, nullptr,
                       reinterpret_cast<const WirePointer*>(location), 0x7fffffff);
}

// -------------------------------------------------------------------

inline kj::ArrayPtr<byte> StructBuilder::getDataSectionAsBlob() {
  return kj::ArrayPtr<byte>(reinterpret_cast<byte*>(data),
      unbound(dataSize / BITS_PER_BYTE / BYTES));
}

inline _::ListBuilder StructBuilder::getPointerSectionAsList() {
  return _::ListBuilder(segment, capTable, pointers, ONE * POINTERS * BITS_PER_POINTER / ELEMENTS,
                        pointerCount * (ONE * ELEMENTS / POINTERS),
                        ZERO * BITS, ONE * POINTERS, ElementSize::POINTER);
}

template <typename T>
inline bool StructBuilder::hasDataField(StructDataOffset offset) {
  return getDataField<Mask<T>>(offset) != 0;
}

template <>
inline bool StructBuilder::hasDataField<Void>(StructDataOffset offset) {
  return false;
}

template <typename T>
inline T StructBuilder::getDataField(StructDataOffset offset) {
  return reinterpret_cast<WireValue<T>*>(data)[unbound(offset / ELEMENTS)].get();
}

template <>
inline bool StructBuilder::getDataField<bool>(StructDataOffset offset) {
  BitCount32 boffset = offset * (ONE * BITS / ELEMENTS);
  byte* b = reinterpret_cast<byte*>(data) + boffset / BITS_PER_BYTE;
  return (*reinterpret_cast<uint8_t*>(b) &
      unbound(ONE << (boffset % BITS_PER_BYTE / BITS))) != 0;
}

template <>
inline Void StructBuilder::getDataField<Void>(StructDataOffset offset) {
  return VOID;
}

template <typename T>
inline T StructBuilder::getDataField(StructDataOffset offset, Mask<T> mask) {
  return unmask<T>(getDataField<Mask<T> >(offset), mask);
}

template <typename T>
inline void StructBuilder::setDataField(StructDataOffset offset, kj::NoInfer<T> value) {
  reinterpret_cast<WireValue<T>*>(data)[unbound(offset / ELEMENTS)].set(value);
}

#if CAPNP_CANONICALIZE_NAN
// Use mask() on floats and doubles to make sure we canonicalize NaNs.
template <>
inline void StructBuilder::setDataField<float>(StructDataOffset offset, float value) {
  setDataField<uint32_t>(offset, mask<float>(value, 0));
}
template <>
inline void StructBuilder::setDataField<double>(StructDataOffset offset, double value) {
  setDataField<uint64_t>(offset, mask<double>(value, 0));
}
#endif

template <>
inline void StructBuilder::setDataField<bool>(StructDataOffset offset, bool value) {
  auto boffset = offset * (ONE * BITS / ELEMENTS);
  byte* b = reinterpret_cast<byte*>(data) + boffset / BITS_PER_BYTE;
  uint bitnum = unboundMaxBits<3>(boffset % BITS_PER_BYTE / BITS);
  *reinterpret_cast<uint8_t*>(b) = (*reinterpret_cast<uint8_t*>(b) & ~(1 << bitnum))
                                 | (static_cast<uint8_t>(value) << bitnum);
}
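// Worked example of the bit arithmetic in the bool specializations above: for offset 19,
// boffset == 19 bits, so the bit lives in byte 19 / 8 == 2 of the data section at bit position
// 19 % 8 == 3; it is read as (byte & (1 << 3)) != 0 and written by clearing (1 << 3) first and
// then OR-ing in the new value shifted into place.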

template <>
inline void StructBuilder::setDataField<Void>(StructDataOffset offset, Void value) {}

template <typename T>
inline void StructBuilder::setDataField(StructDataOffset offset,
                                        kj::NoInfer<T> value, Mask<T> m) {
  setDataField<Mask<T> >(offset, mask<T>(value, m));
}

inline PointerBuilder StructBuilder::getPointerField(StructPointerOffset ptrIndex) {
  // Hacky because WirePointer is defined in the .c++ file (so is incomplete here).
  return PointerBuilder(segment, capTable, reinterpret_cast<WirePointer*>(
      reinterpret_cast<word*>(pointers) + ptrIndex * WORDS_PER_POINTER));
}

// -------------------------------------------------------------------

inline kj::ArrayPtr<const byte> StructReader::getDataSectionAsBlob() const {
  return kj::ArrayPtr<const byte>(reinterpret_cast<const byte*>(data),
      unbound(dataSize / BITS_PER_BYTE / BYTES));
}

inline _::ListReader StructReader::getPointerSectionAsList() const {
  return _::ListReader(segment, capTable, pointers, pointerCount * (ONE * ELEMENTS / POINTERS),
                       ONE * POINTERS * BITS_PER_POINTER / ELEMENTS, ZERO * BITS, ONE * POINTERS,
                       ElementSize::POINTER, nestingLimit);
}

template <typename T>
inline bool StructReader::hasDataField(StructDataOffset offset) const {
  return getDataField<Mask<T>>(offset) != 0;
}

template <>
inline bool StructReader::hasDataField<Void>(StructDataOffset offset) const {
  return false;
}

template <typename T>
inline T StructReader::getDataField(StructDataOffset offset) const {
  if ((offset + ONE * ELEMENTS) * capnp::bitsPerElement<T>() <= dataSize) {
    return reinterpret_cast<const WireValue<T>*>(data)[unbound(offset / ELEMENTS)].get();
  } else {
    return static_cast<T>(0);
  }
}

template <>
inline bool StructReader::getDataField<bool>(StructDataOffset offset) const {
  auto boffset = offset * (ONE * BITS / ELEMENTS);
  if (boffset < dataSize) {
    const byte* b = reinterpret_cast<const byte*>(data) + boffset / BITS_PER_BYTE;
    return (*reinterpret_cast<const uint8_t*>(b) &
        unbound(ONE << (boffset % BITS_PER_BYTE / BITS))) != 0;
  } else {
    return false;
  }
}

template <>
inline Void StructReader::getDataField<Void>(StructDataOffset offset) const {
  return VOID;
}

template <typename T>
T StructReader::getDataField(StructDataOffset offset, Mask<T> mask) const {
  return unmask<T>(getDataField<Mask<T> >(offset), mask);
}

inline PointerReader StructReader::getPointerField(StructPointerOffset ptrIndex) const {
  if (ptrIndex < pointerCount) {
    // Hacky because WirePointer is defined in the .c++ file (so is incomplete here).
    return PointerReader(segment, capTable, reinterpret_cast<const WirePointer*>(
        reinterpret_cast<const word*>(pointers) + ptrIndex * WORDS_PER_POINTER), nestingLimit);
  } else {
    return PointerReader();
  }
}

// -------------------------------------------------------------------

inline ListElementCount ListBuilder::size() const { return elementCount; }

template <typename T>
inline T ListBuilder::getDataElement(ElementCount index) {
  return reinterpret_cast<WireValue<T>*>(
      ptr + upgradeBound<uint64_t>(index) * step / BITS_PER_BYTE)->get();

  // TODO(perf):  Benchmark this alternate implementation, which I suspect may make better use of
  //   the x86 SIB byte.  Also use it for all the other getData/setData implementations below, and
  //   the various non-inline methods that look up pointers.
  //   Also if using this, consider changing ptr back to void* instead of byte*.
//  return reinterpret_cast<WireValue<T>*>(ptr)[
//      index / ELEMENTS * (step / capnp::bitsPerElement<T>())].get();
}

template <>
inline bool ListBuilder::getDataElement<bool>(ElementCount index) {
  // Ignore step for bit lists because bit lists cannot be upgraded to struct lists.
  auto bindex = index * (ONE * BITS / ELEMENTS);
  byte* b = ptr + bindex / BITS_PER_BYTE;
  return (*reinterpret_cast<uint8_t*>(b) &
      unbound(ONE << (bindex % BITS_PER_BYTE / BITS))) != 0;
}

template <>
inline Void ListBuilder::getDataElement<Void>(ElementCount index) {
  return VOID;
}

template <typename T>
inline void ListBuilder::setDataElement(ElementCount index, kj::NoInfer<T> value) {
  reinterpret_cast<WireValue<T>*>(
      ptr + upgradeBound<uint64_t>(index) * step / BITS_PER_BYTE)->set(value);
}

#if CAPNP_CANONICALIZE_NAN
// Use mask() on floats and doubles to make sure we canonicalize NaNs.
template <>
inline void ListBuilder::setDataElement<float>(ElementCount index, float value) {
  setDataElement<uint32_t>(index, mask<float>(value, 0));
}
template <>
inline void ListBuilder::setDataElement<double>(ElementCount index, double value) {
  setDataElement<uint64_t>(index, mask<double>(value, 0));
}
#endif

template <>
inline void ListBuilder::setDataElement<bool>(ElementCount index, bool value) {
  // Ignore step for bit lists because bit lists cannot be upgraded to struct lists.
  auto bindex = index * (ONE * BITS / ELEMENTS);
  byte* b = ptr + bindex / BITS_PER_BYTE;
  auto bitnum = bindex % BITS_PER_BYTE / BITS;
  *reinterpret_cast<uint8_t*>(b) = (*reinterpret_cast<uint8_t*>(b) & ~(1 << unbound(bitnum)))
                                 | (static_cast<uint8_t>(value) << unbound(bitnum));
}

template <>
inline void ListBuilder::setDataElement<Void>(ElementCount index, Void value) {}

inline PointerBuilder ListBuilder::getPointerElement(ElementCount index) {
  return PointerBuilder(segment, capTable, reinterpret_cast<WirePointer*>(ptr +
      upgradeBound<uint64_t>(index) * step / BITS_PER_BYTE));
}

// -------------------------------------------------------------------

inline ListElementCount ListReader::size() const { return elementCount; }

template <typename T>
inline T ListReader::getDataElement(ElementCount index) const {
  return reinterpret_cast<const WireValue<T>*>(
      ptr + upgradeBound<uint64_t>(index) * step / BITS_PER_BYTE)->get();
}

template <>
inline bool ListReader::getDataElement<bool>(ElementCount index) const {
  // Ignore step for bit lists because bit lists cannot be upgraded to struct lists.
  auto bindex = index * (ONE * BITS / ELEMENTS);
  const byte* b = ptr + bindex / BITS_PER_BYTE;
  return (*reinterpret_cast<const uint8_t*>(b) &
      unbound(ONE << (bindex % BITS_PER_BYTE / BITS))) != 0;
}

template <>
inline Void ListReader::getDataElement<Void>(ElementCount index) const {
  return VOID;
}

inline PointerReader ListReader::getPointerElement(ElementCount index) const {
  return PointerReader(segment, capTable, reinterpret_cast<const WirePointer*>(
      ptr + upgradeBound<uint64_t>(index) * step / BITS_PER_BYTE), nestingLimit);
}

// -------------------------------------------------------------------

inline OrphanBuilder::OrphanBuilder(OrphanBuilder&& other) noexcept
    : segment(other.segment), capTable(other.capTable), location(other.location) {
  memcpy(&tag, &other.tag, sizeof(tag));  // Needs memcpy to comply with aliasing rules.
  other.segment = nullptr;
  other.location = nullptr;
}

inline OrphanBuilder::~OrphanBuilder() noexcept(false) {
  if (segment != nullptr) euthanize();
}

inline OrphanBuilder& OrphanBuilder::operator=(OrphanBuilder&& other) {
  // With normal smart pointers, it's important to handle the case where the incoming pointer
  // is actually transitively owned by this one.  In this case, euthanize() would destroy `other`
  // before we copied it.  This isn't possible in the case of `OrphanBuilder` because it only
  // owns message objects, and `other` is not itself a message object, therefore cannot possibly
  // be transitively owned by `this`.

  if (segment != nullptr) euthanize();
  segment = other.segment;
  capTable = other.capTable;
  location = other.location;
  memcpy(&tag, &other.tag, sizeof(tag));  // Needs memcpy to comply with aliasing rules.
  other.segment = nullptr;
  other.location = nullptr;
  return *this;
}

}  // namespace _ (private)
}  // namespace capnp

CAPNP_END_HEADER