1 // Protocol Buffers - Google's data interchange format
2 // Copyright 2008 Google Inc.  All rights reserved.
3 // https://developers.google.com/protocol-buffers/
4 //
5 // Redistribution and use in source and binary forms, with or without
6 // modification, are permitted provided that the following conditions are
7 // met:
8 //
9 //     * Redistributions of source code must retain the above copyright
10 // notice, this list of conditions and the following disclaimer.
11 //     * Redistributions in binary form must reproduce the above
12 // copyright notice, this list of conditions and the following disclaimer
13 // in the documentation and/or other materials provided with the
14 // distribution.
15 //     * Neither the name of Google Inc. nor the names of its
16 // contributors may be used to endorse or promote products derived from
17 // this software without specific prior written permission.
18 //
19 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
22 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
23 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
24 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
25 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 
31 // Author: kenton@google.com (Kenton Varda)
32 //  Based on original Protocol Buffers design by
33 //  Sanjay Ghemawat, Jeff Dean, and others.
34 //
35 // RepeatedField and RepeatedPtrField are used by generated protocol message
36 // classes to manipulate repeated fields.  These classes are very similar to
37 // STL's vector, but include a number of optimizations found to be useful
38 // specifically in the case of Protocol Buffers.  RepeatedPtrField is
39 // particularly different from STL vector as it manages ownership of the
40 // pointers that it contains.
41 //
42 // Typically, clients should not need to access RepeatedField objects directly,
43 // but should instead use the accessor functions generated automatically by the
44 // protocol compiler.
45 
46 #ifndef GOOGLE_PROTOBUF_REPEATED_FIELD_H__
47 #define GOOGLE_PROTOBUF_REPEATED_FIELD_H__
48 
49 #include <utility>
50 #ifdef _MSC_VER
51 // This is required for min/max on VS2013 only.
52 #include <algorithm>
53 #endif
54 
55 #include <iterator>
56 #include <limits>
57 #include <string>
58 #include <type_traits>
59 
60 #include <google/protobuf/stubs/logging.h>
61 #include <google/protobuf/stubs/common.h>
62 #include <google/protobuf/arena.h>
63 #include <google/protobuf/message_lite.h>
64 #include <google/protobuf/port.h>
65 #include <google/protobuf/stubs/casts.h>
66 #include <type_traits>
67 
68 
69 // Must be included last.
70 #include <google/protobuf/port_def.inc>
71 
72 #ifdef SWIG
73 #error "You cannot SWIG proto headers"
74 #endif
75 
76 namespace google {
77 namespace protobuf {
78 
79 class Message;
80 class Reflection;
81 
82 template <typename T>
83 struct WeakRepeatedPtrField;
84 
85 namespace internal {
86 
87 class MergePartialFromCodedStreamHelper;
88 class SwapFieldHelper;
89 
// kRepeatedFieldLowerClampLimit is the smallest size that will be allocated
// when growing a repeated field.
constexpr int kRepeatedFieldLowerClampLimit = 4;

// kRepeatedFieldUpperClampLimit is the lowest signed integer value that
// overflows when multiplied by 2 (which is undefined behavior). Sizes above
// this will clamp to the maximum int value instead of following exponential
// growth when growing a repeated field.
constexpr int kRepeatedFieldUpperClampLimit =
    (std::numeric_limits<int>::max() / 2) + 1;

// A utility function for logging that doesn't need any template types.
// NOTE(review): declared here, presumably defined out-of-line in the .cc so
// the logging machinery is not instantiated per element type — confirm.
void LogIndexOutOfBounds(int index, int size);
103 
// Forward (or stronger) iterators are multi-pass, so the element count can be
// computed up front with std::distance and used as a reservation hint.
template <typename Iter>
inline int CalculateReserve(Iter first, Iter last, std::forward_iterator_tag) {
  const auto count = std::distance(first, last);
  return static_cast<int>(count);
}
108 
// Input iterators are single-pass: the element count cannot be measured ahead
// of time, so -1 signals that no reservation hint is available.
template <typename Iter>
inline int CalculateReserve(Iter /*first*/, Iter /*last*/,
                            std::input_iterator_tag /*category*/) {
  return -1;
}
114 
// Dispatches on the iterator category: yields the exact element count for
// multi-pass iterators and -1 for single-pass (input) iterators.
template <typename Iter>
inline int CalculateReserve(Iter begin, Iter end) {
  using Category = typename std::iterator_traits<Iter>::iterator_category;
  return CalculateReserve(begin, end, Category());
}
120 
// Swaps the sizeof(T) bytes at p with the sizeof(T) bytes at q, staging one
// side through a temporary of type T.
template <typename T>
inline void SwapBlock(char* p, char* q) {
  T scratch;
  std::memcpy(&scratch, q, sizeof(T));
  std::memcpy(q, p, sizeof(T));
  std::memcpy(p, &scratch, sizeof(T));
}
129 
130 // Swaps two blocks of memory of size kSize:
131 //  template <int kSize> void memswap(char* p, char* q);
132 
// Base case of the recursive memswap: zero bytes remain, so do nothing.
template <int kSize>
inline typename std::enable_if<kSize == 0>::type memswap(char* /*p*/,
                                                         char* /*q*/) {}
136 
// Defines a memswap overload that is selected for any kSize in
// [sizeof(reg_type), max_size).  It swaps one sizeof(reg_type)-sized chunk
// and then recurses on the remaining kSize - sizeof(reg_type) bytes, so the
// expansions below must together cover every possible kSize, with the widest
// suitable register type chosen for each chunk.
#define PROTO_MEMSWAP_DEF_SIZE(reg_type, max_size)                           \
  template <int kSize>                                                       \
  typename std::enable_if<(kSize >= sizeof(reg_type) && kSize < (max_size)), \
                          void>::type                                        \
  memswap(char* p, char* q) {                                                \
    SwapBlock<reg_type>(p, q);                                               \
    memswap<kSize - sizeof(reg_type)>(p + sizeof(reg_type),                  \
                                      q + sizeof(reg_type));                 \
  }

// 1-byte, 2..3-byte, and 4..7-byte remainders.
PROTO_MEMSWAP_DEF_SIZE(uint8, 2)
PROTO_MEMSWAP_DEF_SIZE(uint16, 4)
PROTO_MEMSWAP_DEF_SIZE(uint32, 8)

#ifdef __SIZEOF_INT128__
// With 128-bit integer support, use 8-byte chunks for 8..15-byte remainders
// and 16-byte chunks for everything larger.
PROTO_MEMSWAP_DEF_SIZE(uint64, 16)
PROTO_MEMSWAP_DEF_SIZE(__uint128_t, (1u << 31))
#else
// Otherwise 8-byte chunks handle all remaining sizes.
PROTO_MEMSWAP_DEF_SIZE(uint64, (1u << 31))
#endif

#undef PROTO_MEMSWAP_DEF_SIZE
159 
160 }  // namespace internal
161 
// RepeatedField is used to represent repeated fields of a primitive type (in
// other words, everything except strings and nested Messages).  Most users
// will not ever use a RepeatedField directly; they will use the get-by-index,
// set-by-index, and add accessors that are generated for all repeated fields.
template <typename Element>
class RepeatedField final {
  // A Rep places an Arena* header immediately before the element array (see
  // struct Rep below), so Element must not need stricter alignment than
  // Arena.
  static_assert(
      alignof(Arena) >= alignof(Element),
      "We only support types that have an alignment smaller than Arena");

 public:
  // Constructs an empty RepeatedField.
  constexpr RepeatedField();
  // Constructs an empty RepeatedField associated with |arena|.
  explicit RepeatedField(Arena* arena);

  RepeatedField(const RepeatedField& other);

  // Range constructor.  Participates in overload resolution only when Element
  // is constructible from the iterator's dereferenced value.
  template <typename Iter,
            typename = typename std::enable_if<std::is_constructible<
                Element, decltype(*std::declval<Iter>())>::value>::type>
  RepeatedField(Iter begin, Iter end);

  ~RepeatedField();

  RepeatedField& operator=(const RepeatedField& other);

  RepeatedField(RepeatedField&& other) noexcept;
  RepeatedField& operator=(RepeatedField&& other) noexcept;

  bool empty() const;
  int size() const;

  const Element& Get(int index) const;
  Element* Mutable(int index);

  // Array-style access, equivalent to Get() / Mutable().
  const Element& operator[](int index) const { return Get(index); }
  Element& operator[](int index) { return *Mutable(index); }

  // STL-compatible element accessors.
  const Element& at(int index) const;
  Element& at(int index);

  void Set(int index, const Element& value);
  void Add(const Element& value);
  // Appends a new element and returns a pointer to it.
  // The new element is uninitialized if |Element| is a POD type.
  Element* Add();
  // Appends elements in the range [begin, end) after reserving
  // the appropriate number of elements.
  template <typename Iter>
  void Add(Iter begin, Iter end);

  // Removes the last element in the array.
  void RemoveLast();

  // Extracts elements with indices in "[start .. start+num-1]".
  // Copies them into "elements[0 .. num-1]" if "elements" is not NULL.
  // Caution: implementation also moves elements with indices [start+num ..].
  // Calling this routine inside a loop can cause quadratic behavior.
  void ExtractSubrange(int start, int num, Element* elements);

  void Clear();
  void MergeFrom(const RepeatedField& other);
  void CopyFrom(const RepeatedField& other);

  // Replaces the contents with RepeatedField(begin, end).
  template <typename Iter>
  void Assign(Iter begin, Iter end);

  // Reserves space to expand the field to at least the given size.  If the
  // array is grown, it will always be at least doubled in size.
  void Reserve(int new_size);

  // Resizes the RepeatedField to a new, smaller size.  This is O(1).
  void Truncate(int new_size);

  void AddAlreadyReserved(const Element& value);
  // Appends a new element and returns a pointer to it.
  // The new element is uninitialized if |Element| is a POD type.
  // Should be called only if Capacity() > Size().
  Element* AddAlreadyReserved();
  Element* AddNAlreadyReserved(int elements);
  int Capacity() const;

  // Like STL resize.  Uses value to fill appended elements.
  // Like Truncate() if new_size <= size(), otherwise this is
  // O(new_size - size()).
  void Resize(int new_size, const Element& value);

  // Gets the underlying array.  This pointer is possibly invalidated by
  // any add or remove operation.
  Element* mutable_data();
  const Element* data() const;

  // Swaps entire contents with "other". If they are on separate arenas, then
  // copies data between each other.
  void Swap(RepeatedField* other);

  // Swaps entire contents with "other". Should be called only if the caller
  // can guarantee that both repeated fields are on the same arena or are on
  // the heap. Swapping between different arenas is disallowed and caught by a
  // GOOGLE_DCHECK (see API docs for details).
  void UnsafeArenaSwap(RepeatedField* other);

  // Swaps two elements.
  void SwapElements(int index1, int index2);

  // STL-like iterator support.
  typedef Element* iterator;
  typedef const Element* const_iterator;
  typedef Element value_type;
  typedef value_type& reference;
  typedef const value_type& const_reference;
  typedef value_type* pointer;
  typedef const value_type* const_pointer;
  typedef int size_type;
  typedef ptrdiff_t difference_type;

  iterator begin();
  const_iterator begin() const;
  const_iterator cbegin() const;
  iterator end();
  const_iterator end() const;
  const_iterator cend() const;

  // Reverse iterator support.
  typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
  typedef std::reverse_iterator<iterator> reverse_iterator;
  reverse_iterator rbegin() { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }
  reverse_iterator rend() { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }

  // Returns the number of bytes used by the repeated field, excluding
  // sizeof(*this).
  size_t SpaceUsedExcludingSelfLong() const;

  // Same as above, narrowed to int.
  int SpaceUsedExcludingSelf() const {
    return internal::ToIntSize(SpaceUsedExcludingSelfLong());
  }

  // Removes the element referenced by position.
  //
  // Returns an iterator to the element immediately following the removed
  // element.
  //
  // Invalidates all iterators at or after the removed element, including
  // end().
  iterator erase(const_iterator position);

  // Removes the elements in the range [first, last).
  //
  // Returns an iterator to the element immediately following the removed
  // range.
  //
  // Invalidates all iterators at or after the removed range, including end().
  iterator erase(const_iterator first, const_iterator last);

  // Gets the Arena on which this RepeatedField stores its elements.
  inline Arena* GetArena() const {
    // While no storage is allocated (total_size_ == 0), arena_or_elements_
    // holds the arena pointer itself; afterwards the arena is recorded in the
    // Rep header.
    return (total_size_ == 0) ? static_cast<Arena*>(arena_or_elements_)
                              : rep()->arena;
  }

  // For internal use only.
  //
  // This is public due to it being called by generated code.
  inline void InternalSwap(RepeatedField* other);

 private:
  static constexpr int kInitialSize = 0;
  // A note on the representation here (see also comment below for
  // RepeatedPtrFieldBase's struct Rep):
  //
  // We maintain the same sizeof(RepeatedField) as before we added arena
  // support so that we do not degrade performance by bloating memory usage.
  // Directly adding an arena_ element to RepeatedField is quite costly. By
  // using indirection in this way, we keep the same size when the
  // RepeatedField is empty (common case), and add only an 8-byte header to
  // the elements array when non-empty. We make sure to place the size fields
  // directly in the RepeatedField class to avoid costly cache misses due to
  // the indirection.
  int current_size_;  // Number of elements currently in use.
  int total_size_;    // Allocated capacity; 0 means no Rep has been created.
  struct Rep {
    Arena* arena;
    // Here we declare a huge array as a way of approximating C's "flexible
    // array member" feature without relying on undefined behavior.
    Element elements[(std::numeric_limits<int>::max() - 2 * sizeof(Arena*)) /
                     sizeof(Element)];
  };
  // Number of bytes of Rep header preceding the element array.
  static constexpr size_t kRepHeaderSize = offsetof(Rep, elements);

  // If total_size_ == 0 this points to an Arena otherwise it points to the
  // elements member of a Rep struct. Using this invariant allows the storage
  // of the arena pointer without an extra allocation in the constructor.
  void* arena_or_elements_;

  // Returns a pointer to the elements array.
  // pre-condition: the array must have been allocated.
  Element* elements() const {
    GOOGLE_DCHECK_GT(total_size_, 0);
    // Because of above pre-condition this cast is safe.
    return unsafe_elements();
  }

  // Returns a pointer to the elements array if it exists; otherwise either
  // null or an invalid pointer is returned. This only happens for empty
  // repeated fields, where you can't dereference this pointer anyway (it's
  // empty).
  Element* unsafe_elements() const {
    return static_cast<Element*>(arena_or_elements_);
  }

  // Returns a pointer to the Rep struct.
  // pre-condition: the Rep must have been allocated, ie elements() is safe.
  Rep* rep() const {
    // The Rep header lives immediately before the elements array.
    char* addr = reinterpret_cast<char*>(elements()) - offsetof(Rep, elements);
    return reinterpret_cast<Rep*>(addr);
  }

  friend class Arena;
  typedef void InternalArenaConstructable_;

  // Moves the contents of |from| into |to|, possibly clobbering |from| in the
  // process.  For primitive types this is just a memcpy(), but it could be
  // specialized for non-primitive types to, say, swap each element instead.
  void MoveArray(Element* to, Element* from, int size);

  // Copies the elements of |from| into |to|.
  void CopyArray(Element* to, const Element* from, int size);

  // Internal helper to delete all elements and deallocate the storage.
  void InternalDeallocate(Rep* rep, int size) {
    if (rep != NULL) {
      Element* e = &rep->elements[0];
      // Trivial element types need no destructor calls.
      if (!std::is_trivial<Element>::value) {
        Element* limit = &rep->elements[size];
        for (; e < limit; e++) {
          e->~Element();
        }
      }
      // Arena-owned blocks are reclaimed with the arena; only heap-backed
      // Reps are freed here.
      if (rep->arena == NULL) {
#if defined(__GXX_DELETE_WITH_SIZE__) || defined(__cpp_sized_deallocation)
        const size_t bytes = size * sizeof(*e) + kRepHeaderSize;
        ::operator delete(static_cast<void*>(rep), bytes);
#else
        ::operator delete(static_cast<void*>(rep));
#endif
      }
    }
  }

  // This class is a performance wrapper around RepeatedField::Add(const T&)
  // function. In general unless a RepeatedField is a local stack variable
  // LLVM has a hard time optimizing Add. The machine code tends to be
  // loop:
  // mov %size, dword ptr [%repeated_field]       // load
  // cmp %size, dword ptr [%repeated_field + 4]
  // jae fallback
  // mov %buffer, qword ptr [%repeated_field + 8]
  // mov dword [%buffer + %size * 4], %value
  // inc %size                                    // increment
  // mov dword ptr [%repeated_field], %size       // store
  // jmp loop
  //
  // This puts a load/store in each iteration of the important loop variable
  // size. It's a pretty bad compile that happens even in simple cases, but
  // largely the presence of the fallback path disturbs the compilers
  // mem-to-reg analysis.
  //
  // This class takes ownership of a repeated field for the duration of its
  // lifetime. The repeated field should not be accessed during this time, ie.
  // only access through this class is allowed. This class should always be a
  // function local stack variable. Intended use
  //
  // void AddSequence(const int* begin, const int* end, RepeatedField<int>* out)
  // {
  //   RepeatedFieldAdder<int> adder(out);  // Take ownership of out
  //   for (auto it = begin; it != end; ++it) {
  //     adder.Add(*it);
  //   }
  // }
  //
  // Typically due to the fact adder is a local stack variable, the compiler
  // will be successful in mem-to-reg transformation and the machine code will
  // be loop: cmp %size, %capacity jae fallback mov dword ptr [%buffer + %size *
  // 4], %val inc %size jmp loop
  //
  // The first version executes at 7 cycles per iteration while the second
  // version near 1 or 2 cycles.
  template <int = 0, bool = std::is_trivial<Element>::value>
  class FastAdderImpl {
   public:
    explicit FastAdderImpl(RepeatedField* rf) : repeated_field_(rf) {
      // Cache size, capacity, and buffer in locals so the hot Add() path can
      // keep them in registers.
      index_ = repeated_field_->current_size_;
      capacity_ = repeated_field_->total_size_;
      buffer_ = repeated_field_->unsafe_elements();
    }
    // Publishes the final element count back to the wrapped field.
    ~FastAdderImpl() { repeated_field_->current_size_ = index_; }

    void Add(Element val) {
      if (index_ == capacity_) {
        // Slow path: write back the size, grow, and re-cache the state.
        repeated_field_->current_size_ = index_;
        repeated_field_->Reserve(index_ + 1);
        capacity_ = repeated_field_->total_size_;
        buffer_ = repeated_field_->unsafe_elements();
      }
      buffer_[index_++] = val;
    }

   private:
    RepeatedField* repeated_field_;
    int index_;
    int capacity_;
    Element* buffer_;

    GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FastAdderImpl);
  };

  // FastAdder is a wrapper for adding fields. The specialization above
  // handles POD types more efficiently than RepeatedField.
  template <int I>
  class FastAdderImpl<I, false> {
   public:
    explicit FastAdderImpl(RepeatedField* rf) : repeated_field_(rf) {}
    // Non-trivial element types simply forward to RepeatedField::Add().
    void Add(const Element& val) { repeated_field_->Add(val); }

   private:
    RepeatedField* repeated_field_;
    GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FastAdderImpl);
  };

  using FastAdder = FastAdderImpl<>;

  friend class TestRepeatedFieldHelper;
  friend class ::google::protobuf::internal::ParseContext;
};
498 
namespace internal {
// Forward declarations of the iterator adaptor templates used with
// RepeatedPtrField.
template <typename It>
class RepeatedPtrIterator;
template <typename It, typename VoidPtr>
class RepeatedPtrOverPtrsIterator;
}  // namespace internal
505 
506 namespace internal {
507 
// This is a helper template to copy an array of elements efficiently when they
// have a trivial copy constructor, and correctly otherwise. This really
// shouldn't be necessary, but our compiler doesn't optimize std::copy very
// effectively.
template <typename Element,
          bool HasTrivialCopy = std::is_trivial<Element>::value>
struct ElementCopier {
  // Copies array_size elements from |from| into |to|.
  void operator()(Element* to, const Element* from, int array_size);
};
517 
518 }  // namespace internal
519 
520 namespace internal {
521 
// type-traits helper for RepeatedPtrFieldBase: we only want to invoke
// arena-related "copy if on different arena" behavior if the necessary methods
// exist on the contained type. In particular, we rely on MergeFrom() existing
// as a general proxy for the fact that a copy will work, and we also provide a
// specific override for std::string*.
template <typename T>
struct TypeImplementsMergeBehaviorProbeForMergeFrom {
  // Distinct sizes let sizeof(Check<T>(0)) reveal which overload was chosen.
  typedef char HasMerge;
  typedef long HasNoMerge;

  // We accept either of:
  // - void MergeFrom(const T& other)
  // - bool MergeFrom(const T& other)
  //
  // We mangle these names a bit to avoid compatibility issues in 'unclean'
  // include environments that may have, e.g., "#define test ..." (yes, this
  // exists).
  template <typename U, typename RetType, RetType (U::*)(const U& arg)>
  struct CheckType;
  // When U has a matching MergeFrom, one of these two overloads is viable and
  // is preferred over the varargs fallback below.
  template <typename U>
  static HasMerge Check(CheckType<U, void, &U::MergeFrom>*);
  template <typename U>
  static HasMerge Check(CheckType<U, bool, &U::MergeFrom>*);
  template <typename U>
  static HasNoMerge Check(...);

  // Resolves to either std::true_type or std::false_type.
  typedef std::integral_constant<bool,
                                 (sizeof(Check<T>(0)) == sizeof(HasMerge))>
      type;
};
553 
// By default, probe the type for a usable MergeFrom() (see above).
template <typename T, typename = void>
struct TypeImplementsMergeBehavior
    : TypeImplementsMergeBehaviorProbeForMergeFrom<T> {};
557 
558 
// std::string has no MergeFrom(), but copying it always works, so it is
// special-cased as mergeable (see the comment on the probe above).
template <>
struct TypeImplementsMergeBehavior<std::string> {
  typedef std::true_type type;
};
563 
// True iff T is both move-constructible and move-assignable.
template <typename T>
struct IsMovable
    : std::integral_constant<bool, std::is_move_constructible<T>::value &&
                                       std::is_move_assignable<T>::value> {};
568 
569 // This is the common base class for RepeatedPtrFields.  It deals only in void*
570 // pointers.  Users should not use this interface directly.
571 //
572 // The methods of this interface correspond to the methods of RepeatedPtrField,
573 // but may have a template argument called TypeHandler.  Its signature is:
574 //   class TypeHandler {
575 //    public:
576 //     typedef MyType Type;
577 //     static Type* New();
578 //     static Type* NewFromPrototype(const Type* prototype,
579 //                                       Arena* arena);
580 //     static void Delete(Type*);
581 //     static void Clear(Type*);
582 //     static void Merge(const Type& from, Type* to);
583 //
584 //     // Only needs to be implemented if SpaceUsedExcludingSelf() is called.
585 //     static int SpaceUsedLong(const Type&);
586 //   };
587 class PROTOBUF_EXPORT RepeatedPtrFieldBase {
588  protected:
589   constexpr RepeatedPtrFieldBase();
590   explicit RepeatedPtrFieldBase(Arena* arena);
  ~RepeatedPtrFieldBase() {
#ifndef NDEBUG
    // Try to trigger a segfault / ASan failure in non-opt builds if arena_'s
    // lifetime has already ended before this destructor runs, i.e. detect a
    // use-after-free of the arena.
    if (arena_) (void)arena_->SpaceAllocated();
#endif
  }
598 
599   // Must be called from destructor.
600   template <typename TypeHandler>
601   void Destroy();
602 
603   bool empty() const;
604   int size() const;
605 
606   template <typename TypeHandler>
607   const typename TypeHandler::Type& at(int index) const;
608   template <typename TypeHandler>
609   typename TypeHandler::Type& at(int index);
610 
611   template <typename TypeHandler>
612   typename TypeHandler::Type* Mutable(int index);
613   template <typename TypeHandler>
614   void Delete(int index);
615   template <typename TypeHandler>
616   typename TypeHandler::Type* Add(typename TypeHandler::Type* prototype = NULL);
617 
618  public:
619   // The next few methods are public so that they can be called from generated
620   // code when implicit weak fields are used, but they should never be called by
621   // application code.
622 
623   template <typename TypeHandler>
624   const typename TypeHandler::Type& Get(int index) const;
625 
626   // Creates and adds an element using the given prototype, without introducing
627   // a link-time dependency on the concrete message type. This method is used to
628   // implement implicit weak fields. The prototype may be NULL, in which case an
629   // ImplicitWeakMessage will be used as a placeholder.
630   MessageLite* AddWeak(const MessageLite* prototype);
631 
632   template <typename TypeHandler>
633   void Clear();
634 
635   template <typename TypeHandler>
636   void MergeFrom(const RepeatedPtrFieldBase& other);
637 
638   inline void InternalSwap(RepeatedPtrFieldBase* other);
639 
640  protected:
641   template <
642       typename TypeHandler,
643       typename std::enable_if<TypeHandler::Movable::value>::type* = nullptr>
644   void Add(typename TypeHandler::Type&& value);
645 
646   template <typename TypeHandler>
647   void RemoveLast();
648   template <typename TypeHandler>
649   void CopyFrom(const RepeatedPtrFieldBase& other);
650 
651   void CloseGap(int start, int num);
652 
653   void Reserve(int new_size);
654 
655   int Capacity() const;
656 
  // Deep-copies *value onto the heap (arena == nullptr): creates a fresh
  // instance from the prototype and merges the contents in.
  template <typename TypeHandler>
  static inline typename TypeHandler::Type* copy(
      typename TypeHandler::Type* value) {
    auto* new_value = TypeHandler::NewFromPrototype(value, nullptr);
    TypeHandler::Merge(*value, new_value);
    return new_value;
  }
664 
665   // Used for constructing iterators.
666   void* const* raw_data() const;
667   void** raw_mutable_data() const;
668 
669   template <typename TypeHandler>
670   typename TypeHandler::Type** mutable_data();
671   template <typename TypeHandler>
672   const typename TypeHandler::Type* const* data() const;
673 
674   template <typename TypeHandler>
675   PROTOBUF_NDEBUG_INLINE void Swap(RepeatedPtrFieldBase* other);
676 
677   void SwapElements(int index1, int index2);
678 
679   template <typename TypeHandler>
680   size_t SpaceUsedExcludingSelfLong() const;
681 
682   // Advanced memory management --------------------------------------
683 
684   // Like Add(), but if there are no cleared objects to use, returns NULL.
685   template <typename TypeHandler>
686   typename TypeHandler::Type* AddFromCleared();
687 
  template <typename TypeHandler>
  void AddAllocated(typename TypeHandler::Type* value) {
    // Tag-dispatch on whether the element type supports merging; the
    // mergeable (std::true_type) path can copy |value| across arenas (see
    // AddAllocatedSlowWithCopy).
    typename TypeImplementsMergeBehavior<typename TypeHandler::Type>::type t;
    AddAllocatedInternal<TypeHandler>(value, t);
  }
693 
694   template <typename TypeHandler>
695   void UnsafeArenaAddAllocated(typename TypeHandler::Type* value);
696 
  // Removes and returns the last element, tag-dispatching on merge support;
  // unlike UnsafeArenaReleaseLast() below, the mergeable path may hand the
  // caller an out-of-arena copy.
  template <typename TypeHandler>
  PROTOBUF_MUST_USE_RESULT typename TypeHandler::Type* ReleaseLast() {
    typename TypeImplementsMergeBehavior<typename TypeHandler::Type>::type t;
    return ReleaseLastInternal<TypeHandler>(t);
  }
702 
703   // Releases last element and returns it, but does not do out-of-arena copy.
704   // And just returns the raw pointer to the contained element in the arena.
705   template <typename TypeHandler>
706   typename TypeHandler::Type* UnsafeArenaReleaseLast();
707 
708   int ClearedCount() const;
709   template <typename TypeHandler>
710   void AddCleared(typename TypeHandler::Type* value);
711   template <typename TypeHandler>
712   PROTOBUF_MUST_USE_RESULT typename TypeHandler::Type* ReleaseCleared();
713 
714   template <typename TypeHandler>
715   void AddAllocatedInternal(typename TypeHandler::Type* value, std::true_type);
716   template <typename TypeHandler>
717   void AddAllocatedInternal(typename TypeHandler::Type* value, std::false_type);
718 
719   template <typename TypeHandler>
720   PROTOBUF_NOINLINE void AddAllocatedSlowWithCopy(
721       typename TypeHandler::Type* value, Arena* value_arena, Arena* my_arena);
722   template <typename TypeHandler>
723   PROTOBUF_NOINLINE void AddAllocatedSlowWithoutCopy(
724       typename TypeHandler::Type* value);
725 
726   template <typename TypeHandler>
727   typename TypeHandler::Type* ReleaseLastInternal(std::true_type);
728   template <typename TypeHandler>
729   typename TypeHandler::Type* ReleaseLastInternal(std::false_type);
730 
731   template <typename TypeHandler>
732   PROTOBUF_NOINLINE void SwapFallback(RepeatedPtrFieldBase* other);
733 
  // Returns the arena this field was constructed with (NULL for the heap).
  inline Arena* GetArena() const { return arena_; }
735 
736  private:
737   static constexpr int kInitialSize = 0;
738   // A few notes on internal representation:
739   //
740   // We use an indirected approach, with struct Rep, to keep
741   // sizeof(RepeatedPtrFieldBase) equivalent to what it was before arena support
742   // was added, namely, 3 8-byte machine words on x86-64. An instance of Rep is
743   // allocated only when the repeated field is non-empty, and it is a
744   // dynamically-sized struct (the header is directly followed by elements[]).
745   // We place arena_ and current_size_ directly in the object to avoid cache
746   // misses due to the indirection, because these fields are checked frequently.
747   // Placing all fields directly in the RepeatedPtrFieldBase instance costs
748   // significant performance for memory-sensitive workloads.
749   Arena* arena_;
750   int current_size_;
751   int total_size_;
752   struct Rep {
753     int allocated_size;
754     // Here we declare a huge array as a way of approximating C's "flexible
755     // array member" feature without relying on undefined behavior.
756     void* elements[(std::numeric_limits<int>::max() - 2 * sizeof(int)) /
757                    sizeof(void*)];
758   };
759   static constexpr size_t kRepHeaderSize = offsetof(Rep, elements);
760   Rep* rep_;
761 
  // Reinterprets the untyped void* element storage as the handler's concrete
  // element type (const and non-const flavors).
  template <typename TypeHandler>
  static inline typename TypeHandler::Type* cast(void* element) {
    return reinterpret_cast<typename TypeHandler::Type*>(element);
  }
  template <typename TypeHandler>
  static inline const typename TypeHandler::Type* cast(const void* element) {
    return reinterpret_cast<const typename TypeHandler::Type*>(element);
  }
770 
771   // Non-templated inner function to avoid code duplication. Takes a function
772   // pointer to the type-specific (templated) inner allocate/merge loop.
773   void MergeFromInternal(const RepeatedPtrFieldBase& other,
774                          void (RepeatedPtrFieldBase::*inner_loop)(void**,
775                                                                   void**, int,
776                                                                   int));
777 
778   template <typename TypeHandler>
779   PROTOBUF_NOINLINE void MergeFromInnerLoop(void** our_elems,
780                                             void** other_elems, int length,
781                                             int already_allocated);
782 
783   // Internal helper: extend array space if necessary to contain |extend_amount|
784   // more elements, and return a pointer to the element immediately following
785   // the old list of elements.  This interface factors out common behavior from
786   // Reserve() and MergeFrom() to reduce code size. |extend_amount| must be > 0.
787   void** InternalExtend(int extend_amount);
788 
789   // Internal helper for Add: add "obj" as the next element in the
790   // array, including potentially resizing the array with Reserve if
791   // needed
792   void* AddOutOfLineHelper(void* obj);
793 
794   // The reflection implementation needs to call protected methods directly,
795   // reinterpreting pointers as being to Message instead of a specific Message
796   // subclass.
797   friend class ::PROTOBUF_NAMESPACE_ID::Reflection;
798   friend class ::PROTOBUF_NAMESPACE_ID::internal::SwapFieldHelper;
799 
800   // ExtensionSet stores repeated message extensions as
801   // RepeatedPtrField<MessageLite>, but non-lite ExtensionSets need to implement
802   // SpaceUsedLong(), and thus need to call SpaceUsedExcludingSelfLong()
803   // reinterpreting MessageLite as Message.  ExtensionSet also needs to make use
804   // of AddFromCleared(), which is not part of the public interface.
805   friend class ExtensionSet;
806 
807   // The MapFieldBase implementation needs to call protected methods directly,
808   // reinterpreting pointers as being to Message instead of a specific Message
809   // subclass.
810   friend class MapFieldBase;
811   friend class MapFieldBaseStub;
812 
813   // The table-driven MergePartialFromCodedStream implementation needs to
814   // operate on RepeatedPtrField<MessageLite>.
815   friend class MergePartialFromCodedStreamHelper;
816   friend class AccessorHelper;
817   template <typename T>
818   friend struct google::protobuf::WeakRepeatedPtrField;
819 
820   GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(RepeatedPtrFieldBase);
821 };
822 
823 template <typename GenericType>
824 class GenericTypeHandler {
825  public:
826   typedef GenericType Type;
827   using Movable = IsMovable<GenericType>;
828 
829   static inline GenericType* New(Arena* arena) {
830     return Arena::CreateMaybeMessage<Type>(arena);
831   }
832   static inline GenericType* New(Arena* arena, GenericType&& value) {
833     return Arena::Create<GenericType>(arena, std::move(value));
834   }
835   static inline GenericType* NewFromPrototype(const GenericType* prototype,
836                                               Arena* arena = NULL);
837   static inline void Delete(GenericType* value, Arena* arena) {
838     if (arena == NULL) {
839       delete value;
840     }
841   }
842   static inline Arena* GetOwningArena(GenericType* value) {
843     return Arena::GetOwningArena<Type>(value);
844   }
845 
846   static inline void Clear(GenericType* value) { value->Clear(); }
847   PROTOBUF_NOINLINE
848   static void Merge(const GenericType& from, GenericType* to);
849   static inline size_t SpaceUsedLong(const GenericType& value) {
850     return value.SpaceUsedLong();
851   }
852 };
853 
template <typename GenericType>
GenericType* GenericTypeHandler<GenericType>::NewFromPrototype(
    const GenericType* /* prototype */, Arena* arena) {
  // The generic handler ignores the prototype: for concrete element types a
  // default-constructed instance is equivalent.
  return New(arena);
}
template <typename GenericType>
void GenericTypeHandler<GenericType>::Merge(const GenericType& from,
                                            GenericType* to) {
  // Delegates to the element's own MergeFrom().
  to->MergeFrom(from);
}
864 
// NewFromPrototype() and Merge() are not defined inline here, as we will need
// to do a virtual function dispatch anyways to go from Message* to call
// New/Merge.
template <>
MessageLite* GenericTypeHandler<MessageLite>::NewFromPrototype(
    const MessageLite* prototype, Arena* arena);
template <>
inline Arena* GenericTypeHandler<MessageLite>::GetOwningArena(
    MessageLite* value) {
  return value->GetOwningArena();
}
template <>
void GenericTypeHandler<MessageLite>::Merge(const MessageLite& from,
                                            MessageLite* to);
// Strings are cleared in place (not destroyed) so their buffer capacity can
// be reused by a later Add().
template <>
inline void GenericTypeHandler<std::string>::Clear(std::string* value) {
  value->clear();
}
template <>
void GenericTypeHandler<std::string>::Merge(const std::string& from,
                                            std::string* to);

// Message specialization bodies defined in message.cc. This split is necessary
// to allow proto2-lite (which includes this header) to be independent of
// Message.
template <>
PROTOBUF_EXPORT Message* GenericTypeHandler<Message>::NewFromPrototype(
    const Message* prototype, Arena* arena);
template <>
PROTOBUF_EXPORT Arena* GenericTypeHandler<Message>::GetOwningArena(
    Message* value);
896 
897 class StringTypeHandler {
898  public:
899   typedef std::string Type;
900   using Movable = IsMovable<Type>;
901 
902   static inline std::string* New(Arena* arena) {
903     return Arena::Create<std::string>(arena);
904   }
905   static inline std::string* New(Arena* arena, std::string&& value) {
906     return Arena::Create<std::string>(arena, std::move(value));
907   }
908   static inline std::string* NewFromPrototype(const std::string*,
909                                               Arena* arena) {
910     return New(arena);
911   }
912   static inline Arena* GetOwningArena(std::string*) { return nullptr; }
913   static inline void Delete(std::string* value, Arena* arena) {
914     if (arena == NULL) {
915       delete value;
916     }
917   }
918   static inline void Clear(std::string* value) { value->clear(); }
919   static inline void Merge(const std::string& from, std::string* to) {
920     *to = from;
921   }
922   static size_t SpaceUsedLong(const std::string& value) {
923     return sizeof(value) + StringSpaceUsedExcludingSelfLong(value);
924   }
925 };
926 
927 }  // namespace internal
928 
// RepeatedPtrField is like RepeatedField, but used for repeated strings or
// Messages.
template <typename Element>
class RepeatedPtrField final : private internal::RepeatedPtrFieldBase {
 public:
  // Creates an empty field with heap-backed (non-arena) storage.
  constexpr RepeatedPtrField();
  // Creates an empty field whose elements will live on |arena|.
  explicit RepeatedPtrField(Arena* arena);

  // Deep-copies |other|'s elements into a new heap-backed field.
  RepeatedPtrField(const RepeatedPtrField& other);

  // Constructs from an iterator range; enabled only when Element is
  // constructible from the iterator's value type.
  template <typename Iter,
            typename = typename std::enable_if<std::is_constructible<
                Element, decltype(*std::declval<Iter>())>::value>::type>
  RepeatedPtrField(Iter begin, Iter end);

  ~RepeatedPtrField();

  // Copy-assignment; deep-copies elements (see CopyFrom()).
  RepeatedPtrField& operator=(const RepeatedPtrField& other);

  // Move operations; may fall back to copying when arenas differ.
  RepeatedPtrField(RepeatedPtrField&& other) noexcept;
  RepeatedPtrField& operator=(RepeatedPtrField&& other) noexcept;

  // True iff size() == 0.
  bool empty() const;
  // Number of elements currently in the field.
  int size() const;

  // Borrowed access to the element at |index|; requires 0 <= index < size().
  const Element& Get(int index) const;
  Element* Mutable(int index);
  // Appends a new element (reusing a cleared one if available) and returns
  // a pointer to it.
  Element* Add();
  void Add(Element&& value);
  // Append elements in the range [begin, end) after reserving
  // the appropriate number of elements.
  template <typename Iter>
  void Add(Iter begin, Iter end);

  const Element& operator[](int index) const { return Get(index); }
  Element& operator[](int index) { return *Mutable(index); }

  // Bounds-checked access (GOOGLE_CHECK rather than debug-only DCHECK).
  const Element& at(int index) const;
  Element& at(int index);

  // Remove the last element in the array.
  // Ownership of the element is retained by the array.
  void RemoveLast();

  // Delete elements with indices in the range [start .. start+num-1].
  // Caution: implementation moves all elements with indices [start+num .. ].
  // Calling this routine inside a loop can cause quadratic behavior.
  void DeleteSubrange(int start, int num);

  // Resets size() to zero; elements are cleared and retained for reuse.
  void Clear();
  // Appends deep copies of |other|'s elements.
  void MergeFrom(const RepeatedPtrField& other);
  // Replaces the contents with deep copies of |other|'s elements.
  void CopyFrom(const RepeatedPtrField& other);

  // Replaces the contents with RepeatedPtrField(begin, end).
  template <typename Iter>
  void Assign(Iter begin, Iter end);

  // Reserve space to expand the field to at least the given size.  This only
  // resizes the pointer array; it doesn't allocate any objects.  If the
  // array is grown, it will always be at least doubled in size.
  void Reserve(int new_size);

  int Capacity() const;

  // Gets the underlying array.  This pointer is possibly invalidated by
  // any add or remove operation.
  Element** mutable_data();
  const Element* const* data() const;

  // Swap entire contents with "other". If they are on separate arenas, then
  // copies data.
  void Swap(RepeatedPtrField* other);

  // Swap entire contents with "other". Caller should guarantee that either both
  // fields are on the same arena or both are on the heap. Swapping between
  // different arenas with this function is disallowed and is caught via
  // GOOGLE_DCHECK.
  void UnsafeArenaSwap(RepeatedPtrField* other);

  // Swap two elements.
  void SwapElements(int index1, int index2);

  // STL-like iterator support
  typedef internal::RepeatedPtrIterator<Element> iterator;
  typedef internal::RepeatedPtrIterator<const Element> const_iterator;
  typedef Element value_type;
  typedef value_type& reference;
  typedef const value_type& const_reference;
  typedef value_type* pointer;
  typedef const value_type* const_pointer;
  typedef int size_type;
  typedef ptrdiff_t difference_type;

  iterator begin();
  const_iterator begin() const;
  const_iterator cbegin() const;
  iterator end();
  const_iterator end() const;
  const_iterator cend() const;

  // Reverse iterator support
  typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
  typedef std::reverse_iterator<iterator> reverse_iterator;
  reverse_iterator rbegin() { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }
  reverse_iterator rend() { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }

  // Custom STL-like iterator that iterates over and returns the underlying
  // pointers to Element rather than Element itself.
  typedef internal::RepeatedPtrOverPtrsIterator<Element*, void*>
      pointer_iterator;
  typedef internal::RepeatedPtrOverPtrsIterator<const Element* const,
                                                const void* const>
      const_pointer_iterator;
  pointer_iterator pointer_begin();
  const_pointer_iterator pointer_begin() const;
  pointer_iterator pointer_end();
  const_pointer_iterator pointer_end() const;

  // Returns (an estimate of) the number of bytes used by the repeated field,
  // excluding sizeof(*this).
  size_t SpaceUsedExcludingSelfLong() const;

  int SpaceUsedExcludingSelf() const {
    return internal::ToIntSize(SpaceUsedExcludingSelfLong());
  }

  // Advanced memory management --------------------------------------
  // When hardcore memory management becomes necessary -- as it sometimes
  // does here at Google -- the following methods may be useful.

  // Add an already-allocated object, passing ownership to the
  // RepeatedPtrField.
  //
  // Note that some special behavior occurs with respect to arenas:
  //
  //   (i) if this field holds submessages, the new submessage will be copied if
  //   the original is in an arena and this RepeatedPtrField is either in a
  //   different arena, or on the heap.
  //   (ii) if this field holds strings, the passed-in string *must* be
  //   heap-allocated, not arena-allocated. There is no way to dynamically check
  //   this at runtime, so User Beware.
  void AddAllocated(Element* value);

  // Remove the last element and return it, passing ownership to the caller.
  // Requires:  size() > 0
  //
  // If this RepeatedPtrField is on an arena, an object copy is required to pass
  // ownership back to the user (for compatible semantics). Use
  // UnsafeArenaReleaseLast() if this behavior is undesired.
  PROTOBUF_MUST_USE_RESULT Element* ReleaseLast();

  // Add an already-allocated object, skipping arena-ownership checks. The user
  // must guarantee that the given object is in the same arena as this
  // RepeatedPtrField.
  // It is also useful in legacy code that uses temporary ownership to avoid
  // copies. Example:
  //   RepeatedPtrField<T> temp_field;
  //   temp_field.AddAllocated(new T);
  //   ... // Do something with temp_field
  //   temp_field.ExtractSubrange(0, temp_field.size(), nullptr);
  // If you put temp_field on the arena this fails, because the ownership
  // transfers to the arena at the "AddAllocated" call and is not released
  // anymore causing a double delete. UnsafeArenaAddAllocated prevents this.
  void UnsafeArenaAddAllocated(Element* value);

  // Remove the last element and return it.  Works only when operating on an
  // arena. The returned pointer is to the original object in the arena, hence
  // has the arena's lifetime.
  // Requires:  current_size_ > 0
  Element* UnsafeArenaReleaseLast();

  // Extract elements with indices in the range "[start .. start+num-1]".
  // The caller assumes ownership of the extracted elements and is responsible
  // for deleting them when they are no longer needed.
  // If "elements" is non-NULL, then pointers to the extracted elements
  // are stored in "elements[0 .. num-1]" for the convenience of the caller.
  // If "elements" is NULL, then the caller must use some other mechanism
  // to perform any further operations (like deletion) on these elements.
  // Caution: implementation also moves elements with indices [start+num ..].
  // Calling this routine inside a loop can cause quadratic behavior.
  //
  // Memory copying behavior is identical to ReleaseLast(), described above: if
  // this RepeatedPtrField is on an arena, an object copy is performed for each
  // returned element, so that all returned element pointers are to
  // heap-allocated copies. If this copy is not desired, the user should call
  // UnsafeArenaExtractSubrange().
  void ExtractSubrange(int start, int num, Element** elements);

  // Identical to ExtractSubrange() described above, except that when this
  // repeated field is on an arena, no object copies are performed. Instead, the
  // raw object pointers are returned. Thus, if on an arena, the returned
  // objects must not be freed, because they will not be heap-allocated objects.
  void UnsafeArenaExtractSubrange(int start, int num, Element** elements);

  // When elements are removed by calls to RemoveLast() or Clear(), they
  // are not actually freed.  Instead, they are cleared and kept so that
  // they can be reused later.  This can save lots of CPU time when
  // repeatedly reusing a protocol message for similar purposes.
  //
  // Hardcore programs may choose to manipulate these cleared objects
  // to better optimize memory management using the following routines.

  // Get the number of cleared objects that are currently being kept
  // around for reuse.
  int ClearedCount() const;
  // Add an element to the pool of cleared objects, passing ownership to
  // the RepeatedPtrField.  The element must be cleared prior to calling
  // this method.
  //
  // This method cannot be called when the repeated field is on an arena or when
  // |value| is; both cases will trigger a GOOGLE_DCHECK-failure.
  void AddCleared(Element* value);
  // Remove a single element from the cleared pool and return it, passing
  // ownership to the caller.  The element is guaranteed to be cleared.
  // Requires:  ClearedCount() > 0
  //
  //
  // This method cannot be called when the repeated field is on an arena; doing
  // so will trigger a GOOGLE_DCHECK-failure.
  PROTOBUF_MUST_USE_RESULT Element* ReleaseCleared();

  // Removes the element referenced by position.
  //
  // Returns an iterator to the element immediately following the removed
  // element.
  //
  // Invalidates all iterators at or after the removed element, including end().
  iterator erase(const_iterator position);

  // Removes the elements in the range [first, last).
  //
  // Returns an iterator to the element immediately following the removed range.
  //
  // Invalidates all iterators at or after the removed range, including end().
  iterator erase(const_iterator first, const_iterator last);

  // Gets the arena on which this RepeatedPtrField stores its elements.
  inline Arena* GetArena() const;

  // For internal use only.
  //
  // This is public due to it being called by generated code.
  void InternalSwap(RepeatedPtrField* other) {
    internal::RepeatedPtrFieldBase::InternalSwap(other);
  }

 private:
  // Note:  RepeatedPtrField SHOULD NOT be subclassed by users.
  // Selects GenericTypeHandler or StringTypeHandler based on Element.
  class TypeHandler;

  // Implementations for ExtractSubrange(). The copying behavior must be
  // included only if the type supports the necessary operations (e.g.,
  // MergeFrom()), so we must resolve this at compile time. ExtractSubrange()
  // uses SFINAE to choose one of the below implementations.
  void ExtractSubrangeInternal(int start, int num, Element** elements,
                               std::true_type);
  void ExtractSubrangeInternal(int start, int num, Element** elements,
                               std::false_type);

  friend class Arena;

  template <typename T>
  friend struct WeakRepeatedPtrField;

  // Marker type: tells Arena this class supports arena construction.
  typedef void InternalArenaConstructable_;

};
1202 
1203 // implementation ====================================================
1204 
// Default constructor: empty, heap-backed, no storage allocated yet.
template <typename Element>
constexpr RepeatedField<Element>::RepeatedField()
    : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {}

// Arena constructor: arena_or_elements_ holds the arena pointer until the
// first allocation replaces it with the element array.
template <typename Element>
inline RepeatedField<Element>::RepeatedField(Arena* arena)
    : current_size_(0), total_size_(0), arena_or_elements_(arena) {}

// Copy constructor: deep-copies |other|'s elements into heap storage.
template <typename Element>
inline RepeatedField<Element>::RepeatedField(const RepeatedField& other)
    : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {
  if (other.current_size_ != 0) {
    Reserve(other.size());
    AddNAlreadyReserved(other.size());
    CopyArray(Mutable(0), &other.Get(0), other.size());
  }
}

// Range constructor: appends a copy of every element in [begin, end).
template <typename Element>
template <typename Iter, typename>
RepeatedField<Element>::RepeatedField(Iter begin, Iter end)
    : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {
  Add(begin, end);
}
1229 
template <typename Element>
RepeatedField<Element>::~RepeatedField() {
#ifndef NDEBUG
  // Try to trigger segfault / asan failure in non-opt builds. If arena_
  // lifetime has ended before the destructor.
  auto arena = GetArena();
  if (arena) (void)arena->SpaceAllocated();
#endif
  // Only heap-backed storage needs explicit deallocation; arena-backed
  // storage is reclaimed when the arena is destroyed.
  if (total_size_ > 0) {
    InternalDeallocate(rep(), total_size_);
  }
}
1242 
1243 template <typename Element>
1244 inline RepeatedField<Element>& RepeatedField<Element>::operator=(
1245     const RepeatedField& other) {
1246   if (this != &other) CopyFrom(other);
1247   return *this;
1248 }
1249 
template <typename Element>
inline RepeatedField<Element>::RepeatedField(RepeatedField&& other) noexcept
    : RepeatedField() {
  // We don't just call Swap(&other) here because it would perform 3 copies if
  // other is on an arena. This field can't be on an arena because arena
  // construction always uses the Arena* accepting constructor.
  if (other.GetArena()) {
    // Source is arena-owned: its storage cannot be stolen, so copy instead.
    CopyFrom(other);
  } else {
    // Both heap-backed: a member-wise swap transfers ownership cheaply.
    InternalSwap(&other);
  }
}

template <typename Element>
inline RepeatedField<Element>& RepeatedField<Element>::operator=(
    RepeatedField&& other) noexcept {
  // We don't just call Swap(&other) here because it would perform 3 copies if
  // the two fields are on different arenas.
  if (this != &other) {
    if (this->GetArena() != other.GetArena()) {
      // Cross-arena (or arena/heap) move degenerates to a copy.
      CopyFrom(other);
    } else {
      InternalSwap(&other);
    }
  }
  return *this;
}
1277 
// True iff the field holds no elements.
template <typename Element>
inline bool RepeatedField<Element>::empty() const {
  return current_size_ == 0;
}

// Number of elements currently stored.
template <typename Element>
inline int RepeatedField<Element>::size() const {
  return current_size_;
}

// Number of elements the current allocation can hold without growing.
template <typename Element>
inline int RepeatedField<Element>::Capacity() const {
  return total_size_;
}
1292 
// Appends |value| without checking capacity; caller must have Reserve()d
// space first (debug-checked).
template <typename Element>
inline void RepeatedField<Element>::AddAlreadyReserved(const Element& value) {
  GOOGLE_DCHECK_LT(current_size_, total_size_);
  elements()[current_size_++] = value;
}

// Appends an uninitialized slot and returns a pointer to it; caller must
// have Reserve()d space first (debug-checked).
template <typename Element>
inline Element* RepeatedField<Element>::AddAlreadyReserved() {
  GOOGLE_DCHECK_LT(current_size_, total_size_);
  return &elements()[current_size_++];
}

// Appends |n| uninitialized slots and returns a pointer to the first one;
// caller must have Reserve()d space first (debug-checked).
template <typename Element>
inline Element* RepeatedField<Element>::AddNAlreadyReserved(int n) {
  GOOGLE_DCHECK_GE(total_size_ - current_size_, n)
      << total_size_ << ", " << current_size_;
  // Warning: sometimes people call this when n == 0 and total_size_ == 0. In
  // this case the return pointer points to a zero size array (n == 0). Hence
  // we can just use unsafe_elements(), because the user cannot dereference the
  // pointer anyway.
  Element* ret = unsafe_elements() + current_size_;
  current_size_ += n;
  return ret;
}
1317 
1318 template <typename Element>
1319 inline void RepeatedField<Element>::Resize(int new_size, const Element& value) {
1320   GOOGLE_DCHECK_GE(new_size, 0);
1321   if (new_size > current_size_) {
1322     Reserve(new_size);
1323     std::fill(&elements()[current_size_], &elements()[new_size], value);
1324   }
1325   current_size_ = new_size;
1326 }
1327 
// Read-only access; bounds are debug-checked only (DCHECK).
template <typename Element>
inline const Element& RepeatedField<Element>::Get(int index) const {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  return elements()[index];
}

// Read-only access with bounds checks enabled in all builds (CHECK).
template <typename Element>
inline const Element& RepeatedField<Element>::at(int index) const {
  GOOGLE_CHECK_GE(index, 0);
  GOOGLE_CHECK_LT(index, current_size_);
  return elements()[index];
}

// Mutable access with bounds checks enabled in all builds (CHECK).
template <typename Element>
inline Element& RepeatedField<Element>::at(int index) {
  GOOGLE_CHECK_GE(index, 0);
  GOOGLE_CHECK_LT(index, current_size_);
  return elements()[index];
}

// Mutable access; bounds are debug-checked only (DCHECK).
template <typename Element>
inline Element* RepeatedField<Element>::Mutable(int index) {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  return &elements()[index];
}

// Overwrites the element at |index| with |value| (debug-checked bounds).
template <typename Element>
inline void RepeatedField<Element>::Set(int index, const Element& value) {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  elements()[index] = value;
}
1362 
// Appends a copy of |value|, growing the capacity when full.
template <typename Element>
inline void RepeatedField<Element>::Add(const Element& value) {
  uint32 size = current_size_;
  if (static_cast<int>(size) == total_size_) {
    // value could reference an element of the array. Reserving new space will
    // invalidate the reference. So we must make a copy first.
    auto tmp = value;
    Reserve(total_size_ + 1);
    elements()[size] = std::move(tmp);
  } else {
    elements()[size] = value;
  }
  current_size_ = size + 1;
}
1377 
// Appends one (value-uninitialized) slot and returns a pointer to it,
// growing the capacity when full.
template <typename Element>
inline Element* RepeatedField<Element>::Add() {
  uint32 size = current_size_;
  if (static_cast<int>(size) == total_size_) Reserve(total_size_ + 1);
  auto ptr = &elements()[size];
  current_size_ = size + 1;
  return ptr;
}
1386 
// Appends every element in [begin, end).  When the iterator category allows
// the distance to be computed up front, space is reserved once and the
// elements copied in bulk; otherwise elements are added one at a time.
template <typename Element>
template <typename Iter>
inline void RepeatedField<Element>::Add(Iter begin, Iter end) {
  // CalculateReserve returns -1 for input iterators whose distance cannot
  // be measured without consuming them.
  int reserve = internal::CalculateReserve(begin, end);
  if (reserve != -1) {
    if (reserve == 0) {
      return;
    }

    Reserve(reserve + size());
    // TODO(ckennelly):  The compiler loses track of the buffer freshly
    // allocated by Reserve() by the time we call elements, so it cannot
    // guarantee that elements does not alias [begin(), end()).
    //
    // If restrict is available, annotating the pointer obtained from elements()
    // causes this to lower to memcpy instead of memmove.
    std::copy(begin, end, elements() + size());
    current_size_ = reserve + size();
  } else {
    // Single-pass iterators: append one element at a time via FastAdder.
    FastAdder fast_adder(this);
    for (; begin != end; ++begin) fast_adder.Add(*begin);
  }
}
1410 
// Drops the last element by shrinking the logical size; the storage (and
// the element's old value) is retained for reuse.  Requires size() > 0.
template <typename Element>
inline void RepeatedField<Element>::RemoveLast() {
  GOOGLE_DCHECK_GT(current_size_, 0);
  current_size_--;
}
1416 
1417 template <typename Element>
1418 void RepeatedField<Element>::ExtractSubrange(int start, int num,
1419                                              Element* elements) {
1420   GOOGLE_DCHECK_GE(start, 0);
1421   GOOGLE_DCHECK_GE(num, 0);
1422   GOOGLE_DCHECK_LE(start + num, this->current_size_);
1423 
1424   // Save the values of the removed elements if requested.
1425   if (elements != NULL) {
1426     for (int i = 0; i < num; ++i) elements[i] = this->Get(i + start);
1427   }
1428 
1429   // Slide remaining elements down to fill the gap.
1430   if (num > 0) {
1431     for (int i = start + num; i < this->current_size_; ++i)
1432       this->Set(i - num, this->Get(i));
1433     this->Truncate(this->current_size_ - num);
1434   }
1435 }
1436 
// Logically empties the field; capacity and storage are retained so a
// subsequent Add() does not reallocate.
template <typename Element>
inline void RepeatedField<Element>::Clear() {
  current_size_ = 0;
}
1441 
// Appends copies of all of |other|'s elements.  |other| must be a distinct
// object (debug-checked).
template <typename Element>
inline void RepeatedField<Element>::MergeFrom(const RepeatedField& other) {
  GOOGLE_DCHECK_NE(&other, this);
  if (other.current_size_ != 0) {
    // Reserve once, bump the size, then bulk-copy into the new tail.
    int existing_size = size();
    Reserve(existing_size + other.size());
    AddNAlreadyReserved(other.size());
    CopyArray(Mutable(existing_size), &other.Get(0), other.size());
  }
}
1452 
1453 template <typename Element>
1454 inline void RepeatedField<Element>::CopyFrom(const RepeatedField& other) {
1455   if (&other == this) return;
1456   Clear();
1457   MergeFrom(other);
1458 }
1459 
// Replaces the contents with copies of the elements in [begin, end).
template <typename Element>
template <typename Iter>
inline void RepeatedField<Element>::Assign(Iter begin, Iter end) {
  Clear();
  Add(begin, end);
}
1466 
// Removes the single element at |position|; returns an iterator to the
// element that followed it.
template <typename Element>
inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
    const_iterator position) {
  return erase(position, position + 1);
}

// Removes the elements in [first, last); returns an iterator to the element
// that followed the erased range.
template <typename Element>
inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
    const_iterator first, const_iterator last) {
  size_type first_offset = first - cbegin();
  if (first != last) {
    // Shift the tail down over the erased range, then drop the leftover.
    Truncate(std::copy(last, cend(), begin() + first_offset) - cbegin());
  }
  return begin() + first_offset;
}
1482 
// Raw mutable pointer to the element array; invalidated by any add/remove.
template <typename Element>
inline Element* RepeatedField<Element>::mutable_data() {
  return unsafe_elements();
}

// Raw const pointer to the element array; invalidated by any add/remove.
template <typename Element>
inline const Element* RepeatedField<Element>::data() const {
  return unsafe_elements();
}
1492 
// Swaps all member fields of the two objects with a single byte-wise swap
// spanning current_size_ through arena_or_elements_ (relies on the members
// being laid out contiguously in declaration order).
template <typename Element>
inline void RepeatedField<Element>::InternalSwap(RepeatedField* other) {
  GOOGLE_DCHECK(this != other);

  // Swap all fields at once.
  static_assert(std::is_standard_layout<RepeatedField<Element>>::value,
                "offsetof() requires standard layout before c++17");
  internal::memswap<offsetof(RepeatedField, arena_or_elements_) +
                    sizeof(this->arena_or_elements_) -
                    offsetof(RepeatedField, current_size_)>(
      reinterpret_cast<char*>(this) + offsetof(RepeatedField, current_size_),
      reinterpret_cast<char*>(other) + offsetof(RepeatedField, current_size_));
}
1506 
// Swaps contents with |other|.  Same-arena swaps are a cheap member swap;
// cross-arena swaps copy through a temporary so each field's elements stay
// on their own arena.  With PROTOBUF_FORCE_COPY_IN_SWAP, heap/heap swaps
// also take the copying path (used to flush out unsafe swap assumptions).
template <typename Element>
void RepeatedField<Element>::Swap(RepeatedField* other) {
  if (this == other) return;
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
  if (GetArena() != nullptr && GetArena() == other->GetArena()) {
#else   // PROTOBUF_FORCE_COPY_IN_SWAP
  if (GetArena() == other->GetArena()) {
#endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
    InternalSwap(other);
  } else {
    // temp lives on other's arena: move this->elements there, copy other's
    // into *this, then hand temp's contents to other.
    RepeatedField<Element> temp(other->GetArena());
    temp.MergeFrom(*this);
    CopyFrom(*other);
    other->UnsafeArenaSwap(&temp);
  }
}
1523 
1524 template <typename Element>
1525 void RepeatedField<Element>::UnsafeArenaSwap(RepeatedField* other) {
1526   if (this == other) return;
1527   InternalSwap(other);
1528 }
1529 
// Exchanges the elements at |index1| and |index2| in place.
template <typename Element>
void RepeatedField<Element>::SwapElements(int index1, int index2) {
  using std::swap;  // enable ADL with fallback
  swap(elements()[index1], elements()[index2]);
}
1535 
// STL-style iterator accessors.  Iterators are raw pointers into the
// element array, so any add/remove operation may invalidate them.
template <typename Element>
inline typename RepeatedField<Element>::iterator
RepeatedField<Element>::begin() {
  return unsafe_elements();
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::begin() const {
  return unsafe_elements();
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::cbegin() const {
  return unsafe_elements();
}
template <typename Element>
inline typename RepeatedField<Element>::iterator RepeatedField<Element>::end() {
  return unsafe_elements() + current_size_;
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::end() const {
  return unsafe_elements() + current_size_;
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::cend() const {
  return unsafe_elements() + current_size_;
}
1565 
// Bytes used by the out-of-line allocation (header + full capacity), or 0
// when nothing has been allocated yet.
template <typename Element>
inline size_t RepeatedField<Element>::SpaceUsedExcludingSelfLong() const {
  return total_size_ > 0 ? (total_size_ * sizeof(Element) + kRepHeaderSize) : 0;
}
1570 
1571 namespace internal {
1572 // Returns the new size for a reserved field based on its 'total_size' and the
1573 // requested 'new_size'. The result is clamped to the closed interval:
1574 //   [internal::kMinRepeatedFieldAllocationSize,
1575 //    std::numeric_limits<int>::max()]
1576 // Requires:
1577 //     new_size > total_size &&
1578 //     (total_size == 0 ||
1579 //      total_size >= kRepeatedFieldLowerClampLimit)
1580 inline int CalculateReserveSize(int total_size, int new_size) {
1581   if (new_size < kRepeatedFieldLowerClampLimit) {
1582     // Clamp to smallest allowed size.
1583     return kRepeatedFieldLowerClampLimit;
1584   }
1585   if (total_size < kRepeatedFieldUpperClampLimit) {
1586     return std::max(total_size * 2, new_size);
1587   } else {
1588     // Clamp to largest allowed size.
1589     GOOGLE_DCHECK_GT(new_size, kRepeatedFieldUpperClampLimit);
1590     return std::numeric_limits<int>::max();
1591   }
1592 }
1593 }  // namespace internal
1594 
1595 // Avoid inlining of Reserve(): new, copy, and delete[] lead to a significant
1596 // amount of code bloat.
// Grows the backing storage so at least new_size elements fit.  Allocates a
// new Rep (from the arena when present, otherwise via raw operator new),
// default-constructs every slot, copies the live elements over, then
// destroys and frees the old storage.  No-op if capacity already suffices.
template <typename Element>
void RepeatedField<Element>::Reserve(int new_size) {
  if (total_size_ >= new_size) return;
  Rep* old_rep = total_size_ > 0 ? rep() : NULL;
  Rep* new_rep;
  Arena* arena = GetArena();
  new_size = internal::CalculateReserveSize(total_size_, new_size);
  GOOGLE_DCHECK_LE(
      static_cast<size_t>(new_size),
      (std::numeric_limits<size_t>::max() - kRepHeaderSize) / sizeof(Element))
      << "Requested size is too large to fit into size_t.";
  size_t bytes =
      kRepHeaderSize + sizeof(Element) * static_cast<size_t>(new_size);
  if (arena == NULL) {
    // Heap path: raw storage; element construction happens below.
    new_rep = static_cast<Rep*>(::operator new(bytes));
  } else {
    // Arena path: storage is owned (and eventually freed) by the arena.
    new_rep = reinterpret_cast<Rep*>(Arena::CreateArray<char>(arena, bytes));
  }
  new_rep->arena = arena;
  int old_total_size = total_size_;
  // Already known: new_size >= internal::kMinRepeatedFieldAllocationSize
  // Maintain invariant:
  //     total_size_ == 0 ||
  //     total_size_ >= internal::kMinRepeatedFieldAllocationSize
  total_size_ = new_size;
  arena_or_elements_ = new_rep->elements;
  // Invoke placement-new on newly allocated elements. We shouldn't have to do
  // this, since Element is supposed to be POD, but a previous version of this
  // code allocated storage with "new Element[size]" and some code uses
  // RepeatedField with non-POD types, relying on constructor invocation. If
  // Element has a trivial constructor (e.g., int32), gcc (tested with -O2)
  // completely removes this loop because the loop body is empty, so this has no
  // effect unless its side-effects are required for correctness.
  // Note that we do this before MoveArray() below because Element's copy
  // assignment implementation will want an initialized instance first.
  Element* e = &elements()[0];
  Element* limit = e + total_size_;
  for (; e < limit; e++) {
    new (e) Element;
  }
  if (current_size_ > 0) {
    MoveArray(&elements()[0], old_rep->elements, current_size_);
  }

  // Likewise, we need to invoke destructors on the old array.
  InternalDeallocate(old_rep, old_total_size);

}
1645 
// Shrinks the logical size to new_size without destroying or freeing the
// truncated elements; they remain constructed in the backing array.
// Requires new_size <= current_size_ (debug-checked only).  The
// current_size_ > 0 guard means an already-empty field is left untouched.
template <typename Element>
inline void RepeatedField<Element>::Truncate(int new_size) {
  GOOGLE_DCHECK_LE(new_size, current_size_);
  if (current_size_ > 0) {
    current_size_ = new_size;
  }
}
1653 
// "Moves" array_size elements from 'from' to 'to'.  Currently implemented as
// a plain copy via CopyArray; the source elements are not modified here.
template <typename Element>
inline void RepeatedField<Element>::MoveArray(Element* to, Element* from,
                                              int array_size) {
  CopyArray(to, from, array_size);
}
1659 
// Copies array_size elements from 'from' to 'to', dispatching to an
// ElementCopier specialization (memcpy for trivially-copyable Element).
template <typename Element>
inline void RepeatedField<Element>::CopyArray(Element* to, const Element* from,
                                              int array_size) {
  internal::ElementCopier<Element>()(to, from, array_size);
}
1665 
1666 namespace internal {
1667 
1668 template <typename Element, bool HasTrivialCopy>
1669 void ElementCopier<Element, HasTrivialCopy>::operator()(Element* to,
1670                                                         const Element* from,
1671                                                         int array_size) {
1672   std::copy(from, from + array_size, to);
1673 }
1674 
// Specialization for trivially-copyable elements: a single memcpy replaces
// the per-element assignment loop.
template <typename Element>
struct ElementCopier<Element, true> {
  void operator()(Element* to, const Element* from, int array_size) {
    memcpy(to, from, static_cast<size_t>(array_size) * sizeof(Element));
  }
};
1681 
1682 }  // namespace internal
1683 
1684 
1685 // -------------------------------------------------------------------
1686 
1687 namespace internal {
1688 
// Default constructor: empty field, no arena, no backing storage allocated.
constexpr RepeatedPtrFieldBase::RepeatedPtrFieldBase()
    : arena_(NULL), current_size_(0), total_size_(0), rep_(NULL) {}

// Arena constructor: empty field whose future allocations come from 'arena'.
inline RepeatedPtrFieldBase::RepeatedPtrFieldBase(Arena* arena)
    : arena_(arena), current_size_(0), total_size_(0), rep_(NULL) {}
1694 
// Deletes every allocated element and frees the Rep block — but only when
// there is no arena: arena-owned memory is released by the arena itself, so
// the heap path is skipped entirely in that case.  rep_ is reset either way.
template <typename TypeHandler>
void RepeatedPtrFieldBase::Destroy() {
  if (rep_ != NULL && arena_ == NULL) {
    int n = rep_->allocated_size;
    void* const* elements = rep_->elements;
    for (int i = 0; i < n; i++) {
      TypeHandler::Delete(cast<TypeHandler>(elements[i]), NULL);
    }
    // Prefer sized operator delete when the toolchain supports it; the exact
    // allocation size is recomputed from capacity plus the Rep header.
#if defined(__GXX_DELETE_WITH_SIZE__) || defined(__cpp_sized_deallocation)
    const size_t size = total_size_ * sizeof(elements[0]) + kRepHeaderSize;
    ::operator delete(static_cast<void*>(rep_), size);
#else
    ::operator delete(static_cast<void*>(rep_));
#endif
  }
  rep_ = NULL;
}
1712 
// Swaps contents with 'other'.  The cheap pointer swap is used only when
// both fields share the same arena; otherwise SwapFallback copies.  Under
// PROTOBUF_FORCE_COPY_IN_SWAP the fast path is further restricted to
// non-null arenas, forcing heap-based fields through the copying fallback.
template <typename TypeHandler>
inline void RepeatedPtrFieldBase::Swap(RepeatedPtrFieldBase* other) {
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
  if (GetArena() != nullptr && GetArena() == other->GetArena()) {
#else   // PROTOBUF_FORCE_COPY_IN_SWAP
  if (GetArena() == other->GetArena()) {
#endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
    InternalSwap(other);
  } else {
    SwapFallback<TypeHandler>(other);
  }
}
1725 
// Copy-based swap for fields on different arenas.  A temporary on |other|'s
// arena receives a copy of *this, *this is rebuilt from *other, and then the
// temporary's state is moved into *other — two element copies instead of
// three.
template <typename TypeHandler>
void RepeatedPtrFieldBase::SwapFallback(RepeatedPtrFieldBase* other) {
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
  GOOGLE_DCHECK(GetArena() == nullptr || other->GetArena() != GetArena());
#else   // PROTOBUF_FORCE_COPY_IN_SWAP
  GOOGLE_DCHECK(other->GetArena() != GetArena());
#endif  // !PROTOBUF_FORCE_COPY_IN_SWAP

  // Copy semantics in this case. We try to improve efficiency by placing the
  // temporary on |other|'s arena so that messages are copied twice rather than
  // three times.
  RepeatedPtrFieldBase temp(other->GetArena());
  temp.MergeFrom<TypeHandler>(*this);
  this->Clear<TypeHandler>();
  this->MergeFrom<TypeHandler>(*other);
  other->InternalSwap(&temp);
  temp.Destroy<TypeHandler>();  // Frees rep_ if `other` had no arena.
}
1744 
// True when no elements are logically in use (cleared spares don't count).
inline bool RepeatedPtrFieldBase::empty() const { return current_size_ == 0; }

// Number of elements logically in use.
inline int RepeatedPtrFieldBase::size() const { return current_size_; }
1748 
// Read-only access to element 'index'.  Bounds are debug-checked only.
template <typename TypeHandler>
inline const typename TypeHandler::Type& RepeatedPtrFieldBase::Get(
    int index) const {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  return *cast<TypeHandler>(rep_->elements[index]);
}
1756 
// Like Get()/Mutable() but with bounds checks that are enforced in all
// build modes (GOOGLE_CHECK rather than GOOGLE_DCHECK).
template <typename TypeHandler>
inline const typename TypeHandler::Type& RepeatedPtrFieldBase::at(
    int index) const {
  GOOGLE_CHECK_GE(index, 0);
  GOOGLE_CHECK_LT(index, current_size_);
  return *cast<TypeHandler>(rep_->elements[index]);
}

// Mutable overload of at(); same always-on bounds checking.
template <typename TypeHandler>
inline typename TypeHandler::Type& RepeatedPtrFieldBase::at(int index) {
  GOOGLE_CHECK_GE(index, 0);
  GOOGLE_CHECK_LT(index, current_size_);
  return *cast<TypeHandler>(rep_->elements[index]);
}
1771 
// Mutable pointer to element 'index'.  Bounds are debug-checked only.
template <typename TypeHandler>
inline typename TypeHandler::Type* RepeatedPtrFieldBase::Mutable(int index) {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  return cast<TypeHandler>(rep_->elements[index]);
}
1778 
// Deletes the object stored at 'index' via the handler.  Note: this only
// destroys the pointee; the slot, current_size_, and allocated_size are not
// adjusted here — callers are responsible for the array bookkeeping.
template <typename TypeHandler>
inline void RepeatedPtrFieldBase::Delete(int index) {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  TypeHandler::Delete(cast<TypeHandler>(rep_->elements[index]), arena_);
}
1785 
// Appends one element and returns it.  Fast path: reuse a previously
// cleared object sitting between current_size_ and allocated_size.  Slow
// path: create a fresh object (modeled on 'prototype' when given) and hand
// it to AddOutOfLineHelper for insertion and possible growth.
template <typename TypeHandler>
inline typename TypeHandler::Type* RepeatedPtrFieldBase::Add(
    typename TypeHandler::Type* prototype) {
  if (rep_ != NULL && current_size_ < rep_->allocated_size) {
    return cast<TypeHandler>(rep_->elements[current_size_++]);
  }
  typename TypeHandler::Type* result =
      TypeHandler::NewFromPrototype(prototype, arena_);
  return reinterpret_cast<typename TypeHandler::Type*>(
      AddOutOfLineHelper(result));
}
1797 
// Move-append overload, enabled only for movable handler types.  Reuses a
// cleared object via move-assignment when possible; otherwise grows the
// pointer array as needed and move-constructs a new element in place.
template <typename TypeHandler,
          typename std::enable_if<TypeHandler::Movable::value>::type*>
inline void RepeatedPtrFieldBase::Add(typename TypeHandler::Type&& value) {
  if (rep_ != NULL && current_size_ < rep_->allocated_size) {
    *cast<TypeHandler>(rep_->elements[current_size_++]) = std::move(value);
    return;
  }
  if (!rep_ || rep_->allocated_size == total_size_) {
    Reserve(total_size_ + 1);
  }
  ++rep_->allocated_size;
  typename TypeHandler::Type* result =
      TypeHandler::New(arena_, std::move(value));
  rep_->elements[current_size_++] = result;
}
1813 
// Removes the last element logically by clearing it and shrinking
// current_size_; the object itself is retained as a cleared spare for reuse.
template <typename TypeHandler>
inline void RepeatedPtrFieldBase::RemoveLast() {
  GOOGLE_DCHECK_GT(current_size_, 0);
  TypeHandler::Clear(cast<TypeHandler>(rep_->elements[--current_size_]));
}
1819 
1820 template <typename TypeHandler>
1821 void RepeatedPtrFieldBase::Clear() {
1822   const int n = current_size_;
1823   GOOGLE_DCHECK_GE(n, 0);
1824   if (n > 0) {
1825     void* const* elements = rep_->elements;
1826     int i = 0;
1827     do {
1828       TypeHandler::Clear(cast<TypeHandler>(elements[i++]));
1829     } while (i < n);
1830     current_size_ = 0;
1831   }
1832 }
1833 
1834 // To avoid unnecessary code duplication and reduce binary size, we use a
1835 // layered approach to implementing MergeFrom(). The toplevel method is
1836 // templated, so we get a small thunk per concrete message type in the binary.
1837 // This calls a shared implementation with most of the logic, passing a function
1838 // pointer to another type-specific piece of code that calls the object-allocate
1839 // and merge handlers.
// Type-specific MergeFrom thunk: bails out early when 'other' is empty
// (which also guarantees other.rep_ != NULL on the shared path), then
// delegates to the shared MergeFromInternal with the typed inner loop.
template <typename TypeHandler>
inline void RepeatedPtrFieldBase::MergeFrom(const RepeatedPtrFieldBase& other) {
  GOOGLE_DCHECK_NE(&other, this);
  if (other.current_size_ == 0) return;
  MergeFromInternal(other,
                    &RepeatedPtrFieldBase::MergeFromInnerLoop<TypeHandler>);
}
1847 
// Shared, non-templated half of MergeFrom: grows the pointer array to fit,
// invokes the type-specific inner loop (which may reuse cleared spares and
// allocate the rest), then fixes up current_size_ and allocated_size.
inline void RepeatedPtrFieldBase::MergeFromInternal(
    const RepeatedPtrFieldBase& other,
    void (RepeatedPtrFieldBase::*inner_loop)(void**, void**, int, int)) {
  // Note: wrapper has already guaranteed that other.rep_ != NULL here.
  int other_size = other.current_size_;
  void** other_elements = other.rep_->elements;
  // InternalExtend returns a pointer to the first writable slot.
  void** new_elements = InternalExtend(other_size);
  int allocated_elems = rep_->allocated_size - current_size_;
  (this->*inner_loop)(new_elements, other_elements, other_size,
                      allocated_elems);
  current_size_ += other_size;
  if (rep_->allocated_size < current_size_) {
    rep_->allocated_size = current_size_;
  }
}
1863 
1864 // Merges other_elems to our_elems.
// Type-specific merge loop.  First tops up our_elems with freshly allocated
// empty objects for any slots beyond the already-allocated spares, then
// merges every source element into the corresponding destination object.
template <typename TypeHandler>
void RepeatedPtrFieldBase::MergeFromInnerLoop(void** our_elems,
                                              void** other_elems, int length,
                                              int already_allocated) {
  if (already_allocated < length) {
    Arena* arena = GetArena();
    // The first source element serves as the prototype for allocation (all
    // elements of a repeated field share the same concrete type).
    typename TypeHandler::Type* elem_prototype =
        reinterpret_cast<typename TypeHandler::Type*>(other_elems[0]);
    for (int i = already_allocated; i < length; i++) {
      // Allocate a new empty element that we'll merge into below
      typename TypeHandler::Type* new_elem =
          TypeHandler::NewFromPrototype(elem_prototype, arena);
      our_elems[i] = new_elem;
    }
  }
  // Main loop that does the actual merging
  for (int i = 0; i < length; i++) {
    // Already allocated: use existing element.
    typename TypeHandler::Type* other_elem =
        reinterpret_cast<typename TypeHandler::Type*>(other_elems[i]);
    typename TypeHandler::Type* new_elem =
        reinterpret_cast<typename TypeHandler::Type*>(our_elems[i]);
    TypeHandler::Merge(*other_elem, new_elem);
  }
}
1890 
// Replaces this field's contents with a copy of 'other': clear, then merge.
// Self-copy is a no-op.
template <typename TypeHandler>
inline void RepeatedPtrFieldBase::CopyFrom(const RepeatedPtrFieldBase& other) {
  if (&other == this) return;
  RepeatedPtrFieldBase::Clear<TypeHandler>();
  RepeatedPtrFieldBase::MergeFrom<TypeHandler>(other);
}
1897 
// Capacity of the pointer array (slots, not live elements).
inline int RepeatedPtrFieldBase::Capacity() const { return total_size_; }
1899 
1900 inline void* const* RepeatedPtrFieldBase::raw_data() const {
1901   return rep_ ? rep_->elements : NULL;
1902 }
1903 
1904 inline void** RepeatedPtrFieldBase::raw_mutable_data() const {
1905   return rep_ ? const_cast<void**>(rep_->elements) : NULL;
1906 }
1907 
// Reinterprets the raw pointer array as an array of typed pointers.
template <typename TypeHandler>
inline typename TypeHandler::Type** RepeatedPtrFieldBase::mutable_data() {
  // TODO(kenton):  Breaks C++ aliasing rules.  We should probably remove this
  //   method entirely.
  return reinterpret_cast<typename TypeHandler::Type**>(raw_mutable_data());
}
1914 
// Const counterpart of mutable_data(); same aliasing caveat applies.
template <typename TypeHandler>
inline const typename TypeHandler::Type* const* RepeatedPtrFieldBase::data()
    const {
  // TODO(kenton):  Breaks C++ aliasing rules.  We should probably remove this
  //   method entirely.
  return reinterpret_cast<const typename TypeHandler::Type* const*>(raw_data());
}
1922 
1923 inline void RepeatedPtrFieldBase::SwapElements(int index1, int index2) {
1924   using std::swap;  // enable ADL with fallback
1925   swap(rep_->elements[index1], rep_->elements[index2]);
1926 }
1927 
// Total bytes used by the pointer array, the Rep header, and every
// allocated element (including cleared spares), per the handler's
// SpaceUsedLong accounting.
template <typename TypeHandler>
inline size_t RepeatedPtrFieldBase::SpaceUsedExcludingSelfLong() const {
  size_t allocated_bytes = static_cast<size_t>(total_size_) * sizeof(void*);
  if (rep_ != NULL) {
    for (int i = 0; i < rep_->allocated_size; ++i) {
      allocated_bytes +=
          TypeHandler::SpaceUsedLong(*cast<TypeHandler>(rep_->elements[i]));
    }
    allocated_bytes += kRepHeaderSize;
  }
  return allocated_bytes;
}
1940 
1941 template <typename TypeHandler>
1942 inline typename TypeHandler::Type* RepeatedPtrFieldBase::AddFromCleared() {
1943   if (rep_ != NULL && current_size_ < rep_->allocated_size) {
1944     return cast<TypeHandler>(rep_->elements[current_size_++]);
1945   } else {
1946     return NULL;
1947   }
1948 }
1949 
1950 // AddAllocated version that implements arena-safe copying behavior.
// AddAllocated for types with merge/copy behavior.  Fast path requires the
// value's owning arena to match ours and a free slot in the pointer array;
// any cleared spare occupying the insertion slot is relocated to the end of
// the allocated region.  Everything else goes through the slow path, which
// may copy or Own() the value to fix up ownership.
template <typename TypeHandler>
void RepeatedPtrFieldBase::AddAllocatedInternal(
    typename TypeHandler::Type* value, std::true_type) {
  Arena* element_arena =
      reinterpret_cast<Arena*>(TypeHandler::GetOwningArena(value));
  Arena* arena = GetArena();
  if (arena == element_arena && rep_ && rep_->allocated_size < total_size_) {
    // Fast path: underlying arena representation (tagged pointer) is equal to
    // our arena pointer, and we can add to array without resizing it (at least
    // one slot that is not allocated).
    void** elems = rep_->elements;
    if (current_size_ < rep_->allocated_size) {
      // Make space at [current] by moving first allocated element to end of
      // allocated list.
      elems[rep_->allocated_size] = elems[current_size_];
    }
    elems[current_size_] = value;
    current_size_ = current_size_ + 1;
    rep_->allocated_size = rep_->allocated_size + 1;
  } else {
    AddAllocatedSlowWithCopy<TypeHandler>(value, element_arena, arena);
  }
}
1974 
1975 // Slowpath handles all cases, copying if necessary.
// Slow path of AddAllocated: reconciles arena ownership before appending.
// Heap value + arena field -> the arena takes ownership via Own().
// Mismatched arenas -> the value is copied onto our arena/heap and the
// original is deleted.  The (possibly replaced) value is then appended.
template <typename TypeHandler>
void RepeatedPtrFieldBase::AddAllocatedSlowWithCopy(
    // Pass value_arena and my_arena to avoid duplicate virtual call (value) or
    // load (mine).
    typename TypeHandler::Type* value, Arena* value_arena, Arena* my_arena) {
#ifdef PROTOBUF_INTERNAL_USE_MUST_USE_RESULT
  GOOGLE_DCHECK(value_arena == nullptr || value_arena == my_arena);
#endif  // PROTOBUF_INTERNAL_USE_MUST_USE_RESULT
  // Ensure that either the value is in the same arena, or if not, we do the
  // appropriate thing: Own() it (if it's on heap and we're in an arena) or copy
  // it to our arena/heap (otherwise).
  if (my_arena != NULL && value_arena == NULL) {
    my_arena->Own(value);
  } else if (my_arena != value_arena) {
    typename TypeHandler::Type* new_value =
        TypeHandler::NewFromPrototype(value, my_arena);
    TypeHandler::Merge(*value, new_value);
    TypeHandler::Delete(value, value_arena);
    value = new_value;
  }

  UnsafeArenaAddAllocated<TypeHandler>(value);
}
1999 
2000 // AddAllocated version that does not implement arena-safe copying behavior.
// AddAllocated for types without merge/copy behavior: no arena-ownership
// reconciliation is possible, so the pointer is taken as-is.  Fast path is
// the same slot-shuffling insert as the copying variant; otherwise defer to
// UnsafeArenaAddAllocated for growth/spare handling.
template <typename TypeHandler>
void RepeatedPtrFieldBase::AddAllocatedInternal(
    typename TypeHandler::Type* value, std::false_type) {
  if (rep_ && rep_->allocated_size < total_size_) {
    // Fast path: underlying arena representation (tagged pointer) is equal to
    // our arena pointer, and we can add to array without resizing it (at least
    // one slot that is not allocated).
    void** elems = rep_->elements;
    if (current_size_ < rep_->allocated_size) {
      // Make space at [current] by moving first allocated element to end of
      // allocated list.
      elems[rep_->allocated_size] = elems[current_size_];
    }
    elems[current_size_] = value;
    current_size_ = current_size_ + 1;
    ++rep_->allocated_size;
  } else {
    UnsafeArenaAddAllocated<TypeHandler>(value);
  }
}
2021 
// Appends an already-allocated object without any arena-safety checks.
// Handles the four possible array states: (1) full with no spares -> grow;
// (2) full because of cleared spares -> delete the spare occupying our slot
// instead of growing, so Add/Clear loops don't leak; (3) spares present
// with room -> relocate the spare at [current_size_] to the end; (4) no
// spares, room available -> plain append.
template <typename TypeHandler>
void RepeatedPtrFieldBase::UnsafeArenaAddAllocated(
    typename TypeHandler::Type* value) {
  // Make room for the new pointer.
  if (!rep_ || current_size_ == total_size_) {
    // The array is completely full with no cleared objects, so grow it.
    Reserve(total_size_ + 1);
    ++rep_->allocated_size;
  } else if (rep_->allocated_size == total_size_) {
    // There is no more space in the pointer array because it contains some
    // cleared objects awaiting reuse.  We don't want to grow the array in this
    // case because otherwise a loop calling AddAllocated() followed by Clear()
    // would leak memory.
    TypeHandler::Delete(cast<TypeHandler>(rep_->elements[current_size_]),
                        arena_);
  } else if (current_size_ < rep_->allocated_size) {
    // We have some cleared objects.  We don't care about their order, so we
    // can just move the first one to the end to make space.
    rep_->elements[rep_->allocated_size] = rep_->elements[current_size_];
    ++rep_->allocated_size;
  } else {
    // There are no cleared objects.
    ++rep_->allocated_size;
  }

  rep_->elements[current_size_++] = value;
}
2049 
2050 // ReleaseLast() for types that implement merge/copy behavior.
// ReleaseLast() for types that implement merge/copy behavior.  The returned
// pointer must be heap-owned by the caller, so an arena-resident element is
// copied to the heap first.  Under PROTOBUF_FORCE_COPY_IN_RELEASE a copy is
// made unconditionally (and the heap original deleted) to flush out callers
// that rely on pointer identity.
template <typename TypeHandler>
inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseLastInternal(
    std::true_type) {
  // First, release an element.
  typename TypeHandler::Type* result = UnsafeArenaReleaseLast<TypeHandler>();
  // Now perform a copy if we're on an arena.
  Arena* arena = GetArena();

  typename TypeHandler::Type* new_result;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  new_result = copy<TypeHandler>(result);
  if (arena == nullptr) delete result;
#else   // PROTOBUF_FORCE_COPY_IN_RELEASE
  new_result = (arena == nullptr) ? result : copy<TypeHandler>(result);
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return new_result;
}
2068 
2069 // ReleaseLast() for types that *do not* implement merge/copy behavior -- this
2070 // is the same as UnsafeArenaReleaseLast(). Note that we GOOGLE_DCHECK-fail if we're on
2071 // an arena, since the user really should implement the copy operation in this
2072 // case.
// ReleaseLast() for types without merge/copy behavior: identical to
// UnsafeArenaReleaseLast(), but debug-fails on an arena since the returned
// pointer could not be made heap-owned.
template <typename TypeHandler>
inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseLastInternal(
    std::false_type) {
  GOOGLE_DCHECK(GetArena() == nullptr)
      << "ReleaseLast() called on a RepeatedPtrField that is on an arena, "
      << "with a type that does not implement MergeFrom. This is unsafe; "
      << "please implement MergeFrom for your type.";
  return UnsafeArenaReleaseLast<TypeHandler>();
}
2082 
// Detaches and returns the last element without any ownership fix-up.  Both
// current_size_ and allocated_size shrink by one; if cleared spares exist
// past the end, the last spare is moved into the vacated slot to keep the
// allocated region contiguous.
template <typename TypeHandler>
inline typename TypeHandler::Type*
RepeatedPtrFieldBase::UnsafeArenaReleaseLast() {
  GOOGLE_DCHECK_GT(current_size_, 0);
  typename TypeHandler::Type* result =
      cast<TypeHandler>(rep_->elements[--current_size_]);
  --rep_->allocated_size;
  if (current_size_ < rep_->allocated_size) {
    // There are cleared elements on the end; replace the removed element
    // with the last allocated element.
    rep_->elements[current_size_] = rep_->elements[rep_->allocated_size];
  }
  return result;
}
2097 
2098 inline int RepeatedPtrFieldBase::ClearedCount() const {
2099   return rep_ ? (rep_->allocated_size - current_size_) : 0;
2100 }
2101 
// Donates a heap-owned, already-cleared object to the spare pool.  Only
// valid when neither the field nor the value lives on an arena
// (debug-checked).  Grows the pointer array if it is full.
template <typename TypeHandler>
inline void RepeatedPtrFieldBase::AddCleared(
    typename TypeHandler::Type* value) {
  GOOGLE_DCHECK(GetArena() == NULL)
      << "AddCleared() can only be used on a RepeatedPtrField not on an arena.";
  GOOGLE_DCHECK(TypeHandler::GetOwningArena(value) == nullptr)
      << "AddCleared() can only accept values not on an arena.";
  if (!rep_ || rep_->allocated_size == total_size_) {
    Reserve(total_size_ + 1);
  }
  rep_->elements[rep_->allocated_size++] = value;
}
2114 
// Removes and returns one cleared spare, transferring heap ownership to the
// caller.  Requires a heap-backed field with at least one spare available.
// NOTE(review): the second GetArena() DCHECK duplicates the first — appears
// redundant but is kept as-is here.
template <typename TypeHandler>
inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseCleared() {
  GOOGLE_DCHECK(GetArena() == NULL)
      << "ReleaseCleared() can only be used on a RepeatedPtrField not on "
      << "an arena.";
  GOOGLE_DCHECK(GetArena() == NULL);
  GOOGLE_DCHECK(rep_ != NULL);
  GOOGLE_DCHECK_GT(rep_->allocated_size, current_size_);
  return cast<TypeHandler>(rep_->elements[--rep_->allocated_size]);
}
2125 
2126 }  // namespace internal
2127 
2128 // -------------------------------------------------------------------
2129 
// Selects the handler that knows how to create/clear/merge/delete elements:
// the generic message handler by default, with a specialization that maps
// std::string elements to the dedicated string handler.
template <typename Element>
class RepeatedPtrField<Element>::TypeHandler
    : public internal::GenericTypeHandler<Element> {};

template <>
class RepeatedPtrField<std::string>::TypeHandler
    : public internal::StringTypeHandler {};
2137 
// Default constructor: empty field on the heap.
template <typename Element>
constexpr RepeatedPtrField<Element>::RepeatedPtrField()
    : RepeatedPtrFieldBase() {}

// Arena constructor: empty field whose allocations come from 'arena'.
template <typename Element>
inline RepeatedPtrField<Element>::RepeatedPtrField(Arena* arena)
    : RepeatedPtrFieldBase(arena) {}
2145 
// Copy constructor: deep-copies 'other' into a fresh heap-backed field.
template <typename Element>
inline RepeatedPtrField<Element>::RepeatedPtrField(
    const RepeatedPtrField& other)
    : RepeatedPtrFieldBase() {
  MergeFrom(other);
}
2152 
// Range constructor: copies [begin, end) into a fresh field.
template <typename Element>
template <typename Iter, typename>
inline RepeatedPtrField<Element>::RepeatedPtrField(Iter begin, Iter end) {
  Add(begin, end);
}
2158 
// Destructor: deletes owned elements and frees storage (heap case only;
// arena memory is reclaimed by the arena).
template <typename Element>
RepeatedPtrField<Element>::~RepeatedPtrField() {
  Destroy<TypeHandler>();
}
2163 
2164 template <typename Element>
2165 inline RepeatedPtrField<Element>& RepeatedPtrField<Element>::operator=(
2166     const RepeatedPtrField& other) {
2167   if (this != &other) CopyFrom(other);
2168   return *this;
2169 }
2170 
// Move constructor: steals 'other's state when it is heap-backed; falls
// back to a deep copy when 'other' lives on an arena, since *this (built
// with the default constructor) is always heap-backed.
template <typename Element>
inline RepeatedPtrField<Element>::RepeatedPtrField(
    RepeatedPtrField&& other) noexcept
    : RepeatedPtrField() {
  // We don't just call Swap(&other) here because it would perform 3 copies if
  // other is on an arena. This field can't be on an arena because arena
  // construction always uses the Arena* accepting constructor.
  if (other.GetArena()) {
    CopyFrom(other);
  } else {
    InternalSwap(&other);
  }
}
2184 
// Move assignment: cheap pointer swap when both sides share an arena (or
// both are heap-backed); deep copy across different arenas.  Self-move is a
// no-op.
template <typename Element>
inline RepeatedPtrField<Element>& RepeatedPtrField<Element>::operator=(
    RepeatedPtrField&& other) noexcept {
  // We don't just call Swap(&other) here because it would perform 3 copies if
  // the two fields are on different arenas.
  if (this != &other) {
    if (this->GetArena() != other.GetArena()) {
      CopyFrom(other);
    } else {
      InternalSwap(&other);
    }
  }
  return *this;
}
2199 
// Thin typed wrappers forwarding to RepeatedPtrFieldBase with this field's
// TypeHandler.  at() is bounds-checked in all builds; Get()/Mutable() only
// in debug builds.
template <typename Element>
inline bool RepeatedPtrField<Element>::empty() const {
  return RepeatedPtrFieldBase::empty();
}

template <typename Element>
inline int RepeatedPtrField<Element>::size() const {
  return RepeatedPtrFieldBase::size();
}

template <typename Element>
inline const Element& RepeatedPtrField<Element>::Get(int index) const {
  return RepeatedPtrFieldBase::Get<TypeHandler>(index);
}

template <typename Element>
inline const Element& RepeatedPtrField<Element>::at(int index) const {
  return RepeatedPtrFieldBase::at<TypeHandler>(index);
}

template <typename Element>
inline Element& RepeatedPtrField<Element>::at(int index) {
  return RepeatedPtrFieldBase::at<TypeHandler>(index);
}


template <typename Element>
inline Element* RepeatedPtrField<Element>::Mutable(int index) {
  return RepeatedPtrFieldBase::Mutable<TypeHandler>(index);
}
2230 
// Appends a new default element and returns it for in-place mutation.
template <typename Element>
inline Element* RepeatedPtrField<Element>::Add() {
  return RepeatedPtrFieldBase::Add<TypeHandler>();
}

// Move-appends 'value' into the field.
template <typename Element>
inline void RepeatedPtrField<Element>::Add(Element&& value) {
  RepeatedPtrFieldBase::Add<TypeHandler>(std::move(value));
}
2240 
// Appends a copy of every element in [begin, end).  When the iterator
// category allows computing the distance up front (CalculateReserve
// returning != -1), capacity is reserved once to avoid repeated growth.
template <typename Element>
template <typename Iter>
inline void RepeatedPtrField<Element>::Add(Iter begin, Iter end) {
  int reserve = internal::CalculateReserve(begin, end);
  if (reserve != -1) {
    Reserve(size() + reserve);
  }
  for (; begin != end; ++begin) {
    *Add() = *begin;
  }
}
2252 
// Removes the last element (kept as a cleared spare for reuse).
template <typename Element>
inline void RepeatedPtrField<Element>::RemoveLast() {
  RepeatedPtrFieldBase::RemoveLast<TypeHandler>();
}
2257 
// Destroys the 'num' elements starting at 'start', then closes the gap by
// extracting the (now-deleted) pointers with a null output array.
template <typename Element>
inline void RepeatedPtrField<Element>::DeleteSubrange(int start, int num) {
  GOOGLE_DCHECK_GE(start, 0);
  GOOGLE_DCHECK_GE(num, 0);
  GOOGLE_DCHECK_LE(start + num, size());
  for (int i = 0; i < num; ++i) {
    RepeatedPtrFieldBase::Delete<TypeHandler>(start + i);
  }
  UnsafeArenaExtractSubrange(start, num, nullptr);
}
2268 
// Removes 'num' elements starting at 'start', handing ownership to
// 'elements' (when non-null).  Dispatches at compile time on whether the
// element type supports merge/copy, which determines if arena-resident
// elements can be copied to the heap.
template <typename Element>
inline void RepeatedPtrField<Element>::ExtractSubrange(int start, int num,
                                                       Element** elements) {
  typename internal::TypeImplementsMergeBehavior<
      typename TypeHandler::Type>::type t;
  ExtractSubrangeInternal(start, num, elements, t);
}
2276 
2277 // ExtractSubrange() implementation for types that implement merge/copy
2278 // behavior.
// ExtractSubrange() for copyable types.  Contract: the pointers written to
// 'elements' are heap-owned by the caller, so arena-resident elements are
// copied to the heap first.  A null 'elements' just drops the range
// (ownership stays as-is) — disallowed under PROTOBUF_MUST_USE_EXTRACT_RESULT.
template <typename Element>
inline void RepeatedPtrField<Element>::ExtractSubrangeInternal(
    int start, int num, Element** elements, std::true_type) {
  GOOGLE_DCHECK_GE(start, 0);
  GOOGLE_DCHECK_GE(num, 0);
  GOOGLE_DCHECK_LE(start + num, size());

  if (num == 0) return;

#ifdef PROTOBUF_MUST_USE_EXTRACT_RESULT
  GOOGLE_DCHECK_NE(elements, nullptr)
      << "Releasing elements without transferring ownership is an unsafe "
         "operation.  Use UnsafeArenaExtractSubrange.";
#endif
  if (elements == nullptr) {
    CloseGap(start, num);
    return;
  }

  Arena* arena = GetArena();
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  // Always copy.
  for (int i = 0; i < num; ++i) {
    elements[i] = copy<TypeHandler>(
        RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start));
  }
  if (arena == nullptr) {
    for (int i = 0; i < num; ++i) {
      delete RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start);
    }
  }
#else   // PROTOBUF_FORCE_COPY_IN_RELEASE
  // If we're on an arena, we perform a copy for each element so that the
  // returned elements are heap-allocated. Otherwise, just forward it.
  if (arena != nullptr) {
    for (int i = 0; i < num; ++i) {
      elements[i] = copy<TypeHandler>(
          RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start));
    }
  } else {
    for (int i = 0; i < num; ++i) {
      elements[i] = RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start);
    }
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  CloseGap(start, num);
}
2326 
2327 // ExtractSubrange() implementation for types that do not implement merge/copy
2328 // behavior.
// ExtractSubrange() for non-copyable types: forwards raw pointers like
// UnsafeArenaExtractSubrange(), after debug-asserting the field is not on
// an arena (the heap-ownership contract could not otherwise be met).
template <typename Element>
inline void RepeatedPtrField<Element>::ExtractSubrangeInternal(
    int start, int num, Element** elements, std::false_type) {
  // This case is identical to UnsafeArenaExtractSubrange(). However, since
  // ExtractSubrange() must return heap-allocated objects by contract, and we
  // cannot fulfill this contract if we are an on arena, we must GOOGLE_DCHECK() that
  // we are not on an arena.
  GOOGLE_DCHECK(GetArena() == NULL)
      << "ExtractSubrange() when arena is non-NULL is only supported when "
      << "the Element type supplies a MergeFrom() operation to make copies.";
  UnsafeArenaExtractSubrange(start, num, elements);
}
2341 
// Removes 'num' elements at 'start' and writes their raw pointers to
// 'elements' (when non-null) with no ownership fix-up — callers must know
// whether those pointers are arena- or heap-owned.  CloseGap shifts the
// remaining elements down.
template <typename Element>
inline void RepeatedPtrField<Element>::UnsafeArenaExtractSubrange(
    int start, int num, Element** elements) {
  GOOGLE_DCHECK_GE(start, 0);
  GOOGLE_DCHECK_GE(num, 0);
  GOOGLE_DCHECK_LE(start + num, size());

  if (num > 0) {
    // Save the values of the removed elements if requested.
    if (elements != NULL) {
      for (int i = 0; i < num; ++i) {
        elements[i] = RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start);
      }
    }
    CloseGap(start, num);
  }
}
2359 
// Removes every element; forwards to the type-erased base implementation.
template <typename Element>
inline void RepeatedPtrField<Element>::Clear() {
  RepeatedPtrFieldBase::Clear<TypeHandler>();
}

// Appends copies of all elements in `other` to this field.
template <typename Element>
inline void RepeatedPtrField<Element>::MergeFrom(
    const RepeatedPtrField& other) {
  RepeatedPtrFieldBase::MergeFrom<TypeHandler>(other);
}

// Replaces the contents of this field with a copy of `other`.
template <typename Element>
inline void RepeatedPtrField<Element>::CopyFrom(const RepeatedPtrField& other) {
  RepeatedPtrFieldBase::CopyFrom<TypeHandler>(other);
}

// Replaces the contents with copies of the elements in [begin, end).
template <typename Element>
template <typename Iter>
inline void RepeatedPtrField<Element>::Assign(Iter begin, Iter end) {
  Clear();
  Add(begin, end);
}
2382 
// Erases the single element at `position`; implemented in terms of the
// range overload, so it returns an iterator to the following element.
template <typename Element>
inline typename RepeatedPtrField<Element>::iterator
RepeatedPtrField<Element>::erase(const_iterator position) {
  return erase(position, position + 1);
}
2388 
2389 template <typename Element>
2390 inline typename RepeatedPtrField<Element>::iterator
2391 RepeatedPtrField<Element>::erase(const_iterator first, const_iterator last) {
2392   size_type pos_offset = std::distance(cbegin(), first);
2393   size_type last_offset = std::distance(cbegin(), last);
2394   DeleteSubrange(pos_offset, last_offset - pos_offset);
2395   return begin() + pos_offset;
2396 }
2397 
// Exposes the underlying array of element pointers for direct access.
template <typename Element>
inline Element** RepeatedPtrField<Element>::mutable_data() {
  return RepeatedPtrFieldBase::mutable_data<TypeHandler>();
}

// Read-only view of the underlying pointer array.
template <typename Element>
inline const Element* const* RepeatedPtrField<Element>::data() const {
  return RepeatedPtrFieldBase::data<TypeHandler>();
}

// Swaps the contents of two fields; self-swap is a no-op.
template <typename Element>
inline void RepeatedPtrField<Element>::Swap(RepeatedPtrField* other) {
  if (this == other) return;
  RepeatedPtrFieldBase::Swap<TypeHandler>(other);
}

// Swaps internal state directly, without the arena-safety copying implied
// by Swap() (hence "UnsafeArena"); self-swap is a no-op.
template <typename Element>
inline void RepeatedPtrField<Element>::UnsafeArenaSwap(
    RepeatedPtrField* other) {
  if (this == other) return;
  RepeatedPtrFieldBase::InternalSwap(other);
}

// Swaps two elements of this field, selected by index.
template <typename Element>
inline void RepeatedPtrField<Element>::SwapElements(int index1, int index2) {
  RepeatedPtrFieldBase::SwapElements(index1, index2);
}

// Returns the arena reported by the type-erased base.
template <typename Element>
inline Arena* RepeatedPtrField<Element>::GetArena() const {
  return RepeatedPtrFieldBase::GetArena();
}

// Memory used by this field's contents, excluding sizeof(*this).
template <typename Element>
inline size_t RepeatedPtrField<Element>::SpaceUsedExcludingSelfLong() const {
  return RepeatedPtrFieldBase::SpaceUsedExcludingSelfLong<TypeHandler>();
}
2435 
// Appends `value`, transferring ownership to the field (or its arena; see
// the UnsafeArenaAllocatedRepeatedPtrFieldBackInserter comment below).
template <typename Element>
inline void RepeatedPtrField<Element>::AddAllocated(Element* value) {
  RepeatedPtrFieldBase::AddAllocated<TypeHandler>(value);
}

// Like AddAllocated(), but skips the arena-safety handling in the base.
template <typename Element>
inline void RepeatedPtrField<Element>::UnsafeArenaAddAllocated(Element* value) {
  RepeatedPtrFieldBase::UnsafeArenaAddAllocated<TypeHandler>(value);
}

// Removes the last element and returns it; forwards to the base.
template <typename Element>
inline Element* RepeatedPtrField<Element>::ReleaseLast() {
  return RepeatedPtrFieldBase::ReleaseLast<TypeHandler>();
}

// ReleaseLast() variant without the arena-safety copy in the base.
template <typename Element>
inline Element* RepeatedPtrField<Element>::UnsafeArenaReleaseLast() {
  return RepeatedPtrFieldBase::UnsafeArenaReleaseLast<TypeHandler>();
}

// Number of cleared (cached-for-reuse) elements held by the base.
template <typename Element>
inline int RepeatedPtrField<Element>::ClearedCount() const {
  return RepeatedPtrFieldBase::ClearedCount();
}

// Donates `value` to the base's cleared-element cache.
template <typename Element>
inline void RepeatedPtrField<Element>::AddCleared(Element* value) {
  return RepeatedPtrFieldBase::AddCleared<TypeHandler>(value);
}

// Takes one element back out of the base's cleared-element cache.
template <typename Element>
inline Element* RepeatedPtrField<Element>::ReleaseCleared() {
  return RepeatedPtrFieldBase::ReleaseCleared<TypeHandler>();
}

// Pre-grows the underlying pointer array; forwards to the base.
template <typename Element>
inline void RepeatedPtrField<Element>::Reserve(int new_size) {
  return RepeatedPtrFieldBase::Reserve(new_size);
}

// Current capacity of the underlying pointer array.
template <typename Element>
inline int RepeatedPtrField<Element>::Capacity() const {
  return RepeatedPtrFieldBase::Capacity();
}
2480 
2481 // -------------------------------------------------------------------
2482 
2483 namespace internal {
2484 
2485 // STL-like iterator implementation for RepeatedPtrField.  You should not
2486 // refer to this class directly; use RepeatedPtrField<T>::iterator instead.
2487 //
2488 // The iterator for RepeatedPtrField<T>, RepeatedPtrIterator<T>, is
2489 // very similar to iterator_ptr<T**> in util/gtl/iterator_adaptors.h,
2490 // but adds random-access operators and is modified to wrap a void** base
2491 // iterator (since RepeatedPtrField stores its array as a void* array and
2492 // casting void** to T** would violate C++ aliasing rules).
2493 //
2494 // This code based on net/proto/proto-array-internal.h by Jeffrey Yasskin
2495 // (jyasskin@google.com).
2496 template <typename Element>
2497 class RepeatedPtrIterator {
2498  public:
2499   using iterator = RepeatedPtrIterator<Element>;
2500   using iterator_category = std::random_access_iterator_tag;
2501   using value_type = typename std::remove_const<Element>::type;
2502   using difference_type = std::ptrdiff_t;
2503   using pointer = Element*;
2504   using reference = Element&;
2505 
2506   RepeatedPtrIterator() : it_(NULL) {}
2507   explicit RepeatedPtrIterator(void* const* it) : it_(it) {}
2508 
2509   // Allow "upcasting" from RepeatedPtrIterator<T**> to
2510   // RepeatedPtrIterator<const T*const*>.
2511   template <typename OtherElement>
2512   RepeatedPtrIterator(const RepeatedPtrIterator<OtherElement>& other)
2513       : it_(other.it_) {
2514     // Force a compiler error if the other type is not convertible to ours.
2515     if (false) {
2516       implicit_cast<Element*>(static_cast<OtherElement*>(nullptr));
2517     }
2518   }
2519 
2520   // dereferenceable
2521   reference operator*() const { return *reinterpret_cast<Element*>(*it_); }
2522   pointer operator->() const { return &(operator*()); }
2523 
2524   // {inc,dec}rementable
2525   iterator& operator++() {
2526     ++it_;
2527     return *this;
2528   }
2529   iterator operator++(int) { return iterator(it_++); }
2530   iterator& operator--() {
2531     --it_;
2532     return *this;
2533   }
2534   iterator operator--(int) { return iterator(it_--); }
2535 
2536   // equality_comparable
2537   bool operator==(const iterator& x) const { return it_ == x.it_; }
2538   bool operator!=(const iterator& x) const { return it_ != x.it_; }
2539 
2540   // less_than_comparable
2541   bool operator<(const iterator& x) const { return it_ < x.it_; }
2542   bool operator<=(const iterator& x) const { return it_ <= x.it_; }
2543   bool operator>(const iterator& x) const { return it_ > x.it_; }
2544   bool operator>=(const iterator& x) const { return it_ >= x.it_; }
2545 
2546   // addable, subtractable
2547   iterator& operator+=(difference_type d) {
2548     it_ += d;
2549     return *this;
2550   }
2551   friend iterator operator+(iterator it, const difference_type d) {
2552     it += d;
2553     return it;
2554   }
2555   friend iterator operator+(const difference_type d, iterator it) {
2556     it += d;
2557     return it;
2558   }
2559   iterator& operator-=(difference_type d) {
2560     it_ -= d;
2561     return *this;
2562   }
2563   friend iterator operator-(iterator it, difference_type d) {
2564     it -= d;
2565     return it;
2566   }
2567 
2568   // indexable
2569   reference operator[](difference_type d) const { return *(*this + d); }
2570 
2571   // random access iterator
2572   difference_type operator-(const iterator& x) const { return it_ - x.it_; }
2573 
2574  private:
2575   template <typename OtherElement>
2576   friend class RepeatedPtrIterator;
2577 
2578   // The internal iterator.
2579   void* const* it_;
2580 };
2581 
2582 // Provide an iterator that operates on pointers to the underlying objects
2583 // rather than the objects themselves as RepeatedPtrIterator does.
2584 // Consider using this when working with stl algorithms that change
2585 // the array.
2586 // The VoidPtr template parameter holds the type-agnostic pointer value
2587 // referenced by the iterator.  It should either be "void *" for a mutable
2588 // iterator, or "const void* const" for a constant iterator.
template <typename Element, typename VoidPtr>
class RepeatedPtrOverPtrsIterator {
 public:
  using iterator = RepeatedPtrOverPtrsIterator<Element, VoidPtr>;
  using iterator_category = std::random_access_iterator_tag;
  using value_type = typename std::remove_const<Element>::type;
  using difference_type = std::ptrdiff_t;
  using pointer = Element*;
  using reference = Element&;

  // Default-constructed iterators are singular; use nullptr, matching the
  // rest of this file, rather than NULL.
  RepeatedPtrOverPtrsIterator() : it_(nullptr) {}
  explicit RepeatedPtrOverPtrsIterator(VoidPtr* it) : it_(it) {}

  // dereferenceable.  Here Element is itself a pointer type (e.g. T*), so
  // dereferencing yields the stored pointer slot, not the pointee.
  reference operator*() const { return *reinterpret_cast<Element*>(it_); }
  pointer operator->() const { return &(operator*()); }

  // {inc,dec}rementable
  iterator& operator++() {
    ++it_;
    return *this;
  }
  iterator operator++(int) { return iterator(it_++); }
  iterator& operator--() {
    --it_;
    return *this;
  }
  iterator operator--(int) { return iterator(it_--); }

  // equality_comparable
  bool operator==(const iterator& x) const { return it_ == x.it_; }
  bool operator!=(const iterator& x) const { return it_ != x.it_; }

  // less_than_comparable
  bool operator<(const iterator& x) const { return it_ < x.it_; }
  bool operator<=(const iterator& x) const { return it_ <= x.it_; }
  bool operator>(const iterator& x) const { return it_ > x.it_; }
  bool operator>=(const iterator& x) const { return it_ >= x.it_; }

  // addable, subtractable
  iterator& operator+=(difference_type d) {
    it_ += d;
    return *this;
  }
  friend iterator operator+(iterator it, difference_type d) {
    it += d;
    return it;
  }
  friend iterator operator+(difference_type d, iterator it) {
    it += d;
    return it;
  }
  iterator& operator-=(difference_type d) {
    it_ -= d;
    return *this;
  }
  friend iterator operator-(iterator it, difference_type d) {
    it -= d;
    return it;
  }

  // indexable
  reference operator[](difference_type d) const { return *(*this + d); }

  // random access iterator
  difference_type operator-(const iterator& x) const { return it_ - x.it_; }

 private:
  // NOTE(review): this befriends RepeatedPtrIterator, not other
  // instantiations of this template (there is no converting constructor
  // here that would need it) -- presumably a leftover; confirm before
  // changing, since removing it could break an external user.
  template <typename OtherElement>
  friend class RepeatedPtrIterator;

  // The internal iterator.
  VoidPtr* it_;
};
2663 
2664 void RepeatedPtrFieldBase::InternalSwap(RepeatedPtrFieldBase* other) {
2665   GOOGLE_DCHECK(this != other);
2666 
2667   // Swap all fields at once.
2668   static_assert(std::is_standard_layout<RepeatedPtrFieldBase>::value,
2669                 "offsetof() requires standard layout before c++17");
2670   internal::memswap<offsetof(RepeatedPtrFieldBase, rep_) + sizeof(this->rep_) -
2671                     offsetof(RepeatedPtrFieldBase, arena_)>(
2672       reinterpret_cast<char*>(this) + offsetof(RepeatedPtrFieldBase, arena_),
2673       reinterpret_cast<char*>(other) + offsetof(RepeatedPtrFieldBase, arena_));
2674 }
2675 
2676 }  // namespace internal
2677 
// STL-style iteration over the elements.
template <typename Element>
inline typename RepeatedPtrField<Element>::iterator
RepeatedPtrField<Element>::begin() {
  return iterator(raw_data());
}
// Const overload; relies on the implicit iterator -> const_iterator
// conversion provided by RepeatedPtrIterator's converting constructor.
template <typename Element>
inline typename RepeatedPtrField<Element>::const_iterator
RepeatedPtrField<Element>::begin() const {
  return iterator(raw_data());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_iterator
RepeatedPtrField<Element>::cbegin() const {
  return begin();
}
template <typename Element>
inline typename RepeatedPtrField<Element>::iterator
RepeatedPtrField<Element>::end() {
  return iterator(raw_data() + size());
}
// Const overload; same implicit conversion as begin() const.
template <typename Element>
inline typename RepeatedPtrField<Element>::const_iterator
RepeatedPtrField<Element>::end() const {
  return iterator(raw_data() + size());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_iterator
RepeatedPtrField<Element>::cend() const {
  return end();
}
2708 
// Iteration over the element *pointers* (RepeatedPtrOverPtrsIterator)
// rather than the elements; useful with algorithms that reorder the array.
template <typename Element>
inline typename RepeatedPtrField<Element>::pointer_iterator
RepeatedPtrField<Element>::pointer_begin() {
  return pointer_iterator(raw_mutable_data());
}
// Const overload; const_cast only adds const qualifiers here.
template <typename Element>
inline typename RepeatedPtrField<Element>::const_pointer_iterator
RepeatedPtrField<Element>::pointer_begin() const {
  return const_pointer_iterator(const_cast<const void* const*>(raw_data()));
}
template <typename Element>
inline typename RepeatedPtrField<Element>::pointer_iterator
RepeatedPtrField<Element>::pointer_end() {
  return pointer_iterator(raw_mutable_data() + size());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_pointer_iterator
RepeatedPtrField<Element>::pointer_end() const {
  return const_pointer_iterator(
      const_cast<const void* const*>(raw_data() + size()));
}
2730 
2731 // Iterators and helper functions that follow the spirit of the STL
2732 // std::back_insert_iterator and std::back_inserter but are tailor-made
2733 // for RepeatedField and RepeatedPtrField. Typical usage would be:
2734 //
2735 //   std::copy(some_sequence.begin(), some_sequence.end(),
2736 //             RepeatedFieldBackInserter(proto.mutable_sequence()));
2737 //
2738 // Ported by johannes from util/gtl/proto-array-iterators.h
2739 
2740 namespace internal {
2741 // A back inserter for RepeatedField objects.
2742 template <typename T>
2743 class RepeatedFieldBackInsertIterator
2744     : public std::iterator<std::output_iterator_tag, T> {
2745  public:
2746   explicit RepeatedFieldBackInsertIterator(
2747       RepeatedField<T>* const mutable_field)
2748       : field_(mutable_field) {}
2749   RepeatedFieldBackInsertIterator<T>& operator=(const T& value) {
2750     field_->Add(value);
2751     return *this;
2752   }
2753   RepeatedFieldBackInsertIterator<T>& operator*() { return *this; }
2754   RepeatedFieldBackInsertIterator<T>& operator++() { return *this; }
2755   RepeatedFieldBackInsertIterator<T>& operator++(int /* unused */) {
2756     return *this;
2757   }
2758 
2759  private:
2760   RepeatedField<T>* field_;
2761 };
2762 
2763 // A back inserter for RepeatedPtrField objects.
2764 template <typename T>
2765 class RepeatedPtrFieldBackInsertIterator
2766     : public std::iterator<std::output_iterator_tag, T> {
2767  public:
2768   RepeatedPtrFieldBackInsertIterator(RepeatedPtrField<T>* const mutable_field)
2769       : field_(mutable_field) {}
2770   RepeatedPtrFieldBackInsertIterator<T>& operator=(const T& value) {
2771     *field_->Add() = value;
2772     return *this;
2773   }
2774   RepeatedPtrFieldBackInsertIterator<T>& operator=(
2775       const T* const ptr_to_value) {
2776     *field_->Add() = *ptr_to_value;
2777     return *this;
2778   }
2779   RepeatedPtrFieldBackInsertIterator<T>& operator=(T&& value) {
2780     *field_->Add() = std::move(value);
2781     return *this;
2782   }
2783   RepeatedPtrFieldBackInsertIterator<T>& operator*() { return *this; }
2784   RepeatedPtrFieldBackInsertIterator<T>& operator++() { return *this; }
2785   RepeatedPtrFieldBackInsertIterator<T>& operator++(int /* unused */) {
2786     return *this;
2787   }
2788 
2789  private:
2790   RepeatedPtrField<T>* field_;
2791 };
2792 
2793 // A back inserter for RepeatedPtrFields that inserts by transferring ownership
2794 // of a pointer.
2795 template <typename T>
2796 class AllocatedRepeatedPtrFieldBackInsertIterator
2797     : public std::iterator<std::output_iterator_tag, T> {
2798  public:
2799   explicit AllocatedRepeatedPtrFieldBackInsertIterator(
2800       RepeatedPtrField<T>* const mutable_field)
2801       : field_(mutable_field) {}
2802   AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator=(
2803       T* const ptr_to_value) {
2804     field_->AddAllocated(ptr_to_value);
2805     return *this;
2806   }
2807   AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator*() { return *this; }
2808   AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++() { return *this; }
2809   AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++(int /* unused */) {
2810     return *this;
2811   }
2812 
2813  private:
2814   RepeatedPtrField<T>* field_;
2815 };
2816 
2817 // Almost identical to AllocatedRepeatedPtrFieldBackInsertIterator. This one
2818 // uses the UnsafeArenaAddAllocated instead.
2819 template <typename T>
2820 class UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator
2821     : public std::iterator<std::output_iterator_tag, T> {
2822  public:
2823   explicit UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator(
2824       RepeatedPtrField<T>* const mutable_field)
2825       : field_(mutable_field) {}
2826   UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator=(
2827       T const* const ptr_to_value) {
2828     field_->UnsafeArenaAddAllocated(const_cast<T*>(ptr_to_value));
2829     return *this;
2830   }
2831   UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator*() {
2832     return *this;
2833   }
2834   UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++() {
2835     return *this;
2836   }
2837   UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++(
2838       int /* unused */) {
2839     return *this;
2840   }
2841 
2842  private:
2843   RepeatedPtrField<T>* field_;
2844 };
2845 
2846 }  // namespace internal
2847 
// Provides a back insert iterator for RepeatedField instances,
// similar to std::back_inserter().
template <typename T>
internal::RepeatedFieldBackInsertIterator<T> RepeatedFieldBackInserter(
    RepeatedField<T>* const mutable_field) {
  return internal::RepeatedFieldBackInsertIterator<T>(mutable_field);
}

// Provides a back insert iterator for RepeatedPtrField instances,
// similar to std::back_inserter().
template <typename T>
internal::RepeatedPtrFieldBackInsertIterator<T> RepeatedPtrFieldBackInserter(
    RepeatedPtrField<T>* const mutable_field) {
  return internal::RepeatedPtrFieldBackInsertIterator<T>(mutable_field);
}

// Special back insert iterator for RepeatedPtrField instances, just in
// case someone wants to write generic template code that can access both
// RepeatedFields and RepeatedPtrFields using a common name.  Overload
// resolution on the parameter type picks the right iterator.
template <typename T>
internal::RepeatedPtrFieldBackInsertIterator<T> RepeatedFieldBackInserter(
    RepeatedPtrField<T>* const mutable_field) {
  return internal::RepeatedPtrFieldBackInsertIterator<T>(mutable_field);
}

// Provides a back insert iterator for RepeatedPtrField instances
// similar to std::back_inserter() which transfers the ownership while
// copying elements.
template <typename T>
internal::AllocatedRepeatedPtrFieldBackInsertIterator<T>
AllocatedRepeatedPtrFieldBackInserter(
    RepeatedPtrField<T>* const mutable_field) {
  return internal::AllocatedRepeatedPtrFieldBackInsertIterator<T>(
      mutable_field);
}

// Similar to AllocatedRepeatedPtrFieldBackInserter, using
// UnsafeArenaAddAllocated instead of AddAllocated.
// This is slightly faster if that matters. It is also useful in legacy code
// that uses temporary ownership to avoid copies. Example:
//   RepeatedPtrField<T> temp_field;
//   temp_field.AddAllocated(new T);
//   ... // Do something with temp_field
//   temp_field.ExtractSubrange(0, temp_field.size(), nullptr);
// If you put temp_field on the arena this fails, because the ownership
// transfers to the arena at the "AddAllocated" call and is not released anymore
// causing a double delete. Using UnsafeArenaAddAllocated prevents this.
template <typename T>
internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>
UnsafeArenaAllocatedRepeatedPtrFieldBackInserter(
    RepeatedPtrField<T>* const mutable_field) {
  return internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>(
      mutable_field);
}
2902 
2903 // Extern declarations of common instantiations to reduce library bloat.
2904 extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<bool>;
2905 extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<int32>;
2906 extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<uint32>;
2907 extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<int64>;
2908 extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<uint64>;
2909 extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<float>;
2910 extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<double>;
2911 extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE
2912     RepeatedPtrField<std::string>;
2913 
2914 }  // namespace protobuf
2915 }  // namespace google
2916 
2917 #include <google/protobuf/port_undef.inc>
2918 
2919 #endif  // GOOGLE_PROTOBUF_REPEATED_FIELD_H__
2920