1 /*
2 * Copyright (c) Facebook, Inc. and its affiliates.
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 /*
18 * For high-level documentation and usage examples see
19 * folly/docs/small_vector.md
20 *
21 * @author Jordan DeLong <delong.j@fb.com>
22 */
23
24 #pragma once
25
26 #include <algorithm>
27 #include <cassert>
28 #include <cstdlib>
29 #include <cstring>
30 #include <iterator>
31 #include <stdexcept>
32 #include <type_traits>
33 #include <utility>
34
35 #include <boost/mpl/count.hpp>
36 #include <boost/mpl/empty.hpp>
37 #include <boost/mpl/eval_if.hpp>
38 #include <boost/mpl/filter_view.hpp>
39 #include <boost/mpl/front.hpp>
40 #include <boost/mpl/identity.hpp>
41 #include <boost/mpl/if.hpp>
42 #include <boost/mpl/placeholders.hpp>
43 #include <boost/mpl/size.hpp>
44 #include <boost/mpl/vector.hpp>
45 #include <boost/operators.hpp>
46
47 #include <folly/ConstexprMath.h>
48 #include <folly/FormatTraits.h>
49 #include <folly/Likely.h>
50 #include <folly/Portability.h>
51 #include <folly/ScopeGuard.h>
52 #include <folly/Traits.h>
53 #include <folly/functional/Invoke.h>
54 #include <folly/lang/Align.h>
55 #include <folly/lang/Assume.h>
56 #include <folly/lang/Exception.h>
57 #include <folly/memory/Malloc.h>
58 #include <folly/portability/Malloc.h>
59
60 #if (FOLLY_X64 || FOLLY_PPC64)
61 #define FOLLY_SV_PACK_ATTR FOLLY_PACK_ATTR
62 #define FOLLY_SV_PACK_PUSH FOLLY_PACK_PUSH
63 #define FOLLY_SV_PACK_POP FOLLY_PACK_POP
64 #else
65 #define FOLLY_SV_PACK_ATTR
66 #define FOLLY_SV_PACK_PUSH
67 #define FOLLY_SV_PACK_POP
68 #endif
69
70 // Ignore shadowing warnings within this file, so includers can use -Wshadow.
71 FOLLY_PUSH_WARNING
72 FOLLY_GNU_DISABLE_WARNING("-Wshadow")
73
74 namespace folly {
75
76 //////////////////////////////////////////////////////////////////////
77
78 namespace small_vector_policy {
79
80 //////////////////////////////////////////////////////////////////////
81
82 /*
83 * A flag which makes us refuse to use the heap at all. If we
84 * overflow the in situ capacity we throw an exception.
85 */
86 struct NoHeap;
87
88 //////////////////////////////////////////////////////////////////////
89
90 } // namespace small_vector_policy
91
92 //////////////////////////////////////////////////////////////////////
93
94 template <class T, std::size_t M, class A, class B, class C>
95 class small_vector;
96
97 //////////////////////////////////////////////////////////////////////
98
99 namespace detail {
100
101 /*
102 * Move objects in memory to the right into some uninitialized memory, where
103 * the region overlaps. Then call create() for each hole in reverse order.
104 *
105 * This doesn't just use std::move_backward because move_backward only works
106 * if all the memory is initialized to type T already.
107 *
108 * The create function should return a reference type, to avoid
109 * extra copies and moves for non-trivial types.
110 */
111 template <class T, class Create>
112 typename std::enable_if<!is_trivially_copyable_v<T>>::type
moveObjectsRightAndCreate(T * const first,T * const lastConstructed,T * const realLast,Create && create)113 moveObjectsRightAndCreate(
114 T* const first,
115 T* const lastConstructed,
116 T* const realLast,
117 Create&& create) {
118 if (lastConstructed == realLast) {
119 return;
120 }
121
122 T* out = realLast;
123 T* in = lastConstructed;
124 {
125 auto rollback = makeGuard([&] {
126 // We want to make sure the same stuff is uninitialized memory
127 // if we exit via an exception (this is to make sure we provide
128 // the basic exception safety guarantee for insert functions).
129 if (out < lastConstructed) {
130 out = lastConstructed - 1;
131 }
132 for (auto it = out + 1; it != realLast; ++it) {
133 it->~T();
134 }
135 });
136 // Decrement the pointers only when it is known that the resulting pointer
137 // is within the boundaries of the object. Decrementing past the beginning
138 // of the object is UB. Note that this is asymmetric wrt forward iteration,
139 // as past-the-end pointers are explicitly allowed.
140 for (; in != first && out > lastConstructed;) {
141 // Out must be decremented before an exception can be thrown so that
142 // the rollback guard knows where to start.
143 --out;
144 new (out) T(std::move(*(--in)));
145 }
146 for (; in != first;) {
147 --out;
148 *out = std::move(*(--in));
149 }
150 for (; out > lastConstructed;) {
151 --out;
152 new (out) T(create());
153 }
154 for (; out != first;) {
155 --out;
156 *out = create();
157 }
158 rollback.dismiss();
159 }
160 }
161
162 // Specialization for trivially copyable types. The call to
163 // std::move_backward here will just turn into a memmove.
164 // This must only be used with trivially copyable types because some of the
165 // memory may be uninitialized, and std::move_backward() won't work when it
166 // can't memmove().
167 template <class T, class Create>
168 typename std::enable_if<is_trivially_copyable_v<T>>::type
moveObjectsRightAndCreate(T * const first,T * const lastConstructed,T * const realLast,Create && create)169 moveObjectsRightAndCreate(
170 T* const first,
171 T* const lastConstructed,
172 T* const realLast,
173 Create&& create) {
174 std::move_backward(first, lastConstructed, realLast);
175 T* const end = first - 1;
176 T* out = first + (realLast - lastConstructed) - 1;
177 for (; out != end; --out) {
178 *out = create();
179 }
180 }
181
182 /*
183 * Populate a region of memory using `op' to construct elements. If
184 * anything throws, undo what we did.
185 */
template <class T, class Function>
void populateMemForward(T* mem, std::size_t n, Function const& op) {
  // Count of slots successfully constructed so far; if op() throws, the
  // rollback guard destroys exactly those slots, leaving the region fully
  // uninitialized again (basic exception safety).
  std::size_t idx = 0;
  {
    auto rollback = makeGuard([&] {
      for (std::size_t i = 0; i < idx; ++i) {
        mem[i].~T();
      }
    });
    while (idx < n) {
      op(&mem[idx]);
      ++idx;
    }
    rollback.dismiss();
  }
}
202
203 /*
204 * Copies `fromSize` elements from `from' to `to', where `to' is only
205 * initialized up to `toSize`, but has enough storage for `fromSize'. If
206 * `toSize' > `fromSize', the extra elements are destructed.
207 */
template <class Iterator1, class Iterator2>
void partiallyUninitializedCopy(
    Iterator1 from, size_t fromSize, Iterator2 to, size_t toSize) {
  // Overwrite the destination prefix that is already initialized.
  const size_t minSize = std::min(fromSize, toSize);
  std::copy(from, from + minSize, to);
  if (fromSize > toSize) {
    // Growing: the remaining destination slots are raw storage, so they must
    // be copy-constructed into, not assigned.
    std::uninitialized_copy(from + minSize, from + fromSize, to + minSize);
  } else {
    // Shrinking (or equal): destroy the surplus initialized destination
    // elements so exactly `fromSize' remain alive.
    for (auto it = to + minSize; it != to + toSize; ++it) {
      using Value = typename std::decay<decltype(*it)>::type;
      it->~Value();
    }
  }
}
222
// Stores the element count in an integer of type SizeType, reserving the two
// most significant bits as flags (heap-backed, capacity-prefixed) when the
// heap is allowed.
template <class SizeType, bool ShouldUseHeap>
struct IntegralSizePolicyBase {
  typedef SizeType InternalSizeType;

  IntegralSizePolicyBase() : size_(0) {}

 protected:
  // Largest representable size: all bits of SizeType minus the reserved
  // flag bits.
  static constexpr std::size_t policyMaxSize() { return SizeType(~kClearMask); }

  // Current element count with the flag bits masked off.
  std::size_t doSize() const { return size_ & ~kClearMask; }

  // Nonzero iff the data lives on the heap rather than in situ.
  std::size_t isExtern() const { return kExternMask & size_; }

  void setExtern(bool b) {
    if (b) {
      size_ |= kExternMask;
    } else {
      size_ &= ~kExternMask;
    }
  }

  // Nonzero iff the heap allocation stores its capacity ahead of the
  // element data.
  std::size_t isHeapifiedCapacity() const { return kCapacityMask & size_; }

  void setHeapifiedCapacity(bool b) {
    if (b) {
      size_ |= kCapacityMask;
    } else {
      size_ &= ~kCapacityMask;
    }
  }

  // Replace the size while preserving the flag bits.
  void setSize(std::size_t sz) {
    assert(sz <= policyMaxSize());
    size_ = (kClearMask & size_) | SizeType(sz);
  }

  void incrementSize(std::size_t n) {
    // We can safely increment size without overflowing into mask bits because
    // we always check new size is less than maxPolicySize (see
    // makeSizeInternal). To be sure, added assertion to verify it.
    assert(doSize() + n <= policyMaxSize());
    size_ += SizeType(n);
  }

  // Raw size word including the flag bits; when the extern bit is set this
  // reads as a very large value, which callers exploit to skip the inline
  // fast path.
  std::size_t getInternalSize() const { return size_; }

  void swapSizePolicy(IntegralSizePolicyBase& o) { std::swap(size_, o.size_); }

  void resetSizePolicy() { size_ = 0; }

 protected:
  static bool constexpr kShouldUseHeap = ShouldUseHeap;

 private:
  // We reserve two most significant bits of size_.
  static SizeType constexpr kExternMask =
      kShouldUseHeap ? SizeType(1) << (sizeof(SizeType) * 8 - 1) : 0;

  static SizeType constexpr kCapacityMask =
      kShouldUseHeap ? SizeType(1) << (sizeof(SizeType) * 8 - 2) : 0;

  static SizeType constexpr kClearMask =
      kShouldUseHeap ? SizeType(3) << (sizeof(SizeType) * 8 - 2) : 0;

  SizeType size_;
};
287
288 template <class SizeType, bool ShouldUseHeap>
289 struct IntegralSizePolicy;
290
// Size policy for heap-enabled vectors: adds the relocation helpers used
// when elements spill from the inline buffer to a heap allocation.
template <class SizeType>
struct IntegralSizePolicy<SizeType, true>
    : public IntegralSizePolicyBase<SizeType, true> {
 public:
  /*
   * Move a range to a range of uninitialized memory. Assumes the
   * ranges don't overlap.
   */
  template <class T>
  typename std::enable_if<!is_trivially_copyable_v<T>>::type
  moveToUninitialized(T* first, T* last, T* out) {
    // idx counts destination slots successfully constructed; the guard
    // destroys exactly those on exception, leaving `out' uninitialized.
    std::size_t idx = 0;
    {
      auto rollback = makeGuard([&] {
        // Even for callers trying to give the strong guarantee
        // (e.g. push_back) it's ok to assume here that we don't have to
        // move things back and that it was a copy constructor that
        // threw: if someone throws from a move constructor the effects
        // are unspecified.
        for (std::size_t i = 0; i < idx; ++i) {
          out[i].~T();
        }
      });
      for (; first != last; ++first, ++idx) {
        new (&out[idx]) T(std::move(*first));
      }
      rollback.dismiss();
    }
  }

  // Specialization for trivially copyable types.
  template <class T>
  typename std::enable_if<is_trivially_copyable_v<T>>::type moveToUninitialized(
      T* first, T* last, T* out) {
    // A raw byte copy is valid for trivially copyable T, and sidesteps any
    // object-lifetime concerns about the uninitialized destination.
    std::memmove(
        static_cast<void*>(out),
        static_cast<void const*>(first),
        (last - first) * sizeof *first);
  }

  /*
   * Move a range to a range of uninitialized memory. Assumes the
   * ranges don't overlap. Inserts an element at out + pos using
   * emplaceFunc(). out will contain (end - begin) + 1 elements on success and
   * none on failure. If emplaceFunc() throws [begin, end) is unmodified.
   */
  template <class T, class EmplaceFunc>
  void moveToUninitializedEmplace(
      T* begin, T* end, T* out, SizeType pos, EmplaceFunc&& emplaceFunc) {
    // Must be called first so that if it throws [begin, end) is unmodified.
    // We have to support the strong exception guarantee for emplace_back().
    emplaceFunc(out + pos);
    // move old elements to the left of the new one
    {
      auto rollback = makeGuard([&] { //
        // If the left-hand move throws, destroy the freshly emplaced
        // element so `out' is left fully uninitialized.
        out[pos].~T();
      });
      this->moveToUninitialized(begin, begin + pos, out);
      rollback.dismiss();
    }
    // move old elements to the right of the new one
    {
      auto rollback = makeGuard([&] {
        // Destroy everything constructed so far (the left block plus the
        // emplaced element at pos).
        for (SizeType i = 0; i <= pos; ++i) {
          out[i].~T();
        }
      });
      if (begin + pos < end) {
        this->moveToUninitialized(begin + pos, end, out + pos + 1);
      }
      rollback.dismiss();
    }
  }
};
365
// Size policy for NoHeap vectors: the contents can never leave the inline
// buffer, so these heap-relocation helpers must never be invoked.
template <class SizeType>
struct IntegralSizePolicy<SizeType, false>
    : public IntegralSizePolicyBase<SizeType, false> {
 public:
  template <class T>
  void moveToUninitialized(T* /*first*/, T* /*last*/, T* /*out*/) {
    assume_unreachable();
  }
  template <class T, class EmplaceFunc>
  void moveToUninitializedEmplace(
      T* /* begin */,
      T* /* end */,
      T* /* out */,
      SizeType /* pos */,
      EmplaceFunc&& /* emplaceFunc */) {
    assume_unreachable();
  }
};
384
385 /*
386 * If you're just trying to use this class, ignore everything about
387 * this next small_vector_base class thing.
388 *
389 * The purpose of this junk is to minimize sizeof(small_vector<>)
390 * and allow specifying the template parameters in whatever order is
391 * convenient for the user. There's a few extra steps here to try
392 * to keep the error messages at least semi-reasonable.
393 *
394 * Apologies for all the black magic.
395 */
namespace mpl = boost::mpl;
template <
    class Value,
    std::size_t RequestedMaxInline,
    class InPolicyA,
    class InPolicyB,
    class InPolicyC>
struct small_vector_base {
  typedef mpl::vector<InPolicyA, InPolicyB, InPolicyC> PolicyList;

  /*
   * Determine the size type
   */
  // The first integral policy parameter (if any) names the internal size
  // type; otherwise we default to std::size_t.
  typedef typename mpl::filter_view<
      PolicyList,
      std::is_integral<mpl::placeholders::_1>>::type Integrals;
  typedef typename mpl::eval_if<
      mpl::empty<Integrals>,
      mpl::identity<std::size_t>,
      mpl::front<Integrals>>::type SizeType;

  static_assert(
      std::is_unsigned<SizeType>::value,
      "Size type should be an unsigned integral type");
  static_assert(
      mpl::size<Integrals>::value == 0 || mpl::size<Integrals>::value == 1,
      "Multiple size types specified in small_vector<>");

  /*
   * Determine whether we should allow spilling to the heap or not.
   */
  typedef typename mpl::count<PolicyList, small_vector_policy::NoHeap>::type
      HasNoHeap;

  static_assert(
      HasNoHeap::value == 0 || HasNoHeap::value == 1,
      "Multiple copies of small_vector_policy::NoHeap "
      "supplied; this is probably a mistake");

  /*
   * Make the real policy base classes.
   */
  typedef IntegralSizePolicy<SizeType, !HasNoHeap::value> ActualSizePolicy;

  /*
   * Now inherit from them all. This is done in such a convoluted
   * way to make sure we get the empty base optimization on all these
   * types to keep sizeof(small_vector<>) minimal.
   */
  typedef boost::totally_ordered1<
      small_vector<Value, RequestedMaxInline, InPolicyA, InPolicyB, InPolicyC>,
      ActualSizePolicy>
      type;
};
450
// Step a raw pointer back by sizeBytes bytes (inverse of shiftPointer).
inline void* unshiftPointer(void* p, size_t sizeBytes) {
  auto* bytes = static_cast<char*>(p);
  bytes -= sizeBytes;
  return bytes;
}
454
// Advance a raw pointer by sizeBytes bytes (inverse of unshiftPointer).
inline void* shiftPointer(void* p, size_t sizeBytes) {
  auto* bytes = static_cast<char*>(p);
  bytes += sizeBytes;
  return bytes;
}
458 } // namespace detail
459
460 //////////////////////////////////////////////////////////////////////
461 FOLLY_SV_PACK_PUSH
462 template <
463 class Value,
464 std::size_t RequestedMaxInline = 1,
465 class PolicyA = void,
466 class PolicyB = void,
467 class PolicyC = void>
468 class small_vector : public detail::small_vector_base<
469 Value,
470 RequestedMaxInline,
471 PolicyA,
472 PolicyB,
473 PolicyC>::type {
  typedef typename detail::
      small_vector_base<Value, RequestedMaxInline, PolicyA, PolicyB, PolicyC>::
          type BaseType;
  typedef typename BaseType::InternalSizeType InternalSizeType;

  /*
   * Figure out the max number of elements we should inline. (If
   * the user asks for less inlined elements than we can fit unioned
   * into our value_type*, we will inline more than they asked.)
   */
  static constexpr auto kSizeOfValuePtr = sizeof(Value*);
  static constexpr auto kSizeOfValue = sizeof(Value);
  static constexpr std::size_t MaxInline{
      constexpr_max(kSizeOfValuePtr / kSizeOfValue, RequestedMaxInline)};

 public:
  // std::vector-compatible member types. allocator_type is accepted by some
  // constructors for interface compatibility only and is never used to
  // allocate.
  typedef std::size_t size_type;
  typedef Value value_type;
  typedef std::allocator<Value> allocator_type;
  typedef value_type& reference;
  typedef value_type const& const_reference;
  typedef value_type* iterator;
  typedef value_type* pointer;
  typedef value_type const* const_iterator;
  typedef value_type const* const_pointer;
  typedef std::ptrdiff_t difference_type;

  typedef std::reverse_iterator<iterator> reverse_iterator;
  typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
503
  small_vector() = default;
  // Allocator is unused here. It is taken in for compatibility with std::vector
  // interface, but it will be ignored.
  small_vector(const std::allocator<Value>&) {}

  small_vector(small_vector const& o) {
    // Trivially copyable inline contents can be block-copied without
    // element-by-element construction.
    if (kShouldCopyInlineTrivial && !o.isExtern()) {
      copyInlineTrivial<Value>(o);
      return;
    }

    auto n = o.size();
    makeSize(n);
    {
      // If an element copy throws, release any heap block just acquired
      // (basic exception safety); size is only committed on success.
      auto rollback = makeGuard([&] { freeHeap(); });
      std::uninitialized_copy(o.begin(), o.begin() + n, begin());
      rollback.dismiss();
    }
    this->setSize(n);
  }
524
  small_vector(small_vector&& o) noexcept(
      std::is_nothrow_move_constructible<Value>::value) {
    if (o.isExtern()) {
      // Steal o's heap allocation; o is left empty and internal.
      this->u.pdata_.heap_ = o.u.pdata_.heap_;
      this->swapSizePolicy(o);
      if (kHasInlineCapacity) {
        this->u.setCapacity(o.u.getCapacity());
      }
    } else {
      if (kShouldCopyInlineTrivial) {
        // Trivial inline contents: block-copy the whole inline buffer.
        copyInlineTrivial<Value>(o);
        o.resetSizePolicy();
      } else {
        // Move-construct element by element into our inline buffer.
        auto n = o.size();
        std::uninitialized_copy(
            std::make_move_iterator(o.begin()),
            std::make_move_iterator(o.end()),
            begin());
        this->setSize(n);
        o.clear();
      }
    }
  }

  small_vector(std::initializer_list<value_type> il) {
    constructImpl(il.begin(), il.end(), std::false_type());
  }

  // Constructs n value-initialized elements.
  explicit small_vector(size_type n) {
    doConstruct(n, [&](void* p) { new (p) value_type(); });
  }

  // Constructs n copies of t.
  small_vector(size_type n, value_type const& t) {
    doConstruct(n, [&](void* p) { new (p) value_type(t); });
  }

  template <class Arg>
  explicit small_vector(Arg arg1, Arg arg2) {
    // Forward using std::is_arithmetic to get to the proper
    // implementation; this disambiguates between the iterators and
    // (size_t, value_type) meaning for this constructor.
    constructImpl(arg1, arg2, std::is_arithmetic<Arg>());
  }
568
  ~small_vector() {
    // Destroy elements in place, then release the heap block (if any).
    for (auto& t : *this) {
      (&t)->~value_type();
    }
    freeHeap();
  }
575
  small_vector& operator=(small_vector const& o) {
    if (FOLLY_LIKELY(this != &o)) {
      if (kShouldCopyInlineTrivial && !this->isExtern() && !o.isExtern()) {
        copyInlineTrivial<Value>(o);
      } else if (o.size() < capacity()) {
        // Fits in our current storage: overwrite in place, constructing or
        // destroying only the length difference.
        const size_t oSize = o.size();
        detail::partiallyUninitializedCopy(o.begin(), oSize, begin(), size());
        this->setSize(oSize);
      } else {
        // Needs more storage: fall back to clear-and-insert.
        assign(o.begin(), o.end());
      }
    }
    return *this;
  }

  small_vector& operator=(small_vector&& o) noexcept(
      std::is_nothrow_move_constructible<Value>::value) {
    if (FOLLY_LIKELY(this != &o)) {
      // If either is external, reduce to the default-constructed case for this,
      // since there is nothing that we can move in-place.
      if (this->isExtern() || o.isExtern()) {
        reset();
      }

      if (!o.isExtern()) {
        if (kShouldCopyInlineTrivial) {
          copyInlineTrivial<Value>(o);
          o.resetSizePolicy();
        } else {
          // Move o's inline elements over ours, element by element.
          const size_t oSize = o.size();
          detail::partiallyUninitializedCopy(
              std::make_move_iterator(o.u.buffer()),
              oSize,
              this->u.buffer(),
              size());
          this->setSize(oSize);
          o.clear();
        }
      } else {
        // Steal o's heap allocation.
        this->u.pdata_.heap_ = o.u.pdata_.heap_;
        // this was already reset above, so it's empty and internal.
        this->swapSizePolicy(o);
        if (kHasInlineCapacity) {
          this->u.setCapacity(o.u.getCapacity());
        }
      }
    }
    return *this;
  }
625
  // Element-wise equality; boost::totally_ordered1 derives != from this.
  bool operator==(small_vector const& o) const {
    return size() == o.size() && std::equal(begin(), end(), o.begin());
  }

  // Lexicographic ordering; boost::totally_ordered1 derives >, <=, >=.
  bool operator<(small_vector const& o) const {
    return std::lexicographical_compare(begin(), end(), o.begin(), o.end());
  }

  static constexpr size_type max_size() {
    // With NoHeap the inline buffer is a hard limit; otherwise the limit is
    // whatever the size policy can represent.
    return !BaseType::kShouldUseHeap ? static_cast<size_type>(MaxInline)
                                     : BaseType::policyMaxSize();
  }

  // Stateless allocator, provided for interface compatibility only.
  allocator_type get_allocator() const { return {}; }

  size_type size() const { return this->doSize(); }
  bool empty() const { return !size(); }

  iterator begin() { return data(); }
  iterator end() { return data() + size(); }
  const_iterator begin() const { return data(); }
  const_iterator end() const { return data() + size(); }
  const_iterator cbegin() const { return begin(); }
  const_iterator cend() const { return end(); }

  reverse_iterator rbegin() { return reverse_iterator(end()); }
  reverse_iterator rend() { return reverse_iterator(begin()); }

  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }

  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }

  const_reverse_iterator crbegin() const { return rbegin(); }
  const_reverse_iterator crend() const { return rend(); }
664
  /*
   * Usually one of the simplest functions in a Container-like class
   * but a bit more complex here. We have to handle all combinations
   * of in-place vs. heap between this and o.
   */
  void swap(small_vector& o) noexcept(
      std::is_nothrow_move_constructible<Value>::value&&
          IsNothrowSwappable<Value>::value) {
    using std::swap; // Allow ADL on swap for our value_type.

    // Case 1: both heap-backed -- exchange pointers, size words and (when
    // tracked inline) capacities; no element is touched.
    if (this->isExtern() && o.isExtern()) {
      this->swapSizePolicy(o);

      auto* tmp = u.pdata_.heap_;
      u.pdata_.heap_ = o.u.pdata_.heap_;
      o.u.pdata_.heap_ = tmp;

      if (kHasInlineCapacity) {
        const auto capacity_ = this->u.getCapacity();
        this->setCapacity(o.u.getCapacity());
        o.u.setCapacity(capacity_);
      }

      return;
    }

    // Case 2: both inline -- swap the common prefix elementwise, then move
    // the larger vector's tail into the smaller one.
    if (!this->isExtern() && !o.isExtern()) {
      auto& oldSmall = size() < o.size() ? *this : o;
      auto& oldLarge = size() < o.size() ? o : *this;

      for (size_type i = 0; i < oldSmall.size(); ++i) {
        swap(oldSmall[i], oldLarge[i]);
      }

      size_type i = oldSmall.size();
      const size_type ci = i;
      {
        // On exception, destroy the not-yet-moved tail of the larger vector
        // so both vectors end in a consistent (possibly truncated) state.
        auto rollback = makeGuard([&] {
          oldSmall.setSize(i);
          for (; i < oldLarge.size(); ++i) {
            oldLarge[i].~value_type();
          }
          oldLarge.setSize(ci);
        });
        for (; i < oldLarge.size(); ++i) {
          auto addr = oldSmall.begin() + i;
          new (addr) value_type(std::move(oldLarge[i]));
          oldLarge[i].~value_type();
        }
        rollback.dismiss();
      }
      oldSmall.setSize(i);
      oldLarge.setSize(ci);
      return;
    }

    // Case 3: mixed. Move the inline elements into the heap-backed vector's
    // inline buffer, then hand the heap allocation to the formerly-inline
    // vector.
    // isExtern != o.isExtern()
    auto& oldExtern = o.isExtern() ? o : *this;
    auto& oldIntern = o.isExtern() ? *this : o;

    auto oldExternCapacity = oldExtern.capacity();
    auto oldExternHeap = oldExtern.u.pdata_.heap_;

    auto buff = oldExtern.u.buffer();
    size_type i = 0;
    {
      // On exception, undo the partial move and restore oldExtern's heap
      // pointer and capacity.
      auto rollback = makeGuard([&] {
        for (size_type kill = 0; kill < i; ++kill) {
          buff[kill].~value_type();
        }
        for (; i < oldIntern.size(); ++i) {
          oldIntern[i].~value_type();
        }
        oldIntern.resetSizePolicy();
        oldExtern.u.pdata_.heap_ = oldExternHeap;
        oldExtern.setCapacity(oldExternCapacity);
      });
      for (; i < oldIntern.size(); ++i) {
        new (&buff[i]) value_type(std::move(oldIntern[i]));
        oldIntern[i].~value_type();
      }
      rollback.dismiss();
    }
    oldIntern.u.pdata_.heap_ = oldExternHeap;
    this->swapSizePolicy(o);
    oldIntern.setCapacity(oldExternCapacity);
  }
752
  void resize(size_type sz) {
    if (sz <= size()) {
      downsize(sz);
      return;
    }
    auto extra = sz - size();
    makeSize(sz);
    // Value-initialize the new tail. populateMemForward destroys any
    // partially built tail if a constructor throws.
    detail::populateMemForward(
        begin() + size(), extra, [&](void* p) { new (p) value_type(); });
    this->incrementSize(extra);
  }

  void resize(size_type sz, value_type const& v) {
    if (sz < size()) {
      erase(begin() + sz, end());
      return;
    }
    auto extra = sz - size();
    // NOTE(review): if v refers into this vector, makeSize may relocate the
    // storage and leave v dangling -- confirm callers never pass an element
    // of *this.
    makeSize(sz);
    detail::populateMemForward(
        begin() + size(), extra, [&](void* p) { new (p) value_type(v); });
    this->incrementSize(extra);
  }

  value_type* data() noexcept {
    return this->isExtern() ? u.heap() : u.buffer();
  }

  value_type const* data() const noexcept {
    return this->isExtern() ? u.heap() : u.buffer();
  }
784
  template <class... Args>
  iterator emplace(const_iterator p, Args&&... args) {
    if (p == cend()) {
      emplace_back(std::forward<Args>(args)...);
      return end() - 1;
    }

    /*
     * We implement emplace at places other than at the back with a
     * temporary for exception safety reasons. It is possible to
     * avoid having to do this, but it becomes hard to maintain the
     * basic exception safety guarantee (unless you respond to a copy
     * constructor throwing by clearing the whole vector).
     *
     * The reason for this is that otherwise you have to destruct an
     * element before constructing this one in its place---if the
     * constructor throws, you either need a nothrow default
     * constructor or a nothrow copy/move to get something back in the
     * "gap", and the vector requirements don't guarantee we have any
     * of these. Clearing the whole vector is a legal response in
     * this situation, but it seems like this implementation is easy
     * enough and probably better.
     */
    return insert(p, value_type(std::forward<Args>(args)...));
  }

  void reserve(size_type sz) { makeSize(sz); }

  size_type capacity() const {
    // For heap-backed storage: use the tracked capacity when present,
    // otherwise derive it from the allocation size. When capacity tracking
    // is mandatory (kMustTrackHeapifiedCapacity) the fallback is unreachable.
    struct Unreachable {
      size_t operator()(void*) const { assume_unreachable(); }
    };
    using AllocationSizeOrUnreachable =
        conditional_t<kMustTrackHeapifiedCapacity, Unreachable, AllocationSize>;
    if (this->isExtern()) {
      if (hasCapacity()) {
        return u.getCapacity();
      }
      return AllocationSizeOrUnreachable{}(u.pdata_.heap_) / sizeof(value_type);
    }
    return MaxInline;
  }

  void shrink_to_fit() {
    if (!this->isExtern()) {
      return;
    }

    // Copy (not move) into a right-sized vector, then swap; copying leaves
    // *this intact if an element copy throws.
    small_vector tmp(begin(), end());
    tmp.swap(*this);
  }
836
  template <class... Args>
  reference emplace_back(Args&&... args) {
    // getInternalSize() is the raw size word including the flag bits; when
    // the vector is heap-backed the extern bit (top bit of the size type)
    // makes it read far larger than MaxInline, so the inline fast path is
    // skipped without a separate isExtern() branch.
    auto isize_ = this->getInternalSize();
    if (isize_ < MaxInline) {
      new (u.buffer() + isize_) value_type(std::forward<Args>(args)...);
      this->incrementSize(1);
      return *(u.buffer() + isize_);
    }
    if (!BaseType::kShouldUseHeap) {
      throw_exception<std::length_error>("max_size exceeded in small_vector");
    }
    auto size_ = size();
    auto capacity_ = capacity();
    if (capacity_ == size_) {
      // Any of args may be references into the vector.
      // When we are reallocating, we have to be careful to construct the new
      // element before modifying the data in the old buffer.
      makeSize(
          size_ + 1,
          [&](void* p) { new (p) value_type(std::forward<Args>(args)...); },
          size_);
    } else {
      // We know the vector is stored in the heap.
      new (u.heap() + size_) value_type(std::forward<Args>(args)...);
    }
    this->incrementSize(1);
    return *(u.heap() + size_);
  }

  void push_back(value_type&& t) { emplace_back(std::move(t)); }

  void push_back(value_type const& t) { emplace_back(t); }

  void pop_back() {
    // ideally this would be implemented in terms of erase(end() - 1) to reuse
    // the higher-level abstraction, but neither Clang or GCC are able to
    // optimize it away. if you change this, please verify (with disassembly)
    // that the generated code on -O3 (and ideally -O2) stays short
    downsize(size() - 1);
  }
877
  iterator insert(const_iterator constp, value_type&& t) {
    iterator p = unconst(constp);
    if (p == end()) {
      push_back(std::move(t));
      return end() - 1;
    }

    auto offset = p - begin();
    auto size_ = size();
    if (capacity() == size_) {
      // Reallocating: t is constructed in its slot in the new buffer before
      // the old elements are moved (t may alias an existing element).
      makeSize(
          size_ + 1,
          [&t](void* ptr) { new (ptr) value_type(std::move(t)); },
          offset);
      this->incrementSize(1);
    } else {
      // In-place: shift [offset, size_) right by one, then move t into the
      // hole (create() is called exactly once here).
      detail::moveObjectsRightAndCreate(
          data() + offset,
          data() + size_,
          data() + size_ + 1,
          [&]() mutable -> value_type&& { return std::move(t); });
      this->incrementSize(1);
    }
    return begin() + offset;
  }

  iterator insert(const_iterator p, value_type const& t) {
    // Make a copy and forward to the rvalue value_type&& overload
    // above.
    return insert(p, value_type(t));
  }

  iterator insert(const_iterator pos, size_type n, value_type const& val) {
    auto offset = pos - begin();
    auto size_ = size();
    // NOTE(review): if val aliases an element of *this, makeSize may
    // relocate it out from under us -- confirm callers never do that.
    makeSize(size_ + n);
    detail::moveObjectsRightAndCreate(
        data() + offset,
        data() + size_,
        data() + size_ + n,
        [&]() mutable -> value_type const& { return val; });
    this->incrementSize(n);
    return begin() + offset;
  }

  template <class Arg>
  iterator insert(const_iterator p, Arg arg1, Arg arg2) {
    // Forward using std::is_arithmetic to get to the proper
    // implementation; this disambiguates between the iterators and
    // (size_t, value_type) meaning for this function.
    return insertImpl(unconst(p), arg1, arg2, std::is_arithmetic<Arg>());
  }

  iterator insert(const_iterator p, std::initializer_list<value_type> il) {
    return insert(p, il.begin(), il.end());
  }
934
  iterator erase(const_iterator q) {
    // ideally this would be implemented in terms of erase(q, q + 1) to reuse
    // the higher-level abstraction, but neither Clang or GCC are able to
    // optimize it away. if you change this, please verify (with disassembly)
    // that the generated code on -O3 (and ideally -O2) stays short
    std::move(unconst(q) + 1, end(), unconst(q));
    downsize(size() - 1);
    return unconst(q);
  }

  iterator erase(const_iterator q1, const_iterator q2) {
    if (q1 == q2) {
      return unconst(q1);
    }
    // Shift the suffix down over the erased range, then destroy the tail.
    std::move(unconst(q2), end(), unconst(q1));
    downsize(size() - std::distance(q1, q2));
    return unconst(q1);
  }

  void clear() {
    // ideally this would be implemented in terms of erase(begin(), end()) to
    // reuse the higher-level abstraction, but neither Clang or GCC are able to
    // optimize it away. if you change this, please verify (with disassembly)
    // that the generated code on -O3 (and ideally -O2) stays short
    downsize(0);
  }

  // Replace the contents with copies of [first, last).
  template <class Arg>
  void assign(Arg first, Arg last) {
    clear();
    insert(end(), first, last);
  }

  void assign(std::initializer_list<value_type> il) {
    assign(il.begin(), il.end());
  }

  // Replace the contents with n copies of t.
  void assign(size_type n, const value_type& t) {
    clear();
    insert(end(), n, t);
  }
976
  // Element access. front()/back() require a non-empty vector and
  // operator[] requires i < size() (both enforced only by assert);
  // at() checks and throws std::out_of_range.
  reference front() {
    assert(!empty());
    return *begin();
  }
  reference back() {
    assert(!empty());
    return *(end() - 1);
  }
  const_reference front() const {
    assert(!empty());
    return *begin();
  }
  const_reference back() const {
    assert(!empty());
    return *(end() - 1);
  }

  reference operator[](size_type i) {
    assert(i < size());
    return *(begin() + i);
  }

  const_reference operator[](size_type i) const {
    assert(i < size());
    return *(begin() + i);
  }

  reference at(size_type i) {
    if (i >= size()) {
      throw_exception<std::out_of_range>("index out of range");
    }
    return (*this)[i];
  }

  const_reference at(size_type i) const {
    if (i >= size()) {
      throw_exception<std::out_of_range>("index out of range");
    }
    return (*this)[i];
  }
1017
1018 private:
1019 static iterator unconst(const_iterator it) {
1020 return const_cast<iterator>(it);
1021 }
1022
1023 void downsize(size_type sz) {
1024 assert(sz <= size());
1025 for (auto it = (begin() + sz); it != end(); ++it) {
1026 it->~value_type();
1027 }
1028 this->setSize(sz);
1029 }
1030
1031 template <class T>
1032 typename std::enable_if<is_trivially_copyable_v<T>>::type copyInlineTrivial(
1033 small_vector const& o) {
1034 // Copy the entire inline storage, instead of just size() values, to make
1035 // the loop fixed-size and unrollable.
1036 std::copy(o.u.buffer(), o.u.buffer() + MaxInline, u.buffer());
1037 this->setSize(o.size());
1038 }
1039
  // Non-trivially-copyable counterpart; its body is assume_unreachable(),
  // so it exists only to keep the overload set well-formed — presumably
  // call sites are guarded by kShouldCopyInlineTrivial (verify at callers).
  template <class T>
  typename std::enable_if<!is_trivially_copyable_v<T>>::type copyInlineTrivial(
      small_vector const&) {
    assume_unreachable();
  }
1045
  // Returns the vector to a freshly-constructed state: destroys all
  // elements, releases any heap allocation, and resets the size/policy
  // bookkeeping.
  void reset() {
    clear();
    freeHeap();
    this->resetSizePolicy();
  }
1051
  // Inserts the range [first, last) before pos. The std::false_type
  // argument is part of disambiguating the iterator insert functions from
  // integral types (see insert().)
  template <class It>
  iterator insertImpl(iterator pos, It first, It last, std::false_type) {
    using categ = typename std::iterator_traits<It>::iterator_category;
    using it_ref = typename std::iterator_traits<It>::reference;
    if (std::is_same<categ, std::input_iterator_tag>::value) {
      // Single-pass iterators: the count can't be precomputed, so insert
      // one element at a time. pos is refreshed from insert()'s return
      // value because each insertion may reallocate.
      auto offset = pos - begin();
      while (first != last) {
        pos = insert(pos, *first++);
        ++pos;
      }
      return begin() + offset;
    }

    // Multi-pass iterators: compute the count, grow once, then shift the
    // tail right while constructing the new elements in place.
    auto const distance = std::distance(first, last);
    auto const offset = pos - begin();
    auto size_ = size();
    assert(distance >= 0);
    assert(offset >= 0);
    makeSize(size_ + distance);
    detail::moveObjectsRightAndCreate(
        data() + offset,
        data() + size_,
        data() + size_ + distance,
        // The helper consumes values back-to-front, hence the captured
        // iterator walking backwards from last.
        [&, in = last]() mutable -> it_ref { return *--in; });
    this->incrementSize(distance);
    return begin() + offset;
  }
1081
  // Dispatch target for insert(pos, n, val) when the "iterator" arguments
  // turned out to be integral.
  iterator insertImpl(
      iterator pos, size_type n, const value_type& val, std::true_type) {
    // The true_type means this should call the size_t,value_type
    // overload. (See insert().)
    return insert(pos, n, val);
  }
1088
  // Constructor backend that fills the new vector from [first, last).
  // The std::false_type argument came from std::is_arithmetic as part
  // of disambiguating an overload (see the comment in the
  // constructor).
  template <class It>
  void constructImpl(It first, It last, std::false_type) {
    typedef typename std::iterator_traits<It>::iterator_category categ;
    if (std::is_same<categ, std::input_iterator_tag>::value) {
      // With iterators that only allow a single pass, we can't really
      // do anything sane here.
      while (first != last) {
        emplace_back(*first++);
      }
      return;
    }
    size_type distance = std::distance(first, last);
    if (distance <= MaxInline) {
      // Fits in the inline buffer: record the size, then construct each
      // slot in place.
      this->incrementSize(distance);
      detail::populateMemForward(
          u.buffer(), distance, [&](void* p) { new (p) value_type(*first++); });
      return;
    }
    // Spills to the heap: allocate first; if an element constructor throws
    // during population, the guard releases the fresh allocation.
    makeSize(distance);
    this->incrementSize(distance);
    {
      auto rollback = makeGuard([&] { freeHeap(); });
      detail::populateMemForward(
          u.heap(), distance, [&](void* p) { new (p) value_type(*first++); });
      rollback.dismiss();
    }
  }
1119
  // Shared backend for the fill-style constructors: grows to n slots and
  // constructs each one via func(void*). Must only run on an empty vector
  // (assert-checked).
  template <typename InitFunc>
  void doConstruct(size_type n, InitFunc&& func) {
    makeSize(n);
    assert(size() == 0);
    this->incrementSize(n);
    {
      // If an element constructor throws, release any heap block makeSize
      // just acquired.
      auto rollback = makeGuard([&] { freeHeap(); });
      detail::populateMemForward(data(), n, std::forward<InitFunc>(func));
      rollback.dismiss();
    }
  }
1131
  // Constructor backend for n copies of val. The true_type means we should
  // forward to the size_t,value_type overload.
  void constructImpl(size_type n, value_type const& val, std::true_type) {
    doConstruct(n, [&](void* p) { new (p) value_type(val); });
  }
1137
1138 /*
1139 * Compute the size after growth.
1140 */
1141 size_type computeNewSize() const {
1142 return std::min((3 * capacity()) / 2 + 1, max_size());
1143 }
1144
  // Ensures capacity for at least newSize elements; no-op when the current
  // capacity already suffices. Does not change size().
  void makeSize(size_type newSize) {
    if (newSize <= capacity()) {
      return;
    }
    // insert == false, so the emplace callback is never invoked.
    makeSizeInternal(
        newSize, false, [](void*) { assume_unreachable(); }, 0);
  }
1152
  // Grows capacity and, during the same reallocation, emplaces one new
  // element at index pos via emplaceFunc. Only valid when reallocation is
  // guaranteed — asserts the vector is exactly full.
  template <typename EmplaceFunc>
  void makeSize(size_type newSize, EmplaceFunc&& emplaceFunc, size_type pos) {
    assert(size() == capacity());
    makeSizeInternal(
        newSize, true, std::forward<EmplaceFunc>(emplaceFunc), pos);
  }
1159
1160 /*
1161 * Ensure we have a large enough memory region to be size `newSize'.
1162 * Will move/copy elements if we are spilling to heap_ or needed to
1163 * allocate a new region, but if resized in place doesn't initialize
1164 * anything in the new region. In any case doesn't change size().
1165 * Supports insertion of new element during reallocation by given
1166 * pointer to new element and position of new element.
1167 * NOTE: If reallocation is not needed, insert must be false,
1168 * because we only know how to emplace elements into new memory.
1169 */
1170 template <typename EmplaceFunc>
1171 void makeSizeInternal(
1172 size_type newSize,
1173 bool insert,
1174 EmplaceFunc&& emplaceFunc,
1175 size_type pos) {
1176 if (newSize > max_size()) {
1177 throw_exception<std::length_error>("max_size exceeded in small_vector");
1178 }
1179 assert(this->kShouldUseHeap);
1180 // This branch isn't needed for correctness, but allows the optimizer to
1181 // skip generating code for the rest of this function in NoHeap
1182 // small_vectors.
1183 if (!this->kShouldUseHeap) {
1184 return;
1185 }
1186
1187 newSize = std::max(newSize, computeNewSize());
1188
1189 const auto needBytes = newSize * sizeof(value_type);
1190 // If the capacity isn't explicitly stored inline, but the heap
1191 // allocation is grown to over some threshold, we should store
1192 // a capacity at the front of the heap allocation.
1193 const bool heapifyCapacity =
1194 !kHasInlineCapacity && needBytes >= kHeapifyCapacityThreshold;
1195 const size_t allocationExtraBytes =
1196 heapifyCapacity ? kHeapifyCapacitySize : 0;
1197 const size_t goodAllocationSizeBytes =
1198 goodMallocSize(needBytes + allocationExtraBytes);
1199 const size_t newCapacity =
1200 (goodAllocationSizeBytes - allocationExtraBytes) / sizeof(value_type);
1201 // Make sure that the allocation request has a size computable from the
1202 // capacity, instead of using goodAllocationSizeBytes, so that we can do
1203 // sized deallocation. If goodMallocSize() gives us extra bytes that are not
1204 // a multiple of the value size we cannot use them anyway.
1205 const size_t sizeBytes =
1206 newCapacity * sizeof(value_type) + allocationExtraBytes;
1207 void* newh = checkedMalloc(sizeBytes);
1208
1209 value_type* newp = static_cast<value_type*>(
1210 heapifyCapacity ? detail::shiftPointer(newh, kHeapifyCapacitySize)
1211 : newh);
1212
1213 {
1214 auto rollback = makeGuard([&] { //
1215 sizedFree(newh, sizeBytes);
1216 });
1217 if (insert) {
1218 // move and insert the new element
1219 this->moveToUninitializedEmplace(
1220 begin(), end(), newp, pos, std::forward<EmplaceFunc>(emplaceFunc));
1221 } else {
1222 // move without inserting new element
1223 this->moveToUninitialized(begin(), end(), newp);
1224 }
1225 rollback.dismiss();
1226 }
1227 for (auto& val : *this) {
1228 val.~value_type();
1229 }
1230 freeHeap();
1231
1232 // Store shifted pointer if capacity is heapified
1233 u.pdata_.heap_ = newp;
1234 this->setHeapifiedCapacity(heapifyCapacity);
1235 this->setExtern(true);
1236 this->setCapacity(newCapacity);
1237 }
1238
1239 /*
1240 * This will set the capacity field, stored inline in the storage_ field
1241 * if there is sufficient room to store it.
1242 */
1243 void setCapacity(size_type newCapacity) {
1244 assert(this->isExtern());
1245 if (hasCapacity()) {
1246 assert(newCapacity < std::numeric_limits<InternalSizeType>::max());
1247 u.setCapacity(newCapacity);
1248 }
1249 }
1250
 private:
  // Heap pointer with the capacity stored alongside it, used when the
  // inline storage is big enough to also hold a capacity field. Packed on
  // x64/ppc64 (see FOLLY_SV_PACK_ATTR above) — layout matters.
  struct HeapPtrWithCapacity {
    value_type* heap_;
    InternalSizeType capacity_;

    InternalSizeType getCapacity() const { return capacity_; }
    void setCapacity(InternalSizeType c) { capacity_ = c; }
    // No capacity prefix lives inside the allocation itself.
    size_t allocationExtraBytes() const { return 0; }
  } FOLLY_SV_PACK_ATTR;
1260
  // Bare heap pointer; when the capacity is heapified it is stored inside
  // the allocation, immediately before the first element.
  struct HeapPtr {
    // heap_[-kHeapifyCapacitySize] contains capacity
    value_type* heap_;

    InternalSizeType getCapacity() const {
      return *static_cast<InternalSizeType*>(
          detail::unshiftPointer(heap_, kHeapifyCapacitySize));
    }
    void setCapacity(InternalSizeType c) {
      *static_cast<InternalSizeType*>(
          detail::unshiftPointer(heap_, kHeapifyCapacitySize)) = c;
    }
    size_t allocationExtraBytes() const { return kHeapifyCapacitySize; }
  } FOLLY_SV_PACK_ATTR;
1275
  // Raw, suitably aligned bytes for MaxInline elements.
  typedef aligned_storage_for_t<value_type[MaxInline]> InlineStorageDataType;

  // Degenerate case: with zero inline bytes the union member must still be
  // a valid type, so fall back to a bare pointer.
  typedef typename std::conditional<
      sizeof(value_type) * MaxInline != 0,
      InlineStorageDataType,
      value_type*>::type InlineStorageType;

  // If the values are trivially copyable and the storage is small enough, copy
  // it entirely. Limit is half of a cache line, to minimize probability of
  // introducing a cache miss.
  static constexpr bool kShouldCopyInlineTrivial =
      is_trivially_copyable_v<Value> &&
      sizeof(InlineStorageType) <= hardware_constructive_interference_size / 2;

  static bool constexpr kHasInlineCapacity =
      sizeof(HeapPtrWithCapacity) < sizeof(InlineStorageType);

  // Size in bytes of the capacity prefix placed at the front of a heapified
  // allocation. This value should be a multiple of the word size.
  static size_t constexpr kHeapifyCapacitySize = sizeof(
      typename std::
          aligned_storage<sizeof(InternalSizeType), alignof(value_type)>::type);
1297
  // Probes the usable size of a heap allocation. Its *deduced* return type
  // doubles as a compile-time feature flag — see the note in the body.
  struct AllocationSize {
    auto operator()(void* ptr) const {
      (void)ptr;
#if defined(FOLLY_HAVE_MALLOC_USABLE_SIZE)
      return malloc_usable_size(ptr);
#endif
      // it is important that this method not return a size_t if we can't call
      // malloc_usable_size! kMustTrackHeapifiedCapacity uses the deduced return
      // type of this function in order to decide whether small_vector must
      // track its own capacity or not.
    }
  };
1310
  // True when malloc_usable_size is unavailable (AllocationSize deduces
  // void): the vector must then record heapified capacities itself.
  static bool constexpr kMustTrackHeapifiedCapacity =
      !is_invocable_r_v<size_t, AllocationSize, void*>;

  // Threshold to control capacity heapifying. Zero when tracking is
  // mandatory (every heap allocation gets a capacity prefix); otherwise
  // 100x the prefix size, so the prefix overhead stays negligible.
  static size_t constexpr kHeapifyCapacityThreshold =
      (kMustTrackHeapifiedCapacity ? 0 : 100) * kHeapifyCapacitySize;

  // Capacity is always readable when stored inline or always heapified.
  static bool constexpr kAlwaysHasCapacity =
      kHasInlineCapacity || kMustTrackHeapifiedCapacity;

  // Heap representation: inline-capacity variant when it fits, otherwise
  // the bare pointer with an in-allocation capacity prefix.
  typedef typename std::
      conditional<kHasInlineCapacity, HeapPtrWithCapacity, HeapPtr>::type
          PointerType;
1324
  // True when a stored capacity value can be read back: either always
  // (inline slot / zero threshold), or when this particular allocation was
  // heapified with a capacity prefix.
  bool hasCapacity() const {
    return kAlwaysHasCapacity || !kHeapifyCapacityThreshold ||
        this->isHeapifiedCapacity();
  }
1329
  // Releases the heap allocation, if any. Does NOT destroy elements or
  // update the size/extern bits — callers handle that.
  void freeHeap() {
    if (this->isExtern()) {
      if (hasCapacity()) {
        // Capacity is known, so use sized deallocation; rewind past the
        // capacity prefix (if any) to the true allocation start.
        auto extraBytes = u.pdata_.allocationExtraBytes();
        auto vp = detail::unshiftPointer(u.pdata_.heap_, extraBytes);
        sizedFree(vp, u.getCapacity() * sizeof(value_type) + extraBytes);
      } else {
        free(u.pdata_.heap_);
      }
    }
  }
1341
  // Overlays the heap pointer (plus optional capacity) with the inline
  // element storage; which member is active is tracked externally via the
  // isExtern() bit.
  union Data {
    explicit Data() { pdata_.heap_ = nullptr; }

    PointerType pdata_;
    InlineStorageType storage_;

    // Inline element storage, viewed as value_type*.
    value_type* buffer() noexcept {
      void* vp = &storage_;
      return static_cast<value_type*>(vp);
    }
    value_type const* buffer() const noexcept {
      return const_cast<Data*>(this)->buffer();
    }
    // First element of the heap allocation (past any capacity prefix).
    value_type* heap() noexcept { return pdata_.heap_; }
    value_type const* heap() const noexcept { return pdata_.heap_; }

    InternalSizeType getCapacity() const { return pdata_.getCapacity(); }
    void setCapacity(InternalSizeType c) { pdata_.setCapacity(c); }

  } u;
1362 };
1363 FOLLY_SV_PACK_POP
1364
1365 //////////////////////////////////////////////////////////////////////
1366
// ADL-found free swap; forwards to the member swap.
// Basic guarantee only, or provides the nothrow guarantee iff T has a
// nothrow move or copy constructor.
template <class T, std::size_t MaxInline, class A, class B, class C>
void swap(
    small_vector<T, MaxInline, A, B, C>& a,
    small_vector<T, MaxInline, A, B, C>& b) {
  a.swap(b);
}
1375
1376 template <class T, std::size_t MaxInline, class A, class B, class C, class U>
1377 void erase(small_vector<T, MaxInline, A, B, C>& v, U value) {
1378 v.erase(std::remove(v.begin(), v.end(), value), v.end());
1379 }
1380
1381 template <
1382 class T,
1383 std::size_t MaxInline,
1384 class A,
1385 class B,
1386 class C,
1387 class Predicate>
1388 void erase_if(small_vector<T, MaxInline, A, B, C>& v, Predicate predicate) {
1389 v.erase(std::remove_if(v.begin(), v.end(), predicate), v.end());
1390 }
1391
1392 //////////////////////////////////////////////////////////////////////
1393
1394 namespace detail {
1395
// Format support: expose small_vector to folly's formatting machinery as
// an indexable sequence.
template <class T, size_t M, class A, class B, class C>
struct IndexableTraits<small_vector<T, M, A, B, C>>
    : public IndexableTraitsSeq<small_vector<T, M, A, B, C>> {};
1400
1401 } // namespace detail
1402
1403 } // namespace folly
1404
1405 FOLLY_POP_WARNING
1406
1407 #undef FOLLY_SV_PACK_ATTR
1408 #undef FOLLY_SV_PACK_PUSH
1409 #undef FOLLY_SV_PACK_POP
1410