//! The arena, a fast but limited type of allocator.
//!
//! **A fast (but limited) allocation arena for values of a single type.**
//!
//! Allocated objects are destroyed all at once, when the arena itself is
//! destroyed. There is no deallocation of individual objects while the arena
//! itself is still alive. The flipside is that allocation is fast: typically
//! just a vector push.
//!
//! There is also a method `into_vec()` to recover ownership of allocated
//! objects when the arena is no longer required, instead of destroying
//! everything.
//!
//! ## Example
//!
//! ```
//! use typed_arena::Arena;
//!
//! struct Monster {
//!     level: u32,
//! }
//!
//! let monsters = Arena::new();
//!
//! let vegeta = monsters.alloc(Monster { level: 9001 });
//! assert!(vegeta.level > 9000);
//! ```
//!
//! ## Safe Cycles
//!
//! All allocated objects get the same lifetime, so you can safely create cycles
//! between them. This can be useful for certain data structures, such as graphs
//! and trees with parent pointers.
//!
//! ```
//! use std::cell::Cell;
//! use typed_arena::Arena;
//!
//! struct CycleParticipant<'a> {
//!     other: Cell<Option<&'a CycleParticipant<'a>>>,
//! }
//!
//! let arena = Arena::new();
//!
//! let a = arena.alloc(CycleParticipant { other: Cell::new(None) });
//! let b = arena.alloc(CycleParticipant { other: Cell::new(None) });
//!
//! a.other.set(Some(b));
//! b.other.set(Some(a));
//! ```

// Potential optimizations:
// 1) add and stabilize a method for in-place reallocation of vecs.
// 2) add and stabilize placement new.
// 3) use an iterator. This may add far too much unsafe code.

#![deny(missing_docs)]
#![cfg_attr(not(any(feature = "std", test)), no_std)]

#[cfg(not(feature = "std"))]
extern crate alloc;

#[cfg(any(feature = "std", test))]
extern crate core;

#[cfg(not(feature = "std"))]
use alloc::vec::Vec;

use core::cell::RefCell;
use core::cmp;
use core::iter;
use core::mem;
use core::slice;
use core::str;

use mem::MaybeUninit;

#[cfg(test)]
mod test;

// Initial size in bytes.
const INITIAL_SIZE: usize = 1024;
// Minimum capacity. Must be larger than 0.
const MIN_CAPACITY: usize = 1;

/// An arena of objects of type `T`.
///
/// ## Example
///
/// ```
/// use typed_arena::Arena;
///
/// struct Monster {
///     level: u32,
/// }
///
/// let monsters = Arena::new();
///
/// let vegeta = monsters.alloc(Monster { level: 9001 });
/// assert!(vegeta.level > 9000);
/// ```
pub struct Arena<T> {
    chunks: RefCell<ChunkList<T>>,
}

struct ChunkList<T> {
    current: Vec<T>,
    rest: Vec<Vec<T>>,
}

impl<T> Arena<T> {
    /// Construct a new arena.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::new();
    /// # arena.alloc(1);
    /// ```
    pub fn new() -> Arena<T> {
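        // Size the first chunk so it spans roughly INITIAL_SIZE bytes;
        // taking `max(1, size_of::<T>())` keeps the division meaningful
        // for zero-sized types.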
        let size = cmp::max(1, mem::size_of::<T>());
        Arena::with_capacity(INITIAL_SIZE / size)
    }

    /// Construct a new arena with capacity for `n` values pre-allocated.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::with_capacity(1337);
    /// # arena.alloc(1);
    /// ```
    pub fn with_capacity(n: usize) -> Arena<T> {
        let n = cmp::max(MIN_CAPACITY, n);
        Arena {
            chunks: RefCell::new(ChunkList {
                current: Vec::with_capacity(n),
                rest: Vec::new(),
            }),
        }
    }

    /// Returns the number of values allocated in the arena.
    ///
    /// This is useful for sizing a new arena from the final size of a previous one, so that the
    /// new arena has enough space pre-allocated.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::with_capacity(0);
    /// let a = arena.alloc(1);
    /// let b = arena.alloc(2);
    ///
    /// assert_eq!(arena.len(), 2);
    /// ```
    pub fn len(&self) -> usize {
        let chunks = self.chunks.borrow();

        let mut res = 0;
        for vec in chunks.rest.iter() {
            res += vec.len()
        }

        res + chunks.current.len()
    }

    /// Allocates a value in the arena, and returns a mutable reference
    /// to that value.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::new();
    /// let x = arena.alloc(42);
    /// assert_eq!(*x, 42);
    /// ```
    #[inline]
    pub fn alloc(&self, value: T) -> &mut T {
        self.alloc_fast_path(value)
            .unwrap_or_else(|value| self.alloc_slow_path(value))
    }

    #[inline]
    fn alloc_fast_path(&self, value: T) -> Result<&mut T, T> {
        let mut chunks = self.chunks.borrow_mut();
        let len = chunks.current.len();
        if len < chunks.current.capacity() {
            chunks.current.push(value);
            // Avoid going through `Vec::deref_mut`, which overlaps
            // other references we have already handed out!
            debug_assert!(len < chunks.current.len()); // bounds check
            Ok(unsafe { &mut *chunks.current.as_mut_ptr().add(len) })
        } else {
            Err(value)
        }
    }

    fn alloc_slow_path(&self, value: T) -> &mut T {
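        // The current chunk is full, so fall back to `alloc_extend`, which
        // grows a fresh chunk; the pushed value is the single element of the
        // returned slice.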
        &mut self.alloc_extend(iter::once(value))[0]
    }

    /// Uses the contents of an iterator to allocate values in the arena.
    /// Returns a mutable slice that contains these values.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::new();
    /// let abc = arena.alloc_extend("abcdefg".chars().take(3));
    /// assert_eq!(abc, ['a', 'b', 'c']);
    /// ```
    pub fn alloc_extend<I>(&self, iterable: I) -> &mut [T]
    where
        I: IntoIterator<Item = T>,
    {
        let mut iter = iterable.into_iter();

        let mut chunks = self.chunks.borrow_mut();

        let iter_min_len = iter.size_hint().0;
        let mut next_item_index;
        debug_assert!(
            chunks.current.capacity() >= chunks.current.len(),
            "capacity is always greater than or equal to len, so we don't need to worry about underflow"
        );
        if iter_min_len > chunks.current.capacity() - chunks.current.len() {
            chunks.reserve(iter_min_len);
            chunks.current.extend(iter);
            next_item_index = 0;
        } else {
            next_item_index = chunks.current.len();
            let mut i = 0;
            while let Some(elem) = iter.next() {
                if chunks.current.len() == chunks.current.capacity() {
                    // The iterator was larger than we could fit into the current chunk.
                    let chunks = &mut *chunks;
                    // Create a new chunk into which we can freely push the entire iterator
                    chunks.reserve(i + 1);
                    let previous_chunk = chunks.rest.last_mut().unwrap();
                    let previous_chunk_len = previous_chunk.len();
                    // Move any elements we put into the previous chunk into this new chunk
                    chunks
                        .current
                        .extend(previous_chunk.drain(previous_chunk_len - i..));
                    chunks.current.push(elem);
                    // And the remaining elements in the iterator
                    chunks.current.extend(iter);
                    next_item_index = 0;
                    break;
                } else {
                    chunks.current.push(elem);
                }
                i += 1;
            }
        }
        let new_slice_ref = &mut chunks.current[next_item_index..];

        // Extend the lifetime from that of `chunks_borrow` to that of `self`.
        // This is OK because we’re careful to never move items
        // by never pushing to inner `Vec`s beyond their initial capacity.
        // The returned reference is unique (`&mut`):
        // the `Arena` never gives away references to existing items.
        unsafe { mem::transmute::<&mut [T], &mut [T]>(new_slice_ref) }
    }

    /// Allocates space for a given number of values, but doesn't initialize it.
    ///
    /// ## Safety
    ///
    /// After calling this method, the arena considers the elements initialized. If you fail to
    /// initialize them (including when initialization code panics), the arena will run
    /// destructors on the uninitialized memory. Therefore, you must initialize them.
    ///
    /// Considering how easy it is to cause undefined behaviour using this, you're advised to
    /// prefer the other (safe) methods, like [`alloc_extend`][Arena::alloc_extend].
    ///
    /// ## Example
    ///
    /// ```rust
    /// use std::mem::{self, MaybeUninit};
    /// use std::ptr;
    /// use typed_arena::Arena;
    ///
    /// // Transmute from MaybeUninit slice to slice of initialized T.
    /// // It is a separate function to preserve the lifetime of the reference.
    /// unsafe fn transmute_uninit<A>(r: &mut [MaybeUninit<A>]) -> &mut [A] {
    ///     mem::transmute(r)
    /// }
    ///
    /// let arena: Arena<bool> = Arena::new();
    /// let slice: &mut [bool];
    /// unsafe {
    ///     let uninitialized = arena.alloc_uninitialized(10);
    ///     for elem in uninitialized.iter_mut() {
    ///         ptr::write(elem.as_mut_ptr(), true);
    ///     }
    ///     slice = transmute_uninit(uninitialized);
    /// }
    /// ```
    ///
    /// ## Alternative allocation pattern
    ///
    /// To avoid the problem of dropping assumed-to-be-initialized elements on panic, it is also
    /// possible to combine [`reserve_extend`][Arena::reserve_extend] with
    /// [`uninitialized_array`][Arena::uninitialized_array], initialize the elements, and then
    /// confirm them with this method. In that case, if there's a panic during initialization,
    /// the already initialized elements would leak, but it wouldn't cause UB.
    ///
    /// ```rust
    /// use std::mem::{self, MaybeUninit};
    /// use std::ptr;
    /// use typed_arena::Arena;
    ///
    /// unsafe fn transmute_uninit<A>(r: &mut [MaybeUninit<A>]) -> &mut [A] {
    ///     mem::transmute(r)
    /// }
    ///
    /// const COUNT: usize = 2;
    ///
    /// let arena: Arena<String> = Arena::new();
    ///
    /// arena.reserve_extend(COUNT);
    /// let slice: &mut [String];
    /// unsafe {
    ///     // Perform initialization before we claim the memory.
    ///     let uninitialized = arena.uninitialized_array();
    ///     assert!((*uninitialized).len() >= COUNT); // Ensured by the reserve_extend
    ///     for elem in &mut (*uninitialized)[..COUNT] {
    ///         ptr::write(elem.as_mut_ptr(), "Hello".to_owned());
    ///     }
    ///     let addr = (*uninitialized).as_ptr() as usize;
    ///
    ///     // alloc_uninitialized returns the same memory, but "confirms" its allocation.
    ///     slice = transmute_uninit(arena.alloc_uninitialized(COUNT));
    ///     assert_eq!(addr, slice.as_ptr() as usize);
    ///     assert_eq!(slice, &["Hello".to_owned(), "Hello".to_owned()]);
    /// }
    /// ```
    pub unsafe fn alloc_uninitialized(&self, num: usize) -> &mut [MaybeUninit<T>] {
        let mut chunks = self.chunks.borrow_mut();

        debug_assert!(
            chunks.current.capacity() >= chunks.current.len(),
            "capacity is always greater than or equal to len, so we don't need to worry about underflow"
        );
        if num > chunks.current.capacity() - chunks.current.len() {
            chunks.reserve(num);
        }

        // At this point, the current chunk must have free capacity.
        let next_item_index = chunks.current.len();
        chunks.current.set_len(next_item_index + num);

        // Go through pointers, to make sure we never create a reference to uninitialized T.
        let start = chunks.current.as_mut_ptr().add(next_item_index);
        let start_uninit = start as *mut MaybeUninit<T>;
        slice::from_raw_parts_mut(start_uninit, num)
    }

    /// Makes sure there's enough contiguous space for at least `num` elements.
    ///
    /// This may save some work if called before [`alloc_extend`][Arena::alloc_extend]. It also
    /// allows a somewhat safer use pattern of [`alloc_uninitialized`][Arena::alloc_uninitialized].
    /// On the other hand, this might waste up to `num - 1` elements of space: if a new chunk has
    /// to be allocated, the unused capacity of the current chunk is never used.
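    ///
    /// ## Example
    ///
    /// A minimal sketch of reserving ahead of a bulk insert (the element type and count here
    /// are arbitrary):
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena: Arena<u32> = Arena::new();
    ///
    /// // Reserve contiguous space up front so the extend below cannot need
    /// // to grow a fresh chunk mid-iteration.
    /// arena.reserve_extend(10);
    /// let slice = arena.alloc_extend(0..10);
    /// assert_eq!(slice.len(), 10);
    /// ```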
    pub fn reserve_extend(&self, num: usize) {
        let mut chunks = self.chunks.borrow_mut();

        debug_assert!(
            chunks.current.capacity() >= chunks.current.len(),
            "capacity is always greater than or equal to len, so we don't need to worry about underflow"
        );
        if num > chunks.current.capacity() - chunks.current.len() {
            chunks.reserve(num);
        }
    }

    /// Returns unused space.
    ///
    /// *This unused space is still not considered "allocated".* Therefore, it
    /// won't be dropped unless there are further calls to `alloc`,
    /// [`alloc_uninitialized`][Arena::alloc_uninitialized], or
    /// [`alloc_extend`][Arena::alloc_extend], which is why this method is safe.
    ///
    /// It returns a raw pointer to avoid creating multiple mutable references to the same place.
    /// It is up to the caller not to dereference it after any of the `alloc_` methods are called.
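    ///
    /// ## Example
    ///
    /// A small sketch of inspecting the spare capacity; the reserved count of `16` is arbitrary:
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena: Arena<u8> = Arena::new();
    /// arena.reserve_extend(16);
    ///
    /// // The pointer covers the current chunk's unused capacity. Reading its
    /// // length is fine; the elements themselves are still uninitialized.
    /// let spare = arena.uninitialized_array();
    /// assert!(unsafe { (*spare).len() } >= 16);
    /// ```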
    pub fn uninitialized_array(&self) -> *mut [MaybeUninit<T>] {
        let mut chunks = self.chunks.borrow_mut();
        let len = chunks.current.capacity() - chunks.current.len();
        let next_item_index = chunks.current.len();

        unsafe {
            // Go through pointers, to make sure we never create a reference to uninitialized T.
            let start = chunks.current.as_mut_ptr().add(next_item_index);
            let start_uninit = start as *mut MaybeUninit<T>;
            slice::from_raw_parts_mut(start_uninit, len) as *mut _
        }
    }

    /// Convert this `Arena` into a `Vec<T>`.
    ///
    /// Items in the resulting `Vec<T>` appear in the order that they were
    /// allocated in.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::new();
    ///
    /// arena.alloc("a");
    /// arena.alloc("b");
    /// arena.alloc("c");
    ///
    /// let easy_as_123 = arena.into_vec();
    ///
    /// assert_eq!(easy_as_123, vec!["a", "b", "c"]);
    /// ```
    pub fn into_vec(self) -> Vec<T> {
        let mut chunks = self.chunks.into_inner();
        // keep order of allocation in the resulting Vec
        let n = chunks
            .rest
            .iter()
            .fold(chunks.current.len(), |a, v| a + v.len());
        let mut result = Vec::with_capacity(n);
        for mut vec in chunks.rest {
            result.append(&mut vec);
        }
        result.append(&mut chunks.current);
        result
    }

    /// Returns an iterator that allows modifying each value.
    ///
    /// Items are yielded in the order that they were allocated.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// #[derive(Debug, PartialEq, Eq)]
    /// struct Point { x: i32, y: i32 }
    ///
    /// let mut arena = Arena::new();
    ///
    /// arena.alloc(Point { x: 0, y: 0 });
    /// arena.alloc(Point { x: 1, y: 1 });
    ///
    /// for point in arena.iter_mut() {
    ///     point.x += 10;
    /// }
    ///
    /// let points = arena.into_vec();
    ///
    /// assert_eq!(points, vec![Point { x: 10, y: 0 }, Point { x: 11, y: 1 }]);
    /// ```
    ///
    /// ## Immutable Iteration
    ///
    /// Note that there is no corresponding `iter` method. Access to the arena's contents
    /// requires mutable access to the arena itself.
    ///
    /// ```compile_fail
    /// use typed_arena::Arena;
    ///
    /// let mut arena = Arena::new();
    /// let x = arena.alloc(1);
    ///
    /// // borrow error!
    /// for i in arena.iter_mut() {
    ///     println!("i: {}", i);
    /// }
    ///
    /// // borrow error!
    /// *x = 2;
    /// ```
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<T> {
        let chunks = self.chunks.get_mut();
        let position = if !chunks.rest.is_empty() {
            let index = 0;
            let inner_iter = chunks.rest[index].iter_mut();
            // Extend the lifetime of the individual elements to that of the arena.
            // This is OK because we borrow the arena mutably to prevent new allocations
            // and we take care here to never move items inside the arena while the
            // iterator is alive.
            let inner_iter = unsafe { mem::transmute(inner_iter) };
            IterMutState::ChunkListRest { index, inner_iter }
        } else {
            // Extend the lifetime of the individual elements to that of the arena.
            let iter = unsafe { mem::transmute(chunks.current.iter_mut()) };
            IterMutState::ChunkListCurrent { iter }
        };
        IterMut {
            chunks,
            state: position,
        }
    }
}

impl Arena<u8> {
    /// Allocates a string slice and returns a mutable reference to it.
    ///
    /// This is on `Arena<u8>`, because string slices use byte slices (`[u8]`) as their backing
    /// storage.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena: Arena<u8> = Arena::new();
    /// let hello = arena.alloc_str("Hello world");
    /// assert_eq!("Hello world", hello);
    /// ```
    #[inline]
    pub fn alloc_str(&self, s: &str) -> &mut str {
        let buffer = self.alloc_extend(s.bytes());
        // Can't fail the utf8 validation, it already came in as utf8
        unsafe { str::from_utf8_unchecked_mut(buffer) }
    }
}

impl<T> Default for Arena<T> {
    fn default() -> Self {
        Self::new()
    }
}

impl<T> ChunkList<T> {
    #[inline(never)]
    #[cold]
    fn reserve(&mut self, additional: usize) {
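        // Grow exponentially: take either double the current chunk's capacity
        // or the requested amount rounded up to a power of two, whichever is
        // larger, so repeated reserves stay amortized cheap.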
        let double_cap = self
            .current
            .capacity()
            .checked_mul(2)
            .expect("capacity overflow");
        let required_cap = additional
            .checked_next_power_of_two()
            .expect("capacity overflow");
        let new_capacity = cmp::max(double_cap, required_cap);
        let chunk = mem::replace(&mut self.current, Vec::with_capacity(new_capacity));
        self.rest.push(chunk);
    }
}

enum IterMutState<'a, T> {
    ChunkListRest {
        index: usize,
        inner_iter: slice::IterMut<'a, T>,
    },
    ChunkListCurrent {
        iter: slice::IterMut<'a, T>,
    },
}

/// Mutable arena iterator.
///
/// This struct is created by the [`iter_mut`](struct.Arena.html#method.iter_mut) method on [Arenas](struct.Arena.html).
pub struct IterMut<'a, T: 'a> {
    chunks: &'a mut ChunkList<T>,
    state: IterMutState<'a, T>,
}

impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;
    fn next(&mut self) -> Option<&'a mut T> {
        loop {
            self.state = match self.state {
                IterMutState::ChunkListRest {
                    mut index,
                    ref mut inner_iter,
                } => {
                    match inner_iter.next() {
                        Some(item) => return Some(item),
                        None => {
                            index += 1;
                            if index < self.chunks.rest.len() {
                                let inner_iter = self.chunks.rest[index].iter_mut();
                                // Extend the lifetime of the individual elements to that of the arena.
                                let inner_iter = unsafe { mem::transmute(inner_iter) };
                                IterMutState::ChunkListRest { index, inner_iter }
                            } else {
                                let iter = self.chunks.current.iter_mut();
                                // Extend the lifetime of the individual elements to that of the arena.
                                let iter = unsafe { mem::transmute(iter) };
                                IterMutState::ChunkListCurrent { iter }
                            }
                        }
                    }
                }
                IterMutState::ChunkListCurrent { ref mut iter } => return iter.next(),
            };
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
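        // A cheap O(1) estimate based on chunk lengths and capacities rather
        // than an exact count of the remaining items.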
        let current_len = self.chunks.current.len();
        let current_cap = self.chunks.current.capacity();
        if self.chunks.rest.is_empty() {
            (current_len, Some(current_len))
        } else {
            let rest_len = self.chunks.rest.len();
            let last_chunk_len = self
                .chunks
                .rest
                .last()
                .map(|chunk| chunk.len())
                .unwrap_or(0);

            let min = current_len + last_chunk_len;
            let max = min + (rest_len * current_cap / rest_len);

            (min, Some(max))
        }
    }
}