1 // Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors
2 // Licensed under the MIT License:
3 //
4 // Permission is hereby granted, free of charge, to any person obtaining a copy
5 // of this software and associated documentation files (the "Software"), to deal
6 // in the Software without restriction, including without limitation the rights
7 // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8 // copies of the Software, and to permit persons to whom the Software is
9 // furnished to do so, subject to the following conditions:
10 //
11 // The above copyright notice and this permission notice shall be included in
12 // all copies or substantial portions of the Software.
13 //
14 // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19 // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
20 // THE SOFTWARE.
21 
22 //! Untyped root container for a Cap'n Proto value.
23 //!
24 //! ## Notes about type specialization
25 //! This module provides [TypedReader] and [TypedBuilder] structs which are strongly-typed variants
26 //! of [Reader] and [Builder].
27 //!
//! Code autogenerated by capnpc will have an individual module for each of the structures, and
//! each of these modules will have an `Owned` struct which implements the [capnp::traits::Owned] trait.
30 //!
31 //! Example from a real auto-generated file:
32 //!
33 //! ```ignore
34 //! pub mod simple_struct {
35 //!     #[derive(Copy, Clone)]
36 //!     pub struct Owned(());
37 //!     impl <'a> ::capnp::traits::Owned<'a> for Owned { type Reader = Reader<'a>; type Builder = Builder<'a>; }
38 //!     ....
39 //! }
40 //! ```
41 //!
42 //! [TypedReader] and [TypedBuilder] accept generic type parameter `T`. This parameter must be
43 //! a corresponding `Owned` type which was auto-generated inside the corresponding module.
44 //!
45 //! For example, for auto-generated module `crate::test_data::simple_struct` you'd supply
46 //! `crate::test_data::simple_struct::Owned` type into [TypedReader]/[TypedBuilder]
47 //!
48 //! ```ignore
49 //! include!(concat!(env!("OUT_DIR"), "/simple_struct_capnp.rs"));
50 //!
51 //! use capnp::message::{self, TypedBuilder, TypedReader};
52 //!
53 //! fn main() {
54 //!     let mut builder = TypedBuilder::<simple_struct::Owned>::new_default();
55 //!     let mut builder_root = builder.init_root();
56 //!     builder_root.set_x(10);
57 //!     builder_root.set_y(20);
58 //!
59 //!     let mut buffer = vec![];
60 //!     capnp::serialize_packed::write_message(&mut buffer, builder.borrow_inner()).unwrap();
61 //!
62 //!     let reader = capnp::serialize_packed::read_message(buffer.as_slice(), ReaderOptions::new()).unwrap();
63 //!     let typed_reader = TypedReader::<_, simple_struct::Owned>::new(reader);
64 //!
65 //!     let reader_root = typed_reader.get().unwrap();
66 //!     assert_eq!(reader_root.get_x(), 10);
//!     assert_eq!(reader_root.get_y(), 20);
68 //! }
69 //!
70 //! ```
71 use alloc::vec::Vec;
72 use core::convert::From;
73 
74 use crate::any_pointer;
75 use crate::private::arena::{BuilderArenaImpl, ReaderArenaImpl, BuilderArena, ReaderArena};
76 use crate::private::layout;
77 use crate::private::units::BYTES_PER_WORD;
78 use crate::traits::{FromPointerReader, FromPointerBuilder, SetPointerBuilder, Owned};
79 use crate::{OutputSegments, Result};
80 
/// Options controlling how data is read.
#[derive(Clone, Copy, Debug)]
pub struct ReaderOptions {

    /// Limits how many total (8-byte) words of data are allowed to be traversed. Traversal is counted
    /// when a new struct or list builder is obtained, e.g. from a get() accessor. This means that
    /// calling the getter for the same sub-struct multiple times will cause it to be double-counted.
    /// Once the traversal limit is reached, an error will be reported.
    ///
    /// This limit exists for security reasons. It is possible for an attacker to construct a message
    /// in which multiple pointers point at the same location. This is technically invalid, but hard
    /// to detect. Using such a message, an attacker could cause a message which is small on the wire
    /// to appear much larger when actually traversed, possibly exhausting server resources leading to
    /// denial-of-service.
    ///
    /// It makes sense to set a traversal limit that is much larger than the underlying message.
    /// Together with sensible coding practices (e.g. trying to avoid calling sub-object getters
    /// multiple times, which is expensive anyway), this should provide adequate protection without
    /// inconvenience.
    ///
    /// A traversal limit of `None` means that no limit is enforced.
    pub traversal_limit_in_words: Option<usize>,

    /// Limits how deeply nested a message structure can be, e.g. structs containing other structs or
    /// lists of structs.
    ///
    /// Like the traversal limit, this limit exists for security reasons. Since it is common to use
    /// recursive code to traverse recursive data structures, an attacker could easily cause a stack
    /// overflow by sending a very-deeply-nested (or even cyclic) message, without the message even
    /// being very large. The default limit of 64 is probably low enough to prevent any chance of
    /// stack overflow, yet high enough that it is never a problem in practice.
    pub nesting_limit: i32,
}
114 
/// Default reader options: an 8M-word (64 MiB) traversal limit and a nesting limit of 64.
pub const DEFAULT_READER_OPTIONS: ReaderOptions =
    ReaderOptions { traversal_limit_in_words: Some(8 * 1024 * 1024), nesting_limit: 64 };
117 
118 
impl Default for ReaderOptions {
    /// Returns [DEFAULT_READER_OPTIONS].
    fn default() -> ReaderOptions {
        DEFAULT_READER_OPTIONS
    }
}
124 
impl ReaderOptions {
    /// Returns the default options; equivalent to `ReaderOptions::default()`.
    pub fn new() -> ReaderOptions { DEFAULT_READER_OPTIONS }

    /// Sets the nesting limit, returning `self` so calls can be chained.
    pub fn nesting_limit<'a>(&'a mut self, value: i32) -> &'a mut ReaderOptions {
        self.nesting_limit = value;
        self
    }

    /// Sets the traversal limit (`None` disables the limit), returning `self`
    /// so calls can be chained.
    pub fn traversal_limit_in_words<'a>(&'a mut self, value: Option<usize>) -> &'a mut ReaderOptions {
        self.traversal_limit_in_words = value;
        self
    }
}
138 
/// An object that manages the buffers underlying a Cap'n Proto message reader.
pub trait ReaderSegments {
    /// Gets the segment with index `idx`. Returns `None` if `idx` is out of range.
    ///
    /// The segment must be 8-byte aligned or the "unaligned" feature must
    /// be enabled in the capnp crate. (Otherwise reading the segment will return an error.)
    ///
    /// The returned slice is required to point to memory that remains valid until the ReaderSegments
    /// object is dropped. In safe Rust, it should not be possible to violate this requirement.
    fn get_segment<'a>(&'a self, idx: u32) -> Option<&'a [u8]>;

    /// Gets the number of segments, by probing `get_segment()` with successive
    /// indices until it returns `None`.
    fn len(&self) -> usize {
        let mut count: usize = 0;
        while self.get_segment(count as u32).is_some() {
            count += 1;
        }
        count
    }
}
160 
161 impl <S> ReaderSegments for &S where S: ReaderSegments {
get_segment<'a>(&'a self, idx: u32) -> Option<&'a [u8]>162     fn get_segment<'a>(&'a self, idx: u32) -> Option<&'a [u8]> {
163         (**self).get_segment(idx)
164     }
165 
len(&self) -> usize166     fn len(&self) -> usize {
167         (**self).len()
168     }
169 }
170 
/// An array of segments.
pub struct SegmentArray<'a> {
    // Borrowed list of segments; each inner slice holds one segment's bytes.
    segments: &'a [&'a [u8]],
}
175 
176 impl <'a> SegmentArray<'a> {
new(segments: &'a [&'a [u8]]) -> SegmentArray<'a>177     pub fn new(segments: &'a [&'a [u8]]) -> SegmentArray<'a> {
178         SegmentArray { segments: segments }
179     }
180 }
181 
182 impl <'b> ReaderSegments for SegmentArray<'b> {
get_segment<'a>(&'a self, id: u32) -> Option<&'a [u8]>183     fn get_segment<'a>(&'a self, id: u32) -> Option<&'a [u8]> {
184         self.segments.get(id as usize).map(|slice| *slice)
185     }
186 
len(&self) -> usize187     fn len(&self) -> usize {
188         self.segments.len()
189     }
190 }
191 
192 impl <'b> ReaderSegments for [&'b [u8]] {
get_segment<'a>(&'a self, id: u32) -> Option<&'a [u8]>193     fn get_segment<'a>(&'a self, id: u32) -> Option<&'a [u8]> {
194         self.get(id as usize).map(|slice| *slice)
195     }
196 
len(&self) -> usize197     fn len(&self) -> usize {
198         self.len()
199     }
200 }
201 
/// A container used to read a message.
pub struct Reader<S> where S: ReaderSegments {
    // Arena that owns the segment source and enforces the reader options.
    arena: ReaderArenaImpl<S>,
}
206 
impl <S> Reader<S> where S: ReaderSegments {
    /// Constructs a reader over `segments`, enforcing the traversal and nesting
    /// limits in `options`.
    pub fn new(segments: S, options: ReaderOptions) -> Self {
        Reader {
            arena: ReaderArenaImpl::new(segments, options),
        }
    }

    /// Reads the root pointer (the first word of segment 0) as an untyped `any_pointer`.
    fn get_root_internal<'a>(&'a self) -> Result<any_pointer::Reader<'a>> {
        let (segment_start, _seg_len) = self.arena.get_segment(0)?;
        let pointer_reader = layout::PointerReader::get_root(
            &self.arena, 0, segment_start, self.arena.nesting_limit())?;
        Ok(any_pointer::Reader::new(pointer_reader))
    }

    /// Gets the root of the message, interpreting it as the given type.
    pub fn get_root<'a, T: FromPointerReader<'a>>(&'a self) -> Result<T> {
        self.get_root_internal()?.get_as()
    }

    /// Consumes the reader, returning the underlying segment source.
    pub fn into_segments(self) -> S {
        self.arena.into_segments()
    }

    /// Checks whether the message is [canonical](https://capnproto.org/encoding.html#canonicalization).
    pub fn is_canonical(&self) -> Result<bool> {
        let (segment_start, seg_len) = self.arena.get_segment(0)?;

        if self.arena.get_segment(1).is_ok() {
            // TODO(cleanup, apibump): should there be a nicer way to ask the arena how many
            // segments there are?

            // There is more than one segment, so the message cannot be canonical.
            return Ok(false)
        }

        let pointer_reader = layout::PointerReader::get_root(
            &self.arena, 0, segment_start, self.arena.nesting_limit())?;
        // Start the canonicality scan one word in, i.e. just past the root pointer.
        // SAFETY-NOTE(review): stepping one word forward assumes segment 0 contains at
        // least the root pointer word — presumably guaranteed by the arena; confirm.
        let read_head = ::core::cell::Cell::new(unsafe {segment_start.offset(BYTES_PER_WORD as isize)});
        let root_is_canonical = pointer_reader.is_canonical(&read_head)?;
        // A canonical message has no trailing slack: the scan must have consumed
        // exactly `seg_len` words of the (single) segment.
        let all_words_consumed =
            (read_head.get() as usize - segment_start as usize) / BYTES_PER_WORD == seg_len as usize;
        Ok(root_is_canonical && all_words_consumed)
    }

    /// Gets the [canonical](https://capnproto.org/encoding.html#canonicalization) form
    /// of this message. Works by copying the message twice. For a canonicalization
    /// method that only requires one copy, see `message::Builder::set_root_canonical()`.
    pub fn canonicalize(&self) -> Result<Vec<crate::Word>> {
        let root = self.get_root_internal()?;
        // +1 word for the root pointer itself.
        let size = root.target_size()?.word_count + 1;
        let mut message = Builder::new(HeapAllocator::new().first_segment_words(size as u32));
        message.set_root_canonical(root)?;
        let output_segments = message.get_segments_for_output();
        // `set_root_canonical()` promises a single-segment result when it is the
        // first action on a fresh builder.
        assert_eq!(1, output_segments.len());
        let output = output_segments[0];
        assert!((output.len() / BYTES_PER_WORD) as u64 <= size);
        // Copy the canonical bytes into a word-aligned Vec.
        let mut result = crate::Word::allocate_zeroed_vec(output.len() / BYTES_PER_WORD);
        crate::Word::words_to_bytes_mut(&mut result[..]).copy_from_slice(output);
        Ok(result)
    }

    /// Wraps this reader, tagging its root as type `T`.
    pub fn into_typed<T: for<'a> Owned<'a>>(self) -> TypedReader<S, T> {
        TypedReader::new(self)
    }
}
272 
/// A message reader whose value is known to be of type `T`.
/// Please see [module documentation](self) for more info about reader type specialization.
pub struct TypedReader<S, T>
    where S: ReaderSegments,
          T: for<'a> Owned<'a> {
    // Zero-sized tag recording the root type; `T` is otherwise unused at runtime.
    marker: ::core::marker::PhantomData<T>,
    // The untyped reader being wrapped.
    message: Reader<S>,
}
281 
282 impl <S, T> TypedReader<S, T>
283     where S: ReaderSegments,
284           T : for<'a> Owned<'a> {
285 
new(message: Reader<S>) -> Self286     pub fn new(message: Reader<S>) -> Self {
287         TypedReader {
288             marker: ::core::marker::PhantomData,
289             message: message,
290         }
291     }
292 
get<'a> (&'a self) -> Result<<T as Owned<'a>>::Reader>293     pub fn get<'a> (&'a self) -> Result<<T as Owned<'a>>::Reader> {
294         self.message.get_root()
295     }
296 
into_inner(self) -> Reader<S>297     pub fn into_inner(self) -> Reader<S> {
298         self.message
299     }
300 }
301 
302 impl <S, T> From<Reader<S>> for TypedReader<S, T>
303     where S: ReaderSegments,
304           T: for<'a> Owned<'a> {
305 
from(message: Reader<S>) -> TypedReader<S, T>306     fn from(message: Reader<S>) -> TypedReader<S, T> {
307         TypedReader::new(message)
308     }
309 }
310 
311 impl <A, T> From<Builder<A>> for TypedReader<Builder<A>, T>
312     where A: Allocator,
313           T: for<'a> Owned<'a> {
314 
from(message: Builder<A>) -> TypedReader<Builder<A>, T>315     fn from(message: Builder<A>) -> TypedReader<Builder<A>, T> {
316         let reader = message.into_reader();
317         reader.into_typed()
318     }
319 }
320 
321 impl <A, T> From<TypedBuilder<T, A>> for TypedReader<Builder<A>, T>
322     where A: Allocator,
323           T: for<'a> Owned<'a> {
324 
from(builder: TypedBuilder<T, A>) -> TypedReader<Builder<A>, T>325     fn from(builder: TypedBuilder<T, A>) -> TypedReader<Builder<A>, T> {
326         builder.into_reader()
327     }
328 }
329 
330 
/// An object that allocates memory for a Cap'n Proto message as it is being built.
///
/// This trait is `unsafe` because the message-building machinery relies on the
/// invariants listed under `allocate_segment()` for memory safety.
pub unsafe trait Allocator {
    /// Allocates zeroed memory for a new segment, returning a pointer to the start of the segment
    /// and a u32 indicating the length of the segment in words. The allocated segment must be
    /// at least `minimum_size` words long (`minimum_size * 8` bytes long). Allocator implementations
    /// commonly allocate much more than the minimum, to reduce the total number of segments needed.
    /// A reasonable strategy is to allocate the maximum of `minimum_size` and twice the size of the
    /// previous segment.
    ///
    /// UNSAFETY ALERT: Implementors must ensure all of the following:
    ///     1. the returned memory is initialized to all zeroes,
    ///     2. the returned memory is valid until deallocate_segment() is called on it,
    ///     3. the memory doesn't overlap with other allocated memory,
    ///     4. the memory is 8-byte aligned (or the "unaligned" feature is enabled for the capnp crate).
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32);

    /// Indicates that a segment, previously allocated via allocate_segment(), is no longer in use.
    /// `word_size` is the length of the segment in words, as returned from `allocate_segment()`.
    /// `words_used` is always less than or equal to `word_size`, and indicates how many
    /// words (contiguous from the start of the segment) were possibly written with non-zero values.
    fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, words_used: u32);
}
353 
/// A container used to build a message.
pub struct Builder<A> where A: Allocator {
    // Arena that owns the allocator and all segments allocated so far.
    arena: BuilderArenaImpl<A>,
}

// SAFETY-NOTE(review): this asserts that a `Builder` may move between threads
// whenever its allocator is `Send`. That relies on `BuilderArenaImpl<A>` holding
// no thread-bound state beyond `A` — confirm against `private::arena`.
unsafe impl <A> Send for Builder<A> where A: Send + Allocator {}
360 
_assert_kinds()361 fn _assert_kinds() {
362     fn _assert_send<T: Send>() {}
363     fn _assert_reader<S: ReaderSegments + Send>() {
364         _assert_send::<Reader<S>>();
365     }
366     fn _assert_builder<A: Allocator + Send>() {
367         _assert_send::<Builder<A>>();
368     }
369 }
370 
impl <A> Builder<A> where A: Allocator {
    /// Constructs a builder that obtains its segments from `allocator`.
    pub fn new(allocator: A) -> Self {
        Builder {
            arena: BuilderArenaImpl::new(allocator),
        }
    }

    /// Returns a builder for the root pointer, lazily allocating the first
    /// segment (and the root pointer word within it) on first use.
    fn get_root_internal<'a>(&'a mut self) -> any_pointer::Builder<'a> {
        if self.arena.len() == 0 {
            self.arena.allocate_segment(1).expect("allocate root pointer");
            self.arena.allocate(0, 1).expect("allocate root pointer");
        }
        let (seg_start, _seg_len) = self.arena.get_segment_mut(0);
        let location: *mut u8 = seg_start;
        // Destructure to get a borrow of the arena that is independent of the
        // earlier `get_segment_mut()` call.
        let Builder { ref mut arena } = *self;

        any_pointer::Builder::new(
            layout::PointerBuilder::get_root(arena, 0, location))
    }

    /// Initializes the root as a value of the given type.
    pub fn init_root<'a, T: FromPointerBuilder<'a>>(&'a mut self) -> T {
        let root = self.get_root_internal();
        root.init_as()
    }

    /// Gets the root, interpreting it as the given type.
    pub fn get_root<'a, T: FromPointerBuilder<'a>>(&'a mut self) -> Result<T> {
        let root = self.get_root_internal();
        root.get_as()
    }

    /// Reads the current root through a shared reference. Returns a
    /// default-valued reader if nothing has been allocated yet.
    pub fn get_root_as_reader<'a, T: FromPointerReader<'a>>(&'a self) -> Result<T> {
        if self.arena.len() == 0 {
            any_pointer::Reader::new(layout::PointerReader::new_default()).get_as()
        } else {
            let (segment_start, _segment_len) = self.arena.get_segment(0)?;
            // 0x7fffffff (i32::MAX): effectively no nesting limit is enforced here.
            let pointer_reader = layout::PointerReader::get_root(
                self.arena.as_reader(), 0, segment_start, 0x7fffffff)?;
            let root = any_pointer::Reader::new(pointer_reader);
            root.get_as()
        }
    }

    /// Sets the root to a deep copy of the given value.
    pub fn set_root<From: SetPointerBuilder>(&mut self, value: From) -> Result<()> {
        let root = self.get_root_internal();
        root.set_as(value)
    }

    /// Sets the root to a canonicalized version of `value`. If this was the first action taken
    /// on this `Builder`, then a subsequent call to `get_segments_for_output()` should return
    /// a single segment, containing the full canonicalized message.
    pub fn set_root_canonical<From: SetPointerBuilder>(&mut self, value: From) -> Result<()>
    {
        // Same lazy first-segment allocation as in `get_root_internal()`.
        if self.arena.len() == 0 {
            self.arena.allocate_segment(1).expect("allocate root pointer");
            self.arena.allocate(0, 1).expect("allocate root pointer");
        }
        let (seg_start, _seg_len) = self.arena.get_segment_mut(0);
        let pointer = layout::PointerBuilder::get_root(&self.arena, 0, seg_start);
        // The `true` flag presumably requests canonical form during the copy —
        // confirm against `SetPointerBuilder::set_pointer_builder`.
        SetPointerBuilder::set_pointer_builder(pointer, value, true)?;
        assert_eq!(self.get_segments_for_output().len(), 1);
        Ok(())
    }

    /// Returns the message's segments, e.g. for serialization to the wire.
    pub fn get_segments_for_output<'a>(&'a self) -> OutputSegments<'a> {
        self.arena.get_segments_for_output()
    }

    /// Converts this builder into a reader of its own segments, with no
    /// traversal limit and the maximum nesting limit.
    pub fn into_reader(self) -> Reader<Builder<A>> {
        Reader::new(self, ReaderOptions {
            traversal_limit_in_words: None,
            nesting_limit: i32::max_value()
        })
    }

    /// Wraps this builder, tagging its root as type `T`.
    pub fn into_typed<T: for<'a> Owned<'a>>(self) -> TypedBuilder<T, A> {
        TypedBuilder::new(self)
    }

    /// Consumes the builder, returning its allocator for reuse.
    pub fn into_allocator(self) -> A {
        self.arena.into_allocator()
    }
}
456 
457 impl <A> ReaderSegments for Builder<A> where A: Allocator {
get_segment<'a>(&'a self, id: u32) -> Option<&'a [u8]>458     fn get_segment<'a>(&'a self, id: u32) -> Option<&'a [u8]> {
459         self.get_segments_for_output().get(id as usize).map(|x| *x)
460     }
461 
len(&self) -> usize462     fn len(&self) -> usize {
463         self.get_segments_for_output().len()
464     }
465 }
466 
/// Strongly typed variant of the [Builder]
///
/// Generic type parameters:
/// - `T` - type of the capnp message which this builder is specialized on. Please see
///   [module documentation](self) for more info about builder type specialization.
/// - `A` - type of allocator
pub struct TypedBuilder<T, A = HeapAllocator>
where
    T: for<'a> Owned<'a>,
    A: Allocator,
{
    // Zero-sized tag recording the root type; `T` is otherwise unused at runtime.
    marker: ::core::marker::PhantomData<T>,
    // The untyped builder being wrapped.
    message: Builder<A>,
}
481 
482 impl<T> TypedBuilder<T, HeapAllocator>
483 where
484     T: for<'a> Owned<'a>,
485 {
new_default() -> Self486     pub fn new_default() -> Self {
487         TypedBuilder::new(Builder::new_default())
488     }
489 }
490 
491 impl<T, A> TypedBuilder<T, A>
492 where
493     T: for<'a> Owned<'a>,
494     A: Allocator,
495 {
new(message: Builder<A>) -> Self496     pub fn new(message: Builder<A>) -> Self {
497         Self {
498             marker: ::core::marker::PhantomData,
499             message: message,
500         }
501     }
502 
init_root<'a>(&'a mut self) -> <T as Owned<'a>>::Builder503     pub fn init_root<'a>(&'a mut self) -> <T as Owned<'a>>::Builder {
504         self.message.init_root()
505     }
506 
get_root<'a>(&'a mut self) -> Result<<T as Owned<'a>>::Builder>507     pub fn get_root<'a>(&'a mut self) -> Result<<T as Owned<'a>>::Builder> {
508         self.message.get_root()
509     }
510 
get_root_as_reader<'a>(&'a self) -> Result<<T as Owned<'a>>::Reader>511     pub fn get_root_as_reader<'a>(&'a self) -> Result<<T as Owned<'a>>::Reader> {
512         self.message.get_root_as_reader()
513     }
514 
set_root<'a>(&mut self, value: <T as Owned<'a>>::Reader) -> Result<()>515     pub fn set_root<'a>(&mut self, value: <T as Owned<'a>>::Reader) -> Result<()> {
516         self.message.set_root(value)
517     }
518 
into_inner(self) -> Builder<A>519     pub fn into_inner(self) -> Builder<A> {
520         self.message
521     }
522 
borrow_inner(&self) -> &Builder<A>523     pub fn borrow_inner(&self) -> &Builder<A> {
524         &self.message
525     }
526 
borrow_inner_mut(&mut self) -> &mut Builder<A>527     pub fn borrow_inner_mut(&mut self) -> &mut Builder<A> {
528         &mut self.message
529     }
530 
into_reader(self) -> TypedReader<Builder<A>, T>531     pub fn into_reader(self) -> TypedReader<Builder<A>, T> {
532         TypedReader::new(self.message.into_reader())
533     }
534 }
535 
536 impl<T, A> From<Builder<A>> for TypedBuilder<T, A>
537 where
538     T: for<'a> Owned<'a>,
539     A: Allocator,
540 {
from(builder: Builder<A>) -> Self541     fn from(builder: Builder<A>) -> Self {
542         Self::new(builder)
543     }
544 }
545 
/// Standard segment allocator. Allocates each segment via `alloc::alloc::alloc_zeroed()`.
#[derive(Debug)]
pub struct HeapAllocator {
    // Minimum number of words in the next allocation.
    next_size: u32,

    // How to update next_size after an allocation.
    allocation_strategy: AllocationStrategy,

    // Maximum number of words to allocate (caps `next_size`).
    max_segment_words: u32,
}
558 
/// Policy for sizing segments after the first one.
#[derive(Clone, Copy, Debug)]
pub enum AllocationStrategy {
    /// Allocates the same number of words for each segment, to the extent possible.
    /// This strategy is primarily useful for testing cross-segment pointers.
    FixedSize,

    /// Increases segment size by a multiplicative factor for each subsequent segment.
    GrowHeuristically,
}
568 
/// Suggested size (in 8-byte words) for a message's first segment.
pub const SUGGESTED_FIRST_SEGMENT_WORDS: u32 = 1024;
/// Suggested strategy for sizing segments after the first.
pub const SUGGESTED_ALLOCATION_STRATEGY: AllocationStrategy = AllocationStrategy::GrowHeuristically;
571 
572 impl HeapAllocator {
new() -> HeapAllocator573     pub fn new() -> HeapAllocator {
574         HeapAllocator { next_size: SUGGESTED_FIRST_SEGMENT_WORDS,
575                         allocation_strategy: SUGGESTED_ALLOCATION_STRATEGY,
576                         max_segment_words: 1 << 29, }
577     }
578 
579     /// Sets the size of the initial segment in words, where 1 word = 8 bytes.
first_segment_words(mut self, value: u32) -> HeapAllocator580     pub fn first_segment_words(mut self, value: u32) -> HeapAllocator {
581         assert!(value <= self.max_segment_words);
582         self.next_size = value;
583         self
584     }
585 
586     /// Sets the allocation strategy for segments after the first one.
allocation_strategy(mut self, value: AllocationStrategy) -> HeapAllocator587     pub fn allocation_strategy(mut self, value: AllocationStrategy) -> HeapAllocator {
588         self.allocation_strategy = value;
589         self
590     }
591 
592     /// Sets the maximum number of words allowed in a single allocation.
max_segment_words(mut self, value: u32) -> HeapAllocator593     pub fn max_segment_words(mut self, value: u32) -> HeapAllocator {
594         assert!(self.next_size <= value);
595         self.max_segment_words = value;
596         self
597     }
598 }
599 
unsafe impl Allocator for HeapAllocator {
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32) {
        // Allocate at least `minimum_size`, but grow to `next_size` if that is larger.
        let size = core::cmp::max(minimum_size, self.next_size);
        let ptr = unsafe {
            alloc::alloc::alloc_zeroed(alloc::alloc::Layout::from_size_align(size as usize * BYTES_PER_WORD, 8).unwrap())
        };
        // NOTE(review): `alloc_zeroed` returns null on allocation failure, and
        // that case is not checked here — confirm whether this should call
        // `alloc::alloc::handle_alloc_error` instead of returning a null pointer.
        match self.allocation_strategy {
            AllocationStrategy::GrowHeuristically => {
                // Grow next_size by `size`, clamping at max_segment_words. The
                // subtraction form avoids computing `next_size + size` directly,
                // which could overflow u32.
                if size < self.max_segment_words - self.next_size {
                    self.next_size += size;
                } else {
                    self.next_size = self.max_segment_words;
                }
            }
            AllocationStrategy::FixedSize => { }
        }
        (ptr, size as u32)
    }

    fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, _words_used: u32) {
        // SAFETY: `ptr` was returned by `allocate_segment()`, which used the same
        // size/alignment layout reconstructed here from `word_size`.
        unsafe {
            alloc::alloc::dealloc(ptr,
                                  alloc::alloc::Layout::from_size_align(word_size as usize * BYTES_PER_WORD, 8).unwrap());
        }
        // Reset the growth heuristic so a reused allocator starts small again.
        self.next_size = SUGGESTED_FIRST_SEGMENT_WORDS;
    }
}
627 
#[test]
fn test_allocate_max() {
    // next_size doubles after the first allocation, which would exceed the cap.
    let first_words = 1 << 24;
    let mut allocator = HeapAllocator::new()
        .max_segment_words((1 << 25) - 1)
        .first_segment_words(first_words);

    let (p1, s1) = allocator.allocate_segment(first_words);
    let (p2, s2) = allocator.allocate_segment(first_words);
    let (p3, s3) = allocator.allocate_segment(first_words);

    // The first allocation honors the requested first-segment size exactly.
    assert_eq!(s1, first_words);

    // Allocation size tops out at max_segment_words.
    assert_eq!(s2, allocator.max_segment_words);
    assert_eq!(s3, allocator.max_segment_words);

    allocator.deallocate_segment(p1, s1, 0);
    allocator.deallocate_segment(p2, s2, 0);
    allocator.deallocate_segment(p3, s3, 0);
}
650 
651 impl Builder<HeapAllocator> {
new_default() -> Builder<HeapAllocator>652     pub fn new_default() -> Builder<HeapAllocator> {
653         Builder::new(HeapAllocator::new())
654     }
655 }
656 
/// An Allocator whose first segment is backed by a user-provided buffer.
///
/// Recall that an `Allocator` implementation must ensure that allocated segments are
/// initially *zeroed*. `ScratchSpaceHeapAllocator` ensures that is the case by zeroing
/// the entire buffer upon initial construction, and then zeroing any *potentially used*
/// part of the buffer upon `deallocate_segment()`.
///
/// You can reuse a `ScratchSpaceHeapAllocator` by calling `message::Builder::into_allocator()`,
/// or by initially passing it to `message::Builder::new()` as a `&mut ScratchSpaceHeapAllocator`.
/// Such reuse can save significant amounts of zeroing.
pub struct ScratchSpaceHeapAllocator<'a> {
    // The user-provided buffer used for the first segment.
    scratch_space: &'a mut [u8],
    // Whether the scratch buffer is currently handed out as a segment.
    scratch_space_allocated: bool,
    // Fallback allocator for all segments beyond the scratch buffer.
    allocator: HeapAllocator,
}
672 
673 impl <'a> ScratchSpaceHeapAllocator<'a> {
674     /// Writes zeroes into the entire buffer and constructs a new allocator from it.
675     ///
676     /// If the buffer is large, this operation could be relatively expensive. If you want to reuse
677     /// the same scratch space in a later message, you should reuse the entire
678     /// `ScratchSpaceHeapAllocator`, to avoid paying this full cost again.
new(scratch_space: &'a mut [u8]) -> ScratchSpaceHeapAllocator<'a>679     pub fn new(scratch_space: &'a mut [u8]) -> ScratchSpaceHeapAllocator<'a> {
680         #[cfg(not(feature = "unaligned"))]
681         {
682             if scratch_space.as_ptr() as usize % BYTES_PER_WORD != 0 {
683                 panic!("Scratch space must be 8-byte aligned, or you must enable the \"unaligned\" \
684                         feature in the capnp crate");
685             }
686         }
687 
688         // We need to ensure that the buffer is zeroed.
689         for b in &mut scratch_space[..] {
690             *b = 0;
691         }
692         ScratchSpaceHeapAllocator { scratch_space: scratch_space,
693                                     scratch_space_allocated: false,
694                                     allocator: HeapAllocator::new()}
695     }
696 
697     /// Sets the size of the second segment in words, where 1 word = 8 bytes.
698     /// (The first segment is the scratch space passed to `ScratchSpaceHeapAllocator::new()`.
second_segment_words(self, value: u32) -> ScratchSpaceHeapAllocator<'a>699     pub fn second_segment_words(self, value: u32) -> ScratchSpaceHeapAllocator<'a> {
700         ScratchSpaceHeapAllocator { allocator: self.allocator.first_segment_words(value), ..self }
701 
702     }
703 
704     /// Sets the allocation strategy for segments after the second one.
allocation_strategy(self, value: AllocationStrategy) -> ScratchSpaceHeapAllocator<'a>705     pub fn allocation_strategy(self, value: AllocationStrategy) -> ScratchSpaceHeapAllocator<'a> {
706         ScratchSpaceHeapAllocator { allocator: self.allocator.allocation_strategy(value), ..self }
707     }
708 }
709 
unsafe impl <'a> Allocator for ScratchSpaceHeapAllocator<'a> {
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32) {
        // Hand out the scratch buffer for the first request it can satisfy;
        // everything else falls through to the heap allocator.
        // NOTE(review): the strict `<` means a request exactly equal to the
        // scratch capacity bypasses the scratch buffer — confirm whether `<=`
        // was intended.
        if (minimum_size as usize) < (self.scratch_space.len() / BYTES_PER_WORD) && !self.scratch_space_allocated {
            self.scratch_space_allocated = true;
            (self.scratch_space.as_mut_ptr(), (self.scratch_space.len() / BYTES_PER_WORD) as u32)
        } else {
            self.allocator.allocate_segment(minimum_size)
        }
    }

    fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, words_used: u32) {
        if ptr == self.scratch_space.as_mut_ptr() {
            // Rezero the slice to allow reuse of the allocator. We only need to write
            // words that we know might contain nonzero values.
            // SAFETY: `ptr` is the start of our scratch buffer and the contract of
            // `deallocate_segment` guarantees `words_used <= word_size`, so the
            // write stays within the buffer.
            unsafe {
                core::ptr::write_bytes(ptr, 0u8, (words_used as usize) * BYTES_PER_WORD);
            }
            self.scratch_space_allocated = false;
        } else {
            self.allocator.deallocate_segment(ptr, word_size, words_used);
        }
    }
}
733 
// Allows a `&mut A` to be used wherever an `Allocator` is expected, e.g. to
// reuse a `ScratchSpaceHeapAllocator` across multiple `Builder`s.
unsafe impl <'a, A> Allocator for &'a mut A where A: Allocator {
    // SAFETY: both methods delegate directly to `A`'s implementation, so `A`'s
    // guarantees carry over unchanged.
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32) {
        // `*self` has type `&mut A`; method lookup resolves this call to
        // `A::allocate_segment` (not back to this impl), so it does not recurse.
        (*self).allocate_segment(minimum_size)
    }

    fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, words_used: u32) {
        (*self).deallocate_segment(ptr, word_size, words_used)
    }
}
743 
744