1 // TODO: Revisit the design of `Event` once the `HashMap` raw interface is stabilised.
2 // Ideally `Value`s would be stored inline in `Event`.
3
4 use indexmap::IndexMap;
5 use std::{
6 borrow::Cow,
7 io::{self, Write},
8 mem,
9 num::NonZeroUsize,
10 };
11
12 use crate::{
13 error::{self, Error, ErrorKind, EventKind},
14 stream::Writer,
15 Date, Integer, Uid,
16 };
17
pub struct BinaryWriter<W: Write> {
    /// Wraps the output and tracks the number of bytes written so object offsets can be
    /// recorded for the offset table.
    writer: PosWriter<W>,
    /// The buffered events for the plist currently being written.
    events: Vec<Event>,
    /// Value indices (into `values`) for the keys of all currently unclosed dictionaries.
    dictionary_key_events: Vec<usize>,
    /// Deduplicated values. The map index of a value is the id stored in `Event::Value`.
    values: IndexMap<Value<'static>, ValueState>,
    /// Pointers into `events` for each of the currently unclosed `Collection` events.
    collection_stack: Vec<usize>,
    /// The number of `Collection` and unique `Value` events in `events`.
    num_objects: usize,
}
28
/// A writer wrapper that counts the number of bytes written so far.
struct PosWriter<W: Write> {
    writer: W,
    /// Bytes written since construction or the last reset in `write_plist`.
    pos: usize,
}
33
/// A plist object reference, stored offset by one in a `NonZeroUsize` so that
/// `Option<ObjectRef>` is the same size as `usize`.
#[derive(Clone)]
struct ObjectRef(NonZeroUsize);
36
/// An array of `len` elements is stored as a `Collection` event followed by `skip_len` events
/// containing the contents of the array. e.g.
///
/// Collection(ty: Array, len: 2, skip_len: 2)
/// Value
/// Value
///
/// If the array contains another array or dictionary `len` and `skip_len` will differ. e.g.
///
/// Collection(ty: Array, len: 2, skip_len: 3)
/// Value
/// Collection(ty: Array, len: 1, skip_len: 1)
/// Value
///
/// A dictionary of `len` (key, value) pairs is stored as a `Collection` event followed by
/// `skip_len` events containing the contents of the dictionary. The dictionary values are stored
/// first. These are followed by a `DictionaryKeys` event and then the keys themselves. e.g.
///
/// Collection(ty: Dictionary, len: 2, skip_len: 6)
/// Value
/// Collection(ty: Array, len: 1, skip_len: 1)
/// Value
/// DictionaryKeys(2)
/// Value (Key)
/// Value (Key)
///
/// This arrangement simplifies writing dictionaries as they must be written in the order
/// (key, key, value, value) instead of (key, value, key, value) as they are passed to the writer.
/// Unclosed dictionaries have their keys stored in `dictionary_key_events` and these are only
/// moved to the end of the `BinaryWriter::events` array once the dictionary is closed in
/// `write_end_collection`.
enum Event {
    /// The start of an array or dictionary, with the bookkeeping needed to write it out.
    Collection(Collection),
    /// Index of the value in the `values` map.
    Value(usize),
    /// The number of dictionary keys following this event.
    DictionaryKeys(usize),
}
75
/// Bookkeeping for a single array or dictionary event.
struct Collection {
    ty: CollectionType,
    /// The number of elements in an array or (key, value) pairs in a dictionary.
    /// Unclosed dictionaries have a `len` equal to the number of keys plus the number of values
    /// written so far. This is fixed up in `write_end_collection`.
    len: usize,
    /// The number of events to skip to get to the next element after the collection.
    skip: usize,
    /// The collection's object reference, assigned while the plist is written out in
    /// `write_plist`/`write_plist_collection`.
    object_ref: Option<ObjectRef>,
}
86
/// The kind of plist collection: an array or a dictionary.
#[derive(Eq, PartialEq)]
enum CollectionType {
    Array,
    Dictionary,
}
92
/// A plist value, borrowed from the caller where possible. Only values not already present in
/// the deduplication map are converted to their owned form (see `Value::into_owned`).
#[derive(Eq, Hash, PartialEq)]
enum Value<'a> {
    Boolean(bool),
    Data(Cow<'a, [u8]>),
    Date(Date),
    Integer(Integer),
    /// Floats are deduplicated based on their bitwise value.
    Real(u64),
    String(Cow<'a, str>),
    Uid(Uid),
}
104
/// The lifecycle of a deduplicated value while the plist is written out.
enum ValueState {
    /// The value has not been assigned an object reference.
    Unassigned,
    /// The value has been assigned an object reference but has not yet been written.
    Unwritten(ObjectRef),
    /// The value has been written with the given object reference.
    Written(ObjectRef),
}
113
114 impl<W: Write> BinaryWriter<W> {
new(writer: W) -> BinaryWriter<W>115 pub fn new(writer: W) -> BinaryWriter<W> {
116 BinaryWriter {
117 writer: PosWriter { writer, pos: 0 },
118 events: Vec::new(),
119 dictionary_key_events: Vec::new(),
120 values: IndexMap::new(),
121 collection_stack: Vec::new(),
122 num_objects: 0,
123 }
124 }
125
write_start_collection(&mut self, ty: CollectionType) -> Result<(), Error>126 fn write_start_collection(&mut self, ty: CollectionType) -> Result<(), Error> {
127 if self.expecting_dictionary_key() {
128 let ty_event_kind = match ty {
129 CollectionType::Array => EventKind::StartArray,
130 CollectionType::Dictionary => EventKind::StartDictionary,
131 };
132 return Err(ErrorKind::UnexpectedEventType {
133 expected: EventKind::DictionaryKeyOrEndCollection,
134 found: ty_event_kind,
135 }
136 .without_position());
137 }
138 self.increment_current_collection_len();
139 self.collection_stack.push(self.events.len());
140 self.events.push(Event::Collection(Collection {
141 ty,
142 len: 0,
143 skip: 0,
144 object_ref: None,
145 }));
146 self.num_objects += 1;
147 Ok(())
148 }
149
write_end_collection(&mut self) -> Result<(), Error>150 fn write_end_collection(&mut self) -> Result<(), Error> {
151 let collection_event_index = self.collection_stack.pop().ok_or_else(|| {
152 ErrorKind::UnexpectedEventType {
153 expected: EventKind::ValueOrStartCollection,
154 found: EventKind::EndCollection,
155 }
156 .without_position()
157 })?;
158
159 let current_event_index = self.events.len() - 1;
160 let c = if let Event::Collection(c) = &mut self.events[collection_event_index] {
161 c
162 } else {
163 unreachable!("items in `collection_stack` always point to a collection event");
164 };
165
166 c.skip = current_event_index - collection_event_index;
167
168 if let CollectionType::Dictionary = c.ty {
169 // Ensure that every dictionary key is paired with a value.
170 if !is_even(c.len) {
171 return Err(ErrorKind::UnexpectedEventType {
172 expected: EventKind::DictionaryKeyOrEndCollection,
173 found: EventKind::EndCollection,
174 }
175 .without_position());
176 }
177
178 // Fix up the dictionary length. It should contain the number of key-value pairs,
179 // not the number of keys and values.
180 c.len /= 2;
181
182 // To skip past a dictionary we also need to skip the `DictionaryKeys` event and the
183 // keys that follow it.
184 c.skip += 1 + c.len;
185 let len = c.len;
186 self.events.push(Event::DictionaryKeys(len));
187
188 // Move the cached dictionary keys to the end of the events array.
189 let keys_start_index = self.dictionary_key_events.len() - len;
190 self.events.extend(
191 self.dictionary_key_events
192 .drain(keys_start_index..)
193 .map(Event::Value),
194 );
195 }
196
197 if self.collection_stack.is_empty() {
198 self.write_plist()?;
199 }
200
201 Ok(())
202 }
203
write_value(&mut self, value: Value) -> Result<(), Error>204 fn write_value(&mut self, value: Value) -> Result<(), Error> {
205 let expecting_dictionary_key = self.expecting_dictionary_key();
206
207 // Ensure that all dictionary keys are strings.
208 match (&value, expecting_dictionary_key) {
209 (Value::String(_), true) | (_, false) => (),
210 (_, true) => {
211 return Err(ErrorKind::UnexpectedEventType {
212 expected: EventKind::DictionaryKeyOrEndCollection,
213 found: value.event_kind(),
214 }
215 .without_position())
216 }
217 }
218
219 // Deduplicate `value`. There is one entry in `values` for each unqiue `Value` in the
220 // plist.
221 let value_index = if let Some((value_index, _, _)) = self.values.get_full(&value) {
222 value_index
223 } else {
224 self.num_objects += 1;
225 let value = value.into_owned();
226 let (value_index, _) = self.values.insert_full(value, ValueState::Unassigned);
227 value_index
228 };
229
230 // Dictionary keys are buffered in `dictionary_key_events` until the dictionary is closed
231 // in `write_end_collection` when they are moved to the end of the `events` array.
232 if expecting_dictionary_key {
233 self.dictionary_key_events.push(value_index);
234 } else {
235 self.events.push(Event::Value(value_index));
236 }
237
238 self.increment_current_collection_len();
239
240 if self.collection_stack.is_empty() {
241 self.write_plist()?;
242 }
243
244 Ok(())
245 }
246
expecting_dictionary_key(&self) -> bool247 fn expecting_dictionary_key(&self) -> bool {
248 if let Some(&event_index) = self.collection_stack.last() {
249 if let Event::Collection(c) = &self.events[event_index] {
250 c.ty == CollectionType::Dictionary && is_even(c.len)
251 } else {
252 unreachable!("items in `collection_stack` always point to a collection event");
253 }
254 } else {
255 false
256 }
257 }
258
increment_current_collection_len(&mut self)259 fn increment_current_collection_len(&mut self) {
260 if let Some(&event_index) = self.collection_stack.last() {
261 if let Event::Collection(c) = &mut self.events[event_index] {
262 c.len += 1;
263 } else {
264 unreachable!("items in `collection_stack` always point to a collection event");
265 }
266 }
267 }
268
write_plist(&mut self) -> Result<(), Error>269 fn write_plist(&mut self) -> Result<(), Error> {
270 assert!(self.collection_stack.is_empty());
271
272 // Write header
273 self.writer.write_exact(b"bplist00")?;
274
275 // Write objects
276 let mut events_vec = mem::replace(&mut self.events, Vec::new());
277 let mut events = &mut events_vec[..];
278 let ref_size = plist_ref_size(self.num_objects - 1);
279 let mut offset_table = vec![0; self.num_objects];
280
281 // Assign the first (root) event an object reference of zero.
282 let mut next_object_ref = ObjectRef::zero();
283 match &mut events[0] {
284 Event::Value(value_index) => {
285 let (_, value_state) = value_mut(&mut self.values, *value_index);
286 *value_state = ValueState::Unwritten(next_object_ref.clone_and_increment_self());
287 }
288 Event::Collection(c) => {
289 c.object_ref = Some(next_object_ref.clone_and_increment_self());
290 }
291 Event::DictionaryKeys(_) => {
292 unreachable!("`events` starts with a value or collection event")
293 }
294 }
295
296 while let Some((event, rest)) = events.split_first_mut() {
297 events = rest;
298 match event {
299 Event::Collection(c) => {
300 let collection_events = &mut events[..c.skip];
301 self.write_plist_collection(
302 c,
303 collection_events,
304 ref_size,
305 &mut next_object_ref,
306 &mut offset_table,
307 )?;
308 }
309 Event::Value(value_index) => {
310 self.write_plist_value(*value_index, &mut offset_table)?;
311 }
312 // Dictionary keys will have already been written in `write_plist_collection` so we
313 // skip over them here.
314 Event::DictionaryKeys(len) => {
315 events = &mut events[*len..];
316 }
317 }
318 }
319
320 // Write object offset table
321 let offset_table_offset = self.writer.pos;
322 let offset_size = plist_ref_size(offset_table_offset);
323 for &offset in &offset_table {
324 write_plist_ref(&mut self.writer, offset_size, offset)?;
325 }
326
327 // Write trailer
328 // 6 zero bytes padding
329 // 1 byte offset size
330 // 1 byte object ref size
331 // 8 bytes number of objects
332 // 8 bytes root object ref (always zero)
333 // 8 bytes file offset of the object offset table
334 let mut trailer = [0; 32];
335 trailer[6] = offset_size;
336 trailer[7] = ref_size;
337 trailer[8..16].copy_from_slice(&(self.num_objects as u64).to_be_bytes());
338 trailer[24..32].copy_from_slice(&(offset_table_offset as u64).to_be_bytes());
339 self.writer.write_exact(&trailer)?;
340
341 self.writer
342 .flush()
343 .map_err(error::from_io_without_position)?;
344
345 // Reset plist writer
346 self.writer.pos = 0;
347 events_vec.clear();
348 self.events = events_vec;
349 self.values.clear();
350 self.num_objects = 0;
351
352 Ok(())
353 }
354
write_plist_collection( &mut self, collection: &Collection, events: &mut [Event], ref_size: u8, next_object_ref: &mut ObjectRef, offset_table: &mut Vec<usize>, ) -> Result<(), Error>355 fn write_plist_collection(
356 &mut self,
357 collection: &Collection,
358 events: &mut [Event],
359 ref_size: u8,
360 next_object_ref: &mut ObjectRef,
361 offset_table: &mut Vec<usize>,
362 ) -> Result<(), Error> {
363 if let Some(object_ref) = &collection.object_ref {
364 offset_table[object_ref.value()] = self.writer.pos;
365 } else {
366 unreachable!("collection object refs are assigned before this function is called");
367 }
368
369 // Split the events in the current collection into keys and values (arrays contain only
370 // values). This is required as dictionary keys appear after values in the `events array
371 // but all keys must be written before any values.
372 let (keys, values, ty) = match collection.ty {
373 CollectionType::Array => (&mut [][..], events, 0xa0),
374 CollectionType::Dictionary => {
375 let keys_start_offset = events.len() - collection.len - 1;
376 let (values, keys) = events.split_at_mut(keys_start_offset);
377 (&mut keys[1..], values, 0xd0)
378 }
379 };
380 let mut collection_events = keys.iter_mut().chain(values);
381
382 // Collections are written as a length prefixed array of object references. For an array
383 // the length is the number of elements. For a dictionary it is the number of (key, value)
384 // pairs.
385 write_plist_value_ty_and_size(&mut self.writer, ty, collection.len)?;
386 while let Some(event) = collection_events.next() {
387 let object_ref = match event {
388 Event::Collection(c) => {
389 // We only want to write references to top level elements in the collection so
390 // we skip over the contents of any sub-collections.
391 if c.skip > 0 {
392 let _ = collection_events.nth(c.skip - 1);
393 }
394
395 // Collections are not deduplicated so they must be assigned an object
396 // reference here.
397 assert!(c.object_ref.is_none());
398 let object_ref = next_object_ref.clone_and_increment_self();
399 c.object_ref = Some(object_ref.clone());
400 object_ref
401 }
402 Event::Value(value_index) => {
403 // Values are deduplicated so we only assign an object reference if we have not
404 // already done so previously.
405 let (_, value_state) = value_mut(&mut self.values, *value_index);
406 match value_state {
407 ValueState::Unassigned => {
408 let object_ref = next_object_ref.clone_and_increment_self();
409 *value_state = ValueState::Unwritten(object_ref.clone());
410 object_ref
411 }
412 ValueState::Unwritten(object_ref) | ValueState::Written(object_ref) => {
413 object_ref.clone()
414 }
415 }
416 }
417 Event::DictionaryKeys(_) => unreachable!(
418 "`DictionaryKeys` events are specifically excluded from the iterator"
419 ),
420 };
421 write_plist_ref(&mut self.writer, ref_size, object_ref.value())?;
422 }
423
424 // We write dictionary keys here as they appear after values in the `events` array but
425 // should come before values in the plist stream to reduce seeking on read.
426 for key in keys {
427 if let Event::Value(value_index) = key {
428 self.write_plist_value(*value_index, offset_table)?;
429 } else {
430 unreachable!("dictionary keys are assigned as values in `write_end_collection`");
431 }
432 }
433
434 Ok(())
435 }
436
write_plist_value( &mut self, value_index: usize, offset_table: &mut Vec<usize>, ) -> Result<(), Error>437 fn write_plist_value(
438 &mut self,
439 value_index: usize,
440 offset_table: &mut Vec<usize>,
441 ) -> Result<(), Error> {
442 let (value, value_state) = value_mut(&mut self.values, value_index);
443
444 let object_ref = match value_state {
445 ValueState::Unassigned => {
446 unreachable!("value object refs are assigned before this function is called");
447 }
448 ValueState::Unwritten(object_ref) => object_ref.clone(),
449 ValueState::Written(_) => return Ok(()),
450 };
451
452 offset_table[object_ref.value()] = self.writer.pos;
453 *value_state = ValueState::Written(object_ref);
454
455 match value {
456 Value::Boolean(true) => {
457 self.writer.write_exact(&[0x09])?;
458 }
459 Value::Boolean(false) => {
460 self.writer.write_exact(&[0x08])?;
461 }
462 Value::Data(v) => {
463 write_plist_value_ty_and_size(&mut self.writer, 0x40, v.len())?;
464 self.writer.write_exact(&v[..])?;
465 }
466 Value::Date(v) => {
467 let secs = v.to_seconds_since_plist_epoch();
468 let mut buf: [_; 9] = [0x33, 0, 0, 0, 0, 0, 0, 0, 0];
469 buf[1..].copy_from_slice(&secs.to_bits().to_be_bytes());
470 self.writer.write_exact(&buf)?;
471 }
472 Value::Integer(v) => {
473 if let Some(v) = v.as_signed() {
474 if v >= 0 && v <= i64::from(u8::max_value()) {
475 self.writer.write_exact(&[0x10, v as u8])?;
476 } else if v >= 0 && v <= i64::from(u16::max_value()) {
477 let mut buf: [_; 3] = [0x11, 0, 0];
478 buf[1..].copy_from_slice(&(v as u16).to_be_bytes());
479 self.writer.write_exact(&buf)?;
480 } else if v >= 0 && v <= i64::from(u32::max_value()) {
481 let mut buf: [_; 5] = [0x12, 0, 0, 0, 0];
482 buf[1..].copy_from_slice(&(v as u32).to_be_bytes());
483 self.writer.write_exact(&buf)?;
484 } else {
485 let mut buf: [_; 9] = [0x13, 0, 0, 0, 0, 0, 0, 0, 0];
486 buf[1..].copy_from_slice(&v.to_be_bytes());
487 self.writer.write_exact(&buf)?;
488 }
489 } else if let Some(v) = v.as_unsigned() {
490 // `u64`s larger than `i64::max_value()` are stored as signed 128 bit
491 // integers.
492 let mut buf: [_; 17] = [0x14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
493 buf[1..].copy_from_slice(&i128::from(v).to_be_bytes());
494 self.writer.write_exact(&buf)?;
495 } else {
496 unreachable!("an integer can be represented as either an i64 or u64");
497 }
498 }
499 Value::Real(v) => {
500 let mut buf: [_; 9] = [0x23, 0, 0, 0, 0, 0, 0, 0, 0];
501 buf[1..].copy_from_slice(&v.to_be_bytes());
502 self.writer.write_exact(&buf)?;
503 }
504 Value::String(v) if v.is_ascii() => {
505 let ascii = v.as_bytes();
506 write_plist_value_ty_and_size(&mut self.writer, 0x50, ascii.len())?;
507 self.writer.write_exact(ascii)?;
508 }
509 Value::String(v) => {
510 let utf16_len = v.encode_utf16().count();
511 write_plist_value_ty_and_size(&mut self.writer, 0x60, utf16_len)?;
512 for c in v.encode_utf16() {
513 self.writer.write_exact(&c.to_be_bytes())?;
514 }
515 }
516 Value::Uid(v) => {
517 let v = v.get();
518 if v <= u64::from(u8::max_value()) {
519 self.writer.write_exact(&[0x80, v as u8])?;
520 } else if v <= u64::from(u16::max_value()) {
521 let mut buf: [_; 3] = [0x81, 0, 0];
522 buf[1..].copy_from_slice(&(v as u16).to_be_bytes());
523 self.writer.write_exact(&buf)?;
524 } else if v <= u64::from(u32::max_value()) {
525 let mut buf: [_; 5] = [0x83, 0, 0, 0, 0];
526 buf[1..].copy_from_slice(&(v as u32).to_be_bytes());
527 self.writer.write_exact(&buf)?;
528 } else {
529 let mut buf: [_; 9] = [0x87, 0, 0, 0, 0, 0, 0, 0, 0];
530 buf[1..].copy_from_slice(&(v as u64).to_be_bytes());
531 self.writer.write_exact(&buf)?;
532 }
533 }
534 }
535 Ok(())
536 }
537 }
538
impl<W: Write> Writer for BinaryWriter<W> {
    // Collection events delegate to the shared inherent implementation. The optional length
    // hints are ignored: lengths are tracked incrementally and fixed up when a collection
    // closes.
    fn write_start_array(&mut self, _len: Option<u64>) -> Result<(), Error> {
        self.write_start_collection(CollectionType::Array)
    }
    fn write_start_dictionary(&mut self, _len: Option<u64>) -> Result<(), Error> {
        self.write_start_collection(CollectionType::Dictionary)
    }
    fn write_end_collection(&mut self) -> Result<(), Error> {
        self.write_end_collection()
    }

    // Value events borrow their payloads; `write_value` only copies them to owned storage
    // for values that have not been seen before.
    fn write_boolean(&mut self, value: bool) -> Result<(), Error> {
        self.write_value(Value::Boolean(value))
    }
    fn write_data(&mut self, value: &[u8]) -> Result<(), Error> {
        self.write_value(Value::Data(Cow::Borrowed(value)))
    }
    fn write_date(&mut self, value: Date) -> Result<(), Error> {
        self.write_value(Value::Date(value))
    }
    fn write_integer(&mut self, value: Integer) -> Result<(), Error> {
        self.write_value(Value::Integer(value))
    }
    // Reals are deduplicated by bit pattern, hence the `to_bits` conversion.
    fn write_real(&mut self, value: f64) -> Result<(), Error> {
        self.write_value(Value::Real(value.to_bits()))
    }
    fn write_string(&mut self, value: &str) -> Result<(), Error> {
        self.write_value(Value::String(Cow::Borrowed(value)))
    }
    fn write_uid(&mut self, value: Uid) -> Result<(), Error> {
        self.write_value(Value::Uid(value))
    }
}
572
is_even(value: usize) -> bool573 fn is_even(value: usize) -> bool {
574 value & 1 == 0
575 }
576
value_mut<'a>( values: &'a mut IndexMap<Value<'static>, ValueState>, value_index: usize, ) -> (&'a mut Value<'static>, &'a mut ValueState)577 fn value_mut<'a>(
578 values: &'a mut IndexMap<Value<'static>, ValueState>,
579 value_index: usize,
580 ) -> (&'a mut Value<'static>, &'a mut ValueState) {
581 values
582 .get_index_mut(value_index)
583 .expect("internal consistency error")
584 }
585
write_plist_value_ty_and_size( writer: &mut PosWriter<impl Write>, token: u8, size: usize, ) -> Result<(), Error>586 fn write_plist_value_ty_and_size(
587 writer: &mut PosWriter<impl Write>,
588 token: u8,
589 size: usize,
590 ) -> Result<(), Error> {
591 if size < 0x0f {
592 writer.write_exact(&[token | (size as u8)])?;
593 } else if size <= u8::max_value() as usize {
594 writer.write_exact(&[token | 0x0f, 0x10, size as u8])?;
595 } else if size <= u16::max_value() as usize {
596 let mut buf: [_; 4] = [token | 0x0f, 0x11, 0, 0];
597 buf[2..].copy_from_slice(&(size as u16).to_be_bytes());
598 writer.write_exact(&buf)?;
599 } else if size <= u32::max_value() as usize {
600 let mut buf: [_; 6] = [token | 0x0f, 0x12, 0, 0, 0, 0];
601 buf[2..].copy_from_slice(&(size as u32).to_be_bytes());
602 writer.write_exact(&buf)?;
603 } else {
604 let mut buf: [_; 10] = [token | 0x0f, 0x13, 0, 0, 0, 0, 0, 0, 0, 0];
605 buf[2..].copy_from_slice(&(size as u64).to_be_bytes());
606 writer.write_exact(&buf)?;
607 }
608 Ok(())
609 }
610
plist_ref_size(max_value: usize) -> u8611 fn plist_ref_size(max_value: usize) -> u8 {
612 let significant_bits = 64 - (max_value as u64).leading_zeros() as u8;
613 // Convert to number of bytes
614 let significant_bytes = (significant_bits + 7) / 8;
615 // Round up to the next integer byte size which must be power of two.
616 significant_bytes.next_power_of_two()
617 }
618
write_plist_ref( writer: &mut PosWriter<impl Write>, ref_size: u8, value: usize, ) -> Result<(), Error>619 fn write_plist_ref(
620 writer: &mut PosWriter<impl Write>,
621 ref_size: u8,
622 value: usize,
623 ) -> Result<(), Error> {
624 match ref_size {
625 1 => writer.write_exact(&[value as u8]),
626 2 => writer.write_exact(&(value as u16).to_be_bytes()),
627 4 => writer.write_exact(&(value as u32).to_be_bytes()),
628 8 => writer.write_exact(&(value as u64).to_be_bytes()),
629 _ => unreachable!("`ref_size` is a power of two less than or equal to 8"),
630 }
631 }
632
633 impl<W: Write> PosWriter<W> {
write_exact(&mut self, buf: &[u8]) -> Result<(), Error>634 fn write_exact(&mut self, buf: &[u8]) -> Result<(), Error> {
635 self.write_all(buf)
636 .map_err(error::from_io_without_position)?;
637 Ok(())
638 }
639 }
640
641 impl<W: Write> Write for PosWriter<W> {
write(&mut self, buf: &[u8]) -> io::Result<usize>642 fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
643 let count = self.writer.write(buf)?;
644 self.pos = self
645 .pos
646 .checked_add(count)
647 .expect("binary plist cannot be larger than `usize::max_value()` bytes");
648 Ok(count)
649 }
650
flush(&mut self) -> io::Result<()>651 fn flush(&mut self) -> io::Result<()> {
652 self.writer.flush()
653 }
654 }
655
656 impl ObjectRef {
zero() -> ObjectRef657 fn zero() -> ObjectRef {
658 ObjectRef(NonZeroUsize::new(1).unwrap())
659 }
660
clone_and_increment_self(&mut self) -> ObjectRef661 fn clone_and_increment_self(&mut self) -> ObjectRef {
662 let current = self.0;
663 self.0 = NonZeroUsize::new(current.get() + 1).unwrap();
664 ObjectRef(current)
665 }
666
value(&self) -> usize667 fn value(&self) -> usize {
668 self.0.get() - 1
669 }
670 }
671
impl<'a> Value<'a> {
    /// Converts any borrowed data or string payload into its owned equivalent so the value
    /// can be stored in the `values` map for the lifetime of the writer.
    fn into_owned(self) -> Value<'static> {
        match self {
            Value::Boolean(v) => Value::Boolean(v),
            Value::Data(v) => Value::Data(Cow::Owned(v.into_owned())),
            Value::Date(v) => Value::Date(v),
            Value::Integer(v) => Value::Integer(v),
            Value::Real(v) => Value::Real(v),
            Value::String(v) => Value::String(Cow::Owned(v.into_owned())),
            Value::Uid(v) => Value::Uid(v),
        }
    }

    /// The `EventKind` corresponding to this value, used when reporting errors.
    fn event_kind(&self) -> EventKind {
        match self {
            Value::Boolean(_) => EventKind::Boolean,
            Value::Data(_) => EventKind::Data,
            Value::Date(_) => EventKind::Date,
            Value::Integer(_) => EventKind::Integer,
            Value::Real(_) => EventKind::Real,
            Value::String(_) => EventKind::String,
            Value::Uid(_) => EventKind::Uid,
        }
    }
}
697
#[cfg(test)]
mod tests {
    use std::{fs::File, io::Cursor, path::Path};

    use crate::{stream::BinaryReader, Value};

    /// Decodes the plist at `path`, re-encodes it with the binary writer and checks that
    /// decoding the re-encoded bytes yields an identical `Value`.
    fn test_roundtrip(path: &Path) {
        let reader = File::open(path).unwrap();
        let streaming_parser = BinaryReader::new(reader);
        let value_to_encode = Value::from_events(streaming_parser).unwrap();

        let mut buf = Cursor::new(Vec::new());
        value_to_encode.to_writer_binary(&mut buf).unwrap();

        let buf_inner = buf.into_inner();

        let streaming_parser = BinaryReader::new(Cursor::new(buf_inner));

        let events: Vec<Result<_, _>> = streaming_parser.collect();
        let value_decoded_from_encode = Value::from_events(events.into_iter()).unwrap();

        assert_eq!(value_to_encode, value_decoded_from_encode);
    }

    // `Path::new` already returns a `&Path` so the previous `&Path::new(...)` borrows were
    // redundant (clippy::needless_borrow).
    #[test]
    fn bplist_roundtrip() {
        test_roundtrip(Path::new("./tests/data/binary.plist"))
    }

    #[test]
    fn utf16_roundtrip() {
        test_roundtrip(Path::new("./tests/data/utf16_bplist.plist"))
    }

    #[test]
    fn nskeyedarchiver_roundtrip() {
        test_roundtrip(Path::new("./tests/data/binary_NSKeyedArchiver.plist"))
    }
}
737