// Copyright 2017 Matt Brubeck. See the COPYRIGHT file at the top-level
// directory of this distribution and at http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! [`SmallBitVec`] is a bit vector, a vector of single-bit values stored compactly in memory.
//!
//! SmallBitVec grows dynamically, like the standard `Vec<T>` type. It can hold up to about one
//! word of bits inline (without a separate heap allocation). If the number of bits exceeds this
//! inline capacity, it will allocate a buffer on the heap.
//!
//! [`SmallBitVec`]: struct.SmallBitVec.html
//!
//! # Example
//!
//! ```
//! use smallbitvec::SmallBitVec;
//!
//! let mut v = SmallBitVec::new();
//! v.push(true);
//! v.push(false);
//!
//! assert_eq!(v[0], true);
//! assert_eq!(v[1], false);
//! ```

#![no_std]

extern crate alloc;

use alloc::{vec, vec::Vec, boxed::Box};

use core::cmp::max;
use core::fmt;
use core::hash;
use core::iter::{DoubleEndedIterator, ExactSizeIterator, FromIterator};
use core::mem::{forget, replace, size_of};
use core::ops::{Index, Range};
use core::slice;

/// Creates a [`SmallBitVec`] containing the arguments.
///
/// `sbvec!` allows `SmallBitVec`s to be defined with the same syntax as array expressions.
/// There are two forms of this macro:
///
/// - Create a [`SmallBitVec`] containing a given list of elements:
///
/// ```
/// # #[macro_use] extern crate smallbitvec;
/// # use smallbitvec::SmallBitVec;
/// # fn main() {
/// let v = sbvec![true, false, true];
/// assert_eq!(v[0], true);
/// assert_eq!(v[1], false);
/// assert_eq!(v[2], true);
/// # }
/// ```
///
/// - Create a [`SmallBitVec`] from a given element and size:
///
/// ```
/// # #[macro_use] extern crate smallbitvec;
/// # use smallbitvec::SmallBitVec;
/// # fn main() {
/// let v = sbvec![true; 3];
/// assert!(v.into_iter().eq(vec![true, true, true].into_iter()));
/// # }
/// ```
#[macro_export]
macro_rules! sbvec {
    ($elem:expr; $n:expr) => (
        $crate::SmallBitVec::from_elem($n, $elem)
    );
    ($($x:expr),*) => (
        [$($x),*].iter().cloned().collect::<$crate::SmallBitVec>()
    );
    ($($x:expr,)*) => (
        sbvec![$($x),*]
    );
}

// FIXME: replace this with `debug_assert!` when it’s usable in `const`:
// * https://github.com/rust-lang/rust/issues/49146
// * https://github.com/rust-lang/rust/issues/51999
macro_rules! const_debug_assert_le {
    ($left: ident <= $right: expr) => {
        #[cfg(debug_assertions)]
        // Causes an `index out of bounds` panic if `$left` is too large
        [(); $right + 1][$left];
    }
}

#[cfg(test)]
mod tests;

/// A resizable bit vector, optimized for size and inline storage.
///
/// `SmallBitVec` is exactly one word wide. Depending on the required capacity, this word
/// either stores the bits inline, or it stores a pointer to a separate buffer on the heap.
pub struct SmallBitVec {
    data: usize,
}

/// Total number of bits per word.
#[inline(always)]
const fn inline_bits() -> usize {
    size_of::<usize>() * 8
}

/// For an inline vector, all bits except two can be used as storage capacity:
///
/// - The rightmost bit is set to zero to signal an inline vector.
/// - The position of the rightmost nonzero bit encodes the length.
#[inline(always)]
const fn inline_capacity() -> usize {
    inline_bits() - 2
}

/// Left shift amount to access the nth bit
#[inline(always)]
const fn inline_shift(n: usize) -> usize {
    const_debug_assert_le!(n <= inline_capacity());
    // The storage starts at the leftmost bit.
    inline_bits() - 1 - n
}

/// An inline vector with the nth bit set.
#[inline(always)]
const fn inline_index(n: usize) -> usize {
    1 << inline_shift(n)
}

/// An inline vector with the leftmost `n` bits set.
#[inline(always)]
fn inline_ones(n: usize) -> usize {
    if n == 0 {
        0
    } else {
        !0 << (inline_bits() - n)
    }
}

/// If the rightmost bit of `data` is set, then the remaining bits of `data`
/// are a pointer to a heap allocation.
const HEAP_FLAG: usize = 1;

/// The allocation will contain a `Header` followed by a [Storage] buffer.
type Storage = usize;

/// The number of bits in one `Storage`.
#[inline(always)]
fn bits_per_storage() -> usize {
    size_of::<Storage>() * 8
}

/// Data stored at the start of the heap allocation.
///
/// `Header` must have the same alignment as `Storage`.
struct Header {
    /// The number of bits in this bit vector.
    len: Storage,

    /// The number of elements in the [usize] buffer that follows this header.
    buffer_len: Storage,
}

impl Header {
    /// Create a heap allocation with enough space for a header,
    /// plus a buffer of at least `cap` bits, each initialized to `val`.
    fn new(cap: usize, len: usize, val: bool) -> *mut Header {
        let alloc_len = header_len() + buffer_len(cap);
        let init = if val { !0 } else { 0 };

        let v: Vec<Storage> = vec![init; alloc_len];

        let buffer_len = v.capacity() - header_len();
        let header_ptr = v.as_ptr() as *mut Header;

        forget(v);

        unsafe {
            (*header_ptr).len = len;
            (*header_ptr).buffer_len = buffer_len;
        }
        header_ptr
    }
}

/// The number of `Storage` elements to allocate to hold a header.
#[inline(always)]
fn header_len() -> usize {
    size_of::<Header>() / size_of::<Storage>()
}

/// The minimum number of `Storage` elements to hold at least `cap` bits.
#[inline(always)]
fn buffer_len(cap: usize) -> usize {
    (cap + bits_per_storage() - 1) / bits_per_storage()
}

/// A typed representation of a `SmallBitVec`'s internal storage.
///
/// The layout of the data inside both enum variants is a private implementation detail.
pub enum InternalStorage {
    /// The internal representation of a `SmallBitVec` that has not spilled to a
    /// heap allocation.
    Inline(usize),

    /// The contents of the heap allocation of a spilled `SmallBitVec`.
    Spilled(Box<[usize]>),
}

impl SmallBitVec {
    /// Create an empty vector.
    #[inline]
    pub const fn new() -> SmallBitVec {
        SmallBitVec {
            data: inline_index(0),
        }
    }

    /// Create a vector containing `len` bits, each set to `val`.
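    ///
    /// A short usage sketch:
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let v = SmallBitVec::from_elem(100, true);
    /// assert_eq!(v.len(), 100);
    /// assert!(v.all_true());
    /// ```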
    #[inline]
    pub fn from_elem(len: usize, val: bool) -> SmallBitVec {
        if len <= inline_capacity() {
            return SmallBitVec {
                data: if val {
                    inline_ones(len + 1)
                } else {
                    inline_index(len)
                },
            };
        }
        let header_ptr = Header::new(len, len, val);
        SmallBitVec {
            data: (header_ptr as usize) | HEAP_FLAG,
        }
    }

    /// Create an empty vector with enough storage pre-allocated to store at least `cap` bits
    /// without resizing.
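    ///
    /// For example:
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let v = SmallBitVec::with_capacity(100);
    /// assert!(v.is_empty());
    /// assert!(v.capacity() >= 100);
    /// ```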
    #[inline]
    pub fn with_capacity(cap: usize) -> SmallBitVec {
        // Use inline storage if possible.
        if cap <= inline_capacity() {
            return SmallBitVec::new();
        }

        // Otherwise, allocate on the heap.
        let header_ptr = Header::new(cap, 0, false);
        SmallBitVec {
            data: (header_ptr as usize) | HEAP_FLAG,
        }
    }

    /// The number of bits stored in this bit vector.
    #[inline]
    pub fn len(&self) -> usize {
        if self.is_inline() {
            // The rightmost nonzero bit is a sentinel. All bits to the left of
            // the sentinel bit are the elements of the bit vector.
            inline_bits() - self.data.trailing_zeros() as usize - 1
        } else {
            self.header().len
        }
    }

    /// Returns `true` if this vector contains no bits.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// The number of bits that can be stored in this bit vector without re-allocating.
    #[inline]
    pub fn capacity(&self) -> usize {
        if self.is_inline() {
            inline_capacity()
        } else {
            self.header().buffer_len * bits_per_storage()
        }
    }

    /// Get the nth bit in this bit vector.
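    ///
    /// Returns `None` if the index is out of bounds:
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let mut v = SmallBitVec::new();
    /// v.push(true);
    /// assert_eq!(v.get(0), Some(true));
    /// assert_eq!(v.get(1), None);
    /// ```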
    #[inline]
    pub fn get(&self, n: usize) -> Option<bool> {
        if n < self.len() {
            Some(unsafe { self.get_unchecked(n) })
        } else {
            None
        }
    }

    /// Get the last bit in this bit vector.
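    ///
    /// Returns `None` if the vector is empty:
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let mut v = SmallBitVec::new();
    /// assert_eq!(v.last(), None);
    /// v.push(false);
    /// v.push(true);
    /// assert_eq!(v.last(), Some(true));
    /// ```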
    #[inline]
    pub fn last(&self) -> Option<bool> {
        self.len().checked_sub(1).map(|n| unsafe { self.get_unchecked(n) })
    }

    /// Get the nth bit in this bit vector, without bounds checks.
    #[inline]
    pub unsafe fn get_unchecked(&self, n: usize) -> bool {
        if self.is_inline() {
            self.data & inline_index(n) != 0
        } else {
            let buffer = self.buffer();
            let i = n / bits_per_storage();
            let offset = n % bits_per_storage();
            *buffer.get_unchecked(i) & (1 << offset) != 0
        }
    }

    /// Set the nth bit in this bit vector to `val`. Panics if the index is out of bounds.
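    ///
    /// A short example:
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let mut v = SmallBitVec::from_elem(3, false);
    /// v.set(1, true);
    /// assert_eq!(v.get(1), Some(true));
    /// assert_eq!(v.get(0), Some(false));
    /// ```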
    #[inline]
    pub fn set(&mut self, n: usize, val: bool) {
        assert!(n < self.len(), "Index {} out of bounds", n);
        unsafe {
            self.set_unchecked(n, val);
        }
    }

    /// Set the nth bit in this bit vector to `val`, without bounds checks.
    #[inline]
    pub unsafe fn set_unchecked(&mut self, n: usize, val: bool) {
        if self.is_inline() {
            if val {
                self.data |= inline_index(n);
            } else {
                self.data &= !inline_index(n);
            }
        } else {
            let buffer = self.buffer_mut();
            let i = n / bits_per_storage();
            let offset = n % bits_per_storage();
            if val {
                *buffer.get_unchecked_mut(i) |= 1 << offset;
            } else {
                *buffer.get_unchecked_mut(i) &= !(1 << offset);
            }
        }
    }

    /// Append a bit to the end of the vector.
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    /// let mut v = SmallBitVec::new();
    /// v.push(true);
    ///
    /// assert_eq!(v.len(), 1);
    /// assert_eq!(v.get(0), Some(true));
    /// ```
    #[inline]
    pub fn push(&mut self, val: bool) {
        let idx = self.len();
        if idx == self.capacity() {
            self.reserve(1);
        }
        unsafe {
            self.set_len(idx + 1);
            self.set_unchecked(idx, val);
        }
    }

    /// Remove the last bit from the vector and return it, if there is one.
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    /// let mut v = SmallBitVec::new();
    /// v.push(false);
    ///
    /// assert_eq!(v.pop(), Some(false));
    /// assert_eq!(v.len(), 0);
    /// assert_eq!(v.pop(), None);
    /// ```
    #[inline]
    pub fn pop(&mut self) -> Option<bool> {
        self.len().checked_sub(1).map(|last| unsafe {
            let val = self.get_unchecked(last);
            self.set_len(last);
            val
        })
    }

    /// Remove and return the bit at index `idx`, shifting all later bits toward the front.
    ///
    /// Panics if the index is out of bounds.
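    ///
    /// For example:
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let mut v = SmallBitVec::new();
    /// v.push(false);
    /// v.push(true);
    /// v.push(false);
    ///
    /// assert_eq!(v.remove(1), true);
    /// assert_eq!(v.len(), 2);
    /// assert_eq!(v.get(1), Some(false));
    /// ```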
    #[inline]
    pub fn remove(&mut self, idx: usize) -> bool {
        let len = self.len();
        let val = self[idx];

        if self.is_inline() {
            // Shift later bits, including the length bit, toward the front.
            let mask = !inline_ones(idx);
            let new_vals = (self.data & mask) << 1;
            self.data = (self.data & !mask) | (new_vals & mask);
        } else {
            let first = idx / bits_per_storage();
            let offset = idx % bits_per_storage();
            let count = buffer_len(len);
            {
                // Shift bits within the first storage block.
                let buf = self.buffer_mut();
                let mask = !0 << offset;
                let new_vals = (buf[first] & mask) >> 1;
                buf[first] = (buf[first] & !mask) | (new_vals & mask);
            }
            // Shift bits in subsequent storage blocks.
            for i in (first + 1)..count {
                // Move the first bit into the previous block.
                let bit_idx = i * bits_per_storage();
                unsafe {
                    let first_bit = self.get_unchecked(bit_idx);
                    self.set_unchecked(bit_idx - 1, first_bit);
                }
                // Shift the remaining bits.
                self.buffer_mut()[i] >>= 1;
            }
            // Decrement the length.
            unsafe {
                self.set_len(len - 1);
            }
        }
        val
    }

    /// Remove all elements from the vector, without deallocating its buffer.
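    ///
    /// The capacity is preserved:
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let mut v = SmallBitVec::from_elem(100, true);
    /// let cap = v.capacity();
    /// v.clear();
    /// assert!(v.is_empty());
    /// assert_eq!(v.capacity(), cap);
    /// ```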
    #[inline]
    pub fn clear(&mut self) {
        unsafe {
            self.set_len(0);
        }
    }

    /// Reserve capacity for at least `additional` more elements to be inserted.
    ///
    /// May reserve more space than requested, to avoid frequent reallocations.
    ///
    /// Panics if the new capacity overflows `usize`.
    ///
    /// Re-allocates only if `self.capacity() < self.len() + additional`.
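    ///
    /// A quick sketch:
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let mut v = SmallBitVec::new();
    /// v.reserve(100);
    /// assert!(v.capacity() >= 100);
    /// assert!(v.is_empty());
    /// ```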
    #[inline]
    pub fn reserve(&mut self, additional: usize) {
        let old_cap = self.capacity();
        let new_cap = self.len()
            .checked_add(additional)
            .expect("capacity overflow");
        if new_cap <= old_cap {
            return;
        }
        // Ensure the new capacity is at least double, to guarantee exponential growth.
        let double_cap = old_cap.saturating_mul(2);
        self.reallocate(max(new_cap, double_cap));
    }

    /// Set the length of the vector. The length must not exceed the capacity.
    ///
    /// If this makes the vector longer, then the values of its new elements
    /// are not specified.
    #[inline]
    unsafe fn set_len(&mut self, len: usize) {
        debug_assert!(len <= self.capacity());
        if self.is_inline() {
            let sentinel = inline_index(len);
            let mask = !(sentinel - 1);
            self.data |= sentinel;
            self.data &= mask;
        } else {
            self.header_mut().len = len;
        }
    }

    /// Returns an iterator that yields the bits of the vector in order, as `bool` values.
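    ///
    /// For example:
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let mut v = SmallBitVec::new();
    /// v.push(true);
    /// v.push(false);
    ///
    /// let bits: Vec<bool> = v.iter().collect();
    /// assert_eq!(bits, vec![true, false]);
    /// ```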
    #[inline]
    pub fn iter(&self) -> Iter {
        Iter {
            vec: self,
            range: 0..self.len(),
        }
    }

    /// Returns an immutable view of a range of bits from this vec.
    ///
    /// ```
    /// #[macro_use] extern crate smallbitvec;
    /// let v = sbvec![true, false, true];
    /// let r = v.range(1..3);
    /// assert_eq!(r[1], true);
    /// ```
    #[inline]
    pub fn range(&self, range: Range<usize>) -> VecRange {
        assert!(range.end <= self.len(), "range out of bounds");
        VecRange { vec: &self, range }
    }

    /// Returns true if all the bits in the vec are set to zero/false.
    ///
    /// On an empty vector, returns true.
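    ///
    /// A short example:
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let mut v = SmallBitVec::from_elem(10, false);
    /// assert!(v.all_false());
    /// v.set(3, true);
    /// assert!(!v.all_false());
    /// ```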
    #[inline]
    pub fn all_false(&self) -> bool {
        let mut len = self.len();
        if len == 0 {
            return true;
        }

        if self.is_inline() {
            let mask = inline_ones(len);
            self.data & mask == 0
        } else {
            for &storage in self.buffer() {
                if len >= bits_per_storage() {
                    if storage != 0 {
                        return false;
                    }
                    len -= bits_per_storage();
                } else {
                    let mask = (1 << len) - 1;
                    if storage & mask != 0 {
                        return false;
                    }
                    break;
                }
            }
            true
        }
    }

    /// Returns true if all the bits in the vec are set to one/true.
    ///
    /// On an empty vector, returns true.
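    ///
    /// For instance:
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let mut v = SmallBitVec::from_elem(10, true);
    /// assert!(v.all_true());
    /// v.set(0, false);
    /// assert!(!v.all_true());
    /// ```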
    #[inline]
    pub fn all_true(&self) -> bool {
        let mut len = self.len();
        if len == 0 {
            return true;
        }

        if self.is_inline() {
            let mask = inline_ones(len);
            self.data & mask == mask
        } else {
            for &storage in self.buffer() {
                if len >= bits_per_storage() {
                    if storage != !0 {
                        return false;
                    }
                    len -= bits_per_storage();
                } else {
                    let mask = (1 << len) - 1;
                    if storage & mask != mask {
                        return false;
                    }
                    break;
                }
            }
            true
        }
    }

    /// Shorten the vector, keeping the first `len` elements and dropping the rest.
    ///
    /// If `len` is greater than or equal to the vector's current length, this has no
    /// effect.
    ///
    /// This does not re-allocate.
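    ///
    /// For example:
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let mut v = SmallBitVec::from_elem(5, true);
    /// v.truncate(2);
    /// assert_eq!(v.len(), 2);
    /// v.truncate(8); // longer than the current length: no effect
    /// assert_eq!(v.len(), 2);
    /// ```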
    pub fn truncate(&mut self, len: usize) {
        unsafe {
            if len < self.len() {
                self.set_len(len);
            }
        }
    }

    /// Resizes the vector so that its length is equal to `len`.
    ///
    /// If `len` is less than the current length, the vector is simply truncated.
    ///
    /// If `len` is greater than the current length, `value` is appended to the
    /// vector until its length equals `len`.
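    ///
    /// A small usage sketch:
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let mut v = SmallBitVec::from_elem(2, false);
    /// v.resize(5, true);
    /// assert_eq!(v.len(), 5);
    /// assert_eq!(v.get(1), Some(false));
    /// assert_eq!(v.get(4), Some(true));
    ///
    /// v.resize(1, false);
    /// assert_eq!(v.len(), 1);
    /// ```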
    pub fn resize(&mut self, len: usize, value: bool) {
        let old_len = self.len();

        if len > old_len {
            unsafe {
                self.reallocate(len);
                self.set_len(len);
                for i in old_len..len {
                    self.set(i, value);
                }
            }
        } else {
            self.truncate(len);
        }
    }

    /// Resize the vector to have capacity for at least `cap` bits.
    ///
    /// `cap` must be at least as large as the length of the vector.
    fn reallocate(&mut self, cap: usize) {
        let old_cap = self.capacity();
        if cap <= old_cap {
            return;
        }
        assert!(self.len() <= cap);

        if self.is_heap() {
            let old_buffer_len = self.header().buffer_len;
            let new_buffer_len = buffer_len(cap);

            let old_alloc_len = header_len() + old_buffer_len;
            let new_alloc_len = header_len() + new_buffer_len;

            let old_ptr = self.header_raw() as *mut Storage;
            let mut v = unsafe { Vec::from_raw_parts(old_ptr, old_alloc_len, old_alloc_len) };
            v.resize(new_alloc_len, 0);
            v.shrink_to_fit();
            self.data = v.as_ptr() as usize | HEAP_FLAG;
            forget(v);

            self.header_mut().buffer_len = new_buffer_len;
        } else {
            let old_self = replace(self, SmallBitVec::with_capacity(cap));
            unsafe {
                self.set_len(old_self.len());
                for i in 0..old_self.len() {
                    self.set_unchecked(i, old_self.get_unchecked(i));
                }
            }
        }
    }

    /// If the vector owns a heap allocation, returns a pointer to the start of the allocation.
    ///
    /// The layout of the data at this allocation is a private implementation detail.
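    ///
    /// Returns `None` while the bits are stored inline:
    ///
    /// ```
    /// use smallbitvec::SmallBitVec;
    ///
    /// let small = SmallBitVec::new();
    /// assert!(small.heap_ptr().is_none());
    ///
    /// let big = SmallBitVec::from_elem(200, false);
    /// assert!(big.heap_ptr().is_some());
    /// ```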
    #[inline]
    pub fn heap_ptr(&self) -> Option<*const usize> {
        if self.is_heap() {
            Some((self.data & !HEAP_FLAG) as *const Storage)
        } else {
            None
        }
    }

    /// Converts this `SmallBitVec` into its internal representation.
    ///
    /// The layout of the data inside both enum variants is a private implementation detail.
    #[inline]
    pub fn into_storage(self) -> InternalStorage {
        if self.is_heap() {
            let alloc_len = header_len() + self.header().buffer_len;
            let ptr = self.header_raw() as *mut Storage;
            let slice = unsafe { Box::from_raw(slice::from_raw_parts_mut(ptr, alloc_len)) };
            forget(self);
            InternalStorage::Spilled(slice)
        } else {
            InternalStorage::Inline(self.data)
        }
    }

    /// Creates a `SmallBitVec` directly from the internal storage of another
    /// `SmallBitVec`.
    ///
    /// # Safety
    ///
    /// This is highly unsafe. `storage` needs to have been previously generated
    /// via `SmallBitVec::into_storage` (at least, it's highly likely to be
    /// incorrect if it wasn't). Violating this may cause problems like corrupting the
    /// allocator's internal data structures.
    ///
    /// # Examples
    ///
    /// ```
    /// # use smallbitvec::{InternalStorage, SmallBitVec};
    ///
    /// fn main() {
    ///     let v = SmallBitVec::from_elem(200, false);
    ///
    ///     // Get the internal representation of the SmallBitVec.
    ///     // This transfers ownership of any heap allocation to the storage.
    ///     let storage = v.into_storage();
    ///
    ///     // Make a copy of the SmallBitVec's data.
    ///     let cloned_storage = match storage {
    ///         InternalStorage::Spilled(vs) => InternalStorage::Spilled(vs.clone()),
    ///         inline => inline,
    ///     };
    ///
    ///     // Create a new SmallBitVec from the copied storage.
    ///     let v = unsafe { SmallBitVec::from_storage(cloned_storage) };
    /// }
    /// ```
    pub unsafe fn from_storage(storage: InternalStorage) -> SmallBitVec {
        match storage {
            InternalStorage::Inline(data) => SmallBitVec { data },
            InternalStorage::Spilled(vs) => {
                let ptr = Box::into_raw(vs);
                SmallBitVec {
                    data: (ptr as *mut usize as usize) | HEAP_FLAG,
                }
            }
        }
    }

    /// If the rightmost bit of `data` is zero, the bits are stored inline.
    #[inline]
    fn is_inline(&self) -> bool {
        self.data & HEAP_FLAG == 0
    }

    /// Otherwise (the rightmost bit is set), `data` is a pointer to a heap allocation.
    #[inline]
    fn is_heap(&self) -> bool {
        !self.is_inline()
    }

    /// Get the header of a heap-allocated vector.
    #[inline]
    fn header_raw(&self) -> *mut Header {
        assert!(self.is_heap());
        (self.data & !HEAP_FLAG) as *mut Header
    }

    #[inline]
    fn header_mut(&mut self) -> &mut Header {
        unsafe { &mut *self.header_raw() }
    }

    #[inline]
    fn header(&self) -> &Header {
        unsafe { &*self.header_raw() }
    }

    /// Get the buffer of a heap-allocated vector.
    #[inline]
    fn buffer_raw(&self) -> *mut [Storage] {
        unsafe {
            let header_ptr = self.header_raw();
            let buffer_len = (*header_ptr).buffer_len;
            let buffer_ptr = (header_ptr as *mut Storage)
                .offset((size_of::<Header>() / size_of::<Storage>()) as isize);
            slice::from_raw_parts_mut(buffer_ptr, buffer_len)
        }
    }

    #[inline]
    fn buffer_mut(&mut self) -> &mut [Storage] {
        unsafe { &mut *self.buffer_raw() }
    }

    #[inline]
    fn buffer(&self) -> &[Storage] {
        unsafe { &*self.buffer_raw() }
    }
}

// Trait implementations:

impl fmt::Debug for SmallBitVec {
    #[inline]
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_list()
            .entries(self.iter().map(|b| b as u8))
            .finish()
    }
}

impl Default for SmallBitVec {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl PartialEq for SmallBitVec {
    fn eq(&self, other: &Self) -> bool {
        // Compare by inline representation
        if self.is_inline() && other.is_inline() {
            return self.data == other.data;
        }

        let len = self.len();
        if len != other.len() {
            return false;
        }

        // Compare by heap representation
        if self.is_heap() && other.is_heap() {
            let buf0 = self.buffer();
            let buf1 = other.buffer();

            let full_blocks = len / bits_per_storage();
            let remainder = len % bits_per_storage();

            if buf0[..full_blocks] != buf1[..full_blocks] {
                return false;
            }

            if remainder != 0 {
                let mask = (1 << remainder) - 1;
                if buf0[full_blocks] & mask != buf1[full_blocks] & mask {
                    return false;
                }
            }
            return true;
        }

        // Representations differ; fall back to bit-by-bit comparison
        Iterator::eq(self.iter(), other.iter())
    }
}

impl Eq for SmallBitVec {}

impl Drop for SmallBitVec {
    fn drop(&mut self) {
        if self.is_heap() {
            unsafe {
                // Reconstruct a `Vec` covering the whole allocation; dropping it
                // frees the heap buffer.
                let header_ptr = self.header_raw();
                let alloc_ptr = header_ptr as *mut Storage;
                let alloc_len = header_len() + (*header_ptr).buffer_len;
                Vec::from_raw_parts(alloc_ptr, alloc_len, alloc_len);
            }
        }
    }
}

impl Clone for SmallBitVec {
    fn clone(&self) -> Self {
        if self.is_inline() {
            return SmallBitVec { data: self.data };
        }

        let buffer_len = self.header().buffer_len;
        let alloc_len = header_len() + buffer_len;
        let ptr = self.header_raw() as *mut Storage;
        let raw_allocation = unsafe { slice::from_raw_parts(ptr, alloc_len) };

        let v = raw_allocation.to_vec();
        let header_ptr = v.as_ptr() as *mut Header;
        forget(v);
        SmallBitVec {
            data: (header_ptr as usize) | HEAP_FLAG,
        }
    }
}

impl Index<usize> for SmallBitVec {
    type Output = bool;

    #[inline(always)]
    fn index(&self, i: usize) -> &bool {
        assert!(i < self.len(), "index out of range");
        if self.get(i).unwrap() {
            &true
        } else {
            &false
        }
    }
}

impl hash::Hash for SmallBitVec {
    #[inline]
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        let len = self.len();
        len.hash(state);
        if self.is_inline() {
            reverse_bits(self.data & inline_ones(len)).hash(state);
        } else {
            let full_blocks = len / bits_per_storage();
            let remainder = len % bits_per_storage();
            let buffer = self.buffer();
            if full_blocks != 0 {
                buffer[..full_blocks].hash(state);
            }
            if remainder != 0 {
                let mask = (1 << remainder) - 1;
                (buffer[full_blocks] & mask).hash(state);
            }
        }
    }
}

impl Extend<bool> for SmallBitVec {
    #[inline]
    fn extend<I: IntoIterator<Item = bool>>(&mut self, iter: I) {
        let iter = iter.into_iter();

        let (min, _) = iter.size_hint();
        assert!(min <= usize::max_value(), "capacity overflow");
        self.reserve(min);

        for element in iter {
            self.push(element)
        }
    }
}

impl FromIterator<bool> for SmallBitVec {
    #[inline]
    fn from_iter<I: IntoIterator<Item = bool>>(iter: I) -> Self {
        let mut v = SmallBitVec::new();
        v.extend(iter);
        v
    }
}

impl IntoIterator for SmallBitVec {
    type Item = bool;
    type IntoIter = IntoIter;

    #[inline]
    fn into_iter(self) -> IntoIter {
        IntoIter {
            range: 0..self.len(),
            vec: self,
        }
    }
}

impl<'a> IntoIterator for &'a SmallBitVec {
    type Item = bool;
    type IntoIter = Iter<'a>;

    #[inline]
    fn into_iter(self) -> Iter<'a> {
        self.iter()
    }
}

/// An iterator that owns a SmallBitVec and yields its bits as `bool` values.
///
/// Returned from [`SmallBitVec::into_iter`][1].
///
/// [1]: struct.SmallBitVec.html#method.into_iter
pub struct IntoIter {
    vec: SmallBitVec,
    range: Range<usize>,
}

impl Iterator for IntoIter {
    type Item = bool;

    #[inline]
    fn next(&mut self) -> Option<bool> {
        self.range
            .next()
            .map(|i| unsafe { self.vec.get_unchecked(i) })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.range.size_hint()
    }
}

impl DoubleEndedIterator for IntoIter {
    #[inline]
    fn next_back(&mut self) -> Option<bool> {
        self.range
            .next_back()
            .map(|i| unsafe { self.vec.get_unchecked(i) })
    }
}

impl ExactSizeIterator for IntoIter {}

/// An iterator that borrows a SmallBitVec and yields its bits as `bool` values.
///
/// Returned from [`SmallBitVec::iter`][1].
///
/// [1]: struct.SmallBitVec.html#method.iter
pub struct Iter<'a> {
    vec: &'a SmallBitVec,
    range: Range<usize>,
}

impl<'a> Default for Iter<'a> {
    #[inline]
    fn default() -> Self {
        const EMPTY: &'static SmallBitVec = &SmallBitVec::new();
        Self {
            vec: EMPTY,
            range: 0..0,
        }
    }
}

impl<'a> Iterator for Iter<'a> {
    type Item = bool;

    #[inline]
    fn next(&mut self) -> Option<bool> {
        self.range
            .next()
            .map(|i| unsafe { self.vec.get_unchecked(i) })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.range.size_hint()
    }
}

impl<'a> DoubleEndedIterator for Iter<'a> {
    #[inline]
    fn next_back(&mut self) -> Option<bool> {
        self.range
            .next_back()
            .map(|i| unsafe { self.vec.get_unchecked(i) })
    }
}

impl<'a> ExactSizeIterator for Iter<'a> {}

/// An immutable view of a range of bits from a borrowed SmallBitVec.
///
/// Returned from [`SmallBitVec::range`][1].
///
/// [1]: struct.SmallBitVec.html#method.range
#[derive(Debug, Clone)]
pub struct VecRange<'a> {
    vec: &'a SmallBitVec,
    range: Range<usize>,
}

impl<'a> VecRange<'a> {
    #[inline]
    pub fn iter(&self) -> Iter<'a> {
        Iter {
            vec: self.vec,
            range: self.range.clone(),
        }
    }
}

impl<'a> Index<usize> for VecRange<'a> {
    type Output = bool;

    #[inline]
    fn index(&self, i: usize) -> &bool {
        let vec_i = i + self.range.start;
        assert!(vec_i < self.range.end, "index out of range");
        &self.vec[vec_i]
    }
}

fn reverse_bits(mut value: usize) -> usize {
    let mut result = 0;
    for _ in 0..inline_bits() {
        result <<= 1;
        result |= value & 1;
        value >>= 1;
    }
    result
}