1 use super::header::{header, header_mut, Header, HDR};
2 use super::*;
3
4 /// We'd love to share this trait with the sized implementation, but
5 /// unfortunately, the type parameters of almost all methods are
6 /// different.
/// We'd love to share this trait with the sized implementation, but
/// unfortunately, the type parameters of almost all methods are
/// different.
pub(crate) trait Alloc {
    /// Size in bytes of one slot of the offset array: 2 in leaves
    /// (`u16`), 8 in internal nodes (`u64`).
    const OFFSET_SIZE: usize;

    /// Size (including the offset size) of the entry at position
    /// `cur`.
    fn current_size<K: UnsizedStorable + ?Sized, V: UnsizedStorable + ?Sized>(
        page: crate::Page,
        cur: isize,
    ) -> usize;

    /// Can we allocate an entry of `size` bytes, where `size` doesn't
    /// include the offset bytes?
    fn can_alloc(hdr: &Header, size: usize, n: usize) -> bool {
        // The header, the current offset array extended by `n` new
        // slots, plus the entry itself must all fit below
        // `hdr.data()`, the start of the data region (data grows
        // downwards from the end of the page).
        (HDR as usize) + (hdr.n() as usize) * Self::OFFSET_SIZE + n * Self::OFFSET_SIZE + size
            <= hdr.data() as usize
    }

    /// If we cloned the page, could we allocate an entry of `size`
    /// bytes, where `size` doesn't include the offset bytes?
    fn can_compact(hdr: &Header, size: isize, n: usize) -> bool {
        // NOTE(review): `hdr.left_page() & 0xfff` is taken here as the
        // number of live data bytes that would survive compaction
        // (the 12 LSBs of the left-page word) — confirm against the
        // header module's packing if that layout ever changes.
        (HDR as isize)
            + ((hdr.left_page() & 0xfff) as isize)
            + (n * Self::OFFSET_SIZE) as isize
            + size
            <= PAGE_SIZE as isize
    }

    /// Set the right child of the `n`th entry to `r`. If `n == -1`,
    /// this method can be used to set the leftmost child of a page.
    /// The default is a no-op, used by leaves (which have no children).
    fn set_right_child(_new: &mut MutPage, _n: isize, _r: u64) {}

    /// Set the n^th offset on the page to `r`, which encodes a page
    /// offset in its 52 MSBs, and an offset on the page in its 12 LSBs.
    fn set_offset(new: &mut MutPage, n: isize, r: u64);

    /// The type of offsets, u64 in internal nodes, u16 in leaves.
    type Offset: Into<(u64, usize)> + Copy + core::fmt::Debug;

    /// The offset array of `page`, viewed as a typed slice of
    /// `Self::Offset`.
    fn offset_slice<'a>(page: crate::Page<'a>) -> Offsets<'a, Self::Offset>;
}
47
/// A typed view of a page's offset array: `A` is `LeafOffset` (`u16`)
/// for leaves and `InternalOffset` (`u64`) for internal nodes.
#[derive(Debug, Clone, Copy)]
pub struct Offsets<'a, A>(pub &'a [A]);
50
51 impl<'a, A: Copy> Offsets<'a, A> {
split_at(self, mid: usize) -> (Self, Self)52 pub fn split_at(self, mid: usize) -> (Self, Self) {
53 if mid < self.0.len() {
54 let (a, b) = self.0.split_at(mid);
55 (Offsets(a), Offsets(b))
56 } else {
57 debug_assert_eq!(mid, self.0.len());
58 (self, Offsets(&[][..]))
59 }
60 }
61 }
62
/// Marker type selecting the leaf-page layout (2-byte offsets, no
/// child pointers).
pub struct Leaf {}
/// Marker type selecting the internal-page layout (8-byte words
/// packing a child page and an in-page offset).
pub struct Internal {}
65
impl Alloc for Leaf {
    // Leaf offsets are little-endian `u16`s.
    const OFFSET_SIZE: usize = 2;

    // Note: the size returned by this function includes the offset.
    fn current_size<K: UnsizedStorable + ?Sized, V: UnsizedStorable + ?Sized>(
        page: crate::Page,
        cur: isize,
    ) -> usize {
        // Find the offset of the current position, get its size.
        unsafe {
            debug_assert!(cur >= 0);
            // Read the `cur`th little-endian u16 of the offset array,
            // which starts right after the header.
            let off = *(page.data.as_ptr().add(HDR + 2 * cur as usize) as *const u16);
            Self::OFFSET_SIZE
                + entry_size::<K, V>(page.data.as_ptr().add(u16::from_le(off) as usize))
        }
    }

    /// Store `off` as a little-endian u16 in slot `n`. The truncation
    /// to u16 drops the child-page bits, which leaves don't carry.
    fn set_offset(new: &mut MutPage, n: isize, off: u64) {
        unsafe {
            let ptr = new.0.data.offset(HDR as isize + n * 2) as *mut u16;
            *ptr = (off as u16).to_le();
        }
    }

    type Offset = LeafOffset;

    /// View the `n()` u16 slots following the header as a
    /// `LeafOffset` slice.
    fn offset_slice<'a>(page: crate::Page<'a>) -> Offsets<'a, Self::Offset> {
        let hdr_n = header(page).n();
        unsafe {
            Offsets(core::slice::from_raw_parts(
                page.data.as_ptr().add(HDR) as *const LeafOffset,
                hdr_n as usize,
            ))
        }
    }
}
102
impl Alloc for Internal {
    // Internal offsets are little-endian `u64`s: child page in the 52
    // MSBs, in-page entry offset in the 12 LSBs.
    const OFFSET_SIZE: usize = 8;

    /// Size (including the 8-byte offset word) of the entry at `cur`:
    /// mask off the child bits to recover the in-page offset, then
    /// measure the entry stored there.
    fn current_size<K: UnsizedStorable + ?Sized, V: UnsizedStorable + ?Sized>(
        page: crate::Page,
        cur: isize,
    ) -> usize {
        unsafe {
            8 + entry_size::<K, V>(page.data.as_ptr().add(
                (u64::from_le(*(page.data.as_ptr().add(HDR) as *const u64).offset(cur)) & 0xfff)
                    as usize,
            ))
        }
    }

    /// Set the right-hand child in the offset array, preserving the
    /// current offset.
    fn set_right_child(page: &mut MutPage, n: isize, r: u64) {
        unsafe {
            // `r` must be 4 KiB-aligned: its 12 LSBs are reserved for
            // the in-page offset.
            debug_assert_eq!(r & 0xfff, 0);
            let ptr = page.0.data.offset(HDR as isize + n * 8) as *mut u64;
            // Keep only the in-page offset from the old word, then
            // splice in the new child.
            let off = u64::from_le(*ptr) & 0xfff;
            *ptr = (r | off as u64).to_le();
        }
    }

    /// Overwrite slot `n` with the full (child | offset) word.
    fn set_offset(new: &mut MutPage, n: isize, off: u64) {
        unsafe {
            let ptr = new.0.data.offset(HDR as isize + n * 8) as *mut u64;
            *ptr = off.to_le();
        }
    }

    type Offset = InternalOffset;

    /// View the `n()` u64 words following the header as an
    /// `InternalOffset` slice.
    fn offset_slice<'a>(page: crate::Page<'a>) -> Offsets<'a, Self::Offset> {
        let hdr_n = header(page).n() as usize;
        unsafe {
            Offsets(core::slice::from_raw_parts(
                page.data.as_ptr().add(HDR) as *const InternalOffset,
                hdr_n,
            ))
        }
    }
}
147
/// A leaf slot: a bare in-page byte offset. Leaves have no child
/// pages, so the page component of the decoded pair is always 0.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct LeafOffset(u16);

// Implementing `From` instead of `Into` is the idiomatic direction
// (clippy::from_over_into): callers still get `Into` for free through
// the standard blanket impl, so the `Alloc::Offset` bound holds.
impl From<LeafOffset> for (u64, usize) {
    /// Decode as `(child_page, in_page_offset)`; the child is 0.
    fn from(o: LeafOffset) -> (u64, usize) {
        (0, o.0 as usize)
    }
}

impl From<LeafOffset> for usize {
    /// The in-page byte offset of the entry.
    fn from(o: LeafOffset) -> usize {
        o.0 as usize
    }
}
163
/// An internal slot: a packed word with the child page in the 52 MSBs
/// and the in-page byte offset of the entry in the 12 LSBs.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct InternalOffset(u64);

// `From` rather than `Into` (clippy::from_over_into); the blanket
// impl supplies `Into`, so the `Alloc::Offset` bound is unaffected.
impl From<InternalOffset> for (u64, usize) {
    /// Decode as `(child_page, in_page_offset)` by splitting the word
    /// at bit 12.
    fn from(o: InternalOffset) -> (u64, usize) {
        (o.0 & !0xfff, (o.0 & 0xfff) as usize)
    }
}

impl From<InternalOffset> for usize {
    /// The raw packed word, widened to `usize` (child bits included).
    fn from(o: InternalOffset) -> usize {
        o.0 as usize
    }
}
179
180 impl<K: UnsizedStorable + ?Sized, V: UnsizedStorable + ?Sized> AllocWrite<K, V> for Leaf {
alloc_write(new: &mut MutPage, k0: &K, v0: &V, l: u64, r: u64, n: &mut isize)181 fn alloc_write(new: &mut MutPage, k0: &K, v0: &V, l: u64, r: u64, n: &mut isize) {
182 alloc_write::<K, V, Self>(new, k0, v0, l, r, n)
183 }
set_left_child(_new: &mut MutPage, _n: isize, l: u64)184 fn set_left_child(_new: &mut MutPage, _n: isize, l: u64) {
185 debug_assert_eq!(l, 0);
186 }
187 }
188
189 impl<K: UnsizedStorable + ?Sized, V: UnsizedStorable + ?Sized> AllocWrite<K, V> for Internal {
alloc_write(new: &mut MutPage, k0: &K, v0: &V, l: u64, r: u64, n: &mut isize)190 fn alloc_write(new: &mut MutPage, k0: &K, v0: &V, l: u64, r: u64, n: &mut isize) {
191 alloc_write::<K, V, Self>(new, k0, v0, l, r, n)
192 }
set_left_child(new: &mut MutPage, n: isize, l: u64)193 fn set_left_child(new: &mut MutPage, n: isize, l: u64) {
194 if l > 0 {
195 Internal::set_right_child(new, n - 1, l);
196 }
197 }
198 }
199
200 // Allocate a new entry and copy (k0, v0) into the new slot.
// Allocate a new entry and copy (k0, v0) into the new slot.
//
// `l` and `r` are the left and right children of the new entry (0 for
// none). `*n` is the insertion position in the offset array; it is
// incremented past the new entry before returning.
fn alloc_write<K: UnsizedStorable + ?Sized, V: UnsizedStorable + ?Sized, L: Alloc>(
    new: &mut MutPage,
    k0: &K,
    v0: &V,
    l: u64,
    r: u64,
    n: &mut isize,
) {
    let size = alloc_size(k0, v0);
    // Reserve room and set the right child; `off_new` is the in-page
    // byte offset where the entry's data must be written.
    let off_new = alloc_insert::<K, V, L>(new, n, size, r);
    unsafe {
        let new_ptr = new.0.data.add(off_new);
        k0.write_to_page(new_ptr);
        let ks = k0.size();
        // Round the key's size up to V's alignment to find where the
        // value starts.
        let v_ptr = new_ptr.add((ks + V::ALIGN - 1) & !(V::ALIGN - 1));
        v0.write_to_page(v_ptr);
    }
    if l > 0 {
        // The left child of the entry at `*n` is stored as the right
        // child of the entry at `*n - 1` (a no-op for leaves).
        L::set_right_child(new, *n - 1, l);
    }
    *n += 1;
}
223
224 /// Reserve space on the page for `size` bytes of data (i.e. not
225 /// counting the offset), set the right child of the new position
226 /// to `r`, add the offset to the new data to the offset array,
227 /// and return the new offset.
/// Reserve space on the page for `size` bytes of data (i.e. not
/// counting the offset), set the right child of the new position
/// to `r`, add the offset to the new data to the offset array,
/// and return the new offset.
fn alloc_insert<K: UnsizedStorable + ?Sized, V: UnsizedStorable + ?Sized, L: Alloc>(
    new: &mut MutPage,
    n: &mut isize,
    size: usize,
    r: u64,
) -> usize {
    let hdr = header_mut(new);
    let data = hdr.data();
    // If we're here, the following assertion has been checked by the
    // `can_alloc` method.
    debug_assert!(HDR + (hdr.n() as usize + 1) * L::OFFSET_SIZE + size <= data as usize);
    // Data grows downwards: the new entry sits `size` bytes below the
    // previous data start.
    hdr.set_data(data - size as u16);
    hdr.set_n(hdr.n() + 1);
    // Account for both the entry and its offset slot in the page's
    // occupancy counter.
    hdr.incr(L::OFFSET_SIZE + size);
    let off_new = data - size as u16;
    // `hdr_n` is the NEW entry count (old count + 1).
    let hdr_n = hdr.n();

    // Making space for the new offset
    unsafe {
        // Shift the offsets at positions *n..old_n one slot to the
        // right (core::ptr::copy handles the overlap, like memmove).
        // NOTE(review): the byte count uses `hdr_n` (already
        // incremented), so this moves one slot more than the old
        // array held; the extra slot lands in free space (covered by
        // the assertion above whenever `size >= OFFSET_SIZE`) and
        // slot *n is overwritten just below — confirm intentional.
        core::ptr::copy(
            new.0.data.add(HDR + (*n as usize) * L::OFFSET_SIZE),
            new.0
                .data
                .add(HDR + (*n as usize) * L::OFFSET_SIZE + L::OFFSET_SIZE),
            (hdr_n as usize - (*n as usize)) * L::OFFSET_SIZE,
        );
    }
    // Write the (right child | in-page offset) word into slot *n; for
    // leaves the child bits are truncated away by set_offset.
    L::set_offset(new, *n, r | (off_new as u64));
    off_new as usize
}
258