diff --git a/src/internals/bptree/cursor.rs b/src/internals/bptree/cursor.rs index ae32589..6344c1f 100644 --- a/src/internals/bptree/cursor.rs +++ b/src/internals/bptree/cursor.rs @@ -66,8 +66,8 @@ impl LinCowCellCapable, Curso // Now when the lock is dropped, both sides see the correct info and garbage for drops. // We are done, time to seal everything. - new.first_seen.iter().for_each(|n| unsafe { - (**n).make_ro(); + new.first_seen.iter().for_each(|n| { + Node::make_ro_raw(*n); }); // Clear first seen, we won't be dropping them from here. new.first_seen.clear(); @@ -105,19 +105,19 @@ impl SuperBlock { // let last_seen: Vec<*mut Node> = Vec::with_capacity(16); let mut first_seen = Vec::with_capacity(16); // Do a pre-verify to be sure it's sane. - assert!(unsafe { (*root).verify() }); + assert!(Node::verify_raw(root)); // Collect anythinng from root into this txid if needed. // Set txid to txid on all tree nodes from the root. first_seen.push(root); - unsafe { (*root).sblock_collect(&mut first_seen) }; + Node::sblock_collect_raw(root, &mut first_seen); // Lock them all - first_seen.iter().for_each(|n| unsafe { - (**n).make_ro(); + first_seen.iter().for_each(|n| { + Node::make_ro_raw(*n); }); // Determine our count internally. - let (length, _) = unsafe { (*root).tree_density() }; + let (length, _) = Node::tree_density_raw(root); // Good to go! SuperBlock { @@ -184,8 +184,8 @@ pub(crate) trait CursorReadOps { #[cfg(test)] fn get_tree_density(&self) -> (usize, usize) { // Walk the tree and calculate the packing efficiency. - let rref = self.get_root_ref(); - rref.tree_density() + let rref = self.get_root(); + Node::tree_density_raw(rref) } fn search(&self, k: &Q) -> Option<&V> @@ -250,7 +250,7 @@ pub(crate) trait CursorReadOps { panic!("Tree depth exceeded max limit (65536). 
This may indicate memory corruption."); } - fn range<'n, R, T>(&'n self, range: R) -> RangeIter<'n, '_, K, V> + fn range<'n, R, T>(&'n self, range: R) -> RangeIter<'n, 'n, K, V> where K: Borrow, T: Ord + ?Sized, @@ -259,21 +259,21 @@ pub(crate) trait CursorReadOps { RangeIter::new(self.get_root(), range, self.len()) } - fn kv_iter<'n>(&'n self) -> Iter<'n, '_, K, V> { + fn kv_iter<'n>(&'n self) -> Iter<'n, 'n, K, V> { Iter::new(self.get_root(), self.len()) } - fn k_iter<'n>(&'n self) -> KeyIter<'n, '_, K, V> { + fn k_iter<'n>(&'n self) -> KeyIter<'n, 'n, K, V> { KeyIter::new(self.get_root(), self.len()) } - fn v_iter<'n>(&'n self) -> ValueIter<'n, '_, K, V> { + fn v_iter<'n>(&'n self) -> ValueIter<'n, 'n, K, V> { ValueIter::new(self.get_root(), self.len()) } #[cfg(test)] fn verify(&self) -> bool { - self.get_root_ref().no_cycles() && self.get_root_ref().verify() && { + Node::no_cycles_raw(self.get_root()) && Node::verify_raw(self.get_root()) && { let (l, _) = self.get_tree_density(); l == self.len() } @@ -554,10 +554,10 @@ impl CursorWrite { #[cfg(test)] pub(crate) fn tree_density(&self) -> (usize, usize) { - self.get_root_ref().tree_density() + Node::::tree_density_raw(self.get_root()) } - pub(crate) fn range_mut<'n, R, T>(&'n mut self, range: R) -> RangeMutIter<'n, '_, K, V> + pub(crate) fn range_mut<'n, R, T>(&'n mut self, range: R) -> RangeMutIter<'n, 'n, K, V> where K: Borrow, T: Ord + ?Sized, @@ -604,7 +604,7 @@ impl Drop for SuperBlock { let mut first_seen = Vec::with_capacity(16); // eprintln!("{:?}", self.root); first_seen.push(self.root); - unsafe { (*self.root).sblock_collect(&mut first_seen) }; + Node::sblock_collect_raw(self.root, &mut first_seen); first_seen.iter().for_each(|n| Node::free(*n)); } } @@ -1096,7 +1096,7 @@ where K: Clone + Ord + Debug + 'a, V: Clone, { - if self_meta!(node).is_leaf() { + if unsafe {&* node}.meta.is_leaf() { leaf_ref!(node, K, V).get_mut_ref(k) } else { // This nmref binds the life of the reference ... diff --git a/src/internals/bptree/iter.rs b/src/internals/bptree/iter.rs index ea1a6dc..981616a 100644 --- a/src/internals/bptree/iter.rs +++ b/src/internals/bptree/iter.rs @@ -140,7 +140,7 @@ impl<'a, K: Clone + Ord + Debug, V: Clone> Iterator for LeafIter<'a, K, V> { // Return the leaf as we found at the start, regardless of the // stack operations. - Some(leaf_ref!(leafref, K, V)) + Some(leaf_ref_shared!(leafref, K, V)) } fn size_hint(&self) -> (usize, Option) { @@ -181,7 +181,8 @@ where } break; } else { - let bref = branch_ref!(work_node, K, V); + let bref = branch_ref_shared!(work_node, K, V); + let bref_count = bref.count(); match bound { Bound::Excluded(q) | Bound::Included(q) => { let idx = bref.locate_node(q); @@ -192,8 +193,8 @@ where } Bound::Unbounded => { // count shows the most right node. - stack.push_back((work_node, bref.count())); - work_node = branch_ref!(work_node, K, V).get_idx_unchecked(bref.count()); + stack.push_back((work_node, bref_count)); + work_node = branch_ref!(work_node, K, V).get_idx_unchecked(bref_count); } } } @@ -297,7 +298,7 @@ impl<'a, K: Clone + Ord + Debug, V: Clone> Iterator for RevLeafIter<'a, K, V> { // Return the leaf as we found at the start, regardless of the // stack operations. 
- Some(leaf_ref!(leafref, K, V)) + Some(leaf_ref_shared!(leafref, K, V)) } fn size_hint(&self) -> (usize, Option) { @@ -671,7 +672,7 @@ impl DoubleEndedIterator for RangeIter<'_, '_, fn next_back(&mut self) -> Option { loop { if let Some((node, idx)) = self.right_iter.get_mut() { - let leaf = leaf_ref!(*node, K, V); + let leaf = leaf_ref_shared!(*node, K, V); // Get idx checked. if let Some(r) = leaf.get_kv_idx_checked(*idx) { if let Some((lnode, lidx)) = self.left_iter.get_mut() { diff --git a/src/internals/bptree/macros.rs b/src/internals/bptree/macros.rs index b776e28..3ca16a5 100644 --- a/src/internals/bptree/macros.rs +++ b/src/internals/bptree/macros.rs @@ -23,6 +23,24 @@ macro_rules! branch_ref { }}; } +/// Like [`branch_ref`], but yields &Leaf without coercing from &mut Leaf. This is useful +/// to avoid triggering Miri's analysis. +macro_rules! branch_ref_shared { + ($x:expr, $k:ty, $v:ty) => {{ + debug_assert!(unsafe { (*$x).meta.is_branch() }); + unsafe { &*($x as *const Branch<$k, $v>) } + }}; +} + +/// Like [`leaf_ref`], but yields &Leaf without coercing from &mut Leaf. This is useful +/// to avoid triggering Miri's analysis. +macro_rules! leaf_ref_shared { + ($x:expr, $k:ty, $v:ty) => {{ + debug_assert!(unsafe { (*$x).meta.is_leaf() }); + unsafe { &*($x as *const Leaf<$k, $v>) } + }}; +} + macro_rules! leaf_ref { ($x:expr, $k:ty, $v:ty) => {{ debug_assert!(unsafe { (*$x).meta.is_leaf() }); diff --git a/src/internals/bptree/node.rs b/src/internals/bptree/node.rs index 13d1ec6..091c5a2 100644 --- a/src/internals/bptree/node.rs +++ b/src/internals/bptree/node.rs @@ -203,21 +203,21 @@ impl Node { // println!("Req new branch"); debug_assert!(!l.is_null()); debug_assert!(!r.is_null()); - debug_assert!(unsafe { (*l).verify() }); - debug_assert!(unsafe { (*r).verify() }); + debug_assert!(Node::verify_raw(l)); + debug_assert!(Node::verify_raw(r)); debug_assert!(txid < (TXID_MASK >> TXID_SHF)); let x: Box>> = Box::new(CachePadded::new(Branch { // This sets the default (key) count to 1, since we take an l/r meta: Meta((txid << TXID_SHF) | FLAG_BRANCH | 1), #[cfg(feature = "skinny")] key: [ - MaybeUninit::new(unsafe { (*r).min().clone() }), + MaybeUninit::new(unsafe { &*Node::min_raw(r) }.clone()), MaybeUninit::uninit(), MaybeUninit::uninit(), ], #[cfg(not(feature = "skinny"))] key: [ - MaybeUninit::new(unsafe { (*r).min().clone() }), + MaybeUninit::new(unsafe { &*Node::min_raw(r) }.clone()), MaybeUninit::uninit(), MaybeUninit::uninit(), MaybeUninit::uninit(), @@ -241,21 +241,16 @@ impl Node { #[cfg(all(test, not(miri)))] nid: alloc_nid(), })); - debug_assert!(x.verify()); - Box::into_raw(x) as *mut Branch + let output = Box::into_raw(x) as *mut Branch; + debug_assert!(Self::verify_raw(output as *const _)); + output } #[inline(always)] - pub(crate) fn make_ro(&self) { - match self.meta.0 & FLAG_MASK { - FLAG_LEAF => { - let lref = unsafe { &*(self as *const _ as *const Leaf) }; - lref.make_ro() - } - FLAG_BRANCH => { - let bref = unsafe { &*(self as *const _ as *const Branch) }; - bref.make_ro() - } + pub(crate) fn make_ro_raw(pointer: *const Self) { + match unsafe { &*pointer }.meta.0 & FLAG_MASK { + FLAG_LEAF => Leaf::::make_ro_raw(pointer as *const _), + FLAG_BRANCH => Branch::::make_ro_raw(pointer as *const _), _ => unreachable!(), } } @@ -278,19 +273,19 @@ impl Node { } #[cfg(test)] - pub(crate) fn tree_density(&self) -> (usize, usize) { - match self.meta.0 & FLAG_MASK { + pub(crate) fn tree_density_raw(pointer: *const Self) -> (usize, usize) { + match unsafe { &*pointer }.meta.0 & 
FLAG_MASK { FLAG_LEAF => { - let lref = unsafe { &*(self as *const _ as *const Leaf) }; + let lref = unsafe { &*(pointer as *const Leaf) }; (lref.count(), L_CAPACITY) } FLAG_BRANCH => { - let bref = unsafe { &*(self as *const _ as *const Branch) }; + let bref = unsafe { &*(pointer as *const Branch) }; let mut lcount = 0; // leaf populated let mut mcount = 0; // leaf max possible for idx in 0..(bref.count() + 1) { let n = bref.nodes[idx] as *mut Node; - let (l, m) = unsafe { (*n).tree_density() }; + let (l, m) = Self::tree_density_raw(n); lcount += l; mcount += m; } @@ -320,20 +315,14 @@ impl Node { #[cfg(test)] #[inline(always)] - pub(crate) fn get_ref(&self, k: &Q) -> Option<&V> + pub(crate) fn get_ref_raw(pointer: *const Self, k: &Q) -> Option<*const V> where K: Borrow, Q: Ord, { - match self.meta.0 & FLAG_MASK { - FLAG_LEAF => { - let lref = unsafe { &*(self as *const _ as *const Leaf) }; - lref.get_ref(k) - } - FLAG_BRANCH => { - let bref = unsafe { &*(self as *const _ as *const Branch) }; - bref.get_ref(k) - } + match unsafe { &*pointer }.meta.0 & FLAG_MASK { + FLAG_LEAF => Leaf::::get_ref_raw(pointer as *const _, k), + FLAG_BRANCH => Branch::::get_ref_raw(pointer as *const _, k), _ => { // println!("FLAGS: {:x}", self.meta.0); unreachable!() @@ -357,49 +346,46 @@ impl Node { } #[inline(always)] - pub(crate) fn max(&self) -> &K { - match self.meta.0 & FLAG_MASK { - FLAG_LEAF => { - let lref = unsafe { &*(self as *const _ as *const Leaf) }; - lref.max() - } - FLAG_BRANCH => { - let bref = unsafe { &*(self as *const _ as *const Branch) }; - bref.max() - } + pub(crate) fn min_raw(pointer: *const Self) -> *const K { + match unsafe { &*pointer }.meta.0 & FLAG_MASK { + FLAG_LEAF => Leaf::::min_raw(pointer as *const _), + FLAG_BRANCH => Branch::::min_raw(pointer as *const _), _ => unreachable!(), } } #[inline(always)] - pub(crate) fn verify(&self) -> bool { - match self.meta.0 & FLAG_MASK { - FLAG_LEAF => { - let lref = unsafe { &*(self as *const _ as *const Leaf) }; - lref.verify() - } - FLAG_BRANCH => { - let bref = unsafe { &*(self as *const _ as *const Branch) }; - bref.verify() - } + pub(crate) fn max_raw(pointer: *const Self) -> *const K { + match unsafe { &*pointer }.meta.0 & FLAG_MASK { + FLAG_LEAF => Leaf::::max_raw(pointer as *const _), + FLAG_BRANCH => Branch::::max_raw(pointer as *const _), + _ => unreachable!(), + } + } + + #[inline(always)] + pub(crate) fn verify_raw(pointer: *const Self) -> bool { + match unsafe { &*pointer }.meta.0 & FLAG_MASK { + FLAG_LEAF => Leaf::::verify_raw(pointer as *const _), + FLAG_BRANCH => Branch::::verify_raw(pointer as *const _), _ => unreachable!(), } } #[cfg(test)] - fn no_cycles_inner(&self, track: &mut BTreeSet<*const Self>) -> bool { - match self.meta.0 & FLAG_MASK { + fn no_cycles_inner_raw(pointer: *const Self, track: &mut BTreeSet<*const Self>) -> bool { + match unsafe { &*pointer }.meta.0 & FLAG_MASK { FLAG_LEAF => { // check if we are in the set? 
- track.insert(self as *const Self) + track.insert(pointer as *const Self) } FLAG_BRANCH => { - if track.insert(self as *const Self) { + if track.insert(pointer as *const Self) { // check - let bref = unsafe { &*(self as *const _ as *const Branch) }; + let bref = unsafe { &*(pointer as *const Branch) }; for i in 0..(bref.count() + 1) { let n = bref.nodes[i]; - let r = unsafe { (*n).no_cycles_inner(track) }; + let r = Node::no_cycles_inner_raw(n, track); if !r { // panic!(); return false; @@ -419,9 +405,9 @@ impl Node { } #[cfg(test)] - pub(crate) fn no_cycles(&self) -> bool { + pub(crate) fn no_cycles_raw(pointer: *const Self) -> bool { let mut track = BTreeSet::new(); - self.no_cycles_inner(&mut track) + Self::no_cycles_inner_raw(pointer, &mut track) } pub(crate) fn sblock_collect(&mut self, alloc: &mut Vec<*mut Node>) { @@ -439,6 +425,21 @@ impl Node { } } + pub(crate) fn sblock_collect_raw(pointer: *mut Self, alloc: &mut Vec<*mut Node>) { + // Reset our txid. + // self.meta.0 &= FLAG_MASK | COUNT_MASK; + // self.meta.0 |= txid << TXID_SHF; + + if (unsafe { &*pointer }.meta.0 & FLAG_MASK) == FLAG_BRANCH { + let bref = unsafe { &*(pointer as *const Branch) }; + for idx in 0..(bref.count() + 1) { + alloc.push(bref.nodes[idx]); + let n = bref.nodes[idx]; + Node::sblock_collect_raw(n, alloc); + } + } + } + pub(crate) fn free(node: *mut Node) { let self_meta = self_meta!(node); match self_meta.0 & FLAG_MASK { @@ -551,6 +552,19 @@ impl Leaf { .map(|idx| unsafe { &*self.values[idx].as_ptr() }) } + #[cfg(test)] + pub(crate) fn get_ref_raw(pointer: *const Self, k: &Q) -> Option<*const V> + where + K: Borrow, + Q: Ord + ?Sized, + { + let this = unsafe { &*pointer }; + debug_assert_leaf!(this); + key_search!(this, k) + .ok() + .map(|idx| this.values[idx].as_ptr()) + } + pub(crate) fn get_mut_ref(&mut self, k: &Q) -> Option<&mut V> where K: Borrow, @@ -579,11 +593,22 @@ impl Leaf { unsafe { &*self.key[0].as_ptr() } } + pub(crate) fn min_raw<'a>(pointer: *const Self) -> *const K { + unsafe { &*pointer }.min() + } + + #[cfg(test)] pub(crate) fn max(&self) -> &K { debug_assert!(self.count() > 0); unsafe { &*self.key[self.count() - 1].as_ptr() } } + pub(crate) fn max_raw(pointer: *const Self) -> *const K { + let this = unsafe { &*pointer }; + debug_assert!(this.count() > 0); + unsafe { &*this.key[this.count() - 1].as_ptr() } + } + pub(crate) fn min_value(&self) -> Option<(&K, &V)> { if self.count() > 0 { self.get_kv_idx_checked(0) @@ -778,12 +803,12 @@ impl Leaf { */ #[inline(always)] - pub(crate) fn make_ro(&self) { - debug_assert_leaf!(self); + pub(crate) fn make_ro_raw(pointer: *const Self) { + debug_assert_leaf!(unsafe { &*pointer }); /* let r = unsafe { mprotect( - self as *const Leaf as *mut c_void, + this as *const Leaf as *mut c_void, size_of::>(), PROT_READ ) @@ -806,6 +831,7 @@ impl Leaf { right.meta.set_count(0); } + #[cfg(test)] pub(crate) fn verify(&self) -> bool { debug_assert_leaf!(self); // println!("verify leaf -> {:?}", self); @@ -829,6 +855,30 @@ impl Leaf { true } + pub(crate) fn verify_raw(pointer: *const Self) -> bool { + debug_assert_leaf!(unsafe { &*pointer }); + // println!("verify leaf -> {:?}", self); + // Check key sorting + let count = unsafe { &*pointer }.meta.count(); + if count == 0 { + return true; + } + let mut lk: &K = unsafe { &*(&*pointer).key[0].as_ptr() }; + for work_idx in 1..count { + let rk: &K = unsafe { &*(&*pointer).key[work_idx].as_ptr() }; + if lk >= rk { + // println!("{:?}", self); + if cfg!(test) { + return false; + } else { + debug_assert!(false); + } + 
} + lk = rk; + } + true + } + fn free(node: *mut Self) { unsafe { let _x: Box>> = @@ -914,12 +964,18 @@ impl Branch { unsafe { (*self.nodes[0]).min() } } - // Can't inline as this is recursive! - pub(crate) fn max(&self) -> &K { - debug_assert_branch!(self); + pub(crate) fn min_raw<'a>(pointer: *const Self) -> *const K { + let this = unsafe { &*pointer }; + debug_assert_branch!(this); + Node::min_raw(this.nodes[0]) + } + + pub(crate) fn max_raw(pointer: *const Self) -> *const K { + let this = unsafe { &*pointer }; + debug_assert_branch!(this); // Remember, self.count() is + 1 offset, so this gets // the max node - unsafe { (*self.nodes[self.count()]).max() } + Node::max_raw(this.nodes[this.count()]) } pub(crate) fn min_node(&self) -> *mut Node { @@ -1004,20 +1060,21 @@ impl Branch { } #[cfg(test)] - pub(crate) fn get_ref(&self, k: &Q) -> Option<&V> + pub(crate) fn get_ref_raw(pointer: *const Self, k: &Q) -> Option<*const V> where K: Borrow, Q: Ord, { - debug_assert_branch!(self); + let this = unsafe { &*pointer }; + debug_assert_branch!(this); // If the value is Ok(idx), then that means // we were located to the right node. This is because we // exactly hit and located on the key. // // If the value is Err(idx), then we have the exact index already. // as branches is of-by-one. - let idx = self.locate_node(k); - unsafe { (*self.nodes[idx]).get_ref(k) } + let idx = this.locate_node(k); + Node::get_ref_raw(this.nodes[idx], k) } pub(crate) fn add_node(&mut self, node: *mut Node) -> BranchInsertState { @@ -1032,7 +1089,7 @@ impl Branch { // 2 * The inserted node is between max - 1 and max, causing l(node, max) to be returned. // 3 * The inserted node is a low/middle value, causing max and max -1 to be returned. // - let kr = unsafe { (*node).min() }; + let kr = unsafe { &*Node::min_raw(node) }; let r = key_search!(self, kr); let ins_idx = r.unwrap_err(); // Everything will pop max. @@ -1085,7 +1142,7 @@ impl Branch { } else { // if space -> // Get the nodes min-key - we clone it because we'll certainly be inserting it! - let k: K = unsafe { (*node).min().clone() }; + let k: K = unsafe { &*Node::min_raw(node) }.clone(); // bst and find when min-key < key[idx] let r = key_search!(self, &k); // if r is ever found, I think this is a bug, because we should never be able to @@ -1258,7 +1315,7 @@ impl Branch { // println!("pre-fixup -> {:?}", self); let sibnode = self.nodes[sibidx]; - let nkey: K = unsafe { (*sibnode).min().clone() }; + let nkey: K = unsafe { &*Node::min_raw(sibnode) }.clone(); unsafe { slice_insert(&mut self.key, MaybeUninit::new(nkey), sibidx); @@ -1285,7 +1342,7 @@ impl Branch { // let sibnode = self.nodes[sibidx]; - let nkey: K = unsafe { (*sibnode).min().clone() }; + let nkey: K = unsafe { &*Node::min_raw(sibnode) }.clone(); unsafe { slice_insert(&mut self.nodes, lnode, sibidx); @@ -1458,7 +1515,7 @@ impl Branch { debug_assert!(idx > 0); // For the node listed, rekey it. let nref = self.nodes[idx]; - let nkey = unsafe { ((*nref).min()).clone() }; + let nkey = unsafe { &*Node::min_raw(nref) }.clone(); unsafe { self.key[idx - 1].as_mut_ptr().write(nkey); } @@ -1473,7 +1530,7 @@ impl Branch { if rc == 0 { let node = right.nodes[0]; debug_assert!(!node.is_null()); - let k: K = unsafe { (*node).min().clone() }; + let k: K = unsafe { &*Node::min_raw(node) }.clone(); let ins_idx = self.count(); let leaf_ins_idx = ins_idx + 1; unsafe { @@ -1496,7 +1553,7 @@ impl Branch { // rekey the lowest pointer. 
unsafe { let nptr = self.nodes[1]; - let k: K = (*nptr).min().clone(); + let k: K = { &*Node::min_raw(nptr) }.clone(); self.key[0].as_mut_ptr().write(k); } // done! @@ -1529,11 +1586,8 @@ impl Branch { // [ k1, k2, k3, k4, k5, k6 ] [ --, --, --, --, ... // [ v1, v2, v3, v4, v5, v6, v7 ] -> [ --, --, --, v8, --, ... // - unsafe { - ptr::swap( - right.nodes.get_unchecked_mut(0), - right.nodes.get_unchecked_mut(count), - ) + if count != 0 { + right.nodes.swap(0, count); } // Move our values from the tail. // We would move 3 now to: @@ -1594,19 +1648,13 @@ impl Branch { // move keys down in right unsafe { - ptr::copy( - right.key.as_ptr().add(count), - right.key.as_mut_ptr(), - start_idx, - ); + let key = right.key.as_mut_ptr(); + ptr::copy(key.add(count), key, start_idx); } // move nodes down in right unsafe { - ptr::copy( - right.nodes.as_ptr().add(count), - right.nodes.as_mut_ptr(), - start_idx + 1, - ); + let nodes = right.nodes.as_mut_ptr(); + ptr::copy(nodes.add(count), nodes, start_idx + 1); } // update counts @@ -1797,12 +1845,12 @@ impl Branch { */ #[inline(always)] - pub(crate) fn make_ro(&self) { - debug_assert_branch!(self); + pub(crate) fn make_ro_raw(pointer: *const Self) { + debug_assert_branch!(unsafe { &*pointer }); /* let r = unsafe { mprotect( - self as *const Branch as *mut c_void, + this as *const Branch as *mut c_void, size_of::>(), PROT_READ ) @@ -1811,18 +1859,19 @@ impl Branch { */ } - pub(crate) fn verify(&self) -> bool { - debug_assert_branch!(self); - if self.count() == 0 { + pub(crate) fn verify_raw(pointer: *const Self) -> bool { + let this = unsafe { &*pointer }; + debug_assert_branch!(this); + if this.count() == 0 { // Not possible to be valid! debug_assert!(false); return false; } // println!("verify branch -> {:?}", self); // Check we are sorted. - let mut lk: &K = unsafe { &*self.key[0].as_ptr() }; - for work_idx in 1..self.count() { - let rk: &K = unsafe { &*self.key[work_idx].as_ptr() }; + let mut lk: &K = unsafe { &*this.key[0].as_ptr() }; + for work_idx in 1..this.count() { + let rk: &K = unsafe { &*this.key[work_idx].as_ptr() }; // println!("{:?} >= {:?}", lk, rk); if lk >= rk { debug_assert!(false); @@ -1831,12 +1880,12 @@ impl Branch { lk = rk; } // Recursively call verify - for work_idx in 0..self.count() { - let node = unsafe { &*self.nodes[work_idx] }; - if !node.verify() { - for work_idx in 0..(self.count() + 1) { - let nref = unsafe { &*self.nodes[work_idx] }; - if !nref.verify() { + for work_idx in 0..this.count() { + let node = this.nodes[work_idx]; + if !Node::verify_raw(node) { + for work_idx in 0..(this.count() + 1) { + let nref = this.nodes[work_idx]; + if !Node::verify_raw(nref) { // println!("Failed children"); debug_assert!(false); return false; @@ -1846,14 +1895,14 @@ impl Branch { } // Check descendants are validly ordered. // V-- remember, there are count + 1 nodes. 
- for work_idx in 0..self.count() { + for work_idx in 0..this.count() { // get left max and right min - let lnode = unsafe { &*self.nodes[work_idx] }; - let rnode = unsafe { &*self.nodes[work_idx + 1] }; + let lnode = this.nodes[work_idx]; + let rnode = this.nodes[work_idx + 1]; - let pkey = unsafe { &*self.key[work_idx].as_ptr() }; - let lkey = lnode.max(); - let rkey = rnode.min(); + let pkey = unsafe { &*this.key[work_idx].as_ptr() }; + let lkey = unsafe { &*Node::max_raw(lnode as *const _) }; + let rkey = unsafe { &*Node::min_raw(rnode as *const _) }; if lkey >= pkey || pkey > rkey { // println!("++++++"); // println!("{:?} >= {:?}, {:?} > {:?}", lkey, pkey, pkey, rkey); @@ -1936,8 +1985,8 @@ mod tests { #[test] fn test_bptree2_node_test_weird_basics() { - let leaf: *mut Leaf = Node::new_leaf(1); - let leaf = unsafe { &mut *leaf }; + let leaf_raw: *mut Leaf = Node::new_leaf(1); + let leaf = unsafe { &mut *leaf_raw }; assert!(leaf.get_txid() == 1); // println!("{:?}", leaf); @@ -1969,14 +2018,14 @@ mod tests { Branch::free(branch as *mut _); */ - Leaf::free(leaf as *mut _); + Leaf::free(leaf_raw as *mut _); assert_released(); } #[test] fn test_bptree2_node_leaf_in_order() { - let leaf: *mut Leaf = Node::new_leaf(1); - let leaf = unsafe { &mut *leaf }; + let leaf_raw: *mut Leaf = Node::new_leaf(1); + let leaf = unsafe { &mut *leaf_raw }; assert!(leaf.get_txid() == 1); // Check insert to capacity for kv in 0..L_CAPACITY { @@ -1998,15 +2047,15 @@ mod tests { assert!(false); } } - assert!(leaf.verify()); - Leaf::free(leaf as *mut _); + assert!(Leaf::::verify_raw(leaf_raw)); + Leaf::free(leaf_raw); assert_released(); } #[test] fn test_bptree2_node_leaf_out_of_order() { - let leaf: *mut Leaf = Node::new_leaf(1); - let leaf = unsafe { &mut *leaf }; + let leaf_raw: *mut Leaf = Node::new_leaf(1); + let leaf = unsafe { &mut *leaf_raw }; assert!(L_CAPACITY <= 8); let kvs = [7, 5, 1, 6, 2, 3, 0, 8]; @@ -2036,14 +2085,14 @@ mod tests { } assert!(leaf.verify()); assert!(leaf.count() == L_CAPACITY); - Leaf::free(leaf as *mut _); + Leaf::free(leaf_raw); assert_released(); } #[test] fn test_bptree2_node_leaf_min() { - let leaf: *mut Leaf = Node::new_leaf(1); - let leaf = unsafe { &mut *leaf }; + let leaf_raw: *mut Leaf = Node::new_leaf(1); + let leaf = unsafe { &mut *leaf_raw }; assert!(L_CAPACITY <= 8); let kvs = [3, 2, 6, 4, 5, 1, 9, 0]; @@ -2061,14 +2110,14 @@ mod tests { } assert!(leaf.verify()); assert!(leaf.count() == L_CAPACITY); - Leaf::free(leaf as *mut _); + Leaf::free(leaf_raw); assert_released(); } #[test] fn test_bptree2_node_leaf_max() { - let leaf: *mut Leaf = Node::new_leaf(1); - let leaf = unsafe { &mut *leaf }; + let leaf_raw: *mut Leaf = Node::new_leaf(1); + let leaf = unsafe { &mut *leaf_raw }; assert!(L_CAPACITY <= 8); let kvs = [1, 3, 2, 6, 4, 5, 9, 0]; @@ -2086,14 +2135,14 @@ mod tests { } assert!(leaf.verify()); assert!(leaf.count() == L_CAPACITY); - Leaf::free(leaf as *mut _); + Leaf::free(leaf_raw); assert_released(); } #[test] fn test_bptree2_node_leaf_remove_order() { - let leaf: *mut Leaf = Node::new_leaf(1); - let leaf = unsafe { &mut *leaf }; + let leaf_raw: *mut Leaf = Node::new_leaf(1); + let leaf = unsafe { &mut *leaf_raw }; for kv in 0..L_CAPACITY { leaf.insert_or_update(kv, kv); } @@ -2135,14 +2184,14 @@ mod tests { assert!(leaf.count() == 0); assert!(leaf.verify()); - Leaf::free(leaf as *mut _); + Leaf::free(leaf_raw as *mut _); assert_released(); } #[test] fn test_bptree2_node_leaf_remove_out_of_order() { - let leaf: *mut Leaf = Node::new_leaf(1); - let leaf = 
unsafe { &mut *leaf }; + let leaf_raw: *mut Leaf = Node::new_leaf(1); + let leaf = unsafe { &mut *leaf_raw }; for kv in 0..L_CAPACITY { leaf.insert_or_update(kv, kv); } @@ -2166,14 +2215,14 @@ mod tests { assert!(leaf.count() == 1); assert!(leaf.verify()); - Leaf::free(leaf as *mut _); + Leaf::free(leaf_raw); assert_released(); } #[test] fn test_bptree2_node_leaf_insert_split() { - let leaf: *mut Leaf = Node::new_leaf(1); - let leaf = unsafe { &mut *leaf }; + let leaf_raw: *mut Leaf = Node::new_leaf(1); + let leaf = unsafe { &mut *leaf_raw }; for kv in 0..L_CAPACITY { leaf.insert_or_update(kv + 10, kv + 10); } @@ -2202,7 +2251,7 @@ mod tests { assert!(leaf.count() == L_CAPACITY); assert!(leaf.verify()); - Leaf::free(leaf as *mut _); + Leaf::free(leaf_raw); assert_released(); } @@ -2280,19 +2329,22 @@ mod tests { left as *mut Node, right as *mut Node, ); - let branch_ref = unsafe { &mut *branch }; // verify - assert!(branch_ref.verify()); + assert!(Branch::::verify_raw(branch)); // Test .min works on our descendants - assert!(branch_ref.min() == &10); + assert!(unsafe { *Branch::::min_raw(branch) } == 10); // Test .max works on our descendants. - assert!(branch_ref.max() == &(20 + L_CAPACITY - 1)); + assert!(unsafe { *Branch::::max_raw(branch) } == (20 + L_CAPACITY - 1)); // Get some k within the leaves. - assert!(branch_ref.get_ref(&11) == Some(&11)); - assert!(branch_ref.get_ref(&21) == Some(&21)); + assert!( + Branch::::get_ref_raw(branch, &11).map(|val| unsafe { *val }) == Some(11) + ); + assert!( + Branch::::get_ref_raw(branch, &21).map(|val| unsafe { *val }) == Some(21) + ); // get some k that is out of bounds. - assert!(branch_ref.get_ref(&1).is_none()); - assert!(branch_ref.get_ref(&100).is_none()); + assert!(Branch::::get_ref_raw(branch, &1).is_none()); + assert!(Branch::::get_ref_raw(branch, &100).is_none()); Leaf::free(left as *mut _); Leaf::free(right as *mut _); @@ -2333,9 +2385,9 @@ mod tests { b as *mut Node, c as *mut Node, ); + assert!(Branch::::verify_raw(branch)); let branch_ref = unsafe { &mut *branch }; // verify - assert!(branch_ref.verify()); // Now min node (uses a diff function!) 
let r = branch_ref.add_node_left(a as *mut Node, 0); match r { @@ -2343,7 +2395,7 @@ mod tests { _ => debug_assert!(false), }; // Assert okay + verify - assert!(branch_ref.verify()); + assert!(Branch::::verify_raw(branch)); Branch::free(branch as *mut _); }) } @@ -2357,16 +2409,16 @@ mod tests { a as *mut Node, c as *mut Node, ); + assert!(Branch::::verify_raw(branch)); let branch_ref = unsafe { &mut *branch }; // verify - assert!(branch_ref.verify()); let r = branch_ref.add_node(b as *mut Node); match r { BranchInsertState::Ok => {} _ => debug_assert!(false), }; // Assert okay + verify - assert!(branch_ref.verify()); + assert!(Branch::::verify_raw(branch)); Branch::free(branch as *mut _); }) } @@ -2380,16 +2432,16 @@ mod tests { a as *mut Node, b as *mut Node, ); - let branch_ref = unsafe { &mut *branch }; // verify - assert!(branch_ref.verify()); + assert!(Branch::::verify_raw(branch)); + let branch_ref = unsafe { &mut *branch }; let r = branch_ref.add_node(c as *mut Node); match r { BranchInsertState::Ok => {} _ => debug_assert!(false), }; // Assert okay + verify - assert!(branch_ref.verify()); + assert!(Branch::::verify_raw(branch)); Branch::free(branch as *mut _); }) } @@ -2485,14 +2537,14 @@ mod tests { match r { BranchInsertState::Split(x, y) => { unsafe { - assert!((*x).min() == &(max - 10)); - assert!((*y).min() == &max); + assert!({ &*Node::::min_raw(x as *const _) } == &(max - 10)); + assert!({ &*Node::::min_raw(y as *const _) } == &max); } // X, Y will be freed by the macro caller. } _ => debug_assert!(false), }; - assert!(branch_ref.verify()); + assert!(Branch::::verify_raw(branch_ref as *mut _)); // Free node. Leaf::free(node as *mut _); }) @@ -2514,14 +2566,15 @@ mod tests { unsafe { // println!("{:?}", (*y).min()); // println!("{:?}", (*mynode).min()); - assert!((*y).min() == &max); - assert!((*mynode).min() == &200); + + assert!({ &*Node::::min_raw(y as *const _) } == &max); + assert!({ &*Node::::min_raw(mynode as *const _) } == &200); } // Y will be freed by the macro caller. } _ => debug_assert!(false), }; - assert!(branch_ref.verify()); + assert!(Branch::::verify_raw(branch_ref as *mut _)); // Free node. Leaf::free(node as *mut _); }) @@ -2542,14 +2595,16 @@ mod tests { match r { BranchInsertState::Split(mynode, y) => { unsafe { - assert!((*mynode).min() == &(max - 5)); - assert!((*y).min() == &max); + assert!( + { &*Node::::min_raw(mynode as *const _) } == &(max - 5) + ); + assert!({ &*Node::::min_raw(y as *const _) } == &max); } // Y will be freed by the macro caller. } _ => debug_assert!(false), }; - assert!(branch_ref.verify()); + assert!(Branch::::verify_raw(branch_ref as *mut _)); // Free node. Leaf::free(node as *mut _); }) diff --git a/src/utils.rs b/src/utils.rs index ec82544..927008a 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -13,24 +13,19 @@ use std::ptr; use serde::de::{Deserialize, MapAccess, Visitor}; pub(crate) unsafe fn slice_insert(slice: &mut [T], new: T, idx: usize) { - // miri doesn't like this - ptr::copy( - slice.as_ptr().add(idx), - slice.as_mut_ptr().add(idx + 1), - slice.len() - idx - 1, - ); - ptr::write(slice.get_unchecked_mut(idx), new); + let len = slice.len(); + let slice = slice.as_mut_ptr(); + ptr::copy(slice.add(idx), slice.add(idx + 1), len - idx - 1); + ptr::write(slice.add(idx), new); } // From std::collections::btree::node.rs pub(crate) unsafe fn slice_remove(slice: &mut [T], idx: usize) -> T { // setup the value to be returned, IE give ownership to ret. 
+ let len = slice.len(); let ret = ptr::read(slice.get_unchecked(idx)); - ptr::copy( - slice.as_ptr().add(idx + 1), - slice.as_mut_ptr().add(idx), - slice.len() - idx - 1, - ); + let slice = slice.as_mut_ptr(); + ptr::copy(slice.add(idx + 1), slice.add(idx), len - idx - 1); ret }
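
The refactor above rewrites the node accessors from &self methods into associated `_raw` functions taking `*const Self`, and the slice helpers in utils.rs now derive every offset from a single base pointer. The new macro comments state the motive for the `_shared` macros (avoiding triggering Miri's analysis), and the same aliasing concern appears to drive the other changes. The sketch below is a standalone illustration of both patterns under that assumption; the `Cell` type and `shift_insert` helper are hypothetical stand-ins, not the crate's `Node` or `slice_insert`.

use std::ptr;

// Pattern 1: take the raw pointer as the argument, so the only shared reference to the
// node is created inside the function instead of being materialised at every call site
// via `(*ptr).method()`.
struct Cell {
    value: u64,
}

impl Cell {
    // Old shape: `fn verify(&self) -> bool`, called as `unsafe { (*ptr).verify() }`.
    // New shape: `Cell::verify_raw(ptr)`.
    fn verify_raw(pointer: *const Self) -> bool {
        let this = unsafe { &*pointer };
        this.value != u64::MAX
    }
}

// Pattern 2: derive both the source and destination offsets from one base pointer.
// Mixing `slice.as_ptr()` and `slice.as_mut_ptr()` in the same copy produces two
// differently tagged pointers, which Miri's Stacked Borrows model flags.
unsafe fn shift_insert<T>(slice: &mut [T], new: T, idx: usize) {
    let len = slice.len();
    let base = slice.as_mut_ptr();
    // Shift idx..len-1 right by one; the last slot's previous value is overwritten,
    // so the caller must ensure it is uninitialised or already moved out.
    ptr::copy(base.add(idx), base.add(idx + 1), len - idx - 1);
    ptr::write(base.add(idx), new);
}

fn main() {
    // Hypothetical usage of both helpers.
    let cell = Box::into_raw(Box::new(Cell { value: 7 }));
    assert!(Cell::verify_raw(cell));
    unsafe { drop(Box::from_raw(cell)) };

    let mut data = [1, 2, 3, 4, 0];
    unsafe { shift_insert(&mut data, 5, 1) };
    assert_eq!(data, [1, 5, 2, 3, 4]);
}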