diff --git a/Cargo.toml b/Cargo.toml
index ccaafbe..ac9fd4d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "concread"
-version = "0.5.1"
+version = "0.5.2"
 authors = ["William Brown <william@blackhats.net.au>"]
 edition = "2021"
 description = "Concurrently Readable Data-Structures for Rust"
diff --git a/src/arcache/ll.rs b/src/arcache/ll.rs
index 8b59be0..db27c87 100644
--- a/src/arcache/ll.rs
+++ b/src/arcache/ll.rs
@@ -3,6 +3,48 @@
 use std::marker::PhantomData;
 use std::mem::MaybeUninit;
 use std::ptr;

+#[cfg(all(test, not(miri)))]
+use std::collections::BTreeSet;
+#[cfg(all(test, not(miri)))]
+use std::sync::atomic::{AtomicUsize, Ordering};
+#[cfg(all(test, not(miri)))]
+use std::sync::Mutex;
+
+#[cfg(all(test, not(miri)))]
+thread_local!(static NODE_COUNTER: AtomicUsize = const { AtomicUsize::new(1) });
+#[cfg(all(test, not(miri)))]
+thread_local!(static ALLOC_LIST: Mutex<BTreeSet<usize>> = const { Mutex::new(BTreeSet::new()) });
+
+#[cfg(all(test, not(miri)))]
+fn alloc_nid() -> usize {
+    let nid: usize = NODE_COUNTER.with(|nc| nc.fetch_add(1, Ordering::AcqRel));
+    {
+        ALLOC_LIST.with(|llist| llist.lock().unwrap().insert(nid));
+    }
+    eprintln!("Allocate -> {:?}", nid);
+    nid
+}
+
+#[cfg(all(test, not(miri)))]
+fn release_nid(nid: usize) {
+    println!("Release -> {:?}", nid);
+    let r = ALLOC_LIST.with(|llist| llist.lock().unwrap().remove(&nid));
+    assert!(r);
+}
+
+#[cfg(test)]
+pub fn assert_released() {
+    #[cfg(not(miri))]
+    {
+        let is_empt = ALLOC_LIST.with(|llist| {
+            let x = llist.lock().unwrap();
+            eprintln!("Assert Released - Remaining -> {:?}", x);
+            x.is_empty()
+        });
+        assert!(is_empt);
+    }
+}
+
 pub trait LLWeight {
     fn ll_weight(&self) -> usize;
 }
@@ -28,7 +70,7 @@ where
 }

 #[derive(Debug)]
-pub(crate) struct LLNode<K>
+struct LLNode<K>
 where
     K: LLWeight + Clone + Debug,
 {
@@ -36,6 +78,112 @@ where
     next: *mut LLNode<K>,
     prev: *mut LLNode<K>,
     // tag: usize,
+    #[cfg(all(test, not(miri)))]
+    pub(crate) nid: usize,
+}
+
+#[derive(Copy, Clone, Debug)]
+pub(crate) struct LLNodeRef<K>
+where
+    K: LLWeight + Clone + Debug,
+{
+    inner: *mut LLNode<K>,
+}
+
+impl<K> LLNodeRef<K>
+where
+    K: LLWeight + Clone + Debug,
+{
+    pub fn is_null(&self) -> bool {
+        self.inner.is_null()
+    }
+
+    pub unsafe fn make_mut(&self) -> &mut K {
+        &mut *(*self.inner).k.as_mut_ptr()
+    }
+}
+
+impl<K> AsRef<K> for LLNodeRef<K>
+where
+    K: LLWeight + Clone + Debug,
+{
+    fn as_ref(&self) -> &K {
+        unsafe { &*(*self.inner).k.as_ptr() }
+    }
+}
+
+impl<K> PartialEq<&LLNodeOwned<K>> for &LLNodeRef<K>
+where
+    K: LLWeight + Clone + Debug,
+{
+    fn eq(&self, other: &&LLNodeOwned<K>) -> bool {
+        self.inner == other.inner
+    }
+}
+
+#[derive(Debug)]
+pub(crate) struct LLNodeOwned<K>
+where
+    K: LLWeight + Clone + Debug,
+{
+    inner: *mut LLNode<K>,
+}
+
+impl<K> LLNodeOwned<K>
+where
+    K: LLWeight + Clone + Debug,
+{
+    fn into_inner(&mut self) -> *mut LLNode<K> {
+        let x = self.inner;
+        self.inner = ptr::null_mut();
+        x
+    }
+
+    pub fn is_null(&self) -> bool {
+        self.inner.is_null()
+    }
+}
+
+impl<K> PartialEq for &LLNodeOwned<K>
+where
+    K: LLWeight + Clone + Debug,
+{
+    fn eq(&self, other: &Self) -> bool {
+        self.inner == other.inner
+    }
+}
+
+impl<K> AsRef<K> for LLNodeOwned<K>
+where
+    K: LLWeight + Clone + Debug,
+{
+    fn as_ref(&self) -> &K {
+        unsafe { &*(*self.inner).k.as_ptr() }
+    }
+}
+
+impl<K> AsMut<K> for LLNodeOwned<K>
+where
+    K: LLWeight + Clone + Debug,
+{
+    fn as_mut(&mut self) -> &mut K {
+        unsafe { &mut *(*self.inner).k.as_mut_ptr() }
+    }
+}
+
+impl<K> Drop for LLNodeOwned<K>
+where
+    K: LLWeight + Clone + Debug,
+{
+    fn drop(&mut self) {
+        if !self.inner.is_null() {
+            unsafe {
+                debug_assert!((*self.inner).next.is_null());
+                debug_assert!((*self.inner).prev.is_null());
+            }
+            // An owned node must be re-linked into a list (or freed by it)
+            // before the handle is dropped - anything else is a leak.
+            assert!(false);
+        }
+    }
 }

 #[derive(Clone, Debug)]
@@ -74,15 +222,15 @@ where
     {
     }

     // Append a k to the set, and return its pointer.
-    pub(crate) fn append_k(&mut self, k: K) -> *mut LLNode<K> {
+    pub(crate) fn append_k(&mut self, k: K) -> LLNodeRef<K> {
         let n = LLNode::new(k);
-        self.append_n(n);
-        n
+        self.append_n(n)
     }

     // Append an arbitrary node into this set.
-    pub(crate) fn append_n(&mut self, n: *mut LLNode<K>) {
+    pub(crate) fn append_n(&mut self, mut owned: LLNodeOwned<K>) -> LLNodeRef<K> {
         // Who is to the left of tail?
+        let n = owned.into_inner();
         unsafe {
             self.size += (*(*n).k.as_ptr()).ll_weight();
             // must be untagged
@@ -105,61 +253,64 @@ where
             debug_assert!(!(*(*n).next).prev.is_null());
             debug_assert!((*(*n).prev).next == n);
             debug_assert!((*(*n).next).prev == n);
-        }
+        };
+        LLNodeRef { inner: n }
     }

     // Given a node ptr, extract and put it at the tail. IE hit.
-    pub(crate) fn touch(&mut self, n: *mut LLNode<K>) {
+    pub(crate) fn touch(&mut self, n: LLNodeRef<K>) {
         debug_assert!(self.size > 0);
-        if n == unsafe { (*self.tail).prev } {
+        if n.inner == unsafe { (*self.tail).prev } {
             // Done, no-op
         } else {
-            self.extract(n);
-            self.append_n(n);
+            let owned = self.extract(n);
+            self.append_n(owned);
         }
     }

     // remove this node from the ll, and return its ptr.
-    pub(crate) fn pop(&mut self) -> *mut LLNode<K> {
+    pub(crate) fn pop(&mut self) -> LLNodeOwned<K> {
         let n = unsafe { (*self.head).next };
-        self.extract(n);
-        debug_assert!(!n.is_null());
-        debug_assert!(n != self.head);
-        debug_assert!(n != self.tail);
-        n
+        let owned = self.extract(LLNodeRef { inner: n });
+        debug_assert!(!owned.is_null());
+        debug_assert!(owned.inner != self.head);
+        debug_assert!(owned.inner != self.tail);
+        owned
     }

     // Cut a node out from this list from any location.
-    pub(crate) fn extract(&mut self, n: *mut LLNode<K>) {
+    pub(crate) fn extract(&mut self, n: LLNodeRef<K>) -> LLNodeOwned<K> {
         assert!(self.size > 0);
         assert!(!n.is_null());

         unsafe {
             // We should have a prev and next
-            debug_assert!(!(*n).prev.is_null());
-            debug_assert!(!(*n).next.is_null());
+            debug_assert!(!(*n.inner).prev.is_null());
+            debug_assert!(!(*n.inner).next.is_null());
             // And that prev's next is us, and next's prev is us.
-            debug_assert!(!(*(*n).prev).next.is_null());
-            debug_assert!(!(*(*n).next).prev.is_null());
-            debug_assert!((*(*n).prev).next == n);
-            debug_assert!((*(*n).next).prev == n);
+            debug_assert!(!(*(*n.inner).prev).next.is_null());
+            debug_assert!(!(*(*n.inner).next).prev.is_null());
+            debug_assert!((*(*n.inner).prev).next == n.inner);
+            debug_assert!((*(*n.inner).next).prev == n.inner);
             // And we belong to this set
             // assert!((*n).tag == self.tag);
-            self.size -= (*(*n).k.as_ptr()).ll_weight();
+            self.size -= (*(*n.inner).k.as_ptr()).ll_weight();
         }

         unsafe {
-            let prev = (*n).prev;
-            let next = (*n).next;
+            let prev = (*n.inner).prev;
+            let next = (*n.inner).next;
             // prev <-> n <-> next
             (*next).prev = prev;
             (*prev).next = next;
             // Null things for paranoia.
             if cfg!(test) || cfg!(debug_assertions) {
-                (*n).prev = ptr::null_mut();
-                (*n).next = ptr::null_mut();
+                (*n.inner).prev = ptr::null_mut();
+                (*n.inner).next = ptr::null_mut();
             }
             // (*n).tag = 0;
         }
+
+        LLNodeOwned { inner: n.inner }
     }

     pub(crate) fn len(&self) -> usize {
@@ -204,15 +355,31 @@ where
     fn drop(&mut self) {
         let head = self.head;
         let tail = self.tail;
+
+        debug_assert!(head != tail);
+
+        // Get the first node.
         let mut n = unsafe { (*head).next };
         while n != tail {
-            let next = unsafe { (*n).next };
-            unsafe { ptr::drop_in_place((*n).k.as_mut_ptr()) };
-            LLNode::free(n);
-            n = next;
+            unsafe {
+                let next = (*n).next;
+                // For sanity - check that the next node correctly links back to us.
+                debug_assert!((*next).prev == n);
+
+                // K is not a null pointer.
+                debug_assert!(!(*n).k.as_mut_ptr().is_null());
+                ptr::drop_in_place((*n).k.as_mut_ptr());
+                // Now we can proceed.
+                LLNode::free(n);
+                n = next;
+            }
         }
+
         LLNode::free(head);
         LLNode::free(tail);
+
+        // #[cfg(all(test, not(miri)))]
+        // assert_released();
     }
 }
@@ -227,15 +394,20 @@ where
             next: ptr::null_mut(),
             prev: ptr::null_mut(),
             // tag: 0,
+            #[cfg(all(test, not(miri)))]
+            nid: alloc_nid(),
         }));
         let tail = Box::into_raw(Box::new(LLNode {
             k: MaybeUninit::uninit(),
             next: ptr::null_mut(),
-            prev: head,
+            prev: ptr::null_mut(),
             // tag: 0,
+            #[cfg(all(test, not(miri)))]
+            nid: alloc_nid(),
         }));
         unsafe {
             (*head).next = tail;
+            (*tail).prev = head;
         }
         (head, tail)
     }
@@ -244,20 +416,25 @@ where
     pub(crate) fn new(
         k: K,
         // tag: usize
-    ) -> *mut Self {
+    ) -> LLNodeOwned<K> {
         let b = Box::new(LLNode {
             k: MaybeUninit::new(k),
             next: ptr::null_mut(),
             prev: ptr::null_mut(),
             // tag,
+            #[cfg(all(test, not(miri)))]
+            nid: alloc_nid(),
         });
-        Box::into_raw(b)
+        let inner = Box::into_raw(b);
+        LLNodeOwned { inner }
     }

     #[inline]
     fn free(v: *mut Self) {
         debug_assert!(!v.is_null());
-        let _ = unsafe { Box::from_raw(v) };
+        let _llnode = unsafe { Box::from_raw(v) };
+        #[cfg(all(test, not(miri)))]
+        release_nid(_llnode.nid)
     }
 }
@@ -305,7 +482,7 @@ where
 #[cfg(test)]
 mod tests {
-    use super::{LLWeight, LL};
+    use super::{LLWeight, LL, assert_released};

     impl<T> LLWeight for Box<T> {
         #[inline]
         fn ll_weight(&self) -> usize {
             1
         }
     }
@@ -322,7 +499,7 @@ mod tests {
         // Allocate new nodes
         let n1 = ll.append_k(Box::new(1));
         let n2 = ll.append_k(Box::new(2));
-        let n3 = ll.append_k(Box::new(3));
+        let _n3 = ll.append_k(Box::new(3));
         let n4 = ll.append_k(Box::new(4));
         // Check that n1 is the head, n4 is the tail.
         assert!(ll.len() == 4);
@@ -330,36 +507,36 @@
         assert!(ll.peek_head().unwrap().as_ref() == &1);
         assert!(ll.peek_tail().unwrap().as_ref() == &4);

         // Touch 2, it's now tail.
-        ll.touch(n2);
+        ll.touch(n2.clone());
         assert!(ll.len() == 4);
         assert!(ll.peek_head().unwrap().as_ref() == &1);
         assert!(ll.peek_tail().unwrap().as_ref() == &2);

         // Touch 1 (head), it's the tail now.
-        ll.touch(n1);
+        ll.touch(n1.clone());
         assert!(ll.len() == 4);
         assert!(ll.peek_head().unwrap().as_ref() == &3);
         assert!(ll.peek_tail().unwrap().as_ref() == &1);

         // Touch 1 (tail), it stays as tail.
-        ll.touch(n1);
+        ll.touch(n1.clone());
         assert!(ll.len() == 4);
         assert!(ll.peek_head().unwrap().as_ref() == &3);
         assert!(ll.peek_tail().unwrap().as_ref() == &1);

         // pop from head
-        let _n3 = ll.pop();
+        let n3 = ll.pop();
         assert!(ll.len() == 3);
         assert!(ll.peek_head().unwrap().as_ref() == &4);
         assert!(ll.peek_tail().unwrap().as_ref() == &1);

         // cut a node out from any (head, mid, tail)
-        ll.extract(n2);
+        let n2 = ll.extract(n2);
         assert!(ll.len() == 2);
         assert!(ll.peek_head().unwrap().as_ref() == &4);
         assert!(ll.peek_tail().unwrap().as_ref() == &1);

-        ll.extract(n1);
+        let n1 = ll.extract(n1);
         assert!(ll.len() == 1);
         assert!(ll.peek_head().unwrap().as_ref() == &4);
         assert!(ll.peek_tail().unwrap().as_ref() == &4);
@@ -370,7 +547,7 @@
         assert!(ll.peek_head().unwrap().as_ref() == &4);
         assert!(ll.peek_tail().unwrap().as_ref() == &4);
         // Remove last
-        let _n4 = ll.pop();
+        let n4 = ll.pop();
         assert!(ll.len() == 0);
         assert!(ll.peek_head().is_none());
         assert!(ll.peek_tail().is_none());
@@ -380,6 +557,10 @@
         ll.append_n(n2);
         ll.append_n(n3);
         ll.append_n(n4);
+
+        drop(ll);
+
+        assert_released();
     }

     #[derive(Clone, Debug)]
@@ -409,5 +590,9 @@
         // Add back so they drop
         ll.append_n(n1);
         ll.append_n(n2);
+
+        drop(ll);
+
+        assert_released();
     }
 }
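The core change in `ll.rs` above is splitting the old bare `*mut LLNode<K>` handle into two types: `LLNodeOwned<K>` (returned by `pop`/`extract`, which must be re-linked with `append_n`) and `LLNodeRef<K>` (a copyable, non-owning handle that the cache states store). Below is a minimal self-contained sketch of that ownership discipline; the `Node*` types here are simplified stand-ins for illustration, not the crate's actual API:

```rust
use std::ptr;

// Simplified stand-in for LLNode<K>; illustration only.
struct Node {
    linked: bool,
}

// Non-owning handle: freely copyable, stored in cache state.
#[derive(Clone, Copy, PartialEq)]
struct NodeRef {
    inner: *mut Node,
}

// Owning handle: produced by pop/extract, consumed by append.
struct NodeOwned {
    inner: *mut Node,
}

impl NodeOwned {
    fn into_inner(&mut self) -> *mut Node {
        let x = self.inner;
        self.inner = ptr::null_mut();
        x
    }
}

impl Drop for NodeOwned {
    fn drop(&mut self) {
        // Mirrors the patch's drop guard: dropping a still-owned node means
        // it was never re-linked, which would leak the allocation.
        assert!(self.inner.is_null(), "owned node dropped without re-link");
    }
}

fn extract(r: NodeRef) -> NodeOwned {
    // Unlink: the caller now holds the node exclusively.
    unsafe { (*r.inner).linked = false };
    NodeOwned { inner: r.inner }
}

fn append(mut o: NodeOwned) -> NodeRef {
    // Re-link and hand back a non-owning reference. into_inner() nulls the
    // owned handle so its Drop guard becomes a no-op.
    let n = o.into_inner();
    unsafe { (*n).linked = true };
    NodeRef { inner: n }
}

fn main() {
    let n = Box::into_raw(Box::new(Node { linked: true }));
    let r = NodeRef { inner: n };
    let owned = extract(r); // exclusively held: safe to mutate node metadata
    let r2 = append(owned); // must re-link; dropping `owned` here would panic
    assert!(r2 == r);
    assert!(unsafe { (*n).linked });
    unsafe { drop(Box::from_raw(n)) }; // end of life: free the allocation
}
```

The patch enforces the same contract twice over: `LLNodeOwned::drop` asserts the node was re-homed, and the test-only `ALLOC_LIST` registry makes `assert_released()` fail if any node id was allocated but never freed.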
diff --git a/src/arcache/mod.rs b/src/arcache/mod.rs
index ad967ba..933b36f 100644
--- a/src/arcache/mod.rs
+++ b/src/arcache/mod.rs
@@ -14,7 +14,7 @@ mod ll;
 /// Stats collection for [ARCache]
 pub mod stats;

-use self::ll::{LLNode, LLWeight, LL};
+use self::ll::{LLNodeRef, LLWeight, LL};
 use self::stats::{ARCacheReadStat, ARCacheWriteStat};
 // use self::traits::ArcWeight;

@@ -82,11 +82,11 @@ enum CacheItem<K, V>
 where
     K: Hash + Eq + Ord + Clone + Debug + Sync + Send + 'static,
 {
-    Freq(*mut LLNode<CacheItemInner<K>>, V),
-    Rec(*mut LLNode<CacheItemInner<K>>, V),
-    GhostFreq(*mut LLNode<CacheItemInner<K>>),
-    GhostRec(*mut LLNode<CacheItemInner<K>>),
-    Haunted(*mut LLNode<CacheItemInner<K>>),
+    Freq(LLNodeRef<CacheItemInner<K>>, V),
+    Rec(LLNodeRef<CacheItemInner<K>>, V),
+    GhostFreq(LLNodeRef<CacheItemInner<K>>),
+    GhostRec(LLNodeRef<CacheItemInner<K>>),
+    Haunted(LLNodeRef<CacheItemInner<K>>),
 }

 unsafe impl<
@@ -237,7 +237,7 @@ where
 {
     // cache of our missed items to send forward.
     // On drop we drain this to the channel
-    set: Map<K, *mut LLNode<ReadCacheItem<K, V>>>,
+    set: Map<K, LLNodeRef<ReadCacheItem<K, V>>>,
     read_size: usize,
     tlru: LL<ReadCacheItem<K, V>>,
 }
@@ -327,7 +327,7 @@ impl<
     fn to_kvsref(&self) -> Option<(&K, &V, usize)> {
         match &self {
             CacheItem::Freq(lln, v) | CacheItem::Rec(lln, v) => {
-                let cii = unsafe { &*((**lln).k.as_ptr()) };
+                let cii = lln.as_ref();
                 Some((&cii.k, v, cii.size))
             }
             _ => None,
@@ -356,25 +356,25 @@ macro_rules! drain_ll_to_ghost {
         $stats:expr
     ) => {{
         while $ll.len() > 0 {
-            let n = $ll.pop();
-            debug_assert!(!n.is_null());
-            unsafe {
-                // Set the item's evict txid.
-                (*n).as_mut().txid = $txid;
-            }
-            let mut r = $cache.get_mut(unsafe { &(*n).as_mut().k });
+            let mut owned = $ll.pop();
+            debug_assert!(!owned.is_null());
+            // Set the item's evict txid.
+            owned.as_mut().txid = $txid;
+            let mut r = $cache.get_mut(&owned.as_ref().k);
             match r {
                 Some(ref mut ci) => {
                     let mut next_state = match &ci {
                         CacheItem::Freq(n, _) => {
-                            $gf.append_n(*n);
-                            $stats.evict_from_frequent(unsafe { &(**n).as_mut().k });
-                            CacheItem::GhostFreq(*n)
+                            debug_assert!(n == &owned);
+                            $stats.evict_from_frequent(&owned.as_ref().k);
+                            let pointer = $gf.append_n(owned);
+                            CacheItem::GhostFreq(pointer)
                         }
                         CacheItem::Rec(n, _) => {
-                            $gr.append_n(*n);
-                            $stats.evict_from_recent(unsafe { &(**n).as_mut().k });
-                            CacheItem::GhostRec(*n)
+                            debug_assert!(n == &owned);
+                            $stats.evict_from_recent(&owned.as_ref().k);
+                            let pointer = $gr.append_n(owned);
+                            CacheItem::GhostRec(pointer)
                         }
                         _ => {
                             // Impossible state!
@@ -405,31 +405,29 @@ macro_rules! evict_to_len {
         debug_assert!($ll.len() >= $size);

         while $ll.len() > $size {
-            let n = $ll.pop();
-            debug_assert!(!n.is_null());
-            let mut r = $cache.get_mut(unsafe { &(*n).as_mut().k });
-            unsafe {
-                // Set the item's evict txid.
-                (*n).as_mut().txid = $txid;
-            }
+            let mut owned = $ll.pop();
+            debug_assert!(!owned.is_null());
+            let mut r = $cache.get_mut(&owned.as_ref().k);
+            // Set the item's evict txid.
+            owned.as_mut().txid = $txid;
             match r {
                 Some(ref mut ci) => {
                     let mut next_state = match &ci {
                         CacheItem::Freq(llp, _v) => {
-                            debug_assert!(*llp == n);
+                            debug_assert!(llp == &owned);
                             // No need to extract, already popped!
                             // $ll.extract(*llp);
-                            $to_ll.append_n(*llp);
-                            $stats.evict_from_frequent(unsafe { &(**llp).as_mut().k });
-                            CacheItem::GhostFreq(*llp)
+                            $stats.evict_from_frequent(&owned.as_ref().k);
+                            let pointer = $to_ll.append_n(owned);
+                            CacheItem::GhostFreq(pointer)
                         }
                         CacheItem::Rec(llp, _v) => {
-                            debug_assert!(*llp == n);
+                            debug_assert!(llp == &owned);
                             // No need to extract, already popped!
                             // $ll.extract(*llp);
-                            $to_ll.append_n(*llp);
-                            $stats.evict_from_recent(unsafe { &(**llp).as_mut().k });
-                            CacheItem::GhostRec(*llp)
+                            $stats.evict_from_recent(&owned.as_ref().k);
+                            let pointer = $to_ll.append_n(owned);
+                            CacheItem::GhostRec(pointer)
                         }
                         _ => {
                             // Impossible state!
@@ -459,18 +457,19 @@ macro_rules! evict_to_haunted_len {
         debug_assert!($ll.len() >= $size);

         while $ll.len() > $size {
-            let n = $ll.pop();
-            debug_assert!(!n.is_null());
-            $to_ll.append_n(n);
-            let mut r = $cache.get_mut(unsafe { &(*n).as_mut().k });
-            unsafe {
-                // Set the item's evict txid.
-                (*n).as_mut().txid = $txid;
-            }
+            let mut owned = $ll.pop();
+            debug_assert!(!owned.is_null());
+
+            // Set the item's evict txid.
+            owned.as_mut().txid = $txid;
+
+            let pointer = $to_ll.append_n(owned);
+            let mut r = $cache.get_mut(&pointer.as_ref().k);
+
             match r {
                 Some(ref mut ci) => {
                     // Now change the state.
-                    let mut next_state = CacheItem::Haunted(n);
+                    let mut next_state = CacheItem::Haunted(pointer);
                     mem::swap(*ci, &mut next_state);
                 }
                 None => {
@@ -959,37 +958,37 @@ impl<
                 let mut next_state = match ci {
                     CacheItem::Freq(llp, _v) => {
                         // println!("tlocal {:?} Freq -> Freq", k);
-                        inner.freq.extract(*llp);
-                        unsafe { (**llp).as_mut().txid = commit_txid };
-                        inner.haunted.append_n(*llp);
-                        CacheItem::Haunted(*llp)
+                        let mut owned = inner.freq.extract(llp.clone());
+                        owned.as_mut().txid = commit_txid;
+                        let pointer = inner.haunted.append_n(owned);
+                        CacheItem::Haunted(pointer)
                     }
                     CacheItem::Rec(llp, _v) => {
                         // println!("tlocal {:?} Rec -> Freq", k);
                         // Remove the node and put it into freq.
-                        inner.rec.extract(*llp);
-                        unsafe { (**llp).as_mut().txid = commit_txid };
-                        inner.haunted.append_n(*llp);
-                        CacheItem::Haunted(*llp)
+                        let mut owned = inner.rec.extract(llp.clone());
+                        owned.as_mut().txid = commit_txid;
+                        let pointer = inner.haunted.append_n(owned);
+                        CacheItem::Haunted(pointer)
                     }
                     CacheItem::GhostFreq(llp) => {
                         // println!("tlocal {:?} GhostFreq -> Freq", k);
-                        inner.ghost_freq.extract(*llp);
-                        unsafe { (**llp).as_mut().txid = commit_txid };
-                        inner.haunted.append_n(*llp);
-                        CacheItem::Haunted(*llp)
+                        let mut owned = inner.ghost_freq.extract(llp.clone());
+                        owned.as_mut().txid = commit_txid;
+                        let pointer = inner.haunted.append_n(owned);
+                        CacheItem::Haunted(pointer)
                     }
                     CacheItem::GhostRec(llp) => {
                         // println!("tlocal {:?} GhostRec -> Rec", k);
-                        inner.ghost_rec.extract(*llp);
-                        unsafe { (**llp).as_mut().txid = commit_txid };
-                        inner.haunted.append_n(*llp);
-                        CacheItem::Haunted(*llp)
+                        let mut owned = inner.ghost_rec.extract(llp.clone());
+                        owned.as_mut().txid = commit_txid;
+                        let pointer = inner.haunted.append_n(owned);
+                        CacheItem::Haunted(pointer)
                     }
                     CacheItem::Haunted(llp) => {
                         // println!("tlocal {:?} Haunted -> Rec", k);
-                        unsafe { (**llp).as_mut().txid = commit_txid };
-                        CacheItem::Haunted(*llp)
+                        unsafe { llp.make_mut().txid = commit_txid };
+                        CacheItem::Haunted(llp.clone())
                     }
                 };
                 // Now change the state.
@@ -1004,24 +1003,24 @@ impl<
                 let mut next_state = match ci {
                     CacheItem::Freq(llp, _v) => {
                         // println!("tlocal {:?} Freq -> Freq", k);
-                        inner.freq.extract(*llp);
-                        unsafe { (**llp).as_mut().txid = commit_txid };
-                        unsafe { (**llp).as_mut().size = size };
+                        let mut owned = inner.freq.extract(llp.clone());
+                        owned.as_mut().txid = commit_txid;
+                        owned.as_mut().size = size;
                         // Move the list item to its head.
-                        inner.freq.append_n(*llp);
-                        stats.modify(unsafe { &(**llp).as_mut().k });
+                        stats.modify(&owned.as_ref().k);
+                        let pointer = inner.freq.append_n(owned);
                         // Update v.
-                        CacheItem::Freq(*llp, tci)
+                        CacheItem::Freq(pointer, tci)
                     }
                     CacheItem::Rec(llp, _v) => {
                         // println!("tlocal {:?} Rec -> Freq", k);
                         // Remove the node and put it into freq.
-                        inner.rec.extract(*llp);
-                        unsafe { (**llp).as_mut().txid = commit_txid };
-                        unsafe { (**llp).as_mut().size = size };
-                        inner.freq.append_n(*llp);
-                        stats.modify(unsafe { &(**llp).as_mut().k });
-                        CacheItem::Freq(*llp, tci)
+                        let mut owned = inner.rec.extract(llp.clone());
+                        owned.as_mut().txid = commit_txid;
+                        owned.as_mut().size = size;
+                        stats.modify(&owned.as_ref().k);
+                        let pointer = inner.freq.append_n(owned);
+                        CacheItem::Freq(pointer, tci)
                     }
                     CacheItem::GhostFreq(llp) => {
                         // println!("tlocal {:?} GhostFreq -> Freq", k);
@@ -1032,12 +1031,12 @@ impl<
                             &mut inner.p,
                             size,
                         );
-                        inner.ghost_freq.extract(*llp);
-                        unsafe { (**llp).as_mut().txid = commit_txid };
-                        unsafe { (**llp).as_mut().size = size };
-                        inner.freq.append_n(*llp);
-                        stats.ghost_frequent_revive(unsafe { &(**llp).as_mut().k });
-                        CacheItem::Freq(*llp, tci)
+                        let mut owned = inner.ghost_freq.extract(llp.clone());
+                        owned.as_mut().txid = commit_txid;
+                        owned.as_mut().size = size;
+                        stats.ghost_frequent_revive(&owned.as_ref().k);
+                        let pointer = inner.freq.append_n(owned);
+                        CacheItem::Freq(pointer, tci)
                     }
                     CacheItem::GhostRec(llp) => {
                         // println!("tlocal {:?} GhostRec -> Rec", k);
@@ -1049,22 +1048,22 @@ impl<
                             &mut inner.p,
                             size,
                         );
-                        inner.ghost_rec.extract(*llp);
-                        unsafe { (**llp).as_mut().txid = commit_txid };
-                        unsafe { (**llp).as_mut().size = size };
-                        inner.rec.append_n(*llp);
-                        stats.ghost_recent_revive(unsafe { &(**llp).as_mut().k });
-                        CacheItem::Rec(*llp, tci)
+                        let mut owned = inner.ghost_rec.extract(llp.clone());
+                        owned.as_mut().txid = commit_txid;
+                        owned.as_mut().size = size;
+                        stats.ghost_recent_revive(&owned.as_ref().k);
+                        let pointer = inner.rec.append_n(owned);
+                        CacheItem::Rec(pointer, tci)
                     }
                     CacheItem::Haunted(llp) => {
                         // stats.write_includes += 1;
                         // println!("tlocal {:?} Haunted -> Rec", k);
-                        inner.haunted.extract(*llp);
-                        unsafe { (**llp).as_mut().txid = commit_txid };
-                        unsafe { (**llp).as_mut().size = size };
-                        inner.rec.append_n(*llp);
-                        stats.include_haunted(unsafe { &(**llp).as_mut().k });
-                        CacheItem::Rec(*llp, tci)
+                        let mut owned = inner.haunted.extract(llp.clone());
+                        owned.as_mut().txid = commit_txid;
+                        owned.as_mut().size = size;
+                        stats.include_haunted(&owned.as_ref().k);
+                        let pointer = inner.rec.append_n(owned);
+                        CacheItem::Rec(pointer, tci)
                     }
                 };
                 // Now change the state.
@@ -1090,31 +1089,31 @@ impl<
                 let mut next_state = match &ci.v {
                     CacheItem::Freq(llp, v) => {
                         // println!("rxhit {:?} Freq -> Freq", k);
-                        inner.freq.touch(*llp);
-                        CacheItem::Freq(*llp, v.clone())
+                        inner.freq.touch(llp.clone());
+                        CacheItem::Freq(llp.clone(), v.clone())
                     }
                     CacheItem::Rec(llp, v) => {
                         // println!("rxhit {:?} Rec -> Freq", k);
-                        inner.rec.extract(*llp);
-                        inner.freq.append_n(*llp);
-                        CacheItem::Freq(*llp, v.clone())
+                        let owned = inner.rec.extract(llp.clone());
+                        let pointer = inner.freq.append_n(owned);
+                        CacheItem::Freq(pointer, v.clone())
                     }
                     // While we can't add this from nothing, we can
                     // at least keep it in the ghost sets.
                     CacheItem::GhostFreq(llp) => {
                         // println!("rxhit {:?} GhostFreq -> GhostFreq", k);
-                        inner.ghost_freq.touch(*llp);
-                        CacheItem::GhostFreq(*llp)
+                        inner.ghost_freq.touch(llp.clone());
+                        CacheItem::GhostFreq(llp.clone())
                     }
                     CacheItem::GhostRec(llp) => {
                         // println!("rxhit {:?} GhostRec -> GhostRec", k);
-                        inner.ghost_rec.touch(*llp);
-                        CacheItem::GhostRec(*llp)
+                        inner.ghost_rec.touch(llp.clone());
+                        CacheItem::GhostRec(llp.clone())
                     }
                     CacheItem::Haunted(llp) => {
                         // println!("rxhit {:?} Haunted -> Haunted", k);
                         // We can't do anything about this ...
-                        CacheItem::Haunted(*llp)
+                        CacheItem::Haunted(llp.clone())
                     }
                 };
                 mem::swap(&mut ci.v, &mut next_state);
@@ -1153,50 +1152,50 @@ impl<
             Some(ref mut ci) => {
                 let mut next_state = match &ci {
                     CacheItem::Freq(llp, _v) => {
-                        if unsafe { (**llp).as_ref().txid >= txid } || inner.min_txid > txid {
+                        if llp.as_ref().txid >= txid || inner.min_txid > txid {
                             // println!("rxinc {:?} Freq -> Freq (touch only)", k);
                             // Our cache already has a newer value, keep it.
-                            inner.freq.touch(*llp);
+                            inner.freq.touch(llp.clone());
                             None
                         } else {
                             // println!("rxinc {:?} Freq -> Freq (update)", k);
                             // The value is newer, update.
-                            inner.freq.extract(*llp);
-                            unsafe { (**llp).as_mut().txid = txid };
-                            unsafe { (**llp).as_mut().size = size };
-                            inner.freq.append_n(*llp);
-                            stats.modify(unsafe { &(**llp).as_mut().k });
-                            Some(CacheItem::Freq(*llp, iv))
+                            let mut owned = inner.freq.extract(llp.clone());
+                            owned.as_mut().txid = txid;
+                            owned.as_mut().size = size;
+                            stats.modify(&owned.as_ref().k);
+                            let pointer = inner.freq.append_n(owned);
+                            Some(CacheItem::Freq(pointer, iv))
                         }
                     }
                     CacheItem::Rec(llp, v) => {
-                        inner.rec.extract(*llp);
-                        if unsafe { (**llp).as_ref().txid >= txid } || inner.min_txid > txid {
+                        let mut owned = inner.rec.extract(llp.clone());
+                        if llp.as_ref().txid >= txid || inner.min_txid > txid {
                             // println!("rxinc {:?} Rec -> Freq (touch only)", k);
-                            inner.freq.append_n(*llp);
-                            Some(CacheItem::Freq(*llp, v.clone()))
+                            let pointer = inner.freq.append_n(owned);
+                            Some(CacheItem::Freq(pointer, v.clone()))
                         } else {
                             // println!("rxinc {:?} Rec -> Freq (update)", k);
-                            unsafe { (**llp).as_mut().txid = txid };
-                            unsafe { (**llp).as_mut().size = size };
-                            inner.freq.append_n(*llp);
-                            stats.modify(unsafe { &(**llp).as_mut().k });
-                            Some(CacheItem::Freq(*llp, iv))
+                            owned.as_mut().txid = txid;
+                            owned.as_mut().size = size;
+                            stats.modify(&owned.as_ref().k);
+                            let pointer = inner.freq.append_n(owned);
+                            Some(CacheItem::Freq(pointer, iv))
                         }
                     }
                     CacheItem::GhostFreq(llp) => {
                         // Adjust p
-                        if unsafe { (**llp).as_ref().txid > txid } || inner.min_txid > txid {
+                        if llp.as_ref().txid > txid || inner.min_txid > txid {
                             // println!("rxinc {:?} GhostFreq -> GhostFreq", k);
                             // The cache version is newer, this is just a hit.
-                            let size = unsafe { (**llp).as_mut().size };
+                            let size = llp.as_ref().size;
                             Self::calc_p_freq(
                                 inner.ghost_rec.len(),
                                 inner.ghost_freq.len(),
                                 &mut inner.p,
                                 size,
                             );
-                            inner.ghost_freq.touch(*llp);
+                            inner.ghost_freq.touch(llp.clone());
                             None
                         } else {
                             // This item is newer, so we can include it.
@@ -1207,19 +1206,19 @@ impl<
                                 &mut inner.p,
                                 size,
                             );
-                            inner.ghost_freq.extract(*llp);
-                            unsafe { (**llp).as_mut().txid = txid };
-                            unsafe { (**llp).as_mut().size = size };
-                            inner.freq.append_n(*llp);
-                            stats.ghost_frequent_revive(unsafe { &(**llp).as_mut().k });
-                            Some(CacheItem::Freq(*llp, iv))
+                            let mut owned = inner.ghost_freq.extract(llp.clone());
+                            owned.as_mut().txid = txid;
+                            owned.as_mut().size = size;
+                            stats.ghost_frequent_revive(&owned.as_ref().k);
+                            let pointer = inner.freq.append_n(owned);
+                            Some(CacheItem::Freq(pointer, iv))
                         }
                     }
                     CacheItem::GhostRec(llp) => {
                         // Adjust p
-                        if unsafe { (**llp).as_ref().txid > txid } || inner.min_txid > txid {
+                        if llp.as_ref().txid > txid || inner.min_txid > txid {
                             // println!("rxinc {:?} GhostRec -> GhostRec", k);
-                            let size = unsafe { (**llp).as_mut().size };
+                            let size = llp.as_ref().size;
                             Self::calc_p_rec(
                                 shared.max,
                                 inner.ghost_rec.len(),
@@ -1227,7 +1226,7 @@ impl<
                                 &mut inner.p,
                                 size,
                             );
-                            inner.ghost_rec.touch(*llp);
+                            inner.ghost_rec.touch(llp.clone());
                             None
                         } else {
                             // println!("rxinc {:?} GhostRec -> Rec", k);
@@ -1238,26 +1237,26 @@ impl<
                                 &mut inner.p,
                                 size,
                             );
-                            inner.ghost_rec.extract(*llp);
-                            unsafe { (**llp).as_mut().txid = txid };
-                            unsafe { (**llp).as_mut().size = size };
-                            inner.rec.append_n(*llp);
-                            stats.ghost_recent_revive(unsafe { &(**llp).as_mut().k });
-                            Some(CacheItem::Rec(*llp, iv))
+                            let mut owned = inner.ghost_rec.extract(llp.clone());
+                            owned.as_mut().txid = txid;
+                            owned.as_mut().size = size;
+                            stats.ghost_recent_revive(&owned.as_ref().k);
+                            let pointer = inner.rec.append_n(owned);
+                            Some(CacheItem::Rec(pointer, iv))
                         }
                     }
                     CacheItem::Haunted(llp) => {
-                        if unsafe { (**llp).as_ref().txid > txid } || inner.min_txid > txid {
+                        if llp.as_ref().txid > txid || inner.min_txid > txid {
                             // println!("rxinc {:?} Haunted -> Haunted", k);
                             None
                         } else {
                             // println!("rxinc {:?} Haunted -> Rec", k);
-                            inner.haunted.extract(*llp);
-                            unsafe { (**llp).as_mut().txid = txid };
-                            unsafe { (**llp).as_mut().size = size };
-                            inner.rec.append_n(*llp);
-                            stats.include_haunted(unsafe { &(**llp).as_mut().k });
-                            Some(CacheItem::Rec(*llp, iv))
+                            let mut owned = inner.haunted.extract(llp.clone());
+                            owned.as_mut().txid = txid;
+                            owned.as_mut().size = size;
+                            stats.include_haunted(&owned.as_ref().k);
+                            let pointer = inner.rec.append_n(owned);
+                            Some(CacheItem::Rec(pointer, iv))
                         }
                     }
                 };
@@ -1318,20 +1317,20 @@ impl<
                 // TODO: find a way to remove these clones
                 let mut next_state = match &ci.v {
                     CacheItem::Freq(llp, v) => {
-                        if unsafe { (**llp).as_ref().txid != commit_txid } {
+                        if llp.as_ref().txid != commit_txid {
                             // println!("hit {:?} Freq -> Freq", k);
-                            inner.freq.touch(*llp);
-                            Some(CacheItem::Freq(*llp, v.clone()))
+                            inner.freq.touch(llp.clone());
+                            Some(CacheItem::Freq(llp.clone(), v.clone()))
                         } else {
                             None
                         }
                     }
                     CacheItem::Rec(llp, v) => {
-                        if unsafe { (**llp).as_ref().txid != commit_txid } {
+                        if llp.as_ref().txid != commit_txid {
                             // println!("hit {:?} Rec -> Freq", k);
-                            inner.rec.extract(*llp);
-                            inner.freq.append_n(*llp);
-                            Some(CacheItem::Freq(*llp, v.clone()))
+                            let owned = inner.rec.extract(llp.clone());
+                            let pointer = inner.freq.append_n(owned);
+                            Some(CacheItem::Freq(pointer, v.clone()))
                         } else {
                             None
                         }
@@ -1939,14 +1938,14 @@ impl<
     /// Yield an iterator over all currently live and valid cache items.
     pub fn iter(&self) -> impl Iterator<Item = (&K, &V)> {
         self.cache.values().filter_map(|ci| match &ci {
-            CacheItem::Rec(lln, v) => unsafe {
-                let cii = &*((**lln).k.as_ptr());
+            CacheItem::Rec(lln, v) => {
+                let cii = lln.as_ref();
                 Some((&cii.k, v))
-            },
-            CacheItem::Freq(lln, v) => unsafe {
-                let cii = &*((**lln).k.as_ptr());
+            }
+            CacheItem::Freq(lln, v) => {
+                let cii = lln.as_ref();
                 Some((&cii.k, v))
-            },
+            }
             _ => None,
         })
     }
@@ -1955,10 +1954,10 @@ impl<
     /// recent access list.
     pub fn iter_rec(&self) -> impl Iterator<Item = &K> {
         self.cache.values().filter_map(|ci| match &ci {
-            CacheItem::Rec(lln, _) => unsafe {
-                let cii = &*((**lln).k.as_ptr());
+            CacheItem::Rec(lln, _) => {
+                let cii = lln.as_ref();
                 Some(&cii.k)
-            },
+            }
             _ => None,
         })
     }
@@ -1967,10 +1966,10 @@ impl<
     /// frequent access list.
     pub fn iter_freq(&self) -> impl Iterator<Item = &K> {
         self.cache.values().filter_map(|ci| match &ci {
-            CacheItem::Rec(lln, _) => unsafe {
-                let cii = &*((**lln).k.as_ptr());
+            CacheItem::Freq(lln, _) => {
+                let cii = lln.as_ref();
                 Some(&cii.k)
-            },
+            }
             _ => None,
         })
     }
@@ -1978,10 +1977,10 @@ impl<
     #[cfg(test)]
     pub(crate) fn iter_ghost_rec(&self) -> impl Iterator<Item = &K> {
         self.cache.values().filter_map(|ci| match &ci {
-            CacheItem::GhostRec(lln) => unsafe {
-                let cii = &*((**lln).k.as_ptr());
+            CacheItem::GhostRec(lln) => {
+                let cii = lln.as_ref();
                 Some(&cii.k)
-            },
+            }
             _ => None,
         })
     }
@@ -1989,10 +1988,10 @@ impl<
     #[cfg(test)]
     pub(crate) fn iter_ghost_freq(&self) -> impl Iterator<Item = &K> {
         self.cache.values().filter_map(|ci| match &ci {
-            CacheItem::GhostFreq(lln) => unsafe {
-                let cii = &*((**lln).k.as_ptr());
+            CacheItem::GhostFreq(lln) => {
+                let cii = lln.as_ref();
                 Some(&cii.k)
-            },
+            }
             _ => None,
         })
     }
@@ -2063,7 +2062,7 @@ impl<
             .tlocal
             .as_ref()
             .and_then(|cache| {
-                cache.set.get(k).map(|v| unsafe {
+                cache.set.get(k).map(|v| {
                     // Indicate a hit on the tlocal cache.
                     tlocal_hits = true;
@@ -2073,14 +2072,16 @@ impl<
                             k_hash,
                         });
                     }
-                    let v = &(**v).as_ref().v as *const _;
-                    // This discards the lifetime and repins it to the lifetime of `self`.
-                    &(*v)
+                    unsafe {
+                        let v = &v.as_ref().v as *const _;
+                        // This discards the lifetime and repins it to the lifetime of `self`.
+                        &(*v)
+                    }
                 })
             })
             .or_else(|| {
                 self.cache.get_prehashed(k, k_hash).and_then(|v| {
-                    (*v).to_vref().map(|vin| unsafe {
+                    (*v).to_vref().map(|vin| {
                         // Indicate a hit on the main cache.
                         hits = true;
@@ -2091,8 +2092,10 @@ impl<
                             });
                         }

-                        let vin = vin as *const _;
-                        &(*vin)
+                        unsafe {
+                            let vin = vin as *const _;
+                            &(*vin)
+                        }
                     })
                 })
             });
@@ -2141,16 +2144,15 @@ impl<
         if let Some(ref mut cache) = self.tlocal {
             self.stats.local_include();
             let n = if cache.tlru.len() >= cache.read_size {
-                let n = cache.tlru.pop();
+                let mut owned = cache.tlru.pop();
                 // swap the old_key/old_val out
                 let mut k_clone = k.clone();
-                unsafe {
-                    mem::swap(&mut k_clone, &mut (*n).as_mut().k);
-                    mem::swap(&mut v, &mut (*n).as_mut().v);
-                }
+                mem::swap(&mut k_clone, &mut owned.as_mut().k);
+                mem::swap(&mut v, &mut owned.as_mut().v);
                 // remove old K from the tree:
                 cache.set.remove(&k_clone);
-                n
+                // Return the owned node into the lru
+                cache.tlru.append_n(owned)
             } else {
                 // Just add it!
                 cache.tlru.append_k(ReadCacheItem {
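All three eviction macros in `mod.rs` (`drain_ll_to_ghost!`, `evict_to_len!`, `evict_to_haunted_len!`) now share one shape: pop an owned node from the source list, stamp its evict `txid` while it is held exclusively, re-link it into the destination list to obtain the fresh `LLNodeRef`, and swap that reference into the map entry's state. A simplified, self-contained model of that flow — the names and toy list types are hypothetical, not the crate's API:

```rust
use std::collections::BTreeMap;
use std::mem;

// Illustrative cache states; the real enum carries LLNodeRef handles.
#[derive(Debug, PartialEq)]
enum State {
    Rec,      // live in the recent list
    GhostRec, // evicted; key tracked only in the ghost-recent list
}

fn main() {
    // Lists modelled as vecs of (key, txid); index 0 is the head.
    let mut rec: Vec<(&str, u64)> = vec![("a", 10), ("b", 11)];
    let mut ghost_rec: Vec<(&str, u64)> = Vec::new();
    let mut cache: BTreeMap<&str, State> =
        [("a", State::Rec), ("b", State::Rec)].into();

    let evict_txid: u64 = 42;
    // Mirror of evict_to_len!: drain `rec` down to a single entry.
    while rec.len() > 1 {
        // 1. Pop the head; the entry is now exclusively owned here,
        //    like the LLNodeOwned returned by LL::pop().
        let (key, _old_txid) = rec.remove(0);
        // 2. Stamp the evict txid while it is held exclusively.
        let owned = (key, evict_txid);
        // 3. Re-link into the ghost list; in the patch this append
        //    returns the LLNodeRef that the next state stores.
        ghost_rec.push(owned);
        // 4. Swap the new state into the cache entry.
        let mut next_state = State::GhostRec;
        if let Some(ci) = cache.get_mut(key) {
            mem::swap(ci, &mut next_state);
        }
    }

    assert_eq!(cache["a"], State::GhostRec);
    assert_eq!(cache["b"], State::Rec);
    assert_eq!(ghost_rec.len(), 1);
}
```

The design point the macros rely on is ordering: the `txid` write happens between `pop()` and `append_n()`, i.e. while the node is owned and unreachable from any list, so no other state can observe a half-updated node.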