Skip to content

Commit

Permalink
Remove ValueTransformer + IdTrans (#214)
Browse files Browse the repository at this point in the history
  • Loading branch information
rkuris authored Aug 24, 2023
1 parent 2c6810a commit dbac196
Show file tree
Hide file tree
Showing 5 changed files with 32 additions and 54 deletions.
7 changes: 3 additions & 4 deletions firewood/src/db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ pub use crate::{
};
use crate::{
file,
merkle::{IdTrans, Merkle, MerkleError, Node, TrieHash, TRIE_HASH_LEN},
merkle::{Merkle, MerkleError, Node, TrieHash, TRIE_HASH_LEN},
proof::{Proof, ProofError},
storage::{
buffer::{BufferWrite, DiskBuffer, DiskBufferRequester},
Expand Down Expand Up @@ -293,7 +293,7 @@ impl<S: ShaleStore<Node> + Send + Sync> DbRev<S> {
/// Get root hash of the generic key-value storage.
pub fn kv_root_hash(&self) -> Result<TrieHash, DbError> {
self.merkle
.root_hash::<IdTrans>(self.header.kv_root)
.root_hash(self.header.kv_root)
.map_err(DbError::Merkle)
}

Expand All @@ -315,8 +315,7 @@ impl<S: ShaleStore<Node> + Send + Sync> DbRev<S> {

/// Provides a proof that a key is in the Trie.
pub fn prove<K: AsRef<[u8]>>(&self, key: K) -> Result<Proof, MerkleError> {
self.merkle
.prove::<&[u8], IdTrans>(key.as_ref(), self.header.kv_root)
self.merkle.prove(key, self.header.kv_root)
}

/// Verifies a range proof is valid for a set of keys.
Expand Down
16 changes: 5 additions & 11 deletions firewood/src/merkle.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,7 @@ mod node;
mod partial_path;
mod trie_hash;

pub use node::{
BranchNode, Data, ExtNode, IdTrans, LeafNode, Node, NodeType, ValueTransformer, NBRANCH,
};
pub use node::{BranchNode, Data, ExtNode, LeafNode, Node, NodeType, NBRANCH};
pub use partial_path::PartialPath;
pub use trie_hash::{TrieHash, TRIE_HASH_LEN};

Expand Down Expand Up @@ -113,10 +111,7 @@ impl<S: ShaleStore<Node> + Send + Sync> Merkle<S> {
})
}

pub fn root_hash<T: ValueTransformer>(
&self,
root: DiskAddress,
) -> Result<TrieHash, MerkleError> {
pub fn root_hash(&self, root: DiskAddress) -> Result<TrieHash, MerkleError> {
let root = self
.get_node(root)?
.inner
Expand All @@ -125,7 +120,7 @@ impl<S: ShaleStore<Node> + Send + Sync> Merkle<S> {
.chd[0];
Ok(if let Some(root) = root {
let mut node = self.get_node(root)?;
let res = node.get_root_hash::<T, S>(self.store.as_ref()).clone();
let res = node.get_root_hash::<S>(self.store.as_ref()).clone();
if node.lazy_dirty.load(Ordering::Relaxed) {
node.write(|_| {}).unwrap();
node.lazy_dirty.store(false, Ordering::Relaxed);
Expand Down Expand Up @@ -1036,10 +1031,9 @@ impl<S: ShaleStore<Node> + Send + Sync> Merkle<S> {
/// If the trie does not contain a value for key, the returned proof contains
/// all nodes of the longest existing prefix of the key, ending with the node
/// that proves the absence of the key (at least the root node).
pub fn prove<K, T>(&self, key: K, root: DiskAddress) -> Result<Proof, MerkleError>
pub fn prove<K: AsRef<[u8]>>(&self, key: K, root: DiskAddress) -> Result<Proof, MerkleError>
where
K: AsRef<[u8]>,
T: ValueTransformer,
{
let key_nibbles = Nibbles::<0>::new(key.as_ref());

Expand Down Expand Up @@ -1112,7 +1106,7 @@ impl<S: ShaleStore<Node> + Send + Sync> Merkle<S> {
// Get the hashes of the nodes.
for node in nodes {
let node = self.get_node(node)?;
let rlp = <&[u8]>::clone(&node.get_eth_rlp::<T, S>(self.store.as_ref()));
let rlp = <&[u8]>::clone(&node.get_eth_rlp::<S>(self.store.as_ref()));
let hash: [u8; TRIE_HASH_LEN] = sha3::Keccak256::digest(rlp).into();
proofs.insert(hash, rlp.to_vec());
}
Expand Down
55 changes: 20 additions & 35 deletions firewood/src/merkle/node.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,18 +26,6 @@ impl std::ops::Deref for Data {
}
}

pub trait ValueTransformer {
fn transform(bytes: &[u8]) -> Vec<u8>;
}

pub struct IdTrans;

impl ValueTransformer for IdTrans {
fn transform(bytes: &[u8]) -> Vec<u8> {
bytes.to_vec()
}
}

#[derive(PartialEq, Eq, Clone)]
pub struct BranchNode {
pub(super) chd: [Option<DiskAddress>; NBRANCH],
Expand Down Expand Up @@ -86,21 +74,21 @@ impl BranchNode {
(only_chd, has_chd)
}

fn calc_eth_rlp<T: ValueTransformer, S: ShaleStore<Node>>(&self, store: &S) -> Vec<u8> {
fn calc_eth_rlp<S: ShaleStore<Node>>(&self, store: &S) -> Vec<u8> {
let mut stream = rlp::RlpStream::new_list(17);
for (i, c) in self.chd.iter().enumerate() {
match c {
Some(c) => {
let mut c_ref = store.get_item(*c).unwrap();
if c_ref.get_eth_rlp_long::<T, S>(store) {
stream.append(&&(*c_ref.get_root_hash::<T, S>(store))[..]);
if c_ref.get_eth_rlp_long::<S>(store) {
stream.append(&&(*c_ref.get_root_hash::<S>(store))[..]);
// See struct docs for ordering requirements
if c_ref.lazy_dirty.load(Ordering::Relaxed) {
c_ref.write(|_| {}).unwrap();
c_ref.lazy_dirty.store(false, Ordering::Relaxed)
}
} else {
let c_rlp = &c_ref.get_eth_rlp::<T, S>(store);
let c_rlp = &c_ref.get_eth_rlp::<S>(store);
stream.append_raw(c_rlp, 1);
}
}
Expand Down Expand Up @@ -170,10 +158,10 @@ impl Debug for LeafNode {
}

impl LeafNode {
fn calc_eth_rlp<T: ValueTransformer>(&self) -> Vec<u8> {
fn calc_eth_rlp(&self) -> Vec<u8> {
rlp::encode_list::<Vec<u8>, _>(&[
from_nibbles(&self.0.encode(true)).collect(),
T::transform(&self.1),
self.1.to_vec(),
])
.into()
}
Expand Down Expand Up @@ -205,19 +193,19 @@ impl Debug for ExtNode {
}

impl ExtNode {
fn calc_eth_rlp<T: ValueTransformer, S: ShaleStore<Node>>(&self, store: &S) -> Vec<u8> {
fn calc_eth_rlp<S: ShaleStore<Node>>(&self, store: &S) -> Vec<u8> {
let mut stream = rlp::RlpStream::new_list(2);
if !self.1.is_null() {
let mut r = store.get_item(self.1).unwrap();
stream.append(&from_nibbles(&self.0.encode(false)).collect::<Vec<_>>());
if r.get_eth_rlp_long::<T, S>(store) {
stream.append(&&(*r.get_root_hash::<T, S>(store))[..]);
if r.get_eth_rlp_long(store) {
stream.append(&&(*r.get_root_hash(store))[..]);
if r.lazy_dirty.load(Ordering::Relaxed) {
r.write(|_| {}).unwrap();
r.lazy_dirty.store(false, Ordering::Relaxed);
}
} else {
stream.append_raw(r.get_eth_rlp::<T, S>(store), 1);
stream.append_raw(r.get_eth_rlp(store), 1);
}
} else {
// Check if there is already a calculated rlp for the child, which
Expand Down Expand Up @@ -307,11 +295,11 @@ pub enum NodeType {
}

impl NodeType {
fn calc_eth_rlp<T: ValueTransformer, S: ShaleStore<Node>>(&self, store: &S) -> Vec<u8> {
fn calc_eth_rlp<S: ShaleStore<Node>>(&self, store: &S) -> Vec<u8> {
match &self {
NodeType::Leaf(n) => n.calc_eth_rlp::<T>(),
NodeType::Extension(n) => n.calc_eth_rlp::<T, S>(store),
NodeType::Branch(n) => n.calc_eth_rlp::<T, S>(store),
NodeType::Leaf(n) => n.calc_eth_rlp(),
NodeType::Extension(n) => n.calc_eth_rlp(store),
NodeType::Branch(n) => n.calc_eth_rlp(store),
}
}
}
Expand Down Expand Up @@ -339,25 +327,22 @@ impl Node {
})
}

pub(super) fn get_eth_rlp<T: ValueTransformer, S: ShaleStore<Node>>(&self, store: &S) -> &[u8] {
pub(super) fn get_eth_rlp<S: ShaleStore<Node>>(&self, store: &S) -> &[u8] {
self.eth_rlp
.get_or_init(|| self.inner.calc_eth_rlp::<T, S>(store))
.get_or_init(|| self.inner.calc_eth_rlp::<S>(store))
}

pub(super) fn get_root_hash<T: ValueTransformer, S: ShaleStore<Node>>(
&self,
store: &S,
) -> &TrieHash {
pub(super) fn get_root_hash<S: ShaleStore<Node>>(&self, store: &S) -> &TrieHash {
self.root_hash.get_or_init(|| {
self.lazy_dirty.store(true, Ordering::Relaxed);
TrieHash(Keccak256::digest(self.get_eth_rlp::<T, S>(store)).into())
TrieHash(Keccak256::digest(self.get_eth_rlp::<S>(store)).into())
})
}

fn get_eth_rlp_long<T: ValueTransformer, S: ShaleStore<Node>>(&self, store: &S) -> bool {
fn get_eth_rlp_long<S: ShaleStore<Node>>(&self, store: &S) -> bool {
*self.eth_rlp_long.get_or_init(|| {
self.lazy_dirty.store(true, Ordering::Relaxed);
self.get_eth_rlp::<T, S>(store).len() >= TRIE_HASH_LEN
self.get_eth_rlp(store).len() >= TRIE_HASH_LEN
})
}

Expand Down
6 changes: 3 additions & 3 deletions firewood/src/merkle_util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
// See the file LICENSE.md for licensing terms.

use crate::{
merkle::{IdTrans, Merkle, Node, Ref, RefMut, TrieHash},
merkle::{Merkle, Node, Ref, RefMut, TrieHash},
proof::{Proof, ProofError},
};
use shale::{
Expand Down Expand Up @@ -74,7 +74,7 @@ impl<S: ShaleStore<Node> + Send + Sync> MerkleSetup<S> {

pub fn root_hash(&self) -> Result<TrieHash, DataStoreError> {
self.merkle
.root_hash::<IdTrans>(self.root)
.root_hash(self.root)
.map_err(|_err| DataStoreError::RootHashError)
}

Expand All @@ -88,7 +88,7 @@ impl<S: ShaleStore<Node> + Send + Sync> MerkleSetup<S> {

pub fn prove<K: AsRef<[u8]>>(&self, key: K) -> Result<Proof, DataStoreError> {
self.merkle
.prove::<K, IdTrans>(key, self.root)
.prove(key, self.root)
.map_err(|_err| DataStoreError::ProofError)
}

Expand Down
2 changes: 1 addition & 1 deletion growth-ring/src/wal.rs
Original file line number Diff line number Diff line change
Expand Up @@ -369,7 +369,7 @@ impl<F: WalFile + 'static, S: WalStore<F>> WalFilePool<F, S> {
let mut last_h: Option<
Pin<Box<dyn Future<Output = Result<WalFileHandle<'a, F, S>, WalError>> + 'a>>,
> = None;
for ((next_fid, wl), h) in meta.into_iter().zip(files.into_iter()) {
for ((next_fid, wl), h) in meta.into_iter().zip(files) {
if let Some(lh) = last_h.take() {
if next_fid != fid {
lh.await?
Expand Down

0 comments on commit dbac196

Please sign in to comment.