Lint cleanups
Also removed some unused error variants and other dead code,
and tightened visibility to pub(crate) instead of adding doc
comments to items that are not meant for external use.
rkuris committed Nov 3, 2024
1 parent 279e7e2 commit 6eb1850
Showing 12 changed files with 149 additions and 73 deletions.
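
As context for the db.rs changes below: the bulk of the diff swaps std::sync::RwLock for tokio::sync::RwLock around the revision manager, so lock guards are awaited instead of unwrapped with .expect("poisoned lock"). A minimal, self-contained sketch of that pattern, using hypothetical Manager and Db types rather than firewood's real ones:

use std::sync::Arc;
use tokio::sync::RwLock;

#[derive(Default)]
struct Manager {
    // stand-in for the revision manager's real state
    revisions: Vec<String>,
}

struct Db {
    manager: Arc<RwLock<Manager>>,
}

impl Db {
    // Before: self.manager.read().expect("poisoned lock")
    // After:  self.manager.read().await (tokio locks cannot be poisoned,
    // but they must be awaited, so the caller becomes an async fn)
    async fn all_hashes(&self) -> Vec<String> {
        self.manager.read().await.revisions.clone()
    }

    async fn add_revision(&self, rev: String) {
        self.manager.write().await.revisions.push(rev);
    }
}

#[tokio::main]
async fn main() {
    let db = Db {
        manager: Arc::new(RwLock::new(Manager::default())),
    };
    db.add_revision("rev-1".into()).await;
    assert_eq!(db.all_hashes().await, vec!["rev-1".to_string()]);
}

One visible consequence in the diff is that previously synchronous methods such as Db::dump become async fn, since awaiting the lock is only possible in an async context.
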
54 changes: 22 additions & 32 deletions firewood/src/db.rs
@@ -15,28 +15,26 @@ use std::error::Error;
use std::fmt;
use std::io::Write;
use std::path::Path;
use std::sync::{Arc, RwLock};
use std::sync::Arc;
use storage::{Committed, FileBacked, HashedNodeReader, ImmutableProposal, NodeStore, TrieHash};
use tokio::sync::RwLock;
use typed_builder::TypedBuilder;

#[derive(Debug)]
#[non_exhaustive]
/// Represents the different types of errors that can occur in the database.
pub enum DbError {
InvalidParams,
/// Merkle error occurred.
Merkle(MerkleError),
CreateError,
/// I/O error occurred.
IO(std::io::Error),
InvalidProposal,
}

impl fmt::Display for DbError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
DbError::InvalidParams => write!(f, "invalid parameters provided"),
DbError::Merkle(e) => write!(f, "merkle error: {e:?}"),
DbError::CreateError => write!(f, "database create error"),
DbError::IO(e) => write!(f, "I/O error: {e:?}"),
DbError::InvalidProposal => write!(f, "invalid proposal"),
}
}
}
@@ -51,6 +49,8 @@ impl Error for DbError {}

type HistoricalRev = NodeStore<Committed, FileBacked>;

/// Metrics for the database.
/// TODO: Add more metrics
pub struct DbMetrics {
proposals: metrics::Counter,
}
@@ -109,11 +109,13 @@ pub struct DbConfig {
/// existing contents will be lost.
#[builder(default = false)]
pub truncate: bool,
/// Revision manager configuration.
#[builder(default = RevisionManagerConfig::builder().build())]
pub manager: RevisionManagerConfig,
}

#[derive(Debug)]
/// A database instance.
pub struct Db {
metrics: Arc<DbMetrics>,
// TODO: consider using https://docs.rs/lock_api/latest/lock_api/struct.RwLock.html#method.upgradable_read
@@ -134,20 +136,16 @@
Self: 'p;

async fn revision(&self, root_hash: TrieHash) -> Result<Arc<Self::Historical>, api::Error> {
let nodestore = self
.manager
.read()
.expect("poisoned lock")
.revision(root_hash)?;
let nodestore = self.manager.read().await.revision(root_hash)?;
Ok(nodestore)
}

async fn root_hash(&self) -> Result<Option<TrieHash>, api::Error> {
Ok(self.manager.read().expect("poisoned lock").root_hash()?)
Ok(self.manager.read().await.root_hash()?)
}

async fn all_hashes(&self) -> Result<Vec<TrieHash>, api::Error> {
Ok(self.manager.read().expect("poisoned lock").all_hashes())
Ok(self.manager.read().await.all_hashes())
}

async fn propose<'p, K: KeyType, V: ValueType>(
@@ -157,11 +155,7 @@
where
Self: 'p,
{
let parent = self
.manager
.read()
.expect("poisoned lock")
.current_revision();
let parent = self.manager.read().await.current_revision();
let proposal = NodeStore::new(parent)?;
let mut merkle = Merkle::from(proposal);
for op in batch {
@@ -177,10 +171,7 @@
let nodestore = merkle.into_inner();
let immutable: Arc<NodeStore<Arc<ImmutableProposal>, FileBacked>> =
Arc::new(nodestore.into());
self.manager
.write()
.expect("poisoned lock")
.add_proposal(immutable.clone());
self.manager.write().await.add_proposal(immutable.clone());

self.metrics.proposals.increment(1);

@@ -193,6 +184,7 @@
}

impl Db {
/// Create a new database instance.
pub async fn new<P: AsRef<Path>>(db_path: P, cfg: DbConfig) -> Result<Self, api::Error> {
let metrics = Arc::new(DbMetrics {
proposals: counter!("firewood.proposals"),
@@ -211,24 +203,22 @@ impl Db {
}

/// Dump the Trie of the latest revision.
pub fn dump(&self, w: &mut dyn Write) -> Result<(), DbError> {
let latest_rev_nodestore = self
.manager
.read()
.expect("poisoned lock")
.current_revision();
pub async fn dump(&self, w: &mut dyn Write) -> Result<(), DbError> {
let latest_rev_nodestore = self.manager.read().await.current_revision();
let merkle = Merkle::from(latest_rev_nodestore);
// TODO: This should be a stream
let output = merkle.dump().map_err(DbError::Merkle)?;
write!(w, "{}", output).map_err(DbError::IO)
}

/// Get a copy of the database metrics
pub fn metrics(&self) -> Arc<DbMetrics> {
self.metrics.clone()
}
}

#[derive(Debug)]
/// A user-visible database proposal
pub struct Proposal<'p> {
nodestore: Arc<NodeStore<Arc<ImmutableProposal>, FileBacked>>,
db: &'p Db,
@@ -299,7 +289,7 @@ impl<'a> api::Proposal for Proposal<'a> {
self.db
.manager
.write()
.expect("poisoned lock")
.await
.add_proposal(immutable.clone());

Ok(Self::Proposal {
Expand All @@ -312,7 +302,7 @@ impl<'a> api::Proposal for Proposal<'a> {
async fn commit(self: Arc<Self>) -> Result<(), api::Error> {
match Arc::into_inner(self) {
Some(proposal) => {
let mut manager = proposal.db.manager.write().expect("poisoned lock");
let mut manager = proposal.db.manager.write().await;
Ok(manager.commit(proposal.nodestore.clone())?)
}
None => Err(api::Error::CannotCommitClonedProposal),
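
Taken together, the db.rs changes above yield an API usable roughly as in the hedged sketch below. It assumes DbConfig derives TypedBuilder with defaults for every field not set here, that api::Error implements std::error::Error, and that a tokio runtime is available; the path is arbitrary.

use firewood::db::{Db, DbConfig};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // truncate(true) discards any existing contents, per the DbConfig docs.
    let cfg = DbConfig::builder().truncate(true).build();
    let db = Db::new("/tmp/firewood-demo", cfg).await?;

    // dump() is now async because it reads the revision manager behind a
    // tokio RwLock; it writes a Graphviz digraph of the latest revision.
    let mut out = Vec::new();
    db.dump(&mut out).await?;
    println!("{}", String::from_utf8_lossy(&out));
    Ok(())
}
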
14 changes: 14 additions & 0 deletions firewood/src/lib.rs
@@ -105,13 +105,27 @@
//! A commit involves simply writing the nodes and the freelist to disk. If the proposal is
//! abandoned, nothing has actually been written to disk.
//!
#![warn(missing_debug_implementations, rust_2018_idioms, missing_docs)]
/// Database module for Firewood.
pub mod db;

/// Database manager module
pub mod manager;

/// Merkle module, containing merkle operations
pub mod merkle;

/// Proof module
pub mod proof;

/// Range proof module
pub mod range_proof;

/// Stream module, for both node and key-value streams
pub mod stream;

/// Version 2 API
pub mod v2;

/// Expose the storage logger
pub use storage::logger;
2 changes: 2 additions & 0 deletions firewood/src/manager.rs
@@ -17,11 +17,13 @@ use crate::v2::api::HashKey;
use storage::{Committed, FileBacked, ImmutableProposal, NodeStore, Parentable, TrieHash};

#[derive(Clone, Debug, TypedBuilder)]
/// Revision manager configuration
pub struct RevisionManagerConfig {
/// The number of historical revisions to keep in memory.
#[builder(default = 128)]
max_revisions: usize,

/// The size of the node cache
#[builder(default_code = "NonZero::new(1500000).expect(\"non-zero\")")]
node_cache_size: NonZero<usize>,

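
Both fields shown in this hunk carry builder defaults (128 revisions, 1,500,000 cached nodes), and DbConfig's own manager default is just RevisionManagerConfig::builder().build(). Overriding them looks like the sketch below; the concrete values are illustrative only, and it assumes any fields elided from this hunk also have defaults.

use std::num::NonZero;

use firewood::db::DbConfig;
use firewood::manager::RevisionManagerConfig;

fn custom_config() -> DbConfig {
    // Keep fewer historical revisions and a smaller node cache than the defaults.
    let manager = RevisionManagerConfig::builder()
        .max_revisions(64)
        .node_cache_size(NonZero::new(500_000).expect("non-zero"))
        .build();

    DbConfig::builder().manager(manager).build()
}
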
46 changes: 24 additions & 22 deletions firewood/src/merkle.rs
@@ -23,29 +23,23 @@ use storage::{

use thiserror::Error;

/// Keys are boxed u8 slices
pub type Key = Box<[u8]>;

/// Values are vectors
/// TODO: change to Box<[u8]>
pub type Value = Vec<u8>;

#[derive(Debug, Error)]
/// Errors that can occur when interacting with the Merkle trie
pub enum MerkleError {
/// Can't generate proof for empty node
#[error("can't generate proof for empty node")]
Empty,
#[error("read only")]
ReadOnly,
#[error("node not a branch node")]
NotBranchNode,

/// IO error
#[error("IO error: {0:?}")]
IO(#[from] std::io::Error),
#[error("parent should not be a leaf branch")]
ParentLeafBranch,
#[error("removing internal node references failed")]
UnsetInternal,
#[error("merkle serde error: {0}")]
BinarySerdeError(String),
#[error("invalid utf8")]
UTF8Error,
#[error("node not found")]
NodeNotFound,
}

// convert a set of nibbles into a printable string
@@ -141,12 +135,13 @@ fn get_helper<T: TrieReader>(
}

#[derive(Debug)]
/// Merkle operations against a nodestore
pub struct Merkle<T> {
nodestore: T,
}

impl<T> Merkle<T> {
pub fn into_inner(self) -> T {
pub(crate) fn into_inner(self) -> T {
self.nodestore
}
}
@@ -158,11 +153,12 @@
}

impl<T: TrieReader> Merkle<T> {
pub fn root(&self) -> Option<Arc<Node>> {
pub(crate) fn root(&self) -> Option<Arc<Node>> {
self.nodestore.root_node()
}

pub const fn nodestore(&self) -> &T {
#[cfg(test)]
pub(crate) const fn nodestore(&self) -> &T {
&self.nodestore
}

@@ -211,6 +207,7 @@ impl<T: TrieReader> Merkle<T> {
Ok(Proof(proof.into_boxed_slice()))
}

/// Verify a proof that a key has a certain value, or that the key isn't in the trie.
pub fn verify_range_proof<V: AsRef<[u8]>>(
&self,
_proof: &Proof<impl Hashable>,
@@ -222,7 +219,10 @@
todo!()
}

pub fn path_iter<'a>(&self, key: &'a [u8]) -> Result<PathIterator<'_, 'a, T>, MerkleError> {
pub(crate) fn path_iter<'a>(
&self,
key: &'a [u8],
) -> Result<PathIterator<'_, 'a, T>, MerkleError> {
PathIterator::new(&self.nodestore, key)
}

@@ -329,14 +329,14 @@ impl<T: TrieReader> Merkle<T> {
})
}

pub fn get_value(&self, key: &[u8]) -> Result<Option<Box<[u8]>>, MerkleError> {
pub(crate) fn get_value(&self, key: &[u8]) -> Result<Option<Box<[u8]>>, MerkleError> {
let Some(node) = self.get_node(key)? else {
return Ok(None);
};
Ok(node.value().map(|v| v.to_vec().into_boxed_slice()))
}

pub fn get_node(&self, key: &[u8]) -> Result<Option<Arc<Node>>, MerkleError> {
pub(crate) fn get_node(&self, key: &[u8]) -> Result<Option<Arc<Node>>, MerkleError> {
let Some(root) = self.root() else {
return Ok(None);
};
@@ -347,7 +347,7 @@
}

impl<T: HashedNodeReader> Merkle<T> {
pub fn dump_node(
pub(crate) fn dump_node(
&self,
addr: LinearAddress,
hash: Option<&TrieHash>,
@@ -392,7 +392,7 @@ impl<T: HashedNodeReader> Merkle<T> {
Ok(())
}

pub fn dump(&self) -> Result<String, MerkleError> {
pub(crate) fn dump(&self) -> Result<String, MerkleError> {
let mut result = vec![];
writeln!(result, "digraph Merkle {{\n rankdir=LR;")?;
if let Some((root_addr, root_hash)) = self.nodestore.root_address_and_hash()? {
@@ -417,6 +417,8 @@ impl<S: ReadableStorage> From<Merkle<NodeStore<MutableProposal, S>>>
}

impl<S: ReadableStorage> Merkle<NodeStore<MutableProposal, S>> {
/// Convert a merkle backed by a MutableProposal into an ImmutableProposal
/// TODO: We probably don't need this function
pub fn hash(self) -> Merkle<NodeStore<Arc<ImmutableProposal>, S>> {
self.into()
}
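
One detail worth noting in the trimmed-down MerkleError above: the #[from] attribute on the IO variant makes thiserror generate a From<std::io::Error> impl, so ? converts I/O failures into MerkleError automatically. A standalone sketch with a local copy of the variant and a hypothetical helper, not firewood code:

use std::io::Read;
use thiserror::Error;

#[derive(Debug, Error)]
enum MerkleError {
    /// IO error
    #[error("IO error: {0:?}")]
    IO(#[from] std::io::Error),
}

// Any failing read is converted into MerkleError::IO by the `?` operator.
fn read_node_bytes(mut src: impl Read) -> Result<Vec<u8>, MerkleError> {
    let mut buf = Vec::new();
    src.read_to_end(&mut buf)?;
    Ok(buf)
}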