Skip to content

Commit

Permalink
fix for verify feature
Browse files Browse the repository at this point in the history
  • Loading branch information
ogabrielides committed Apr 30, 2024
1 parent 0fe432a commit 5dfb118
Show file tree
Hide file tree
Showing 8 changed files with 119 additions and 102 deletions.
112 changes: 15 additions & 97 deletions grovedb/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -166,13 +166,13 @@ mod util;
mod versioning;
#[cfg(feature = "full")]
mod visualize;
#[cfg(feature = "full")]
mod replication;

use std::collections::{BTreeMap, BTreeSet};
use std::collections::BTreeSet;
#[cfg(feature = "full")]
use std::{collections::HashMap, fmt, option::Option::None, path::Path};
use std::{collections::HashMap, option::Option::None, path::Path};

#[cfg(any(feature = "full", feature = "verify"))]
use element::helpers;
#[cfg(any(feature = "full", feature = "verify"))]
pub use element::Element;
#[cfg(feature = "full")]
Expand All @@ -197,10 +197,11 @@ use grovedb_merk::tree::kv::ValueDefinedCostType;
#[cfg(feature = "full")]
use grovedb_merk::{
self,
tree::{combine_hash, value_hash},
BatchEntry, CryptoHash, KVIterator, Merk,
BatchEntry,
CryptoHash, KVIterator, Merk, tree::{combine_hash, value_hash},
};
use grovedb_merk::{proofs::Op, ChunkProducer, Restorer};
#[cfg(feature = "full")]
use grovedb_merk::{ChunkProducer, proofs::Op, Restorer};
use grovedb_path::SubtreePath;
#[cfg(feature = "full")]
use grovedb_storage::rocksdb_storage::PrefixedRocksDbImmediateStorageContext;
Expand All @@ -221,10 +222,14 @@ pub use query::{PathQuery, SizedQuery};
#[cfg(any(feature = "full", feature = "verify"))]
pub use crate::error::Error;
#[cfg(feature = "full")]
use crate::helpers::raw_decode;
use crate::element::helpers::raw_decode;
#[cfg(feature = "full")]
use crate::util::{root_merk_optional_tx, storage_context_optional_tx};
use crate::Error::MerkError;
#[cfg(feature = "full")]
pub use crate::replication::StateSyncInfo;
#[cfg(feature = "full")]
use crate::replication::SubtreesMetadata;

#[cfg(feature = "full")]
type Hash = [u8; 32];
Expand All @@ -235,61 +240,8 @@ pub struct GroveDb {
db: RocksDbStorage,
}

// Struct governing state sync
// Tracks a single state-sync (chunk restoration) session: which subtree
// prefixes are done, which one is in flight, and which chunks are pending.
pub struct StateSyncInfo<'db> {
    // Current Chunk restorer
    restorer: Option<Restorer<PrefixedRocksDbImmediateStorageContext<'db>>>,
    // Set of processed prefixes (Path digests)
    processed_prefixes: BTreeSet<SubtreePrefix>,
    // Current processed prefix (Path digest)
    current_prefix: Option<SubtreePrefix>,
    // Set of global chunk ids requested to be fetched and pending for processing. For the
    // description of global chunk id check fetch_chunk().
    pending_chunks: BTreeSet<Vec<u8>>,
    // Number of processed chunks in current prefix (Path digest)
    num_processed_chunks: usize,
}

pub(crate) type SubtreePrefix = [u8; blake3::OUT_LEN];

// Struct containing information about current subtrees found in GroveDB
pub struct SubtreesMetadata {
    // Map of Prefix (Path digest) -> (Actual path, Parent Subtree actual_value_hash, Parent
    // Subtree elem_value_hash). Note: Parent Subtree actual_value_hash and Parent Subtree
    // elem_value_hash are needed when verifying the newly constructed subtree afterwards.
    pub data: BTreeMap<SubtreePrefix, (Vec<Vec<u8>>, CryptoHash, CryptoHash)>,
}

impl SubtreesMetadata {
    // Creates an empty metadata map.
    pub fn new() -> SubtreesMetadata {
        SubtreesMetadata {
            data: BTreeMap::new(),
        }
    }
}

impl Default for SubtreesMetadata {
    // Delegates to `new()`; an empty metadata map.
    fn default() -> Self {
        Self::new()
    }
}

impl fmt::Debug for SubtreesMetadata {
    // Renders one "prefix -> path" line per subtree entry.
    // NOTE(review): the `writeln!` Result is discarded, so formatter errors
    // are silently swallowed (and `unused_must_use` fires) — should be `?`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        for (prefix, metadata) in self.data.iter() {
            let metadata_path = &metadata.0;
            let metadata_path_str = util_path_to_string(metadata_path);
            writeln!(
                f,
                " prefix:{:?} -> path:{:?}\n",
                hex::encode(prefix),
                metadata_path_str
            );
        }
        Ok(())
    }
}

/// Transaction
#[cfg(feature = "full")]
pub type Transaction<'db> = <RocksDbStorage as Storage<'db>>::Transaction;
Expand Down Expand Up @@ -1258,7 +1210,7 @@ impl GroveDb {
let mut res = vec![];

let (global_chunk_id, chunk_data) = chunk;
let (chunk_prefix, chunk_id) = util_split_global_chunk_id(global_chunk_id)?;
let (chunk_prefix, chunk_id) = replication::util_split_global_chunk_id(global_chunk_id)?;

match (
&mut state_sync_info.restorer,
Expand Down Expand Up @@ -1315,7 +1267,7 @@ impl GroveDb {

let subtrees_metadata = self.get_subtrees_metadata(tx)?;
if let Some(value) = subtrees_metadata.data.get(&current_prefix) {
println!(" path:{:?} done", util_path_to_string(&value.0));
println!(" path:{:?} done", replication::util_path_to_string(&value.0));
}

for (prefix, prefix_metadata) in &subtrees_metadata.data {
Expand Down Expand Up @@ -1352,37 +1304,3 @@ impl GroveDb {
Ok((res, state_sync_info))
}
}

// Converts a path into a human-readable string (for debugging)
// NOTE(review): panics if any path segment is not valid UTF-8 — acceptable
// for a debug helper only if callers never pass binary keys; worth confirming.
pub fn util_path_to_string(path: &[Vec<u8>]) -> Vec<String> {
    let mut subtree_path_str: Vec<String> = vec![];
    for subtree in path {
        let string = std::str::from_utf8(subtree).unwrap();
        subtree_path_str.push(string.parse().unwrap());
    }
    subtree_path_str
}

// Splits the given global chunk id into [SUBTREE_PREFIX:CHUNK_ID]
// The first 32 bytes are the blake3 subtree prefix; the remaining bytes must
// be valid UTF-8 and are returned as the chunk id string.
pub fn util_split_global_chunk_id(
    global_chunk_id: &[u8],
) -> Result<(SubtreePrefix, String), Error> {
    // The id must at least contain the full 32-byte prefix.
    let chunk_prefix_length: usize = 32;
    if global_chunk_id.len() < chunk_prefix_length {
        return Err(Error::CorruptedData(
            "expected global chunk id of at least 32 length".to_string(),
        ));
    }

    let (chunk_prefix, chunk_id) = global_chunk_id.split_at(chunk_prefix_length);
    // Length was checked above, so copying exactly 32 bytes cannot panic.
    let mut array = [0u8; 32];
    array.copy_from_slice(chunk_prefix);
    let chunk_prefix_key: SubtreePrefix = array;
    let str_chunk_id = String::from_utf8(chunk_id.to_vec());
    match str_chunk_id {
        Ok(s) => Ok((chunk_prefix_key, s)),
        Err(_) => Err(Error::CorruptedData(
            "unable to convert chunk id to string".to_string(),
        )),
    }
}
95 changes: 95 additions & 0 deletions grovedb/src/replication.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
use grovedb_merk::merk::restore::Restorer;
use grovedb_storage::rocksdb_storage::storage_context::context_immediate::PrefixedRocksDbImmediateStorageContext;
use std::collections::{BTreeMap, BTreeSet};
use grovedb_merk::tree::hash::CryptoHash;
use std::fmt;
use crate::Error;

// A subtree prefix is the blake3 digest of the subtree's path.
pub(crate) type SubtreePrefix = [u8; blake3::OUT_LEN];

// Struct governing state sync
// Tracks a single state-sync (chunk restoration) session: which subtree
// prefixes are done, which one is in flight, and which chunks are pending.
pub struct StateSyncInfo<'db> {
    // Current Chunk restorer
    pub restorer: Option<Restorer<PrefixedRocksDbImmediateStorageContext<'db>>>,
    // Set of processed prefixes (Path digests)
    pub processed_prefixes: BTreeSet<SubtreePrefix>,
    // Current processed prefix (Path digest)
    pub current_prefix: Option<SubtreePrefix>,
    // Set of global chunk ids requested to be fetched and pending for processing. For the
    // description of global chunk id check fetch_chunk().
    pub pending_chunks: BTreeSet<Vec<u8>>,
    // Number of processed chunks in current prefix (Path digest)
    pub num_processed_chunks: usize,
}

// Struct containing information about current subtrees found in GroveDB
pub struct SubtreesMetadata {
    // Map of Prefix (Path digest) -> (Actual path, Parent Subtree actual_value_hash, Parent
    // Subtree elem_value_hash). Note: Parent Subtree actual_value_hash and Parent Subtree
    // elem_value_hash are needed when verifying the newly constructed subtree afterwards.
    pub data: BTreeMap<SubtreePrefix, (Vec<Vec<u8>>, CryptoHash, CryptoHash)>,
}

impl SubtreesMetadata {
    /// Creates an empty metadata map with no subtree entries.
    pub fn new() -> SubtreesMetadata {
        SubtreesMetadata {
            data: BTreeMap::default(),
        }
    }
}

impl Default for SubtreesMetadata {
fn default() -> Self {
Self::new()
}
}

impl fmt::Debug for SubtreesMetadata {
    /// Renders one "prefix -> path" line per subtree entry.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        for (prefix, metadata) in self.data.iter() {
            let metadata_path = &metadata.0;
            let metadata_path_str = util_path_to_string(metadata_path);
            // Propagate write failures with `?` — the original discarded the
            // `writeln!` Result (`unused_must_use` lint, formatter errors
            // silently lost). The trailing "\n" is also dropped: `writeln!`
            // already appends a newline, so entries were double-spaced.
            writeln!(
                f,
                " prefix:{:?} -> path:{:?}",
                hex::encode(prefix),
                metadata_path_str
            )?;
        }
        Ok(())
    }
}

/// Converts a path into a human-readable string vector (for debugging).
///
/// Non-UTF-8 path segments are rendered with U+FFFD replacement characters
/// instead of panicking, so this helper is safe to call on binary keys.
pub fn util_path_to_string(path: &[Vec<u8>]) -> Vec<String> {
    path.iter()
        // `from_utf8_lossy` never fails, unlike the previous
        // `from_utf8(..).unwrap()` followed by `parse().unwrap()`, which
        // panicked on any non-UTF-8 segment (and the `parse` round-trip was a
        // needless way to spell `to_string`).
        .map(|subtree| String::from_utf8_lossy(subtree).into_owned())
        .collect()
}

// Splits the given global chunk id into [SUBTREE_PREFIX:CHUNK_ID]
pub fn util_split_global_chunk_id(
global_chunk_id: &[u8],
) -> Result<(crate::SubtreePrefix, String), Error> {
let chunk_prefix_length: usize = 32;
if global_chunk_id.len() < chunk_prefix_length {
return Err(Error::CorruptedData(
"expected global chunk id of at least 32 length".to_string(),
));
}

let (chunk_prefix, chunk_id) = global_chunk_id.split_at(chunk_prefix_length);
let mut array = [0u8; 32];
array.copy_from_slice(chunk_prefix);
let chunk_prefix_key: crate::SubtreePrefix = array;
let str_chunk_id = String::from_utf8(chunk_id.to_vec());
match str_chunk_id {
Ok(s) => Ok((chunk_prefix_key, s)),
Err(_) => Err(Error::CorruptedData(
"unable to convert chunk id to string".to_string(),
)),
}
}
4 changes: 3 additions & 1 deletion merk/src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
// DEALINGS IN THE SOFTWARE.

//! Errors

#[cfg(feature = "full")]
use crate::proofs::chunk::error::ChunkError;

#[cfg(any(feature = "full", feature = "verify"))]
Expand Down Expand Up @@ -64,6 +64,7 @@ pub enum Error {
CorruptedState(&'static str),

/// Chunking error
#[cfg(feature = "full")]
#[error("chunking error {0}")]
ChunkingError(ChunkError),

Expand All @@ -73,6 +74,7 @@ pub enum Error {
OldChunkingError(&'static str),

/// Chunk restoring error
#[cfg(feature = "full")]
#[error("chunk restoring error {0}")]
ChunkRestoringError(ChunkError),

Expand Down
2 changes: 1 addition & 1 deletion merk/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ extern crate core;

/// The top-level store API.
#[cfg(feature = "full")]
mod merk;
pub mod merk;

#[cfg(feature = "full")]
pub use crate::merk::{chunks::ChunkProducer, options::MerkOptions, restore::Restorer};
Expand Down
2 changes: 2 additions & 0 deletions merk/src/proofs/tree.rs
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ use super::{Node, Op};
use crate::tree::{combine_hash, kv_digest_to_kv_hash, kv_hash, node_hash, value_hash, NULL_HASH};
#[cfg(any(feature = "full", feature = "verify"))]
use crate::{error::Error, tree::CryptoHash};
#[cfg(feature = "full")]
use crate::{
proofs::chunk::chunk::{LEFT, RIGHT},
Link,
Expand All @@ -61,6 +62,7 @@ pub struct Child {
}

impl Child {
#[cfg(feature = "full")]
pub fn as_link(&self) -> Link {
let (key, sum) = match &self.tree.node {
Node::KV(key, _) | Node::KVValueHash(key, ..) => (key.as_slice(), None),
Expand Down
2 changes: 1 addition & 1 deletion merk/src/tree/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ mod encoding;
#[cfg(feature = "full")]
mod fuzz_tests;
#[cfg(any(feature = "full", feature = "verify"))]
mod hash;
pub mod hash;
#[cfg(feature = "full")]
mod iter;
#[cfg(feature = "full")]
Expand Down
2 changes: 1 addition & 1 deletion storage/src/rocksdb_storage.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@

//! GroveDB storage layer implemented over RocksDB backend.
mod storage;
mod storage_context;
pub mod storage_context;
pub mod test_utils;
#[cfg(test)]
mod tests;
Expand Down
2 changes: 1 addition & 1 deletion storage/src/rocksdb_storage/storage_context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
//! Implementation of prefixed storage context.

mod batch;
mod context_immediate;
pub mod context_immediate;
mod context_no_tx;
mod context_tx;
mod raw_iterator;
Expand Down

0 comments on commit 5dfb118

Please sign in to comment.