
Commit

fmt
ogabrielides committed May 2, 2024
1 parent 7695634 commit 936d3f8
Showing 4 changed files with 85 additions and 39 deletions.
4 changes: 2 additions & 2 deletions grovedb/src/replication.rs
@@ -246,7 +246,7 @@ impl GroveDb {
"Unable to create to load chunk".to_string(),
)),
}
},
}
Err(_) => Err(Error::CorruptedData(
"Unable to create Chunk producer".to_string(),
)),
@@ -271,7 +271,7 @@ impl GroveDb {
"Unable to create to load chunk".to_string(),
)),
}
},
}
Err(_) => Err(Error::CorruptedData(
"Unable to create Chunk producer".to_string(),
)),
10 changes: 6 additions & 4 deletions merk/src/merk/chunks.rs
@@ -39,9 +39,9 @@ use crate::{
error::ChunkError,
util::{
chunk_height, chunk_index_from_traversal_instruction,
chunk_index_from_traversal_instruction_with_recovery, generate_traversal_instruction,
generate_traversal_instruction_as_vec_bytes, vec_bytes_as_traversal_instruction,
number_of_chunks,
chunk_index_from_traversal_instruction_with_recovery,
generate_traversal_instruction, generate_traversal_instruction_as_vec_bytes,
number_of_chunks, vec_bytes_as_traversal_instruction,
},
},
Node, Op,
@@ -383,7 +383,9 @@ where
self.chunk_with_index(self.index)
.and_then(|(chunk, chunk_index)| {
chunk_index
.map(|index| generate_traversal_instruction_as_vec_bytes(self.height, index))
.map(|index| {
generate_traversal_instruction_as_vec_bytes(self.height, index)
})
.transpose()
.map(|v| (chunk, v))
}),
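The reflowed closure in this hunk wraps an Option/Result conversion: the optional chunk index returned with each chunk is mapped to its byte-encoded traversal instruction, and `transpose` turns the resulting `Option<Result<_, _>>` into a `Result<Option<_>, _>` so errors propagate while "no further chunk" stays `None`. A minimal standalone sketch of that pattern follows; it uses stand-in types and a hypothetical `encode_index` helper rather than the merk API.

```rust
// Stand-in for generate_traversal_instruction_as_vec_bytes; hypothetical,
// for illustration only.
fn encode_index(height: usize, index: usize) -> Result<Vec<u8>, String> {
    Ok(vec![height as u8, index as u8])
}

// Mirrors the shape of the iterator body above: keep the chunk, optionally
// encode the next index, and let `transpose` surface any encoding error.
fn next_item(
    chunk: Vec<u8>,
    next_index: Option<usize>,
    height: usize,
) -> Result<(Vec<u8>, Option<Vec<u8>>), String> {
    next_index
        .map(|index| encode_index(height, index))
        .transpose()
        .map(|encoded| (chunk, encoded))
}

fn main() {
    let (chunk, next) = next_item(vec![0xAB], Some(3), 4).unwrap();
    assert_eq!(chunk, vec![0xAB]);
    assert_eq!(next, Some(vec![4u8, 3u8]));
    // With no next index the chunk is still returned, paired with None.
    assert_eq!(next_item(vec![0xCD], None, 4).unwrap().1, None);
}
```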
83 changes: 58 additions & 25 deletions merk/src/merk/restore.rs
@@ -41,7 +41,7 @@ use crate::{
chunk::{LEFT, RIGHT},
chunk_op::ChunkOp,
error::{ChunkError, ChunkError::InternalError},
util::{vec_bytes_as_traversal_instruction, traversal_instruction_as_vec_bytes},
util::{traversal_instruction_as_vec_bytes, vec_bytes_as_traversal_instruction},
},
tree::{execute, Child, Tree as ProofTree},
Node, Op,
@@ -122,7 +122,10 @@ impl<'db, S: StorageContext<'db>> Restorer<S> {

/// Process multi chunks (space optimized chunk proofs that can contain
/// multiple singular chunks)
pub fn process_multi_chunk(&mut self, multi_chunk: Vec<ChunkOp>) -> Result<Vec<Vec<u8>>, Error> {
pub fn process_multi_chunk(
&mut self,
multi_chunk: Vec<ChunkOp>,
) -> Result<Vec<Vec<u8>>, Error> {
let mut expect_chunk_id = true;
let mut chunk_ids = vec![];
let mut current_chunk_id = vec![];
@@ -241,7 +244,8 @@ impl<'db, S: StorageContext<'db>> Restorer<S> {
Node::Hash(hash) => {
// the node hash points to the root of another chunk
// we get the chunk id and add the hash to restorer state
let chunk_id = traversal_instruction_as_vec_bytes(node_traversal_instruction);
let chunk_id =
traversal_instruction_as_vec_bytes(node_traversal_instruction);
new_chunk_ids.push(chunk_id.to_vec());
self.chunk_id_to_root_hash.insert(chunk_id.to_vec(), *hash);
// TODO: handle unwrap
@@ -670,7 +674,10 @@ mod tests {
let (chunk, _) = chunk_producer.chunk_with_index(1).unwrap();
// apply first chunk
let new_chunk_ids = restorer
.process_chunk(&traversal_instruction_as_vec_bytes(vec![].as_slice()), chunk)
.process_chunk(
&traversal_instruction_as_vec_bytes(vec![].as_slice()),
chunk,
)
.expect("should process chunk successfully");
assert_eq!(new_chunk_ids.len(), 4);

@@ -679,22 +686,22 @@
assert_eq!(restorer.chunk_id_to_root_hash.len(), 4);
// assert all the chunk hash values
assert_eq!(
restorer.chunk_id_to_root_hash.get(vec![1,1].as_slice()),
restorer.chunk_id_to_root_hash.get(vec![1, 1].as_slice()),
Some(get_node_hash(traverse_get_node_hash(&mut tree_walker, &[LEFT, LEFT])).unwrap())
.as_ref()
);
assert_eq!(
restorer.chunk_id_to_root_hash.get(vec![1,0].as_slice()),
restorer.chunk_id_to_root_hash.get(vec![1, 0].as_slice()),
Some(get_node_hash(traverse_get_node_hash(&mut tree_walker, &[LEFT, RIGHT])).unwrap())
.as_ref()
);
assert_eq!(
restorer.chunk_id_to_root_hash.get(vec![0,1].as_slice()),
restorer.chunk_id_to_root_hash.get(vec![0, 1].as_slice()),
Some(get_node_hash(traverse_get_node_hash(&mut tree_walker, &[RIGHT, LEFT])).unwrap())
.as_ref()
);
assert_eq!(
restorer.chunk_id_to_root_hash.get(vec![0,0].as_slice()),
restorer.chunk_id_to_root_hash.get(vec![0, 0].as_slice()),
Some(get_node_hash(traverse_get_node_hash(&mut tree_walker, &[RIGHT, RIGHT])).unwrap())
.as_ref()
);
@@ -703,18 +710,26 @@
let (chunk, _) = chunk_producer.chunk_with_index(2).unwrap();
// apply second chunk
let new_chunk_ids = restorer
.process_chunk(&traversal_instruction_as_vec_bytes(&vec![LEFT, LEFT]), chunk)
.process_chunk(
&traversal_instruction_as_vec_bytes(&vec![LEFT, LEFT]),
chunk,
)
.unwrap();
assert_eq!(new_chunk_ids.len(), 0);
// chunk_map should have 1 less element
assert_eq!(restorer.chunk_id_to_root_hash.len(), 3);
assert_eq!(restorer.chunk_id_to_root_hash.get(vec![1,1].as_slice()), None);
assert_eq!(
restorer.chunk_id_to_root_hash.get(vec![1, 1].as_slice()),
None
);

// let's try to apply the second chunk again, should not work
let (chunk, _) = chunk_producer.chunk_with_index(2).unwrap();
// apply second chunk
let chunk_process_result =
restorer.process_chunk(&traversal_instruction_as_vec_bytes(&vec![LEFT, LEFT]), chunk);
let chunk_process_result = restorer.process_chunk(
&traversal_instruction_as_vec_bytes(&vec![LEFT, LEFT]),
chunk,
);
assert!(chunk_process_result.is_err());
assert!(matches!(
chunk_process_result,
@@ -724,8 +739,10 @@
// next let's get a random but expected chunk and work with that e.g. chunk 4
// but let's apply it to the wrong place
let (chunk, _) = chunk_producer.chunk_with_index(4).unwrap();
let chunk_process_result =
restorer.process_chunk(&traversal_instruction_as_vec_bytes(&vec![LEFT, RIGHT]), chunk);
let chunk_process_result = restorer.process_chunk(
&traversal_instruction_as_vec_bytes(&vec![LEFT, RIGHT]),
chunk,
);
assert!(chunk_process_result.is_err());
assert!(matches!(
chunk_process_result,
@@ -738,34 +755,52 @@
let (chunk, _) = chunk_producer.chunk_with_index(5).unwrap();
// apply second chunk
let new_chunk_ids = restorer
.process_chunk(&traversal_instruction_as_vec_bytes(&vec![RIGHT, RIGHT]), chunk)
.process_chunk(
&traversal_instruction_as_vec_bytes(&vec![RIGHT, RIGHT]),
chunk,
)
.unwrap();
assert_eq!(new_chunk_ids.len(), 0);
// chunk_map should have 1 less element
assert_eq!(restorer.chunk_id_to_root_hash.len(), 2);
assert_eq!(restorer.chunk_id_to_root_hash.get(vec![0,0].as_slice()), None);
assert_eq!(
restorer.chunk_id_to_root_hash.get(vec![0, 0].as_slice()),
None
);

// correctly apply chunk 3
let (chunk, _) = chunk_producer.chunk_with_index(3).unwrap();
// apply second chunk
let new_chunk_ids = restorer
.process_chunk(&traversal_instruction_as_vec_bytes(&vec![LEFT, RIGHT]), chunk)
.process_chunk(
&traversal_instruction_as_vec_bytes(&vec![LEFT, RIGHT]),
chunk,
)
.unwrap();
assert_eq!(new_chunk_ids.len(), 0);
// chunk_map should have 1 less element
assert_eq!(restorer.chunk_id_to_root_hash.len(), 1);
assert_eq!(restorer.chunk_id_to_root_hash.get(vec![1,0].as_slice()), None);
assert_eq!(
restorer.chunk_id_to_root_hash.get(vec![1, 0].as_slice()),
None
);

// correctly apply chunk 4
let (chunk, _) = chunk_producer.chunk_with_index(4).unwrap();
// apply second chunk
let new_chunk_ids = restorer
.process_chunk(&traversal_instruction_as_vec_bytes(&vec![RIGHT, LEFT]), chunk)
.process_chunk(
&traversal_instruction_as_vec_bytes(&vec![RIGHT, LEFT]),
chunk,
)
.unwrap();
assert_eq!(new_chunk_ids.len(), 0);
// chunk_map should have 1 less element
assert_eq!(restorer.chunk_id_to_root_hash.len(), 0);
assert_eq!(restorer.chunk_id_to_root_hash.get(vec![0,1].as_slice()), None);
assert_eq!(
restorer.chunk_id_to_root_hash.get(vec![0, 1].as_slice()),
None
);

// finalize merk
let restored_merk = restorer.finalize().expect("should finalized successfully");
@@ -862,9 +897,7 @@ mod tests {
// perform chunk production and processing
let mut chunk_id_opt = Some(vec![]);
while let Some(chunk_id) = chunk_id_opt {
let (chunk, next_chunk_id) = chunk_producer
.chunk(&chunk_id)
.expect("should get chunk");
let (chunk, next_chunk_id) = chunk_producer.chunk(&chunk_id).expect("should get chunk");
restorer
.process_chunk(&chunk_id, chunk)
.expect("should process chunk successfully");
@@ -1072,7 +1105,7 @@ mod tests {
// should only contain the first chunk
assert_eq!(multi_chunk.chunk.len(), 2);
// should point to chunk 2
assert_eq!(multi_chunk.next_index, Some(vec![1,1]));
assert_eq!(multi_chunk.next_index, Some(vec![1, 1]));
let next_ids = restorer.process_multi_chunk(multi_chunk.chunk).unwrap();
assert_eq!(next_ids.len(), 4);
assert_eq!(restorer.chunk_id_to_root_hash.len(), 4);
@@ -1085,7 +1118,7 @@
.multi_chunk_with_limit(multi_chunk.next_index.unwrap().as_slice(), Some(645))
.unwrap();
assert_eq!(multi_chunk.chunk.len(), 4);
assert_eq!(multi_chunk.next_index, Some(vec![0u8,1u8]));
assert_eq!(multi_chunk.next_index, Some(vec![0u8, 1u8]));
let next_ids = restorer.process_multi_chunk(multi_chunk.chunk).unwrap();
// chunks 2 and 3 are leaf chunks
assert_eq!(next_ids.len(), 0);
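The tests above drive restoration in a simple loop: ask the chunk producer for the chunk at a given id (the empty id being the root chunk), hand it to the restorer, then continue with the next chunk id the producer returns until none remains, and finally call `finalize()`. The sketch below reproduces only that control flow with dummy stand-in types; it is not the merk `ChunkProducer`/`Restorer` API, just the loop shape under those assumptions.

```rust
// Dummy stand-ins, purely illustrative; the real types live in merk.
struct Producer {
    remaining: Vec<Vec<u8>>, // chunk ids still to serve
}

struct Restorer {
    processed: usize,
}

impl Producer {
    // Returns the chunk payload for `chunk_id` plus the next chunk id, if any.
    fn chunk(&mut self, chunk_id: &[u8]) -> (Vec<u8>, Option<Vec<u8>>) {
        (chunk_id.to_vec(), self.remaining.pop())
    }
}

impl Restorer {
    fn process_chunk(&mut self, _chunk_id: &[u8], _chunk: Vec<u8>) {
        self.processed += 1;
    }
}

fn main() {
    // Chunk ids are byte-encoded traversal paths; the empty id is the root chunk.
    let mut producer = Producer {
        remaining: vec![vec![0, 1], vec![0, 0], vec![1, 0], vec![1, 1]],
    };
    let mut restorer = Restorer { processed: 0 };

    let mut chunk_id_opt: Option<Vec<u8>> = Some(vec![]);
    while let Some(chunk_id) = chunk_id_opt {
        let (chunk, next_chunk_id) = producer.chunk(&chunk_id);
        restorer.process_chunk(&chunk_id, chunk);
        chunk_id_opt = next_chunk_id;
    }
    assert_eq!(restorer.processed, 5); // root chunk plus four subtree chunks
}
```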
27 changes: 19 additions & 8 deletions merk/src/proofs/chunk/util.rs
@@ -171,7 +171,10 @@ fn exit_node_count(height: usize) -> usize {
}

/// Generate instruction for traversing to a given chunk index in a binary tree
pub fn generate_traversal_instruction(height: usize, chunk_index: usize) -> Result<Vec<bool>, Error> {
pub fn generate_traversal_instruction(
height: usize,
chunk_index: usize,
) -> Result<Vec<bool>, Error> {
let mut instructions = vec![];

let total_chunk_count = number_of_chunks(height);
@@ -226,8 +229,8 @@ pub fn generate_traversal_instruction(height: usize, chunk_index: usize) -> Resu
Ok(instructions)
}

/// Determine the chunk index given the traversal instruction and the max height of
/// the tree
/// Determine the chunk index given the traversal instruction and the max height
/// of the tree
pub fn chunk_index_from_traversal_instruction(
traversal_instruction: &[bool],
height: usize,
@@ -309,8 +312,8 @@ pub fn chunk_index_from_traversal_instruction(
Ok(current_chunk_index)
}

/// Determine the chunk index given the traversal instruction and the max height of
/// the tree. This can recover from traversal instructions not pointing to a
/// Determine the chunk index given the traversal instruction and the max height
/// of the tree. This can recover from traversal instructions not pointing to a
/// chunk boundary, in such a case, it backtracks until it hits a chunk
/// boundary.
pub fn chunk_index_from_traversal_instruction_with_recovery(
@@ -349,7 +352,9 @@ pub fn traversal_instruction_as_vec_bytes(instruction: &[bool]) -> Vec<u8> {

/// Converts a vec bytes that represents a traversal instruction
/// to a vec of bool, true = left and false = right
pub fn vec_bytes_as_traversal_instruction(instruction_vec_bytes: &[u8]) -> Result<Vec<bool>, Error> {
pub fn vec_bytes_as_traversal_instruction(
instruction_vec_bytes: &[u8],
) -> Result<Vec<bool>, Error> {
instruction_vec_bytes
.iter()
.map(|byte| match byte {
@@ -579,8 +584,14 @@ mod test {

#[test]
fn test_instruction_string_to_traversal_instruction() {
assert_eq!(vec_bytes_as_traversal_instruction(&vec![1u8]).unwrap(), vec![LEFT]);
assert_eq!(vec_bytes_as_traversal_instruction(&vec![0u8]).unwrap(), vec![RIGHT]);
assert_eq!(
vec_bytes_as_traversal_instruction(&vec![1u8]).unwrap(),
vec![LEFT]
);
assert_eq!(
vec_bytes_as_traversal_instruction(&vec![0u8]).unwrap(),
vec![RIGHT]
);
assert_eq!(
vec_bytes_as_traversal_instruction(&vec![0u8, 0u8, 1u8]).unwrap(),
vec![RIGHT, RIGHT, LEFT]
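Per the doc comment and round-trip test in this file, a traversal instruction is a `Vec<bool>` (true = LEFT, false = RIGHT) and its byte form uses one byte per step, `1u8` for LEFT and `0u8` for RIGHT. The snippet below is a standalone re-implementation of that convention, written only to illustrate it; the real functions are the ones shown in this diff.

```rust
// One byte per traversal step: 1 = LEFT (true), 0 = RIGHT (false).
fn traversal_instruction_as_vec_bytes(instruction: &[bool]) -> Vec<u8> {
    instruction
        .iter()
        .map(|step| if *step { 1u8 } else { 0u8 })
        .collect()
}

// Inverse mapping; any byte other than 0 or 1 is rejected.
fn vec_bytes_as_traversal_instruction(bytes: &[u8]) -> Result<Vec<bool>, String> {
    bytes
        .iter()
        .map(|byte| match byte {
            1u8 => Ok(true),  // LEFT
            0u8 => Ok(false), // RIGHT
            b => Err(format!("invalid instruction byte: {b}")),
        })
        .collect()
}

fn main() {
    let instruction = vec![true, false, true]; // LEFT, RIGHT, LEFT
    let bytes = traversal_instruction_as_vec_bytes(&instruction);
    assert_eq!(bytes, vec![1u8, 0u8, 1u8]);
    assert_eq!(
        vec_bytes_as_traversal_instruction(&bytes).unwrap(),
        instruction
    );
}
```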
