diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 348be1dd..4c7760b6 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -16,7 +16,7 @@ jobs:
     steps:
       - name: Checkout Sources
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
 
       - name: Install Rust Toolchain
         uses: actions-rs/toolchain@v1
@@ -37,29 +37,23 @@ jobs:
           command: test
           args: --workspace
 
-      - name: Check trie-db Without Std
+      - name: Check subtrie Without Std
         uses: actions-rs/cargo@v1
         with:
           command: check
-          args: --manifest-path trie-db/Cargo.toml --no-default-features
+          args: --manifest-path subtrie/Cargo.toml --no-default-features
 
-      - name: Check memory-db Without Std
+      - name: Check subtrie-test With bench
         uses: actions-rs/cargo@v1
         with:
           command: check
-          args: --manifest-path memory-db/Cargo.toml --no-default-features
-
-      - name: Check trie-root Without Std
-        uses: actions-rs/cargo@v1
-        with:
-          command: check
-          args: --manifest-path trie-root/Cargo.toml --no-default-features
+          args: --manifest-path test/Cargo.toml --benches
 
   fmt:
     name: Rustfmt
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
diff --git a/Cargo.toml b/Cargo.toml
index 607558fa..b0794c99 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,16 +1,7 @@
 [workspace]
 members = [
-  "hash-db",
-  "memory-db",
   "hash256-std-hasher",
-  "test-support/keccak-hasher",
-  "test-support/reference-trie",
-  "test-support/trie-standardmap",
-  "test-support/trie-bench",
-  "trie-db",
-  "trie-db/test",
-  "trie-eip1186",
-  "trie-eip1186/test",
-  "trie-root",
-  "trie-root/test"
+  "subtrie",
+  "test",
+  "reference-trie",
 ]
diff --git a/README.md b/README.md
index 8f523fe4..921a8aad 100644
--- a/README.md
+++ b/README.md
@@ -4,40 +4,11 @@
 A generic implementation of the Base-16 Modified Merkle Tree ("Trie") data structure,
 provided under the Apache2 license.
 
-The implementation comes in two formats:
-
-- Trie DB (`trie-db` crate) which can be combined with a backend database to provide
-  a persistent trie structure whose contents can be modified and whose root hash
-  is recalculated efficiently.
-- Trie Root (`trie-root` crate) which provides a closed-form function that accepts a
-  enumeration of keys and values and provides a root calculated entirely in-memory and
-  closed form.
-
 Trie Hash alone is able to be used in `no_std` builds by disabling its (default) `std` feature.
+The implementation lives in the `subtrie` crate.
 
-In addition to these, several support crates are provided:
-
-- `hash-db` crate, used to provide `Hasher` (trait for all things that
-  can make cryptographic hashes) and `HashDB` (trait for databases that can have byte
-  slices pushed into them and allow for them to be retrieved based on their hash).
-  Suitable for `no_std`, though in this case will only provide `Hasher`.
-- `memory-db` crate, contains `MemoryDB`, an implementation of a `HashDB` using only
-  in in-memory map.
-- `hash256-std-hasher` crate, an implementation of a `std::hash::Hasher` for 32-byte
-  keys that have already been hashed. Useful to build the backing `HashMap` for `MemoryDB`.
-
-There are also three crates used only for testing:
-
-- `keccak-hasher` crate, an implementation of `Hasher` based on the Keccak-256 algorithm.
-- `reference-trie` crate, an implementation of a simple trie format; this provides both
-  a `NodeCodec` and `TrieStream` implementation making it suitable for both Trie DB and
-  Trie Root.
-- `trie-standardmap` crate, a key/value generation tool for creating large test datasets - to specific qualities. -- `trie-bench` crate, a comprehensive standard benchmarking tool for trie format - implementations. Works using the `criterion` project so benchmarking can be done with - the stable rustc branch. +Testing in `reference-trie` crate and `trie-db-test`. In the spirit of all things Rust, this aims to be reliable, secure, and high performance. diff --git a/hash-db/CHANGELOG.md b/hash-db/CHANGELOG.md deleted file mode 100644 index e5a6a55b..00000000 --- a/hash-db/CHANGELOG.md +++ /dev/null @@ -1,10 +0,0 @@ -# Changelog - -The format is based on [Keep a Changelog]. - -[Keep a Changelog]: http://keepachangelog.com/en/1.0.0/ - -## [0.16.0] - 2023-03-14 -- Requires Hash to be Ord. [#188](https://github.com/paritytech/trie/pull/188) - - diff --git a/hash-db/Cargo.toml b/hash-db/Cargo.toml deleted file mode 100644 index d0c6f104..00000000 --- a/hash-db/Cargo.toml +++ /dev/null @@ -1,14 +0,0 @@ -[package] -name = "hash-db" -version = "0.16.0" -authors = ["Parity Technologies "] -description = "Trait for hash-keyed databases." -license = "Apache-2.0" -categories = [ "no-std" ] -repository = "https://github.com/paritytech/trie" -edition = "2018" - -[features] -default = ["std"] -std = [ -] diff --git a/hash-db/README.md b/hash-db/README.md deleted file mode 100644 index 23b49ae8..00000000 --- a/hash-db/README.md +++ /dev/null @@ -1,4 +0,0 @@ -# HashDB -`HashDB` defines a common interface for databases of byte-slices keyed to their hash. It is generic over hash type through the `Hasher` trait. - -The `Hasher` trait can be used in a `no_std` context. \ No newline at end of file diff --git a/hash-db/src/lib.rs b/hash-db/src/lib.rs deleted file mode 100644 index 4825aada..00000000 --- a/hash-db/src/lib.rs +++ /dev/null @@ -1,216 +0,0 @@ -// Copyright 2017, 2021 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Database of byte-slices keyed to their hash. - -#![cfg_attr(not(feature = "std"), no_std)] - -#[cfg(not(feature = "std"))] -use core::hash; -#[cfg(feature = "std")] -use std::fmt::Debug; -#[cfg(feature = "std")] -use std::hash; - -#[cfg(feature = "std")] -pub trait MaybeDebug: Debug {} -#[cfg(feature = "std")] -impl MaybeDebug for T {} -#[cfg(not(feature = "std"))] -pub trait MaybeDebug {} -#[cfg(not(feature = "std"))] -impl MaybeDebug for T {} - -/// A trie node prefix, it is the nibble path from the trie root -/// to the trie node. -/// For a node containing no partial key value it is the full key. -/// For a value node or node containing a partial key, it is the full key minus its node partial -/// nibbles (the node key can be split into prefix and node partial). -/// Therefore it is always the leftmost portion of the node key, so its internal representation -/// is a non expanded byte slice followed by a last padded byte representation. -/// The padded byte is an optional padded value. 
-pub type Prefix<'a> = (&'a [u8], Option); - -/// An empty prefix constant. -/// Can be use when the prefix is not use internally -/// or for root nodes. -pub static EMPTY_PREFIX: Prefix<'static> = (&[], None); - -/// Trait describing an object that can hash a slice of bytes. Used to abstract -/// other types over the hashing algorithm. Defines a single `hash` method and an -/// `Out` associated type with the necessary bounds. -pub trait Hasher: Sync + Send { - /// The output type of the `Hasher` - type Out: AsRef<[u8]> - + AsMut<[u8]> - + Default - + MaybeDebug - + core::cmp::Ord - + PartialEq - + Eq - + hash::Hash - + Send - + Sync - + Clone - + Copy; - /// What to use to build `HashMap`s with this `Hasher`. - type StdHasher: Sync + Send + Default + hash::Hasher; - /// The length in bytes of the `Hasher` output. - const LENGTH: usize; - - /// Compute the hash of the provided slice of bytes returning the `Out` type of the `Hasher`. - fn hash(x: &[u8]) -> Self::Out; -} - -/// Trait modelling a plain datastore whose key is a fixed type. -/// The caller should ensure that a key only corresponds to -/// one value. -pub trait PlainDB: Send + Sync + AsPlainDB { - /// Look up a given hash into the bytes that hash to it, returning None if the - /// hash is not known. - fn get(&self, key: &K) -> Option; - - /// Check for the existence of a hash-key. - fn contains(&self, key: &K) -> bool; - - /// Insert a datum item into the DB. Insertions are counted and the equivalent - /// number of `remove()`s must be performed before the data is considered dead. - /// The caller should ensure that a key only corresponds to one value. - fn emplace(&mut self, key: K, value: V); - - /// Remove a datum previously inserted. Insertions can be "owed" such that the - /// same number of `insert()`s may happen without the data being eventually - /// being inserted into the DB. It can be "owed" more than once. - /// The caller should ensure that a key only corresponds to one value. - fn remove(&mut self, key: &K); -} - -/// Trait for immutable reference of PlainDB. -pub trait PlainDBRef { - /// Look up a given hash into the bytes that hash to it, returning None if the - /// hash is not known. - fn get(&self, key: &K) -> Option; - - /// Check for the existance of a hash-key. - fn contains(&self, key: &K) -> bool; -} - -impl<'a, K, V> PlainDBRef for &'a dyn PlainDB { - fn get(&self, key: &K) -> Option { - PlainDB::get(*self, key) - } - fn contains(&self, key: &K) -> bool { - PlainDB::contains(*self, key) - } -} - -impl<'a, K, V> PlainDBRef for &'a mut dyn PlainDB { - fn get(&self, key: &K) -> Option { - PlainDB::get(*self, key) - } - fn contains(&self, key: &K) -> bool { - PlainDB::contains(*self, key) - } -} - -/// Trait modelling datastore keyed by a hash defined by the `Hasher`. -pub trait HashDB: Send + Sync + AsHashDB { - /// Look up a given hash into the bytes that hash to it, returning None if the - /// hash is not known. - fn get(&self, key: &H::Out, prefix: Prefix) -> Option; - - /// Check for the existence of a hash-key. - fn contains(&self, key: &H::Out, prefix: Prefix) -> bool; - - /// Insert a datum item into the DB and return the datum's hash for a later lookup. Insertions - /// are counted and the equivalent number of `remove()`s must be performed before the data - /// is considered dead. - fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H::Out; - - /// Like `insert()`, except you provide the key and the data is all moved. 
- fn emplace(&mut self, key: H::Out, prefix: Prefix, value: T); - - /// Remove a datum previously inserted. Insertions can be "owed" such that the same number of - /// `insert()`s may happen without the data being eventually being inserted into the DB. - /// It can be "owed" more than once. - fn remove(&mut self, key: &H::Out, prefix: Prefix); -} - -/// Trait for immutable reference of HashDB. -pub trait HashDBRef { - /// Look up a given hash into the bytes that hash to it, returning None if the - /// hash is not known. - fn get(&self, key: &H::Out, prefix: Prefix) -> Option; - - /// Check for the existance of a hash-key. - fn contains(&self, key: &H::Out, prefix: Prefix) -> bool; -} - -impl<'a, H: Hasher, T> HashDBRef for &'a dyn HashDB { - fn get(&self, key: &H::Out, prefix: Prefix) -> Option { - HashDB::get(*self, key, prefix) - } - fn contains(&self, key: &H::Out, prefix: Prefix) -> bool { - HashDB::contains(*self, key, prefix) - } -} - -impl<'a, H: Hasher, T> HashDBRef for &'a mut dyn HashDB { - fn get(&self, key: &H::Out, prefix: Prefix) -> Option { - HashDB::get(*self, key, prefix) - } - fn contains(&self, key: &H::Out, prefix: Prefix) -> bool { - HashDB::contains(*self, key, prefix) - } -} - -/// Upcast trait for HashDB. -pub trait AsHashDB { - /// Perform upcast to HashDB for anything that derives from HashDB. - fn as_hash_db(&self) -> &dyn HashDB; - /// Perform mutable upcast to HashDB for anything that derives from HashDB. - fn as_hash_db_mut<'a>(&'a mut self) -> &'a mut (dyn HashDB + 'a); -} - -/// Upcast trait for PlainDB. -pub trait AsPlainDB { - /// Perform upcast to PlainDB for anything that derives from PlainDB. - fn as_plain_db(&self) -> &dyn PlainDB; - /// Perform mutable upcast to PlainDB for anything that derives from PlainDB. - fn as_plain_db_mut<'a>(&'a mut self) -> &'a mut (dyn PlainDB + 'a); -} - -// NOTE: There used to be a `impl AsHashDB for T` but that does not work with generics. -// See https://stackoverflow.com/questions/48432842/ -// implementing-a-trait-for-reference-and-non-reference-types-causes-conflicting-im -// This means we need concrete impls of AsHashDB in several places, which somewhat defeats -// the point of the trait. -impl<'a, H: Hasher, T> AsHashDB for &'a mut dyn HashDB { - fn as_hash_db(&self) -> &dyn HashDB { - &**self - } - fn as_hash_db_mut<'b>(&'b mut self) -> &'b mut (dyn HashDB + 'b) { - &mut **self - } -} - -#[cfg(feature = "std")] -impl<'a, K, V> AsPlainDB for &'a mut dyn PlainDB { - fn as_plain_db(&self) -> &dyn PlainDB { - &**self - } - fn as_plain_db_mut<'b>(&'b mut self) -> &'b mut (dyn PlainDB + 'b) { - &mut **self - } -} diff --git a/hash256-std-hasher/Cargo.toml b/hash256-std-hasher/Cargo.toml index addd32fb..bdeb08ef 100644 --- a/hash256-std-hasher/Cargo.toml +++ b/hash256-std-hasher/Cargo.toml @@ -16,7 +16,7 @@ harness = false crunchy = "0.2.1" [dev-dependencies] -criterion = "0.4.0" +criterion = "0.5.1" [features] default = ["std"] diff --git a/memory-db/CHANGELOG.md b/memory-db/CHANGELOG.md deleted file mode 100644 index 532c86a4..00000000 --- a/memory-db/CHANGELOG.md +++ /dev/null @@ -1,45 +0,0 @@ -# Changelog - -The format is based on [Keep a Changelog]. - -[Keep a Changelog]: http://keepachangelog.com/en/1.0.0/ - -## [0.32.0] - 2023-03-14 -- Switch no_std storage to BtreeMap. [#188](https://github.com/paritytech/trie/pull/188) - -## [0.31.0] - 2022-11-29 -- Removed `parity-util-mem` support. [#172](https://github.com/paritytech/trie/pull/172) - -## [0.30.0] - 2022-09-20 -- Update `parity-util-mem` to 0.12. 
[#166](https://github.com/paritytech/trie/pull/166) - -## [0.29.0] - 2022-02-04 -- Update `parity-util-mem` to 0.11. [#150](https://github.com/paritytech/trie/pull/150) - -## [0.28.0] - 2021-10-19 -- Change in api bound. [#142](https://github.com/paritytech/trie/pull/142) - -## [0.27.0] - 2021-07-02 -- Update `parity-util-mem` to 0.10. [#137](https://github.com/paritytech/trie/pull/137) - -## [0.26.0] - 2021-01-27 -- Update `parity-util-mem` to 0.9. [#123](https://github.com/paritytech/trie/pull/123) - -## [0.25.0] - 2021-01-05 -- Update `parity-util-mem` and `hashbrown`, removed `heapsize`. [#118](https://github.com/paritytech/trie/pull/118) - -## [0.24.1] - 2020-07-20 -- Add `shrink_to_fit` method. [#102](https://github.com/paritytech/trie/pull/102) - -## [0.24.0] - 2020-07-07 -- Disable memory tracking for no_std target by default. [#99](https://github.com/paritytech/trie/pull/99) - -## [0.22.0] - 2020-07-06 -- Type parameter to count `malloc_size_of` on memory-db. [#94](https://github.com/paritytech/trie/pull/94) -- Update hashbrown to 0.8. [#97](https://github.com/paritytech/trie/pull/97) - -## [0.20.0] - 2020-03-21 -- Update parity-util-mem to v0.6 [#82](https://github.com/paritytech/trie/pull/82) - -## [0.19.0] - 2020-02-07 -- Update parity-util-mem to v0.5.1 [#78](https://github.com/paritytech/trie/pull/78) diff --git a/memory-db/Cargo.toml b/memory-db/Cargo.toml deleted file mode 100644 index adad836a..00000000 --- a/memory-db/Cargo.toml +++ /dev/null @@ -1,25 +0,0 @@ -[package] -name = "memory-db" -version = "0.32.0" -authors = ["Parity Technologies "] -description = "In-memory implementation of hash-db, useful for tests" -repository = "https://github.com/paritytech/trie" -license = "Apache-2.0" -edition = "2018" - -[dependencies] -hash-db = { version = "0.16.0", path = "../hash-db", default-features = false } - -[dev-dependencies] -keccak-hasher = { path = "../test-support/keccak-hasher" } -criterion = "0.4.0" - -[features] -default = ["std"] -std = [ - "hash-db/std", -] - -[[bench]] -name = "bench" -harness = false diff --git a/memory-db/README.md b/memory-db/README.md index fc0c6309..497359e2 100644 --- a/memory-db/README.md +++ b/memory-db/README.md @@ -1 +1 @@ -MemoryDB is a reference counted memory-based [`HashDB`](https://github.com/paritytech/parity-common/tree/master/hash-db) implementation backed by a `HashMap`. \ No newline at end of file +MemoryDB is a reference counted memory-based [`NodeDB`](https://github.com/paritytech/parity-common/tree/master/hash-db) implementation backed by a `HashMap`. 
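Since the README line above is the only description `memory-db` keeps, it may help to spell out what "reference counted" means in practice. A minimal sketch, written against the pre-PR `hash-db`/`memory-db`/`keccak-hasher` crates that this diff deletes (the `insert`/`remove`/`contains` shapes are the ones visible in the removed `HashDB` trait earlier in this diff; under the new layout the equivalents are expected under `subtrie::memory_db` and `subtrie::node_db`, but that path is an assumption here):

```rust
use hash_db::{HashDB, EMPTY_PREFIX};
use keccak_hasher::KeccakHasher;
use memory_db::{HashKey, MemoryDB};

fn main() {
    let mut db = MemoryDB::<KeccakHasher, HashKey<KeccakHasher>, Vec<u8>>::default();

    // Inserting the same value twice bumps its reference count to two.
    let key = db.insert(EMPTY_PREFIX, b"node data");
    db.insert(EMPTY_PREFIX, b"node data");

    // A single `remove` only decrements the count, so the entry is still there...
    db.remove(&key, EMPTY_PREFIX);
    assert!(db.contains(&key, EMPTY_PREFIX));

    // ...and it only disappears once removals balance insertions.
    db.remove(&key, EMPTY_PREFIX);
    assert!(!db.contains(&key, EMPTY_PREFIX));
}
```

The counting is what lets trie mutations unreference nodes optimistically: data only disappears once removals balance insertions.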
diff --git a/test-support/reference-trie/CHANGELOG.md b/reference-trie/CHANGELOG.md similarity index 100% rename from test-support/reference-trie/CHANGELOG.md rename to reference-trie/CHANGELOG.md diff --git a/test-support/reference-trie/Cargo.toml b/reference-trie/Cargo.toml similarity index 55% rename from test-support/reference-trie/Cargo.toml rename to reference-trie/Cargo.toml index 90fe577a..f26ed81e 100644 --- a/test-support/reference-trie/Cargo.toml +++ b/reference-trie/Cargo.toml @@ -8,17 +8,13 @@ license = "Apache-2.0" edition = "2018" [dependencies] -hash-db = { path = "../../hash-db" , version = "0.16.0"} -keccak-hasher = { path = "../keccak-hasher", version = "0.16.0" } -trie-db = { path = "../../trie-db", default-features = false, version = "0.27.0" } -trie-root = { path = "../../trie-root", default-features = false, version = "0.18.0" } +trie-db = { package = "subtrie", path = "../subtrie", default-features = false, version = "0.0.1" } parity-scale-codec = { version = "3.0.0", features = ["derive"] } -hashbrown = { version = "0.13.2", default-features = false, features = ["ahash"] } +hashbrown = { version = "0.14.1", default-features = false, features = ["ahash"] } paste = "1.0.12" [dev-dependencies] -trie-bench = { path = "../trie-bench" } -criterion = "0.4.0" +criterion = "0.5.1" [[bench]] name = "bench" @@ -29,5 +25,4 @@ default = ["std"] # no actual support for std, only to avoid a cargo issues std = [ "trie-db/std", - "trie-root/std", ] diff --git a/test-support/reference-trie/benches/bench.rs b/reference-trie/benches/bench.rs similarity index 95% rename from test-support/reference-trie/benches/bench.rs rename to reference-trie/benches/bench.rs index b3466d28..4e1cb969 100644 --- a/test-support/reference-trie/benches/bench.rs +++ b/reference-trie/benches/bench.rs @@ -18,7 +18,7 @@ criterion_group!(benches, benchmark); criterion_main!(benches); fn benchmark(c: &mut Criterion) { - trie_bench::standard_benchmark::< + trie_db::bench::standard_benchmark::< reference_trie::ExtensionLayout, reference_trie::ReferenceTrieStream, >(c, "ref"); diff --git a/test-support/reference-trie/src/lib.rs b/reference-trie/src/lib.rs similarity index 83% rename from test-support/reference-trie/src/lib.rs rename to reference-trie/src/lib.rs index be626801..51e55fd4 100644 --- a/test-support/reference-trie/src/lib.rs +++ b/reference-trie/src/lib.rs @@ -18,15 +18,17 @@ use hashbrown::{hash_map::Entry, HashMap}; use parity_scale_codec::{Compact, Decode, Encode, Error as CodecError, Input, Output}; use std::{borrow::Borrow, fmt, iter::once, marker::PhantomData, ops::Range}; use trie_db::{ + memory_db::{KeyFunction, MemoryDB, PrefixedKey}, nibble_ops, node::{NibbleSlicePlan, NodeHandlePlan, NodeOwned, NodePlan, Value, ValuePlan}, + node_db, + node_db::Hasher, + trie_root::{self, TrieStream, Value as TrieStreamValue}, trie_visit, triedbmut::ChildReference, - DBValue, NodeCodec, Trie, TrieBuilder, TrieConfiguration, TrieDBBuilder, TrieDBMutBuilder, - TrieHash, TrieLayout, TrieMut, TrieRoot, + DBValue, Location, NodeCodec, Trie, TrieBuilder, TrieConfiguration, TrieDBBuilder, + TrieDBMutBuilder, TrieHash, TrieLayout, TrieRoot, }; -pub use trie_root::TrieStream; -use trie_root::{Hasher, Value as TrieStreamValue}; mod substrate; mod substrate_like; @@ -41,9 +43,13 @@ pub use substrate_like::{ pub use paste::paste; pub use substrate::{LayoutV0 as SubstrateV0, LayoutV1 as SubstrateV1}; +pub use trie_db::mem_tree_db::{Location as MemLocation, MemTreeDB}; /// Reference hasher is a keccak hasher. 
-pub type RefHasher = keccak_hasher::KeccakHasher; +pub type RefHasher = trie_db::keccak_hasher::KeccakHasher; + +pub type PrefixedMemoryDB = + MemoryDB<::Hash, PrefixedKey<::Hash>, DBValue>; /// Apply a test method on every test layouts. #[macro_export] @@ -51,14 +57,39 @@ macro_rules! test_layouts { ($test:ident, $test_internal:ident) => { #[test] fn $test() { - eprintln!("Running with layout `HashedValueNoExtThreshold`"); - $test_internal::<$crate::HashedValueNoExtThreshold<1>>(); - eprintln!("Running with layout `HashedValueNoExt`"); - $test_internal::<$crate::HashedValueNoExt>(); - eprintln!("Running with layout `NoExtensionLayout`"); - $test_internal::<$crate::NoExtensionLayout>(); - eprintln!("Running with layout `ExtensionLayout`"); - $test_internal::<$crate::ExtensionLayout>(); + eprintln!("Running with layout `HashedValueNoExtThreshold` and MemTreeDB"); + $test_internal::< + $crate::HashedValueNoExtThreshold<1, $crate::MemLocation>, + $crate::MemTreeDB<$crate::RefHasher>, + >(); + eprintln!("Running with layout `HashedValueNoExt` and MemTreeDB"); + $test_internal::<$crate::HashedValueNoExt, $crate::MemTreeDB<$crate::RefHasher>>(); + eprintln!("Running with layout `NoExtensionLayout` and MemTreeDB"); + $test_internal::< + $crate::GenericNoExtensionLayout<$crate::RefHasher, $crate::MemLocation>, + $crate::MemTreeDB<$crate::RefHasher>, + >(); + + eprintln!("Running with layout `HashedValueNoExtThreshold` and MemoryDB"); + $test_internal::< + $crate::HashedValueNoExtThreshold<1, ()>, + $crate::PrefixedMemoryDB<$crate::HashedValueNoExtThreshold<1, ()>>, + >(); + eprintln!("Running with layout `HashedValueNoExt` and MemoryDB"); + $test_internal::< + $crate::HashedValueNoExt, + $crate::PrefixedMemoryDB<$crate::HashedValueNoExt>, + >(); + eprintln!("Running with layout `NoExtensionLayout` and MemoryDB"); + $test_internal::< + $crate::NoExtensionLayout, + $crate::PrefixedMemoryDB<$crate::NoExtensionLayout>, + >(); + eprintln!("Running with layout `ExtensionLayout` and MemoryDB"); + $test_internal::< + $crate::ExtensionLayout, + $crate::PrefixedMemoryDB<$crate::ExtensionLayout>, + >(); } }; } @@ -101,32 +132,34 @@ impl TrieLayout for ExtensionLayout { const MAX_INLINE_VALUE: Option = None; type Hash = RefHasher; type Codec = ReferenceNodeCodec; + type Location = (); } impl TrieConfiguration for ExtensionLayout {} /// Trie layout without extension nodes, allowing /// generic hasher. -pub struct GenericNoExtensionLayout(PhantomData); +pub struct GenericNoExtensionLayout(PhantomData<(H, L)>); -impl Default for GenericNoExtensionLayout { +impl Default for GenericNoExtensionLayout { fn default() -> Self { GenericNoExtensionLayout(PhantomData) } } -impl Clone for GenericNoExtensionLayout { +impl Clone for GenericNoExtensionLayout { fn clone(&self) -> Self { GenericNoExtensionLayout(PhantomData) } } -impl TrieLayout for GenericNoExtensionLayout { +impl TrieLayout for GenericNoExtensionLayout { const USE_EXTENSION: bool = false; const ALLOW_EMPTY: bool = false; const MAX_INLINE_VALUE: Option = None; type Hash = H; type Codec = ReferenceNodeCodecNoExt; + type Location = L; } /// Trie that allows empty values. @@ -139,12 +172,13 @@ impl TrieLayout for AllowEmptyLayout { const MAX_INLINE_VALUE: Option = None; type Hash = RefHasher; type Codec = ReferenceNodeCodec; + type Location = (); } -impl TrieConfiguration for GenericNoExtensionLayout {} +impl TrieConfiguration for GenericNoExtensionLayout {} /// Trie layout without extension nodes. 
-pub type NoExtensionLayout = GenericNoExtensionLayout; +pub type NoExtensionLayout = GenericNoExtensionLayout; /// Children bitmap codec for radix 16 trie. pub struct Bitmap(u16); @@ -184,10 +218,6 @@ pub type RefTrieDBMutAllowEmpty<'a> = trie_db::TrieDBMut<'a, AllowEmptyLayout>; pub type RefTrieDBMutAllowEmptyBuilder<'a> = trie_db::TrieDBMutBuilder<'a, AllowEmptyLayout>; pub type RefTestTrieDBCache = TestTrieCache; pub type RefTestTrieDBCacheNoExt = TestTrieCache; -pub type RefFatDB<'a, 'cache> = trie_db::FatDB<'a, 'cache, ExtensionLayout>; -pub type RefFatDBMut<'a> = trie_db::FatDBMut<'a, ExtensionLayout>; -pub type RefSecTrieDB<'a, 'cache> = trie_db::SecTrieDB<'a, 'cache, ExtensionLayout>; -pub type RefSecTrieDBMut<'a> = trie_db::SecTrieDBMut<'a, ExtensionLayout>; pub type RefLookup<'a, 'cache, Q> = trie_db::Lookup<'a, 'cache, ExtensionLayout, Q>; pub type RefLookupNoExt<'a, 'cache, Q> = trie_db::Lookup<'a, 'cache, NoExtensionLayout, Q>; @@ -624,12 +654,14 @@ impl NodeCodec for ReferenceNodeCodec { None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ]; + let mut i_hash = 0; for i in 0..nibble_ops::NIBBLE_LENGTH { if bitmap.value_at(i) { let count = >::decode(&mut input)?.0 as usize; let range = input.take(count)?; children[i] = Some(if count == H::LENGTH { - NodeHandlePlan::Hash(range) + i_hash += 1; + NodeHandlePlan::Hash(range, i_hash - 1) } else { NodeHandlePlan::Inline(range) }); @@ -646,7 +678,7 @@ impl NodeCodec for ReferenceNodeCodec { let count = >::decode(&mut input)?.0 as usize; let range = input.take(count)?; let child = if count == H::LENGTH { - NodeHandlePlan::Hash(range) + NodeHandlePlan::Hash(range, 0) } else { NodeHandlePlan::Inline(range) }; @@ -679,7 +711,11 @@ impl NodeCodec for ReferenceNodeCodec { &[EMPTY_TRIE] } - fn leaf_node(partial: impl Iterator, number_nibble: usize, value: Value) -> Vec { + fn leaf_node( + partial: impl Iterator, + number_nibble: usize, + value: Value, + ) -> Vec { let mut output = partial_from_iterator_to_key(partial, number_nibble, LEAF_NODE_OFFSET, LEAF_NODE_OVER); match value { @@ -692,10 +728,10 @@ impl NodeCodec for ReferenceNodeCodec { output } - fn extension_node( + fn extension_node( partial: impl Iterator, number_nibble: usize, - child: ChildReference, + child: ChildReference, ) -> Vec { let mut output = partial_from_iterator_to_key( partial, @@ -704,16 +740,16 @@ impl NodeCodec for ReferenceNodeCodec { EXTENSION_NODE_OVER, ); match child { - ChildReference::Hash(h) => h.as_ref().encode_to(&mut output), + ChildReference::Hash(h, _) => h.as_ref().encode_to(&mut output), ChildReference::Inline(inline_data, len) => (&AsRef::<[u8]>::as_ref(&inline_data)[..len]).encode_to(&mut output), }; output } - fn branch_node( - children: impl Iterator>>>, - maybe_value: Option, + fn branch_node( + children: impl Iterator>>>, + maybe_value: Option>, ) -> Vec { let mut output = vec![0; BITMAP_LENGTH + 1]; let mut prefix: [u8; 3] = [0; 3]; @@ -727,7 +763,7 @@ impl NodeCodec for ReferenceNodeCodec { _ => unimplemented!("unsupported"), }; let has_children = children.map(|maybe_child| match maybe_child.borrow() { - Some(ChildReference::Hash(h)) => { + Some(ChildReference::Hash(h, _)) => { h.as_ref().encode_to(&mut output); true }, @@ -742,11 +778,11 @@ impl NodeCodec for ReferenceNodeCodec { output } - fn branch_node_nibbled( + fn branch_node_nibbled( _partial: impl Iterator, _number_nibble: usize, - _children: impl Iterator>>>, - _maybe_value: Option, + _children: impl Iterator>>>, + _maybe_value: Option>, ) 
-> Vec { unreachable!("codec with extension branch") } @@ -791,12 +827,14 @@ impl NodeCodec for ReferenceNodeCodecNoExt { None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ]; + let mut i_hash = 0; for i in 0..nibble_ops::NIBBLE_LENGTH { if bitmap.value_at(i) { let count = >::decode(&mut input)?.0 as usize; let range = input.take(count)?; children[i] = Some(if count == H::LENGTH { - NodeHandlePlan::Hash(range) + i_hash += 1; + NodeHandlePlan::Hash(range, i_hash - 1) } else { NodeHandlePlan::Inline(range) }); @@ -835,7 +873,11 @@ impl NodeCodec for ReferenceNodeCodecNoExt { &[EMPTY_TRIE_NO_EXT] } - fn leaf_node(partial: impl Iterator, number_nibble: usize, value: Value) -> Vec { + fn leaf_node( + partial: impl Iterator, + number_nibble: usize, + value: Value, + ) -> Vec { let mut output = partial_from_iterator_encode(partial, number_nibble, NodeKindNoExt::Leaf); match value { Value::Inline(value) => { @@ -847,26 +889,26 @@ impl NodeCodec for ReferenceNodeCodecNoExt { output } - fn extension_node( + fn extension_node( _partial: impl Iterator, _nbnibble: usize, - _child: ChildReference<::Out>, + _child: ChildReference<::Out, L>, ) -> Vec { unreachable!("no extension codec") } - fn branch_node( - _children: impl Iterator::Out>>>>, - _maybe_value: Option, + fn branch_node( + _children: impl Iterator::Out, L>>>>, + _maybe_value: Option>, ) -> Vec { unreachable!("no extension codec") } - fn branch_node_nibbled( + fn branch_node_nibbled( partial: impl Iterator, number_nibble: usize, - children: impl Iterator>>>, - maybe_value: Option, + children: impl Iterator>>>, + maybe_value: Option>, ) -> Vec { let mut output = if maybe_value.is_none() { partial_from_iterator_encode(partial, number_nibble, NodeKindNoExt::BranchNoValue) @@ -887,7 +929,7 @@ impl NodeCodec for ReferenceNodeCodecNoExt { Bitmap::encode( children.map(|maybe_child| match maybe_child.borrow() { - Some(ChildReference::Hash(h)) => { + Some(ChildReference::Hash(h, _)) => { h.as_ref().encode_to(&mut output); true }, @@ -906,33 +948,34 @@ impl NodeCodec for ReferenceNodeCodecNoExt { } /// Compare trie builder and in memory trie. 
-pub fn compare_implementations(data: Vec<(Vec, Vec)>, mut memdb: DB, mut hashdb: DB) +pub fn compare_implementations(data: Vec<(Vec, Vec)>) where T: TrieLayout, - DB: hash_db::HashDB + Eq, + T::Location: std::fmt::Debug, + K: KeyFunction + Send + Sync, { - let root_new = calc_root_build::(data.clone(), &mut hashdb); + let (mut mem_db1, _) = MemoryDB::::default_with_root(); + let (mut mem_db2, _) = MemoryDB::::default_with_root(); + let root_new = calc_root_build::(data.clone(), &mut mem_db1); let root = { - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); + let mut t = TrieDBMutBuilder::::new(&mut mem_db2).build(); for i in 0..data.len() { t.insert(&data[i].0[..], &data[i].1[..]).unwrap(); } - t.commit(); - *t.root() + t.commit().apply_to(&mut mem_db2) }; if root_new != root { { - let db: &dyn hash_db::HashDB<_, _> = &hashdb; - let t = TrieDBBuilder::::new(&db, &root_new).build(); + let db: &dyn node_db::NodeDB<_, _, _> = &mem_db1; + let t = TrieDBBuilder::::new(db, &root_new).build(); println!("{:?}", t); for a in t.iter().unwrap() { println!("a:{:x?}", a); } } { - let db: &dyn hash_db::HashDB<_, _> = &memdb; - let t = TrieDBBuilder::::new(&db, &root).build(); + let db: &dyn node_db::NodeDB<_, _, _> = &mem_db2; + let t = TrieDBBuilder::::new(db, &root).build(); println!("{:?}", t); for a in t.iter().unwrap() { println!("a:{:x?}", a); @@ -942,22 +985,21 @@ where assert_eq!(root, root_new); // compare db content for key fuzzing - assert!(memdb == hashdb); + assert!(mem_db1 == mem_db2); } /// Compare trie builder and trie root implementations. -pub fn compare_root>( +pub fn compare_root>( data: Vec<(Vec, Vec)>, - mut memdb: DB, + memdb: DB, ) { let root_new = reference_trie_root_iter_build::(data.clone()); let root = { - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); + let mut t = TrieDBMutBuilder::::new(&memdb).build(); for i in 0..data.len() { t.insert(&data[i].0[..], &data[i].1[..]).unwrap(); } - *t.root() + *t.commit().hash() }; assert_eq!(root, root_new); @@ -1002,57 +1044,60 @@ where } /// Trie builder trie building utility. -pub fn calc_root_build(data: I, hashdb: &mut DB) -> ::Out +pub fn calc_root_build( + data: I, + memdb: &mut MemoryDB, +) -> TrieHash where T: TrieLayout, I: IntoIterator, A: AsRef<[u8]> + Ord + fmt::Debug, B: AsRef<[u8]> + fmt::Debug, - DB: hash_db::HashDB, + K: KeyFunction + Send + Sync, { - let mut cb = TrieBuilder::::new(hashdb); + let mut cb = TrieBuilder::::new(memdb); trie_visit::(data.into_iter(), &mut cb); cb.root.unwrap_or_default() } /// `compare_implementations_no_extension` for unordered input (trie_root does /// ordering before running when trie_build expect correct ordering). 
-pub fn compare_implementations_unordered( - data: Vec<(Vec, Vec)>, - mut memdb: DB, - mut hashdb: DB, -) where + +pub fn compare_implementations_unordered(data: Vec<(Vec, Vec)>) +where T: TrieLayout, - DB: hash_db::HashDB + Eq, + T::Location: std::fmt::Debug, + K: KeyFunction + Send + Sync, { + let mut mem_db1 = MemoryDB::::default(); + let mut mem_db2 = MemoryDB::::default(); let mut b_map = std::collections::btree_map::BTreeMap::new(); let root = { - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); + let mut t = TrieDBMutBuilder::::new(&mut mem_db1).build(); for i in 0..data.len() { t.insert(&data[i].0[..], &data[i].1[..]).unwrap(); b_map.insert(data[i].0.clone(), data[i].1.clone()); } - *t.root() + *t.commit().hash() }; let root_new = { - let mut cb = TrieBuilder::::new(&mut hashdb); + let mut cb = TrieBuilder::::new(&mut mem_db2); trie_visit::(b_map.into_iter(), &mut cb); cb.root.unwrap_or_default() }; if root != root_new { { - let db: &dyn hash_db::HashDB<_, _> = &memdb; - let t = TrieDBBuilder::::new(&db, &root).build(); + let db: &dyn node_db::NodeDB<_, _, _> = &mem_db1; + let t = TrieDBBuilder::::new(db, &root).build(); println!("{:?}", t); for a in t.iter().unwrap() { println!("a:{:?}", a); } } { - let db: &dyn hash_db::HashDB<_, _> = &hashdb; - let t = TrieDBBuilder::::new(&db, &root_new).build(); + let db: &dyn node_db::NodeDB<_, _, _> = &mem_db2; + let t = TrieDBBuilder::::new(db, &root_new).build(); println!("{:?}", t); for a in t.iter().unwrap() { println!("a:{:?}", a); @@ -1065,24 +1110,22 @@ pub fn compare_implementations_unordered( /// Testing utility that uses some periodic removal over /// its input test data. -pub fn compare_insert_remove>( - data: Vec<(bool, Vec, Vec)>, - mut memdb: DB, -) where +pub fn compare_insert_remove(data: Vec<(bool, Vec, Vec)>) +where T: TrieLayout, - DB: hash_db::HashDB + Eq, + K: KeyFunction + Send + Sync, { let mut data2 = std::collections::BTreeMap::new(); - let mut root = Default::default(); let mut a = 0; - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - t.commit(); - } + let mut memdb = MemoryDB::::default(); + let mut root = { + let t = TrieDBMutBuilder::::new(&memdb).build(); + *t.commit().hash() + }; while a < data.len() { // new triemut every 3 element root = { - let mut t = TrieDBMutBuilder::::from_existing(&mut memdb, &mut root).build(); + let mut t = TrieDBMutBuilder::::from_existing(&memdb, root).build(); for _ in 0..3 { if data[a].0 { // remove @@ -1099,14 +1142,12 @@ pub fn compare_insert_remove>( break } } - t.commit(); - *t.root() + t.commit().apply_to(&mut memdb) }; } - let mut t = TrieDBMutBuilder::::from_existing(&mut memdb, &mut root).build(); // we are testing the RefTrie code here so we do not sort or check uniqueness // before. - assert_eq!(*t.root(), calc_root::(data2)); + assert_eq!(root, calc_root::(data2)); } /// Example trie cache implementation. @@ -1114,8 +1155,8 @@ pub fn compare_insert_remove>( /// Should not be used for anything in production. pub struct TestTrieCache { /// In a real implementation we need to make sure that this is unique per trie root. 
- value_cache: HashMap, trie_db::CachedValue>>, - node_cache: HashMap, NodeOwned>>, + value_cache: HashMap, trie_db::CachedValue, L::Location>>, + node_cache: HashMap, NodeOwned, L::Location>>, } impl TestTrieCache { @@ -1136,24 +1177,32 @@ impl Default for TestTrieCache { } } -impl trie_db::TrieCache for TestTrieCache { - fn lookup_value_for_key(&mut self, key: &[u8]) -> Option<&trie_db::CachedValue>> { +impl trie_db::TrieCache for TestTrieCache { + fn lookup_value_for_key( + &mut self, + key: &[u8], + ) -> Option<&trie_db::CachedValue, L::Location>> { self.value_cache.get(key) } - fn cache_value_for_key(&mut self, key: &[u8], value: trie_db::CachedValue>) { + fn cache_value_for_key( + &mut self, + key: &[u8], + value: trie_db::CachedValue, L::Location>, + ) { self.value_cache.insert(key.to_vec(), value); } fn get_or_insert_node( &mut self, hash: TrieHash, + _location: L::Location, fetch_node: &mut dyn FnMut() -> trie_db::Result< - NodeOwned>, + NodeOwned, L::Location>, TrieHash, trie_db::CError, >, - ) -> trie_db::Result<&NodeOwned>, TrieHash, trie_db::CError> { + ) -> trie_db::Result<&NodeOwned, L::Location>, TrieHash, trie_db::CError> { match self.node_cache.entry(hash) { Entry::Occupied(e) => Ok(e.into_mut()), Entry::Vacant(e) => { @@ -1163,9 +1212,15 @@ impl trie_db::TrieCache for TestTrieCache { } } - fn get_node(&mut self, hash: &TrieHash) -> Option<&NodeOwned>> { + fn get_node( + &mut self, + hash: &TrieHash, + _location: L::Location, + ) -> Option<&NodeOwned, L::Location>> { self.node_cache.get(hash) } + + fn insert_new_node(&mut self, _hash: &TrieHash) {} } #[cfg(test)] @@ -1203,9 +1258,10 @@ mod tests { let enc = as NodeCodec>::leaf_node( input.iter().cloned(), input.len() * NIBBLE_PER_BYTE, - Value::Inline(&[1]), + Value::<()>::Inline(&[1]), ); - let dec = as NodeCodec>::decode(&enc).unwrap(); + let dec = + as NodeCodec>::decode(&enc, &[] as &[()]).unwrap(); let o_sl = if let Node::Leaf(sl, _) = dec { Some(sl) } else { None }; assert!(o_sl.is_some()); } diff --git a/test-support/reference-trie/src/substrate.rs b/reference-trie/src/substrate.rs similarity index 96% rename from test-support/reference-trie/src/substrate.rs rename to reference-trie/src/substrate.rs index 9b1573f1..a841a603 100644 --- a/test-support/reference-trie/src/substrate.rs +++ b/reference-trie/src/substrate.rs @@ -17,12 +17,12 @@ //! Codec and layout directly copy-pasted from substrate with minimal modifications. 
use core::{borrow::Borrow, iter::once, marker::PhantomData, ops::Range}; -use hash_db::Hasher; use parity_scale_codec as codec; use parity_scale_codec::{Compact, Decode, Encode, Input, Output}; use trie_db::{ nibble_ops, node::{NibbleSlicePlan, NodeHandlePlan, NodePlan, Value, ValuePlan}, + node_db::Hasher, ChildReference, NodeCodec as NodeCodecT, TrieConfiguration, TrieLayout, }; @@ -78,8 +78,8 @@ fn fuse_nibbles_node(nibbles: &[u8], kind: NodeKind) -> impl Iterator .chain(nibbles[nibbles.len() % 2..].chunks(2).map(|ch| ch[0] << 4 | ch[1])) } -use trie_root::Value as TrieStreamValue; -impl trie_root::TrieStream for TrieStream { +use trie_db::trie_root::{self, Value as TrieStreamValue}; +impl trie_db::trie_root::TrieStream for TrieStream { fn new() -> Self { Self { buffer: Vec::new() } } @@ -259,12 +259,14 @@ where None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ]; + let mut i_hash = 0; for i in 0..nibble_ops::NIBBLE_LENGTH { if bitmap.value_at(i) { let count = >::decode(&mut input)?.0 as usize; let range = input.take(count)?; children[i] = Some(if count == H::LENGTH { - NodeHandlePlan::Hash(range) + i_hash += 1; + NodeHandlePlan::Hash(range, i_hash - 1) } else { NodeHandlePlan::Inline(range) }); @@ -310,7 +312,11 @@ where &[trie_constants::EMPTY_TRIE] } - fn leaf_node(partial: impl Iterator, number_nibble: usize, value: Value) -> Vec { + fn leaf_node( + partial: impl Iterator, + number_nibble: usize, + value: Value, + ) -> Vec { let contains_hash = matches!(&value, Value::Node(..)); let mut output = if contains_hash { partial_from_iterator_encode(partial, number_nibble, NodeKind::HashedValueLeaf) @@ -322,7 +328,7 @@ where Compact(value.len() as u32).encode_to(&mut output); output.extend_from_slice(value); }, - Value::Node(hash) => { + Value::Node(hash, _) => { debug_assert!(hash.len() == H::LENGTH); output.extend_from_slice(hash); }, @@ -330,26 +336,26 @@ where output } - fn extension_node( + fn extension_node( _partial: impl Iterator, _nbnibble: usize, - _child: ChildReference<::Out>, + _child: ChildReference<::Out, L>, ) -> Vec { unreachable!("No extension codec.") } - fn branch_node( - _children: impl Iterator::Out>>>>, - _maybe_value: Option, + fn branch_node( + _children: impl Iterator::Out, L>>>>, + _maybe_value: Option>, ) -> Vec { unreachable!("No extension codec.") } - fn branch_node_nibbled( + fn branch_node_nibbled( partial: impl Iterator, number_nibble: usize, - children: impl Iterator::Out>>>>, - value: Option, + children: impl Iterator::Out, L>>>>, + value: Option>, ) -> Vec { let contains_hash = matches!(&value, Some(Value::Node(..))); let mut output = match (&value, contains_hash) { @@ -369,7 +375,7 @@ where Compact(value.len() as u32).encode_to(&mut output); output.extend_from_slice(value); }, - Some(Value::Node(hash)) => { + Some(Value::Node(hash, _)) => { debug_assert!(hash.len() == H::LENGTH); output.extend_from_slice(hash); }, @@ -377,7 +383,7 @@ where } Bitmap::encode( children.map(|maybe_child| match maybe_child.borrow() { - Some(ChildReference::Hash(h)) => { + Some(ChildReference::Hash(h, _)) => { h.as_ref().encode_to(&mut output); true }, @@ -470,6 +476,7 @@ where type Hash = H; type Codec = NodeCodec; + type Location = (); } impl TrieConfiguration for LayoutV0 @@ -512,6 +519,7 @@ where type Hash = H; type Codec = NodeCodec; + type Location = (); } impl TrieConfiguration for LayoutV1 diff --git a/test-support/reference-trie/src/substrate_like.rs b/reference-trie/src/substrate_like.rs similarity index 94% rename from 
test-support/reference-trie/src/substrate_like.rs rename to reference-trie/src/substrate_like.rs index 37cccefc..f1493799 100644 --- a/test-support/reference-trie/src/substrate_like.rs +++ b/reference-trie/src/substrate_like.rs @@ -22,7 +22,7 @@ pub struct HashedValueNoExt; /// No extension trie which stores value above a static size /// as external node. -pub struct HashedValueNoExtThreshold; +pub struct HashedValueNoExtThreshold(PhantomData); impl TrieLayout for HashedValueNoExt { const USE_EXTENSION: bool = false; @@ -31,15 +31,17 @@ impl TrieLayout for HashedValueNoExt { type Hash = RefHasher; type Codec = ReferenceNodeCodecNoExtMeta; + type Location = trie_db::mem_tree_db::Location; } -impl TrieLayout for HashedValueNoExtThreshold { +impl TrieLayout for HashedValueNoExtThreshold { const USE_EXTENSION: bool = false; const ALLOW_EMPTY: bool = false; const MAX_INLINE_VALUE: Option = Some(C); type Hash = RefHasher; type Codec = ReferenceNodeCodecNoExtMeta; + type Location = L; } /// Constants specific to encoding with external value node support. @@ -101,12 +103,14 @@ impl NodeCodec { None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ]; + let mut i_hash = 0; for i in 0..nibble_ops::NIBBLE_LENGTH { if bitmap.value_at(i) { let count = >::decode(&mut input)?.0 as usize; let range = input.take(count)?; children[i] = Some(if count == H::LENGTH { - NodeHandlePlan::Hash(range) + i_hash += 1; + NodeHandlePlan::Hash(range, i_hash - 1) } else { NodeHandlePlan::Inline(range) }); @@ -169,7 +173,11 @@ where &[trie_constants::EMPTY_TRIE] } - fn leaf_node(partial: impl Iterator, number_nibble: usize, value: Value) -> Vec { + fn leaf_node( + partial: impl Iterator, + number_nibble: usize, + value: Value, + ) -> Vec { let contains_hash = matches!(&value, Value::Node(..)); let mut output = if contains_hash { partial_from_iterator_encode(partial, number_nibble, NodeKind::HashedValueLeaf) @@ -181,7 +189,7 @@ where Compact(value.len() as u32).encode_to(&mut output); output.extend_from_slice(value); }, - Value::Node(hash) => { + Value::Node(hash, _) => { debug_assert!(hash.len() == H::LENGTH); output.extend_from_slice(hash); }, @@ -189,26 +197,26 @@ where output } - fn extension_node( + fn extension_node( _partial: impl Iterator, _nbnibble: usize, - _child: ChildReference<::Out>, + _child: ChildReference<::Out, L>, ) -> Vec { unreachable!("Codec without extension.") } - fn branch_node( - _children: impl Iterator::Out>>>>, - _maybe_value: Option, + fn branch_node( + _children: impl Iterator::Out, L>>>>, + _maybe_value: Option>, ) -> Vec { unreachable!("Codec without extension.") } - fn branch_node_nibbled( + fn branch_node_nibbled( partial: impl Iterator, number_nibble: usize, - children: impl Iterator::Out>>>>, - value: Option, + children: impl Iterator::Out, L>>>>, + value: Option>, ) -> Vec { let contains_hash = matches!(&value, Some(Value::Node(..))); let mut output = match (&value, contains_hash) { @@ -228,7 +236,7 @@ where Compact(value.len() as u32).encode_to(&mut output); output.extend_from_slice(value); }, - Some(Value::Node(hash)) => { + Some(Value::Node(hash, _)) => { debug_assert!(hash.len() == H::LENGTH); output.extend_from_slice(hash); }, @@ -236,7 +244,7 @@ where } Bitmap::encode( children.map(|maybe_child| match maybe_child.borrow() { - Some(ChildReference::Hash(h)) => { + Some(ChildReference::Hash(h, _)) => { h.as_ref().encode_to(&mut output); true }, @@ -459,8 +467,8 @@ fn fuse_nibbles_node<'a>(nibbles: &'a [u8], kind: NodeKind) -> impl Iterator 
Self { Self { buffer: Vec::new() } } diff --git a/trie-db/CHANGELOG.md b/subtrie/CHANGELOG.md similarity index 90% rename from trie-db/CHANGELOG.md rename to subtrie/CHANGELOG.md index ac450d40..08d51546 100644 --- a/trie-db/CHANGELOG.md +++ b/subtrie/CHANGELOG.md @@ -4,6 +4,11 @@ The format is based on [Keep a Changelog]. [Keep a Changelog]: http://keepachangelog.com/en/1.0.0/ +## [0.28.0] - 2023-09-12 +- Make `trie_nodes_recorded_for_key` work for inline values [#194](https://github.com/paritytech/trie/pull/194) +- trie-db: Fetch the closest merkle value [#199](https://github.com/paritytech/trie/pull/199) +- fixing triedbmut lookup, added some testing in test. [#198](https://github.com/paritytech/trie/pull/198) + ## [0.27.1] - 2023-03-17 - Fix `TrieDBRawIterator::prefix_then_seek` [#190](https://github.com/paritytech/trie/pull/190) diff --git a/subtrie/Cargo.toml b/subtrie/Cargo.toml new file mode 100644 index 00000000..a0c607ad --- /dev/null +++ b/subtrie/Cargo.toml @@ -0,0 +1,39 @@ +[package] +name = "subtrie" +version = "0.0.1" +authors = ["Parity Technologies "] +description = "Merkle-Patricia Trie generic over key hasher and node encoding" +repository = "https://github.com/paritytech/trie" +license = "Apache-2.0" +edition = "2018" + +[dependencies] +hash256-std-hasher = { path = "../hash256-std-hasher", version = "0.15.2", optional = true } +log = "0.4" +smallvec = { version = "1.0.0", features = ["union", "const_new"] } +rustc-hex = { version = "2.1.0", default-features = false, optional = true } +tiny-keccak = { version = "2.0.2", features = ["keccak"], optional = true } +parity-scale-codec = { version = "3.0.0", optional = true } +criterion = { version = "0.5.1", optional = true } + +[dev-dependencies] +criterion = "0.5.1" +hash256-std-hasher = { path = "../hash256-std-hasher", version = "0.15.2" } +tiny-keccak = { version = "2.0.2", features = ["keccak"] } + +[features] +default = ["std"] +bench = [ + "std", + "criterion", + "test_utils", +] +test_utils = [ + "std", + "hash256-std-hasher", + "tiny-keccak", + "parity-scale-codec", +] +std = [ + "rustc-hex", +] diff --git a/trie-db/fuzz/Cargo.toml b/subtrie/fuzz/Cargo.toml similarity index 89% rename from trie-db/fuzz/Cargo.toml rename to subtrie/fuzz/Cargo.toml index 84a03257..24e4a837 100644 --- a/trie-db/fuzz/Cargo.toml +++ b/subtrie/fuzz/Cargo.toml @@ -9,13 +9,11 @@ edition = "2018" cargo-fuzz = true [dependencies] -hash-db = { path = "../../hash-db", version = "0.16.0" } -memory-db = { path = "../../memory-db", version = "0.32.0" } reference-trie = { path = "../../test-support/reference-trie", version = "0.29.0" } arbitrary = { version = "1.3.0", features = ["derive"] } array-bytes = "6.0.0" -[dependencies.trie-db] +[dependencies.subtrie] path = ".." 
[dependencies.libfuzzer-sys] diff --git a/trie-db/fuzz/fuzz_targets/no_ext_insert.rs b/subtrie/fuzz/fuzz_targets/no_ext_insert.rs similarity index 100% rename from trie-db/fuzz/fuzz_targets/no_ext_insert.rs rename to subtrie/fuzz/fuzz_targets/no_ext_insert.rs diff --git a/trie-db/fuzz/fuzz_targets/no_ext_insert_rem.rs b/subtrie/fuzz/fuzz_targets/no_ext_insert_rem.rs similarity index 100% rename from trie-db/fuzz/fuzz_targets/no_ext_insert_rem.rs rename to subtrie/fuzz/fuzz_targets/no_ext_insert_rem.rs diff --git a/trie-db/fuzz/fuzz_targets/prefix_iter.rs b/subtrie/fuzz/fuzz_targets/prefix_iter.rs similarity index 100% rename from trie-db/fuzz/fuzz_targets/prefix_iter.rs rename to subtrie/fuzz/fuzz_targets/prefix_iter.rs diff --git a/trie-db/fuzz/fuzz_targets/prefix_seek_iter.rs b/subtrie/fuzz/fuzz_targets/prefix_seek_iter.rs similarity index 100% rename from trie-db/fuzz/fuzz_targets/prefix_seek_iter.rs rename to subtrie/fuzz/fuzz_targets/prefix_seek_iter.rs diff --git a/trie-db/fuzz/fuzz_targets/seek_iter.rs b/subtrie/fuzz/fuzz_targets/seek_iter.rs similarity index 100% rename from trie-db/fuzz/fuzz_targets/seek_iter.rs rename to subtrie/fuzz/fuzz_targets/seek_iter.rs diff --git a/trie-db/fuzz/fuzz_targets/trie_codec_proof.rs b/subtrie/fuzz/fuzz_targets/trie_codec_proof.rs similarity index 100% rename from trie-db/fuzz/fuzz_targets/trie_codec_proof.rs rename to subtrie/fuzz/fuzz_targets/trie_codec_proof.rs diff --git a/trie-db/fuzz/fuzz_targets/trie_proof_invalid.rs b/subtrie/fuzz/fuzz_targets/trie_proof_invalid.rs similarity index 100% rename from trie-db/fuzz/fuzz_targets/trie_proof_invalid.rs rename to subtrie/fuzz/fuzz_targets/trie_proof_invalid.rs diff --git a/trie-db/fuzz/fuzz_targets/trie_proof_valid.rs b/subtrie/fuzz/fuzz_targets/trie_proof_valid.rs similarity index 100% rename from trie-db/fuzz/fuzz_targets/trie_proof_valid.rs rename to subtrie/fuzz/fuzz_targets/trie_proof_valid.rs diff --git a/trie-db/fuzz/fuzz_targets/trie_root.rs b/subtrie/fuzz/fuzz_targets/trie_root.rs similarity index 100% rename from trie-db/fuzz/fuzz_targets/trie_root.rs rename to subtrie/fuzz/fuzz_targets/trie_root.rs diff --git a/trie-db/fuzz/fuzz_targets/trie_root_fix_len.rs b/subtrie/fuzz/fuzz_targets/trie_root_fix_len.rs similarity index 100% rename from trie-db/fuzz/fuzz_targets/trie_root_fix_len.rs rename to subtrie/fuzz/fuzz_targets/trie_root_fix_len.rs diff --git a/trie-db/fuzz/fuzz_targets/trie_root_new.rs b/subtrie/fuzz/fuzz_targets/trie_root_new.rs similarity index 100% rename from trie-db/fuzz/fuzz_targets/trie_root_new.rs rename to subtrie/fuzz/fuzz_targets/trie_root_new.rs diff --git a/trie-db/fuzz/src/lib.rs b/subtrie/fuzz/src/lib.rs similarity index 83% rename from trie-db/fuzz/src/lib.rs rename to subtrie/fuzz/src/lib.rs index f9de8c07..537a75c7 100644 --- a/trie-db/fuzz/src/lib.rs +++ b/subtrie/fuzz/src/lib.rs @@ -13,15 +13,15 @@ // limitations under the License. 
use arbitrary::Arbitrary; -use hash_db::Hasher; -use memory_db::{HashKey, MemoryDB, PrefixedKey}; use reference_trie::{ calc_root, compare_insert_remove, reference_trie_root_iter_build as reference_trie_root, }; -use std::convert::TryInto; -use trie_db::{ +use std::{convert::TryInto, fmt::Debug}; +use subtrie::{ + memory_db::{HashKey, MemoryDB, PrefixedKey}, + node_db::Hasher, proof::{generate_proof, verify_proof}, - DBValue, Trie, TrieDBBuilder, TrieDBIterator, TrieDBMutBuilder, TrieLayout, TrieMut, + DBValue, Trie, TrieDBBuilder, TrieDBIterator, TrieDBMutBuilder, TrieLayout, }; fn fuzz_to_data(input: &[u8]) -> Vec<(Vec, Vec)> { @@ -91,24 +91,22 @@ fn fuzz_removal(data: Vec<(Vec, Vec)>) -> Vec<(bool, Vec, Vec)> pub fn fuzz_that_reference_trie_root(input: &[u8]) { let data = data_sorted_unique(fuzz_to_data(input)); - let mut memdb = MemoryDB::<_, HashKey<_>, _>::default(); - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); + let memdb = MemoryDB::<_, HashKey<_>, _>::default(); + let mut t = TrieDBMutBuilder::::new(&memdb).build(); for a in 0..data.len() { t.insert(&data[a].0[..], &data[a].1[..]).unwrap(); } - assert_eq!(*t.root(), reference_trie_root::(data)); + assert_eq!(t.commit().root_hash(), reference_trie_root::(data)); } pub fn fuzz_that_reference_trie_root_fix_length(input: &[u8]) { let data = data_sorted_unique(fuzz_to_data_fix_length(input)); - let mut memdb = MemoryDB::<_, HashKey<_>, _>::default(); - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); + let memdb = MemoryDB::<_, HashKey<_>, _>::default(); + let mut t = TrieDBMutBuilder::::new(&memdb).build(); for a in 0..data.len() { t.insert(&data[a].0[..], &data[a].1[..]).unwrap(); } - assert_eq!(*t.root(), reference_trie_root::(data)); + assert_eq!(t.commit().root_hash(), reference_trie_root::(data)); } fn fuzz_to_data_fix_length(input: &[u8]) -> Vec<(Vec, Vec)> { @@ -132,20 +130,18 @@ fn data_sorted_unique(input: Vec<(Vec, Vec)>) -> Vec<(Vec, Vec)> m.into_iter().collect() } -pub fn fuzz_that_compare_implementations(input: &[u8]) { +pub fn fuzz_that_compare_implementations(input: &[u8]) + where T::Location: Debug, +{ let data = data_sorted_unique(fuzz_to_data(input)); - //println!("data:{:?}", &data); - let memdb = MemoryDB::<_, PrefixedKey<_>, _>::default(); - let hashdb = MemoryDB::, DBValue>::default(); - reference_trie::compare_implementations::(data, memdb, hashdb); + reference_trie::compare_implementations::>(data); } pub fn fuzz_that_no_extension_insert(input: &[u8]) { let data = fuzz_to_data(input); //println!("data{:?}", data); - let mut memdb = MemoryDB::<_, HashKey<_>, _>::default(); - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); + let memdb = MemoryDB::<_, HashKey<_>, _>::default(); + let mut t = TrieDBMutBuilder::::new(&memdb).build(); for a in 0..data.len() { t.insert(&data[a].0[..], &data[a].1[..]).unwrap(); } @@ -153,28 +149,25 @@ pub fn fuzz_that_no_extension_insert(input: &[u8]) { // before. 
let data = data_sorted_unique(fuzz_to_data(input)); //println!("data{:?}", data); - assert_eq!(*t.root(), calc_root::(data)); + assert_eq!(t.commit().root_hash(), calc_root::(data)); } pub fn fuzz_that_no_extension_insert_remove(input: &[u8]) { let data = fuzz_to_data(input); let data = fuzz_removal(data); - let memdb = MemoryDB::<_, PrefixedKey<_>, _>::default(); - compare_insert_remove::(data, memdb); + compare_insert_remove::>(data); } pub fn fuzz_seek_iter(input: &[u8]) { let data = data_sorted_unique(fuzz_to_data_fix_length(input)); let mut memdb = MemoryDB::<_, HashKey<_>, _>::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - for a in 0..data.len() { - t.insert(&data[a].0[..], &data[a].1[..]).unwrap(); - } + let mut t = TrieDBMutBuilder::::new(&memdb).build(); + for a in 0..data.len() { + t.insert(&data[a].0[..], &data[a].1[..]).unwrap(); } + let root = t.commit().apply_to(&mut memdb); // fuzzing around a fix prefix of 6 nibble. let prefix = &b"012"[..]; @@ -217,13 +210,11 @@ pub fn fuzz_prefix_iter(input: &[u8]) { let data = data_sorted_unique(fuzz_to_data_fix_length(input)); let mut memdb = MemoryDB::<_, HashKey<_>, _>::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - for a in 0..data.len() { - t.insert(&data[a].0[..], &data[a].1[..]).unwrap(); - } + let mut t = TrieDBMutBuilder::::new(&memdb).build(); + for a in 0..data.len() { + t.insert(&data[a].0[..], &data[a].1[..]).unwrap(); } + let root = t.commit().apply_to(&mut memdb); // fuzzing around a fix prefix of 6 nibble. let prefix = &b"012"[..]; @@ -283,17 +274,15 @@ pub fn fuzz_prefix_seek_iter(mut input: PrefixSeekTestInput) { input.keys.dedup(); let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - for (index, key) in input.keys.iter().enumerate() { - t.insert(&key, &[index as u8]).unwrap(); - } + let mut t = TrieDBMutBuilder::::new(&memdb).build(); + for (index, key) in input.keys.iter().enumerate() { + t.insert(&key, &[index as u8]).unwrap(); } + let root = t.commit().apply_to(&mut memdb); let trie = TrieDBBuilder::::new(&memdb, &root).build(); let iter = - trie_db::TrieDBIterator::new_prefixed_then_seek(&trie, &input.prefix_key, &input.seek_key) + subtrie::TrieDBIterator::new_prefixed_then_seek(&trie, &input.prefix_key, &input.seek_key) .unwrap(); let output_keys: Vec<_> = iter.map(|item| item.unwrap().0).collect(); @@ -393,13 +382,11 @@ fn test_generate_proof( // Populate DB with full trie from entries. let (db, root) = { let mut db = , _>>::default(); - let mut root = Default::default(); - { - let mut trie = TrieDBMutBuilder::::new(&mut db, &mut root).build(); - for (key, value) in entries { - trie.insert(&key, &value).unwrap(); - } + let mut trie = TrieDBMutBuilder::::new(&mut db).build(); + for (key, value) in entries { + trie.insert(&key, &value).unwrap(); } + let root = trie.commit().apply_to(&mut db); (db, root) }; @@ -418,19 +405,16 @@ fn test_generate_proof( } fn test_trie_codec_proof(entries: Vec<(Vec, Vec)>, keys: Vec>) { - use hash_db::{HashDB, EMPTY_PREFIX}; - use trie_db::{decode_compact, encode_compact, Recorder}; + use subtrie::{node_db::EMPTY_PREFIX, decode_compact, encode_compact, Recorder}; // Populate DB with full trie from entries. 
let (db, root) = { let mut db = , _>>::default(); - let mut root = Default::default(); - { - let mut trie = TrieDBMutBuilder::::new(&mut db, &mut root).build(); - for (key, value) in entries { - trie.insert(&key, &value).unwrap(); - } + let mut trie = TrieDBMutBuilder::::new(&db).build(); + for (key, value) in entries { + trie.insert(&key, &value).unwrap(); } + let root = trie.commit().apply_to(&mut db); (db, root) }; let expected_root = root; @@ -461,7 +445,7 @@ fn test_trie_codec_proof(entries: Vec<(Vec, Vec)>, keys: let expected_used = compact_trie.len(); // Reconstruct the partial DB from the compact encoding. let mut db = , _>>::default(); - let (root, used) = decode_compact::(&mut db, &compact_trie).unwrap(); + let (root, used) = decode_compact::(&mut db, &compact_trie).unwrap(); assert_eq!(root, expected_root); assert_eq!(used, expected_used); diff --git a/test-support/trie-bench/src/lib.rs b/subtrie/src/bench.rs similarity index 99% rename from test-support/trie-bench/src/lib.rs rename to subtrie/src/bench.rs index afa45849..870b945e 100644 --- a/test-support/trie-bench/src/lib.rs +++ b/subtrie/src/bench.rs @@ -14,15 +14,17 @@ //! Standard trie benchmarking tool. +use crate::{ + keccak_hasher::KeccakHasher, + memory_db::{HashKey, MemoryDB}, + node_db::Hasher, + test_utils::*, + trie_root::{trie_root, TrieStream}, + NodeCodec, Trie, TrieDBBuilder, TrieDBMutBuilder, TrieLayout, +}; use criterion::{black_box, BenchmarkId, Criterion}; -use hash_db::Hasher; -use keccak_hasher::KeccakHasher; -use memory_db::{HashKey, MemoryDB}; use parity_scale_codec::{Compact, Encode}; use std::default::Default; -use trie_db::{NodeCodec, Trie, TrieDBBuilder, TrieDBMutBuilder, TrieHash, TrieLayout, TrieMut}; -use trie_root::{trie_root, TrieStream}; -use trie_standardmap::*; struct TrieInsertionList(Vec<(Vec, Vec)>); impl ::std::fmt::Display for TrieInsertionList { @@ -60,8 +62,7 @@ fn benchmark( |b, d: &TrieInsertionList| { b.iter(&mut || { let mut memdb = MemoryDB::<_, HashKey, _>::new(L::Codec::empty_node()); - let mut root = >::default(); - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); for i in d.0.iter() { t.insert(&i.0, &i.1).unwrap(); } @@ -73,13 +74,15 @@ fn benchmark( bench_list, |b, d: &TrieInsertionList| { let mut memdb = MemoryDB::<_, HashKey<_>, _>::new(L::Codec::empty_node()); - let mut root = >::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); + let commit = { + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); for i in d.0.iter() { t.insert(&i.0, &i.1).unwrap(); } - } + t.commit() + }; + let root = commit.hash(); + commit.apply_to(&mut memdb); b.iter(&mut || { let t = TrieDBBuilder::::new(&memdb, &root).build(); for n in t.iter().unwrap() { diff --git a/trie-db/src/iter_build.rs b/subtrie/src/iter_build.rs similarity index 91% rename from trie-db/src/iter_build.rs rename to subtrie/src/iter_build.rs index c843c3f1..25463ddc 100644 --- a/trie-db/src/iter_build.rs +++ b/subtrie/src/iter_build.rs @@ -18,14 +18,15 @@ //! See `trie_visit` function. use crate::{ + memory_db::{KeyFunction, MemoryDB}, nibble::{nibble_ops, NibbleSlice}, node::Value, node_codec::NodeCodec, + node_db::{Hasher, Prefix, EMPTY_PREFIX}, rstd::{cmp::max, marker::PhantomData, vec::Vec}, triedbmut::ChildReference, DBValue, TrieHash, TrieLayout, }; -use hash_db::{HashDB, Hasher, Prefix}; macro_rules! 
exponential_out { (@3, [$($inpp:expr),*]) => { exponential_out!(@2, [$($inpp,)* $($inpp),*]) }; @@ -33,7 +34,7 @@ macro_rules! exponential_out { (@1, [$($inpp:expr),*]) => { [$($inpp,)* $($inpp),*] }; } -type CacheNode = Option>; +type CacheNode = Option>; #[inline(always)] fn new_vec_slice_buffer() -> [CacheNode; 16] { @@ -134,7 +135,7 @@ where value } else { hashed = callback.process_inner_hashed_value((k2.as_ref(), None), v2.as_ref()); - Value::Node(hashed.as_ref()) + Value::Node(hashed.as_ref(), ()) }; let encoded = T::Codec::leaf_node(nkey.right_iter(), nkey.len(), value); let hash = callback.process(pr.left(), encoded, false); @@ -184,7 +185,7 @@ where branch_d: usize, is_root: bool, nkey: Option<(usize, usize)>, - ) -> ChildReference> { + ) -> ChildReference, ()> { let last = self.0.len() - 1; assert_eq!(self.0[last].2, branch_d); @@ -201,7 +202,7 @@ where let mut prefix = NibbleSlice::new_offset(&key_branch, 0); prefix.advance(branch_d); hashed = callback.process_inner_hashed_value(prefix.left(), v.as_ref()); - Value::Node(hashed.as_ref()) + Value::Node(hashed.as_ref(), ()) }) } else { None @@ -229,7 +230,7 @@ where branch_d: usize, is_root: bool, nkey: Option<(usize, usize)>, - ) -> ChildReference> { + ) -> ChildReference, ()> { let (children, v, depth) = self.0.pop().expect("checked"); debug_assert!(branch_d == depth); @@ -244,7 +245,7 @@ where let mut prefix = NibbleSlice::new_offset(&key_branch, 0); prefix.advance(branch_d); hashed = callback.process_inner_hashed_value(prefix.left(), v.as_ref()); - Value::Node(hashed.as_ref()) + Value::Node(hashed.as_ref(), ()) }) } else { None @@ -318,7 +319,7 @@ where value } else { hashed = callback.process_inner_hashed_value((k2.as_ref(), None), v2.as_ref()); - Value::Node(hashed.as_ref()) + Value::Node(hashed.as_ref(), ()) }; let encoded = T::Codec::leaf_node(nkey.right_iter(), nkey.len(), value); @@ -330,7 +331,7 @@ where } } else { // nothing null root corner case - callback.process(hash_db::EMPTY_PREFIX, T::Codec::empty_node().to_vec(), true); + callback.process(EMPTY_PREFIX, T::Codec::empty_node().to_vec(), true); } } @@ -342,43 +343,43 @@ pub trait ProcessEncodedNode { /// Note that the returned value can change depending on implementation, /// but usually it should be the Hash of encoded node. /// This is not something direcly related to encoding but is here for - /// optimisation purpose (builder hash_db does return this value). + /// optimisation purpose (builder node_db does return this value). fn process( &mut self, prefix: Prefix, encoded_node: Vec, is_root: bool, - ) -> ChildReference; + ) -> ChildReference; /// Callback for hashed value in encoded node. fn process_inner_hashed_value(&mut self, prefix: Prefix, value: &[u8]) -> HO; } -/// Get trie root and insert visited node in a hash_db. +/// Get trie root and insert visited node in a node_db. /// As for all `ProcessEncodedNode` implementation, it /// is only for full trie parsing (not existing trie). 
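// `trie_visit` drives an ordered (key, value) iterator through a `ProcessEncodedNode`
// callback: `TrieRoot` only folds the encoded nodes into a root hash, while `TrieBuilder`
// below additionally inserts every node into a `MemoryDB`. A minimal closed-form-root
// sketch from a downstream crate's point of view; the turbofish mirrors the old
// `trie-db` signature of `trie_visit` and the data literals are illustrative.
fn example_closed_form_root<T: subtrie::TrieLayout>() -> subtrie::TrieHash<T> {
	use subtrie::{trie_visit, TrieRoot};

	// Input must be sorted by key and free of duplicates.
	let data = vec![
		(b"dog".to_vec(), b"puppy".to_vec()),
		(b"doge".to_vec(), b"coin".to_vec()),
	];
	let mut cb = TrieRoot::<T>::default();
	trie_visit::<T, _, _, _, _>(data.into_iter(), &mut cb);
	cb.root.unwrap_or_default()
}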
-pub struct TrieBuilder<'a, T: TrieLayout, DB> { - db: &'a mut DB, +pub struct TrieBuilder<'a, T: TrieLayout, K: KeyFunction + Send + Sync> { + db: &'a mut MemoryDB, pub root: Option>, } -impl<'a, T: TrieLayout, DB> TrieBuilder<'a, T, DB> { - pub fn new(db: &'a mut DB) -> Self { +impl<'a, T: TrieLayout, K: KeyFunction + Send + Sync> TrieBuilder<'a, T, K> { + pub fn new(db: &'a mut MemoryDB) -> Self { TrieBuilder { db, root: None } } } -impl<'a, T, DB> ProcessEncodedNode> for TrieBuilder<'a, T, DB> +impl<'a, T, K: KeyFunction + Send + Sync> ProcessEncodedNode> + for TrieBuilder<'a, T, K> where T: TrieLayout, - DB: HashDB, { fn process( &mut self, prefix: Prefix, encoded_node: Vec, is_root: bool, - ) -> ChildReference> { + ) -> ChildReference, ()> { let len = encoded_node.len(); if !is_root && len < ::LENGTH { let mut h = <::Out as Default>::default(); @@ -390,7 +391,7 @@ where if is_root { self.root = Some(hash); }; - ChildReference::Hash(hash) + ChildReference::Hash(hash, ()) } fn process_inner_hashed_value(&mut self, prefix: Prefix, value: &[u8]) -> TrieHash { @@ -416,7 +417,7 @@ impl ProcessEncodedNode> for TrieRoot { _: Prefix, encoded_node: Vec, is_root: bool, - ) -> ChildReference> { + ) -> ChildReference, ()> { let len = encoded_node.len(); if !is_root && len < ::LENGTH { let mut h = <::Out as Default>::default(); @@ -428,7 +429,7 @@ impl ProcessEncodedNode> for TrieRoot { if is_root { self.root = Some(hash); }; - ChildReference::Hash(hash) + ChildReference::Hash(hash, ()) } fn process_inner_hashed_value(&mut self, _prefix: Prefix, value: &[u8]) -> TrieHash { @@ -472,7 +473,7 @@ impl ProcessEncodedNode> for TrieRootPrint { p: Prefix, encoded_node: Vec, is_root: bool, - ) -> ChildReference> { + ) -> ChildReference, ()> { println!("Encoded node: {:x?}", &encoded_node); println!(" with prefix: {:x?}", &p); let len = encoded_node.len(); @@ -488,7 +489,7 @@ impl ProcessEncodedNode> for TrieRootPrint { self.root = Some(hash); }; println!(" hashed to {:x?}", hash.as_ref()); - ChildReference::Hash(hash) + ChildReference::Hash(hash, ()) } fn process_inner_hashed_value(&mut self, _prefix: Prefix, value: &[u8]) -> TrieHash { @@ -503,7 +504,7 @@ impl ProcessEncodedNode> for TrieRootUnhashed { _: Prefix, encoded_node: Vec, is_root: bool, - ) -> ChildReference<::Out> { + ) -> ChildReference<::Out, ()> { let len = encoded_node.len(); if !is_root && len < ::LENGTH { let mut h = <::Out as Default>::default(); @@ -516,7 +517,7 @@ impl ProcessEncodedNode> for TrieRootUnhashed { if is_root { self.root = Some(encoded_node); }; - ChildReference::Hash(hash) + ChildReference::Hash(hash, ()) } fn process_inner_hashed_value(&mut self, _prefix: Prefix, value: &[u8]) -> TrieHash { diff --git a/trie-db/src/iterator.rs b/subtrie/src/iterator.rs similarity index 51% rename from trie-db/src/iterator.rs rename to subtrie/src/iterator.rs index ca382b70..4a18b322 100644 --- a/trie-db/src/iterator.rs +++ b/subtrie/src/iterator.rs @@ -16,10 +16,10 @@ use super::{CError, DBValue, Result, Trie, TrieHash, TrieIterator, TrieLayout}; use crate::{ nibble::{nibble_ops, NibbleSlice, NibbleVec}, node::{Node, NodeHandle, NodePlan, OwnedNode, Value}, + node_db::{self, Prefix, EMPTY_PREFIX}, triedb::TrieDB, - TrieError, TrieItem, TrieKeyItem, + TrieDoubleEndedIterator, TrieError, TrieItem, TrieKeyItem, }; -use hash_db::{Hasher, Prefix, EMPTY_PREFIX}; use crate::rstd::{boxed::Box, sync::Arc, vec::Vec}; @@ -30,29 +30,40 @@ enum Status { At, AtChild(usize), Exiting, + AftExiting, } #[cfg_attr(feature = "std", derive(Debug))] 
#[derive(Eq, PartialEq)] -struct Crumb { - hash: Option, - node: Arc>, +struct Crumb { + hash: Option>, + node: Arc>, status: Status, } -impl Crumb { - /// Move on to next status in the node's sequence. - fn increment(&mut self) { +impl Crumb { + /// Move on to the next status in the node's sequence in a direction. + fn step(&mut self, fwd: bool) { self.status = match (self.status, self.node.node_plan()) { (Status::Entering, NodePlan::Extension { .. }) => Status::At, (Status::Entering, NodePlan::Branch { .. }) | (Status::Entering, NodePlan::NibbledBranch { .. }) => Status::At, (Status::At, NodePlan::Branch { .. }) | - (Status::At, NodePlan::NibbledBranch { .. }) => Status::AtChild(0), + (Status::At, NodePlan::NibbledBranch { .. }) => + if fwd { + Status::AtChild(0) + } else { + Status::AtChild(nibble_ops::NIBBLE_LENGTH - 1) + }, (Status::AtChild(x), NodePlan::Branch { .. }) | (Status::AtChild(x), NodePlan::NibbledBranch { .. }) - if x < (nibble_ops::NIBBLE_LENGTH - 1) => + if fwd && x < (nibble_ops::NIBBLE_LENGTH - 1) => Status::AtChild(x + 1), + (Status::AtChild(x), NodePlan::Branch { .. }) | + (Status::AtChild(x), NodePlan::NibbledBranch { .. }) + if !fwd && x > 0 => + Status::AtChild(x - 1), + (Status::Exiting, _) => Status::AftExiting, _ => Status::Exiting, } } @@ -60,7 +71,9 @@ impl Crumb { /// Iterator for going through all nodes in the trie in pre-order traversal order. pub struct TrieDBRawIterator { - trail: Vec>, + /// Forward trail of nodes to visit. + trail: Vec>, + /// Forward iteration key nibbles of the current node. key_nibbles: NibbleVec, } @@ -76,10 +89,11 @@ impl TrieDBRawIterator { TrieDBRawIterator { trail: Vec::with_capacity(8), key_nibbles: NibbleVec::new() }; let (root_node, root_hash) = db.get_raw_or_lookup( *db.root(), - NodeHandle::Hash(db.root().as_ref()), + NodeHandle::Hash(db.root().as_ref(), db.root_location()), EMPTY_PREFIX, true, )?; + r.descend(root_node, root_hash); Ok(r) } @@ -87,7 +101,7 @@ impl TrieDBRawIterator { /// Create a new iterator, but limited to a given prefix. pub fn new_prefixed(db: &TrieDB, prefix: &[u8]) -> Result, CError> { let mut iter = TrieDBRawIterator::new(db)?; - iter.prefix(db, prefix)?; + iter.prefix(db, prefix, true)?; Ok(iter) } @@ -105,8 +119,8 @@ impl TrieDBRawIterator { Ok(iter) } - /// Descend into a payload. - fn descend(&mut self, node: OwnedNode, node_hash: Option>) { + /// Descend into a node. + fn descend(&mut self, node: OwnedNode, node_hash: Option>) { self.trail .push(Crumb { hash: node_hash, status: Status::Entering, node: Arc::new(node) }); } @@ -116,10 +130,11 @@ impl TrieDBRawIterator { db: &TrieDB, key: &[u8], prefix: Prefix, + location: L::Location, ) -> Result, CError> { let mut res = TrieHash::::default(); res.as_mut().copy_from_slice(key); - db.fetch_value(res, prefix) + db.fetch_value(res, prefix, location) } /// Seek a node position at 'key' for iterator. 
@@ -132,6 +147,7 @@ impl TrieDBRawIterator { &mut self, db: &TrieDB, key: &[u8], + fwd: bool, ) -> Result, CError> { self.trail.clear(); self.key_nibbles.clear(); @@ -139,7 +155,7 @@ impl TrieDBRawIterator { let (mut node, mut node_hash) = db.get_raw_or_lookup( >::default(), - NodeHandle::Hash(db.root().as_ref()), + NodeHandle::Hash(db.root().as_ref(), Default::default()), EMPTY_PREFIX, true, )?; @@ -149,29 +165,30 @@ impl TrieDBRawIterator { let (next_node, next_node_hash) = { self.descend(node, node_hash); let crumb = self.trail.last_mut().expect( - "descend_into_node pushes a crumb onto the trial; \ + "descend pushes a crumb onto the trail; \ thus the trail is non-empty; qed", ); let node_data = crumb.node.data(); + let locations = crumb.node.locations(); match crumb.node.node_plan() { NodePlan::Leaf { partial: partial_plan, .. } => { let slice = partial_plan.build(node_data); - if slice < partial { + if (fwd && slice < partial) || (!fwd && slice > partial) { crumb.status = Status::Exiting; - return Ok(false) + return Ok(false); } - return Ok(slice.starts_with(&partial)) + return Ok(slice.starts_with(&partial)); }, NodePlan::Extension { partial: partial_plan, child } => { let slice = partial_plan.build(node_data); if !partial.starts_with(&slice) { - if slice < partial { + if (fwd && slice < partial) || (!fwd && slice > partial) { crumb.status = Status::Exiting; self.key_nibbles.append_partial(slice.right()); - return Ok(false) + return Ok(false); } - return Ok(slice.starts_with(&partial)) + return Ok(slice.starts_with(&partial)); } full_key_nibbles += slice.len(); @@ -182,52 +199,60 @@ impl TrieDBRawIterator { let prefix = key.back(full_key_nibbles); db.get_raw_or_lookup( node_hash.unwrap_or_default(), - child.build(node_data), + child.build(node_data, locations.first().copied().unwrap_or_default()), prefix.left(), true, )? }, - NodePlan::Branch { value: _, children } => { + NodePlan::Branch { value, children } => { if partial.is_empty() { - return Ok(true) + return Ok(true); } let i = partial.at(0); crumb.status = Status::AtChild(i as usize); self.key_nibbles.push(i); - if let Some(child) = &children[i as usize] { + if children[i as usize].is_some() { + let child = NodePlan::build_child( + value.as_ref(), + children, + i as usize, + node_data, + locations, + ); + full_key_nibbles += 1; partial = partial.mid(1); let prefix = key.back(full_key_nibbles); db.get_raw_or_lookup( node_hash.unwrap_or_default(), - child.build(node_data), + child.unwrap(), prefix.left(), true, )? 
} else { - return Ok(false) + return Ok(false); } }, - NodePlan::NibbledBranch { partial: partial_plan, value: _, children } => { + NodePlan::NibbledBranch { partial: partial_plan, value, children } => { let slice = partial_plan.build(node_data); if !partial.starts_with(&slice) { - if slice < partial { + if (fwd && slice < partial) || (!fwd && slice > partial) { crumb.status = Status::Exiting; self.key_nibbles.append_partial(slice.right()); self.key_nibbles.push((nibble_ops::NIBBLE_LENGTH - 1) as u8); - return Ok(false) + return Ok(false); } - return Ok(slice.starts_with(&partial)) + return Ok(slice.starts_with(&partial)); } full_key_nibbles += slice.len(); partial = partial.mid(slice.len()); if partial.is_empty() { - return Ok(true) + return Ok(true); } let i = partial.at(0); @@ -235,27 +260,35 @@ impl TrieDBRawIterator { self.key_nibbles.append_partial(slice.right()); self.key_nibbles.push(i); - if let Some(child) = &children[i as usize] { + if children[i as usize].is_some() { + let child = NodePlan::build_child( + value.as_ref(), + children, + i as usize, + node_data, + locations, + ); + full_key_nibbles += 1; partial = partial.mid(1); let prefix = key.back(full_key_nibbles); db.get_raw_or_lookup( node_hash.unwrap_or_default(), - child.build(node_data), + child.unwrap(), prefix.left(), true, )? } else { - return Ok(false) + return Ok(false); } }, NodePlan::Empty => { if !partial.is_empty() { crumb.status = Status::Exiting; - return Ok(false) + return Ok(false); } - return Ok(true) + return Ok(true); }, } }; @@ -267,8 +300,13 @@ impl TrieDBRawIterator { /// Advance the iterator into a prefix, no value out of the prefix will be accessed /// or returned after this operation. - fn prefix(&mut self, db: &TrieDB, prefix: &[u8]) -> Result<(), TrieHash, CError> { - if self.seek(db, prefix)? { + fn prefix( + &mut self, + db: &TrieDB, + prefix: &[u8], + fwd: bool, + ) -> Result<(), TrieHash, CError> { + if self.seek(db, prefix, fwd)? { if let Some(v) = self.trail.pop() { self.trail.clear(); self.trail.push(v); @@ -290,31 +328,31 @@ impl TrieDBRawIterator { ) -> Result<(), TrieHash, CError> { if prefix.is_empty() { // There's no prefix, so just seek. - return self.seek(db, seek).map(|_| ()) + return self.seek(db, seek, true).map(|_| ()); } if seek.is_empty() || seek <= prefix { // Either we're not supposed to seek anywhere, // or we're supposed to seek *before* the prefix, // so just directly go to the prefix. - return self.prefix(db, prefix) + return self.prefix(db, prefix, true); } if !seek.starts_with(prefix) { // We're supposed to seek *after* the prefix, // so just return an empty iterator. self.trail.clear(); - return Ok(()) + return Ok(()); } - if !self.seek(db, prefix)? { + if !self.seek(db, prefix, true)? { // The database doesn't have a key with such a prefix. self.trail.clear(); - return Ok(()) + return Ok(()); } // Now seek forward again. - self.seek(db, seek)?; + self.seek(db, seek, true)?; let prefix_len = prefix.len() * crate::nibble::nibble_ops::NIBBLE_PER_BYTE; let mut len = 0; @@ -338,7 +376,7 @@ impl TrieDBRawIterator { } if len > prefix_len { self.trail = self.trail.split_off(i); - return Ok(()) + return Ok(()); } } @@ -349,12 +387,15 @@ impl TrieDBRawIterator { /// Fetches the next raw item. // /// Must be called with the same `db` as when the iterator was created. + /// + /// Specify `fwd` to indicate the direction of the iteration (`true` for forward). 
pub(crate) fn next_raw_item( &mut self, db: &TrieDB, + fwd: bool, ) -> Option< Result< - (&NibbleVec, Option<&TrieHash>, &Arc>), + (&NibbleVec, Option<&TrieHash>, &Arc>), TrieHash, CError, >, @@ -362,13 +403,20 @@ impl TrieDBRawIterator { loop { let crumb = self.trail.last_mut()?; let node_data = crumb.node.data(); + let locations = crumb.node.locations(); match (crumb.status, crumb.node.node_plan()) { - (Status::Entering, _) => { - // This is only necessary due to current borrow checker's limitation. - let crumb = self.trail.last_mut().expect("we've just fetched the last element using `last_mut` so this cannot fail; qed"); - crumb.increment(); - return Some(Ok((&self.key_nibbles, crumb.hash.as_ref(), &crumb.node))) + (Status::Entering, _) => + if fwd { + let crumb = self.trail.last_mut().expect("we've just fetched the last element using `last_mut` so this cannot fail; qed"); + crumb.step(fwd); + return Some(Ok((&self.key_nibbles, crumb.hash.as_ref(), &crumb.node))); + } else { + crumb.step(fwd); + }, + (Status::AftExiting, _) => { + self.trail.pop().expect("we've just fetched the last element using `last_mut` so this cannot fail; qed"); + self.trail.last_mut()?.step(fwd); }, (Status::Exiting, node) => { match node { @@ -383,8 +431,11 @@ impl TrieDBRawIterator { self.key_nibbles.drop_lasts(partial.len() + 1); }, } - self.trail.pop().expect("we've just fetched the last element using `last_mut` so this cannot fail; qed"); - self.trail.last_mut()?.increment(); + self.trail.last_mut()?.step(fwd); + if !fwd { + let crumb = self.trail.last_mut().expect("we've just fetched the last element using `last_mut` so this cannot fail; qed"); + return Some(Ok((&self.key_nibbles, crumb.hash.as_ref(), &crumb.node))); + } }, (Status::At, NodePlan::Extension { partial: partial_plan, child }) => { let partial = partial_plan.build(node_data); @@ -392,7 +443,7 @@ impl TrieDBRawIterator { match db.get_raw_or_lookup( crumb.hash.unwrap_or_default(), - child.build(node_data), + child.build(node_data, locations.first().copied().unwrap_or_default()), self.key_nibbles.as_prefix(), true, ) { @@ -400,30 +451,46 @@ impl TrieDBRawIterator { self.descend(node, node_hash); }, Err(err) => { - crumb.increment(); - return Some(Err(err)) + crumb.step(fwd); + return Some(Err(err)); }, } }, (Status::At, NodePlan::Branch { .. }) => { - self.key_nibbles.push(0); - crumb.increment(); + self.key_nibbles.push(if fwd { + 0 + } else { + (nibble_ops::NIBBLE_LENGTH - 1) as u8 + }); + crumb.step(fwd); }, (Status::At, NodePlan::NibbledBranch { partial: partial_plan, .. }) => { let partial = partial_plan.build(node_data); self.key_nibbles.append_partial(partial.right()); - self.key_nibbles.push(0); - crumb.increment(); + self.key_nibbles.push(if fwd { + 0 + } else { + (nibble_ops::NIBBLE_LENGTH - 1) as u8 + }); + crumb.step(fwd); }, - (Status::AtChild(i), NodePlan::Branch { children, .. }) | - (Status::AtChild(i), NodePlan::NibbledBranch { children, .. }) => { - if let Some(child) = &children[i] { + (Status::AtChild(i), NodePlan::Branch { value, children, .. }) | + (Status::AtChild(i), NodePlan::NibbledBranch { value, children, .. 
}) => { + if children[i].is_some() { + let child = NodePlan::build_child( + value.as_ref(), + children, + i, + node_data, + locations, + ); + self.key_nibbles.pop(); self.key_nibbles.push(i as u8); match db.get_raw_or_lookup( crumb.hash.unwrap_or_default(), - child.build(node_data), + child.unwrap(), self.key_nibbles.as_prefix(), true, ) { @@ -431,16 +498,16 @@ impl TrieDBRawIterator { self.descend(node, node_hash); }, Err(err) => { - crumb.increment(); - return Some(Err(err)) + crumb.step(fwd); + return Some(Err(err)); }, } } else { - crumb.increment(); + crumb.step(fwd); } }, _ => panic!( - "Crumb::increment and TrieDBNodeIterator are implemented so that \ + "Crumb::step and TrieDBNodeIterator are implemented so that \ the above arms are the only possible states" ), } @@ -451,92 +518,168 @@ impl TrieDBRawIterator { /// /// Must be called with the same `db` as when the iterator was created. pub fn next_item(&mut self, db: &TrieDB) -> Option, CError>> { - while let Some(raw_item) = self.next_raw_item(db) { + while let Some(raw_item) = self.next_raw_item(db, true) { let (prefix, _, node) = match raw_item { Ok(raw_item) => raw_item, Err(err) => return Some(Err(err)), }; - let mut prefix = prefix.clone(); - let value = match node.node() { - Node::Leaf(partial, value) => { - prefix.append_partial(partial.right()); - value - }, - Node::Branch(_, value) => match value { - Some(value) => value, - None => continue, - }, - Node::NibbledBranch(partial, _, value) => { - prefix.append_partial(partial.right()); - match value { - Some(value) => value, - None => continue, - } - }, - _ => continue, + match Self::value_from_raw(prefix, node, db) { + Some(r) => return Some(r), + None => continue, + } + } + None + } + + /// Fetches the previous trie item. + /// + /// Must be called with the same `db` as when the iterator was created. 
+ pub fn prev_item(&mut self, db: &TrieDB) -> Option, CError>> { + while let Some(raw_item) = self.next_raw_item(db, false) { + let (prefix, _, node) = match raw_item { + Ok(raw_item) => raw_item, + Err(err) => return Some(Err(err)), }; - let (key_slice, maybe_extra_nibble) = prefix.as_prefix(); - let key = key_slice.to_vec(); - if let Some(extra_nibble) = maybe_extra_nibble { - return Some(Err(Box::new(TrieError::ValueAtIncompleteKey(key, extra_nibble)))) + match Self::value_from_raw(prefix, node, db) { + Some(r) => return Some(r), + None => continue, } + } + None + } + + pub(crate) fn value_from_raw( + prefix: &NibbleVec, + node: &Arc>, + db: &TrieDB, + ) -> Option, CError>> { + let mut prefix = prefix.clone(); + let value = match node.node() { + Node::Leaf(partial, value) => { + prefix.append_partial(partial.right()); + value + }, + Node::Branch(_, value) => match value { + Some(value) => value, + None => return None, + }, + Node::NibbledBranch(partial, _, value) => { + prefix.append_partial(partial.right()); + match value { + Some(value) => value, + None => return None, + } + }, + _ => return None, + }; + + let (key_slice, maybe_extra_nibble) = prefix.as_prefix(); + let key = key_slice.to_vec(); + if let Some(extra_nibble) = maybe_extra_nibble { + return Some(Err(Box::new(TrieError::ValueAtIncompleteKey(key, extra_nibble)))) + } - let value = match value { - Value::Node(hash) => match Self::fetch_value(db, &hash, (key_slice, None)) { + let value = match value { + Value::Node(hash, location) => + match Self::fetch_value(db, &hash, (key_slice, None), location) { Ok(value) => value, Err(err) => return Some(Err(err)), }, - Value::Inline(value) => value.to_vec(), - }; + Value::Inline(value) => value.to_vec(), + }; - return Some(Ok((key, value))) - } - None + return Some(Ok((key, value))) } /// Fetches the next key. /// /// Must be called with the same `db` as when the iterator was created. pub fn next_key(&mut self, db: &TrieDB) -> Option, CError>> { - while let Some(raw_item) = self.next_raw_item(db) { - let (prefix, _, node) = match raw_item { - Ok(raw_item) => raw_item, - Err(err) => return Some(Err(err)), + while let Some(raw_item) = self.next_raw_item(db, true) { + let (key, maybe_extra_nibble, _) = match Self::extract_key_from_raw_item(raw_item) { + Some(Ok(k)) => k, + Some(Err(err)) => return Some(Err(err)), + None => continue, }; - let mut prefix = prefix.clone(); - match node.node() { - Node::Leaf(partial, _) => { - prefix.append_partial(partial.right()); - }, - Node::Branch(_, value) => - if value.is_none() { - continue - }, - Node::NibbledBranch(partial, _, value) => { - prefix.append_partial(partial.right()); - if value.is_none() { - continue - } - }, - _ => continue, + if let Some(extra_nibble) = maybe_extra_nibble { + return Some(Err(Box::new(TrieError::ValueAtIncompleteKey(key, extra_nibble)))); + } + + return Some(Ok(key)); + } + None + } + + /// Fetches the previous key. + /// + /// Must be called with the same `db` as when the iterator was created. 
+ pub fn prev_key(&mut self, db: &TrieDB) -> Option, CError>> { + while let Some(raw_item) = self.next_raw_item(db, false) { + let (key, maybe_extra_nibble, _) = match Self::extract_key_from_raw_item(raw_item) { + Some(Ok(k)) => k, + Some(Err(err)) => return Some(Err(err)), + None => continue, }; - let (key_slice, maybe_extra_nibble) = prefix.as_prefix(); - let key = key_slice.to_vec(); if let Some(extra_nibble) = maybe_extra_nibble { - return Some(Err(Box::new(TrieError::ValueAtIncompleteKey(key, extra_nibble)))) + return Some(Err(Box::new(TrieError::ValueAtIncompleteKey(key, extra_nibble)))); } - return Some(Ok(key)) + return Some(Ok(key)); } None } + + /// Extracts the key from the result of a raw item retrieval. + /// + /// Given a raw item, it extracts the key information, including the key bytes, an optional + /// extra nibble (prefix padding), and the node value. + fn extract_key_from_raw_item<'a>( + raw_item: Result< + (&NibbleVec, Option<&TrieHash>, &'a Arc>), + TrieHash, + CError, + >, + ) -> Option, Option, Value<'a, L::Location>), TrieHash, CError>> { + let (prefix, _, node) = match raw_item { + Ok(raw_item) => raw_item, + Err(err) => return Some(Err(err)), + }; + + let mut prefix = prefix.clone(); + let value = match node.node() { + Node::Leaf(partial, value) => { + prefix.append_partial(partial.right()); + value + }, + Node::Branch(_, value) => match value { + Some(value) => value, + None => return None, + }, + Node::NibbledBranch(partial, _, value) => { + prefix.append_partial(partial.right()); + match value { + Some(value) => value, + None => return None, + } + }, + _ => return None, + }; + + let (key_slice, maybe_extra_nibble) = prefix.as_prefix(); + + Some(Ok((key_slice.to_vec(), maybe_extra_nibble, value))) + } } /// Iterator for going through all nodes in the trie in pre-order traversal order. +/// +/// You can reduce the number of iterations and simultaneously iterate in both directions with two +/// cursors by using `TrieDBNodeDoubleEndedIterator`. You can convert this iterator into a double +/// ended iterator with `into_double_ended_iter`. pub struct TrieDBNodeIterator<'a, 'cache, L: TrieLayout> { db: &'a TrieDB<'a, 'cache, L>, raw_iter: TrieDBRawIterator, @@ -563,14 +706,15 @@ impl<'a, 'cache, L: TrieLayout> TrieDBNodeIterator<'a, 'cache, L> { &self, key: &[u8], prefix: Prefix, + location: L::Location, ) -> Result, CError> { - TrieDBRawIterator::fetch_value(self.db, key, prefix) + TrieDBRawIterator::fetch_value(self.db, key, prefix, location) } /// Advance the iterator into a prefix, no value out of the prefix will be accessed /// or returned after this operation. pub fn prefix(&mut self, prefix: &[u8]) -> Result<(), TrieHash, CError> { - self.raw_iter.prefix(self.db, prefix) + self.raw_iter.prefix(self.db, prefix, true) } /// Advance the iterator into a prefix, no value out of the prefix will be accessed @@ -584,23 +728,137 @@ impl<'a, 'cache, L: TrieLayout> TrieDBNodeIterator<'a, 'cache, L> { } /// Access inner hash db. - pub fn db(&self) -> &dyn hash_db::HashDBRef { + pub fn db(&self) -> &dyn node_db::NodeDB { self.db.db() } + + /// Access value of an item. 
+ pub fn item_from_raw( + &self, + item: &(NibbleVec, Option>, Arc>), + ) -> Option, CError>> { + TrieDBRawIterator::value_from_raw(&item.0, &item.2, self.db) + } } impl<'a, 'cache, L: TrieLayout> TrieIterator for TrieDBNodeIterator<'a, 'cache, L> { fn seek(&mut self, key: &[u8]) -> Result<(), TrieHash, CError> { - self.raw_iter.seek(self.db, key).map(|_| ()) + self.raw_iter.seek(self.db, key, true).map(|_| ()) } } impl<'a, 'cache, L: TrieLayout> Iterator for TrieDBNodeIterator<'a, 'cache, L> { - type Item = - Result<(NibbleVec, Option>, Arc>), TrieHash, CError>; + type Item = Result< + (NibbleVec, Option>, Arc>), + TrieHash, + CError, + >; fn next(&mut self) -> Option { - self.raw_iter.next_raw_item(self.db).map(|result| { + self.raw_iter.next_raw_item(self.db, true).map(|result| { + result.map(|(nibble, hash, node)| (nibble.clone(), hash.cloned(), node.clone())) + }) + } +} + +/// Double ended iterator for going through all nodes in the trie in pre-order traversal order. +pub struct TrieDBNodeDoubleEndedIterator<'a, 'cache, L: TrieLayout> { + db: &'a TrieDB<'a, 'cache, L>, + raw_iter: TrieDBRawIterator, + back_raw_iter: TrieDBRawIterator, +} + +impl<'a, 'cache, L: TrieLayout> TrieDBNodeDoubleEndedIterator<'a, 'cache, L> { + /// Create a new double ended iterator. + pub fn new(db: &'a TrieDB<'a, 'cache, L>) -> Result, CError> { + Ok(Self { + db, + raw_iter: TrieDBRawIterator::new(db)?, + back_raw_iter: TrieDBRawIterator::new(db)?, + }) + } + + /// Restore an iterator from a raw iterators. + pub fn from_raw( + db: &'a TrieDB<'a, 'cache, L>, + raw_iter: TrieDBRawIterator, + back_raw_iter: TrieDBRawIterator, + ) -> Self { + Self { db, raw_iter, back_raw_iter } + } + + /// Convert the iterator to a raw forward iterator. + pub fn into_raw(self) -> TrieDBRawIterator { + self.raw_iter + } + + /// Convert the iterator to a raw backward iterator. + pub fn into_raw_back(self) -> TrieDBRawIterator { + self.back_raw_iter + } + + /// Fetch value by hash at a current node height + pub fn fetch_value( + &self, + key: &[u8], + prefix: Prefix, + location: L::Location, + ) -> Result, CError> { + TrieDBRawIterator::fetch_value(self.db, key, prefix, location) + } + + /// Advance the iterator into a prefix, no value out of the prefix will be accessed + /// or returned after this operation. + pub fn prefix(&mut self, prefix: &[u8]) -> Result<(), TrieHash, CError> { + self.raw_iter.prefix(self.db, prefix, true)?; + self.back_raw_iter.prefix(self.db, prefix, false) + } + + /// Advance the iterator into a prefix, no value out of the prefix will be accessed + /// or returned after this operation. + pub fn prefix_then_seek( + &mut self, + prefix: &[u8], + seek: &[u8], + ) -> Result<(), TrieHash, CError> { + self.raw_iter.prefix_then_seek(self.db, prefix, seek)?; + self.back_raw_iter.prefix_then_seek(self.db, prefix, seek) + } + + /// Access inner hash db. 
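// The node iterator gains a double ended counterpart in this file, so a trie can be
// walked from both ends with two independent cursors. A minimal usage sketch from a
// downstream crate's point of view, assuming a read-only `trie` built with
// `TrieDBBuilder` as in the fuzz targets earlier in this change; `unwrap` is for
// brevity only and the function name is illustrative.
fn example_double_ended_walk<T: subtrie::TrieLayout>(trie: &subtrie::TrieDB<T>) {
	use subtrie::TrieDBNodeDoubleEndedIterator;

	let mut iter = TrieDBNodeDoubleEndedIterator::new(trie).unwrap();
	// Pre-order traversal from the front...
	let front = iter.next();
	// ...and reverse pre-order from the back; the two cursors share no state.
	let back = iter.next_back();
	let _ = (front, back);
}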
+ pub fn db(&self) -> &dyn node_db::NodeDB { + self.db.db() + } +} + +impl TrieDoubleEndedIterator for TrieDBNodeDoubleEndedIterator<'_, '_, L> {} + +impl<'a, 'cache, L: TrieLayout> TrieIterator for TrieDBNodeDoubleEndedIterator<'a, 'cache, L> { + fn seek(&mut self, key: &[u8]) -> Result<(), TrieHash, CError> { + self.raw_iter.seek(self.db, key, true).map(|_| ())?; + self.back_raw_iter.seek(self.db, key, false).map(|_| ()) + } +} + +impl<'a, 'cache, L: TrieLayout> Iterator for TrieDBNodeDoubleEndedIterator<'a, 'cache, L> { + type Item = Result< + (NibbleVec, Option>, Arc>), + TrieHash, + CError, + >; + + fn next(&mut self) -> Option { + self.raw_iter.next_raw_item(self.db, true).map(|result| { + result.map(|(nibble, hash, node)| (nibble.clone(), hash.cloned(), node.clone())) + }) + } +} + +impl<'a, 'cache, L: TrieLayout> DoubleEndedIterator + for TrieDBNodeDoubleEndedIterator<'a, 'cache, L> +{ + fn next_back(&mut self) -> Option { + self.back_raw_iter.next_raw_item(self.db, false).map(|result| { result.map(|(nibble, hash, node)| (nibble.clone(), hash.cloned(), node.clone())) }) } diff --git a/test-support/keccak-hasher/src/lib.rs b/subtrie/src/keccak_hasher.rs similarity index 98% rename from test-support/keccak-hasher/src/lib.rs rename to subtrie/src/keccak_hasher.rs index b4692f77..05ea572b 100644 --- a/test-support/keccak-hasher/src/lib.rs +++ b/subtrie/src/keccak_hasher.rs @@ -14,8 +14,8 @@ //! Hasher implementation for the Keccak-256 hash +use crate::node_db::Hasher; use hash256_std_hasher::Hash256StdHasher; -use hash_db::Hasher; use tiny_keccak::{Hasher as _, Keccak}; /// The `Keccak` hash output type. diff --git a/trie-db/src/lib.rs b/subtrie/src/lib.rs similarity index 73% rename from trie-db/src/lib.rs rename to subtrie/src/lib.rs index b09372b2..51421177 100644 --- a/trie-db/src/lib.rs +++ b/subtrie/src/lib.rs @@ -21,14 +21,21 @@ extern crate alloc; #[cfg(feature = "std")] mod rstd { pub use std::{ - borrow, boxed, cmp, collections::VecDeque, convert, error::Error, fmt, hash, iter, marker, - mem, ops, rc, result, sync, vec, + borrow, boxed, cmp, + collections::{BTreeMap, VecDeque}, + convert, + error::Error, + fmt, hash, iter, marker, mem, ops, result, sync, vec, }; } #[cfg(not(feature = "std"))] mod rstd { - pub use alloc::{borrow, boxed, collections::VecDeque, rc, sync, vec}; + pub use alloc::{ + borrow, boxed, + collections::{btree_map::BTreeMap, VecDeque}, + rc, sync, vec, + }; pub use core::{cmp, convert, fmt, hash, iter, marker, mem, ops, result}; pub trait Error {} impl Error for T {} @@ -38,19 +45,27 @@ mod rstd { use self::rstd::{fmt, Error}; use self::rstd::{boxed::Box, vec::Vec}; -use hash_db::MaybeDebug; +pub use iterator::TrieDBNodeDoubleEndedIterator; use node::NodeOwned; +use node_db::MaybeDebug; +#[cfg(feature = "bench")] +pub mod bench; +#[cfg(any(feature = "test_utils", test))] +pub mod keccak_hasher; +#[cfg(feature = "std")] +pub mod mem_tree_db; +pub mod memory_db; pub mod node; +pub mod node_db; pub mod proof; pub mod recorder; -pub mod sectriedb; -pub mod sectriedbmut; +#[cfg(feature = "test_utils")] +pub mod test_utils; +pub mod trie_root; pub mod triedb; pub mod triedbmut; -mod fatdb; -mod fatdbmut; mod iter_build; mod iterator; mod lookup; @@ -59,23 +74,22 @@ mod node_codec; mod trie_codec; pub use self::{ - fatdb::{FatDB, FatDBIterator}, - fatdbmut::FatDBMut, lookup::Lookup, nibble::{nibble_ops, NibbleSlice, NibbleVec}, recorder::Recorder, - sectriedb::SecTrieDB, - sectriedbmut::SecTrieDBMut, triedb::{TrieDB, TrieDBBuilder, TrieDBIterator, 
TrieDBKeyIterator}, - triedbmut::{ChildReference, TrieDBMut, TrieDBMutBuilder, Value}, + triedbmut::{ + Changeset, ChildReference, ExistingChangesetNode, NewChangesetNode, OwnedPrefix, TrieDBMut, + TrieDBMutBuilder, Value, + }, }; +use crate::node_db::Hasher; pub use crate::{ iter_build::{trie_visit, ProcessEncodedNode, TrieBuilder, TrieRoot, TrieRootUnhashed}, iterator::{TrieDBNodeIterator, TrieDBRawIterator}, node_codec::{NodeCodec, Partial}, trie_codec::{decode_compact, decode_compact_from_iter, encode_compact}, }; -pub use hash_db::{HashDB, HashDBRef, Hasher}; #[cfg(feature = "std")] pub use crate::iter_build::TrieRootPrint; @@ -162,9 +176,9 @@ pub trait Query { /// If a cache is used, [`Self::Key`] and [`Self::NodeOwned`] are possible /// values. Otherwise only [`Self::EncodedNode`] is a possible value. #[cfg_attr(feature = "std", derive(Debug))] -pub enum TrieAccess<'a, H> { +pub enum TrieAccess<'a, H, L> { /// The given [`NodeOwned`] was accessed using its `hash`. - NodeOwned { hash: H, node_owned: &'a NodeOwned }, + NodeOwned { hash: H, node_owned: &'a NodeOwned }, /// The given `encoded_node` was accessed using its `hash`. EncodedNode { hash: H, encoded_node: rstd::borrow::Cow<'a, [u8]> }, /// The given `value` was accessed using its `hash`. @@ -173,6 +187,13 @@ pub enum TrieAccess<'a, H> { /// /// Should map to [`RecordedForKey::Value`] when checking the recorder. Value { hash: H, value: rstd::borrow::Cow<'a, [u8]>, full_key: &'a [u8] }, + /// A value was accessed that is stored inline a node. + /// + /// As the value is stored inline there is no need to separately record the value as it is part + /// of a node. The given `full_key` is the key to access this value in the trie. + /// + /// Should map to [`RecordedForKey::Value`] when checking the recorder. + InlineValue { full_key: &'a [u8] }, /// The hash of the value for the given `full_key` was accessed. /// /// Should map to [`RecordedForKey::Hash`] when checking the recorder. @@ -184,7 +205,7 @@ pub enum TrieAccess<'a, H> { } /// Result of [`TrieRecorder::trie_nodes_recorded_for_key`]. -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum RecordedForKey { /// We recorded all trie nodes up to the value for a storage key. /// @@ -223,12 +244,12 @@ impl RecordedForKey { /// /// To build a trie proof a recorder is required that records all trie accesses. These recorded trie /// accesses can then be used to create the proof. -pub trait TrieRecorder { +pub trait TrieRecorder { /// Record the given [`TrieAccess`]. /// /// Depending on the [`TrieAccess`] a call of [`Self::trie_nodes_recorded_for_key`] afterwards /// must return the correct recorded state. - fn record<'a>(&mut self, access: TrieAccess<'a, H>); + fn record<'a>(&mut self, access: TrieAccess<'a, H, L>); /// Check if we have recorded any trie nodes for the given `key`. /// @@ -251,6 +272,11 @@ pub trait Trie { /// Return the root of the trie. fn root(&self) -> &TrieHash; + /// Return the root location of the trie if it was set. + fn root_location(&self) -> L::Location { + Default::default() + } + /// Is the trie empty? fn is_empty(&self) -> bool { *self.root() == L::Codec::hashed_null_node() @@ -277,6 +303,17 @@ pub trait Trie { query: Q, ) -> Result, TrieHash, CError>; + /// Look up the [`MerkleValue`] of the node that is the closest descendant for the provided + /// key. + /// + /// When the provided key leads to a node, then the merkle value of that node + /// is returned. 
However, if the key does not lead to a node, then the merkle value + /// of the closest descendant is returned. `None` if no such descendant exists. + fn lookup_first_descendant( + &self, + key: &[u8], + ) -> Result>>, TrieHash, CError>; + /// Returns a depth-first iterator over the elements of trie. fn iter<'a>( &'a self, @@ -296,183 +333,26 @@ pub trait Trie { >; } -/// A key-value datastore implemented as a database-backed modified Merkle tree. -pub trait TrieMut { - /// Return the root of the trie. - fn root(&mut self) -> &TrieHash; - - /// Is the trie empty? - fn is_empty(&self) -> bool; - - /// Does the trie contain a given key? - fn contains(&self, key: &[u8]) -> Result, CError> { - self.get(key).map(|x| x.is_some()) - } - - /// What is the value of the given key in this trie? - fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result, TrieHash, CError> - where - 'a: 'key; - - /// Insert a `key`/`value` pair into the trie. An empty value is equivalent to removing - /// `key` from the trie. Returns the old value associated with this key, if it existed. - fn insert( - &mut self, - key: &[u8], - value: &[u8], - ) -> Result>, TrieHash, CError>; - - /// Remove a `key` from the trie. Equivalent to making it equal to the empty - /// value. Returns the old value associated with this key, if it existed. - fn remove(&mut self, key: &[u8]) -> Result>, TrieHash, CError>; -} - /// A trie iterator that also supports random access (`seek()`). pub trait TrieIterator: Iterator { /// Position the iterator on the first element with key >= `key` fn seek(&mut self, key: &[u8]) -> Result<(), TrieHash, CError>; } +/// Extending the `TrieIterator` trait with `DoubleEndedIterator` trait. +pub trait TrieDoubleEndedIterator: TrieIterator + DoubleEndedIterator {} + /// Trie types #[derive(PartialEq, Clone)] #[cfg_attr(feature = "std", derive(Debug))] pub enum TrieSpec { /// Generic trie. Generic, - /// Secure trie. - Secure, - /// Secure trie with fat database. - Fat, } impl Default for TrieSpec { fn default() -> TrieSpec { - TrieSpec::Secure - } -} - -/// Trie factory. -#[derive(Default, Clone)] -pub struct TrieFactory { - spec: TrieSpec, -} - -/// All different kinds of tries. -/// This is used to prevent a heap allocation for every created trie. -pub enum TrieKinds<'db, 'cache, L: TrieLayout> { - /// A generic trie db. - Generic(TrieDB<'db, 'cache, L>), - /// A secure trie db. - Secure(SecTrieDB<'db, 'cache, L>), - /// A fat trie db. - Fat(FatDB<'db, 'cache, L>), -} - -// wrapper macro for making the match easier to deal with. -macro_rules! 
wrapper { - ($me: ident, $f_name: ident, $($param: ident),*) => { - match *$me { - TrieKinds::Generic(ref t) => t.$f_name($($param),*), - TrieKinds::Secure(ref t) => t.$f_name($($param),*), - TrieKinds::Fat(ref t) => t.$f_name($($param),*), - } - } -} - -impl<'db, 'cache, L: TrieLayout> Trie for TrieKinds<'db, 'cache, L> { - fn root(&self) -> &TrieHash { - wrapper!(self, root,) - } - - fn is_empty(&self) -> bool { - wrapper!(self, is_empty,) - } - - fn contains(&self, key: &[u8]) -> Result, CError> { - wrapper!(self, contains, key) - } - - fn get_hash(&self, key: &[u8]) -> Result>, TrieHash, CError> { - wrapper!(self, get_hash, key) - } - - fn get_with>( - &self, - key: &[u8], - query: Q, - ) -> Result, TrieHash, CError> { - wrapper!(self, get_with, key, query) - } - - fn iter<'a>( - &'a self, - ) -> Result< - Box, CError>> + 'a>, - TrieHash, - CError, - > { - wrapper!(self, iter,) - } - - fn key_iter<'a>( - &'a self, - ) -> Result< - Box, CError>> + 'a>, - TrieHash, - CError, - > { - wrapper!(self, key_iter,) - } -} - -impl TrieFactory { - /// Creates new factory. - pub fn new(spec: TrieSpec) -> Self { - TrieFactory { spec } - } - - /// Create new immutable instance of Trie. - pub fn readonly<'db, 'cache, L: TrieLayout>( - &self, - db: &'db dyn HashDBRef, - root: &'db TrieHash, - ) -> TrieKinds<'db, 'cache, L> { - match self.spec { - TrieSpec::Generic => TrieKinds::Generic(TrieDBBuilder::new(db, root).build()), - TrieSpec::Secure => TrieKinds::Secure(SecTrieDB::new(db, root)), - TrieSpec::Fat => TrieKinds::Fat(FatDB::new(db, root)), - } - } - - /// Create new mutable instance of Trie. - pub fn create<'db, L: TrieLayout + 'db>( - &self, - db: &'db mut dyn HashDB, - root: &'db mut TrieHash, - ) -> Box + 'db> { - match self.spec { - TrieSpec::Generic => Box::new(TrieDBMutBuilder::::new(db, root).build()), - TrieSpec::Secure => Box::new(SecTrieDBMut::::new(db, root)), - TrieSpec::Fat => Box::new(FatDBMut::::new(db, root)), - } - } - - /// Create new mutable instance of trie and check for errors. - pub fn from_existing<'db, L: TrieLayout + 'db>( - &self, - db: &'db mut dyn HashDB, - root: &'db mut TrieHash, - ) -> Box + 'db> { - match self.spec { - TrieSpec::Generic => Box::new(TrieDBMutBuilder::::from_existing(db, root).build()), - TrieSpec::Secure => Box::new(SecTrieDBMut::::from_existing(db, root)), - TrieSpec::Fat => Box::new(FatDBMut::::from_existing(db, root)), - } - } - - /// Returns true iff the trie DB is a fat DB (allows enumeration of keys). - pub fn is_fat(&self) -> bool { - self.spec == TrieSpec::Fat + TrieSpec::Generic } } @@ -494,21 +374,29 @@ pub trait TrieLayout { type Hash: Hasher; /// Codec to use (needs to match hasher and nibble ops). type Codec: NodeCodec::Out>; + type Location: Location; } +/// Trait alias for requirement of location with `TrieLayout`. +pub trait Location: Copy + Default + Eq + PartialEq + MaybeDebug {} + +impl Location for T {} + /// This trait associates a trie definition with preferred methods. /// It also contains own default implementations and can be /// used to allow switching implementation. pub trait TrieConfiguration: Sized + TrieLayout { /// Operation to build a trie db from its ordered iterator over its key/values. 
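// `TrieLayout` now carries a `Location` associated type: an optional position hint
// handed back by the backing `node_db` (the `ChildReference<_, ()>` uses in `iter_build`
// above suggest that plain hash-keyed backends such as `MemoryDB` work with a unit-like
// location). Generic code can rely on the bounds of the `Location` trait alias defined
// above; a minimal sketch, with the function name being illustrative:
fn example_default_location<T>() -> T::Location
where
	T: subtrie::TrieLayout,
	T::Location: std::fmt::Debug,
{
	// `Location: Copy + Default + Eq`, so a fresh "no position" value is always
	// available and can be compared or debug-printed.
	let loc = T::Location::default();
	assert_eq!(loc, T::Location::default());
	println!("default location: {:?}", loc);
	loc
}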
- fn trie_build(db: &mut DB, input: I) -> ::Out + fn trie_build( + db: &mut memory_db::MemoryDB, DBValue>, + input: I, + ) -> ::Out where - DB: HashDB, I: IntoIterator, A: AsRef<[u8]> + Ord, B: AsRef<[u8]>, { - let mut cb = TrieBuilder::::new(db); + let mut cb = TrieBuilder::>::new(db); trie_visit::(input.into_iter(), &mut cb); cb.root.unwrap_or_default() } @@ -560,11 +448,11 @@ pub type CError = <::Codec as NodeCodec>::Error; /// A value as cached by the [`TrieCache`]. #[derive(Clone, Debug)] -pub enum CachedValue { +pub enum CachedValue { /// The value doesn't exist in the trie. NonExisting, - /// We cached the hash, because we did not yet accessed the data. - ExistingHash(H), + /// We cached the hash and location, because we did not yet accessed the data. + ExistingHash(H, L), /// The value exists in the trie. Existing { /// The hash of the value. @@ -578,7 +466,7 @@ pub enum CachedValue { }, } -impl CachedValue { +impl CachedValue { /// Returns the data of the value. /// /// If a value doesn't exist in the trie or only the value hash is cached, this function returns @@ -596,36 +484,24 @@ impl CachedValue { /// Returns only `None` when the value doesn't exist. pub fn hash(&self) -> Option { match self { - Self::ExistingHash(hash) | Self::Existing { hash, .. } => Some(*hash), + Self::ExistingHash(hash, _) | Self::Existing { hash, .. } => Some(*hash), Self::NonExisting => None, } } } -impl From<(Bytes, H)> for CachedValue { +impl From<(Bytes, H)> for CachedValue { fn from(value: (Bytes, H)) -> Self { Self::Existing { hash: value.1, data: value.0.into() } } } -impl From for CachedValue { - fn from(value: H) -> Self { - Self::ExistingHash(value) - } -} - -impl From> for CachedValue { +impl From> for CachedValue { fn from(value: Option<(Bytes, H)>) -> Self { value.map_or(Self::NonExisting, |v| Self::Existing { hash: v.1, data: v.0.into() }) } } -impl From> for CachedValue { - fn from(value: Option) -> Self { - value.map_or(Self::NonExisting, |v| Self::ExistingHash(v)) - } -} - /// A cache that can be used to speed-up certain operations when accessing the trie. /// /// The [`TrieDB`]/[`TrieDBMut`] by default are working with the internal hash-db in a non-owning @@ -642,7 +518,7 @@ impl From> for CachedValue { /// different values under the same key, it up to the cache implementation to ensure that the /// correct value is returned. As each trie has a different root, this root can be used to /// differentiate values under the same key. -pub trait TrieCache { +pub trait TrieCache { /// Lookup value for the given `key`. /// /// Returns the `None` if the `key` is unknown or otherwise `Some(_)` with the associated @@ -656,7 +532,7 @@ pub trait TrieCache { /// The cache can be used for different tries, aka with different roots. This means /// that the cache implementation needs to take care of always returning the correct value /// for the current trie root. - fn lookup_value_for_key(&mut self, key: &[u8]) -> Option<&CachedValue>; + fn lookup_value_for_key(&mut self, key: &[u8]) -> Option<&CachedValue>; /// Cache the given `value` for the given `key`. /// @@ -665,7 +541,7 @@ pub trait TrieCache { /// The cache can be used for different tries, aka with different roots. This means /// that the cache implementation needs to take care of caching `value` for the current /// trie root. - fn cache_value_for_key(&mut self, key: &[u8], value: CachedValue); + fn cache_value_for_key(&mut self, key: &[u8], value: CachedValue); /// Get or insert a [`NodeOwned`]. 
/// @@ -677,11 +553,15 @@ pub trait TrieCache { fn get_or_insert_node( &mut self, hash: NC::HashOut, - fetch_node: &mut dyn FnMut() -> Result, NC::HashOut, NC::Error>, - ) -> Result<&NodeOwned, NC::HashOut, NC::Error>; + location: L, + fetch_node: &mut dyn FnMut() -> Result, NC::HashOut, NC::Error>, + ) -> Result<&NodeOwned, NC::HashOut, NC::Error>; /// Get the [`NodeOwned`] that corresponds to the given `hash`. - fn get_node(&mut self, hash: &NC::HashOut) -> Option<&NodeOwned>; + fn get_node(&mut self, hash: &NC::HashOut, location: L) -> Option<&NodeOwned>; + + /// Put a new node. This is used to clear location info for existing nodes with the same hash. + fn insert_new_node(&mut self, hash: &NC::HashOut); } /// A container for storing bytes. @@ -738,3 +618,18 @@ impl From for BytesWeak { Self(rstd::sync::Arc::downgrade(&bytes.0)) } } + +/// Either the `hash` or `value` of a node depending on its size. +/// +/// If the size of the node `value` is bigger or equal than `MAX_INLINE_VALUE` the `hash` is +/// returned. +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum MerkleValue { + /// The merkle value is the node data itself when the + /// node data is smaller than `MAX_INLINE_VALUE`. + /// + /// Note: The case of inline nodes. + Node(Vec), + /// The merkle value is the hash of the node. + Hash(H), +} diff --git a/trie-db/src/lookup.rs b/subtrie/src/lookup.rs similarity index 58% rename from trie-db/src/lookup.rs rename to subtrie/src/lookup.rs index 5e34055f..45accfeb 100644 --- a/trie-db/src/lookup.rs +++ b/subtrie/src/lookup.rs @@ -12,30 +12,32 @@ // See the License for the specific language governing permissions and // limitations under the License. -//! Trie lookup via HashDB. +//! Trie lookup via NodeDB. use crate::{ nibble::NibbleSlice, node::{decode_hash, Node, NodeHandle, NodeHandleOwned, NodeOwned, Value, ValueOwned}, node_codec::NodeCodec, - rstd::boxed::Box, - Bytes, CError, CachedValue, DBValue, Query, RecordedForKey, Result, TrieAccess, TrieCache, - TrieError, TrieHash, TrieLayout, TrieRecorder, + node_db::{Hasher, NodeDB, Prefix}, + rstd::{boxed::Box, vec::Vec}, + Bytes, CError, CachedValue, DBValue, MerkleValue, Query, RecordedForKey, Result, TrieAccess, + TrieCache, TrieError, TrieHash, TrieLayout, TrieRecorder, }; -use hash_db::{HashDBRef, Hasher, Prefix}; /// Trie lookup helper object. pub struct Lookup<'a, 'cache, L: TrieLayout, Q: Query> { /// database to query from. - pub db: &'a dyn HashDBRef, + pub db: &'a dyn NodeDB, /// Query object to record nodes and transform data. pub query: Q, /// Hash to start at pub hash: TrieHash, + /// Optionally location to start at. + pub location: L::Location, /// Optional cache that should be used to speed up the lookup. - pub cache: Option<&'cache mut dyn TrieCache>, + pub cache: Option<&'cache mut dyn TrieCache>, /// Optional recorder that will be called to record all trie accesses. - pub recorder: Option<&'cache mut dyn TrieRecorder>>, + pub recorder: Option<&'cache mut dyn TrieRecorder, L::Location>>, } impl<'a, 'cache, L, Q> Lookup<'a, 'cache, L, Q> @@ -50,19 +52,25 @@ where /// /// Returns the bytes representing the value. 
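// The lookup implemented below backs `Trie::lookup_first_descendant`, which resolves the
// closest descendant of a key to a `MerkleValue`: the hash of that node, or the node data
// itself when the node is small enough to be inlined in its parent. A minimal usage
// sketch from a downstream crate's point of view, assuming a read-only `trie`; the key
// and messages are illustrative and errors are unwrapped for brevity.
fn example_first_descendant<T: subtrie::TrieLayout>(trie: &subtrie::TrieDB<T>, key: &[u8]) {
	use subtrie::{MerkleValue, Trie};

	match trie.lookup_first_descendant(key).unwrap() {
		Some(MerkleValue::Hash(hash)) => println!("descendant hash: {:x?}", hash.as_ref()),
		Some(MerkleValue::Node(data)) => println!("inline descendant node: {:x?}", data),
		None => println!("no descendant under {:x?}", key),
	}
}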
fn load_value( - v: Value, + v: Value, prefix: Prefix, full_key: &[u8], - db: &dyn HashDBRef, - recorder: &mut Option<&mut dyn TrieRecorder>>, + db: &dyn NodeDB, + recorder: &mut Option<&mut dyn TrieRecorder, L::Location>>, query: Q, ) -> Result, CError> { match v { - Value::Inline(value) => Ok(query.decode(&value)), - Value::Node(hash) => { + Value::Inline(value) => { + if let Some(recorder) = recorder { + recorder.record(TrieAccess::InlineValue { full_key }); + } + + Ok(query.decode(&value)) + }, + Value::Node(hash, location) => { let mut res = TrieHash::::default(); res.as_mut().copy_from_slice(hash); - if let Some(value) = db.get(&res, prefix) { + if let Some((value, _)) = db.get(&res, prefix, location) { if let Some(recorder) = recorder { recorder.record(TrieAccess::Value { hash: res, @@ -86,19 +94,25 @@ where /// /// Returns the bytes representing the value and its hash. fn load_owned_value( - v: ValueOwned>, + v: ValueOwned, L::Location>, prefix: Prefix, full_key: &[u8], - cache: &mut dyn crate::TrieCache, - db: &dyn HashDBRef, - recorder: &mut Option<&mut dyn TrieRecorder>>, + cache: &mut dyn crate::TrieCache, + db: &dyn NodeDB, + recorder: &mut Option<&mut dyn TrieRecorder, L::Location>>, ) -> Result<(Bytes, TrieHash), TrieHash, CError> { match v { - ValueOwned::Inline(value, hash) => Ok((value.clone(), hash)), - ValueOwned::Node(hash) => { - let node = cache.get_or_insert_node(hash, &mut || { - let value = db - .get(&hash, prefix) + ValueOwned::Inline(value, hash) => { + if let Some(recorder) = recorder { + recorder.record(TrieAccess::InlineValue { full_key }); + } + + Ok((value.clone(), hash)) + }, + ValueOwned::Node(hash, location) => { + let node = cache.get_or_insert_node(hash, location, &mut || { + let (value, _) = db + .get(&hash, prefix, location) .ok_or_else(|| Box::new(TrieError::IncompleteDatabase(hash)))?; Ok(NodeOwned::Value(value.into(), hash)) @@ -125,14 +139,240 @@ where } } - fn record<'b>(&mut self, get_access: impl FnOnce() -> TrieAccess<'b, TrieHash>) + fn record<'b>(&mut self, get_access: impl FnOnce() -> TrieAccess<'b, TrieHash, L::Location>) where TrieHash: 'b, + L::Location: 'b, { if let Some(recorder) = self.recorder.as_mut() { recorder.record(get_access()); } } + /// Look up the merkle value (hash) of the node that is the closest descendant for the provided + /// key. + /// + /// When the provided key leads to a node, then the merkle value (hash) of that node + /// is returned. However, if the key does not lead to a node, then the merkle value + /// of the closest descendant is returned. `None` if no such descendant exists. + pub fn lookup_first_descendant( + mut self, + full_key: &[u8], + nibble_key: NibbleSlice, + ) -> Result>>, TrieHash, CError> { + let mut partial = nibble_key; + let mut hash = self.hash; + let mut location = self.location; + let mut key_nibbles = 0; + + let mut cache = self.cache.take(); + + // this loop iterates through non-inline nodes. + for depth in 0.. { + // Ensure the owned node reference lives long enough. + // Value is never read, but the reference is. + let mut _owned_node = NodeOwned::Empty; + + // The binary encoded data of the node fetched from the database. + // + // Populated by `get_owned_node` to avoid one extra allocation by not + // calling `NodeOwned::to_encoded` when computing the hash of inlined nodes. + let mut node_data = Vec::new(); + + // Get the owned node representation from the database. 
+ let mut get_owned_node = |depth: i32| { + let (data, locations) = + match self.db.get(&hash, nibble_key.mid(key_nibbles).left(), location) { + Some(value) => value, + None => + return Err(Box::new(match depth { + 0 => TrieError::InvalidStateRoot(hash), + _ => TrieError::IncompleteDatabase(hash), + })), + }; + + let decoded = match L::Codec::decode(&data[..], &locations) { + Ok(node) => node, + Err(e) => return Err(Box::new(TrieError::DecoderError(hash, e))), + }; + + let owned = decoded.to_owned_node::()?; + node_data = data; + Ok(owned) + }; + + let mut node = if let Some(cache) = &mut cache { + let node = + cache.get_or_insert_node(hash, location, &mut || get_owned_node(depth))?; + + self.record(|| TrieAccess::NodeOwned { hash, node_owned: node }); + + node + } else { + _owned_node = get_owned_node(depth)?; + + self.record(|| TrieAccess::EncodedNode { + hash, + encoded_node: node_data.as_slice().into(), + }); + + &_owned_node + }; + + // this loop iterates through all inline children (usually max 1) + // without incrementing the depth. + let mut is_inline = false; + loop { + let next_node = match node { + NodeOwned::Leaf(slice, _) => { + // The leaf slice can be longer than remainder of the provided key + // (descendent), but not the other way around. + if !slice.starts_with_slice(&partial) { + self.record(|| TrieAccess::NonExisting { full_key }); + return Ok(None) + } + + if partial.len() != slice.len() { + self.record(|| TrieAccess::NonExisting { full_key }); + } + + let res = is_inline + .then(|| MerkleValue::Node(node_data)) + .unwrap_or_else(|| MerkleValue::Hash(hash)); + return Ok(Some(res)) + }, + NodeOwned::Extension(slice, item) => { + if partial.len() < slice.len() { + self.record(|| TrieAccess::NonExisting { full_key }); + + // Extension slice can be longer than remainder of the provided key + // (descendent), ensure the extension slice starts with the remainder + // of the provided key. + return if slice.starts_with_slice(&partial) { + let res = is_inline + .then(|| MerkleValue::Node(node_data)) + .unwrap_or_else(|| MerkleValue::Hash(hash)); + Ok(Some(res)) + } else { + Ok(None) + } + } + + // Remainder of the provided key is longer than the extension slice, + // must advance the node iteration if and only if keys share + // a common prefix. + if partial.starts_with_vec(&slice) { + // Empties the partial key if the extension slice is longer. + partial = partial.mid(slice.len()); + key_nibbles += slice.len(); + item + } else { + self.record(|| TrieAccess::NonExisting { full_key }); + + return Ok(None) + } + }, + NodeOwned::Branch(children, value) => + if partial.is_empty() { + if value.is_none() { + self.record(|| TrieAccess::NonExisting { full_key }); + } + let res = is_inline + .then(|| MerkleValue::Node(node_data)) + .unwrap_or_else(|| MerkleValue::Hash(hash)); + return Ok(Some(res)) + } else { + match &children[partial.at(0) as usize] { + Some(x) => { + partial = partial.mid(1); + key_nibbles += 1; + x + }, + None => { + self.record(|| TrieAccess::NonExisting { full_key }); + + return Ok(None) + }, + } + }, + NodeOwned::NibbledBranch(slice, children, value) => { + // Not enough remainder key to continue the search. + if partial.len() < slice.len() { + self.record(|| TrieAccess::NonExisting { full_key }); + + // Branch slice starts with the remainder key, there's nothing to + // advance. 
+ return if slice.starts_with_slice(&partial) { + let res = is_inline + .then(|| MerkleValue::Node(node_data)) + .unwrap_or_else(|| MerkleValue::Hash(hash)); + Ok(Some(res)) + } else { + Ok(None) + } + } + + // Partial key is longer or equal than the branch slice. + // Ensure partial key starts with the branch slice. + if !partial.starts_with_vec(&slice) { + self.record(|| TrieAccess::NonExisting { full_key }); + return Ok(None) + } + + // Partial key starts with the branch slice. + if partial.len() == slice.len() { + if value.is_none() { + self.record(|| TrieAccess::NonExisting { full_key }); + } + + let res = is_inline + .then(|| MerkleValue::Node(node_data)) + .unwrap_or_else(|| MerkleValue::Hash(hash)); + return Ok(Some(res)) + } else { + match &children[partial.at(slice.len()) as usize] { + Some(x) => { + partial = partial.mid(slice.len() + 1); + key_nibbles += slice.len() + 1; + x + }, + None => { + self.record(|| TrieAccess::NonExisting { full_key }); + + return Ok(None) + }, + } + } + }, + NodeOwned::Empty => { + self.record(|| TrieAccess::NonExisting { full_key }); + + return Ok(None) + }, + NodeOwned::Value(_, _) => { + unreachable!( + "`NodeOwned::Value` can not be reached by using the hash of a node. \ + `NodeOwned::Value` is only constructed when loading a value into memory, \ + which needs to have a different hash than any node; qed", + ) + }, + }; + + // check if new node data is inline or hash. + match next_node { + NodeHandleOwned::Hash(new_hash, new_location) => { + hash = *new_hash; + location = *new_location; + break + }, + NodeHandleOwned::Inline(inline_node) => { + node = &inline_node; + is_inline = true; + }, + } + } + } + Ok(None) + } /// Look up the given `nibble_key`. /// @@ -167,8 +407,17 @@ where full_key, |v, _, full_key, _, recorder, _| { Ok(match v { - Value::Inline(v) => L::Hash::hash(&v), - Value::Node(hash_bytes) => { + Value::Inline(v) => { + if let Some(recoder) = recorder.as_mut() { + // We can record this as `InlineValue`, even we are just returning + // the `hash`. This is done to prevent requiring to re-record this + // key. + recoder.record(TrieAccess::InlineValue { full_key }); + } + + L::Hash::hash(&v) + }, + Value::Node(hash_bytes, _location) => { if let Some(recoder) = recorder.as_mut() { recoder.record(TrieAccess::Hash { full_key }); } @@ -190,7 +439,7 @@ where mut self, full_key: &[u8], nibble_key: NibbleSlice, - cache: &mut dyn crate::TrieCache, + cache: &mut dyn crate::TrieCache, ) -> Result>, TrieHash, CError> { let value_cache_allowed = self .recorder @@ -211,21 +460,31 @@ where full_key, cache, |value, _, full_key, _, _, recorder| match value { - ValueOwned::Inline(value, hash) => Ok((hash, Some(value.clone()))), - ValueOwned::Node(hash) => { + ValueOwned::Inline(value, hash) => { + if let Some(recoder) = recorder.as_mut() { + // We can record this as `InlineValue`, even we are just returning + // the `hash`. This is done to prevent requiring to re-record this + // key. 
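+ // The inline value bytes are contained in the trie node itself, which was already
+ // recorded when the node was traversed, so the marker alone is enough.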
+ recoder.record(TrieAccess::InlineValue { full_key }); + } + + Ok((hash, Some(value.clone()), Default::default())) + }, + ValueOwned::Node(hash, location) => { if let Some(recoder) = recorder.as_mut() { recoder.record(TrieAccess::Hash { full_key }); } - Ok((hash, None)) + Ok((hash, None, location)) }, }, )?; match &hash_and_value { - Some((hash, Some(value))) => + Some((hash, Some(value), _location)) => cache.cache_value_for_key(full_key, (value.clone(), *hash).into()), - Some((hash, None)) => cache.cache_value_for_key(full_key, (*hash).into()), + Some((hash, None, location)) => + cache.cache_value_for_key(full_key, CachedValue::ExistingHash(*hash, *location)), None => cache.cache_value_for_key(full_key, CachedValue::NonExisting), } @@ -243,7 +502,7 @@ where mut self, full_key: &[u8], nibble_key: NibbleSlice, - cache: &mut dyn crate::TrieCache, + cache: &mut dyn crate::TrieCache, ) -> Result, TrieHash, CError> { let trie_nodes_recorded = self.recorder.as_ref().map(|r| r.trie_nodes_recorded_for_key(full_key)); @@ -260,7 +519,7 @@ where }; let lookup_data = |lookup: &mut Self, - cache: &mut dyn crate::TrieCache| + cache: &mut dyn crate::TrieCache| -> Result, TrieHash, CError> { let data = lookup.look_up_with_cache_internal( nibble_key, @@ -277,11 +536,11 @@ where let res = match value_cache_allowed.then(|| cache.lookup_value_for_key(full_key)).flatten() { Some(CachedValue::NonExisting) => None, - Some(CachedValue::ExistingHash(hash)) => { + Some(CachedValue::ExistingHash(hash, location)) => { let data = Self::load_owned_value( // If we only have the hash cached, this can only be a value node. // For inline nodes we cache them directly as `CachedValue::Existing`. - ValueOwned::Node(*hash), + ValueOwned::Node(*hash, *location), nibble_key.original_data_as_prefix(), full_key, cache, @@ -324,33 +583,35 @@ where &mut self, nibble_key: NibbleSlice, full_key: &[u8], - cache: &mut dyn crate::TrieCache, + cache: &mut dyn crate::TrieCache, load_value_owned: impl Fn( - ValueOwned>, + ValueOwned, L::Location>, Prefix, &[u8], - &mut dyn crate::TrieCache, - &dyn HashDBRef, - &mut Option<&mut dyn TrieRecorder>>, + &mut dyn crate::TrieCache, + &dyn NodeDB, + &mut Option<&mut dyn TrieRecorder, L::Location>>, ) -> Result, CError>, ) -> Result, TrieHash, CError> { let mut partial = nibble_key; let mut hash = self.hash; + let mut location = self.location; let mut key_nibbles = 0; // this loop iterates through non-inline nodes. for depth in 0.. 
{ - let mut node = cache.get_or_insert_node(hash, &mut || { - let node_data = match self.db.get(&hash, nibble_key.mid(key_nibbles).left()) { - Some(value) => value, - None => - return Err(Box::new(match depth { - 0 => TrieError::InvalidStateRoot(hash), - _ => TrieError::IncompleteDatabase(hash), - })), - }; - - let decoded = match L::Codec::decode(&node_data[..]) { + let mut node = cache.get_or_insert_node(hash, location, &mut || { + let (node_data, locations) = + match self.db.get(&hash, nibble_key.mid(key_nibbles).left(), location) { + Some(value) => value, + None => + return Err(Box::new(match depth { + 0 => TrieError::InvalidStateRoot(hash), + _ => TrieError::IncompleteDatabase(hash), + })), + }; + + let decoded = match L::Codec::decode(&node_data[..], &locations) { Ok(node) => node, Err(e) => return Err(Box::new(TrieError::DecoderError(hash, e))), }; @@ -367,7 +628,6 @@ where NodeOwned::Leaf(slice, value) => return if partial == *slice { let value = (*value).clone(); - drop(node); load_value_owned( value, nibble_key.original_data_as_prefix(), @@ -395,7 +655,6 @@ where NodeOwned::Branch(children, value) => if partial.is_empty() { return if let Some(value) = value.clone() { - drop(node); load_value_owned( value, nibble_key.original_data_as_prefix(), @@ -433,7 +692,6 @@ where if partial.len() == slice.len() { return if let Some(value) = value.clone() { - drop(node); load_value_owned( value, nibble_key.original_data_as_prefix(), @@ -479,8 +737,9 @@ where // check if new node data is inline or hash. match next_node { - NodeHandleOwned::Hash(new_hash) => { + NodeHandleOwned::Hash(new_hash, new_location) => { hash = *new_hash; + location = *new_location; break }, NodeHandleOwned::Inline(inline_node) => { @@ -503,28 +762,30 @@ where nibble_key: NibbleSlice, full_key: &[u8], load_value: impl Fn( - Value, + Value, Prefix, &[u8], - &dyn HashDBRef, - &mut Option<&mut dyn TrieRecorder>>, + &dyn NodeDB, + &mut Option<&mut dyn TrieRecorder, L::Location>>, Q, ) -> Result, CError>, ) -> Result, TrieHash, CError> { let mut partial = nibble_key; let mut hash = self.hash; + let mut location = self.location; let mut key_nibbles = 0; // this loop iterates through non-inline nodes. for depth in 0.. { - let node_data = match self.db.get(&hash, nibble_key.mid(key_nibbles).left()) { - Some(value) => value, - None => - return Err(Box::new(match depth { - 0 => TrieError::InvalidStateRoot(hash), - _ => TrieError::IncompleteDatabase(hash), - })), - }; + let (node_data, locations) = + match self.db.get(&hash, nibble_key.mid(key_nibbles).left(), location) { + Some(value) => value, + None => + return Err(Box::new(match depth { + 0 => TrieError::InvalidStateRoot(hash), + _ => TrieError::IncompleteDatabase(hash), + })), + }; self.record(|| TrieAccess::EncodedNode { hash, @@ -535,7 +796,7 @@ where // without incrementing the depth. 
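+ // Inline children are decoded directly from the parent node's bytes, so they never
+ // trigger an additional database read.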
let mut node_data = &node_data[..]; loop { - let decoded = match L::Codec::decode(node_data) { + let decoded = match L::Codec::decode(node_data, &locations) { Ok(node) => node, Err(e) => return Err(Box::new(TrieError::DecoderError(hash, e))), }; @@ -585,7 +846,8 @@ where Ok(None) } } else { - match children[partial.at(0) as usize] { + let i = partial.at(0) as usize; + match children[i] { Some(x) => { partial = partial.mid(1); key_nibbles += 1; @@ -622,7 +884,8 @@ where Ok(None) } } else { - match children[partial.at(slice.len()) as usize] { + let i = partial.at(slice.len()) as usize; + match children[i] { Some(x) => { partial = partial.mid(slice.len() + 1); key_nibbles += slice.len() + 1; @@ -645,9 +908,10 @@ where // check if new node data is inline or hash. match next_node { - NodeHandle::Hash(data) => { + NodeHandle::Hash(data, l) => { hash = decode_hash::(data) .ok_or_else(|| Box::new(TrieError::InvalidHash(hash, data.to_vec())))?; + location = l; break }, NodeHandle::Inline(data) => { diff --git a/subtrie/src/mem_tree_db.rs b/subtrie/src/mem_tree_db.rs new file mode 100644 index 00000000..b7c17f0e --- /dev/null +++ b/subtrie/src/mem_tree_db.rs @@ -0,0 +1,367 @@ +// Copyright 2017-2020 Parity Technologies +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Flat memory-based `NodeDB` implementation. + +use std::collections::HashMap; + +use crate::{ + node_db::{Hasher, NodeDB, NodeDBMut, Prefix}, + Changeset, NewChangesetNode, +}; + +/// Node location which is just an index into the `nodes` vector. +pub type Location = Option; + +/// Tree based `NodeDB` implementation. +#[derive(Clone)] +pub struct MemTreeDB +where + H: Hasher, +{ + nodes: Vec>, + roots: HashMap, + hashed_null_node: H::Out, + null_node_data: Vec, +} + +#[derive(Clone)] +enum NodeEntry { + Live { key: H, data: Vec, children: Vec, rc: u32 }, + Removed, +} + +impl Default for MemTreeDB +where + H: Hasher, +{ + fn default() -> Self { + Self::from_null_node(&[0u8][..], [0u8][..].into()) + } +} + +impl MemTreeDB +where + H: Hasher, +{ + /// Create a new `MemoryDB` from a given null key/data + pub fn from_null_node(null_key: &[u8], null_node_data: &[u8]) -> Self { + MemTreeDB { + nodes: vec![], + roots: HashMap::default(), + hashed_null_node: H::hash(null_key), + null_node_data: null_node_data.to_vec(), + } + } + + /// Create a new instance of `Self`. + pub fn new(data: &[u8]) -> Self { + Self::from_null_node(data, data.into()) + } + + pub fn clear(&mut self) { + self.nodes.clear(); + self.roots.clear(); + } + + pub fn remove_root(&mut self, key: &H::Out) { + let Some(location) = self.roots.get(key) else { + return; + }; + + if self.remove_tree(*location) { + self.roots.remove(key); + } + } + + pub fn remove_node(&mut self, k: &H::Out) { + let rem_root = self.roots.remove(k); + #[cfg(debug_assertions)] + { + for (i, node) in self.nodes.iter_mut().enumerate() { + if matches!(node, NodeEntry::Live { key, .. 
} if key == k) { + assert!(rem_root.map(|r| r == i).unwrap_or(false)); + *node = NodeEntry::Removed; + } + } + } + if let Some(rem_root_ix) = rem_root { + self.nodes[rem_root_ix] = NodeEntry::Removed; + } + } + + pub fn test_remove_node(&mut self, k: &H::Out) { + self.roots.remove(k); + for node in self.nodes.iter_mut() { + if matches!(node, NodeEntry::Live { key, .. } if key == k) { + *node = NodeEntry::Removed; + } + } + } + + fn remove_tree(&mut self, location: usize) -> bool { + let entry = self.nodes.get_mut(location).unwrap(); + match entry { + NodeEntry::Live { rc, children, .. } => + if *rc == 1 { + let children = std::mem::take(children); + *entry = NodeEntry::Removed; + for c in children { + self.remove_tree(c); + } + true + } else { + *rc -= 1; + false + }, + NodeEntry::Removed => { + panic!("Accessing removed node"); + }, + } + } + + pub fn is_empty(&self) -> bool { + self.roots.is_empty() + } + + fn apply(&mut self, c: &Changeset) -> usize { + match c { + Changeset::Existing(e) => { + let location = e.location.unwrap_or_else(|| *self.roots.get(&e.hash).unwrap()); + let entry = self.nodes.get_mut(location).unwrap(); + match entry { + NodeEntry::Live { rc, .. } => { + *rc += 1; + }, + NodeEntry::Removed => { + panic!("Accessing removed node"); + }, + }; + location + }, + Changeset::New(n) => { + let children = n.children.iter().map(|c| self.apply(c)).collect(); + self.nodes.push(NodeEntry::Live { + key: n.hash, + data: n.data.clone(), + children, + rc: 1, + }); + self.nodes.len() - 1 + }, + } + } + + pub fn apply_commit(&mut self, commit: Changeset) -> H::Out { + let root = commit.root_hash(); + if root != self.hashed_null_node { + let root = self.apply(&commit); + let key = commit.hash(); + self.roots.insert(*key, root); + } + // In non-test use, the root should be stored before calling commit (known + // from the tree the commit was built from). + if let Changeset::New(NewChangesetNode { removed_keys: Some((_, removed)), .. }) = &commit { + for (k, _) in removed { + self.remove_root(&k); + } + } + root + } +} + +impl NodeDB, Location> for MemTreeDB +where + H: Hasher, +{ + fn get( + &self, + k: &H::Out, + _prefix: Prefix, + location: Location, + ) -> Option<(Vec, Vec)> { + if k == &self.hashed_null_node { + return Some((self.null_node_data.clone(), Default::default())) + } + + let location = match location { + Some(l) => l, + None => + if let Some(l) = self.roots.get(k) { + *l + } else { + return None + }, + }; + match self.nodes.get(location) { + Some(NodeEntry::Live { data, children, key, ..
}) => { + assert_eq!(k, key); + Some((data.clone(), children.iter().map(|l| Some(*l)).collect())) + }, + _ => None, + } + } + + fn contains(&self, key: &H::Out, _prefix: Prefix, location: Location) -> bool { + if key == &self.hashed_null_node { + return true; + } + if let Some(l) = location { + l < self.nodes.len() && !matches!(self.nodes[l], NodeEntry::Removed) + } else { + self.roots.contains_key(key) + } + } +} + +impl NodeDBMut, Location> for MemTreeDB +where + H: Hasher, +{ + fn apply_changeset(&mut self, commit: Changeset) -> H::Out { + self.apply_commit(commit) + } +} + +#[cfg(test)] +mod tests { + use super::{MemTreeDB, NodeEntry}; + use crate::{ + keccak_hasher::{KeccakHash, KeccakHasher}, + node_db::{Hasher, NodeDB}, + Changeset, ExistingChangesetNode, NewChangesetNode, + }; + + fn hash(i: u32) -> KeccakHash { + KeccakHasher::hash(&i.to_le_bytes()) + } + + #[test] + fn test_apply_existing_node() { + let mut db = MemTreeDB::::default(); + + // First, apply a new node + let new_node = Changeset::New(NewChangesetNode { + hash: hash(1), + prefix: Default::default(), + data: vec![1, 2, 3], + children: vec![], + removed_keys: None, + }); + let new_location = db.apply(&new_node); + + // Then, apply an existing node that refers to the new node + let existing_node = Changeset::Existing(ExistingChangesetNode { + hash: hash(1), + location: Some(new_location), + prefix: Default::default(), + }); + let existing_location = db.apply(&existing_node); + + assert_eq!(existing_location, new_location); + } + + #[test] + fn test_apply_new_node() { + let mut db = MemTreeDB::::default(); + let node = Changeset::New(NewChangesetNode { + hash: KeccakHash::default(), + prefix: Default::default(), + data: vec![1, 2, 3], + children: vec![], + removed_keys: None, + }); + let location = db.apply(&node); + assert_eq!(location, db.nodes.len() - 1); + } + + #[test] + fn test_apply_commit() { + let mut db = MemTreeDB::::default(); + let commit = Changeset::New(NewChangesetNode { + hash: KeccakHash::default(), + prefix: Default::default(), + data: vec![1, 2, 3], + children: vec![], + removed_keys: None, + }); + db.apply_commit(commit); + assert_eq!(db.roots.len(), 1); + } + + #[test] + fn test_commit_changeset_with_children() { + let mut db = MemTreeDB::::default(); + + // Create two child nodes + let child1 = Changeset::New(NewChangesetNode { + hash: hash(1), + prefix: Default::default(), + data: vec![1, 2, 3], + children: vec![], + removed_keys: None, + }); + let child2 = Changeset::New(NewChangesetNode { + hash: hash(2), + prefix: Default::default(), + data: vec![4, 5, 6], + children: vec![], + removed_keys: None, + }); + + // Create a root node that refers to the child nodes + let commit = Changeset::New(NewChangesetNode { + hash: hash(0), + prefix: Default::default(), + data: vec![7, 8, 9], + children: vec![child1, child2], + removed_keys: None, + }); + + db.apply_commit(commit); + + // Check that the root node and child nodes are in the database + assert_eq!(db.nodes.len(), 3); + assert_eq!(db.roots.len(), 1); + } + + #[test] + fn test_get() { + let mut db = MemTreeDB::::default(); + let key = KeccakHash::default(); + db.nodes.push(NodeEntry::Live { + key: key.clone(), + data: vec![1, 2, 3], + children: vec![], + rc: 1, + }); + db.roots.insert(key.clone(), 0); + let result = db.get(&key, Default::default(), None); + assert_eq!(result, Some((vec![1, 2, 3], vec![]))); + } + + #[test] + fn test_contains() { + let mut db = MemTreeDB::::default(); + let key = KeccakHash::default(); + db.nodes.push(NodeEntry::Live 
{ + key: key.clone(), + data: vec![1, 2, 3], + children: vec![], + rc: 1, + }); + db.roots.insert(key.clone(), 0); + assert!(db.contains(&key, Default::default(), None)); + } +} diff --git a/memory-db/src/lib.rs b/subtrie/src/memory_db.rs similarity index 71% rename from memory-db/src/lib.rs rename to subtrie/src/memory_db.rs index 9aff2c54..7f3aad37 100644 --- a/memory-db/src/lib.rs +++ b/subtrie/src/memory_db.rs @@ -12,33 +12,21 @@ // See the License for the specific language governing permissions and // limitations under the License. -//! Reference-counted memory-based `HashDB` implementation. +//! Reference-counted memory-based `NodeDB` implementation. -#![cfg_attr(not(feature = "std"), no_std)] - -#[cfg(not(feature = "std"))] -extern crate alloc; - -use hash_db::{ - AsHashDB, AsPlainDB, HashDB, HashDBRef, Hasher as KeyHasher, MaybeDebug, PlainDB, PlainDBRef, - Prefix, +use crate::{ + node_db::{Hasher as KeyHasher, MaybeDebug, NodeDB, NodeDBMut, Prefix}, + rstd::{cmp::Eq, hash, marker::PhantomData, mem, vec::Vec}, + Changeset, DBValue, }; + #[cfg(feature = "std")] -use std::{ - borrow::Borrow, cmp::Eq, collections::hash_map::Entry, collections::HashMap as Map, hash, - marker::PhantomData, mem, -}; +use std::collections::hash_map::{Entry, HashMap as Map}; #[cfg(not(feature = "std"))] use alloc::collections::btree_map::{BTreeMap as Map, Entry}; -#[cfg(not(feature = "std"))] -use core::{borrow::Borrow, cmp::Eq, hash, marker::PhantomData, mem}; - -#[cfg(not(feature = "std"))] -use alloc::vec::Vec; - -/// Reference-counted memory-based `HashDB` implementation. +/// Reference-counted memory-based `NodeDB` implementation. /// /// Use `new()` to create a new database. Insert items with `insert()`, remove items /// with `remove()`, check for existence with `contains()` and lookup a hash to derive @@ -47,9 +35,12 @@ use alloc::vec::Vec; /// /// # Example /// ```rust -/// use hash_db::{Hasher, HashDB, EMPTY_PREFIX}; -/// use keccak_hasher::KeccakHasher; -/// use memory_db::{MemoryDB, HashKey}; +/// #[cfg(feature = "test_utils")] +/// { +/// use subtrie::node_db::Hasher; +/// use subtrie::node_db::{EMPTY_PREFIX}; +/// use subtrie::keccak_hasher::KeccakHasher; +/// use subtrie::memory_db::{MemoryDB, HashKey}; /// /// let mut m = MemoryDB::, Vec>::default(); /// let d = "Hello world!".as_bytes(); @@ -79,6 +70,7 @@ use alloc::vec::Vec; /// /// m.remove(&k, EMPTY_PREFIX); /// assert!(!m.contains(&k, EMPTY_PREFIX)); +/// } /// ``` pub struct MemoryDB where @@ -114,7 +106,7 @@ where T: Eq + MaybeDebug, { fn eq(&self, other: &MemoryDB) -> bool { - for a in self.data.iter() { + for a in self.data.iter().filter(|(_, (_, rc))| *rc > 0) { match other.data.get(a.0) { Some(v) if v != a.1 => return false, None => return false, @@ -201,43 +193,6 @@ pub fn prefixed_key(key: &H::Out, prefix: Prefix) -> Vec { prefixed_key } -/// Key function that concatenates prefix and hash. -/// This is doing useless computation and should only be -/// used for legacy purpose. -/// It shall be remove in the future. -#[derive(Clone, Debug)] -#[deprecated(since = "0.22.0")] -pub struct LegacyPrefixedKey(PhantomData); - -#[allow(deprecated)] -impl KeyFunction for LegacyPrefixedKey { - type Key = Vec; - - fn key(hash: &H::Out, prefix: Prefix) -> Vec { - legacy_prefixed_key::(hash, prefix) - } -} - -/// Legacy method for db using previous version of prefix encoding. -/// Only for trie radix 16 trie. 
-#[deprecated(since = "0.22.0")] -pub fn legacy_prefixed_key(key: &H::Out, prefix: Prefix) -> Vec { - let mut prefixed_key = Vec::with_capacity(key.as_ref().len() + prefix.0.len() + 1); - if let Some(last) = prefix.1 { - let mut prev = 0x01u8; - for i in prefix.0.iter() { - prefixed_key.push((prev << 4) + (*i >> 4)); - prev = *i; - } - prefixed_key.push((prev << 4) + (last >> 4)); - } else { - prefixed_key.push(0); - prefixed_key.extend_from_slice(prefix.0); - } - prefixed_key.extend_from_slice(key.as_ref()); - prefixed_key -} - impl Default for MemoryDB where H: KeyHasher, @@ -323,13 +278,12 @@ where /// /// # Examples /// ```rust - /// extern crate hash_db; - /// extern crate keccak_hasher; - /// extern crate memory_db; - /// - /// use hash_db::{Hasher, HashDB, EMPTY_PREFIX}; - /// use keccak_hasher::KeccakHasher; - /// use memory_db::{MemoryDB, HashKey}; + /// #[cfg(feature = "test_utils")] + /// { + /// use subtrie::node_db::Hasher; + /// use subtrie::node_db::{NodeDB, EMPTY_PREFIX}; + /// use subtrie::keccak_hasher::KeccakHasher; + /// use subtrie::memory_db::{MemoryDB, HashKey}; /// /// fn main() { /// let mut m = MemoryDB::, Vec>::default(); @@ -339,6 +293,7 @@ where /// m.clear(); /// assert!(!m.contains(&hash, EMPTY_PREFIX)); /// } + /// } /// ``` pub fn clear(&mut self) { self.data.clear(); @@ -396,78 +351,39 @@ where } } -impl PlainDB for MemoryDB +impl NodeDB for MemoryDB where H: KeyHasher, - T: Default + PartialEq + for<'a> From<&'a [u8]> + Clone + Send + Sync, - KF: Send + Sync + KeyFunction, - KF::Key: Borrow<[u8]> + for<'a> From<&'a [u8]>, + T: Default + PartialEq + AsRef<[u8]> + for<'a> From<&'a [u8]> + Clone + Send + Sync, + KF: KeyFunction + Send + Sync, { - fn get(&self, key: &H::Out) -> Option { - match self.data.get(key.as_ref()) { - Some(&(ref d, rc)) if rc > 0 => Some(d.clone()), - _ => None, - } + fn get(&self, key: &H::Out, prefix: Prefix, _location: L) -> Option<(T, Vec)> { + MemoryDB::get(self, key, prefix).map(|d| (d, Default::default())) } - fn contains(&self, key: &H::Out) -> bool { - match self.data.get(key.as_ref()) { - Some(&(_, x)) if x > 0 => true, - _ => false, - } - } - - fn emplace(&mut self, key: H::Out, value: T) { - match self.data.entry(key.as_ref().into()) { - Entry::Occupied(mut entry) => { - let &mut (ref mut old_value, ref mut rc) = entry.get_mut(); - if *rc <= 0 { - *old_value = value; - } - *rc += 1; - }, - Entry::Vacant(entry) => { - entry.insert((value, 1)); - }, - } - } - - fn remove(&mut self, key: &H::Out) { - match self.data.entry(key.as_ref().into()) { - Entry::Occupied(mut entry) => { - let &mut (_, ref mut rc) = entry.get_mut(); - *rc -= 1; - }, - Entry::Vacant(entry) => { - let value = T::default(); - entry.insert((value, -1)); - }, - } + fn contains(&self, key: &H::Out, prefix: Prefix, _location: L) -> bool { + MemoryDB::contains(self, key, prefix) } } -impl PlainDBRef for MemoryDB +impl NodeDBMut for MemoryDB where H: KeyHasher, - T: Default + PartialEq + for<'a> From<&'a [u8]> + Clone + Send + Sync, - KF: Send + Sync + KeyFunction, - KF::Key: Borrow<[u8]> + for<'a> From<&'a [u8]>, + KF: KeyFunction + Send + Sync, + L: Default, { - fn get(&self, key: &H::Out) -> Option { - PlainDB::get(self, key) - } - fn contains(&self, key: &H::Out) -> bool { - PlainDB::contains(self, key) + fn apply_changeset(&mut self, commit: Changeset) -> H::Out { + commit.apply_to(self) } } -impl HashDB for MemoryDB +impl MemoryDB where H: KeyHasher, T: Default + PartialEq + AsRef<[u8]> + for<'a> From<&'a [u8]> + Clone + Send + Sync, KF: KeyFunction + 
Send + Sync, { - fn get(&self, key: &H::Out, prefix: Prefix) -> Option { + pub fn get(&self, key: &H::Out, prefix: Prefix) -> Option { if key == &self.hashed_null_node { return Some(self.null_node_data.clone()) } @@ -479,7 +395,7 @@ where } } - fn contains(&self, key: &H::Out, prefix: Prefix) -> bool { + pub fn contains(&self, key: &H::Out, prefix: Prefix) -> bool { if key == &self.hashed_null_node { return true } @@ -491,7 +407,7 @@ where } } - fn emplace(&mut self, key: H::Out, prefix: Prefix, value: T) { + pub fn emplace(&mut self, key: H::Out, prefix: Prefix, value: T) { if value == self.null_node_data { return } @@ -511,17 +427,17 @@ where } } - fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H::Out { + pub fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H::Out { if T::from(value) == self.null_node_data { return self.hashed_null_node } let key = H::hash(value); - HashDB::emplace(self, key, prefix, value.into()); + self.emplace(key, prefix, value.into()); key } - fn remove(&mut self, key: &H::Out, prefix: Prefix) { + pub fn remove(&mut self, key: &H::Out, prefix: Prefix) { if key == &self.hashed_null_node { return } @@ -540,54 +456,13 @@ where } } -impl HashDBRef for MemoryDB -where - H: KeyHasher, - T: Default + PartialEq + AsRef<[u8]> + for<'a> From<&'a [u8]> + Clone + Send + Sync, - KF: KeyFunction + Send + Sync, -{ - fn get(&self, key: &H::Out, prefix: Prefix) -> Option { - HashDB::get(self, key, prefix) - } - fn contains(&self, key: &H::Out, prefix: Prefix) -> bool { - HashDB::contains(self, key, prefix) - } -} - -impl AsPlainDB for MemoryDB -where - H: KeyHasher, - T: Default + PartialEq + for<'a> From<&'a [u8]> + Clone + Send + Sync, - KF: KeyFunction + Send + Sync, - KF::Key: Borrow<[u8]> + for<'a> From<&'a [u8]>, -{ - fn as_plain_db(&self) -> &dyn PlainDB { - self - } - fn as_plain_db_mut(&mut self) -> &mut dyn PlainDB { - self - } -} - -impl AsHashDB for MemoryDB -where - H: KeyHasher, - T: Default + PartialEq + AsRef<[u8]> + for<'a> From<&'a [u8]> + Clone + Send + Sync, - KF: KeyFunction + Send + Sync, -{ - fn as_hash_db(&self) -> &dyn HashDB { - self - } - fn as_hash_db_mut(&mut self) -> &mut dyn HashDB { - self - } -} - #[cfg(test)] -mod tests { - use super::{HashDB, HashKey, KeyHasher, MemoryDB}; - use hash_db::EMPTY_PREFIX; - use keccak_hasher::KeccakHasher; +mod test { + use crate::{ + keccak_hasher::KeccakHasher, + memory_db::{HashKey, MemoryDB}, + node_db::{Hasher as KeyHasher, EMPTY_PREFIX}, + }; #[test] fn memorydb_remove_and_purge() { diff --git a/trie-db/src/nibble/leftnibbleslice.rs b/subtrie/src/nibble/leftnibbleslice.rs similarity index 100% rename from trie-db/src/nibble/leftnibbleslice.rs rename to subtrie/src/nibble/leftnibbleslice.rs diff --git a/trie-db/src/nibble/mod.rs b/subtrie/src/nibble/mod.rs similarity index 98% rename from trie-db/src/nibble/mod.rs rename to subtrie/src/nibble/mod.rs index e1d758e9..413cbfef 100644 --- a/trie-db/src/nibble/mod.rs +++ b/subtrie/src/nibble/mod.rs @@ -142,7 +142,7 @@ pub mod nibble_ops { } /// Backing storage for `NibbleVec`s. -pub(crate) type BackingByteVec = smallvec::SmallVec<[u8; 40]>; +pub type BackingByteVec = smallvec::SmallVec<[u8; 40]>; /// Owning, nibble-oriented byte vector. Counterpart to `NibbleSlice`. 
/// Nibbles are always left aligned, so making a `NibbleVec` from diff --git a/trie-db/src/nibble/nibbleslice.rs b/subtrie/src/nibble/nibbleslice.rs similarity index 99% rename from trie-db/src/nibble/nibbleslice.rs rename to subtrie/src/nibble/nibbleslice.rs index f91fad7f..e5e062a9 100644 --- a/trie-db/src/nibble/nibbleslice.rs +++ b/subtrie/src/nibble/nibbleslice.rs @@ -17,8 +17,7 @@ use super::{nibble_ops, BackingByteVec, NibbleSlice, NibbleSliceIterator, NibbleVec}; #[cfg(feature = "std")] use crate::rstd::fmt; -use crate::{node::NodeKey, node_codec::Partial, rstd::cmp::*}; -use hash_db::Prefix; +use crate::{node::NodeKey, node_codec::Partial, node_db::Prefix, rstd::cmp::*}; impl<'a> Iterator for NibbleSliceIterator<'a> { type Item = u8; diff --git a/trie-db/src/nibble/nibblevec.rs b/subtrie/src/nibble/nibblevec.rs similarity index 94% rename from trie-db/src/nibble/nibblevec.rs rename to subtrie/src/nibble/nibblevec.rs index f612585a..c2256a49 100644 --- a/trie-db/src/nibble/nibblevec.rs +++ b/subtrie/src/nibble/nibblevec.rs @@ -19,8 +19,8 @@ use crate::{ nibble::{nibble_ops, BackingByteVec, NibbleSlice}, node::NodeKey, node_codec::Partial, + node_db::Prefix, }; -use hash_db::Prefix; impl Default for NibbleVec { fn default() -> Self { @@ -117,6 +117,12 @@ impl NibbleVec { } } + /// Get `Prefix` representation of this `NibbleVec`. + pub fn as_owned_prefix(&self) -> (BackingByteVec, Option) { + let (inner, pad) = self.as_prefix(); + (inner.into(), pad) + } + /// Append another `NibbleVec`. Can be slow (alignement of second vec). pub fn append(&mut self, v: &NibbleVec) { if v.len == 0 { @@ -235,6 +241,25 @@ impl NibbleVec { true } + /// Same as [`Self::starts_with`] but using [`NibbleSlice`]. + pub fn starts_with_slice(&self, other: &NibbleSlice) -> bool { + if self.len() < other.len() { + return false + } + + match self.as_nibbleslice() { + Some(slice) => slice.starts_with(&other), + None => { + for i in 0..other.len() { + if self.at(i) != other.at(i) { + return false + } + } + true + }, + } + } + /// Return an iterator over `Partial` bytes representation. pub fn right_iter<'a>(&'a self) -> impl Iterator + 'a { let require_padding = self.len % nibble_ops::NIBBLE_PER_BYTE != 0; diff --git a/trie-db/src/node.rs b/subtrie/src/node.rs similarity index 68% rename from trie-db/src/node.rs rename to subtrie/src/node.rs index 19ed9162..d0a36804 100644 --- a/trie-db/src/node.rs +++ b/subtrie/src/node.rs @@ -15,11 +15,11 @@ use crate::{ nibble::{self, nibble_ops, NibbleSlice, NibbleVec}, node_codec::NodeCodec, + node_db::Hasher, Bytes, CError, ChildReference, Result, TrieError, TrieHash, TrieLayout, }; #[cfg(not(feature = "std"))] use alloc::{boxed::Box, vec::Vec}; -use hash_db::Hasher; use crate::rstd::{borrow::Borrow, mem, ops::Range}; @@ -29,22 +29,25 @@ pub type NodeKey = (usize, nibble::BackingByteVec); /// A reference to a trie node which may be stored within another trie node. #[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum NodeHandle<'a> { - Hash(&'a [u8]), +pub enum NodeHandle<'a, L> { + Hash(&'a [u8], L), Inline(&'a [u8]), } -impl NodeHandle<'_> { +impl<'a, L: Copy + Default> NodeHandle<'a, L> { /// Converts this node handle into a [`NodeHandleOwned`]. 
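+ /// The location (`L`) of a hash handle is carried over to the owned handle, while inline
+ /// handles are decoded without any locations.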
- pub fn to_owned_handle( + pub fn to_owned_handle( &self, - ) -> Result>, TrieHash, CError> { + ) -> Result, TL::Location>, TrieHash, CError> + where + TL::Location: From, + { match self { - Self::Hash(h) => decode_hash::(h) + Self::Hash(h, l) => decode_hash::(h) .ok_or_else(|| Box::new(TrieError::InvalidHash(Default::default(), h.to_vec()))) - .map(NodeHandleOwned::Hash), - Self::Inline(i) => match L::Codec::decode(i) { - Ok(node) => Ok(NodeHandleOwned::Inline(Box::new(node.to_owned_node::()?))), + .map(|h| NodeHandleOwned::Hash(h, (*l).into())), + Self::Inline(i) => match TL::Codec::decode(i, &[] as &[L]) { + Ok(node) => Ok(NodeHandleOwned::Inline(Box::new(node.to_owned_node::()?))), Err(e) => Err(Box::new(TrieError::DecoderError(Default::default(), e))), }, } @@ -54,14 +57,15 @@ impl NodeHandle<'_> { /// Owned version of [`NodeHandleOwned`]. #[derive(Clone, PartialEq, Eq)] #[cfg_attr(feature = "std", derive(Debug))] -pub enum NodeHandleOwned { - Hash(H), - Inline(Box>), +pub enum NodeHandleOwned { + Hash(H, L), + Inline(Box>), } -impl NodeHandleOwned +impl NodeHandleOwned where H: Default + AsRef<[u8]> + AsMut<[u8]> + Copy, + L: Default + Copy, { /// Returns `self` as a [`ChildReference`]. /// @@ -69,9 +73,9 @@ where /// /// This function panics if `self == Self::Inline(_)` and the inline node encoded length is /// greater then the length of the hash. - fn as_child_reference>(&self) -> ChildReference { + fn as_child_reference>(&self) -> ChildReference { match self { - NodeHandleOwned::Hash(h) => ChildReference::Hash(*h), + NodeHandleOwned::Hash(h, l) => ChildReference::Hash(*h, *l), NodeHandleOwned::Inline(n) => { let encoded = n.to_encoded::(); let mut store = H::default(); @@ -84,11 +88,11 @@ where } } -impl NodeHandleOwned { +impl NodeHandleOwned { /// Returns `self` as inline node. - pub fn as_inline(&self) -> Option<&NodeOwned> { + pub fn as_inline(&self) -> Option<&NodeOwned> { match self { - Self::Hash(_) => None, + Self::Hash(_, _) => None, Self::Inline(node) => Some(&*node), } } @@ -107,14 +111,14 @@ pub fn decode_hash(data: &[u8]) -> Option { /// Value representation in `Node`. #[derive(Eq, PartialEq, Clone)] #[cfg_attr(feature = "std", derive(Debug))] -pub enum Value<'a> { +pub enum Value<'a, L> { /// Value byte slice as stored in a trie node. Inline(&'a [u8]), /// Hash byte slice as stored in a trie node. - Node(&'a [u8]), + Node(&'a [u8], L), } -impl<'a> Value<'a> { +impl<'a, L: Copy + Default> Value<'a, L> { pub(crate) fn new_inline(value: &'a [u8], threshold: Option) -> Option { if let Some(threshold) = threshold { if value.len() >= threshold as usize { @@ -127,14 +131,17 @@ impl<'a> Value<'a> { } } - pub fn to_owned_value(&self) -> ValueOwned> { + pub fn to_owned_value(&self) -> ValueOwned, TL::Location> + where + TL::Location: From, + { match self { - Self::Inline(data) => ValueOwned::Inline(Bytes::from(*data), L::Hash::hash(data)), - Self::Node(hash) => { - let mut res = TrieHash::::default(); + Self::Inline(data) => ValueOwned::Inline(Bytes::from(*data), TL::Hash::hash(data)), + Self::Node(hash, l) => { + let mut res = TrieHash::::default(); res.as_mut().copy_from_slice(hash); - ValueOwned::Node(res) + ValueOwned::Node(res, (*l).into()) }, } } @@ -143,19 +150,19 @@ impl<'a> Value<'a> { /// Owned value representation in `Node`. #[derive(Eq, PartialEq, Clone)] #[cfg_attr(feature = "std", derive(Debug))] -pub enum ValueOwned { +pub enum ValueOwned { /// Value bytes as stored in a trie node and its hash. Inline(Bytes, H), /// Hash byte slice as stored in a trie node. 
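+ /// The second field is the database location of the value node.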
- Node(H), + Node(H, L), } -impl + Copy> ValueOwned { +impl + Copy, L: Copy + Default> ValueOwned { /// Returns self as [`Value`]. - pub fn as_value(&self) -> Value { + pub fn as_value(&self) -> Value { match self { Self::Inline(data, _) => Value::Inline(&data), - Self::Node(hash) => Value::Node(hash.as_ref()), + Self::Node(hash, location) => Value::Node(hash.as_ref(), *location), } } @@ -163,17 +170,17 @@ impl + Copy> ValueOwned { pub fn data_hash(&self) -> Option { match self { Self::Inline(_, hash) => Some(*hash), - Self::Node(hash) => Some(*hash), + Self::Node(hash, _) => Some(*hash), } } } -impl ValueOwned { +impl ValueOwned { /// Returns the data stored in self. pub fn data(&self) -> Option<&Bytes> { match self { Self::Inline(data, _) => Some(data), - Self::Node(_) => None, + Self::Node(_, _) => None, } } } @@ -181,29 +188,33 @@ impl ValueOwned { /// Type of node in the trie and essential information thereof. #[derive(Eq, PartialEq, Clone)] #[cfg_attr(feature = "std", derive(Debug))] -pub enum Node<'a> { +pub enum Node<'a, L> { /// Null trie node; could be an empty root or an empty branch entry. Empty, /// Leaf node; has key slice and value. Value may not be empty. - Leaf(NibbleSlice<'a>, Value<'a>), + Leaf(NibbleSlice<'a>, Value<'a, L>), /// Extension node; has key slice and node data. Data may not be null. - Extension(NibbleSlice<'a>, NodeHandle<'a>), + Extension(NibbleSlice<'a>, NodeHandle<'a, L>), /// Branch node; has slice of child nodes (each possibly null) /// and an optional immediate node data. - Branch([Option>; nibble_ops::NIBBLE_LENGTH], Option>), + Branch([Option>; nibble_ops::NIBBLE_LENGTH], Option>), /// Branch node with support for a nibble (when extension nodes are not used). NibbledBranch( NibbleSlice<'a>, - [Option>; nibble_ops::NIBBLE_LENGTH], - Option>, + [Option>; nibble_ops::NIBBLE_LENGTH], + Option>, ), } -impl Node<'_> { +impl Node<'_, Location> { /// Converts this node into a [`NodeOwned`]. pub fn to_owned_node( &self, - ) -> Result>, TrieHash, CError> { + ) -> Result, L::Location>, TrieHash, CError> + where + L::Location: From, + Location: Copy + Default, + { match self { Self::Empty => Ok(NodeOwned::Empty), Self::Leaf(n, d) => Ok(NodeOwned::Leaf((*n).into(), d.to_owned_value::())), @@ -248,21 +259,21 @@ impl Node<'_> { /// Owned version of [`Node`]. #[derive(Eq, PartialEq, Clone)] #[cfg_attr(feature = "std", derive(Debug))] -pub enum NodeOwned { +pub enum NodeOwned { /// Null trie node; could be an empty root or an empty branch entry. Empty, /// Leaf node; has key slice and value. Value may not be empty. - Leaf(NibbleVec, ValueOwned), + Leaf(NibbleVec, ValueOwned), /// Extension node; has key slice and node data. Data may not be null. - Extension(NibbleVec, NodeHandleOwned), + Extension(NibbleVec, NodeHandleOwned), /// Branch node; has slice of child nodes (each possibly null) /// and an optional immediate node data. - Branch([Option>; nibble_ops::NIBBLE_LENGTH], Option>), + Branch([Option>; nibble_ops::NIBBLE_LENGTH], Option>), /// Branch node with support for a nibble (when extension nodes are not used). NibbledBranch( NibbleVec, - [Option>; nibble_ops::NIBBLE_LENGTH], - Option>, + [Option>; nibble_ops::NIBBLE_LENGTH], + Option>, ), /// Node that represents a value. /// @@ -271,7 +282,7 @@ pub enum NodeOwned { Value(Bytes, H), } -impl NodeOwned +impl NodeOwned where H: Default + AsRef<[u8]> + AsMut<[u8]> + Copy, { @@ -304,15 +315,15 @@ where } /// Returns an iterator over all existing children with their optional nibble. 
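+ /// For branch nodes the nibble is the child index; inline children are yielded as well.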
- pub fn child_iter(&self) -> impl Iterator, &NodeHandleOwned)> { - enum ChildIter<'a, H> { + pub fn child_iter(&self) -> impl Iterator, &NodeHandleOwned)> { + enum ChildIter<'a, H, L> { Empty, - Single(&'a NodeHandleOwned, bool), - Array(&'a [Option>; nibble_ops::NIBBLE_LENGTH], usize), + Single(&'a NodeHandleOwned, bool), + Array(&'a [Option>; nibble_ops::NIBBLE_LENGTH], usize), } - impl<'a, H> Iterator for ChildIter<'a, H> { - type Item = (Option, &'a NodeHandleOwned); + impl<'a, H, L> Iterator for ChildIter<'a, H, L> { + type Item = (Option, &'a NodeHandleOwned); fn next(&mut self) -> Option { loop { @@ -362,7 +373,7 @@ where } } -impl NodeOwned { +impl NodeOwned { /// Returns the data attached to this node. pub fn data(&self) -> Option<&Bytes> { match &self { @@ -391,8 +402,8 @@ impl NodeOwned { pub fn size_in_bytes(&self) -> usize { let self_size = mem::size_of::(); - fn childs_size<'a, H: 'a>( - childs: impl Iterator>>, + fn childs_size<'a, H: 'a, L: 'a>( + childs: impl Iterator>>, ) -> usize { // If a `child` isn't an inline node, its size is already taken account for by // `self_size`. @@ -429,9 +440,10 @@ impl NodeOwned { /// A `NodeHandlePlan` is a decoding plan for constructing a `NodeHandle` from an encoded trie /// node. This is used as a substructure of `NodePlan`. See `NodePlan` for details. +/// Number of existing node is stored (allow fast access to children locations). #[derive(Debug, Clone, PartialEq, Eq)] pub enum NodeHandlePlan { - Hash(Range), + Hash(Range, u8), Inline(Range), } @@ -439,12 +451,20 @@ impl NodeHandlePlan { /// Build a node handle by decoding a byte slice according to the node handle plan. It is the /// responsibility of the caller to ensure that the node plan was created for the argument /// data, otherwise the call may decode incorrectly or panic. - pub fn build<'a, 'b>(&'a self, data: &'b [u8]) -> NodeHandle<'b> { + pub fn build<'a, 'b, L>(&'a self, data: &'b [u8], location: L) -> NodeHandle<'b, L> { match self { - NodeHandlePlan::Hash(range) => NodeHandle::Hash(&data[range.clone()]), + NodeHandlePlan::Hash(range, _) => NodeHandle::Hash(&data[range.clone()], location), NodeHandlePlan::Inline(range) => NodeHandle::Inline(&data[range.clone()]), } } + + /// Check if the node is innline. + pub fn is_inline(&self) -> bool { + match self { + NodeHandlePlan::Hash(..) => false, + NodeHandlePlan::Inline(..) => true, + } + } } /// A `NibbleSlicePlan` is a blueprint for decoding a nibble slice from a byte slice. The @@ -488,10 +508,18 @@ pub enum ValuePlan { impl ValuePlan { /// Build a value slice by decoding a byte slice according to the plan. - pub fn build<'a, 'b>(&'a self, data: &'b [u8]) -> Value<'b> { + pub fn build<'a, 'b, L>(&'a self, data: &'b [u8], location: L) -> Value<'b, L> { match self { ValuePlan::Inline(range) => Value::Inline(&data[range.clone()]), - ValuePlan::Node(range) => Value::Node(&data[range.clone()]), + ValuePlan::Node(range) => Value::Node(&data[range.clone()], location), + } + } + + /// Check if the value is inline. + pub fn is_inline(&self) -> bool { + match self { + ValuePlan::Inline(_) => true, + ValuePlan::Node(_) => false, } } } @@ -526,36 +554,98 @@ pub enum NodePlan { } impl NodePlan { - /// Build a node by decoding a byte slice according to the node plan. It is the responsibility - /// of the caller to ensure that the node plan was created for the argument data, otherwise the - /// call may decode incorrectly or panic. 
- pub fn build<'a, 'b>(&'a self, data: &'b [u8]) -> Node<'b> { + /// Build a node by decoding a byte slice according to the node plan and attaching location + /// dats. It is the responsibility of the caller to ensure that the node plan was created for + /// the argument data, otherwise the call may decode incorrectly or panic. + pub fn build<'a, 'b, L: Copy + Default>( + &'a self, + data: &'b [u8], + locations: &[L], + ) -> Node<'b, L> { match self { NodePlan::Empty => Node::Empty, - NodePlan::Leaf { partial, value } => Node::Leaf(partial.build(data), value.build(data)), - NodePlan::Extension { partial, child } => - Node::Extension(partial.build(data), child.build(data)), + NodePlan::Leaf { partial, value } => Node::Leaf( + partial.build(data), + value.build(data, locations.first().copied().unwrap_or_default()), + ), + NodePlan::Extension { partial, child } => Node::Extension( + partial.build(data), + child.build(data, locations.first().copied().unwrap_or_default()), + ), NodePlan::Branch { value, children } => { - let mut child_slices = [None; nibble_ops::NIBBLE_LENGTH]; - for i in 0..nibble_ops::NIBBLE_LENGTH { - child_slices[i] = children[i].as_ref().map(|child| child.build(data)); - } - Node::Branch(child_slices, value.as_ref().map(|v| v.build(data))) + let (value, child_slices) = + Self::build_value_and_children(value.as_ref(), children, data, locations); + Node::Branch(child_slices, value) }, NodePlan::NibbledBranch { partial, value, children } => { - let mut child_slices = [None; nibble_ops::NIBBLE_LENGTH]; - for i in 0..nibble_ops::NIBBLE_LENGTH { - child_slices[i] = children[i].as_ref().map(|child| child.build(data)); - } - Node::NibbledBranch( - partial.build(data), - child_slices, - value.as_ref().map(|v| v.build(data)), - ) + let (value, child_slices) = + Self::build_value_and_children(value.as_ref(), children, data, locations); + Node::NibbledBranch(partial.build(data), child_slices, value) }, } } + fn build_value_and_children<'a, 'b, L: Copy + Default>( + value: Option<&'a ValuePlan>, + children: &'a [Option; nibble_ops::NIBBLE_LENGTH], + data: &'b [u8], + locations: &[L], + ) -> (Option>, [Option>; nibble_ops::NIBBLE_LENGTH]) { + let mut child_slices = [None; nibble_ops::NIBBLE_LENGTH]; + let mut nc = 0; + let value = if let Some(v) = value { + if v.is_inline() { + Some(v.build(data, Default::default())) + } else { + nc += 1; + Some(v.build(data, locations.first().copied().unwrap_or_default())) + } + } else { + None + }; + for i in 0..nibble_ops::NIBBLE_LENGTH { + if let Some(child) = &children[i] { + let location = if child.is_inline() { + Default::default() + } else { + let l = locations.get(nc).copied().unwrap_or_default(); + nc += 1; + l + }; + child_slices[i] = Some(child.build(data, location)); + } + } + (value, child_slices) + } + + pub(crate) fn build_child<'a, 'b, L: Copy + Default>( + value: Option<&'a ValuePlan>, + children: &'a [Option; nibble_ops::NIBBLE_LENGTH], + index: usize, + data: &'b [u8], + locations: &[L], + ) -> Option> { + let mut location_value_offset = 0; + if let Some(v) = value { + if !v.is_inline() { + location_value_offset = 1; + } + } + if let Some(child) = &children[index] { + let location = if let NodeHandlePlan::Hash(_, i_hash) = child { + locations + .get(location_value_offset + *i_hash as usize) + .copied() + .unwrap_or_default() + } else { + Default::default() + }; + Some(child.build(data, location)) + } else { + None + } + } + /// Access value plan from node plan, return `None` for /// node that cannot contain a `ValuePlan`. 
pub fn value_plan(&self) -> Option<&ValuePlan> { @@ -577,22 +667,58 @@ impl NodePlan { value.as_mut(), } } + + /// Check if the node has a location for value. + pub fn has_location_for_value(&self) -> bool { + self.value_plan().map(|v| !v.is_inline()).unwrap_or(false) + } + + fn num_children_locations(&self) -> usize { + match self { + NodePlan::Extension { child: NodeHandlePlan::Hash(..), .. } => 1, + NodePlan::Branch { children, .. } | NodePlan::NibbledBranch { children, .. } => { + let mut count = 0; + for child in children { + if let Some(NodeHandlePlan::Hash(..)) = child { + count += 1; + } + } + count + }, + _ => 0, + } + } + + /// Check if an extra location is defined, it can be attached state. + /// This method is counting and should be call only when needed. + pub fn additional_ref_location(&self, locations: &[L]) -> Option { + let offset = + if self.has_location_for_value() { 1 } else { 0 } + self.num_children_locations(); + if locations.len() > offset { + // only one additional location expected with current code. + debug_assert!(locations.len() == offset + 1); + Some(locations[offset]) + } else { + None + } + } } /// An `OwnedNode` is an owned type from which a `Node` can be constructed which borrows data from /// the `OwnedNode`. This is useful for trie iterators. #[cfg_attr(feature = "std", derive(Debug))] #[derive(PartialEq, Eq)] -pub struct OwnedNode> { +pub struct OwnedNode, L> { data: D, plan: NodePlan, + locations: Vec, } -impl> OwnedNode { +impl, L: Default + Copy> OwnedNode { /// Construct an `OwnedNode` by decoding an owned data source according to some codec. - pub fn new(data: D) -> core::result::Result { + pub fn new(data: D, locations: Vec) -> core::result::Result { let plan = C::decode_plan(data.borrow())?; - Ok(OwnedNode { data, plan }) + Ok(OwnedNode { data, plan, locations }) } /// Returns a reference to the backing data. @@ -600,6 +726,11 @@ impl> OwnedNode { self.data.borrow() } + /// Returns a reference to children locations. + pub fn locations(&self) -> &[L] { + &self.locations + } + /// Returns a reference to the node decode plan. pub fn node_plan(&self) -> &NodePlan { &self.plan @@ -611,7 +742,7 @@ impl> OwnedNode { } /// Construct a `Node` by borrowing data from this struct. - pub fn node(&self) -> Node { - self.plan.build(self.data.borrow()) + pub fn node(&self) -> Node { + self.plan.build(self.data.borrow(), &self.locations) } } diff --git a/trie-db/src/node_codec.rs b/subtrie/src/node_codec.rs similarity index 84% rename from trie-db/src/node_codec.rs rename to subtrie/src/node_codec.rs index eb9b1f67..c4aa2635 100644 --- a/trie-db/src/node_codec.rs +++ b/subtrie/src/node_codec.rs @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -//! Generic trait for trie node encoding/decoding. Takes a `hash_db::Hasher` +//! Generic trait for trie node encoding/decoding. Takes a `trie_db::node_db::Hasher` //! to parametrize the hashes used in the codec. use crate::{ @@ -59,8 +59,11 @@ pub trait NodeCodec: Sized { fn decode_plan(data: &[u8]) -> Result; /// Decode bytes to a `Node`. Returns `Self::E` on failure. - fn decode<'a>(data: &'a [u8]) -> Result, Self::Error> { - Ok(Self::decode_plan(data)?.build(data)) + fn decode<'a, L: Copy + Default>( + data: &'a [u8], + locations: &[L], + ) -> Result, Self::Error> { + Ok(Self::decode_plan(data)?.build(data, locations)) } /// Check if the provided bytes correspond to the codecs "empty" node. 
@@ -74,32 +77,36 @@ pub trait NodeCodec: Sized { /// Note that number_nibble is the number of element of the iterator /// it can possibly be obtain by `Iterator` `size_hint`, but /// for simplicity it is used directly as a parameter. - fn leaf_node(partial: impl Iterator, number_nibble: usize, value: Value) -> Vec; + fn leaf_node( + partial: impl Iterator, + number_nibble: usize, + value: Value, + ) -> Vec; /// Returns an encoded extension node /// /// Note that number_nibble is the number of element of the iterator /// it can possibly be obtain by `Iterator` `size_hint`, but /// for simplicity it is used directly as a parameter. - fn extension_node( + fn extension_node( partial: impl Iterator, number_nibble: usize, - child_ref: ChildReference, + child_ref: ChildReference, ) -> Vec; /// Returns an encoded branch node. /// Takes an iterator yielding `ChildReference` and an optional value. - fn branch_node( - children: impl Iterator>>>, - value: Option, + fn branch_node( + children: impl Iterator>>>, + value: Option>, ) -> Vec; /// Returns an encoded branch node with a possible partial path. /// `number_nibble` is the partial path length as in `extension_node`. - fn branch_node_nibbled( + fn branch_node_nibbled( partial: impl Iterator, number_nibble: usize, - children: impl Iterator>>>, - value: Option, + children: impl Iterator>>>, + value: Option>, ) -> Vec; } diff --git a/subtrie/src/node_db.rs b/subtrie/src/node_db.rs new file mode 100644 index 00000000..95005de0 --- /dev/null +++ b/subtrie/src/node_db.rs @@ -0,0 +1,97 @@ +// Copyright 2017, 2021 Parity Technologies +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Database of byte-slices keyed to their hash. + +use crate::{ + rstd::{hash, vec::Vec}, + Changeset, +}; + +#[cfg(feature = "std")] +use std::fmt::Debug; + +#[cfg(feature = "std")] +pub trait MaybeDebug: Debug {} +#[cfg(feature = "std")] +impl MaybeDebug for T {} +#[cfg(not(feature = "std"))] +pub trait MaybeDebug {} +#[cfg(not(feature = "std"))] +impl MaybeDebug for T {} + +/// A trie node prefix, it is the nibble path from the trie root +/// to the trie node. +/// For a node containing no partial key value it is the full key. +/// For a value node or node containing a partial key, it is the full key minus its node partial +/// nibbles (the node key can be split into prefix and node partial). +/// Therefore it is always the leftmost portion of the node key, so its internal representation +/// is a non expanded byte slice followed by a last padded byte representation. +/// The padded byte is an optional padded value. +pub type Prefix<'a> = (&'a [u8], Option); + +/// An empty prefix constant. +/// Can be use when the prefix is not use internally +/// or for root nodes. +pub static EMPTY_PREFIX: Prefix<'static> = (&[], None); + +/// Trait describing an object that can hash a slice of bytes. Used to abstract +/// other types over the hashing algorithm. Defines a single `hash` method and an +/// `Out` associated type with the necessary bounds. 
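+///
+/// A minimal usage sketch (mirroring the `memory_db` doc examples, which assume the
+/// `test_utils` feature for `KeccakHasher`):
+///
+/// ```ignore
+/// use subtrie::node_db::Hasher;
+/// use subtrie::keccak_hasher::KeccakHasher;
+///
+/// // Hash some bytes; the output is `KeccakHasher::LENGTH` (32) bytes long.
+/// let hash = KeccakHasher::hash(b"Hello world!");
+/// assert_eq!(hash.as_ref().len(), KeccakHasher::LENGTH);
+/// ```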
+pub trait Hasher: Sync + Send { + /// The output type of the `Hasher` + type Out: AsRef<[u8]> + + AsMut<[u8]> + + Default + + MaybeDebug + + core::cmp::Ord + + PartialEq + + Eq + + hash::Hash + + Send + + Sync + + Clone + + Copy; + /// What to use to build `HashMap`s with this `Hasher`. + type StdHasher: Sync + Send + Default + hash::Hasher; + /// The length in bytes of the `Hasher` output. + const LENGTH: usize; + + /// Compute the hash of the provided slice of bytes returning the `Out` type of the `Hasher`. + fn hash(x: &[u8]) -> Self::Out; +} + +/// Trait modelling datastore keyed by a hash defined by the `Hasher` and optional location tag. +pub trait NodeDB: Send + Sync { + /// Look up a trie node by hash and location. + /// Returns the node bytes and the list of children node locations if any. + fn get(&self, key: &H::Out, prefix: Prefix, location: L) -> Option<(T, Vec)>; + + /// Check for the existence of a hash-key at the location. + fn contains(&self, key: &H::Out, prefix: Prefix, location: L) -> bool { + self.get(key, prefix, location).is_some() + } + + /// Compute value hash. + fn hash(&self, value: &[u8]) -> H::Out { + H::hash(value) + } +} + +/// Trait for node db that can get update by a CommitSet. +/// Mostly usefull for testing. +pub trait NodeDBMut: NodeDB { + /// Insert commit set to the db. + fn apply_changeset(&mut self, commit: Changeset) -> H::Out; +} diff --git a/trie-db/src/proof/generate.rs b/subtrie/src/proof/generate.rs similarity index 79% rename from trie-db/src/proof/generate.rs rename to subtrie/src/proof/generate.rs index 2db1eafa..a874e0e2 100644 --- a/trie-db/src/proof/generate.rs +++ b/subtrie/src/proof/generate.rs @@ -16,22 +16,22 @@ use crate::rstd::{boxed::Box, convert::TryInto, marker::PhantomData, vec, vec::Vec}; -use hash_db::{HashDBRef, Hasher}; +use crate::node_db::{Hasher, NodeDB}; use crate::{ nibble::LeftNibbleSlice, nibble_ops::NIBBLE_LENGTH, - node::{NodeHandle, NodeHandlePlan, NodePlan, OwnedNode, Value, ValuePlan}, + node::{Node, NodeHandle, NodeHandlePlan, NodePlan, OwnedNode, Value, ValuePlan}, recorder::Record, CError, ChildReference, DBValue, NibbleSlice, NodeCodec, Recorder, Result as TrieResult, Trie, TrieDBBuilder, TrieError, TrieHash, TrieLayout, }; -struct StackEntry<'a, C: NodeCodec> { +struct StackEntry<'a, C: NodeCodec, L> { /// The prefix is the nibble path to the node in the trie. prefix: LeftNibbleSlice<'a>, /// Stacked node. - node: OwnedNode>, + node: OwnedNode, L>, /// The hash of the node or None if it is referenced inline. node_hash: Option, /// Whether the value should be omitted in the generated proof. @@ -40,20 +40,21 @@ struct StackEntry<'a, C: NodeCodec> { /// nodes, the index is in [0, NIBBLE_LENGTH] and for extension nodes, the index is in [0, 1]. child_index: usize, /// The child references to use in constructing the proof nodes. - children: Vec>>, + children: Vec>>, /// The index into the proof vector that the encoding of this entry should be placed at. output_index: Option, _marker: PhantomData, } -impl<'a, C: NodeCodec> StackEntry<'a, C> { +impl<'a, C: NodeCodec, L: Copy + Default> StackEntry<'a, C, L> { fn new( prefix: LeftNibbleSlice<'a>, node_data: Vec, + locations: Vec, node_hash: Option, output_index: Option, ) -> TrieResult { - let node = OwnedNode::new::(node_data) + let node = OwnedNode::new::(node_data, locations) .map_err(|err| Box::new(TrieError::DecoderError(node_hash.unwrap_or_default(), err)))?; let children_len = match node.node_plan() { NodePlan::Empty | NodePlan::Leaf { .. 
} => 0, @@ -76,9 +77,9 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { fn encode_node(mut self) -> TrieResult, C::HashOut, C::Error> { let omit_value = self.omit_value; let node_data = self.node.data(); - let value_with_omission = |value_range: ValuePlan| -> Option { + let value_with_omission = |value_range: ValuePlan| -> Option> { if !omit_value { - Some(value_range.build(&node_data)) + Some(value_range.build(&node_data, Default::default())) } else { None } @@ -87,12 +88,12 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { NodePlan::Empty => node_data.to_vec(), NodePlan::Leaf { .. } if !omit_value => node_data.to_vec(), NodePlan::Leaf { partial, value: _ } => { - let partial = partial.build(node_data); - C::leaf_node(partial.right_iter(), partial.len(), Value::Inline(&[])) + let partial = partial.build(&node_data); + C::leaf_node::(partial.right_iter(), partial.len(), Value::Inline(&[])) }, NodePlan::Extension { .. } if self.child_index == 0 => node_data.to_vec(), NodePlan::Extension { partial: partial_plan, child: _ } => { - let partial = partial_plan.build(node_data); + let partial = partial_plan.build(&node_data); let child = self.children[0].expect( "for extension nodes, children[0] is guaranteed to be Some when \ child_index > 0; \ @@ -102,8 +103,8 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { }, NodePlan::Branch { value, children } => { Self::complete_branch_children( - node_data, - children, + &node_data, + &children, self.child_index, &mut self.children, )?; @@ -113,10 +114,10 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { ) }, NodePlan::NibbledBranch { partial: partial_plan, value, children } => { - let partial = partial_plan.build(node_data); + let partial = partial_plan.build(&node_data); Self::complete_branch_children( - node_data, - children, + &node_data, + &children, self.child_index, &mut self.children, )?; @@ -140,13 +141,13 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { node_data: &[u8], child_handles: &[Option; NIBBLE_LENGTH], child_index: usize, - children: &mut [Option>], + children: &mut [Option>], ) -> TrieResult<(), C::HashOut, C::Error> { for i in child_index..NIBBLE_LENGTH { children[i] = child_handles[i] .as_ref() .map(|child_plan| { - child_plan.build(node_data).try_into().map_err(|hash| { + child_plan.build(node_data, Default::default()).try_into().map_err(|hash| { Box::new(TrieError::InvalidHash(C::HashOut::default(), hash)) }) }) @@ -172,7 +173,7 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { set_child is called when the only child is popped from the stack; \ child_index is 0 before child is pushed to the stack; qed" ); - Some(Self::replacement_child_ref(encoded_child, child)) + Some(Self::replacement_child_ref(encoded_child, &child)) }, NodePlan::Branch { children, .. } | NodePlan::NibbledBranch { children, .. } => { assert!( @@ -196,10 +197,10 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { fn replacement_child_ref( encoded_child: &[u8], child: &NodeHandlePlan, - ) -> ChildReference { + ) -> ChildReference { match child { - NodeHandlePlan::Hash(_) => ChildReference::Inline(C::HashOut::default(), 0), - NodeHandlePlan::Inline(_) => { + NodeHandlePlan::Hash(..) => ChildReference::Inline(C::HashOut::default(), 0), + NodeHandlePlan::Inline(..) 
=> { let mut hash = C::HashOut::default(); assert!( encoded_child.len() <= hash.as_ref().len(), @@ -224,7 +225,7 @@ pub fn generate_proof<'a, D, L, I, K>( keys: I, ) -> TrieResult>, TrieHash, CError> where - D: HashDBRef, + D: NodeDB, L: TrieLayout, I: IntoIterator, K: 'a + AsRef<[u8]>, @@ -236,7 +237,7 @@ where // The stack of nodes through a path in the trie. Each entry is a child node of the preceding // entry. - let mut stack = >>::new(); + let mut stack = >>::new(); // The mutated trie nodes comprising the final proof. let mut proof_nodes = Vec::new(); @@ -274,9 +275,8 @@ where loop { let step = match stack.last_mut() { - Some(entry) => match_key_to_node::( - entry.node.data(), - entry.node.node_plan(), + Some(entry) => match_key_to_node::( + &entry.node, &mut entry.omit_value, &mut entry.child_index, &mut entry.children, @@ -285,15 +285,17 @@ where &mut recorded_nodes, )?, // If stack is empty, descend into the root node. - None => - Step::Descend { child_prefix_len: 0, child: NodeHandle::Hash(root.as_ref()) }, + None => Step::Descend { + child_prefix_len: 0, + child: NodeHandle::Hash(root.as_ref(), Default::default()), + }, }; match step { Step::Descend { child_prefix_len, child } => { let child_prefix = key.truncate(child_prefix_len); let child_entry = match child { - NodeHandle::Hash(hash) => { + NodeHandle::Hash(hash, _) => { let child_record = recorded_nodes.next().expect( "this function's trie traversal logic mirrors that of Lookup; \ thus the sequence of traversed nodes must be the same; \ @@ -310,6 +312,7 @@ where StackEntry::new( child_prefix, child_record.data, + Vec::new(), Some(child_record.hash), Some(output_index), )? @@ -321,7 +324,7 @@ where data.to_vec(), ))) } - StackEntry::new(child_prefix, data.to_vec(), None, None)? + StackEntry::new(child_prefix, data.to_vec(), Vec::new(), None, None)? }, }; stack.push(child_entry); @@ -330,7 +333,7 @@ where assert_eq!( Some(&value), expected_value.as_ref(), - "expected_value is found using `trie_db::Lookup`; \ + "expected_value is found using `subtrie::Lookup`; \ value is found by traversing the same nodes recorded during the lookup \ using the same logic; \ thus the values found must be equal" @@ -349,7 +352,7 @@ where assert_eq!( value, expected_value.as_ref().map(|v| v.as_ref()), - "expected_value is found using `trie_db::Lookup`; \ + "expected_value is found using `subtrie::Lookup`; \ value is found by traversing the same nodes recorded during the lookup \ using the same logic; \ thus the values found must be equal" @@ -368,19 +371,19 @@ where } } - unwind_stack::(&mut stack, &mut proof_nodes, None)?; + unwind_stack::(&mut stack, &mut proof_nodes, None)?; Ok(proof_nodes) } -enum Step<'a> { - Descend { child_prefix_len: usize, child: NodeHandle<'a> }, +enum Step<'a, L> { + Descend { child_prefix_len: usize, child: NodeHandle<'a, L> }, FoundValue(Option<&'a [u8]>), FoundHashedValue(Vec), } -fn resolve_value( +fn resolve_value( recorded_nodes: &mut dyn Iterator>, -) -> TrieResult, C::HashOut, C::Error> { +) -> TrieResult, C::HashOut, C::Error> { if let Some(resolve_value) = recorded_nodes.next() { Ok(Step::FoundHashedValue(resolve_value.data)) } else { @@ -390,49 +393,44 @@ fn resolve_value( /// Determine the next algorithmic step to take by matching the current key against the current top /// entry on the stack. 
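Note on the backend change driving this file: proof generation now reads trie nodes through the `NodeDB` trait introduced earlier in this patch instead of `hash_db::HashDBRef`, and every hash handle carries a location. Below is a minimal map-backed sketch of that trait; it assumes the generic order is `NodeDB<H, T, L>` and that only `get` must be provided (the rendered diff drops some generic parameters), and `MapNodeDB` is an illustrative name, not part of the crate.

```rust
use std::collections::HashMap;

use subtrie::node_db::{Hasher, NodeDB, Prefix};

/// Illustrative hash-keyed node store. Using `()` as the location type means
/// "no locations": `get` returns an empty child-location list and callers fall
/// back to plain hash lookups.
struct MapNodeDB<H: Hasher> {
    nodes: HashMap<H::Out, Vec<u8>>,
}

impl<H: Hasher> NodeDB<H, Vec<u8>, ()> for MapNodeDB<H> {
    fn get(&self, key: &H::Out, _prefix: Prefix, _location: ()) -> Option<(Vec<u8>, Vec<()>)> {
        // `contains` and `hash` keep their default implementations from the trait.
        self.nodes.get(key).map(|node| (node.clone(), Vec::new()))
    }
}
```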
-fn match_key_to_node<'a, C: NodeCodec>( - node_data: &'a [u8], - node_plan: &NodePlan, +fn match_key_to_node<'a, C: NodeCodec, L: Copy + Default>( + node: &'a OwnedNode, L>, omit_value: &mut bool, child_index: &mut usize, - children: &mut [Option>], - key: &LeftNibbleSlice, + children: &mut [Option>], + key: &'a LeftNibbleSlice, prefix_len: usize, recorded_nodes: &mut dyn Iterator>, -) -> TrieResult, C::HashOut, C::Error> { - Ok(match node_plan { - NodePlan::Empty => Step::FoundValue(None), - NodePlan::Leaf { partial: partial_plan, value: value_range } => { - let partial = partial_plan.build(node_data); +) -> TrieResult, C::HashOut, C::Error> { + let node = node.node(); + Ok(match node { + Node::Empty => Step::FoundValue(None), + Node::Leaf(partial, value) => { if key.contains(&partial, prefix_len) && key.len() == prefix_len + partial.len() { - match value_range { - ValuePlan::Inline(value_range) => { + match value { + Value::Inline(data) => { *omit_value = true; - Step::FoundValue(Some(&node_data[value_range.clone()])) + Step::FoundValue(Some(data)) }, - ValuePlan::Node(..) => { + Value::Node(..) => { *omit_value = true; - resolve_value::(recorded_nodes)? + resolve_value::(recorded_nodes)? }, } } else { Step::FoundValue(None) } }, - NodePlan::Extension { partial: partial_plan, child: child_plan } => { - let partial = partial_plan.build(node_data); + Node::Extension(partial, child) => if key.contains(&partial, prefix_len) { assert_eq!(*child_index, 0); let child_prefix_len = prefix_len + partial.len(); - let child = child_plan.build(&node_data); Step::Descend { child_prefix_len, child } } else { Step::FoundValue(None) - } - }, - NodePlan::Branch { value, children: child_handles } => match_key_to_branch_node::( - node_data, - value.as_ref(), + }, + Node::Branch(child_handles, value) => match_key_to_branch_node::( + value.clone(), &child_handles, omit_value, child_index, @@ -442,47 +440,44 @@ fn match_key_to_node<'a, C: NodeCodec>( NibbleSlice::new(&[]), recorded_nodes, )?, - NodePlan::NibbledBranch { partial: partial_plan, value, children: child_handles } => - match_key_to_branch_node::( - node_data, - value.as_ref(), - &child_handles, - omit_value, - child_index, - children, - key, - prefix_len, - partial_plan.build(node_data), - recorded_nodes, - )?, + Node::NibbledBranch(partial, child_handles, value) => match_key_to_branch_node::( + value.clone(), + &child_handles, + omit_value, + child_index, + children, + key, + prefix_len, + partial, + recorded_nodes, + )?, }) } -fn match_key_to_branch_node<'a, 'b, C: NodeCodec>( - node_data: &'a [u8], - value_range: Option<&'b ValuePlan>, - child_handles: &'b [Option; NIBBLE_LENGTH], +fn match_key_to_branch_node<'a, 'b, C: NodeCodec, L: Copy + Default>( + value: Option>, + child_handles: &'b [Option>; NIBBLE_LENGTH], omit_value: &mut bool, child_index: &mut usize, - children: &mut [Option>], - key: &'b LeftNibbleSlice<'b>, + children: &mut [Option>], + key: &'b LeftNibbleSlice<'a>, prefix_len: usize, - partial: NibbleSlice<'b>, + partial: NibbleSlice<'a>, recorded_nodes: &mut dyn Iterator>, -) -> TrieResult, C::HashOut, C::Error> { +) -> TrieResult, C::HashOut, C::Error> { if !key.contains(&partial, prefix_len) { return Ok(Step::FoundValue(None)) } if key.len() == prefix_len + partial.len() { - let value = match value_range { - Some(ValuePlan::Inline(range)) => { + let value = match value { + Some(Value::Inline(data)) => { *omit_value = true; - Some(&node_data[range.clone()]) + Some(data) }, - Some(ValuePlan::Node(..)) => { + Some(Value::Node(_, 
_)) => { *omit_value = true; - return resolve_value::(recorded_nodes) + return resolve_value::(recorded_nodes) }, None => None, }; @@ -499,20 +494,17 @@ fn match_key_to_branch_node<'a, 'b, C: NodeCodec>( while *child_index < new_index { children[*child_index] = child_handles[*child_index] .as_ref() - .map(|child_plan| { - child_plan - .build(node_data) + .map(|child| { + child + .clone() .try_into() .map_err(|hash| Box::new(TrieError::InvalidHash(C::HashOut::default(), hash))) }) .transpose()?; *child_index += 1; } - if let Some(child_plan) = &child_handles[*child_index] { - Ok(Step::Descend { - child_prefix_len: prefix_len + partial.len() + 1, - child: child_plan.build(node_data), - }) + if let Some(child) = &child_handles[*child_index] { + Ok(Step::Descend { child_prefix_len: prefix_len + partial.len() + 1, child: child.clone() }) } else { Ok(Step::FoundValue(None)) } @@ -521,8 +513,8 @@ fn match_key_to_branch_node<'a, 'b, C: NodeCodec>( /// Unwind the stack until the given key is prefixed by the entry at the top of the stack. If the /// key is None, unwind the stack completely. As entries are popped from the stack, they are /// encoded into proof nodes and added to the finalized proof. -fn unwind_stack( - stack: &mut Vec>, +fn unwind_stack( + stack: &mut Vec>, proof_nodes: &mut Vec>, maybe_key: Option<&LeftNibbleSlice>, ) -> TrieResult<(), C::HashOut, C::Error> { diff --git a/trie-db/src/proof/mod.rs b/subtrie/src/proof/mod.rs similarity index 100% rename from trie-db/src/proof/mod.rs rename to subtrie/src/proof/mod.rs diff --git a/trie-db/src/proof/verify.rs b/subtrie/src/proof/verify.rs similarity index 96% rename from trie-db/src/proof/verify.rs rename to subtrie/src/proof/verify.rs index fedd0579..d0e0cde1 100644 --- a/trie-db/src/proof/verify.rs +++ b/subtrie/src/proof/verify.rs @@ -18,10 +18,10 @@ use crate::{ nibble::LeftNibbleSlice, nibble_ops::NIBBLE_LENGTH, node::{Node, NodeHandle, Value}, + node_db::Hasher, rstd::{convert::TryInto, iter::Peekable, marker::PhantomData, result::Result, vec, vec::Vec}, CError, ChildReference, NodeCodec, TrieHash, TrieLayout, }; -use hash_db::Hasher; /// Errors that may occur during proof verification. Most of the errors types simply indicate that /// the proof is invalid with respect to the statement being verified, and the exact error type can @@ -89,15 +89,15 @@ impl std::error::Error for struct StackEntry<'a, L: TrieLayout> { /// The prefix is the nibble path to the node in the trie. prefix: LeftNibbleSlice<'a>, - node: Node<'a>, + node: Node<'a, ()>, is_inline: bool, /// The value associated with this trie node. - value: Option>, + value: Option>, /// The next entry in the stack is a child of the preceding entry at this index. For branch /// nodes, the index is in [0, NIBBLE_LENGTH] and for extension nodes, the index is in [0, 1]. child_index: usize, /// The child references to use in reconstructing the trie nodes. - children: Vec>>>, + children: Vec, ()>>>, /// Technical to attach lifetime to entry. next_value_hash: Option>, _marker: PhantomData, @@ -109,7 +109,7 @@ impl<'a, L: TrieLayout> StackEntry<'a, L> { prefix: LeftNibbleSlice<'a>, is_inline: bool, ) -> Result, CError>> { - let node = L::Codec::decode(&node_data[..]).map_err(Error::DecodeError)?; + let node = L::Codec::decode(&node_data[..], &[]).map_err(Error::DecodeError)?; let children_len = match &node { Node::Empty | Node::Leaf(..) => 0, Node::Extension(..) 
=> 1, @@ -132,9 +132,9 @@ impl<'a, L: TrieLayout> StackEntry<'a, L> { }) } - fn value(&self) -> Option { + fn value(&self) -> Option> { if let Some(hash) = self.next_value_hash.as_ref() { - Some(Value::Node(hash.as_ref())) + Some(Value::Node(hash.as_ref(), ())) } else { self.value.clone() } @@ -226,7 +226,7 @@ impl<'a, L: TrieLayout> StackEntry<'a, L> { fn make_child_entry( proof_iter: &mut I, - child: NodeHandle<'a>, + child: NodeHandle<'a, ()>, prefix: LeftNibbleSlice<'a>, ) -> Result, CError>> where @@ -240,7 +240,7 @@ impl<'a, L: TrieLayout> StackEntry<'a, L> { } else { StackEntry::new(data, prefix, true) }, - NodeHandle::Hash(data) => { + NodeHandle::Hash(data, _) => { let mut hash = TrieHash::::default(); if data.len() != hash.as_ref().len() { return Err(Error::InvalidChildReference(data.to_vec())) @@ -323,7 +323,7 @@ enum ValueMatch<'a> { fn match_key_to_node<'a>( key: &LeftNibbleSlice<'a>, prefix_len: usize, - node: &Node, + node: &Node<()>, ) -> ValueMatch<'a> { match node { Node::Empty => ValueMatch::NotFound, @@ -365,8 +365,8 @@ fn match_key_to_node<'a>( fn match_key_to_branch_node<'a>( key: &LeftNibbleSlice<'a>, prefix_plus_partial_len: usize, - children: &[Option; NIBBLE_LENGTH], - value: Option<&Value>, + children: &[Option>; NIBBLE_LENGTH], + value: Option<&Value<()>>, ) -> ValueMatch<'a> { if key.len() == prefix_plus_partial_len { if value.is_none() { @@ -455,7 +455,7 @@ where ChildReference::Inline(hash, node_data.len()) } else { let hash = L::Hash::hash(&node_data); - ChildReference::Hash(hash) + ChildReference::Hash(hash, ()) }; if let Some(entry) = stack.pop() { @@ -467,7 +467,7 @@ where return Err(Error::ExtraneousNode) } let computed_root = match child_ref { - ChildReference::Hash(hash) => hash, + ChildReference::Hash(hash, _) => hash, ChildReference::Inline(_, _) => panic!("the bottom item on the stack has is_inline = false; qed"), }; diff --git a/trie-db/src/recorder.rs b/subtrie/src/recorder.rs similarity index 79% rename from trie-db/src/recorder.rs rename to subtrie/src/recorder.rs index ba4c77e5..1c081d7c 100644 --- a/trie-db/src/recorder.rs +++ b/subtrie/src/recorder.rs @@ -14,8 +14,10 @@ //! Trie query recorder. -use crate::{rstd::vec::Vec, RecordedForKey, TrieAccess, TrieHash, TrieLayout, TrieRecorder}; -use hashbrown::HashMap; +use crate::{ + rstd::{vec::Vec, BTreeMap}, + RecordedForKey, TrieAccess, TrieHash, TrieLayout, TrieRecorder, +}; /// The record of a visited node. #[cfg_attr(feature = "std", derive(Debug))] @@ -31,7 +33,7 @@ pub struct Record { #[cfg_attr(feature = "std", derive(Debug))] pub struct Recorder { nodes: Vec>>, - recorded_keys: HashMap, RecordedForKey>, + recorded_keys: BTreeMap, RecordedForKey>, } impl Default for Recorder { @@ -53,8 +55,8 @@ impl Recorder { } } -impl TrieRecorder> for Recorder { - fn record<'a>(&mut self, access: TrieAccess<'a, TrieHash>) { +impl TrieRecorder, L::Location> for Recorder { + fn record<'a>(&mut self, access: TrieAccess<'a, TrieHash, L::Location>) { match access { TrieAccess::EncodedNode { hash, encoded_node, .. 
} => { self.nodes.push(Record { hash, data: encoded_node.to_vec() }); @@ -64,14 +66,17 @@ impl TrieRecorder> for Recorder { }, TrieAccess::Value { hash, value, full_key } => { self.nodes.push(Record { hash, data: value.to_vec() }); - self.recorded_keys.entry(full_key.to_vec()).insert(RecordedForKey::Value); + self.recorded_keys.insert(full_key.to_vec(), RecordedForKey::Value); }, TrieAccess::Hash { full_key } => { self.recorded_keys.entry(full_key.to_vec()).or_insert(RecordedForKey::Hash); }, TrieAccess::NonExisting { full_key } => { // We handle the non existing value/hash like having recorded the value. - self.recorded_keys.entry(full_key.to_vec()).insert(RecordedForKey::Value); + self.recorded_keys.insert(full_key.to_vec(), RecordedForKey::Value); + }, + TrieAccess::InlineValue { full_key } => { + self.recorded_keys.insert(full_key.to_vec(), RecordedForKey::Value); }, } } diff --git a/test-support/trie-standardmap/src/lib.rs b/subtrie/src/test_utils.rs similarity index 96% rename from test-support/trie-standardmap/src/lib.rs rename to subtrie/src/test_utils.rs index 8e01ad12..8e503bdd 100644 --- a/test-support/trie-standardmap/src/lib.rs +++ b/subtrie/src/test_utils.rs @@ -12,12 +12,11 @@ // See the License for the specific language governing permissions and // limitations under the License. -//! Key-value datastore with a modified Merkle tree. +//! Test utilities. -use hash_db::Hasher; -use keccak_hasher::KeccakHasher; +use crate::{keccak_hasher::KeccakHasher, node_db::Hasher}; -type H256 = ::Out; +type H256 = ::Out; /// Alphabet to use when creating words for insertion into tries. pub enum Alphabet { diff --git a/trie-db/src/trie_codec.rs b/subtrie/src/trie_codec.rs similarity index 87% rename from trie-db/src/trie_codec.rs rename to subtrie/src/trie_codec.rs index 9a1f51b3..c401f55c 100644 --- a/trie-db/src/trie_codec.rs +++ b/subtrie/src/trie_codec.rs @@ -26,21 +26,22 @@ //! trie. use crate::{ + memory_db::{HashKey, MemoryDB}, nibble_ops::NIBBLE_LENGTH, node::{Node, NodeHandle, NodeHandlePlan, NodePlan, OwnedNode, ValuePlan}, + node_db::Prefix, rstd::{boxed::Box, convert::TryInto, marker::PhantomData, result, sync::Arc, vec, vec::Vec}, CError, ChildReference, DBValue, NibbleVec, NodeCodec, Result, TrieDB, TrieDBRawIterator, TrieError, TrieHash, TrieLayout, }; -use hash_db::{HashDB, Prefix}; -const OMIT_VALUE_HASH: crate::node::Value<'static> = crate::node::Value::Inline(&[]); +const OMIT_VALUE_HASH: crate::node::Value<'static, ()> = crate::node::Value::Inline(&[]); -struct EncoderStackEntry { +struct EncoderStackEntry { /// The prefix is the nibble path to the node in the trie. prefix: NibbleVec, /// Node in memory content. - node: Arc>, + node: Arc>, /// The next entry in the stack is a child of the preceding entry at this index. For branch /// nodes, the index is in [0, NIBBLE_LENGTH] and for extension nodes, the index is in [0, 1]. child_index: usize, @@ -54,7 +55,7 @@ struct EncoderStackEntry { _marker: PhantomData, } -impl EncoderStackEntry { +impl EncoderStackEntry { /// Given the prefix of the next child node, identify its index and advance `child_index` to /// that. For a given entry, this must be called sequentially only with strictly increasing /// child prefixes. 
Returns an error if the child prefix is not a child of this entry or if @@ -116,14 +117,14 @@ impl EncoderStackEntry { node_data.to_vec() } else { let partial = partial.build(node_data); - let empty_child = ChildReference::Inline(C::HashOut::default(), 0); + let empty_child = ChildReference::<_, ()>::Inline(C::HashOut::default(), 0); C::extension_node(partial.right_iter(), partial.len(), empty_child) }, NodePlan::Branch { value, children } => { let value = if self.omit_value { value.is_some().then_some(OMIT_VALUE_HASH) } else { - value.as_ref().map(|v| v.build(node_data)) + value.as_ref().map(|v| v.build(node_data, ())) }; C::branch_node( Self::branch_children(node_data, &children, &self.omit_children)?.iter(), @@ -135,7 +136,7 @@ impl EncoderStackEntry { let value = if self.omit_value { value.is_some().then_some(OMIT_VALUE_HASH) } else { - value.as_ref().map(|v| v.build(node_data)) + value.as_ref().map(|v| v.build(node_data, ())) }; C::branch_node_nibbled( partial.right_iter(), @@ -165,14 +166,14 @@ impl EncoderStackEntry { node_data: &[u8], child_handles: &[Option; NIBBLE_LENGTH], omit_children: &[bool], - ) -> Result<[Option>; NIBBLE_LENGTH], C::HashOut, C::Error> { + ) -> Result<[Option>; NIBBLE_LENGTH], C::HashOut, C::Error> { let empty_child = ChildReference::Inline(C::HashOut::default(), 0); let mut children = [None; NIBBLE_LENGTH]; for i in 0..NIBBLE_LENGTH { children[i] = if omit_children[i] { Some(empty_child) } else if let Some(child_plan) = &child_handles[i] { - let child_ref = child_plan.build(node_data).try_into().map_err(|hash| { + let child_ref = child_plan.build(node_data, ()).try_into().map_err(|hash| { Box::new(TrieError::InvalidHash(C::HashOut::default(), hash)) })?; Some(child_ref) @@ -192,13 +193,17 @@ fn detached_value( value: &ValuePlan, node_data: &[u8], node_prefix: Prefix, + location: L::Location, ) -> Option> { let fetched; match value { ValuePlan::Node(hash_plan) => { - if let Ok(value) = - TrieDBRawIterator::fetch_value(db, &node_data[hash_plan.clone()], node_prefix) - { + if let Ok(value) = TrieDBRawIterator::fetch_value( + db, + &node_data[hash_plan.clone()], + node_prefix, + location, + ) { fetched = value; } else { return None @@ -224,7 +229,7 @@ where // The stack of nodes through a path in the trie. Each entry is a child node of the preceding // entry. - let mut stack: Vec> = Vec::new(); + let mut stack: Vec> = Vec::new(); // TrieDBRawIterator guarantees that: // - It yields at least one node. @@ -236,7 +241,7 @@ where // iteration of the loop below, the stack always has at least one entry and the bottom (front) // of the stack is the root node, which is not inline. Furthermore, the iterator is not empty, // so at least one iteration always occurs. - while let Some(item) = iter.next_raw_item(db) { + while let Some(item) = iter.next_raw_item(db, true) { match item { Ok((prefix, node_hash, node)) => { // Skip inline nodes, as they cannot contain hash references to other nodes by @@ -268,12 +273,28 @@ where let (children_len, detached_value) = match node.node_plan() { NodePlan::Empty => (0, None), - NodePlan::Leaf { value, .. } => - (0, detached_value(db, value, node.data(), prefix.as_prefix())), + NodePlan::Leaf { value, .. } => ( + 0, + detached_value( + db, + &value, + node.data(), + prefix.as_prefix(), + node.locations().first().copied().unwrap_or_default(), + ), + ), NodePlan::Extension { .. } => (1, None), NodePlan::NibbledBranch { value: Some(value), .. } | - NodePlan::Branch { value: Some(value), .. 
} => - (NIBBLE_LENGTH, detached_value(db, value, node.data(), prefix.as_prefix())), + NodePlan::Branch { value: Some(value), .. } => ( + NIBBLE_LENGTH, + detached_value( + db, + &value, + node.data(), + prefix.as_prefix(), + node.locations().last().copied().unwrap_or_default(), + ), + ), NodePlan::NibbledBranch { value: None, .. } | NodePlan::Branch { value: None, .. } => (NIBBLE_LENGTH, None), }; @@ -311,19 +332,19 @@ where Ok(output) } -struct DecoderStackEntry<'a, C: NodeCodec> { - node: Node<'a>, +struct DecoderStackEntry<'a, C: NodeCodec, L> { + node: Node<'a, L>, /// The next entry in the stack is a child of the preceding entry at this index. For branch /// nodes, the index is in [0, NIBBLE_LENGTH] and for extension nodes, the index is in [0, 1]. child_index: usize, /// The reconstructed child references. - children: Vec>>, + children: Vec>>, /// A value attached as a node. The node will need to use its hash as value. attached_value: Option<&'a [u8]>, _marker: PhantomData, } -impl<'a, C: NodeCodec> DecoderStackEntry<'a, C> { +impl<'a, C: NodeCodec, L: Copy + Default> DecoderStackEntry<'a, C, L> { /// Advance the child index until either it exceeds the number of children or the child is /// marked as omitted. Omitted children are indicated by an empty inline reference. For each /// child that is passed over and not omitted, copy over the child reference from the node to @@ -407,7 +428,7 @@ impl<'a, C: NodeCodec> DecoderStackEntry<'a, C> { /// Preconditions: /// - if node is an extension node, then `children[0]` is Some. fn encode_node(self, attached_hash: Option<&[u8]>) -> Vec { - let attached_hash = attached_hash.map(|h| crate::node::Value::Node(h)); + let attached_hash = attached_hash.map(|h| crate::node::Value::Node(h, Default::default())); match self.node { Node::Empty => C::empty_node().to_vec(), Node::Leaf(partial, value) => @@ -433,40 +454,38 @@ impl<'a, C: NodeCodec> DecoderStackEntry<'a, C> { /// Reconstructs a partial trie DB from a compact representation. The encoding is a vector of /// mutated trie nodes with those child references omitted. The decode function reads them in order -/// from the given slice, reconstructing the full nodes and inserting them into the given `HashDB`. -/// It stops after fully constructing one partial trie and returns the root hash and the number of -/// nodes read. If an error occurs during decoding, there are no guarantees about which entries -/// were or were not added to the DB. +/// from the given slice, reconstructing the full nodes and inserting them into the given +/// `MemoryDB`. It stops after fully constructing one partial trie and returns the root hash and the +/// number of nodes read. If an error occurs during decoding, there are no guarantees about which +/// entries were or were not added to the DB. /// /// The number of nodes read may be fewer than the total number of items in `encoded`. This allows /// one to concatenate multiple compact encodings together and still reconstruct them all. // /// This function makes the assumption that all child references in an inline trie node are inline /// references. -pub fn decode_compact( - db: &mut DB, +pub fn decode_compact( + db: &mut MemoryDB, DBValue>, encoded: &[Vec], ) -> Result<(TrieHash, usize), TrieHash, CError> where L: TrieLayout, - DB: HashDB, { - decode_compact_from_iter::(db, encoded.iter().map(Vec::as_slice)) + decode_compact_from_iter::(db, encoded.iter().map(Vec::as_slice)) } /// Variant of 'decode_compact' that accept an iterator of encoded nodes as input. 
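`decode_compact` now targets a concrete `MemoryDB` rather than a generic `HashDB`. A rough verifier-side sketch follows; it assumes the function is still re-exported at the crate root as in `trie-db`, that the memory db is parameterised as `MemoryDB<L::Hash, HashKey<L::Hash>, DBValue>` (matching the `HashKey` import above), and `restore_root` is an illustrative name. The filled `MemoryDB` can then back a `TrieDB` for reads.

```rust
use subtrie::{
    decode_compact,
    memory_db::{HashKey, MemoryDB},
    DBValue, TrieHash, TrieLayout,
};

/// Rebuild a partial trie from compact-encoded proof nodes and return its root.
/// The caller compares `root` against a trusted root hash before using the db.
fn restore_root<L: TrieLayout>(encoded: &[Vec<u8>]) -> Option<TrieHash<L>> {
    let mut db = MemoryDB::<L::Hash, HashKey<L::Hash>, DBValue>::default();
    // Fills `db` with the reconstructed nodes and reports how many encoded
    // items were consumed (extra items may belong to a concatenated proof).
    let (root, _used) = decode_compact::<L>(&mut db, encoded).ok()?;
    Some(root)
}
```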
-pub fn decode_compact_from_iter<'a, L, DB, I>( - db: &mut DB, +pub fn decode_compact_from_iter<'a, L, I>( + db: &mut MemoryDB, DBValue>, encoded: I, ) -> Result<(TrieHash, usize), TrieHash, CError> where L: TrieLayout, - DB: HashDB, I: IntoIterator, { // The stack of nodes through a path in the trie. Each entry is a child node of the preceding // entry. - let mut stack: Vec> = Vec::new(); + let mut stack: Vec> = Vec::new(); // The prefix of the next item to be read from the slice of encoded items. let mut prefix = NibbleVec::new(); @@ -479,7 +498,7 @@ where attached_node = 1; } } - let node = L::Codec::decode(&encoded_node[attached_node..]) + let node = L::Codec::decode(&encoded_node[attached_node..], &[]) .map_err(|err| Box::new(TrieError::DecoderError(>::default(), err)))?; let children_len = match node { @@ -523,7 +542,8 @@ where if let Some(entry) = stack.pop() { last_entry = entry; last_entry.pop_from_prefix(&mut prefix); - last_entry.children[last_entry.child_index] = Some(ChildReference::Hash(node_hash)); + last_entry.children[last_entry.child_index] = + Some(ChildReference::Hash(node_hash, Default::default())); last_entry.child_index += 1; } else { return Ok((node_hash, i + 1)) diff --git a/trie-root/src/lib.rs b/subtrie/src/trie_root.rs similarity index 96% rename from trie-root/src/lib.rs rename to subtrie/src/trie_root.rs index e184cfbb..d27f1641 100644 --- a/trie-root/src/lib.rs +++ b/subtrie/src/trie_root.rs @@ -16,32 +16,10 @@ //! //! This module should be used to generate trie root hash. -#![cfg_attr(not(feature = "std"), no_std)] - -#[cfg(not(feature = "std"))] -extern crate alloc; - -#[cfg(feature = "std")] -mod rstd { - pub use std::{ - cmp, - collections::{BTreeMap, VecDeque}, - vec::Vec, - }; -} - -#[cfg(not(feature = "std"))] -mod rstd { - pub use alloc::{ - collections::{BTreeMap, VecDeque}, - vec::Vec, - }; - pub use core::cmp; -} - -use self::rstd::*; - -pub use hash_db::Hasher; +use crate::{ + node_db::Hasher, + rstd::{cmp, vec::Vec, BTreeMap}, +}; /// Different possible value to use for node encoding. #[derive(Clone)] diff --git a/trie-db/src/triedb.rs b/subtrie/src/triedb.rs similarity index 65% rename from trie-db/src/triedb.rs rename to subtrie/src/triedb.rs index ad6166eb..9d649615 100644 --- a/trie-db/src/triedb.rs +++ b/subtrie/src/triedb.rs @@ -13,13 +13,15 @@ // limitations under the License. use crate::{ - iterator::TrieDBRawIterator, + iterator::{TrieDBNodeDoubleEndedIterator, TrieDBRawIterator}, lookup::Lookup, nibble::NibbleSlice, node::{decode_hash, NodeHandle, OwnedNode}, + node_db::{NodeDB, Prefix, EMPTY_PREFIX}, rstd::boxed::Box, - CError, DBValue, Query, Result, Trie, TrieAccess, TrieCache, TrieError, TrieHash, TrieItem, - TrieIterator, TrieKeyItem, TrieLayout, TrieRecorder, + CError, DBValue, MerkleValue, Query, Result, Trie, TrieAccess, TrieCache, + TrieDoubleEndedIterator, TrieError, TrieHash, TrieItem, TrieIterator, TrieKeyItem, TrieLayout, + TrieRecorder, }; #[cfg(feature = "std")] use crate::{ @@ -27,14 +29,14 @@ use crate::{ node::Node, rstd::{fmt, vec::Vec}, }; -use hash_db::{HashDBRef, Prefix, EMPTY_PREFIX}; /// A builder for creating a [`TrieDB`]. 
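The read-only `TrieDBBuilder` below becomes location aware, and its cache/recorder hooks now take `L::Location`. A hedged usage sketch, assuming `Recorder` keeps the export path and `Default` impl it had in `trie-db` (its `TrieRecorder` impl is shown earlier in this patch) and that `recorded_get` is an illustrative helper name:

```rust
use subtrie::{
    node_db::NodeDB, recorder::Recorder, DBValue, Trie, TrieDBBuilder, TrieHash, TrieLayout,
};

/// Look up one key while recording every node the lookup touches; the recorder
/// can later feed proof generation or cache warm-up.
fn recorded_get<L: TrieLayout>(
    db: &dyn NodeDB<L::Hash, DBValue, L::Location>,
    root: &TrieHash<L>,
    key: &[u8],
) -> Option<DBValue> {
    let mut recorder = Recorder::<L>::default();
    let trie = TrieDBBuilder::<L>::new(db, root)
        .with_recorder(&mut recorder)
        .build();
    // `Recorder` implements `TrieRecorder<TrieHash<L>, L::Location>`, so it
    // receives a `TrieAccess` for each node visited during `get`.
    trie.get(key).ok().flatten()
}
```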
pub struct TrieDBBuilder<'db, 'cache, L: TrieLayout> { - db: &'db dyn HashDBRef, + db: &'db dyn NodeDB, root: &'db TrieHash, - cache: Option<&'cache mut dyn TrieCache>, - recorder: Option<&'cache mut dyn TrieRecorder>>, + root_location: L::Location, + cache: Option<&'cache mut dyn TrieCache>, + recorder: Option<&'cache mut dyn TrieRecorder, L::Location>>, } impl<'db, 'cache, L: TrieLayout> TrieDBBuilder<'db, 'cache, L> { @@ -43,13 +45,23 @@ impl<'db, 'cache, L: TrieLayout> TrieDBBuilder<'db, 'cache, L> { /// This doesn't check if `root` exists in the given `db`. If `root` doesn't exist it will fail /// when trying to lookup any key. #[inline] - pub fn new(db: &'db dyn HashDBRef, root: &'db TrieHash) -> Self { - Self { db, root, cache: None, recorder: None } + pub fn new(db: &'db dyn NodeDB, root: &'db TrieHash) -> Self { + Self { db, root, cache: None, recorder: None, root_location: Default::default() } + } + + /// Same as `new` but indicating db location of root. Warning root hash will not be checked. + #[inline] + pub fn new_with_db_location( + db: &'db dyn NodeDB, + root: &'db TrieHash, + root_location: L::Location, + ) -> Self { + Self { db, root, cache: None, recorder: None, root_location } } /// Use the given `cache` for the db. #[inline] - pub fn with_cache(mut self, cache: &'cache mut dyn TrieCache) -> Self { + pub fn with_cache(mut self, cache: &'cache mut dyn TrieCache) -> Self { self.cache = Some(cache); self } @@ -58,7 +70,7 @@ impl<'db, 'cache, L: TrieLayout> TrieDBBuilder<'db, 'cache, L> { #[inline] pub fn with_optional_cache<'ocache: 'cache>( mut self, - cache: Option<&'ocache mut dyn TrieCache>, + cache: Option<&'ocache mut dyn TrieCache>, ) -> Self { // Make the compiler happy by "converting" the lifetime self.cache = cache.map(|c| c as _); @@ -67,7 +79,10 @@ impl<'db, 'cache, L: TrieLayout> TrieDBBuilder<'db, 'cache, L> { /// Use the given `recorder` to record trie accesses. #[inline] - pub fn with_recorder(mut self, recorder: &'cache mut dyn TrieRecorder>) -> Self { + pub fn with_recorder( + mut self, + recorder: &'cache mut dyn TrieRecorder, L::Location>, + ) -> Self { self.recorder = Some(recorder); self } @@ -76,7 +91,7 @@ impl<'db, 'cache, L: TrieLayout> TrieDBBuilder<'db, 'cache, L> { #[inline] pub fn with_optional_recorder<'recorder: 'cache>( mut self, - recorder: Option<&'recorder mut dyn TrieRecorder>>, + recorder: Option<&'recorder mut dyn TrieRecorder, L::Location>>, ) -> Self { // Make the compiler happy by "converting" the lifetime self.recorder = recorder.map(|r| r as _); @@ -89,13 +104,14 @@ impl<'db, 'cache, L: TrieLayout> TrieDBBuilder<'db, 'cache, L> { TrieDB { db: self.db, root: self.root, + root_location: self.root_location, cache: self.cache.map(core::cell::RefCell::new), recorder: self.recorder.map(core::cell::RefCell::new), } } } -/// A `Trie` implementation using a generic `HashDB` backing database, a `Hasher` +/// A `Trie` implementation using a generic `NodeDB` backing database, a `Hasher` /// implementation to generate keys and a `NodeCodec` implementation to encode/decode /// the nodes. 
/// @@ -104,9 +120,9 @@ impl<'db, 'cache, L: TrieLayout> TrieDBBuilder<'db, 'cache, L> { /// /// # Example /// ```ignore -/// use hash_db::Hasher; -/// use reference_trie::{RefTrieDBMut, RefTrieDB, Trie, TrieMut}; -/// use trie_db::DBValue; +/// use subtrie::node_db::Hasher; +/// use reference_trie::{RefTrieDBMut, RefTrieDB, Trie}; +/// use subtrie::DBValue; /// use keccak_hasher::KeccakHasher; /// use memory_db::*; /// @@ -121,10 +137,11 @@ pub struct TrieDB<'db, 'cache, L> where L: TrieLayout, { - db: &'db dyn HashDBRef, + db: &'db dyn NodeDB, root: &'db TrieHash, - cache: Option>>, - recorder: Option>>>, + root_location: L::Location, + cache: Option>>, + recorder: Option, L::Location>>>, } impl<'db, 'cache, L> TrieDB<'db, 'cache, L> @@ -132,10 +149,31 @@ where L: TrieLayout, { /// Get the backing database. - pub fn db(&'db self) -> &'db dyn HashDBRef { + pub fn db(&'db self) -> &'db dyn NodeDB { self.db } + /// Create `TrieDBDoubleEndedIterator` from `TrieDB`. + pub fn into_double_ended_iter( + &'db self, + ) -> Result, TrieHash, CError> { + TrieDBDoubleEndedIterator::new(&self) + } + + /// Create `TrieDBNodeDoubleEndedIterator` from `TrieDB`. + pub fn into_node_double_ended_iter( + &'db self, + ) -> Result, TrieHash, CError> { + TrieDBNodeDoubleEndedIterator::new(&self) + } + + /// create `TrieDBKeyDoubleEndedIterator` from `TrieDB`. + pub fn into_key_double_ended_iter( + &'db self, + ) -> Result, TrieHash, CError> { + TrieDBKeyDoubleEndedIterator::new(&self) + } + /// Given some node-describing data `node`, and node key return the actual node RLP. /// This could be a simple identity operation in the case that the node is sufficiently small, /// but may require a database lookup. @@ -150,27 +188,28 @@ where pub(crate) fn get_raw_or_lookup( &self, parent_hash: TrieHash, - node_handle: NodeHandle, + node_handle: NodeHandle, partial_key: Prefix, record_access: bool, - ) -> Result<(OwnedNode, Option>), TrieHash, CError> { - let (node_hash, node_data) = match node_handle { - NodeHandle::Hash(data) => { + ) -> Result<(OwnedNode, Option>), TrieHash, CError> { + let (node_hash, (node_data, locations)) = match node_handle { + NodeHandle::Hash(data, location) => { let node_hash = decode_hash::(data) .ok_or_else(|| Box::new(TrieError::InvalidHash(parent_hash, data.to_vec())))?; - let node_data = self.db.get(&node_hash, partial_key).ok_or_else(|| { - if partial_key == EMPTY_PREFIX { - Box::new(TrieError::InvalidStateRoot(node_hash)) - } else { - Box::new(TrieError::IncompleteDatabase(node_hash)) - } - })?; + let node_data = + self.db.get(&node_hash, partial_key, location).ok_or_else(|| { + if partial_key == EMPTY_PREFIX { + Box::new(TrieError::InvalidStateRoot(node_hash)) + } else { + Box::new(TrieError::IncompleteDatabase(node_hash)) + } + })?; (Some(node_hash), node_data) }, - NodeHandle::Inline(data) => (None, data.to_vec()), + NodeHandle::Inline(data) => (None, (data.to_vec(), Default::default())), }; - let owned_node = OwnedNode::new::(node_data) + let owned_node = OwnedNode::new::(node_data, locations) .map_err(|e| Box::new(TrieError::DecoderError(node_hash.unwrap_or(parent_hash), e)))?; if record_access { @@ -192,10 +231,11 @@ where &self, hash: TrieHash, prefix: Prefix, + location: L::Location, ) -> Result, CError> { - let value = self + let (value, _) = self .db - .get(&hash, prefix) + .get(&hash, prefix, location) .ok_or_else(|| Box::new(TrieError::IncompleteDatabase(hash)))?; if let Some(recorder) = self.recorder.as_ref() { @@ -220,6 +260,10 @@ where self.root } + fn 
root_location(&self) -> L::Location { + self.root_location + } + fn get_hash(&self, key: &[u8]) -> Result>, TrieHash, CError> { let mut cache = self.cache.as_ref().map(|c| c.borrow_mut()); let mut recorder = self.recorder.as_ref().map(|r| r.borrow_mut()); @@ -228,8 +272,11 @@ where db: self.db, query: |_: &[u8]| (), hash: *self.root, - cache: cache.as_mut().map(|c| &mut ***c as &mut dyn TrieCache), - recorder: recorder.as_mut().map(|r| &mut ***r as &mut dyn TrieRecorder>), + location: self.root_location, + cache: cache.as_mut().map(|c| &mut ***c as &mut dyn TrieCache), + recorder: recorder + .as_mut() + .map(|r| &mut ***r as &mut dyn TrieRecorder, L::Location>), } .look_up_hash(key, NibbleSlice::new(key)) } @@ -246,12 +293,35 @@ where db: self.db, query, hash: *self.root, - cache: cache.as_mut().map(|c| &mut ***c as &mut dyn TrieCache), - recorder: recorder.as_mut().map(|r| &mut ***r as &mut dyn TrieRecorder>), + location: self.root_location, + cache: cache.as_mut().map(|c| &mut ***c as &mut dyn TrieCache), + recorder: recorder + .as_mut() + .map(|r| &mut ***r as &mut dyn TrieRecorder, L::Location>), } .look_up(key, NibbleSlice::new(key)) } + fn lookup_first_descendant( + &self, + key: &[u8], + ) -> Result>>, TrieHash, CError> { + let mut cache = self.cache.as_ref().map(|c| c.borrow_mut()); + let mut recorder = self.recorder.as_ref().map(|r| r.borrow_mut()); + + Lookup:: { + db: self.db, + query: |_: &[u8]| (), + hash: *self.root, + location: self.root_location, + cache: cache.as_mut().map(|c| &mut ***c as &mut dyn TrieCache), + recorder: recorder + .as_mut() + .map(|r| &mut ***r as &mut dyn TrieRecorder, L::Location>), + } + .lookup_first_descendant(key, NibbleSlice::new(key)) + } + fn iter<'a>( &'a self, ) -> Result< @@ -280,7 +350,7 @@ where L: TrieLayout, { trie: &'db TrieDB<'db, 'cache, L>, - node_key: NodeHandle<'a>, + node_key: NodeHandle<'a, L::Location>, partial_key: NibbleVec, index: Option, } @@ -289,6 +359,7 @@ where impl<'db, 'cache, 'a, L> fmt::Debug for TrieAwareDebugNode<'db, 'cache, 'a, L> where L: TrieLayout, + L::Location: fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.trie.get_raw_or_lookup( @@ -386,6 +457,7 @@ where impl<'db, 'cache, L> fmt::Debug for TrieDB<'db, 'cache, L> where L: TrieLayout, + L::Location: fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("TrieDB") @@ -393,7 +465,7 @@ where "root", &TrieAwareDebugNode { trie: self, - node_key: NodeHandle::Hash(self.root().as_ref()), + node_key: NodeHandle::Hash(self.root().as_ref(), Default::default()), partial_key: NibbleVec::new(), index: None, }, @@ -414,6 +486,45 @@ pub struct TrieDBKeyIterator<'a, 'cache, L: TrieLayout> { raw_iter: TrieDBRawIterator, } +/// Double ended iterator for going through all of key with values in the trie in pre-order +/// traversal order. +pub struct TrieDBKeyDoubleEndedIterator<'a, 'cache, L: TrieLayout> { + db: &'a TrieDB<'a, 'cache, L>, + raw_iter: TrieDBRawIterator, + back_raw_iter: TrieDBRawIterator, +} + +impl<'a, 'cache, L: TrieLayout> TrieDBKeyDoubleEndedIterator<'a, 'cache, L> { + /// Create a new double ended iterator. + pub fn new(db: &'a TrieDB<'a, 'cache, L>) -> Result, CError> { + Ok(Self { + db, + raw_iter: TrieDBRawIterator::new(db)?, + back_raw_iter: TrieDBRawIterator::new(db)?, + }) + } +} + +/// Double ended iterator for going through all values in the trie in pre-order traversal order. 
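This patch adds double-ended iteration over values, keys and nodes. A small sketch of walking a trie from both ends using the `into_double_ended_iter` helper added above; export paths and the item type (a `Result` over key/value pairs, as for the forward iterator) are assumed to match `trie-db` conventions.

```rust
use subtrie::{node_db::NodeDB, DBValue, TrieDBBuilder, TrieHash, TrieLayout};

/// Print the lexicographically smallest and largest key/value pairs.
fn first_and_last<L: TrieLayout>(
    db: &dyn NodeDB<L::Hash, DBValue, L::Location>,
    root: &TrieHash<L>,
) {
    let trie = TrieDBBuilder::<L>::new(db, root).build();
    if let Ok(mut iter) = trie.into_double_ended_iter() {
        // Items are `Result<(key, value), _>`, exactly as with the forward iterator.
        if let Some(Ok((key, value))) = iter.next() {
            println!("first: {:?} ({} bytes)", key, value.len());
        }
        if let Some(Ok((key, value))) = iter.next_back() {
            println!("last:  {:?} ({} bytes)", key, value.len());
        }
    }
}
```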
+pub struct TrieDBDoubleEndedIterator<'a, 'cache, L: TrieLayout> { + db: &'a TrieDB<'a, 'cache, L>, + raw_iter: TrieDBRawIterator, + back_raw_iter: TrieDBRawIterator, +} + +impl<'a, 'cache, L: TrieLayout> TrieDBDoubleEndedIterator<'a, 'cache, L> { + /// Create a new double ended iterator. + pub fn new(db: &'a TrieDB<'a, 'cache, L>) -> Result, CError> { + Ok(Self { + db, + raw_iter: TrieDBRawIterator::new(db)?, + back_raw_iter: TrieDBRawIterator::new(db)?, + }) + } +} + +impl TrieDoubleEndedIterator for TrieDBDoubleEndedIterator<'_, '_, L> {} + impl<'a, 'cache, L: TrieLayout> TrieDBIterator<'a, 'cache, L> { /// Create a new iterator. pub fn new(db: &'a TrieDB<'a, 'cache, L>) -> Result, CError> { @@ -453,7 +564,36 @@ impl<'a, 'cache, L: TrieLayout> TrieDBIterator<'a, 'cache, L> { impl<'a, 'cache, L: TrieLayout> TrieIterator for TrieDBIterator<'a, 'cache, L> { /// Position the iterator on the first element with key >= `key` fn seek(&mut self, key: &[u8]) -> Result<(), TrieHash, CError> { - self.raw_iter.seek(self.db, key).map(|_| ()) + self.raw_iter.seek(self.db, key, true).map(|_| ()) + } +} + +impl<'a, 'cache, L: TrieLayout> Iterator for TrieDBIterator<'a, 'cache, L> { + type Item = TrieItem, CError>; + + fn next(&mut self) -> Option { + self.raw_iter.next_item(self.db) + } +} + +impl<'a, 'cache, L: TrieLayout> TrieIterator for TrieDBDoubleEndedIterator<'a, 'cache, L> { + fn seek(&mut self, key: &[u8]) -> Result<(), TrieHash, CError> { + self.raw_iter.seek(self.db, key, true).map(|_| ())?; + self.back_raw_iter.seek(self.db, key, false).map(|_| ()) + } +} + +impl<'a, 'cache, L: TrieLayout> Iterator for TrieDBDoubleEndedIterator<'a, 'cache, L> { + type Item = TrieItem, CError>; + + fn next(&mut self) -> Option { + self.raw_iter.next_item(self.db) + } +} + +impl<'a, 'cache, L: TrieLayout> DoubleEndedIterator for TrieDBDoubleEndedIterator<'a, 'cache, L> { + fn next_back(&mut self) -> Option { + self.back_raw_iter.prev_item(self.db) } } @@ -496,22 +636,38 @@ impl<'a, 'cache, L: TrieLayout> TrieDBKeyIterator<'a, 'cache, L> { impl<'a, 'cache, L: TrieLayout> TrieIterator for TrieDBKeyIterator<'a, 'cache, L> { /// Position the iterator on the first element with key >= `key` fn seek(&mut self, key: &[u8]) -> Result<(), TrieHash, CError> { - self.raw_iter.seek(self.db, key).map(|_| ()) + self.raw_iter.seek(self.db, key, true).map(|_| ()) } } -impl<'a, 'cache, L: TrieLayout> Iterator for TrieDBIterator<'a, 'cache, L> { - type Item = TrieItem, CError>; +impl<'a, 'cache, L: TrieLayout> Iterator for TrieDBKeyIterator<'a, 'cache, L> { + type Item = TrieKeyItem, CError>; fn next(&mut self) -> Option { - self.raw_iter.next_item(self.db) + self.raw_iter.next_key(self.db) } } -impl<'a, 'cache, L: TrieLayout> Iterator for TrieDBKeyIterator<'a, 'cache, L> { +impl<'a, 'cache, L: TrieLayout> TrieIterator for TrieDBKeyDoubleEndedIterator<'a, 'cache, L> { + /// Position the iterator on the first element with key >= `key` + fn seek(&mut self, key: &[u8]) -> Result<(), TrieHash, CError> { + self.raw_iter.seek(self.db, key, true).map(|_| ())?; + self.back_raw_iter.seek(self.db, key, false).map(|_| ()) + } +} + +impl<'a, 'cache, L: TrieLayout> Iterator for TrieDBKeyDoubleEndedIterator<'a, 'cache, L> { type Item = TrieKeyItem, CError>; fn next(&mut self) -> Option { self.raw_iter.next_key(self.db) } } + +impl<'a, 'cache, L: TrieLayout> DoubleEndedIterator + for TrieDBKeyDoubleEndedIterator<'a, 'cache, L> +{ + fn next_back(&mut self) -> Option { + self.back_raw_iter.prev_key(self.db) + } +} diff --git 
a/trie-db/src/triedbmut.rs b/subtrie/src/triedbmut.rs similarity index 67% rename from trie-db/src/triedbmut.rs rename to subtrie/src/triedbmut.rs index bf3edbcb..1b1c9c80 100644 --- a/trie-db/src/triedbmut.rs +++ b/subtrie/src/triedbmut.rs @@ -23,13 +23,19 @@ use crate::{ }, node_codec::NodeCodec, rstd::{boxed::Box, convert::TryFrom, mem, ops::Index, result, vec::Vec, VecDeque}, - Bytes, CError, CachedValue, DBValue, Result, TrieAccess, TrieCache, TrieError, TrieHash, - TrieLayout, TrieMut, TrieRecorder, + Bytes, CError, DBValue, Result, TrieAccess, TrieCache, TrieError, TrieHash, TrieLayout, + TrieRecorder, }; -use hash_db::{HashDB, Hasher, Prefix, EMPTY_PREFIX}; -use hashbrown::HashSet; +use crate::node_db::{Hasher, NodeDB, Prefix}; +#[cfg(feature = "std")] +use std::collections::HashSet as Set; + +#[cfg(not(feature = "std"))] +use alloc::collections::btree_set::BTreeSet as Set; + +use crate::memory_db::{KeyFunction, MemoryDB}; #[cfg(feature = "std")] use log::trace; @@ -43,26 +49,28 @@ struct StorageHandle(usize); // Handles to nodes in the trie. #[cfg_attr(feature = "std", derive(Debug))] -enum NodeHandle { +enum NodeHandle { /// Loaded into memory. InMemory(StorageHandle), /// Either a hash or an inline node - Hash(H), + Hash(H, L), } -impl From for NodeHandle { +impl From for NodeHandle { fn from(handle: StorageHandle) -> Self { NodeHandle::InMemory(handle) } } -fn empty_children() -> Box<[Option>; nibble_ops::NIBBLE_LENGTH]> { +fn empty_children() -> Box<[Option>; nibble_ops::NIBBLE_LENGTH]> { Box::new([ None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ]) } +pub type TreeRefChangeset = Box, ::Location>>; + /// Type alias to indicate the nible covers a full key, /// therefore its left side is a full prefix. type NibbleFullKey<'key> = NibbleSlice<'key>; @@ -73,7 +81,7 @@ pub enum Value { /// Value bytes inlined in a trie node. Inline(Bytes), /// Hash of the value. - Node(TrieHash), + Node(TrieHash, L::Location), /// Hash of value bytes if calculated and value bytes. /// The hash may be undefined until it node is added /// to the db. @@ -84,7 +92,7 @@ impl PartialEq for Value { fn eq(&self, other: &Self) -> bool { match (self, other) { (Value::Inline(v), Value::Inline(ov)) => v == ov, - (Value::Node(h), Value::Node(oh)) => h == oh, + (Value::Node(h, _), Value::Node(oh, _)) => h == oh, (Value::NewNode(Some(h), _), Value::NewNode(Some(oh), _)) => h == oh, (Value::NewNode(_, v), Value::NewNode(_, ov)) => v == ov, // Note that for uncalculated hash we do not calculate it and default to true. 
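`Value` in `triedbmut` now distinguishes inline bytes, an already-stored value node (hash plus location), and a new node whose hash is only known at commit time. The choice between inline and detached storage follows the layout's inline-size threshold, as in the `From<(Bytes, Option<u32>)>` conversion a few lines below; the following standalone illustration mirrors that rule with hypothetical types that are not crate API.

```rust
/// Illustration only: mirrors how raw value bytes are classified before commit.
enum StoredValue {
    /// Small enough to be embedded in the trie node encoding.
    Inline(Vec<u8>),
    /// Stored as a separate value node; its hash is filled in when committed.
    Detached(Vec<u8>),
}

/// `max_inline` plays the role of `TrieLayout::MAX_INLINE_VALUE`.
fn classify(value: Vec<u8>, max_inline: Option<u32>) -> StoredValue {
    if max_inline.map_or(false, |t| value.len() >= t as usize) {
        StoredValue::Detached(value)
    } else {
        StoredValue::Inline(value)
    }
}
```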
@@ -94,24 +102,24 @@ impl PartialEq for Value { } } -impl<'a, L: TrieLayout> From> for Value { - fn from(v: EncodedValue<'a>) -> Self { +impl<'a, L: TrieLayout> From> for Value { + fn from(v: EncodedValue<'a, L::Location>) -> Self { match v { EncodedValue::Inline(value) => Value::Inline(value.into()), - EncodedValue::Node(hash) => { + EncodedValue::Node(hash, l) => { let mut h = TrieHash::::default(); h.as_mut().copy_from_slice(hash); - Value::Node(h) + Value::Node(h, l) }, } } } -impl From<&ValueOwned>> for Value { - fn from(val: &ValueOwned>) -> Self { +impl From<&ValueOwned, L::Location>> for Value { + fn from(val: &ValueOwned, L::Location>) -> Self { match val { ValueOwned::Inline(data, _) => Self::Inline(data.clone()), - ValueOwned::Node(hash) => Self::Node(*hash), + ValueOwned::Node(hash, location) => Self::Node(*hash, *location), } } } @@ -120,7 +128,7 @@ impl From<(Bytes, Option)> for Value { fn from((v, threshold): (Bytes, Option)) -> Self { match v { value => - if threshold.map(|threshold| value.len() >= threshold as usize).unwrap_or(false) { + if threshold.map_or(false, |threshold| value.len() >= threshold as usize) { Value::NewNode(None, value) } else { Value::Inline(value) @@ -129,9 +137,9 @@ impl From<(Bytes, Option)> for Value { } } -enum NodeToEncode<'a, H> { +enum NodeToEncode<'a, H, L> { Node(&'a [u8]), - TrieNode(NodeHandle), + TrieNode(NodeHandle), } impl Value { @@ -143,31 +151,35 @@ impl Value { &'a mut self, partial: Option<&NibbleSlice>, f: &mut F, - ) -> EncodedValue<'a> + ) -> EncodedValue<'a, L::Location> where F: FnMut( - NodeToEncode>, + NodeToEncode, L::Location>, Option<&NibbleSlice>, Option, - ) -> ChildReference>, + ) -> ChildReference, L::Location>, { if let Value::NewNode(hash, value) = self { - let new_hash = - if let ChildReference::Hash(hash) = f(NodeToEncode::Node(&value), partial, None) { - hash - } else { - unreachable!("Value node can never be inlined; qed") - }; + let new_hash = if let ChildReference::Hash(hash, _) = + f(NodeToEncode::Node(&value), partial, None) + { + hash + } else { + unreachable!("Value node can never be inlined; qed") + }; if let Some(h) = hash.as_ref() { debug_assert!(h == &new_hash); } else { *hash = Some(new_hash); } + } else if let Value::Node(hash, location) = self { + f(NodeToEncode::TrieNode(NodeHandle::Hash(*hash, *location)), partial, None); } let value = match &*self { Value::Inline(value) => EncodedValue::Inline(&value), - Value::Node(hash) => EncodedValue::Node(hash.as_ref()), - Value::NewNode(Some(hash), _value) => EncodedValue::Node(hash.as_ref()), + Value::Node(hash, location) => EncodedValue::Node(hash.as_ref(), *location), + Value::NewNode(Some(hash), _value) => + EncodedValue::Node(hash.as_ref(), Default::default()), Value::NewNode(None, _value) => unreachable!("New external value are always added before encoding anode"), }; @@ -177,15 +189,15 @@ impl Value { fn in_memory_fetched_value( &self, prefix: Prefix, - db: &dyn HashDB, - recorder: &Option>>>, + db: &dyn NodeDB, + recorder: &Option, L::Location>>>, full_key: &[u8], ) -> Result, TrieHash, CError> { Ok(Some(match self { Value::Inline(value) => value.to_vec(), Value::NewNode(_, value) => value.to_vec(), - Value::Node(hash) => - if let Some(value) = db.get(hash, prefix) { + Value::Node(hash, location) => + if let Some((value, _)) = db.get(hash, prefix, *location) { recorder.as_ref().map(|r| { r.borrow_mut().record(TrieAccess::Value { hash: *hash, @@ -209,19 +221,24 @@ enum Node { /// A leaf node contains the end of a key and a value. 
/// This key is encoded from a `NibbleSlice`, meaning it contains /// a flag indicating it is a leaf. - Leaf(NodeKey, Value), + Leaf(NodeKey, Value, Option>), /// An extension contains a shared portion of a key and a child node. /// The shared portion is encoded from a `NibbleSlice` meaning it contains /// a flag indicating it is an extension. /// The child node is always a branch. - Extension(NodeKey, NodeHandle>), + Extension(NodeKey, NodeHandle, L::Location>), /// A branch has up to 16 children and an optional value. - Branch(Box<[Option>>; nibble_ops::NIBBLE_LENGTH]>, Option>), + Branch( + Box<[Option, L::Location>>; nibble_ops::NIBBLE_LENGTH]>, + Option>, + Option>, + ), /// Branch node with support for a nibble (to avoid extension node). NibbledBranch( NodeKey, - Box<[Option>>; nibble_ops::NIBBLE_LENGTH]>, + Box<[Option, L::Location>>; nibble_ops::NIBBLE_LENGTH]>, Option>, + Option>, ), } @@ -243,7 +260,7 @@ impl Debug for Value { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match self { Self::Inline(value) => write!(fmt, "Some({:?})", ToHex(value)), - Self::Node(hash) => write!(fmt, "Hash({:?})", ToHex(hash.as_ref())), + Self::Node(hash, _l) => write!(fmt, "Hash({:?})", ToHex(hash.as_ref())), Self::NewNode(Some(hash), _) => write!(fmt, "Hash({:?})", ToHex(hash.as_ref())), Self::NewNode(_hash, value) => write!(fmt, "Some({:?})", ToHex(value)), } @@ -254,16 +271,15 @@ impl Debug for Value { impl Debug for Node where L::Hash: Debug, + L::Location: Debug, { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - match *self { + match self { Self::Empty => write!(fmt, "Empty"), - Self::Leaf((ref a, ref b), ref c) => - write!(fmt, "Leaf({:?}, {:?})", (a, ToHex(&*b)), c), - Self::Extension((ref a, ref b), ref c) => - write!(fmt, "Extension({:?}, {:?})", (a, ToHex(&*b)), c), - Self::Branch(ref a, ref b) => write!(fmt, "Branch({:?}, {:?}", a, b), - Self::NibbledBranch((ref a, ref b), ref c, ref d) => + Self::Leaf((a, b), c, _) => write!(fmt, "Leaf({:?}, {:?})", (a, ToHex(&*b)), c), + Self::Extension((a, b), c) => write!(fmt, "Extension({:?}, {:?})", (a, ToHex(&*b)), c), + Self::Branch(a, b, _) => write!(fmt, "Branch({:?}, {:?}", a, b), + Self::NibbledBranch((a, b), c, d, _) => write!(fmt, "NibbledBranch({:?}, {:?}, {:?})", (a, ToHex(&*b)), c, d), } } @@ -273,17 +289,17 @@ impl Node { // load an inline node into memory or get the hash to do the lookup later. fn inline_or_hash( parent_hash: TrieHash, - child: EncodedNodeHandle, + child: EncodedNodeHandle, storage: &mut NodeStorage, - ) -> Result>, TrieHash, CError> { + ) -> Result, L::Location>, TrieHash, CError> { let handle = match child { - EncodedNodeHandle::Hash(data) => { + EncodedNodeHandle::Hash(data, location) => { let hash = decode_hash::(data) .ok_or_else(|| Box::new(TrieError::InvalidHash(parent_hash, data.to_vec())))?; - NodeHandle::Hash(hash) + NodeHandle::Hash(hash, location) }, EncodedNodeHandle::Inline(data) => { - let child = Node::from_encoded(parent_hash, data, storage)?; + let child = Node::from_encoded(parent_hash, data, &[], storage)?; NodeHandle::InMemory(storage.alloc(Stored::New(child))) }, }; @@ -292,11 +308,11 @@ impl Node { // load an inline node into memory or get the hash to do the lookup later. 
fn inline_or_hash_owned( - child: &NodeHandleOwned>, + child: &NodeHandleOwned, L::Location>, storage: &mut NodeStorage, - ) -> NodeHandle> { + ) -> NodeHandle, L::Location> { match child { - NodeHandleOwned::Hash(hash) => NodeHandle::Hash(*hash), + NodeHandleOwned::Hash(hash, location) => NodeHandle::Hash(*hash, *location), NodeHandleOwned::Inline(node) => { let child = Node::from_node_owned(&**node, storage); NodeHandle::InMemory(storage.alloc(Stored::New(child))) @@ -308,13 +324,14 @@ impl Node { fn from_encoded<'a, 'b>( node_hash: TrieHash, data: &'a [u8], + locations: &[L::Location], storage: &'b mut NodeStorage, ) -> Result, CError> { - let encoded_node = - L::Codec::decode(data).map_err(|e| Box::new(TrieError::DecoderError(node_hash, e)))?; + let encoded_node = L::Codec::decode(data, locations) + .map_err(|e| Box::new(TrieError::DecoderError(node_hash, e)))?; let node = match encoded_node { EncodedNode::Empty => Node::Empty, - EncodedNode::Leaf(k, v) => Node::Leaf(k.into(), v.into()), + EncodedNode::Leaf(k, v) => Node::Leaf(k.into(), v.into(), None), EncodedNode::Extension(key, cb) => Node::Extension(key.into(), Self::inline_or_hash(node_hash, cb, storage)?), EncodedNode::Branch(encoded_children, val) => { @@ -342,7 +359,7 @@ impl Node { child(15)?, ]); - Node::Branch(children, val.map(Into::into)) + Node::Branch(children, val.map(Into::into), None) }, EncodedNode::NibbledBranch(k, encoded_children, val) => { let mut child = |i: usize| match encoded_children[i] { @@ -369,17 +386,20 @@ impl Node { child(15)?, ]); - Node::NibbledBranch(k.into(), children, val.map(Into::into)) + Node::NibbledBranch(k.into(), children, val.map(Into::into), None) }, }; Ok(node) } /// Decode a node from a [`NodeOwned`]. - fn from_node_owned(node_owned: &NodeOwned>, storage: &mut NodeStorage) -> Self { + fn from_node_owned( + node_owned: &NodeOwned, L::Location>, + storage: &mut NodeStorage, + ) -> Self { match node_owned { NodeOwned::Empty => Node::Empty, - NodeOwned::Leaf(k, v) => Node::Leaf(k.into(), v.into()), + NodeOwned::Leaf(k, v) => Node::Leaf(k.into(), v.into(), None), NodeOwned::Extension(key, cb) => Node::Extension(key.into(), Self::inline_or_hash_owned(cb, storage)), NodeOwned::Branch(encoded_children, val) => { @@ -408,7 +428,7 @@ impl Node { child(15), ]); - Node::Branch(children, val.as_ref().map(Into::into)) + Node::Branch(children, val.as_ref().map(Into::into), None) }, NodeOwned::NibbledBranch(k, encoded_children, val) => { let mut child = |i: usize| { @@ -436,7 +456,7 @@ impl Node { child(15), ]); - Node::NibbledBranch(k.into(), children, val.as_ref().map(Into::into)) + Node::NibbledBranch(k.into(), children, val.as_ref().map(Into::into), None) }, NodeOwned::Value(_, _) => unreachable!("`NodeOwned::Value` can only be returned for the hash of a value."), @@ -446,70 +466,73 @@ impl Node { // TODO: parallelize /// Here `child_cb` should process the first parameter to either insert an external /// node value or to encode and add a new branch child node. 
- fn into_encoded(self, mut child_cb: F) -> Vec + fn into_encoded(self, mut child_cb: F) -> (Vec, Option>) where F: FnMut( - NodeToEncode>, + NodeToEncode, L::Location>, Option<&NibbleSlice>, Option, - ) -> ChildReference>, + ) -> ChildReference, L::Location>, { match self { - Node::Empty => L::Codec::empty_node().to_vec(), - Node::Leaf(partial, mut value) => { + Node::Empty => (L::Codec::empty_node().to_vec(), None), + Node::Leaf(partial, mut value, tree_ref) => { let pr = NibbleSlice::new_offset(&partial.1[..], partial.0); let value = value.into_encoded::(Some(&pr), &mut child_cb); - L::Codec::leaf_node(pr.right_iter(), pr.len(), value) + (L::Codec::leaf_node(pr.right_iter(), pr.len(), value), tree_ref) }, Node::Extension(partial, child) => { let pr = NibbleSlice::new_offset(&partial.1[..], partial.0); let it = pr.right_iter(); let c = child_cb(NodeToEncode::TrieNode(child), Some(&pr), None); - L::Codec::extension_node(it, pr.len(), c) + (L::Codec::extension_node(it, pr.len(), c), None) }, - Node::Branch(mut children, mut value) => { + Node::Branch(mut children, mut value, tree_ref) => { let value = value.as_mut().map(|v| v.into_encoded::(None, &mut child_cb)); - L::Codec::branch_node( - // map the `NodeHandle`s from the Branch to `ChildReferences` - children.iter_mut().map(Option::take).enumerate().map(|(i, maybe_child)| { - maybe_child.map(|child| { - child_cb(NodeToEncode::TrieNode(child), None, Some(i as u8)) - }) - }), - value, + ( + L::Codec::branch_node( + // map the `NodeHandle`s from the Branch to `ChildReferences` + children.iter_mut().map(Option::take).enumerate().map( + |(i, maybe_child)| { + maybe_child.map(|child| { + child_cb(NodeToEncode::TrieNode(child), None, Some(i as u8)) + }) + }, + ), + value, + ), + tree_ref, ) }, - Node::NibbledBranch(partial, mut children, mut value) => { + Node::NibbledBranch(partial, mut children, mut value, tree_ref) => { let pr = NibbleSlice::new_offset(&partial.1[..], partial.0); let value = value.as_mut().map(|v| v.into_encoded::(Some(&pr), &mut child_cb)); let it = pr.right_iter(); - L::Codec::branch_node_nibbled( - it, - pr.len(), - // map the `NodeHandle`s from the Branch to `ChildReferences` - children.iter_mut().map(Option::take).enumerate().map(|(i, maybe_child)| { - //let branch_index = [i as u8]; - maybe_child.map(|child| { - let pr = NibbleSlice::new_offset(&partial.1[..], partial.0); - child_cb(NodeToEncode::TrieNode(child), Some(&pr), Some(i as u8)) - }) - }), - value, + ( + L::Codec::branch_node_nibbled( + it, + pr.len(), + // map the `NodeHandle`s from the Branch to `ChildReferences` + children.iter_mut().map(Option::take).enumerate().map( + |(i, maybe_child)| { + //let branch_index = [i as u8]; + maybe_child.map(|child| { + let pr = NibbleSlice::new_offset(&partial.1[..], partial.0); + child_cb( + NodeToEncode::TrieNode(child), + Some(&pr), + Some(i as u8), + ) + }) + }, + ), + value, + ), + tree_ref, ) }, } } - - /// Returns the key partial key of this node. - fn partial_key(&self) -> Option<&NodeKey> { - match &self { - Self::Empty => None, - Self::Leaf(key, _) => Some(key), - Self::Branch(_, _) => None, - Self::NibbledBranch(key, _, _) => Some(key), - Self::Extension(key, _) => Some(key), - } - } } // post-inspect action. @@ -519,7 +542,7 @@ enum Action { // Restore the original node. This trusts that the node is actually the original. Restore(Node), // if it is a new node, just clears the storage. - Delete, + Delete(Option>), } // post-insert action. 
Same as action without delete @@ -551,33 +574,33 @@ enum Stored { // A new node. New(Node), // A cached node, loaded from the DB. - Cached(Node, TrieHash), + Cached(Node, TrieHash, L::Location), } /// Used to build a collection of child nodes from a collection of `NodeHandle`s #[derive(Clone, Copy)] #[cfg_attr(feature = "std", derive(Debug))] -pub enum ChildReference { +pub enum ChildReference { // `HO` is e.g. `H256`, i.e. the output of a `Hasher` - Hash(HO), + Hash(HO, L), Inline(HO, usize), // usize is the length of the node data we store in the `H::Out` } -impl<'a, HO> TryFrom> for ChildReference +impl<'a, HO, L> TryFrom> for ChildReference where HO: AsRef<[u8]> + AsMut<[u8]> + Default + Clone + Copy, { type Error = Vec; - fn try_from(handle: EncodedNodeHandle<'a>) -> result::Result> { + fn try_from(handle: EncodedNodeHandle<'a, L>) -> result::Result> { match handle { - EncodedNodeHandle::Hash(data) => { + EncodedNodeHandle::Hash(data, location) => { let mut hash = HO::default(); if data.len() != hash.as_ref().len() { return Err(data.to_vec()) } hash.as_mut().copy_from_slice(data); - Ok(ChildReference::Hash(hash)) + Ok(ChildReference::Hash(hash, location)) }, EncodedNodeHandle::Inline(data) => { let mut hash = HO::default(); @@ -629,26 +652,26 @@ impl<'a, L: TrieLayout> Index<&'a StorageHandle> for NodeStorage { fn index(&self, handle: &'a StorageHandle) -> &Node { match self.nodes[handle.0] { Stored::New(ref node) => node, - Stored::Cached(ref node, _) => node, + Stored::Cached(ref node, _, _) => node, } } } /// A builder for creating a [`TrieDBMut`]. pub struct TrieDBMutBuilder<'db, L: TrieLayout> { - db: &'db mut dyn HashDB, - root: &'db mut TrieHash, - cache: Option<&'db mut dyn TrieCache>, - recorder: Option<&'db mut dyn TrieRecorder>>, + db: &'db dyn NodeDB, + root: TrieHash, + root_location: L::Location, + cache: Option<&'db mut dyn TrieCache>, + recorder: Option<&'db mut dyn TrieRecorder, L::Location>>, } impl<'db, L: TrieLayout> TrieDBMutBuilder<'db, L> { /// Create a builder for constructing a new trie with the backing database `db` and empty /// `root`. - pub fn new(db: &'db mut dyn HashDB, root: &'db mut TrieHash) -> Self { - *root = L::Codec::hashed_null_node(); - - Self { root, db, cache: None, recorder: None } + pub fn new(db: &'db dyn NodeDB) -> Self { + let root = L::Codec::hashed_null_node(); + Self { root, db, cache: None, recorder: None, root_location: Default::default() } } /// Create a builder for constructing a new trie with the backing database `db` and `root`. @@ -656,14 +679,24 @@ impl<'db, L: TrieLayout> TrieDBMutBuilder<'db, L> { /// This doesn't check if `root` exists in the given `db`. If `root` doesn't exist it will fail /// when trying to lookup any key. pub fn from_existing( - db: &'db mut dyn HashDB, - root: &'db mut TrieHash, + db: &'db dyn NodeDB, + root: TrieHash, ) -> Self { - Self { db, root, cache: None, recorder: None } + Self { db, root, cache: None, recorder: None, root_location: Default::default() } + } + + /// Same as `from_existing` but force a db location to access root. + /// Note root in parameter is not checked. + pub fn from_existing_with_db_location( + db: &'db dyn NodeDB, + root: TrieHash, + root_location: L::Location, + ) -> Self { + Self { db, root, cache: None, recorder: None, root_location } } /// Use the given `cache` for the db. 
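`TrieDBMutBuilder` no longer takes `&mut dyn HashDB` and a mutable root: it borrows a `NodeDB` immutably, owns the root, and defers all writes to the changeset produced at commit time. A hedged sketch of opening and mutating an existing trie; it assumes `TrieDBMut`/`TrieDBMutBuilder` stay re-exported at the crate root and that `insert`/`remove` keep their `trie-db` signatures.

```rust
use subtrie::{node_db::NodeDB, DBValue, TrieDBMut, TrieDBMutBuilder, TrieHash, TrieLayout};

/// Open an existing trie for mutation. Nothing is written to `db` here;
/// changes live in node storage until the trie is committed.
fn open_and_update<'db, L: TrieLayout>(
    db: &'db dyn NodeDB<L::Hash, DBValue, L::Location>,
    root: TrieHash<L>,
) -> TrieDBMut<'db, L> {
    let mut trie = TrieDBMutBuilder::<L>::from_existing(db, root).build();
    // Errors surface missing or undecodable backend nodes; ignored in this sketch.
    let _ = trie.insert(b"dog", b"puppy");
    let _ = trie.remove(b"cat");
    trie
}
```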
- pub fn with_cache(mut self, cache: &'db mut dyn TrieCache) -> Self { + pub fn with_cache(mut self, cache: &'db mut dyn TrieCache) -> Self { self.cache = Some(cache); self } @@ -671,7 +704,7 @@ impl<'db, L: TrieLayout> TrieDBMutBuilder<'db, L> { /// Use the given optional `cache` for the db. pub fn with_optional_cache<'cache: 'db>( mut self, - cache: Option<&'cache mut dyn TrieCache>, + cache: Option<&'cache mut dyn TrieCache>, ) -> Self { // Make the compiler happy by "converting" the lifetime self.cache = cache.map(|c| c as _); @@ -679,7 +712,10 @@ impl<'db, L: TrieLayout> TrieDBMutBuilder<'db, L> { } /// Use the given `recorder` to record trie accesses. - pub fn with_recorder(mut self, recorder: &'db mut dyn TrieRecorder>) -> Self { + pub fn with_recorder( + mut self, + recorder: &'db mut dyn TrieRecorder, L::Location>, + ) -> Self { self.recorder = Some(recorder); self } @@ -687,7 +723,7 @@ impl<'db, L: TrieLayout> TrieDBMutBuilder<'db, L> { /// Use the given optional `recorder` to record trie accesses. pub fn with_optional_recorder<'recorder: 'db>( mut self, - recorder: Option<&'recorder mut dyn TrieRecorder>>, + recorder: Option<&'recorder mut dyn TrieRecorder, L::Location>>, ) -> Self { // Make the compiler happy by "converting" the lifetime self.recorder = recorder.map(|r| r as _); @@ -696,36 +732,150 @@ impl<'db, L: TrieLayout> TrieDBMutBuilder<'db, L> { /// Build the [`TrieDBMut`]. pub fn build(self) -> TrieDBMut<'db, L> { - let root_handle = NodeHandle::Hash(*self.root); + let root_handle = NodeHandle::Hash(self.root, self.root_location); TrieDBMut { db: self.db, root: self.root, cache: self.cache, recorder: self.recorder.map(core::cell::RefCell::new), - hash_count: 0, storage: NodeStorage::empty(), death_row: Default::default(), + death_row_child: Default::default(), root_handle, } } } -/// A `Trie` implementation using a generic `HashDB` backing database. +#[derive(Debug)] +pub struct NewChangesetNode { + pub hash: H, + pub prefix: OwnedPrefix, + pub data: Vec, + pub children: Vec>, + // Storing the key and removed nodes related + // to this change set node (only needed for old trie). + pub removed_keys: Option<(Option>, Vec<(H, OwnedPrefix)>)>, +} + +#[derive(Debug)] +pub struct ExistingChangesetNode { + pub hash: H, + pub prefix: OwnedPrefix, + pub location: DL, +} + +#[derive(Debug)] +pub enum Changeset { + New(NewChangesetNode), + Existing(ExistingChangesetNode), +} + +impl Changeset { + pub fn hash(&self) -> &H { + match self { + Changeset::New(node) => &node.hash, + Changeset::Existing(node) => &node.hash, + } + } +} + +impl Changeset { + /// In case the underlying db do not + /// do empty node optimization, it can + /// make sense to insert the empty node. 
+ pub fn new_empty>() -> Self { + Self::New(NewChangesetNode { + hash: C::hashed_null_node(), + prefix: Default::default(), + data: C::empty_node().to_vec(), + children: Default::default(), + removed_keys: None, + }) + } +} +pub fn prefix_prefix(ks: &[u8], prefix: Prefix) -> (Vec, Option) { + let mut result = Vec::with_capacity(ks.len() + prefix.0.len()); + result.extend_from_slice(ks); + result.extend_from_slice(prefix.0); + (result, prefix.1) +} + +impl Changeset { + pub fn apply_to(&self, mem_db: &mut MemoryDB) -> H + where + K: KeyFunction + Send + Sync, + MH: Hasher + Send + Sync, + { + fn apply_node<'a, H, DL, MH, K>( + node: &'a Changeset, + mem_db: &mut MemoryDB, + mut ks: Option<&'a [u8]>, + ) where + K: KeyFunction + Send + Sync, + MH: Hasher + Send + Sync, + { + match node { + Changeset::New(node) => { + if let Some((k, removed)) = node.removed_keys.as_ref() { + for (hash, p) in removed.iter() { + if let Some(k) = k { + let prefixed = prefix_prefix(k.as_slice(), (p.0.as_slice(), p.1)); + mem_db.remove(hash, (prefixed.0.as_slice(), prefixed.1)); + ks = Some(k.as_slice()); + } else { + mem_db.remove(hash, (p.0.as_slice(), p.1)); + } + } + } + for child in &node.children { + apply_node(child, mem_db, ks); + } + if let Some(ks) = ks { + let prefixed = prefix_prefix(ks, (node.prefix.0.as_slice(), node.prefix.1)); + mem_db.insert((prefixed.0.as_slice(), prefixed.1), &node.data); + } else { + mem_db.insert((node.prefix.0.as_slice(), node.prefix.1), &node.data); + } + }, + Changeset::Existing(_) => {}, + } + } + apply_node::(&self, mem_db, None); + self.root_hash() + } + + pub fn root_hash(&self) -> H { + match &self { + Changeset::New(node) => node.hash, + Changeset::Existing(node) => node.hash, + } + } + + pub fn unchanged(root: H) -> Self { + Changeset::Existing(ExistingChangesetNode { + hash: root, + prefix: (BackingByteVec::new(), None), + location: Default::default(), + }) + } +} + +pub type OwnedPrefix = (BackingByteVec, Option); + +/// A `Trie` implementation using a generic `NodeDB` backing database. /// -/// Use it as a `TrieMut` trait object. You can use `db()` to get the backing database object. +/// You can use `db()` to get the backing database object. /// Note that changes are not committed to the database until `commit` is called. /// -/// Querying the root or dropping the trie will commit automatically. -/// /// /// # Example /// ```ignore -/// use hash_db::Hasher; -/// use reference_trie::{RefTrieDBMut, TrieMut}; -/// use trie_db::DBValue; +/// use subtrie::node_db::Hasher; +/// use reference_trie::RefTrieDBMut; +/// use subtrie::DBValue; /// use keccak_hasher::KeccakHasher; -/// use memory_db::*; +/// use subtrie::memory_db::*; /// /// let mut memdb = MemoryDB::, DBValue>::default(); /// let mut root = Default::default(); @@ -743,17 +893,15 @@ where L: TrieLayout, { storage: NodeStorage, - db: &'a mut dyn HashDB, - root: &'a mut TrieHash, - root_handle: NodeHandle>, - death_row: HashSet<(TrieHash, (BackingByteVec, Option))>, - /// The number of hash operations this trie has performed. - /// Note that none are performed until changes are committed. - hash_count: usize, + db: &'a dyn NodeDB, + root: TrieHash, + root_handle: NodeHandle, L::Location>, + death_row: Set<(TrieHash, OwnedPrefix)>, + death_row_child: Vec>, /// Optional cache for speeding up the lookup of nodes. - cache: Option<&'a mut dyn TrieCache>, + cache: Option<&'a mut dyn TrieCache>, /// Optional trie recorder for recording trie accesses. 
- recorder: Option>>>, + recorder: Option, L::Location>>>, } impl<'a, L> TrieDBMut<'a, L> @@ -761,12 +909,7 @@ where L: TrieLayout, { /// Get the backing database. - pub fn db(&self) -> &dyn HashDB { - self.db - } - - /// Get the backing database mutably. - pub fn db_mut(&mut self) -> &mut dyn HashDB { + pub fn db(&self) -> &dyn NodeDB { self.db } @@ -775,12 +918,13 @@ where &mut self, hash: TrieHash, key: Prefix, + location: L::Location, ) -> Result, CError> { // We only check the `cache` for a node with `get_node` and don't insert // the node if it wasn't there, because in substrate we only access the node while computing // a new trie (aka some branch). We assume that this node isn't that important // to have it being cached. - let node = match self.cache.as_mut().and_then(|c| c.get_node(&hash)) { + let node = match self.cache.as_mut().and_then(|c| c.get_node(&hash, location)) { Some(node) => { if let Some(recorder) = self.recorder.as_mut() { recorder.borrow_mut().record(TrieAccess::NodeOwned { hash, node_owned: &node }); @@ -789,9 +933,9 @@ where Node::from_node_owned(&node, &mut self.storage) }, None => { - let node_encoded = self + let (node_encoded, locations) = self .db - .get(&hash, key) + .get(&hash, key, location) .ok_or_else(|| Box::new(TrieError::IncompleteDatabase(hash)))?; if let Some(recorder) = self.recorder.as_mut() { @@ -801,11 +945,11 @@ where }); } - Node::from_encoded(hash, &node_encoded, &mut self.storage)? + Node::from_encoded(hash, &node_encoded, &locations, &mut self.storage)? }, }; - Ok(self.storage.alloc(Stored::Cached(node, hash))) + Ok(self.storage.alloc(Stored::Cached(node, hash, location))) } // Inspect a node, choosing either to replace, restore, or delete it. @@ -828,16 +972,20 @@ where Stored::New(node) => match inspector(self, node, key)? { Action::Restore(node) => Some((Stored::New(node), false)), Action::Replace(node) => Some((Stored::New(node), true)), - Action::Delete => None, + Action::Delete(tree_ref) => { + tree_ref.map(|c| self.death_row_child.push(c)); + None + }, }, - Stored::Cached(node, hash) => match inspector(self, node, key)? { - Action::Restore(node) => Some((Stored::Cached(node, hash), false)), + Stored::Cached(node, hash, location) => match inspector(self, node, key)? { + Action::Restore(node) => Some((Stored::Cached(node, hash, location), false)), Action::Replace(node) => { self.death_row.insert((hash, current_key.left_owned())); Some((Stored::New(node), true)) }, - Action::Delete => { + Action::Delete(tree_ref) => { self.death_row.insert((hash, current_key.left_owned())); + tree_ref.map(|c| self.death_row_child.push(c)); None }, }, @@ -849,30 +997,32 @@ where &self, full_key: &[u8], mut partial: NibbleSlice, - handle: &NodeHandle>, + handle: &NodeHandle, L::Location>, ) -> Result, TrieHash, CError> { let mut handle = handle; + // prefix only use for value node access, so this is always correct. 
let prefix = (full_key, None); loop { let (mid, child) = match handle { - NodeHandle::Hash(hash) => { + NodeHandle::Hash(hash, location) => { let mut recorder = self.recorder.as_ref().map(|r| r.borrow_mut()); return Lookup:: { - db: &self.db, + db: self.db, query: |v: &[u8]| v.to_vec(), hash: *hash, + location: *location, cache: None, recorder: recorder .as_mut() - .map(|r| &mut ***r as &mut dyn TrieRecorder>), + .map(|r| &mut ***r as &mut dyn TrieRecorder, L::Location>), } .look_up(full_key, partial) }, NodeHandle::InMemory(handle) => match &self.storage[handle] { Node::Empty => return Ok(None), - Node::Leaf(key, value) => - if NibbleSlice::from_stored(key) == partial { + Node::Leaf(slice, value, _) => + if NibbleSlice::from_stored(slice) == partial { return Ok(value.in_memory_fetched_value( prefix, self.db, @@ -890,7 +1040,7 @@ where return Ok(None) } }, - Node::Branch(children, value) => + Node::Branch(children, value, _) => if partial.is_empty() { return Ok(if let Some(v) = value.as_ref() { v.in_memory_fetched_value( @@ -909,7 +1059,7 @@ where None => return Ok(None), } }, - Node::NibbledBranch(slice, children, value) => { + Node::NibbledBranch(slice, children, value, _) => { let slice = NibbleSlice::from_stored(slice); if slice == partial { return Ok(if let Some(v) = value.as_ref() { @@ -923,7 +1073,7 @@ where None }) } else if partial.starts_with(&slice) { - let idx = partial.at(0); + let idx = partial.at(slice.len()); match children[idx as usize].as_ref() { Some(child) => (1 + slice.len(), child), None => return Ok(None), @@ -943,20 +1093,22 @@ where /// Insert a key-value pair into the trie, creating new nodes if necessary. fn insert_at( &mut self, - handle: NodeHandle>, + handle: NodeHandle, L::Location>, key: &mut NibbleFullKey, value: Bytes, old_val: &mut Option>, + tree_ref: Option>, ) -> Result<(StorageHandle, bool), TrieHash, CError> { let h = match handle { NodeHandle::InMemory(h) => h, - NodeHandle::Hash(h) => self.cache(h, key.left())?, + NodeHandle::Hash(h, l) => self.cache(h, key.left(), l)?, }; // cache then destroy for hash handle (handle being root in most case) let stored = self.storage.destroy(h); let (new_stored, changed) = self .inspect(stored, key, move |trie, stored, key| { - trie.insert_inspector(stored, key, value, old_val).map(|a| a.into_action()) + trie.insert_inspector(stored, key, value, old_val, tree_ref) + .map(|a| a.into_action()) })? 
.expect("Insertion never deletes."); @@ -971,7 +1123,7 @@ where ) { match &stored_value { Some(Value::NewNode(Some(hash), _)) // also removing new node in case commit is called multiple times - | Some(Value::Node(hash)) => { + | Some(Value::Node(hash, _)) => { self.death_row.insert(( hash.clone(), (prefix.0.into(), prefix.1), @@ -989,6 +1141,7 @@ where key: &mut NibbleFullKey, value: Bytes, old_val: &mut Option>, + tree_ref: Option>, ) -> Result, TrieHash, CError> { let partial = *key; @@ -1000,9 +1153,9 @@ where #[cfg(feature = "std")] trace!(target: "trie", "empty: COMPOSE"); let value = Value::new(value, L::MAX_INLINE_VALUE); - InsertAction::Replace(Node::Leaf(partial.to_stored(), value)) + InsertAction::Replace(Node::Leaf(partial.to_stored(), value, tree_ref)) }, - Node::Branch(mut children, stored_value) => { + Node::Branch(mut children, stored_value, b_tree_ref) => { debug_assert!(L::USE_EXTENSION); #[cfg(feature = "std")] trace!(target: "trie", "branch: ROUTE,AUGMENT"); @@ -1010,7 +1163,7 @@ where if partial.is_empty() { let value = Some(Value::new(value, L::MAX_INLINE_VALUE)); let unchanged = stored_value == value; - let branch = Node::Branch(children, value); + let branch = Node::Branch(children, value, tree_ref); self.replace_old_value(old_val, stored_value, key.left()); @@ -1024,25 +1177,33 @@ where key.advance(1); if let Some(child) = children[idx].take() { // Original had something there. recurse down into it. - let (new_child, changed) = self.insert_at(child, key, value, old_val)?; + let (new_child, changed) = + self.insert_at(child, key, value, old_val, tree_ref)?; children[idx] = Some(new_child.into()); if !changed { // The new node we composed didn't change. // It means our branch is untouched too. - return Ok(InsertAction::Restore(Node::Branch(children, stored_value))) + return Ok(InsertAction::Restore(Node::Branch( + children, + stored_value, + b_tree_ref, + ))) } } else { // Original had nothing there. compose a leaf. 
let value = Value::new(value, L::MAX_INLINE_VALUE); - let leaf = - self.storage.alloc(Stored::New(Node::Leaf(key.to_stored(), value))); + let leaf = self.storage.alloc(Stored::New(Node::Leaf( + key.to_stored(), + value, + tree_ref, + ))); children[idx] = Some(leaf.into()); } - InsertAction::Replace(Node::Branch(children, stored_value)) + InsertAction::Replace(Node::Branch(children, stored_value, b_tree_ref)) } }, - Node::NibbledBranch(encoded, mut children, stored_value) => { + Node::NibbledBranch(encoded, mut children, stored_value, b_tree_ref) => { debug_assert!(!L::USE_EXTENSION); #[cfg(feature = "std")] trace!(target: "trie", "branch: ROUTE,AUGMENT"); @@ -1052,7 +1213,8 @@ where if common == existing_key.len() && common == partial.len() { let value = Some(Value::new(value, L::MAX_INLINE_VALUE)); let unchanged = stored_value == value; - let branch = Node::NibbledBranch(existing_key.to_stored(), children, value); + let branch = + Node::NibbledBranch(existing_key.to_stored(), children, value, tree_ref); let mut key_val = key.clone(); key_val.advance(existing_key.len()); @@ -1075,7 +1237,8 @@ where common, ); let nbranch_partial = existing_key.mid(common + 1).to_stored(); - let low = Node::NibbledBranch(nbranch_partial, children, stored_value); + let low = + Node::NibbledBranch(nbranch_partial, children, stored_value, b_tree_ref); let ix = existing_key.at(common); let mut children = empty_children(); let alloc_storage = self.storage.alloc(Stored::New(low)); @@ -1088,10 +1251,12 @@ where existing_key.to_stored_range(common), children, Some(value), + tree_ref, )) } else { let ix = partial.at(common); - let stored_leaf = Node::Leaf(partial.mid(common + 1).to_stored(), value); + let stored_leaf = + Node::Leaf(partial.mid(common + 1).to_stored(), value, tree_ref); let leaf = self.storage.alloc(Stored::New(stored_leaf)); @@ -1100,6 +1265,7 @@ where existing_key.to_stored_range(common), children, None, + None, )) } } else { @@ -1110,7 +1276,8 @@ where key.advance(common + 1); if let Some(child) = children[idx].take() { // Original had something there. recurse down into it. - let (new_child, changed) = self.insert_at(child, key, value, old_val)?; + let (new_child, changed) = + self.insert_at(child, key, value, old_val, tree_ref)?; children[idx] = Some(new_child.into()); if !changed { // The new node we composed didn't change. @@ -1119,14 +1286,18 @@ where existing_key.to_stored(), children, stored_value, + b_tree_ref, ); return Ok(InsertAction::Restore(n_branch)) } } else { // Original had nothing there. compose a leaf. let value = Value::new(value, L::MAX_INLINE_VALUE); - let leaf = - self.storage.alloc(Stored::New(Node::Leaf(key.to_stored(), value))); + let leaf = self.storage.alloc(Stored::New(Node::Leaf( + key.to_stored(), + value, + tree_ref, + ))); children[idx] = Some(leaf.into()); } @@ -1134,10 +1305,11 @@ where existing_key.to_stored(), children, stored_value, + b_tree_ref, )) } }, - Node::Leaf(encoded, stored_value) => { + Node::Leaf(encoded, stored_value, l_tree_ref) => { let existing_key = NibbleSlice::from_stored(&encoded); let common = partial.common_prefix(&existing_key); if common == existing_key.len() && common == partial.len() { @@ -1151,9 +1323,9 @@ where self.replace_old_value(old_val, Some(stored_value), key_val.left()); if unchanged { // unchanged. 
restore - InsertAction::Restore(Node::Leaf(encoded.clone(), value)) + InsertAction::Restore(Node::Leaf(encoded.clone(), value, l_tree_ref)) } else { - InsertAction::Replace(Node::Leaf(encoded.clone(), value)) + InsertAction::Replace(Node::Leaf(encoded.clone(), value, tree_ref)) } } else if (L::USE_EXTENSION && common == 0) || (!L::USE_EXTENSION && common < existing_key.len()) @@ -1171,24 +1343,32 @@ where let mut children = empty_children(); let branch = if L::USE_EXTENSION && existing_key.is_empty() { // always replace since branch isn't leaf. - Node::Branch(children, Some(stored_value)) + Node::Branch(children, Some(stored_value), l_tree_ref) } else { let idx = existing_key.at(common) as usize; - let new_leaf = - Node::Leaf(existing_key.mid(common + 1).to_stored(), stored_value); + let new_leaf = Node::Leaf( + existing_key.mid(common + 1).to_stored(), + stored_value, + l_tree_ref, + ); children[idx] = Some(self.storage.alloc(Stored::New(new_leaf)).into()); if L::USE_EXTENSION { - Node::Branch(children, None) + Node::Branch(children, None, None) } else { - Node::NibbledBranch(partial.to_stored_range(common), children, None) + Node::NibbledBranch( + partial.to_stored_range(common), + children, + None, + None, + ) } }; // always replace because whatever we get out here // is not the branch we started with. let branch_action = - self.insert_inspector(branch, key, value, old_val)?.unwrap_node(); + self.insert_inspector(branch, key, value, old_val, tree_ref)?.unwrap_node(); InsertAction::Replace(branch_action) } else if !L::USE_EXTENSION { #[cfg(feature = "std")] @@ -1200,9 +1380,11 @@ where existing_key.to_stored(), empty_children(), Some(stored_value), + l_tree_ref, ); // augment the new branch. - let branch = self.insert_inspector(branch, key, value, old_val)?.unwrap_node(); + let branch = + self.insert_inspector(branch, key, value, old_val, tree_ref)?.unwrap_node(); InsertAction::Replace(branch) } else if common == existing_key.len() { @@ -1212,10 +1394,11 @@ where // fully-shared prefix for an extension. // make a stub branch and an extension. - let branch = Node::Branch(empty_children(), Some(stored_value)); + let branch = Node::Branch(empty_children(), Some(stored_value), l_tree_ref); // augment the new branch. key.advance(common); - let branch = self.insert_inspector(branch, key, value, old_val)?.unwrap_node(); + let branch = + self.insert_inspector(branch, key, value, old_val, tree_ref)?.unwrap_node(); // always replace since we took a leaf and made an extension. let leaf = self.storage.alloc(Stored::New(branch)); @@ -1234,13 +1417,14 @@ where // partially-shared prefix for an extension. // start by making a leaf. - let low = Node::Leaf(existing_key.mid(common).to_stored(), stored_value); + let low = + Node::Leaf(existing_key.mid(common).to_stored(), stored_value, l_tree_ref); // augment it. this will result in the Leaf -> common == 0 routine, // which creates a branch. key.advance(common); let augmented_low = - self.insert_inspector(low, key, value, old_val)?.unwrap_node(); + self.insert_inspector(low, key, value, old_val, tree_ref)?.unwrap_node(); // make an extension using it. this is a replacement. InsertAction::Replace(Node::Extension( existing_key.to_stored_range(common), @@ -1280,7 +1464,13 @@ where // continue inserting. let branch_action = self - .insert_inspector(Node::Branch(children, None), key, value, old_val)? + .insert_inspector( + Node::Branch(children, None, None), + key, + value, + old_val, + tree_ref, + )? 
.unwrap_node(); InsertAction::Replace(branch_action) } else if common == existing_key.len() { @@ -1291,7 +1481,8 @@ where // insert into the child node. key.advance(common); - let (new_child, changed) = self.insert_at(child_branch, key, value, old_val)?; + let (new_child, changed) = + self.insert_at(child_branch, key, value, old_val, tree_ref)?; let new_ext = Node::Extension(existing_key.to_stored(), new_child.into()); @@ -1318,7 +1509,7 @@ where // creating a branch. key.advance(common); let augmented_low = - self.insert_inspector(low, key, value, old_val)?.unwrap_node(); + self.insert_inspector(low, key, value, old_val, tree_ref)?.unwrap_node(); // always replace, since this extension is not the one we started with. // this is known because the partial key is only the common prefix. @@ -1334,20 +1525,21 @@ where /// Removes a node from the trie based on key. fn remove_at( &mut self, - handle: NodeHandle>, + handle: NodeHandle, L::Location>, key: &mut NibbleFullKey, old_val: &mut Option>, + tree_ref: Option>, ) -> Result, TrieHash, CError> { let stored = match handle { NodeHandle::InMemory(h) => self.storage.destroy(h), - NodeHandle::Hash(h) => { - let handle = self.cache(h, key.left())?; + NodeHandle::Hash(h, l) => { + let handle = self.cache(h, key.left(), l)?; self.storage.destroy(handle) }, }; let opt = self.inspect(stored, key, move |trie, node, key| { - trie.remove_inspector(node, key, old_val) + trie.remove_inspector(node, key, old_val, tree_ref) })?; Ok(opt.map(|(new, changed)| (self.storage.alloc(new), changed))) @@ -1359,24 +1551,25 @@ where node: Node, key: &mut NibbleFullKey, old_val: &mut Option>, + tree_ref: Option>, ) -> Result, TrieHash, CError> { let partial = *key; Ok(match (node, partial.is_empty()) { - (Node::Empty, _) => Action::Delete, - (Node::Branch(c, None), true) => Action::Restore(Node::Branch(c, None)), - (Node::NibbledBranch(n, c, None), true) => - Action::Restore(Node::NibbledBranch(n, c, None)), - (Node::Branch(children, val), true) => { + (Node::Empty, _) => Action::Delete(None), + (Node::Branch(c, None, _), true) => Action::Restore(Node::Branch(c, None, tree_ref)), + (Node::NibbledBranch(n, c, None, _), true) => + Action::Restore(Node::NibbledBranch(n, c, None, tree_ref)), + (Node::Branch(children, val, _), true) => { self.replace_old_value(old_val, val, key.left()); // always replace since we took the value out. - Action::Replace(self.fix(Node::Branch(children, None), *key)?) + Action::Replace(self.fix(Node::Branch(children, None, tree_ref), *key)?) }, - (Node::NibbledBranch(n, children, val), true) => { + (Node::NibbledBranch(n, children, val, _), true) => { self.replace_old_value(old_val, val, key.left()); // always replace since we took the value out. - Action::Replace(self.fix(Node::NibbledBranch(n, children, None), *key)?) + Action::Replace(self.fix(Node::NibbledBranch(n, children, None, tree_ref), *key)?) }, - (Node::Branch(mut children, value), false) => { + (Node::Branch(mut children, value, btreerefset), false) => { let idx = partial.at(0) as usize; if let Some(child) = children[idx].take() { #[cfg(feature = "std")] @@ -1387,10 +1580,10 @@ where ); let prefix = *key; key.advance(1); - match self.remove_at(child, key, old_val)? { + match self.remove_at(child, key, old_val, tree_ref)? { Some((new, changed)) => { children[idx] = Some(new.into()); - let branch = Node::Branch(children, value); + let branch = Node::Branch(children, value, btreerefset); match changed { // child was changed, so we were too. 
true => Action::Replace(branch), @@ -1403,15 +1596,17 @@ where // the node may need fixing. #[cfg(feature = "std")] trace!(target: "trie", "branch child deleted, partial={:?}", partial); - Action::Replace(self.fix(Node::Branch(children, value), prefix)?) + Action::Replace( + self.fix(Node::Branch(children, value, btreerefset), prefix)?, + ) }, } } else { // no change needed. - Action::Restore(Node::Branch(children, value)) + Action::Restore(Node::Branch(children, value, btreerefset)) } }, - (Node::NibbledBranch(encoded, mut children, value), false) => { + (Node::NibbledBranch(encoded, mut children, value, btreerefset), false) => { let (common, existing_length) = { let existing_key = NibbleSlice::from_stored(&encoded); (existing_key.common_prefix(&partial), existing_key.len()) @@ -1422,14 +1617,15 @@ where let mut key_val = key.clone(); key_val.advance(existing_length); self.replace_old_value(old_val, Some(value), key_val.left()); - let f = self.fix(Node::NibbledBranch(encoded, children, None), *key); + let f = + self.fix(Node::NibbledBranch(encoded, children, None, tree_ref), *key); Action::Replace(f?) } else { - Action::Restore(Node::NibbledBranch(encoded, children, None)) + Action::Restore(Node::NibbledBranch(encoded, children, None, tree_ref)) } } else if common < existing_length { // partway through an extension -- nothing to do here. - Action::Restore(Node::NibbledBranch(encoded, children, value)) + Action::Restore(Node::NibbledBranch(encoded, children, value, btreerefset)) } else { // common == existing_length && common < partial.len() : check children let idx = partial.at(common) as usize; @@ -1443,10 +1639,11 @@ where ); let prefix = *key; key.advance(common + 1); - match self.remove_at(child, key, old_val)? { + match self.remove_at(child, key, old_val, tree_ref)? { Some((new, changed)) => { children[idx] = Some(new.into()); - let branch = Node::NibbledBranch(encoded, children, value); + let branch = + Node::NibbledBranch(encoded, children, value, btreerefset); match changed { // child was changed, so we were too. true => Action::Replace(branch), @@ -1463,28 +1660,26 @@ where "branch child deleted, partial={:?}", partial, ); - Action::Replace( - self.fix( - Node::NibbledBranch(encoded, children, value), - prefix, - )?, - ) + Action::Replace(self.fix( + Node::NibbledBranch(encoded, children, value, btreerefset), + prefix, + )?) }, } } else { // no change needed. - Action::Restore(Node::NibbledBranch(encoded, children, value)) + Action::Restore(Node::NibbledBranch(encoded, children, value, btreerefset)) } } }, - (Node::Leaf(encoded, value), _) => { + (Node::Leaf(encoded, value, ltreerefset), _) => { let existing_key = NibbleSlice::from_stored(&encoded); if existing_key == partial { // this is the node we were looking for. Let's delete it. let mut key_val = key.clone(); key_val.advance(existing_key.len()); self.replace_old_value(old_val, Some(value), key_val.left()); - Action::Delete + Action::Delete(tree_ref) } else { // leaf the node alone. #[cfg(feature = "std")] @@ -1494,7 +1689,7 @@ where partial, NibbleSlice::from_stored(&encoded), ); - Action::Restore(Node::Leaf(encoded, value)) + Action::Restore(Node::Leaf(encoded, value, ltreerefset)) } }, (Node::Extension(encoded, child_branch), _) => { @@ -1508,7 +1703,7 @@ where trace!(target: "trie", "removing from extension child, partial={:?}", partial); let prefix = *key; key.advance(common); - match self.remove_at(child_branch, key, old_val)? { + match self.remove_at(child_branch, key, old_val, tree_ref)? 
{ Some((new_child, changed)) => { // if the child branch was unchanged, then the extension is too. // otherwise, this extension may need fixing. @@ -1523,7 +1718,7 @@ where None => { // the whole branch got deleted. // that means that this extension is useless. - Action::Delete + Action::Delete(None) }, } } else { @@ -1550,7 +1745,7 @@ where recurse_extension: bool, ) -> Result, TrieHash, CError> { match node { - Node::Branch(mut children, value) => { + Node::Branch(mut children, value, btreerefset) => { // if only a single value, transmute to leaf/extension and feed through fixed. #[cfg_attr(feature = "std", derive(Debug))] enum UsedIndex { @@ -1588,17 +1783,17 @@ where // make a leaf. #[cfg(feature = "std")] trace!(target: "trie", "fixing: branch -> leaf"); - Ok(Node::Leaf(NibbleSlice::new(&[]).to_stored(), value)) + Ok(Node::Leaf(NibbleSlice::new(&[]).to_stored(), value, btreerefset)) }, (_, value) => { // all is well. #[cfg(feature = "std")] trace!(target: "trie", "fixing: restoring branch"); - Ok(Node::Branch(children, value)) + Ok(Node::Branch(children, value, btreerefset)) }, } }, - Node::NibbledBranch(enc_nibble, mut children, value) => { + Node::NibbledBranch(enc_nibble, mut children, value, btreerefset) => { // if only a single value, transmute to leaf/extension and feed through fixed. #[cfg_attr(feature = "std", derive(Debug))] enum UsedIndex { @@ -1645,37 +1840,48 @@ where ); let stored = match child { NodeHandle::InMemory(h) => self.storage.destroy(h), - NodeHandle::Hash(h) => { - let handle = self.cache(h, child_prefix)?; + NodeHandle::Hash(h, l) => { + let handle = self.cache(h, child_prefix, l)?; self.storage.destroy(handle) }, }; let child_node = match stored { Stored::New(node) => node, - Stored::Cached(node, hash) => { + Stored::Cached(node, hash, _location) => { self.death_row .insert((hash, (child_prefix.0[..].into(), child_prefix.1))); node }, }; + btreerefset.map(|c| self.death_row_child.push(c)); match child_node { - Node::Leaf(sub_partial, value) => { + Node::Leaf(sub_partial, value, ltreerefset) => { let mut enc_nibble = enc_nibble; combine_key( &mut enc_nibble, (nibble_ops::NIBBLE_PER_BYTE - 1, &[a][..]), ); combine_key(&mut enc_nibble, (sub_partial.0, &sub_partial.1[..])); - Ok(Node::Leaf(enc_nibble, value)) + Ok(Node::Leaf(enc_nibble, value, ltreerefset)) }, - Node::NibbledBranch(sub_partial, ch_children, ch_value) => { + Node::NibbledBranch( + sub_partial, + ch_children, + ch_value, + ntreerefset, + ) => { let mut enc_nibble = enc_nibble; combine_key( &mut enc_nibble, (nibble_ops::NIBBLE_PER_BYTE - 1, &[a][..]), ); combine_key(&mut enc_nibble, (sub_partial.0, &sub_partial.1[..])); - Ok(Node::NibbledBranch(enc_nibble, ch_children, ch_value)) + Ok(Node::NibbledBranch( + enc_nibble, + ch_children, + ch_value, + ntreerefset, + )) }, _ => unreachable!(), } @@ -1684,13 +1890,13 @@ where // make a leaf. #[cfg(feature = "std")] trace!(target: "trie", "fixing: branch -> leaf"); - Ok(Node::Leaf(enc_nibble, value)) + Ok(Node::Leaf(enc_nibble, value, btreerefset)) }, (_, value) => { // all is well. 
#[cfg(feature = "std")] trace!(target: "trie", "fixing: restoring branch"); - Ok(Node::NibbledBranch(enc_nibble, children, value)) + Ok(Node::NibbledBranch(enc_nibble, children, value, btreerefset)) }, } }, @@ -1730,15 +1936,15 @@ where let stored = match child { NodeHandle::InMemory(h) => self.storage.destroy(h), - NodeHandle::Hash(h) => { - let handle = self.cache(h, child_prefix)?; + NodeHandle::Hash(h, l) => { + let handle = self.cache(h, child_prefix, l)?; self.storage.destroy(handle) }, }; - let (child_node, maybe_hash) = match stored { - Stored::New(node) => (node, None), - Stored::Cached(node, hash) => (node, Some(hash)), + let (child_node, maybe_hash, child_location) = match stored { + Stored::New(node) => (node, None, Default::default()), + Stored::Cached(node, hash, location) => (node, Some(hash), location), }; match child_node { @@ -1761,7 +1967,7 @@ where self.fix_inner(Node::Extension(partial, sub_child), key.into(), true) }, - Node::Leaf(sub_partial, value) => { + Node::Leaf(sub_partial, value, ltreerefset) => { // combine with node below. if let Some(hash) = maybe_hash { // delete the cached child since we are going to replace it. @@ -1777,7 +1983,7 @@ where "fixing: extension -> leaf. new_partial={:?}", partial, ); - Ok(Node::Leaf(partial, value)) + Ok(Node::Leaf(partial, value, ltreerefset)) }, child_node => { #[cfg(feature = "std")] @@ -1785,7 +1991,7 @@ where // reallocate the child node. let stored = if let Some(hash) = maybe_hash { - Stored::Cached(child_node, hash) + Stored::Cached(child_node, hash, child_location) } else { Stored::New(child_node) }; @@ -1798,146 +2004,128 @@ where } } - /// Commit the in-memory changes to disk, freeing their storage and - /// updating the state root. - pub fn commit(&mut self) { + /// Calculate the changeset for the trie. + /// Note `keyspace` only apply for hash base storage to avoid key collision + /// between composed tree states. + pub fn commit(self) -> Changeset, L::Location> { + self.commit_inner(None) + } + + /// Same as commit but use a keyspace to isolate + /// stored date. + /// `keyspace` only apply for hash base storage to avoid key collision + /// between composed tree states. + pub fn commit_with_keyspace(self, keyspace: &[u8]) -> Changeset, L::Location> { + self.commit_inner(Some(keyspace)) + } + + fn commit_inner(mut self, keyspace: Option<&[u8]>) -> Changeset, L::Location> { #[cfg(feature = "std")] trace!(target: "trie", "Committing trie changes to db."); // always kill all the nodes on death row. #[cfg(feature = "std")] trace!(target: "trie", "{:?} nodes to remove from db", self.death_row.len()); + let mut removed = Vec::with_capacity(self.death_row.len()); + + #[cfg(feature = "std")] for (hash, prefix) in self.death_row.drain() { - self.db.remove(&hash, (&prefix.0[..], prefix.1)); + removed.push((hash, prefix)); } let handle = match self.root_handle() { - NodeHandle::Hash(_) => return, // no changes necessary. + NodeHandle::Hash(hash, location) => { + debug_assert!(removed.is_empty()); + return Changeset::Existing(ExistingChangesetNode { + hash, + prefix: Default::default(), + location, + }); + }, // no changes necessary. NodeHandle::InMemory(h) => h, }; match self.storage.destroy(handle) { Stored::New(node) => { - // Reconstructs the full key for root node. 
- let full_key = self.cache.as_ref().and_then(|_| { - node.partial_key().and_then(|k| Some(NibbleSlice::from_stored(k).into())) - }); - let mut k = NibbleVec::new(); + let mut children = Vec::new(); - let encoded_root = node.into_encoded(|node, o_slice, o_index| { + let (encoded_root, roottreerefset) = node.into_encoded(|node, o_slice, o_index| { let mov = k.append_optional_slice_and_nibble(o_slice, o_index); match node { NodeToEncode::Node(value) => { - let value_hash = self.db.insert(k.as_prefix(), value); + let value_hash = self.db.hash(value); self.cache_value(k.inner(), value, value_hash); + children.push(Changeset::New(NewChangesetNode { + hash: value_hash, + prefix: k.as_owned_prefix(), + data: value.to_vec(), //TODO: avoid allocation + children: Default::default(), + removed_keys: None, + })); + k.drop_lasts(mov); - ChildReference::Hash(value_hash) + ChildReference::Hash(value_hash, Default::default()) }, NodeToEncode::TrieNode(child) => { - let result = self.commit_child(child, &mut k); + let result = self.commit_child(child, &mut k, &mut children); k.drop_lasts(mov); result }, } }); + roottreerefset.map(|c| { + children.push(*c); + }); #[cfg(feature = "std")] trace!(target: "trie", "encoded root node: {:?}", ToHex(&encoded_root[..])); - *self.root = self.db.insert(EMPTY_PREFIX, &encoded_root); - self.hash_count += 1; + self.root = self.db.hash(&encoded_root); - self.cache_node(*self.root, &encoded_root, full_key); + self.cache_node(self.root); - self.root_handle = NodeHandle::Hash(*self.root); + self.root_handle = NodeHandle::Hash(self.root, Default::default()); + Changeset::New(NewChangesetNode { + hash: self.root.clone(), + prefix: Default::default(), + data: encoded_root, + children, + removed_keys: Some((keyspace.map(|s| s.to_vec()), removed)), + }) }, - Stored::Cached(node, hash) => { + Stored::Cached(node, hash, location) => { // probably won't happen, but update the root and move on. - *self.root = hash; - self.root_handle = - NodeHandle::InMemory(self.storage.alloc(Stored::Cached(node, hash))); + self.root = hash; + self.root_handle = NodeHandle::InMemory(self.storage.alloc(Stored::Cached( + node, + hash, + // TODO should we use location here?? likely yes TODO write a test with cache + // usage and location in triedbmut + Default::default(), + ))); + debug_assert!(removed.is_empty()); + Changeset::Existing(ExistingChangesetNode { + hash, + prefix: Default::default(), + location, + }) }, } } /// Cache the given `encoded` node. - fn cache_node(&mut self, hash: TrieHash, encoded: &[u8], full_key: Option) { - // If we have a cache, cache our node directly. + fn cache_node(&mut self, hash: TrieHash) { + // Mark the node as new so that it is removed from the shared cache. if let Some(cache) = self.cache.as_mut() { - let node = cache.get_or_insert_node(hash, &mut || { - Ok(L::Codec::decode(&encoded) - .ok() - .and_then(|n| n.to_owned_node::().ok()) - .expect("Just encoded the node, so it should decode without any errors; qed")) - }); - - // `node` should always be `OK`, but let's play it safe. - let node = if let Ok(node) = node { node } else { return }; - - let mut values_to_cache = Vec::new(); - - // If the given node has data attached, the `full_key` is the full key to this node. 
- if let Some(full_key) = full_key { - node.data().and_then(|v| node.data_hash().map(|h| (&full_key, v, h))).map( - |(k, v, h)| { - values_to_cache.push((k.inner().to_vec(), (v.clone(), h).into())); - }, - ); - - fn cache_child_values( - node: &NodeOwned>, - values_to_cache: &mut Vec<(Vec, CachedValue>)>, - full_key: NibbleVec, - ) { - node.child_iter().flat_map(|(n, c)| c.as_inline().map(|c| (n, c))).for_each( - |(n, c)| { - let mut key = full_key.clone(); - n.map(|n| key.push(n)); - c.partial_key().map(|p| key.append(p)); - - if let Some((hash, data)) = - c.data().and_then(|d| c.data_hash().map(|h| (h, d))) - { - values_to_cache - .push((key.inner().to_vec(), (data.clone(), hash).into())); - } - - cache_child_values::(c, values_to_cache, key); - }, - ); - } - - // Also cache values of inline nodes. - cache_child_values::(&node, &mut values_to_cache, full_key.clone()); - } - - drop(node); - values_to_cache.into_iter().for_each(|(k, v)| cache.cache_value_for_key(&k, v)); + cache.insert_new_node(&hash); } } /// Cache the given `value`. /// - /// `hash` is the hash of `value`. - fn cache_value(&mut self, full_key: &[u8], value: impl Into, hash: TrieHash) { - if let Some(cache) = self.cache.as_mut() { - let value = value.into(); - - // `get_or_insert` should always return `Ok`, but be safe. - let value = if let Ok(value) = cache - .get_or_insert_node(hash, &mut || Ok(NodeOwned::Value(value.clone(), hash))) - .map(|n| n.data().cloned()) - { - value - } else { - None - }; - - if let Some(value) = value { - cache.cache_value_for_key(full_key, (value, hash).into()) - } - } - } + /// Cache is not done here as we want to cache the location from the db, + /// and location on new_nodes are not resolved here. + fn cache_value(&mut self, _full_key: &[u8], _value: impl Into, _hash: TrieHash) {} /// Commit a node by hashing it and writing it to the db. Returns a /// `ChildReference` which in most cases carries a normal hash but for the @@ -1946,40 +2134,58 @@ where /// `into_encoded` method of `Node`. 
fn commit_child( &mut self, - handle: NodeHandle>, + handle: NodeHandle, L::Location>, prefix: &mut NibbleVec, - ) -> ChildReference> { + children: &mut Vec, L::Location>>, + ) -> ChildReference, L::Location> { match handle { - NodeHandle::Hash(hash) => ChildReference::Hash(hash), + NodeHandle::Hash(hash, location) => { + children.push(Changeset::Existing(ExistingChangesetNode { + hash, + prefix: prefix.as_owned_prefix(), + location, + })); + ChildReference::Hash(hash, location) + }, NodeHandle::InMemory(storage_handle) => { match self.storage.destroy(storage_handle) { - Stored::Cached(_, hash) => ChildReference::Hash(hash), + Stored::Cached(_, hash, location) => { + children.push(Changeset::Existing(ExistingChangesetNode { + hash, + prefix: prefix.as_owned_prefix(), + location, + })); + ChildReference::Hash(hash, location) + }, Stored::New(node) => { - // Reconstructs the full key - let full_key = self.cache.as_ref().and_then(|_| { - let mut prefix = prefix.clone(); - if let Some(partial) = node.partial_key() { - prefix.append_partial(NibbleSlice::from_stored(partial).right()); - } - Some(prefix) - }); - - let encoded = { - let commit_child = |node: NodeToEncode>, + let mut sub_children = Vec::new(); + let (encoded, child_set) = { + let commit_child = |node: NodeToEncode, L::Location>, o_slice: Option<&NibbleSlice>, o_index: Option| { let mov = prefix.append_optional_slice_and_nibble(o_slice, o_index); match node { NodeToEncode::Node(value) => { - let value_hash = self.db.insert(prefix.as_prefix(), value); + let value_hash = self.db.hash(value); + sub_children.push(Changeset::New(NewChangesetNode { + hash: value_hash, + prefix: prefix.as_owned_prefix(), + data: value.to_vec(), //TODO: avoid allocation + children: Default::default(), + removed_keys: None, + })); self.cache_value(prefix.inner(), value, value_hash); prefix.drop_lasts(mov); - ChildReference::Hash(value_hash) + ChildReference::Hash(value_hash, Default::default()) }, NodeToEncode::TrieNode(node_handle) => { - let result = self.commit_child(node_handle, prefix); + let result = self.commit_child( + node_handle, + prefix, + &mut sub_children, + ); prefix.drop_lasts(mov); result }, @@ -1987,13 +2193,20 @@ where }; node.into_encoded(commit_child) }; - if encoded.len() >= L::Hash::LENGTH { - let hash = self.db.insert(prefix.as_prefix(), &encoded); - self.hash_count += 1; - - self.cache_node(hash, &encoded, full_key); - - ChildReference::Hash(hash) + let result = if encoded.len() >= L::Hash::LENGTH { + let hash = self.db.hash(&encoded); + self.cache_node(hash); + child_set.map(|c| { + sub_children.push(*c); + }); + children.push(Changeset::New(NewChangesetNode { + hash, + prefix: prefix.as_owned_prefix(), + data: encoded, + children: sub_children, + removed_keys: None, + })); + ChildReference::Hash(hash, Default::default()) } else { // it's a small value, so we cram it into a `TrieHash` // and tag with length @@ -2002,7 +2215,8 @@ where h.as_mut()[..len].copy_from_slice(&encoded[..len]); ChildReference::Inline(h, len) - } + }; + result }, } }, @@ -2010,26 +2224,16 @@ where } // a hack to get the root node's handle - fn root_handle(&self) -> NodeHandle> { + fn root_handle(&self) -> NodeHandle, L::Location> { match self.root_handle { - NodeHandle::Hash(h) => NodeHandle::Hash(h), + NodeHandle::Hash(h, l) => NodeHandle::Hash(h, l), NodeHandle::InMemory(StorageHandle(x)) => NodeHandle::InMemory(StorageHandle(x)), } } -} -impl<'a, L> TrieMut for TrieDBMut<'a, L> -where - L: TrieLayout, -{ - fn root(&mut self) -> &TrieHash { - self.commit(); 
- self.root - } - - fn is_empty(&self) -> bool { + pub fn is_empty(&self) -> bool { match self.root_handle { - NodeHandle::Hash(h) => h == L::Codec::hashed_null_node(), + NodeHandle::Hash(h, _) => h == L::Codec::hashed_null_node(), NodeHandle::InMemory(ref h) => match self.storage[h] { Node::Empty => true, _ => false, @@ -2037,18 +2241,39 @@ where } } - fn get<'x, 'key>(&'x self, key: &'key [u8]) -> Result, TrieHash, CError> + pub fn get<'x, 'key>( + &'x self, + key: &'key [u8], + ) -> Result, TrieHash, CError> where 'x: 'key, { self.lookup(key, NibbleSlice::new(key), &self.root_handle) } - fn insert( + pub fn insert( + &mut self, + key: &[u8], + value: &[u8], + ) -> Result>, TrieHash, CError> { + self.insert_with_tree_ref(key, value, None) + } + + pub fn insert_with_tree_ref( &mut self, key: &[u8], value: &[u8], + tree_ref: Option>, ) -> Result>, TrieHash, CError> { + // expect for the child changes to have a key. + debug_assert!(tree_ref + .as_ref() + .map(|c| if let Changeset::New(set) = c.as_ref() { + set.removed_keys.is_some() + } else { + true + }) + .unwrap_or(true)); if !L::ALLOW_EMPTY && value.is_empty() { return self.remove(key) } @@ -2061,7 +2286,7 @@ where let value = Bytes::from(value); let root_handle = self.root_handle(); let (new_handle, _changed) = - self.insert_at(root_handle, &mut NibbleSlice::new(key), value, &mut old_val)?; + self.insert_at(root_handle, &mut NibbleSlice::new(key), value, &mut old_val, tree_ref)?; #[cfg(feature = "std")] trace!(target: "trie", "insert: altered trie={}", _changed); @@ -2070,7 +2295,25 @@ where Ok(old_val) } - fn remove(&mut self, key: &[u8]) -> Result>, TrieHash, CError> { + pub fn remove(&mut self, key: &[u8]) -> Result>, TrieHash, CError> { + self.remove_with_tree_ref(key, None) + } + + pub fn remove_with_tree_ref( + &mut self, + key: &[u8], + tree_ref: Option>, + ) -> Result>, TrieHash, CError> { + // expect for the child changes to have a key. + debug_assert!(tree_ref + .as_ref() + .map(|c| if let Changeset::New(set) = c.as_ref() { + set.removed_keys.is_some() + } else { + true + }) + .unwrap_or(true)); + #[cfg(feature = "std")] trace!(target: "trie", "remove: key={:?}", ToHex(key)); @@ -2078,7 +2321,7 @@ where let mut key_slice = NibbleSlice::new(key); let mut old_val = None; - match self.remove_at(root_handle, &mut key_slice, &mut old_val)? { + match self.remove_at(root_handle, &mut key_slice, &mut old_val, tree_ref)? { Some((handle, _changed)) => { #[cfg(feature = "std")] trace!(target: "trie", "remove: altered trie={}", _changed); @@ -2087,8 +2330,8 @@ where None => { #[cfg(feature = "std")] trace!(target: "trie", "remove: obliterated trie"); - self.root_handle = NodeHandle::Hash(L::Codec::hashed_null_node()); - *self.root = L::Codec::hashed_null_node(); + let handle = self.storage.alloc(Stored::New(Node::Empty)); + self.root_handle = NodeHandle::InMemory(handle); }, } @@ -2096,15 +2339,6 @@ where } } -impl<'a, L> Drop for TrieDBMut<'a, L> -where - L: TrieLayout, -{ - fn drop(&mut self) { - self.commit(); - } -} - /// combine two NodeKeys fn combine_key(start: &mut NodeKey, end: (usize, &[u8])) { debug_assert!(start.0 < nibble_ops::NIBBLE_PER_BYTE); diff --git a/test-support/keccak-hasher/CHANGELOG.md b/test-support/keccak-hasher/CHANGELOG.md deleted file mode 100644 index 4da7ff58..00000000 --- a/test-support/keccak-hasher/CHANGELOG.md +++ /dev/null @@ -1,11 +0,0 @@ -# Changelog - -The format is based on [Keep a Changelog]. 
- -[Keep a Changelog]: http://keepachangelog.com/en/1.0.0/ - -## [0.16.0] - 2023-03-14 -- Switch to `hash-db` 0.16. [#188](https://github.com/paritytech/trie/pull/188) - -## [0.15.3] - 2020-07-24 -- Update `tiny-keccak` to 0.2. [#105](https://github.com/paritytech/trie/pull/105) diff --git a/test-support/keccak-hasher/Cargo.toml b/test-support/keccak-hasher/Cargo.toml deleted file mode 100644 index d933d44d..00000000 --- a/test-support/keccak-hasher/Cargo.toml +++ /dev/null @@ -1,19 +0,0 @@ -[package] -name = "keccak-hasher" -version = "0.16.0" -authors = ["Parity Technologies "] -description = "Keccak-256 implementation of the Hasher trait" -repository = "https://github.com/paritytech/parity/" -license = "Apache-2.0" -edition = "2018" - -[dependencies] -tiny-keccak = { version = "2.0.2", features = ["keccak"] } -hash-db = { path = "../../hash-db", default-features = false, version = "0.16.0" } -hash256-std-hasher = { path = "../../hash256-std-hasher", version = "0.15.2" } - -[features] -default = ["std"] -std = [ - "hash-db/std", -] diff --git a/test-support/trie-bench/CHANGELOG.md b/test-support/trie-bench/CHANGELOG.md deleted file mode 100644 index 8634244b..00000000 --- a/test-support/trie-bench/CHANGELOG.md +++ /dev/null @@ -1,66 +0,0 @@ -# Changelog - -The format is based on [Keep a Changelog]. - -[Keep a Changelog]: http://keepachangelog.com/en/1.0.0/ - -## [Unreleased] - -## [0.37.0] - 2023-03-14 -- Update dependencies. [#188](https://github.com/paritytech/trie/pull/188) and [#187](https://github.com/paritytech/trie/pull/187) - -## [0.36.0] - 2023-02-24 -- Updated `trie-db` to 0.26.0. [#185](https://github.com/paritytech/trie/pull/185) - -## [0.35.0] - 2023-02-03 -- Updated `trie-db` to 0.25.0. - -## [0.34.0] - 2023-01-23 -- Updated `criterion` to 0.4. [#176](https://github.com/paritytech/trie/pull/176) - -## [0.33.0] - 2022-11-29 -- Updated `memory-db` to 0.31. [#172](https://github.com/paritytech/trie/pull/172) - -## [0.32.0] - 2022-09-20 -- Updated `memory-db` to 0.30. [#166](https://github.com/paritytech/trie/pull/166) - -## [0.31.0] - 2022-08-04 -- Update trie-db to 0.24.0. [#163](https://github.com/paritytech/trie/pull/163) - -## [0.30.0] - 2022-02-04 -- Updated `memory-db` to 0.29. [#150](https://github.com/paritytech/trie/pull/150) - -## [0.29.0] - 2021-10-19 -- Updated memory-db, triedb and trie-root. [#142](https://github.com/paritytech/trie/pull/142) - -## [0.28.0] - 2021-07-02 -- Updated memory-db to 0.27. [#139](https://github.com/paritytech/trie/pull/139) -- Updated parity-scale-codec to 2.0. [#137](https://github.com/paritytech/trie/pull/137) - -## [0.27.1] - 2021-06-24 -- Updated parity-scale-codec to 2.2.0-rc.2. - -## [0.27.0] - 2021-01-27 -- Updated memory-db to 0.26. -- Updated parity-scale-codec to 2.0. - -## [0.26.0] - 2021-01-05 -- Updated memory-db to 0.25. [#118](https://github.com/paritytech/trie/pull/118) - -## [0.25.0] - 2020-07-24 -- Updated criterion to 0.3. [#106](https://github.com/paritytech/trie/pull/106) - -## [0.24.0] - 2020-07-07 -- Updated memory-db to 0.24. [#99](https://github.com/paritytech/trie/pull/99) - -## [0.23.0] - 2020-07-06 -- Updated memory-db to 0.22. [#98](https://github.com/paritytech/trie/pull/98) - -## [0.21.0] - 2020-03-21 -- Updated memory-db to 0.20.0 [#82](https://github.com/paritytech/trie/pull/82) - -## [0.20.0] - 2020-02-07 -- Updated trie-root to v0.16.0 and memory-db to v0.19.0 and trie-db to v0.20.0 [#78](https://github.com/paritytech/trie/pull/78) - -## [0.19.0] - 2020-01-17 -- Updated trie-db to 0.19.0. 
[#75](https://github.com/paritytech/trie/pull/75) diff --git a/test-support/trie-bench/Cargo.toml b/test-support/trie-bench/Cargo.toml deleted file mode 100644 index f8de1f96..00000000 --- a/test-support/trie-bench/Cargo.toml +++ /dev/null @@ -1,18 +0,0 @@ -[package] -name = "trie-bench" -description = "Standard benchmarking suite for tries" -version = "0.37.0" -authors = ["Parity Technologies "] -repository = "https://github.com/paritytech/trie/" -license = "Apache-2.0" -edition = "2018" - -[dependencies] -keccak-hasher = { path = "../keccak-hasher", version = "0.16.0" } -trie-standardmap = { path = "../trie-standardmap", version = "0.16.0" } -hash-db = { path = "../../hash-db" , version = "0.16.0"} -memory-db = { path = "../../memory-db", version = "0.32.0" } -trie-root = { path = "../../trie-root", version = "0.18.0" } -trie-db = { path = "../../trie-db", version = "0.27.0" } -criterion = "0.4.0" -parity-scale-codec = "3.0.0" diff --git a/test-support/trie-standardmap/CHANGELOG.md b/test-support/trie-standardmap/CHANGELOG.md deleted file mode 100644 index 39a5cbbf..00000000 --- a/test-support/trie-standardmap/CHANGELOG.md +++ /dev/null @@ -1,8 +0,0 @@ -# Changelog - -The format is based on [Keep a Changelog]. - -[Keep a Changelog]: http://keepachangelog.com/en/1.0.0/ - -## [0.16.0] - 2023-03-14 -- Update dependencies. [#188](https://github.com/paritytech/trie/pull/188) and [#187](https://github.com/paritytech/trie/pull/187) diff --git a/test-support/trie-standardmap/Cargo.toml b/test-support/trie-standardmap/Cargo.toml deleted file mode 100644 index b1d2d199..00000000 --- a/test-support/trie-standardmap/Cargo.toml +++ /dev/null @@ -1,11 +0,0 @@ -[package] -name = "trie-standardmap" -description = "Standard test map for profiling tries" -version = "0.16.0" -authors = ["Parity Technologies "] -license = "Apache-2.0" -edition = "2018" - -[dependencies] -keccak-hasher = { path = "../keccak-hasher", version = "0.16.0"} -hash-db = { path = "../../hash-db" , version = "0.16.0"} diff --git a/trie-db/test/Cargo.toml b/test/Cargo.toml similarity index 56% rename from trie-db/test/Cargo.toml rename to test/Cargo.toml index 547df945..14cbc60d 100644 --- a/trie-db/test/Cargo.toml +++ b/test/Cargo.toml @@ -11,15 +11,16 @@ edition = "2018" name = "bench" harness = false +[[bench]] +name = "memory_db" +harness = false + [dependencies] -trie-db = { path = "..", version = "0.27.0"} -hash-db = { path = "../../hash-db", version = "0.16.0"} -memory-db = { path = "../../memory-db", version = "0.32.0" } +trie-db = { package = "subtrie", path = "../subtrie", version = "0.0.1", features = ["test_utils", "bench"]} rand = { version = "0.8", default-features = false, features = ["small_rng"] } -trie-standardmap = { path = "../../test-support/trie-standardmap", version = "0.16.0" } -reference-trie = { path = "../../test-support/reference-trie", version = "0.29.0" } -hex-literal = "0.3" -criterion = "0.4.0" +reference-trie = { path = "../reference-trie", version = "0.29.0" } +hex-literal = "0.4" +criterion = "0.5.1" env_logger = { version = "0.10", default-features = false } log = "0.4" diff --git a/trie-db/test/benches/bench.rs b/test/benches/bench.rs similarity index 98% rename from trie-db/test/benches/bench.rs rename to test/benches/bench.rs index bd2611d5..93075ffa 100644 --- a/trie-db/test/benches/bench.rs +++ b/test/benches/bench.rs @@ -17,10 +17,11 @@ use std::collections::BTreeMap; use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion}; use reference_trie::ExtensionLayout as 
Layout; use trie_db::{ + memory_db, proof::{generate_proof, verify_proof}, + test_utils::{Alphabet, StandardMap, ValueMode}, NibbleSlice, Trie, }; -use trie_standardmap::{Alphabet, StandardMap, ValueMode}; criterion_group!( benches, @@ -254,7 +255,6 @@ fn trie_mut_ref_root(c: &mut Criterion) { fn trie_mut(c: &mut Criterion) { use memory_db::HashKey; - use trie_db::TrieMut; let params = vec![(29, 204800 / 2, 512 * 2), (29, 204800, 32)]; @@ -265,10 +265,8 @@ fn trie_mut(c: &mut Criterion) { let param = format!("seed({}), len({}), value_length({})", seed, len, value_length); group.bench_with_input(BenchmarkId::new("trie_mut", param), &input, |b, i| { b.iter(|| { - let mut root = Default::default(); let mut mdb = memory_db::MemoryDB::<_, HashKey<_>, _>::default(); - let mut trie = - trie_db::TrieDBMutBuilder::::new(&mut mdb, &mut root).build(); + let mut trie = trie_db::TrieDBMutBuilder::::new(&mut mdb).build(); for (key, value) in i { trie.insert(&key, &value).expect( "changes trie: insertion to trie is not allowed to fail within runtime", diff --git a/memory-db/benches/bench.rs b/test/benches/memory_db.rs similarity index 95% rename from memory-db/benches/bench.rs rename to test/benches/memory_db.rs index b3e0fd9a..148393f3 100644 --- a/memory-db/benches/bench.rs +++ b/test/benches/memory_db.rs @@ -13,9 +13,11 @@ // limitations under the License. use criterion::{black_box, criterion_group, criterion_main, Criterion}; -use hash_db::{HashDB, Hasher, EMPTY_PREFIX}; -use keccak_hasher::KeccakHasher; -use memory_db::{HashKey, MemoryDB}; +use trie_db::{ + keccak_hasher::KeccakHasher, + memory_db::{HashKey, MemoryDB}, + node_db::{Hasher, EMPTY_PREFIX}, +}; criterion_group!( benches, diff --git a/test/src/double_ended_iterator.rs b/test/src/double_ended_iterator.rs new file mode 100644 index 00000000..77fa4ce1 --- /dev/null +++ b/test/src/double_ended_iterator.rs @@ -0,0 +1,410 @@ +// Copyright 2017, 2020 Parity Technologies +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
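Editorial note: the new `test/src/double_ended_iterator.rs` module below exercises iterators built on top of the reworked `TrieDBMut` API introduced earlier in this diff. As orientation, here is a rough, uncompiled sketch of that new mutation flow, assembled from the `triedbmut.rs`, doc-comment, and benchmark changes above. The `Layout` alias, the exact `MemoryDB` type parameters, and the `commit_flow` function name are assumptions for illustration only, not code from the PR.

```rust
use reference_trie::ExtensionLayout as Layout;
use trie_db::{
    keccak_hasher::KeccakHasher,
    memory_db::{HashKey, MemoryDB},
    DBValue, TrieDBMutBuilder,
};

// Sketch only: mirrors the pattern in the updated `trie_mut` benchmark above.
fn commit_flow() {
    // Assumed type parameters; the diff's own examples have their generics garbled.
    let mut memdb = MemoryDB::<KeccakHasher, HashKey<KeccakHasher>, DBValue>::default();

    // The builder now borrows a read-only node database and owns the root,
    // instead of taking `&mut dyn HashDB` plus a `&mut root`.
    let mut trie = TrieDBMutBuilder::<Layout>::new(&memdb).build();
    trie.insert(b"key", b"value").unwrap();

    // `commit` consumes the trie and returns the pending changes rather than
    // writing them; the caller applies (or discards) the changeset explicitly.
    let changeset = trie.commit();
    let _new_root = changeset.apply_to(&mut memdb);
}
```

The design change worth noting for reviewers: `TrieDBMut` no longer commits on `root()` or on `Drop`; all writes and deletions (the former `death_row`) travel in the returned `Changeset`, which also carries child changesets and optional `removed_keys` for keyspace-isolated storage.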
+ +use hex_literal::hex; +use reference_trie::test_layouts; +use trie_db::{ + node::Node, node_db::Hasher, NibbleSlice, TrieDBBuilder, TrieDBNodeDoubleEndedIterator, + TrieDoubleEndedIterator, TrieLayout, +}; + +use crate::{ + iterator::{build_trie_db, nibble_vec}, + TestDB, +}; + +test_layouts!(node_double_ended_iterator_works, node_double_ended_iterator); +fn node_double_ended_iterator>() { + let pairs = vec![ + (hex!("01").to_vec(), b"aaaa".to_vec()), + (hex!("0123").to_vec(), b"bbbb".to_vec()), + (hex!("02").to_vec(), vec![1; 32]), + ]; + + let (memdb, root) = build_trie_db::(&pairs); + let trie = TrieDBBuilder::::new(&memdb, &root).build(); + let mut iter = TrieDBNodeDoubleEndedIterator::new(&trie).unwrap(); + + if T::USE_EXTENSION { + match iter.next_back() { + Some(Ok((prefix, Some(_), node))) => { + assert_eq!(prefix, nibble_vec(hex!("02"), 2)); + match node.node() { + Node::Leaf(partial, _) => assert_eq!(partial, NibbleSlice::new(&hex!("")[..])), + _ => panic!("unexpected node"), + } + }, + _ => panic!("unexpected item"), + } + + match iter.next_back() { + Some(Ok((prefix, None, node))) => { + assert_eq!(prefix, nibble_vec(hex!("0120"), 3)); + match node.node() { + Node::Leaf(partial, _) => + assert_eq!(partial, NibbleSlice::new_offset(&hex!("03")[..], 1)), + _ => panic!("unexpected node"), + } + }, + _ => panic!("unexpected item"), + } + + match iter.next_back() { + Some(Ok((prefix, None, node))) => { + assert_eq!(prefix, nibble_vec(hex!("01"), 2)); + match node.node() { + Node::Branch(_, _) => {}, + _ => panic!("unexpected node"), + } + }, + _ => panic!("unexpected item"), + } + + match iter.next_back() { + Some(Ok((prefix, Some(_), node))) => { + assert_eq!(prefix, nibble_vec(hex!("00"), 1)); + match node.node() { + Node::Branch(_, _) => {}, + _ => panic!("unexpected node"), + } + }, + _ => panic!("unexpected item"), + } + + match iter.next_back() { + Some(Ok((prefix, Some(_), node))) => { + assert_eq!(prefix, nibble_vec(hex!(""), 0)); + match node.node() { + Node::Extension(partial, _) => + assert_eq!(partial, NibbleSlice::new_offset(&hex!("00")[..], 1)), + _ => panic!("unexpected node"), + } + }, + _ => panic!("unexpected item"), + } + + assert!(iter.next_back().is_none()); + } else { + let can_expand = + T::MAX_INLINE_VALUE.unwrap_or(T::Hash::LENGTH as u32) < T::Hash::LENGTH as u32; + + match iter.next_back() { + Some(Ok((prefix, Some(_), node))) => { + assert_eq!(prefix, nibble_vec(hex!("02"), 2)); + match node.node() { + Node::Leaf(partial, _) => assert_eq!(partial, NibbleSlice::new(&hex!("")[..])), + _ => panic!("unexpected node"), + } + }, + _ => panic!("unexpected item"), + } + + match iter.next_back() { + Some(Ok((prefix, hash, node))) => { + if !can_expand { + assert!(hash.is_none()); + } + assert_eq!(prefix, nibble_vec(hex!("0120"), 3)); + match node.node() { + Node::Leaf(partial, _) => + assert_eq!(partial, NibbleSlice::new_offset(&hex!("03")[..], 1)), + _ => panic!("unexpected node"), + } + }, + _ => panic!("unexpected item"), + } + + match iter.next_back() { + Some(Ok((prefix, hash, node))) => { + if !can_expand { + assert!(hash.is_none()); + } + assert_eq!(prefix, nibble_vec(hex!("01"), 2)); + match node.node() { + Node::NibbledBranch(partial, _, _) => + assert_eq!(partial, NibbleSlice::new_offset(&hex!("")[..], 0)), + _ => panic!("unexpected node"), + } + }, + _ => panic!("unexpected item"), + } + + match iter.next_back() { + Some(Ok((prefix, Some(_), node))) => { + assert_eq!(prefix, nibble_vec(hex!(""), 0)); + match node.node() { + Node::NibbledBranch(partial, 
_, _) => + assert_eq!(partial, NibbleSlice::new_offset(&hex!("00")[..], 1)), + _ => panic!("unexpected node"), + } + }, + _ => panic!("unexpected item"), + } + + assert!(iter.next_back().is_none()); + } +} + +test_layouts!(seek_back_over_empty_works, seek_back_over_empty_works_internal); +fn seek_back_over_empty_works_internal>() { + let (memdb, root) = build_trie_db::(&[]); + let trie = TrieDBBuilder::::new(&memdb, &root).build(); + let mut iter = TrieDBNodeDoubleEndedIterator::new(&trie).unwrap(); + + >::seek(&mut iter, &hex!("")[..]).unwrap(); + match iter.next_back() { + Some(Ok((prefix, _, node))) => { + assert_eq!(prefix, nibble_vec(hex!(""), 0)); + match node.node() { + Node::Empty => {}, + _ => panic!("unexpected node"), + } + }, + _ => panic!("unexpected item"), + } + + assert!(iter.next_back().is_none()); + + >::seek(&mut iter, &hex!("00")[..]).unwrap(); + match iter.next_back() { + Some(Ok((prefix, _, node))) => { + assert_eq!(prefix, nibble_vec(hex!(""), 0)); + match node.node() { + Node::Empty => {}, + _ => panic!("unexpected node"), + } + }, + _ => panic!("unexpected item"), + } +} + +test_layouts!(seek_back_works, seek_back_works_internal); +fn seek_back_works_internal>() { + let pairs = vec![ + (hex!("01").to_vec(), b"aaaa".to_vec()), + (hex!("0123").to_vec(), b"bbbb".to_vec()), + (hex!("0122").to_vec(), b"cccc".to_vec()), + (hex!("02").to_vec(), vec![1; 32]), + ]; + + let (memdb, root) = build_trie_db::(&pairs); + let trie = TrieDBBuilder::::new(&memdb, &root).build(); + let mut iter = TrieDBNodeDoubleEndedIterator::new(&trie).unwrap(); + + >::seek(&mut iter, &hex!("")[..]).unwrap(); + match iter.next_back() { + Some(Ok((prefix, _, _))) => assert_eq!(prefix, nibble_vec(hex!(""), 0)), + _ => panic!("unexpected item"), + } + + >::seek(&mut iter, &hex!("03")[..]).unwrap(); + match iter.next_back() { + Some(Ok((prefix, _, _))) => assert_eq!(prefix, nibble_vec(hex!("02"), 2)), + _ => panic!("unexpected item"), + } + + >::seek(&mut iter, &hex!("02")[..]).unwrap(); + match iter.next_back() { + Some(Ok((prefix, _, _))) => assert_eq!(prefix, nibble_vec(hex!("02"), 2)), + _ => panic!("unexpected item"), + } + + >::seek(&mut iter, &hex!("01")[..]).unwrap(); + match iter.next_back() { + Some(Ok((prefix, _, _))) => { + assert_eq!(prefix, nibble_vec(hex!("0123"), 4)); + }, + _ => panic!("unexpected item"), + } + + match iter.next_back() { + Some(Ok((prefix, _, _))) => { + assert_eq!(prefix, nibble_vec(hex!("0122"), 4)); + }, + _ => panic!("unexpected item"), + } + + match iter.next_back() { + Some(Ok((prefix, _, _))) => { + assert_eq!(prefix, nibble_vec(hex!("0120"), 3)); + }, + _ => panic!("unexpected item"), + } + + match iter.next_back() { + Some(Ok((prefix, _, _))) => { + assert_eq!(prefix, nibble_vec(hex!("01"), 2)); + }, + _ => panic!("unexpected item"), + } + + >::seek(&mut iter, &hex!("0125")[..]).unwrap(); + match iter.next_back() { + Some(Ok((prefix, _, _))) => { + assert_eq!(prefix, nibble_vec(hex!("0123"), 4)); + }, + _ => panic!("unexpected item"), + } + + match iter.next_back() { + Some(Ok((prefix, _, _))) => { + assert_eq!(prefix, nibble_vec(hex!("0122"), 4)); + }, + _ => panic!("unexpected item"), + } + + >::seek(&mut iter, &hex!("0120")[..]).unwrap(); + match iter.next_back() { + Some(Ok((prefix, _, _))) => { + assert_eq!(prefix, nibble_vec(hex!("0120"), 3)); + }, + _ => panic!("unexpected item"), + } + + match iter.next_back() { + Some(Ok((prefix, _, _))) => { + assert_eq!(prefix, nibble_vec(hex!("01"), 2)); + }, + _ => panic!("unexpected item"), + } +} + 
+test_layouts!(prefix_back_works, prefix_back_works_internal); +fn prefix_back_works_internal>() { + let can_expand = T::MAX_INLINE_VALUE.unwrap_or(T::Hash::LENGTH as u32) < T::Hash::LENGTH as u32; + let pairs = vec![ + (hex!("01").to_vec(), b"aaaa".to_vec()), + (hex!("0123").to_vec(), b"bbbb".to_vec()), + (hex!("0122").to_vec(), b"cccc".to_vec()), + (hex!("02").to_vec(), vec![1; 32]), + ]; + + let (memdb, root) = build_trie_db::(&pairs); + let trie = TrieDBBuilder::::new(&memdb, &root).build(); + let mut iter = TrieDBNodeDoubleEndedIterator::new(&trie).unwrap(); + + iter.prefix(&hex!("01").to_vec()[..]).unwrap(); + + if T::USE_EXTENSION { + match iter.next_back() { + Some(Ok((prefix, None, node))) => { + assert_eq!(prefix, nibble_vec(hex!("0123"), 4)); + match node.node() { + Node::Leaf(partial, _) => { + assert_eq!(partial, NibbleSlice::new_offset(&hex!("")[..], 0)) + }, + _ => panic!("unexpected node"), + } + }, + _ => panic!("unexpected item"), + } + } else { + match iter.next_back() { + Some(Ok((prefix, hash, node))) => { + if !can_expand { + debug_assert!(hash.is_none()); + } + assert_eq!(prefix, nibble_vec(hex!("0123"), 4)); + match node.node() { + Node::Leaf(partial, _) => { + assert_eq!(partial, NibbleSlice::new_offset(&hex!("")[..], 0)) + }, + _ => panic!("unexpected node"), + } + }, + _ => panic!("unexpected item"), + } + } + + match iter.next_back() { + Some(Ok((prefix, hash, node))) => { + if !can_expand { + debug_assert!(hash.is_none()); + } + assert_eq!(prefix, nibble_vec(hex!("0122"), 4)); + match node.node() { + Node::Leaf(partial, _) => { + assert_eq!(partial, NibbleSlice::new_offset(&hex!("")[..], 0)) + }, + _ => panic!("unexpected node"), + } + }, + _ => panic!("unexpected item"), + } + + match iter.next_back() { + Some(Ok((prefix, hash, node))) => { + if !can_expand { + debug_assert!(hash.is_none()); + } + assert_eq!(prefix, nibble_vec(hex!("0120"), 3)); + match node.node() { + Node::NibbledBranch(partial, _, _) => + assert_eq!(partial, NibbleSlice::new_offset(&hex!("")[..], 0)), + Node::Branch(_, _) => {}, + _ => panic!("unexpected node"), + } + }, + _ => panic!("unexpected item"), + } + + match iter.next_back() { + Some(Ok((prefix, hash, node))) => { + if !can_expand { + debug_assert!(hash.is_none()); + } + assert_eq!(prefix, nibble_vec(hex!("01"), 2)); + match node.node() { + Node::NibbledBranch(partial, _, _) => + assert_eq!(partial, NibbleSlice::new_offset(&hex!("")[..], 0)), + Node::Branch(_, _) => {}, + _ => panic!("unexpected node"), + } + }, + _ => panic!("unexpected item"), + } + + assert!(iter.next_back().is_none()); + + let mut iter = TrieDBNodeDoubleEndedIterator::new(&trie).unwrap(); + iter.prefix(&hex!("0010").to_vec()[..]).unwrap(); + assert!(iter.next_back().is_none()); + let mut iter = TrieDBNodeDoubleEndedIterator::new(&trie).unwrap(); + iter.prefix(&hex!("10").to_vec()[..]).unwrap(); + assert!(iter.next_back().is_none()); +} + +test_layouts!(prefix_over_empty_works, prefix_over_empty_works_internal); +fn prefix_over_empty_works_internal>() { + let (memdb, root) = build_trie_db::(&[]); + let trie = TrieDBBuilder::::new(&memdb, &root).build(); + let mut iter = TrieDBNodeDoubleEndedIterator::new(&trie).unwrap(); + iter.prefix(&hex!("")[..]).unwrap(); + match iter.next_back() { + Some(Ok((prefix, Some(_), node))) => { + assert_eq!(prefix, nibble_vec(hex!(""), 0)); + match node.node() { + Node::Empty => {}, + _ => panic!("unexpected node"), + } + }, + _ => panic!("unexpected item"), + } + + assert!(iter.next_back().is_none()); + + let mut iter = 
TrieDBNodeDoubleEndedIterator::new(&trie).unwrap(); + iter.prefix(&hex!("00")[..]).unwrap(); + assert!(iter.next_back().is_none()); +} diff --git a/trie-db/test/src/iter_build.rs b/test/src/iter_build.rs similarity index 84% rename from trie-db/test/src/iter_build.rs rename to test/src/iter_build.rs index bb4c0d85..546f6ed9 100644 --- a/trie-db/test/src/iter_build.rs +++ b/test/src/iter_build.rs @@ -12,12 +12,15 @@ // See the License for the specific language governing permissions and // limitations under the License. -use memory_db::{HashKey, MemoryDB, PrefixedKey}; use reference_trie::{ test_layouts, ExtensionLayout, HashedValueNoExt, HashedValueNoExtThreshold, NoExtensionLayout, - RefHasher, }; -use trie_db::{DBValue, Trie, TrieDBBuilder, TrieDBMutBuilder, TrieLayout, TrieMut}; +use trie_db::{ + memory_db::{HashKey, MemoryDB, PrefixedKey}, + DBValue, Trie, TrieDBBuilder, TrieDBMutBuilder, TrieLayout, +}; + +use crate::TestDB; #[test] fn trie_root_empty() { @@ -39,16 +42,17 @@ fn root_extension_one() { fn test_iter(data: Vec<(Vec, Vec)>) { let mut db = MemoryDB::, DBValue>::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut db, &mut root).build(); + let changeset = { + let mut t = TrieDBMutBuilder::::new(&mut db).build(); for i in 0..data.len() { let key: &[u8] = &data[i].0; let value: &[u8] = &data[i].1; t.insert(key, value).unwrap(); } - } - let t = TrieDBBuilder::::new(&db, &root).build(); + t.commit() + }; + changeset.apply_to(&mut db); + let t = TrieDBBuilder::::new(&db, changeset.hash()).build(); for (i, kv) in t.iter().unwrap().enumerate() { let (k, v) = kv.unwrap(); let key: &[u8] = &data[i].0; @@ -62,7 +66,7 @@ fn test_iter(data: Vec<(Vec, Vec)>) { } fn compare_implementations(data: Vec<(Vec, Vec)>) { - test_iter::>(data.clone()); + test_iter::>(data.clone()); test_iter::(data.clone()); test_iter::(data.clone()); test_iter::(data.clone()); @@ -71,38 +75,37 @@ fn compare_implementations(data: Vec<(Vec, Vec)>) { } fn compare_implementations_prefixed(data: Vec<(Vec, Vec)>) { - compare_implementations_prefixed_internal::>(data.clone()); + compare_implementations_prefixed_internal::>(data.clone()); compare_implementations_prefixed_internal::(data.clone()); compare_implementations_prefixed_internal::(data.clone()); compare_implementations_prefixed_internal::(data.clone()); } -fn compare_implementations_prefixed_internal(data: Vec<(Vec, Vec)>) { - let memdb = MemoryDB::<_, PrefixedKey<_>, _>::default(); - let hashdb = MemoryDB::, DBValue>::default(); - reference_trie::compare_implementations::(data, memdb, hashdb); +fn compare_implementations_prefixed_internal(data: Vec<(Vec, Vec)>) +where + T::Location: std::fmt::Debug, +{ + reference_trie::compare_implementations::>(data); } fn compare_implementations_h(data: Vec<(Vec, Vec)>) { - compare_implementations_h_internal::>(data.clone()); + compare_implementations_h_internal::>(data.clone()); compare_implementations_h_internal::(data.clone()); compare_implementations_h_internal::(data.clone()); compare_implementations_h_internal::(data.clone()); } -fn compare_implementations_h_internal(data: Vec<(Vec, Vec)>) { - let memdb = MemoryDB::<_, HashKey<_>, _>::default(); - let hashdb = MemoryDB::, DBValue>::default(); - reference_trie::compare_implementations::(data.clone(), memdb, hashdb); +fn compare_implementations_h_internal(data: Vec<(Vec, Vec)>) +where + T::Location: std::fmt::Debug, +{ + reference_trie::compare_implementations::>(data.clone()); } fn 
compare_implementations_no_extension_unordered(data: Vec<(Vec, Vec)>) { - let memdb = MemoryDB::<_, HashKey<_>, _>::default(); - let hashdb = MemoryDB::, DBValue>::default(); - reference_trie::compare_implementations_unordered::(data, memdb, hashdb); + reference_trie::compare_implementations_unordered::>(data); } fn compare_insert_remove(data: Vec<(bool, Vec, Vec)>) { - let memdb = MemoryDB::<_, PrefixedKey<_>, _>::default(); - reference_trie::compare_insert_remove::(data, memdb); + reference_trie::compare_insert_remove::>(data); } -fn compare_root(data: Vec<(Vec, Vec)>) { - let memdb = MemoryDB::, _>::default(); +fn compare_root>(data: Vec<(Vec, Vec)>) { + let memdb = DB::default(); reference_trie::compare_root::(data, memdb); } fn compare_unhashed(data: Vec<(Vec, Vec)>) { @@ -132,8 +135,8 @@ fn trie_middle_node2() { ]); } test_layouts!(root_extension_bis, root_extension_bis_internal); -fn root_extension_bis_internal() { - compare_root::(vec![ +fn root_extension_bis_internal>() { + compare_root::(vec![ (vec![1u8, 2u8, 3u8, 3u8], vec![8u8; 32]), (vec![1u8, 2u8, 3u8, 4u8], vec![7u8; 32]), ]); @@ -239,7 +242,7 @@ fn fuzz_no_extension4() { ]); } test_layouts!(fuzz_no_extension_insert_remove_1, fuzz_no_extension_insert_remove_1_internal); -fn fuzz_no_extension_insert_remove_1_internal() { +fn fuzz_no_extension_insert_remove_1_internal>() { let data = vec![ (false, vec![0], vec![251, 255]), (false, vec![0, 1], vec![251, 255]), @@ -249,7 +252,7 @@ fn fuzz_no_extension_insert_remove_1_internal() { compare_insert_remove::(data); } test_layouts!(fuzz_no_extension_insert_remove_2, fuzz_no_extension_insert_remove_2_internal); -fn fuzz_no_extension_insert_remove_2_internal() { +fn fuzz_no_extension_insert_remove_2_internal>() { let data = vec![ (false, vec![0x00], vec![0xfd, 0xff]), (false, vec![0x10, 0x00], vec![1; 32]), diff --git a/trie-db/test/src/iterator.rs b/test/src/iterator.rs similarity index 89% rename from trie-db/test/src/iterator.rs rename to test/src/iterator.rs index e36ef6d0..4225e5b0 100644 --- a/trie-db/test/src/iterator.rs +++ b/test/src/iterator.rs @@ -12,36 +12,32 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use hash_db::{HashDB, Hasher}; use hex_literal::hex; use reference_trie::test_layouts; use trie_db::{ node::{Node, Value}, - DBValue, NibbleSlice, NibbleVec, TrieDBBuilder, TrieDBNodeIterator, TrieError, TrieIterator, - TrieLayout, TrieMut, + node_db::Hasher, + NibbleSlice, NibbleVec, TrieDBBuilder, TrieDBNodeIterator, TrieError, TrieIterator, TrieLayout, }; -type MemoryDB = memory_db::MemoryDB< - ::Hash, - memory_db::PrefixedKey<::Hash>, - DBValue, ->; +use crate::TestDB; -fn build_trie_db( +pub(crate) fn build_trie_db>( pairs: &[(Vec, Vec)], -) -> (MemoryDB, ::Out) { - let mut memdb = MemoryDB::::default(); - let mut root = Default::default(); - { - let mut t = trie_db::TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); +) -> (DB, ::Out) { + let mut memdb = DB::default(); + let changeset = { + let mut t = trie_db::TrieDBMutBuilder::::new(&mut memdb).build(); for (x, y) in pairs.iter() { t.insert(x, y).unwrap(); } - } + t.commit() + }; + let root = memdb.commit(changeset); (memdb, root) } -fn nibble_vec>(bytes: T, len: usize) -> NibbleVec { +pub(crate) fn nibble_vec>(bytes: T, len: usize) -> NibbleVec { let slice = NibbleSlice::new(bytes.as_ref()); let mut v = NibbleVec::new(); @@ -52,14 +48,14 @@ fn nibble_vec>(bytes: T, len: usize) -> NibbleVec { } test_layouts!(iterator_works, iterator_works_internal); -fn iterator_works_internal() { +fn iterator_works_internal>() { let pairs = vec![ (hex!("01").to_vec(), b"aaaa".to_vec()), (hex!("0123").to_vec(), b"bbbb".to_vec()), (hex!("02").to_vec(), vec![1; 32]), ]; - let (memdb, root) = build_trie_db::(&pairs); + let (memdb, root) = build_trie_db::(&pairs); let trie = TrieDBBuilder::::new(&memdb, &root).build(); let mut iter = TrieDBNodeIterator::new(&trie).unwrap(); @@ -184,8 +180,8 @@ fn iterator_works_internal() { } test_layouts!(iterator_over_empty_works, iterator_over_empty_works_internal); -fn iterator_over_empty_works_internal() { - let (memdb, root) = build_trie_db::(&[]); +fn iterator_over_empty_works_internal>() { + let (memdb, root) = build_trie_db::(&[]); let trie = TrieDBBuilder::::new(&memdb, &root).build(); let mut iter = TrieDBNodeIterator::new(&trie).unwrap(); @@ -204,14 +200,14 @@ fn iterator_over_empty_works_internal() { } test_layouts!(seek_works, seek_works_internal); -fn seek_works_internal() { +fn seek_works_internal>() { let pairs = vec![ (hex!("01").to_vec(), b"aaaa".to_vec()), (hex!("0123").to_vec(), b"bbbb".to_vec()), (hex!("02").to_vec(), vec![1; 32]), ]; - let (memdb, root) = build_trie_db::(&pairs); + let (memdb, root) = build_trie_db::(&pairs); let trie = TrieDBBuilder::::new(&memdb, &root).build(); let mut iter = TrieDBNodeIterator::new(&trie).unwrap(); @@ -244,8 +240,8 @@ fn seek_works_internal() { } test_layouts!(seek_over_empty_works, seek_over_empty_works_internal); -fn seek_over_empty_works_internal() { - let (memdb, root) = build_trie_db::(&[]); +fn seek_over_empty_works_internal>() { + let (memdb, root) = build_trie_db::(&[]); let trie = TrieDBBuilder::::new(&memdb, &root).build(); let mut iter = TrieDBNodeIterator::new(&trie).unwrap(); @@ -266,7 +262,10 @@ fn seek_over_empty_works_internal() { } test_layouts!(iterate_over_incomplete_db, iterate_over_incomplete_db_internal); -fn iterate_over_incomplete_db_internal() { +fn iterate_over_incomplete_db_internal>() +where + T::Location: std::fmt::Debug, +{ let pairs = vec![ (hex!("01").to_vec(), b"aaaa".to_vec()), (hex!("0123").to_vec(), b"bbbb".to_vec()), @@ -274,7 +273,7 @@ fn iterate_over_incomplete_db_internal() { (hex!("03").to_vec(), vec![2; 32]), ]; 
- let (mut memdb, root) = build_trie_db::(&pairs); + let (mut memdb, root) = build_trie_db::(&pairs); // Look up the leaf node with prefix "02". let leaf_hash = { @@ -338,7 +337,7 @@ fn iterate_over_incomplete_db_internal() { } test_layouts!(prefix_works, prefix_works_internal); -fn prefix_works_internal() { +fn prefix_works_internal>() { let can_expand = T::MAX_INLINE_VALUE.unwrap_or(T::Hash::LENGTH as u32) < T::Hash::LENGTH as u32; let pairs = vec![ (hex!("01").to_vec(), b"aaaa".to_vec()), @@ -346,7 +345,7 @@ fn prefix_works_internal() { (hex!("02").to_vec(), vec![1; 32]), ]; - let (memdb, root) = build_trie_db::(&pairs); + let (memdb, root) = build_trie_db::(&pairs); let trie = TrieDBBuilder::::new(&memdb, &root).build(); let mut iter = TrieDBNodeIterator::new(&trie).unwrap(); @@ -407,8 +406,8 @@ fn prefix_works_internal() { } test_layouts!(prefix_over_empty_works, prefix_over_empty_works_internal); -fn prefix_over_empty_works_internal() { - let (memdb, root) = build_trie_db::(&[]); +fn prefix_over_empty_works_internal>() { + let (memdb, root) = build_trie_db::(&[]); let trie = TrieDBBuilder::::new(&memdb, &root).build(); let mut iter = TrieDBNodeIterator::new(&trie).unwrap(); iter.prefix(&hex!("")[..]).unwrap(); diff --git a/test/src/lib.rs b/test/src/lib.rs new file mode 100644 index 00000000..d0a7a688 --- /dev/null +++ b/test/src/lib.rs @@ -0,0 +1,114 @@ +// Copyright 2020 Parity Technologies +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Tests for subtrie crate. 
+ +#[cfg(test)] +mod double_ended_iterator; +#[cfg(test)] +mod iter_build; +#[cfg(test)] +mod iterator; +#[cfg(test)] +mod proof; +#[cfg(test)] +mod recorder; +#[cfg(test)] +mod trie_codec; +#[cfg(test)] +mod trie_root; +#[cfg(test)] +mod triedb; +#[cfg(test)] +mod triedbmut; + +#[cfg(test)] +use trie_db::{ + mem_tree_db::{Location, MemTreeDB}, + memory_db::{KeyFunction, MemoryDB}, + node_db::{self, Hasher, Prefix}, + Changeset, DBValue, TrieHash, TrieLayout, +}; + +#[cfg(test)] +trait TestDB: node_db::NodeDB + Clone + Default { + fn commit( + &mut self, + commit: trie_db::Changeset<<::Hash as Hasher>::Out, T::Location>, + ) -> TrieHash; + fn remove(&mut self, hash: &::Out, prefix: Prefix); + fn is_empty(&self) -> bool; + fn support_location() -> bool { + false + } +} + +#[cfg(test)] +impl, H, KF> TestDB for MemoryDB +where + H: Hasher, + KF: KeyFunction + Send + Sync, +{ + fn commit( + &mut self, + commit: trie_db::Changeset::Location>, + ) -> H::Out { + commit.apply_to(self) + } + + fn remove(&mut self, hash: &::Out, prefix: Prefix) { + MemoryDB::remove(self, hash, prefix); + } + + fn is_empty(&self) -> bool { + self.keys().is_empty() + } +} + +#[cfg(test)] +impl, H> TestDB for MemTreeDB +where + H: Hasher + Clone, +{ + fn commit(&mut self, commit: trie_db::Changeset) -> H::Out { + self.apply_commit(commit) + } + + fn remove(&mut self, hash: &H::Out, _prefix: Prefix) { + MemTreeDB::test_remove_node(self, hash); + } + + fn is_empty(&self) -> bool { + MemTreeDB::is_empty(self) + } + + fn support_location() -> bool { + true + } +} + +#[cfg(test)] +trait TestCommit { + fn commit_to>(self, db: &mut DB) -> TrieHash; +} + +#[cfg(test)] +impl> TestCommit for Changeset +where + T::Hash: Hasher, +{ + fn commit_to>(self, db: &mut DB) -> TrieHash { + db.commit(self) + } +} diff --git a/trie-db/test/src/proof.rs b/test/src/proof.rs similarity index 72% rename from trie-db/test/src/proof.rs rename to test/src/proof.rs index cca2c70e..331f43b0 100644 --- a/trie-db/test/src/proof.rs +++ b/test/src/proof.rs @@ -12,17 +12,19 @@ // See the License for the specific language governing permissions and // limitations under the License. -use hash_db::Hasher; use reference_trie::{test_layouts, NoExtensionLayout}; +use trie_db::node_db::Hasher; use trie_db::{ proof::{generate_proof, verify_proof, VerifyError}, - DBValue, Trie, TrieDBBuilder, TrieDBMutBuilder, TrieLayout, TrieMut, + DBValue, Trie, TrieDBBuilder, TrieDBMutBuilder, TrieLayout, }; -type MemoryDB = memory_db::MemoryDB< +use crate::TestDB; + +type MemoryDB = trie_db::memory_db::MemoryDB< ::Hash, - memory_db::HashKey<::Hash>, + trie_db::memory_db::HashKey<::Hash>, DBValue, >; @@ -44,20 +46,19 @@ fn test_entries() -> Vec<(&'static [u8], &'static [u8])> { ] } -fn test_generate_proof( +fn test_generate_proof>( entries: Vec<(&'static [u8], &'static [u8])>, keys: Vec<&'static [u8]>, ) -> (::Out, Vec>, Vec<(&'static [u8], Option)>) { // Populate DB with full trie from entries. 
let (db, root) = { - let mut db = >::default(); - let mut root = Default::default(); - { - let mut trie = >::new(&mut db, &mut root).build(); - for (key, value) in entries.iter() { - trie.insert(key, value).unwrap(); - } + let mut db = DB::default(); + let mut trie = >::new(&db).build(); + for (key, value) in entries.iter() { + trie.insert(key, value).unwrap(); } + let commit = trie.commit(); + let root = db.commit(commit); (db, root) }; @@ -70,8 +71,8 @@ fn test_generate_proof( } test_layouts!(trie_proof_works2, trie_proof_works_internal2); -fn trie_proof_works_internal2() { - let (root, proof, items) = test_generate_proof::( +fn trie_proof_works_internal2>() { + let (root, proof, items) = test_generate_proof::( vec![ // "do" is at a hash-referenced branch node. (&b"do"[..], b"verb"), @@ -85,8 +86,8 @@ fn trie_proof_works_internal2() { } test_layouts!(trie_proof_works, trie_proof_works_internal); -fn trie_proof_works_internal() { - let (root, proof, items) = test_generate_proof::( +fn trie_proof_works_internal>() { + let (root, proof, items) = test_generate_proof::( test_entries(), vec![ b"do", b"dog", b"doge", b"bravo", b"alfabet", // None, not found under leaf node @@ -100,16 +101,16 @@ fn trie_proof_works_internal() { } test_layouts!(trie_proof_works_for_empty_trie, trie_proof_works_for_empty_trie_internal); -fn trie_proof_works_for_empty_trie_internal() { +fn trie_proof_works_for_empty_trie_internal>() { let (root, proof, items) = - test_generate_proof::(vec![], vec![b"alpha", b"bravo", b"\x42\x42"]); + test_generate_proof::(vec![], vec![b"alpha", b"bravo", b"\x42\x42"]); verify_proof::(&root, &proof, items.iter()).unwrap(); } test_layouts!(test_verify_duplicate_keys, test_verify_duplicate_keys_internal); -fn test_verify_duplicate_keys_internal() { - let (root, proof, _) = test_generate_proof::(test_entries(), vec![b"bravo"]); +fn test_verify_duplicate_keys_internal>() { + let (root, proof, _) = test_generate_proof::(test_entries(), vec![b"bravo"]); let items = vec![(b"bravo", Some(b"bravo")), (b"bravo", Some(b"bravo"))]; assert!(if let Err(VerifyError::DuplicateKey(key)) = @@ -122,8 +123,8 @@ fn test_verify_duplicate_keys_internal() { } test_layouts!(test_verify_extraneaous_node, test_verify_extraneaous_node_internal); -fn test_verify_extraneaous_node_internal() { - let (root, proof, _) = test_generate_proof::(test_entries(), vec![b"bravo", b"do"]); +fn test_verify_extraneaous_node_internal>() { + let (root, proof, _) = test_generate_proof::(test_entries(), vec![b"bravo", b"do"]); let items = vec![(b"bravo", Some(b"bravo"))]; assert!(matches!( @@ -133,8 +134,8 @@ fn test_verify_extraneaous_node_internal() { } test_layouts!(test_verify_extraneaous_value, test_verify_extraneaous_value_internal); -fn test_verify_extraneaous_value_internal() { - let (root, proof, _) = test_generate_proof::(test_entries(), vec![b"doge"]); +fn test_verify_extraneaous_value_internal>() { + let (root, proof, _) = test_generate_proof::(test_entries(), vec![b"doge"]); let items = vec![(&b"do"[..], Some(&b"verb"[..])), (&b"doge"[..], Some(&[0; 32][..]))]; assert!(if let Err(VerifyError::ExtraneousValue(val)) = @@ -148,7 +149,10 @@ fn test_verify_extraneaous_value_internal() { #[test] fn test_verify_extraneous_hash_reference() { - let (root, proof, _) = test_generate_proof::(test_entries(), vec![b"do"]); + let (root, proof, _) = test_generate_proof::>( + test_entries(), + vec![b"do"], + ); let items = vec![(&b"alfa"[..], Some(&[0; 32][..])), (&b"do"[..], Some(&b"verb"[..]))]; match verify_proof::(&root, &proof, 
items.iter()) { @@ -158,8 +162,8 @@ fn test_verify_extraneous_hash_reference() { } test_layouts!(test_verify_invalid_child_reference, test_verify_invalid_child_reference_internal); -fn test_verify_invalid_child_reference_internal() { - let (root, proof, _) = test_generate_proof::(test_entries(), vec![b"bravo"]); +fn test_verify_invalid_child_reference_internal>() { + let (root, proof, _) = test_generate_proof::(test_entries(), vec![b"bravo"]); if T::MAX_INLINE_VALUE.map_or(false, |t| t as usize <= b"bravo".len()) { // node will not be inline: ignore test @@ -178,8 +182,8 @@ test_layouts!( test_verify_value_mismatch_some_to_none, test_verify_value_mismatch_some_to_none_internal ); -fn test_verify_value_mismatch_some_to_none_internal() { - let (root, proof, _) = test_generate_proof::(test_entries(), vec![b"horse"]); +fn test_verify_value_mismatch_some_to_none_internal>() { + let (root, proof, _) = test_generate_proof::(test_entries(), vec![b"horse"]); let items = vec![(&b"horse"[..], Some(&b"stallion"[..])), (&b"halp"[..], Some(&b"plz"[..]))]; assert!(if let Err(VerifyError::ValueMismatch(val)) = @@ -195,8 +199,8 @@ test_layouts!( test_verify_value_mismatch_none_to_some, test_verify_value_mismatch_none_to_some_internal ); -fn test_verify_value_mismatch_none_to_some_internal() { - let (root, proof, _) = test_generate_proof::(test_entries(), vec![b"alfa", b"bravo"]); +fn test_verify_value_mismatch_none_to_some_internal>() { + let (root, proof, _) = test_generate_proof::(test_entries(), vec![b"alfa", b"bravo"]); let items = vec![(&b"alfa"[..], Some(&[0; 32][..])), (&b"bravo"[..], None)]; assert!(if let Err(VerifyError::ValueMismatch(val)) = @@ -209,8 +213,8 @@ fn test_verify_value_mismatch_none_to_some_internal() { } test_layouts!(test_verify_incomplete_proof, test_verify_incomplete_proof_internal); -fn test_verify_incomplete_proof_internal() { - let (root, mut proof, items) = test_generate_proof::(test_entries(), vec![b"alfa"]); +fn test_verify_incomplete_proof_internal>() { + let (root, mut proof, items) = test_generate_proof::(test_entries(), vec![b"alfa"]); proof.pop(); assert!(matches!( @@ -220,8 +224,8 @@ fn test_verify_incomplete_proof_internal() { } test_layouts!(test_verify_root_mismatch, test_verify_root_mismatch_internal); -fn test_verify_root_mismatch_internal() { - let (root, proof, _) = test_generate_proof::(test_entries(), vec![b"bravo"]); +fn test_verify_root_mismatch_internal>() { + let (root, proof, _) = test_generate_proof::(test_entries(), vec![b"bravo"]); let items = vec![(b"bravo", Some("incorrect"))]; match verify_proof::(&root, &proof, items.iter()) { @@ -231,8 +235,8 @@ fn test_verify_root_mismatch_internal() { } test_layouts!(test_verify_decode_error, test_verify_decode_error_internal); -fn test_verify_decode_error_internal() { - let (root, mut proof, items) = test_generate_proof::(test_entries(), vec![b"bravo"]); +fn test_verify_decode_error_internal>() { + let (root, mut proof, items) = test_generate_proof::(test_entries(), vec![b"bravo"]); proof.insert(0, b"this is not a trie node".to_vec()); match verify_proof::(&root, &proof, items.iter()) { diff --git a/trie-db/test/src/recorder.rs b/test/src/recorder.rs similarity index 83% rename from trie-db/test/src/recorder.rs rename to test/src/recorder.rs index 485d0153..baa39ad8 100644 --- a/trie-db/test/src/recorder.rs +++ b/test/src/recorder.rs @@ -14,26 +14,28 @@ //! Trie query recorder. 
-use memory_db::{HashKey, MemoryDB}; use reference_trie::{NoExtensionLayout, RefHasher, RefTrieDBBuilder, RefTrieDBMutBuilder}; -use trie_db::{Recorder, Trie, TrieMut}; +use trie_db::{ + memory_db::{HashKey, MemoryDB}, + Recorder, Trie, +}; #[test] fn trie_record() { let mut db = MemoryDB::, _>::default(); - let mut root = Default::default(); - { - let mut x = RefTrieDBMutBuilder::new(&mut db, &mut root).build(); + let mut x = RefTrieDBMutBuilder::new(&mut db).build(); - x.insert(b"dog", b"cat").unwrap(); - x.insert(b"lunch", b"time").unwrap(); - x.insert(b"notdog", b"notcat").unwrap(); - x.insert(b"hotdog", b"hotcat").unwrap(); - x.insert(b"letter", b"confusion").unwrap(); - x.insert(b"insert", b"remove").unwrap(); - x.insert(b"pirate", b"aargh!").unwrap(); - x.insert(b"yo ho ho", b"and a bottle of rum").unwrap(); - } + x.insert(b"dog", b"cat").unwrap(); + x.insert(b"lunch", b"time").unwrap(); + x.insert(b"notdog", b"notcat").unwrap(); + x.insert(b"hotdog", b"hotcat").unwrap(); + x.insert(b"letter", b"confusion").unwrap(); + x.insert(b"insert", b"remove").unwrap(); + x.insert(b"pirate", b"aargh!").unwrap(); + x.insert(b"yo ho ho", b"and a bottle of rum").unwrap(); + let commit = x.commit(); + let root = *commit.hash(); + commit.apply_to(&mut db); { let mut recorder = Recorder::::new(); diff --git a/trie-db/test/src/trie_codec.rs b/test/src/trie_codec.rs similarity index 79% rename from trie-db/test/src/trie_codec.rs rename to test/src/trie_codec.rs index 17b52cb5..6227bd2b 100644 --- a/trie-db/test/src/trie_codec.rs +++ b/test/src/trie_codec.rs @@ -12,33 +12,34 @@ // See the License for the specific language governing permissions and // limitations under the License. -use hash_db::{HashDB, Hasher, EMPTY_PREFIX}; use reference_trie::{test_layouts, ExtensionLayout}; use trie_db::{ - decode_compact, encode_compact, DBValue, NodeCodec, Recorder, Trie, TrieDBBuilder, - TrieDBMutBuilder, TrieError, TrieLayout, TrieMut, + decode_compact, encode_compact, + node_db::{Hasher, EMPTY_PREFIX}, + DBValue, NodeCodec, Recorder, Trie, TrieDBBuilder, TrieDBMutBuilder, TrieError, TrieLayout, }; -type MemoryDB = memory_db::MemoryDB< +use crate::TestDB; + +type MemoryDB = trie_db::memory_db::MemoryDB< ::Hash, - memory_db::HashKey<::Hash>, + trie_db::memory_db::HashKey<::Hash>, DBValue, >; -fn test_encode_compact( +fn test_encode_compact>( entries: Vec<(&'static [u8], &'static [u8])>, keys: Vec<&'static [u8]>, ) -> (::Out, Vec>, Vec<(&'static [u8], Option)>) { // Populate DB with full trie from entries. let (db, root) = { - let mut db = >::default(); - let mut root = Default::default(); - { - let mut trie = >::new(&mut db, &mut root).build(); - for (key, value) in entries.iter() { - trie.insert(key, value).unwrap(); - } + let mut db = DB::default(); + let mut trie = >::new(&mut db).build(); + for (key, value) in entries.iter() { + trie.insert(key, value).unwrap(); } + let commit = trie.commit(); + let root = db.commit(commit); (db, root) }; @@ -77,7 +78,7 @@ fn test_decode_compact( ) { // Reconstruct the partial DB from the compact encoding. 
let mut db = MemoryDB::::default(); - let (root, used) = decode_compact::(&mut db, encoded).unwrap(); + let (root, used) = decode_compact::(&mut db, encoded).unwrap(); assert_eq!(root, expected_root); assert_eq!(used, expected_used); @@ -89,8 +90,8 @@ fn test_decode_compact( } test_layouts!(trie_compact_encoding_works, trie_compact_encoding_works_internal); -fn trie_compact_encoding_works_internal() { - let (root, mut encoded, items) = test_encode_compact::( +fn trie_compact_encoding_works_internal>() { + let (root, mut encoded, items) = test_encode_compact::( vec![ // "alfa" is at a hash-referenced leaf node. (b"alfa", &[0; 32]), @@ -122,15 +123,17 @@ test_layouts!( trie_decoding_fails_with_incomplete_database, trie_decoding_fails_with_incomplete_database_internal ); -fn trie_decoding_fails_with_incomplete_database_internal() { - let (_, encoded, _) = - test_encode_compact::(vec![(b"alfa", &[0; 32]), (b"bravo", b"bravo")], vec![b"alfa"]); +fn trie_decoding_fails_with_incomplete_database_internal>() { + let (_, encoded, _) = test_encode_compact::( + vec![(b"alfa", &[0; 32]), (b"bravo", b"bravo")], + vec![b"alfa"], + ); assert!(encoded.len() > 1); // Reconstruct the partial DB from the compact encoding. let mut db = MemoryDB::::default(); - match decode_compact::(&mut db, &encoded[..encoded.len() - 1]) { + match decode_compact::(&mut db, &encoded[..encoded.len() - 1]) { Err(err) => match *err { TrieError::IncompleteDatabase(_) => {}, _ => panic!("got unexpected TrieError"), @@ -160,14 +163,14 @@ fn encoding_node_owned_and_decoding_node_works() { // Populate DB with full trie from entries. let mut recorder = { let mut db = >::default(); - let mut root = Default::default(); let mut recorder = Recorder::::new(); - { - let mut trie = >::new(&mut db, &mut root).build(); - for (key, value) in entries.iter() { - trie.insert(key, value).unwrap(); - } + let mut trie = >::new(&mut db).build(); + for (key, value) in entries.iter() { + trie.insert(key, value).unwrap(); } + let commit = trie.commit(); + commit.apply_to(&mut db); + let root = commit.root_hash(); let trie = TrieDBBuilder::::new(&db, &root) .with_recorder(&mut recorder) @@ -181,7 +184,8 @@ fn encoding_node_owned_and_decoding_node_works() { for record in recorder.drain() { let node = - <::Codec as NodeCodec>::decode(&record.data).unwrap(); + <::Codec as NodeCodec>::decode(&record.data, &[(); 0]) + .unwrap(); let node_owned = node.to_owned_node::().unwrap(); assert_eq!(record.data, node_owned.to_encoded::<::Codec>()); diff --git a/test/src/trie_root.rs b/test/src/trie_root.rs new file mode 100644 index 00000000..bf4e5309 --- /dev/null +++ b/test/src/trie_root.rs @@ -0,0 +1,44 @@ +// Copyright 2017, 2020 Parity Technologies +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Test for trie-root module. 
+ +use hex_literal::hex; +use reference_trie::ReferenceTrieStream; +use trie_db::{ + keccak_hasher::KeccakHasher, + trie_root::{sec_trie_root, trie_root}, +}; + +#[test] +fn previous_doc_test_1() { + let v = vec![("doe", "reindeer"), ("dog", "puppy"), ("dogglesworth", "cat")]; + + let root = hex!["d6e02b2bd48aa04fd2ad87cfac1144a29ca7f7dc60f4526c7b7040763abe3d43"]; + assert_eq!( + sec_trie_root::(v, Default::default()), + root + ); +} + +#[test] +fn previous_doc_test_2() { + let v = vec![("doe", "reindeer"), ("dog", "puppy"), ("dogglesworth", "cat")]; + + let root = hex!["0807d5393ae7f349481063ebb5dbaf6bda58db282a385ca97f37dccba717cb79"]; + assert_eq!( + trie_root::(v, Default::default()), + root + ); +} diff --git a/trie-db/test/src/triedb.rs b/test/src/triedb.rs similarity index 65% rename from trie-db/test/src/triedb.rs rename to test/src/triedb.rs index 9825ab50..0ff0b18a 100644 --- a/trie-db/test/src/triedb.rs +++ b/test/src/triedb.rs @@ -14,37 +14,38 @@ use std::ops::Deref; -use hash_db::{HashDB, Hasher, EMPTY_PREFIX}; use hex_literal::hex; -use memory_db::{HashKey, MemoryDB, PrefixedKey}; use reference_trie::{ - test_layouts, test_layouts_substrate, HashedValueNoExtThreshold, TestTrieCache, + test_layouts, test_layouts_substrate, HashedValueNoExtThreshold, PrefixedMemoryDB, + TestTrieCache, }; use trie_db::{ - encode_compact, CachedValue, DBValue, Lookup, NibbleSlice, Recorder, Trie, TrieCache, - TrieDBBuilder, TrieDBMutBuilder, TrieLayout, TrieMut, + encode_compact, + memory_db::{HashKey, MemoryDB}, + node_db::{Hasher, EMPTY_PREFIX}, + CachedValue, DBValue, Lookup, NibbleSlice, RecordedForKey, Recorder, Trie, TrieCache, + TrieDBBuilder, TrieDBMutBuilder, TrieLayout, TrieRecorder, }; -type PrefixedMemoryDB = - MemoryDB<::Hash, PrefixedKey<::Hash>, DBValue>; +use crate::{TestCommit, TestDB}; + type MemoryDBProof = MemoryDB<::Hash, HashKey<::Hash>, DBValue>; test_layouts!(iterator_works, iterator_works_internal); -fn iterator_works_internal() { +fn iterator_works_internal>() { let pairs = vec![ (hex!("0103000000000000000464").to_vec(), hex!("fffffffffe").to_vec()), (hex!("0103000000000010000469").to_vec(), hex!("ffffffffff").to_vec()), ]; - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - for (x, y) in &pairs { - t.insert(x, y).unwrap(); - } + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + for (x, y) in &pairs { + t.insert(x, y).unwrap(); } + let commit = t.commit(); + let root = memdb.commit(commit); let trie = TrieDBBuilder::::new(&memdb, &root).build(); @@ -59,20 +60,18 @@ fn iterator_works_internal() { } test_layouts!(iterator_seek_works, iterator_seek_works_internal); -fn iterator_seek_works_internal() { +fn iterator_seek_works_internal>() { let pairs = vec![ (hex!("0103000000000000000464").to_vec(), hex!("fffffffffe").to_vec()), (hex!("0103000000000000000469").to_vec(), hex!("ffffffffff").to_vec()), ]; - let mut memdb = MemoryDB::, DBValue>::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - for (x, y) in &pairs { - t.insert(x, y).unwrap(); - } + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + for (x, y) in &pairs { + t.insert(x, y).unwrap(); } + let root = t.commit().commit_to(&mut memdb); let t = TrieDBBuilder::::new(&memdb, &root).build(); @@ -94,18 +93,50 @@ fn iterator_seek_works_internal() { ); } 
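The `triedb.rs` hunks above all migrate from the old `new(&mut db, &mut root)` constructor to a changeset-based flow. The snippet below is a condensed, illustrative sketch of that flow, assembled from the hunks in this diff (notably `test/src/recorder.rs`); the explicit generic parameters on `MemoryDB` are an assumption, and the function is a sketch of the migration pattern rather than verbatim project code.

```rust
use reference_trie::{RefHasher, RefTrieDBBuilder, RefTrieDBMutBuilder};
use trie_db::{
	memory_db::{HashKey, MemoryDB},
	DBValue, Trie,
};

fn commit_flow_sketch() {
	// Backing database; the key-function and value type parameters are assumed here.
	let mut db = MemoryDB::<RefHasher, HashKey<RefHasher>, DBValue>::default();

	// The mutable trie no longer borrows a `&mut root`; it is built over the DB alone.
	let mut t = RefTrieDBMutBuilder::new(&mut db).build();
	t.insert(b"dog", b"cat").unwrap();

	// `commit()` returns a changeset instead of writing through the builder's arguments.
	let commit = t.commit();
	let root = *commit.hash(); // the new root hash is read from the changeset
	commit.apply_to(&mut db); // the changeset is applied to the backing DB explicitly

	// Reading back goes through the usual read-only builder.
	let trie = RefTrieDBBuilder::new(&db, &root).build();
	assert_eq!(trie.get(b"dog").unwrap().unwrap(), b"cat".to_vec());
}
```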
+test_layouts!(double_ended_iterator, double_ended_iterator_internal); +fn double_ended_iterator_internal>() { + let pairs = vec![ + (hex!("01").to_vec(), hex!("01").to_vec()), + (hex!("02").to_vec(), hex!("02").to_vec()), + (hex!("03").to_vec(), hex!("03").to_vec()), + (hex!("10").to_vec(), hex!("10").to_vec()), + (hex!("11").to_vec(), hex!("11").to_vec()), + ]; + + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + for (x, y) in &pairs { + t.insert(x, y).unwrap(); + } + let commit = t.commit(); + let root = memdb.commit(commit); + + let t = TrieDBBuilder::::new(&memdb, &root).build(); + assert_eq!(pairs, t.iter().unwrap().map(|x| x.unwrap()).collect::>()); + + let mut iter = t.into_double_ended_iter().unwrap(); + + for i in 0..pairs.len() { + assert_eq!(iter.next().unwrap().unwrap(), pairs[i].clone()); + } + assert!(iter.next().is_none()); + + for i in (0..pairs.len()).rev() { + assert_eq!(iter.next_back().unwrap().unwrap(), pairs[i].clone()); + } + assert!(iter.next_back().is_none()); +} + test_layouts!(iterator, iterator_internal); -fn iterator_internal() { +fn iterator_internal>() { let d = vec![b"A".to_vec(), b"AA".to_vec(), b"AB".to_vec(), b"B".to_vec()]; - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - for x in &d { - t.insert(x, x).unwrap(); - } + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + for x in &d { + t.insert(x, x).unwrap(); } + let root = t.commit().commit_to(&mut memdb); let t = TrieDBBuilder::::new(&memdb, &root).build(); assert_eq!( @@ -116,18 +147,16 @@ fn iterator_internal() { } test_layouts!(iterator_seek, iterator_seek_internal); -fn iterator_seek_internal() { +fn iterator_seek_internal>() { let d = vec![b"A".to_vec(), b"AA".to_vec(), b"AB".to_vec(), b"AS".to_vec(), b"B".to_vec()]; let vals = vec![vec![0; 32], vec![1; 32], vec![2; 32], vec![4; 32], vec![3; 32]]; - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - for (k, val) in d.iter().zip(vals.iter()) { - t.insert(k, val.as_slice()).unwrap(); - } + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + for (k, val) in d.iter().zip(vals.iter()) { + t.insert(k, val.as_slice()).unwrap(); } + let root = t.commit().commit_to(&mut memdb); let t = TrieDBBuilder::::new(&memdb, &root).build(); let mut iter = t.iter().unwrap(); @@ -171,24 +200,24 @@ fn iterator_seek_internal() { assert_eq!(&vals[5..], &iter.map(|x| x.unwrap().1).collect::>()[..]); } -fn trie_from_hex_keys(keys: &[&str], callback: impl FnOnce(&mut trie_db::TrieDB)) -where +fn trie_from_hex_keys>( + keys: &[&str], + callback: impl FnOnce(&mut trie_db::TrieDB), +) where T: TrieLayout, { - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - for (index, key) in keys.iter().enumerate() { - t.insert(&array_bytes::hex2bytes(key).unwrap(), &[index as u8]).unwrap(); - } + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + for (index, key) in keys.iter().enumerate() { + t.insert(&array_bytes::hex2bytes(key).unwrap(), &[index as u8]).unwrap(); } + let root = t.commit().commit_to(&mut memdb); let mut t = TrieDBBuilder::::new(&memdb, &root).build(); callback(&mut t); } -fn 
test_prefixed_then_seek( +fn test_prefixed_then_seek>( keys: &[&str], prefix_key: &str, seek_key: &str, @@ -197,7 +226,7 @@ fn test_prefixed_then_seek( let prefix_key = array_bytes::hex2bytes(prefix_key).unwrap(); let seek_key = array_bytes::hex2bytes(seek_key).unwrap(); - trie_from_hex_keys::(keys, |trie| { + trie_from_hex_keys::(keys, |trie| { let iter = trie_db::TrieDBIterator::new_prefixed_then_seek(&trie, &prefix_key, &seek_key).unwrap(); let output: Vec<_> = iter.map(|x| array_bytes::bytes2hex("", x.unwrap().0)).collect(); @@ -248,36 +277,36 @@ fn iterator_prefixed_then_seek_real_world() { ]; let target_key = "7474449cca95dc5d0c00e71735a6d17d3cd15a3fd6e04e47bee3922dbfa92c8da7dad55cf08ffe8194efa962146801b0503092b1ed6a3fa6aee9107334aefd7965bbe568c3d24c6d"; - test_prefixed_then_seek::(keys, target_key, target_key, &[]); + test_prefixed_then_seek::>(keys, target_key, target_key, &[]); } // This is the real-word test, but simplified. test_layouts_substrate!(iterator_prefixed_then_seek_simple); fn iterator_prefixed_then_seek_simple() { - test_prefixed_then_seek::(&["0100"], "00", "00", &[]); + test_prefixed_then_seek::>(&["0100"], "00", "00", &[]); } // These are just tests that the fuzzer barfed out while working on the fix for the real-world // issue. test_layouts_substrate!(iterator_prefixed_then_seek_testcase_1); fn iterator_prefixed_then_seek_testcase_1() { - test_prefixed_then_seek::(&["00"], "00", "", &["00"]) + test_prefixed_then_seek::>(&["00"], "00", "", &["00"]) } test_layouts_substrate!(iterator_prefixed_then_seek_testcase_2); fn iterator_prefixed_then_seek_testcase_2() { - test_prefixed_then_seek::(&["00", "0003"], "00", "", &["00", "0003"]) + test_prefixed_then_seek::>(&["00", "0003"], "00", "", &["00", "0003"]) } test_layouts_substrate!(iterator_prefixed_then_seek_testcase_3); fn iterator_prefixed_then_seek_testcase_3() { - test_prefixed_then_seek::(&["20"], "20", "0700", &["20"]) + test_prefixed_then_seek::>(&["20"], "20", "0700", &["20"]) } test_layouts_substrate!(iterator_prefixed_then_seek_testcase_4); fn iterator_prefixed_then_seek_testcase_4() { let keys = &["1701", "ffffffffffffffffffffffdfffffffffffffffffffffffffffffffffffffffff"]; - test_prefixed_then_seek::( + test_prefixed_then_seek::>( keys, "1701", "ffff27272727274949494949ce494949494949494949491768687b737373732b", @@ -287,18 +316,16 @@ fn iterator_prefixed_then_seek_testcase_4() { test_layouts_substrate!(iterator_prefixed_then_seek_testcase_5); fn iterator_prefixed_then_seek_testcase_5() { - test_prefixed_then_seek::(&["20"], "20", "20", &["20"]) + test_prefixed_then_seek::>(&["20"], "20", "20", &["20"]) } test_layouts!(get_length_with_extension, get_length_with_extension_internal); -fn get_length_with_extension_internal() { - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - t.insert(b"A", b"ABC").unwrap(); - t.insert(b"B", b"ABCBAAAAAAAAAAAAAAAAAAAAAAAAAAAA").unwrap(); - } +fn get_length_with_extension_internal>() { + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + t.insert(b"A", b"ABC").unwrap(); + t.insert(b"B", b"ABCBAAAAAAAAAAAAAAAAAAAAAAAAAAAA").unwrap(); + let root = t.commit().commit_to(&mut memdb); let t = TrieDBBuilder::::new(&memdb, &root).build(); assert_eq!(t.get_with(b"A", |x: &[u8]| x.len()).unwrap(), Some(3)); @@ -307,18 +334,19 @@ fn get_length_with_extension_internal() { } test_layouts!(debug_output_supports_pretty_print, 
debug_output_supports_pretty_print_internal); -fn debug_output_supports_pretty_print_internal() { +fn debug_output_supports_pretty_print_internal>() +where + T::Location: std::fmt::Debug, +{ let d = vec![b"A".to_vec(), b"AA".to_vec(), b"AB".to_vec(), b"B".to_vec()]; - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - let root = { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - for x in &d { - t.insert(x, x).unwrap(); - } - t.root().clone() - }; + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + for x in &d { + t.insert(x, x).unwrap(); + } + let root = t.commit().commit_to(&mut memdb); + let t = TrieDBBuilder::::new(&memdb, &root).build(); if T::USE_EXTENSION { @@ -391,26 +419,31 @@ test_layouts!( test_lookup_with_corrupt_data_returns_decoder_error, test_lookup_with_corrupt_data_returns_decoder_error_internal ); -fn test_lookup_with_corrupt_data_returns_decoder_error_internal() { - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - t.insert(b"A", b"ABC").unwrap(); - t.insert(b"B", b"ABCBA").unwrap(); - } +fn test_lookup_with_corrupt_data_returns_decoder_error_internal>() { + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + t.insert(b"A", b"ABC").unwrap(); + t.insert(b"B", b"ABCBA").unwrap(); + let root = t.commit().commit_to(&mut memdb); let t = TrieDBBuilder::::new(&memdb, &root).build(); // query for an invalid data type to trigger an error let q = |x: &[u8]| x.len() < 64; - let lookup = Lookup:: { db: t.db(), query: q, hash: root, cache: None, recorder: None }; + let lookup = Lookup:: { + db: t.db(), + query: q, + hash: root, + location: Default::default(), + cache: None, + recorder: None, + }; let query_result = lookup.look_up(&b"A"[..], NibbleSlice::new(b"A")); assert_eq!(query_result.unwrap().unwrap(), true); } test_layouts!(test_recorder, test_recorder_internal); -fn test_recorder_internal() { +fn test_recorder_internal>() { let key_value = vec![ (b"A".to_vec(), vec![1; 64]), (b"AA".to_vec(), vec![2; 64]), @@ -418,14 +451,12 @@ fn test_recorder_internal() { (b"B".to_vec(), vec![4; 64]), ]; - let mut memdb = MemoryDB::, DBValue>::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - for (key, value) in &key_value { - t.insert(key, value).unwrap(); - } + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&memdb).build(); + for (key, value) in &key_value { + t.insert(key, value).unwrap(); } + let root = memdb.commit(t.commit()); let mut recorder = Recorder::::new(); { @@ -452,7 +483,7 @@ fn test_recorder_internal() { } test_layouts!(test_recorder_with_cache, test_recorder_with_cache_internal); -fn test_recorder_with_cache_internal() { +fn test_recorder_with_cache_internal>() { let key_value = vec![ (b"A".to_vec(), vec![1; 64]), (b"AA".to_vec(), vec![2; 64]), @@ -460,15 +491,13 @@ fn test_recorder_with_cache_internal() { (b"B".to_vec(), vec![4; 64]), ]; - let mut memdb = MemoryDB::, DBValue>::default(); - let mut root = Default::default(); + let mut memdb = DB::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - for (key, value) in &key_value { - t.insert(key, value).unwrap(); - } + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + for (key, value) in &key_value { + t.insert(key, value).unwrap(); } + let 
root = t.commit().commit_to(&mut memdb); let mut cache = TestTrieCache::::default(); @@ -480,7 +509,7 @@ fn test_recorder_with_cache_internal() { } // Root should now be cached. - assert!(cache.get_node(&root).is_some()); + assert!(cache.get_node(&root, Default::default()).is_some()); // Also the data should be cached. let value = cache.lookup_value_for_key(&key_value[1].0).unwrap(); @@ -539,7 +568,7 @@ fn test_recorder_with_cache_internal() { } test_layouts!(test_recorder_with_cache_get_hash, test_recorder_with_cache_get_hash_internal); -fn test_recorder_with_cache_get_hash_internal() { +fn test_recorder_with_cache_get_hash_internal>() { let key_value = vec![ (b"A".to_vec(), vec![1; 64]), (b"AA".to_vec(), vec![2; 64]), @@ -547,15 +576,13 @@ fn test_recorder_with_cache_get_hash_internal() { (b"B".to_vec(), vec![4; 64]), ]; - let mut memdb = MemoryDB::, DBValue>::default(); - let mut root = Default::default(); + let mut memdb = DB::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - for (key, value) in &key_value { - t.insert(key, value).unwrap(); - } + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + for (key, value) in &key_value { + t.insert(key, value).unwrap(); } + let root = t.commit().commit_to(&mut memdb); let mut cache = TestTrieCache::::default(); @@ -570,7 +597,7 @@ fn test_recorder_with_cache_get_hash_internal() { } // Root should now be cached. - assert!(cache.get_node(&root).is_some()); + assert!(cache.get_node(&root, Default::default()).is_some()); // Also the data should be cached. if T::MAX_INLINE_VALUE.map_or(true, |l| l as usize > key_value[1].1.len()) { @@ -581,7 +608,7 @@ fn test_recorder_with_cache_get_hash_internal() { } else { assert!(matches!( cache.lookup_value_for_key(&key_value[1].0).unwrap(), - CachedValue::ExistingHash(hash) if *hash == T::Hash::hash(&key_value[1].1) + CachedValue::ExistingHash(hash, _) if *hash == T::Hash::hash(&key_value[1].1) )); } @@ -644,19 +671,211 @@ fn test_recorder_with_cache_get_hash_internal() { } } +test_layouts!(test_merkle_value, test_merkle_value_internal); +fn test_merkle_value_internal>() { + let mut memdb = DB::default(); + + // Data set. + let key_value = vec![ + (b"A".to_vec(), vec![1; 64]), + (b"AA".to_vec(), vec![2; 64]), + (b"AAAA".to_vec(), vec![3; 64]), + (b"AAB".to_vec(), vec![4; 64]), + (b"AABBBB".to_vec(), vec![4; 1]), + (b"AB".to_vec(), vec![5; 1]), + (b"B".to_vec(), vec![6; 1]), + ]; + let mut t = TrieDBMutBuilder::::new(&memdb).build(); + for (key, value) in &key_value { + t.insert(key, value).unwrap(); + } + let root = memdb.commit(t.commit()); + + // Ensure we can fetch the merkle values for all present keys. + let trie = TrieDBBuilder::::new(&memdb, &root).build(); + for (key, _) in &key_value { + trie.lookup_first_descendant(key).unwrap().unwrap(); + } + + // Key is not present and has no descedant, but shares a prefix. 
+ let hash = trie.lookup_first_descendant(b"AAAAX").unwrap(); + assert!(hash.is_none()); + let hash = trie.lookup_first_descendant(b"AABX").unwrap(); + assert!(hash.is_none()); + let hash = trie.lookup_first_descendant(b"AABC").unwrap(); + assert!(hash.is_none()); + let hash = trie.lookup_first_descendant(b"ABX").unwrap(); + assert!(hash.is_none()); + let hash = trie.lookup_first_descendant(b"AABBBBX").unwrap(); + assert!(hash.is_none()); + let hash = trie.lookup_first_descendant(b"BX").unwrap(); + assert!(hash.is_none()); + let hash = trie.lookup_first_descendant(b"AC").unwrap(); + assert!(hash.is_none()); + let hash = trie.lookup_first_descendant(b"BC").unwrap(); + assert!(hash.is_none()); + let hash = trie.lookup_first_descendant(b"AAAAX").unwrap(); + assert!(hash.is_none()); + // Key shares the first nibble with b"A". + let hash = trie.lookup_first_descendant(b"C").unwrap(); + assert!(hash.is_none()); + + // Key not present, but has a descendent. + let hash = trie.lookup_first_descendant(b"AAA").unwrap().unwrap(); + let expected = trie.lookup_first_descendant(b"AAAA").unwrap().unwrap(); + assert_eq!(hash, expected); + let hash = trie.lookup_first_descendant(b"AABB").unwrap().unwrap(); + let expected = trie.lookup_first_descendant(b"AABBBB").unwrap().unwrap(); + assert_eq!(hash, expected); + let hash = trie.lookup_first_descendant(b"AABBB").unwrap().unwrap(); + let expected = trie.lookup_first_descendant(b"AABBBB").unwrap().unwrap(); + assert_eq!(hash, expected); + + // Prefix AABB in between AAB and AABBBB, but has different ending char. + let hash = trie.lookup_first_descendant(b"AABBX").unwrap(); + assert!(hash.is_none()); +} + +test_layouts!(test_merkle_value_single_key, test_merkle_value_single_key_internal); +fn test_merkle_value_single_key_internal>() { + let mut memdb = DB::default(); + + // Data set. + let key_value = vec![(b"AAA".to_vec(), vec![1; 64])]; + let mut t = TrieDBMutBuilder::::new(&memdb).build(); + for (key, value) in &key_value { + t.insert(key, value).unwrap(); + } + let root = memdb.commit(t.commit()); + + let trie = TrieDBBuilder::::new(&memdb, &root).build(); + + let hash = trie.lookup_first_descendant(b"AA").unwrap().unwrap(); + let expected = trie.lookup_first_descendant(b"AAA").unwrap().unwrap(); + assert_eq!(hash, expected); + + // Trie does not contain AAC or AAAA. + let hash = trie.lookup_first_descendant(b"AAC").unwrap(); + assert!(hash.is_none()); + let hash = trie.lookup_first_descendant(b"AAAA").unwrap(); + assert!(hash.is_none()); +} + +test_layouts!(test_merkle_value_branches, test_merkle_value_branches_internal); +fn test_merkle_value_branches_internal>() { + let mut memdb = DB::default(); + + // Data set. + let key_value = vec![(b"AAAA".to_vec(), vec![1; 64]), (b"AABA".to_vec(), vec![2; 64])]; + let mut t = TrieDBMutBuilder::::new(&memdb).build(); + for (key, value) in &key_value { + t.insert(key, value).unwrap(); + } + let root = memdb.commit(t.commit()); + + let trie = TrieDBBuilder::::new(&memdb, &root).build(); + + // The hash is returned from the branch node. + let hash = trie.lookup_first_descendant(b"A").unwrap().unwrap(); + let aaaa_hash = trie.lookup_first_descendant(b"AAAA").unwrap().unwrap(); + let aaba_hash = trie.lookup_first_descendant(b"AABA").unwrap().unwrap(); + // Ensure the hash is not from any leaf. 
+ assert_ne!(hash, aaaa_hash); + assert_ne!(hash, aaba_hash); +} + +test_layouts!(test_merkle_value_empty_trie, test_merkle_value_empty_trie_internal); +fn test_merkle_value_empty_trie_internal>() { + let mut memdb = DB::default(); + + // Valid state root. + let mut t = TrieDBMutBuilder::::new(&memdb).build(); + t.insert(&[], &[]).unwrap(); + let root = memdb.commit(t.commit()); + + // Data set is empty. + let trie = TrieDBBuilder::::new(&memdb, &root).build(); + + let hash = trie.lookup_first_descendant(b"").unwrap(); + assert!(hash.is_none()); + + let hash = trie.lookup_first_descendant(b"A").unwrap(); + assert!(hash.is_none()); + + let hash = trie.lookup_first_descendant(b"AA").unwrap(); + assert!(hash.is_none()); + + let hash = trie.lookup_first_descendant(b"AAA").unwrap(); + assert!(hash.is_none()); + + let hash = trie.lookup_first_descendant(b"AAAA").unwrap(); + assert!(hash.is_none()); +} + +test_layouts!(test_merkle_value_modification, test_merkle_value_modification_internal); +fn test_merkle_value_modification_internal>() { + let mut memdb = DB::default(); + + let key_value = vec![(b"AAAA".to_vec(), vec![1; 64]), (b"AABA".to_vec(), vec![2; 64])]; + let mut t = TrieDBMutBuilder::::new(&memdb).build(); + for (key, value) in &key_value { + t.insert(key, value).unwrap(); + } + let root = memdb.commit(t.commit()); + + let (a_hash_lhs, aaaa_hash_lhs, aaba_hash_lhs) = { + let trie = TrieDBBuilder::::new(&memdb, &root).build(); + + // The hash is returned from the branch node. + let hash = trie.lookup_first_descendant(b"A").unwrap().unwrap(); + let aaaa_hash = trie.lookup_first_descendant(b"AAAA").unwrap().unwrap(); + let aaba_hash = trie.lookup_first_descendant(b"AABA").unwrap().unwrap(); + + // Ensure the hash is not from any leaf. + assert_ne!(hash, aaaa_hash); + assert_ne!(hash, aaba_hash); + + (hash, aaaa_hash, aaba_hash) + }; + + // Modify AABA and expect AAAA to return the same merkle value. + let mut t = TrieDBMutBuilder::::from_existing(&memdb, root).build(); + t.insert(b"AABA", &vec![3; 64]).unwrap(); + let root = memdb.commit(t.commit()); + + let (a_hash_rhs, aaaa_hash_rhs, aaba_hash_rhs) = { + let trie = TrieDBBuilder::::new(&memdb, &root).build(); + + // The hash is returned from the branch node. + let hash = trie.lookup_first_descendant(b"A").unwrap().unwrap(); + let aaaa_hash = trie.lookup_first_descendant(b"AAAA").unwrap().unwrap(); + let aaba_hash = trie.lookup_first_descendant(b"AABA").unwrap().unwrap(); + + // Ensure the hash is not from any leaf. + assert_ne!(hash, aaaa_hash); + assert_ne!(hash, aaba_hash); + + (hash, aaaa_hash, aaba_hash) + }; + + // AAAA was not modified. + assert_eq!(aaaa_hash_lhs, aaaa_hash_rhs); + // Changes to AABA must propagate to the root. 
+ assert_ne!(aaba_hash_lhs, aaba_hash_rhs); + assert_ne!(a_hash_lhs, a_hash_rhs); +} + test_layouts!(iterator_seek_with_recorder, iterator_seek_with_recorder_internal); -fn iterator_seek_with_recorder_internal() { +fn iterator_seek_with_recorder_internal>() { let d = vec![b"A".to_vec(), b"AA".to_vec(), b"AB".to_vec(), b"B".to_vec()]; let vals = vec![vec![0; 64], vec![1; 64], vec![2; 64], vec![3; 64]]; - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - for (k, val) in d.iter().zip(vals.iter()) { - t.insert(k, val.as_slice()).unwrap(); - } + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + for (k, val) in d.iter().zip(vals.iter()) { + t.insert(k, val.as_slice()).unwrap(); } + let root = t.commit().commit_to(&mut memdb); let mut recorder = Recorder::::new(); { @@ -682,7 +901,7 @@ fn iterator_seek_with_recorder_internal() { } test_layouts!(test_cache, test_cache_internal); -fn test_cache_internal() { +fn test_cache_internal>() { let key_value = vec![ (b"A".to_vec(), vec![1; 64]), (b"AA".to_vec(), vec![2; 64]), @@ -691,16 +910,20 @@ fn test_cache_internal() { (b"BC".to_vec(), vec![4; 64]), ]; - let mut memdb = MemoryDB::, DBValue>::default(); - let mut root = Default::default(); + let mut memdb = DB::default(); let mut cache = TestTrieCache::::default(); - { - let mut t = - TrieDBMutBuilder::::new(&mut memdb, &mut root).with_cache(&mut cache).build(); + let changeset = { + let mut t = TrieDBMutBuilder::::new(&memdb).with_cache(&mut cache).build(); for (key, value) in &key_value { t.insert(key, value).unwrap(); } + t.commit() + }; + let root = memdb.commit(changeset); + let t = TrieDBBuilder::::new(&memdb, &root).with_cache(&mut cache).build(); + for (key, _) in &key_value { + t.get(key).unwrap(); } // Ensure that when we cache the same value multiple times under different keys, @@ -719,13 +942,11 @@ fn test_cache_internal() { let cached_value = cache.lookup_value_for_key(&b"AB"[..]).unwrap().clone(); assert_eq!(cached_value.data().flatten().unwrap(), vec![3u8; 4]); - { - let mut t = - TrieDBMutBuilder::::new(&mut memdb, &mut root).with_cache(&mut cache).build(); - for (key, value) in &key_value { - t.insert(key, value).unwrap(); - } + let mut t = TrieDBMutBuilder::::new(&memdb).with_cache(&mut cache).build(); + for (key, value) in &key_value { + t.insert(key, value).unwrap(); } + let root = memdb.commit(t.commit()); assert_eq!( cache.lookup_value_for_key(&b"AB"[..]).unwrap().data().flatten().unwrap(), @@ -737,7 +958,7 @@ fn test_cache_internal() { cache.clear_node_cache(); { - let t = TrieDBBuilder::::new(&mut memdb, &mut root).with_cache(&mut cache).build(); + let t = TrieDBBuilder::::new(&memdb, &root).with_cache(&mut cache).build(); for (key, value) in &key_value { assert_eq!(*value, t.get(key).unwrap().unwrap()); } @@ -747,7 +968,7 @@ fn test_cache_internal() { cache.clear_node_cache(); { - let t = TrieDBBuilder::::new(&mut memdb, &mut root).with_cache(&mut cache).build(); + let t = TrieDBBuilder::::new(&mut memdb, &root).with_cache(&mut cache).build(); for (key, value) in &key_value { assert_eq!(T::Hash::hash(value), t.get_hash(key).unwrap().unwrap()); } @@ -756,19 +977,17 @@ fn test_cache_internal() { #[test] fn test_record_value() { - type L = HashedValueNoExtThreshold<33>; + type L = HashedValueNoExtThreshold<33, ()>; // one root branch and two leaf, one with inline value, the other with node value. 
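+	// With the 33-byte threshold, the 32-byte value stays inline in its leaf while the
+	// 33-byte value is stored as a detached value node (the leaf keeps only its hash).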
let key_value = vec![(b"A".to_vec(), vec![1; 32]), (b"B".to_vec(), vec![1; 33])]; // Add some initial data to the trie let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - for (key, value) in key_value.iter() { - t.insert(key, value).unwrap(); - } + let mut t = TrieDBMutBuilder::::new(&memdb).build(); + for (key, value) in key_value.iter() { + t.insert(key, value).unwrap(); } + let root = t.commit().apply_to(&mut memdb); // Value access would record a two nodes (branch and leaf with value 32 len inline). let mut recorder = Recorder::::new(); @@ -896,3 +1115,73 @@ fn test_record_value() { // leaf with value hash only. assert_eq!(compact_proof[1].len(), 33); } + +test_layouts!(test_trie_nodes_recorded, test_trie_nodes_recorded_internal); +fn test_trie_nodes_recorded_internal>() { + let key_value = vec![ + (b"A".to_vec(), vec![1; 64]), + (b"AA".to_vec(), vec![2; 64]), + (b"AB".to_vec(), vec![3; 4]), + (b"B".to_vec(), vec![4; 64]), + (b"BC".to_vec(), vec![4; 64]), + ]; + const NON_EXISTENT_KEY: &[u8] = &*b"NOT"; + + let mut memdb = DB::default(); + + let mut t = TrieDBMutBuilder::::new(&memdb).build(); + for (key, value) in &key_value { + t.insert(key, value).unwrap(); + } + let root = memdb.commit(t.commit()); + + for mut cache in [Some(TestTrieCache::::default()), None] { + for get_hash in [true, false] { + let mut recorder = Recorder::::default(); + { + let trie = TrieDBBuilder::::new(&memdb, &root) + .with_recorder(&mut recorder) + .with_optional_cache(cache.as_mut().map(|c| c as &mut _)) + .build(); + for (key, _) in &key_value { + if get_hash { + assert!(trie.get_hash(key).unwrap().is_some()); + } else { + assert!(trie.get(key).unwrap().is_some()); + } + } + + if get_hash { + assert!(trie.get_hash(&NON_EXISTENT_KEY).unwrap().is_none()); + } else { + assert!(trie.get(&NON_EXISTENT_KEY).unwrap().is_none()); + } + } + + for (key, value) in &key_value { + let recorded = recorder.trie_nodes_recorded_for_key(&key); + + let is_inline = T::MAX_INLINE_VALUE.map_or(true, |m| value.len() < m as usize); + + let expected = if get_hash && !is_inline { + RecordedForKey::Hash + } else { + RecordedForKey::Value + }; + + assert_eq!( + expected, + recorded, + "{:?} max_inline: {:?} get_hash: {get_hash}", + String::from_utf8(key.to_vec()), + T::MAX_INLINE_VALUE + ); + } + + assert_eq!( + RecordedForKey::Value, + recorder.trie_nodes_recorded_for_key(&NON_EXISTENT_KEY), + ); + } + } +} diff --git a/test/src/triedbmut.rs b/test/src/triedbmut.rs new file mode 100644 index 00000000..951d1319 --- /dev/null +++ b/test/src/triedbmut.rs @@ -0,0 +1,1051 @@ +// Copyright 2017, 2020 Parity Technologies +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
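+
+//! Tests for the mutable trie API (`TrieDBMut`): building, updating and committing
+//! tries against a generic `NodeDB` backend, plus recorder and cache behaviour.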
+ +use std::ops::Deref; + +use env_logger; +use log::debug; +use reference_trie::{ + reference_trie_root, test_layouts, ExtensionLayout, HashedValueNoExt, + HashedValueNoExtThreshold, NoExtensionLayout, PrefixedMemoryDB, RefHasher, ReferenceNodeCodec, + ReferenceNodeCodecNoExt, TestTrieCache, +}; +use trie_db::{ + memory_db::{HashKey, MemoryDB, PrefixedKey}, + node_db::{Hasher, NodeDB, Prefix, EMPTY_PREFIX}, + test_utils::*, + CachedValue, Changeset, DBValue, NodeCodec, Recorder, Trie, TrieCache, TrieDBBuilder, + TrieDBMut, TrieDBMutBuilder, TrieDBNodeIterator, TrieError, TrieHash, TrieLayout, Value, +}; + +use crate::{TestCommit, TestDB}; + +type MemoryDBProof = + MemoryDB<::Hash, HashKey<::Hash>, DBValue>; + +fn populate_trie<'db, T: TrieLayout>( + db: &'db dyn NodeDB, + v: &[(Vec, Vec)], +) -> TrieDBMut<'db, T> { + let mut t = TrieDBMutBuilder::::new(db).build(); + for i in 0..v.len() { + let key: &[u8] = &v[i].0; + let val: &[u8] = &v[i].1; + t.insert(key, val).unwrap(); + } + t +} + +fn unpopulate_trie<'db, T: TrieLayout>( + t: &mut TrieDBMut<'db, T>, + v: &[(Vec, Vec)], +) -> bool { + for (_ix, i) in v.into_iter().enumerate() { + let key: &[u8] = &i.0; + if t.remove(key).is_err() { + return false + } + } + true +} + +fn reference_hashed_null_node() -> ::Out { + if T::USE_EXTENSION { + as NodeCodec>::hashed_null_node() + } else { + as NodeCodec>::hashed_null_node() + } +} + +#[test] +fn playpen() { + env_logger::init(); + playpen_internal::>(); + playpen_internal::(); + playpen_internal::(); + playpen_internal::(); +} +fn playpen_internal() { + let mut seed = [0u8; 32]; + for test_i in 0..10_000 { + if test_i % 50 == 0 { + debug!("{:?} of 10000 stress tests done", test_i); + } + let initial_seed = seed.clone(); + let x = StandardMap { + alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".to_vec()), + min_key: 5, + journal_key: 0, + value_mode: ValueMode::Index, + count: 100, + } + .make_with(&mut seed); + + let real = reference_trie_root::(x.clone()); + let mut memdb = PrefixedMemoryDB::::default(); + let memtrie = populate_trie::(&memdb, &x); + // avoid duplicate + let value_set: std::collections::BTreeMap<&[u8], &[u8]> = + x.iter().map(|(k, v)| (k.as_slice(), v.as_slice())).collect(); + for (k, v) in value_set { + assert_eq!(memtrie.get(k).unwrap().unwrap(), v); + } + let commit = memtrie.commit(); + let root = commit.apply_to(&mut memdb); + + if root != real { + println!("TRIE MISMATCH"); + println!(); + println!("{:?} vs {:?}", root, real); + for i in &x { + println!("{:#x?} -> {:#x?}", i.0, i.1); + } + } + assert_eq!(root, real); + + let mut memtrie = TrieDBMutBuilder::::from_existing(&memdb, root).build(); + assert!(unpopulate_trie(&mut memtrie, &x), "{:?}", (test_i, initial_seed)); + let root = memtrie.commit().apply_to(&mut memdb); + let hashed_null_node = reference_hashed_null_node::(); + if root != hashed_null_node { + println!("- TRIE MISMATCH"); + println!(); + println!("{:#x?} vs {:#x?}", root, hashed_null_node); + for i in &x { + println!("{:#x?} -> {:#x?}", i.0, i.1); + } + } + assert_eq!(root, hashed_null_node); + } +} + +test_layouts!(init, init_internal); +fn init_internal>() { + let memdb = DB::default(); + let t = TrieDBMutBuilder::::new(&memdb).build(); + let hashed_null_node = reference_hashed_null_node::(); + assert_eq!(t.commit().root_hash(), hashed_null_node); +} + +test_layouts!(insert_on_empty, insert_on_empty_internal); +fn insert_on_empty_internal>() { + let memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&memdb).build(); + 
t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); + assert_eq!( + t.commit().root_hash(), + reference_trie_root::(vec![(vec![0x01u8, 0x23], vec![0x01u8, 0x23])]), + ); +} + +test_layouts!(remove_to_empty, remove_to_empty_internal); +fn remove_to_empty_internal>() { + let big_value = b"00000000000000000000000000000000"; + + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + + t.insert(&[0x01], big_value).unwrap(); + t.insert(&[0x01, 0x23], big_value).unwrap(); + t.insert(&[0x01, 0x34], big_value).unwrap(); + t.remove(&[0x01]).unwrap(); + t.remove(&[0x01, 0x23]).unwrap(); + t.remove(&[0x01, 0x34]).unwrap(); + t.commit().commit_to(&mut memdb); + assert!(memdb.is_empty()); +} + +test_layouts!(remove_to_empty_checked, remove_to_empty_checked_internal); +fn remove_to_empty_checked_internal>() { + let big_value = b"00000000000000000000000000000000"; + + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + t.insert(&[0x01], big_value).unwrap(); + t.insert(&[0x01, 0x23], big_value).unwrap(); + t.insert(&[0x01, 0x34], big_value).unwrap(); + let root = t.commit().commit_to(&mut memdb); + let mut t = TrieDBMutBuilder::::from_existing(&mut memdb, root).build(); + assert_eq!(t.get(&[0x01]).unwrap(), Some(big_value.to_vec()),); + assert_eq!(t.get(&[0x01, 0x34]).unwrap(), Some(big_value.to_vec()),); + t.remove(&[0x01]).unwrap(); + t.remove(&[0x01, 0x23]).unwrap(); + t.remove(&[0x01, 0x34]).unwrap(); + t.commit().commit_to(&mut memdb); + assert!(memdb.is_empty()); +} + +test_layouts!(remove_to_empty_no_extension, remove_to_empty_no_extension_internal); +fn remove_to_empty_no_extension_internal>() { + let big_value = b"00000000000000000000000000000000"; + let big_value2 = b"00000000000000000000000000000002"; + let big_value3 = b"00000000000000000000000000000004"; + + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + + t.insert(&[0x01, 0x23], big_value3).unwrap(); + t.insert(&[0x01], big_value2).unwrap(); + t.insert(&[0x01, 0x34], big_value).unwrap(); + t.remove(&[0x01]).unwrap(); + + let root = t.commit().commit_to(&mut memdb); + assert_eq!( + &root, + &reference_trie::calc_root::(vec![ + (vec![0x01u8, 0x23], big_value3.to_vec()), + (vec![0x01u8, 0x34], big_value.to_vec()), + ]) + ); +} + +test_layouts!(insert_replace_root, insert_replace_root_internal); +fn insert_replace_root_internal>() { + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); + t.insert(&[0x01u8, 0x23], &[0x23u8, 0x45]).unwrap(); + let root = t.commit().commit_to(&mut memdb); + assert_eq!( + root, + reference_trie_root::(vec![(vec![0x01u8, 0x23], vec![0x23u8, 0x45])]), + ); +} + +test_layouts!(insert_make_branch_root, insert_make_branch_root_internal); +fn insert_make_branch_root_internal>() { + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); + t.insert(&[0x11u8, 0x23], &[0x11u8, 0x23]).unwrap(); + let root = t.commit().commit_to(&mut memdb); + assert_eq!( + root, + reference_trie_root::(vec![ + (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), + (vec![0x11u8, 0x23], vec![0x11u8, 0x23]) + ]) + ); +} + +test_layouts!(insert_into_branch_root, insert_into_branch_root_internal); +fn insert_into_branch_root_internal>() { + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + t.insert(&[0x01u8, 0x23], 
&[0x01u8, 0x23]).unwrap(); + t.insert(&[0xf1u8, 0x23], &[0xf1u8, 0x23]).unwrap(); + t.insert(&[0x81u8, 0x23], &[0x81u8, 0x23]).unwrap(); + let root = t.commit().commit_to(&mut memdb); + assert_eq!( + root, + reference_trie_root::(vec![ + (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), + (vec![0x81u8, 0x23], vec![0x81u8, 0x23]), + (vec![0xf1u8, 0x23], vec![0xf1u8, 0x23]), + ]) + ); +} + +test_layouts!(insert_value_into_branch_root, insert_value_into_branch_root_internal); +fn insert_value_into_branch_root_internal>() { + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); + t.insert(&[], &[0x0]).unwrap(); + let root = t.commit().commit_to(&mut memdb); + assert_eq!( + root, + reference_trie_root::(vec![ + (vec![], vec![0x0]), + (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), + ]) + ); +} + +test_layouts!(insert_split_leaf, insert_split_leaf_internal); +fn insert_split_leaf_internal>() { + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); + t.insert(&[0x01u8, 0x34], &[0x01u8, 0x34]).unwrap(); + let root = t.commit().commit_to(&mut memdb); + assert_eq!( + root, + reference_trie_root::(vec![ + (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), + (vec![0x01u8, 0x34], vec![0x01u8, 0x34]), + ]) + ); +} + +test_layouts!(insert_split_extenstion, insert_split_extenstion_internal); +fn insert_split_extenstion_internal>() { + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + t.insert(&[0x01, 0x23, 0x45], &[0x01]).unwrap(); + t.insert(&[0x01, 0xf3, 0x45], &[0x02]).unwrap(); + t.insert(&[0x01, 0xf3, 0xf5], &[0x03]).unwrap(); + let root = t.commit().commit_to(&mut memdb); + assert_eq!( + root, + reference_trie_root::(vec![ + (vec![0x01, 0x23, 0x45], vec![0x01]), + (vec![0x01, 0xf3, 0x45], vec![0x02]), + (vec![0x01, 0xf3, 0xf5], vec![0x03]), + ]) + ); +} + +test_layouts!(insert_big_value, insert_big_value_internal); +fn insert_big_value_internal>() { + let big_value0 = b"00000000000000000000000000000000"; + let big_value1 = b"11111111111111111111111111111111"; + + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + t.insert(&[0x01u8, 0x23], big_value0).unwrap(); + t.insert(&[0x11u8, 0x23], big_value1).unwrap(); + let root = t.commit().commit_to(&mut memdb); + assert_eq!( + root, + reference_trie_root::(vec![ + (vec![0x01u8, 0x23], big_value0.to_vec()), + (vec![0x11u8, 0x23], big_value1.to_vec()) + ]) + ); +} + +test_layouts!(insert_duplicate_value, insert_duplicate_value_internal); +fn insert_duplicate_value_internal>() { + let big_value = b"00000000000000000000000000000000"; + + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + t.insert(&[0x01u8, 0x23], big_value).unwrap(); + t.insert(&[0x11u8, 0x23], big_value).unwrap(); + let root = t.commit().commit_to(&mut memdb); + assert_eq!( + root, + reference_trie_root::(vec![ + (vec![0x01u8, 0x23], big_value.to_vec()), + (vec![0x11u8, 0x23], big_value.to_vec()) + ]) + ); +} + +test_layouts!(test_at_empty, test_at_empty_internal); +fn test_at_empty_internal>() { + let mut memdb = DB::default(); + let t = TrieDBMutBuilder::::new(&mut memdb).build(); + assert_eq!(t.get(&[0x5]).unwrap(), None); +} + +test_layouts!(test_at_one_and_two, test_at_one_and_two_internal); +fn test_at_one_and_two_internal>() { + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut 
memdb).build(); + t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); + assert_eq!(t.get(&[0x1, 0x23]).unwrap().unwrap(), vec![0x1u8, 0x23]); + let root = t.commit().commit_to(&mut memdb); + let mut t = TrieDBMutBuilder::::from_existing(&mut memdb, root).build(); + assert_eq!(t.get(&[0x1, 0x23]).unwrap().unwrap(), vec![0x1u8, 0x23]); + t.insert(&[0x01u8, 0x23, 0x00], &[0x01u8, 0x24]).unwrap(); + let root = t.commit().commit_to(&mut memdb); + let mut t = TrieDBMutBuilder::::from_existing(&mut memdb, root).build(); + t.insert(&[0x01u8, 0x23, 0x00], &[0x01u8, 0x25]).unwrap(); + // This test that middle node get resolved correctly (modified + // triedbmut node due to change of child node). + assert_eq!(t.get(&[0x1, 0x23]).unwrap().unwrap(), vec![0x1u8, 0x23]); +} + +test_layouts!(test_at_three, test_at_three_internal); +fn test_at_three_internal>() { + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&memdb).build(); + t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); + t.insert(&[0xf1u8, 0x23], &[0xf1u8, 0x23]).unwrap(); + t.insert(&[0x81u8, 0x23], &[0x81u8, 0x23]).unwrap(); + assert_eq!(t.get(&[0x01, 0x23]).unwrap().unwrap(), vec![0x01u8, 0x23]); + assert_eq!(t.get(&[0xf1, 0x23]).unwrap().unwrap(), vec![0xf1u8, 0x23]); + assert_eq!(t.get(&[0x81, 0x23]).unwrap().unwrap(), vec![0x81u8, 0x23]); + assert_eq!(t.get(&[0x82, 0x23]).unwrap(), None); + let root = memdb.commit(t.commit()); + let t = TrieDBMutBuilder::::from_existing(&memdb, root).build(); + assert_eq!(t.get(&[0x01, 0x23]).unwrap().unwrap(), vec![0x01u8, 0x23]); + assert_eq!(t.get(&[0xf1, 0x23]).unwrap().unwrap(), vec![0xf1u8, 0x23]); + assert_eq!(t.get(&[0x81, 0x23]).unwrap().unwrap(), vec![0x81u8, 0x23]); + assert_eq!(t.get(&[0x82, 0x23]).unwrap(), None); +} + +#[test] +fn test_nibbled_branch_changed_value() { + let memdb = MemoryDB::, DBValue>::default(); + let mut t = reference_trie::RefTrieDBMutNoExtBuilder::new(&memdb).build(); + t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); + t.insert(&[0x01u8, 0x23, 0x11], &[0xf1u8, 0x23]).unwrap(); + assert_eq!(t.get(&[0x01u8, 0x23]).unwrap(), Some(vec![0x01u8, 0x23])); +} + +test_layouts!(stress, stress_internal); +fn stress_internal>() { + let mut seed = Default::default(); + for _ in 0..1000 { + let x = StandardMap { + alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".to_vec()), + min_key: 5, + journal_key: 0, + value_mode: ValueMode::Index, + count: 4, + } + .make_with(&mut seed); + + let real = reference_trie_root::(x.clone()); + let mut memdb = DB::default(); + let memtrie = populate_trie::(&mut memdb, &x); + let mut y = x.clone(); + y.sort_by(|ref a, ref b| a.0.cmp(&b.0)); + let mut memdb2 = DB::default(); + let memtrie_sorted = populate_trie::(&mut memdb2, &y); + let root = memtrie.commit().commit_to(&mut memdb); + let root2 = memtrie_sorted.commit().commit_to(&mut memdb2); + if root != real || root2 != real { + println!("TRIE MISMATCH"); + println!(); + println!("ORIGINAL... {:#x?}", root); + for i in &x { + println!("{:#x?} -> {:#x?}", i.0, i.1); + } + println!("SORTED... 
{:#x?}", root2); + for i in &y { + println!("{:#x?} -> {:#x?}", i.0, i.1); + } + } + assert_eq!(root, real); + assert_eq!(root2, real); + } +} + +test_layouts!(test_trie_existing, test_trie_existing_internal); +fn test_trie_existing_internal>() { + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); + let root = t.commit().commit_to(&mut memdb); + let _ = TrieDBMutBuilder::::from_existing(&memdb, root); +} + +test_layouts!(insert_empty, insert_empty_internal); +fn insert_empty_internal>() { + let mut seed = Default::default(); + let x = StandardMap { + alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".to_vec()), + min_key: 5, + journal_key: 0, + value_mode: ValueMode::Index, + count: 4, + } + .make_with(&mut seed); + + let mut db = DB::default(); + let mut t = TrieDBMutBuilder::::new(&db).build(); + for &(ref key, ref value) in &x { + t.insert(key, value).unwrap(); + } + let root = db.commit(t.commit()); + + assert_eq!(root, reference_trie_root::(x.clone())); + + let mut t = TrieDBMutBuilder::::from_existing(&db, root).build(); + for &(ref key, _) in &x { + t.insert(key, &[]).unwrap(); + } + assert!(t.is_empty()); + let root = db.commit(t.commit()); + + let hashed_null_node = reference_hashed_null_node::(); + assert_eq!(root, hashed_null_node); +} + +test_layouts!(return_old_values, return_old_values_internal); +fn return_old_values_internal>() { + let threshold = T::MAX_INLINE_VALUE; + let mut seed = Default::default(); + let x = StandardMap { + alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".to_vec()), + min_key: 5, + journal_key: 0, + value_mode: ValueMode::Index, + count: 2, + } + .make_with(&mut seed); + + let mut db = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut db).build(); + for &(ref key, ref value) in &x { + assert!(t.insert(key, value).unwrap() == None); + if threshold.map(|t| value.len() < t as usize).unwrap_or(true) { + assert_eq!(t.insert(key, value).unwrap(), Some(Value::Inline(value.clone().into()))); + } else { + assert!(matches!(t.insert(key, value).unwrap(), Some(Value::NewNode(..)))); + } + } + for (key, value) in x { + if threshold.map(|t| value.len() < t as usize).unwrap_or(true) { + assert_eq!(t.remove(&key).unwrap(), Some(Value::Inline(value.into()))); + } else { + assert!(matches!(t.remove(&key).unwrap(), Some(Value::NewNode(..)))); + } + assert_eq!(t.remove(&key).unwrap(), None); + } +} + +#[test] +fn insert_empty_allowed() { + let mut db = MemoryDB::, DBValue>::default(); + let mut t = reference_trie::RefTrieDBMutAllowEmptyBuilder::new(&db).build(); + t.insert(b"test", &[]).unwrap(); + let root = t.commit().apply_to(&mut db); + + assert_eq!( + root, + reference_trie_root::(vec![( + b"test".to_vec(), + Vec::new() + )],) + ); + let t = reference_trie::RefTrieDBMutAllowEmptyBuilder::from_existing(&db, root).build(); + assert_eq!(t.get(b"test").unwrap(), Some(Vec::new())); +} + +#[test] +fn register_proof_without_value() { + use reference_trie::HashedValueNoExtThreshold; + use std::{cell::RefCell, collections::HashMap}; + use Prefix; + + type Layout = HashedValueNoExtThreshold<1, ()>; + type MemoryDB = trie_db::memory_db::MemoryDB, DBValue>; + let x = [ + (b"test1".to_vec(), vec![1; 32]), // inline + (b"test1234".to_vec(), vec![2; 36]), + (b"te".to_vec(), vec![3; 32]), + ]; + + let mut memdb = MemoryDB::default(); + let t = populate_trie::(&mut memdb, &x); + let root = t.commit().apply_to(&mut memdb); + { + let trie = 
TrieDBBuilder::::new(&memdb, &root).build(); + println!("{:?}", trie); + } + + struct ProofRecorder { + db: MemoryDB, + record: RefCell, Vec>>, + } + // Only to test without threads. + unsafe impl Send for ProofRecorder {} + unsafe impl Sync for ProofRecorder {} + + impl NodeDB for ProofRecorder { + fn get( + &self, + key: &::Out, + prefix: Prefix, + _location: (), + ) -> Option<(DBValue, Vec<()>)> { + let v = NodeDB::get(&self.db, key, prefix, ()); + if let Some((v, _)) = v.as_ref() { + self.record.borrow_mut().entry(key[..].to_vec()).or_insert_with(|| v.clone()); + } + v + } + fn contains( + &self, + key: &::Out, + prefix: Prefix, + _locatoin: (), + ) -> bool { + self.get(key, prefix, ()).is_some() + } + } + + let mut memdb = ProofRecorder { db: memdb, record: Default::default() }; + + let root_proof = root.clone(); + let mut trie = TrieDBMutBuilder::::from_existing(&mut memdb, root).build(); + // touch te value (test1 remains untouch). + trie.get(b"te").unwrap(); + // cut test_1234 prefix + trie.insert(b"test12", &[2u8; 36][..]).unwrap(); + // remove 1234 + trie.remove(b"test1234").unwrap(); + + // proof should contain value for 'te' only. + type MemoryDBProof = trie_db::memory_db::MemoryDB, DBValue>; + let mut memdb_from_proof = MemoryDBProof::default(); + for (_key, value) in memdb.record.into_inner().into_iter() { + memdb_from_proof.insert(EMPTY_PREFIX, value.as_slice()); + } + + let db_unpacked = memdb_from_proof.clone(); + let root_unpacked = root_proof.clone(); + + let mut memdb_from_proof = db_unpacked.clone(); + let root_proof = root_unpacked.clone(); + { + let mut trie = + TrieDBMutBuilder::::from_existing(&mut memdb_from_proof, root_proof).build(); + trie.get(b"te").unwrap(); + trie.insert(b"test12", &[2u8; 36][..]).unwrap(); + trie.remove(b"test1234").unwrap(); + } + + let mut memdb_from_proof = db_unpacked.clone(); + let root_proof = root_unpacked.clone(); + { + use trie_db::Trie; + let trie = TrieDBBuilder::::new(&memdb_from_proof, &root_proof).build(); + assert!(trie.get(b"te").unwrap().is_some()); + assert!(matches!( + trie.get(b"test1").map_err(|e| *e), + Err(TrieError::IncompleteDatabase(..)) + )); + } + + { + let trie = + TrieDBMutBuilder::::from_existing(&mut memdb_from_proof, root_proof).build(); + assert!(trie.get(b"te").unwrap().is_some()); + assert!(matches!( + trie.get(b"test1").map_err(|e| *e), + Err(TrieError::IncompleteDatabase(..)) + )); + } +} + +test_layouts!(test_recorder, test_recorder_internal); +fn test_recorder_internal>() { + let key_value = vec![ + (b"A".to_vec(), vec![1; 64]), + (b"AA".to_vec(), vec![2; 64]), + (b"AB".to_vec(), vec![3; 64]), + (b"B".to_vec(), vec![4; 64]), + ]; + + // Add some initial data to the trie + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + for (key, value) in key_value.iter().take(1) { + t.insert(key, value).unwrap(); + } + let root = t.commit().commit_to(&mut memdb); + + // Add more data, but this time only to the overlay. + // While doing that we record all trie accesses to replay this operation. 
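+	// The recorder collects every node touched by these insertions; the recorded nodes
+	// are drained into a proof database below and the same insertions are replayed there.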
+ let mut recorder = Recorder::::new(); + let mut overlay = memdb.clone(); + let new_root = root; + { + let mut trie = TrieDBMutBuilder::::from_existing(&mut overlay, new_root) + .with_recorder(&mut recorder) + .build(); + + for (key, value) in key_value.iter().skip(1) { + trie.insert(key, value).unwrap(); + } + } + + let mut partial_db = MemoryDBProof::::default(); + for record in recorder.drain() { + partial_db.insert(EMPTY_PREFIX, &record.data); + } + + // Replay the it, but this time we use the proof. + let validated_root = root; + { + let mut trie = + TrieDBMutBuilder::::from_existing(&mut partial_db, validated_root).build(); + + for (key, value) in key_value.iter().skip(1) { + trie.insert(key, value).unwrap(); + } + } + + assert_eq!(new_root, validated_root); +} + +test_layouts!(test_recorder_witch_cache, test_recorder_with_cache_internal); +fn test_recorder_with_cache_internal>() { + let key_value = vec![ + (b"A".to_vec(), vec![1; 64]), + (b"AA".to_vec(), vec![2; 64]), + (b"AB".to_vec(), vec![3; 64]), + (b"B".to_vec(), vec![4; 64]), + ]; + + // Add some initial data to the trie + let mut memdb = DB::default(); + let mut t = TrieDBMutBuilder::::new(&mut memdb).build(); + for (key, value) in key_value.iter().take(1) { + t.insert(key, value).unwrap(); + } + let root = t.commit().commit_to(&mut memdb); + let mut validated_root = root; + + let mut cache = TestTrieCache::::default(); + + { + let trie = TrieDBBuilder::::new(&memdb, &root).with_cache(&mut cache).build(); + + // Only read one entry. + assert_eq!(key_value[0].1, trie.get(&key_value[0].0).unwrap().unwrap()); + } + + // Root should now be cached. + assert!(cache.get_node(&root, Default::default()).is_some()); + + // Add more data, but this time only to the overlay. + // While doing that we record all trie accesses to replay this operation. + let mut recorder = Recorder::::new(); + let mut overlay = memdb.clone(); + let mut trie = TrieDBMutBuilder::::from_existing(&mut overlay, root) + .with_recorder(&mut recorder) + .with_cache(&mut cache) + .build(); + + for (key, value) in key_value.iter().skip(1) { + trie.insert(key, value).unwrap(); + } + let new_root = trie.commit().commit_to(&mut overlay); + + let t = TrieDBBuilder::::new(&overlay, &new_root).with_cache(&mut cache).build(); + for (key, _) in key_value.iter().skip(1) { + t.get(key).unwrap(); + } + + for (key, value) in key_value.iter().skip(1) { + let cached_value = cache.lookup_value_for_key(key).unwrap(); + + assert_eq!(value, cached_value.data().flatten().unwrap().deref()); + assert_eq!(T::Hash::hash(&value), cached_value.hash().unwrap()); + } + + let mut partial_db = MemoryDBProof::::default(); + for record in recorder.drain() { + partial_db.insert(EMPTY_PREFIX, &record.data); + } + + // Replay the it, but this time we use the proof. 
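+	// Replaying the same insertions against the partial database built from the recorded
+	// nodes must yield the same root as the commit to the full overlay (asserted below).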
+ { + let mut trie = TrieDBMutBuilder::::from_existing(&partial_db, validated_root).build(); + + for (key, value) in key_value.iter().skip(1) { + trie.insert(key, value).unwrap(); + } + validated_root = trie.commit().apply_to(&mut partial_db); + } + + assert_eq!(new_root, validated_root); +} + +test_layouts!(test_insert_remove_data_with_cache, test_insert_remove_data_with_cache_internal); +fn test_insert_remove_data_with_cache_internal>() { + let key_value = vec![ + (b"A".to_vec(), vec![1; 64]), + (b"AA".to_vec(), vec![2; 64]), + // Should be inlined + (b"AC".to_vec(), vec![7; 4]), + (b"AB".to_vec(), vec![3; 64]), + (b"B".to_vec(), vec![4; 64]), + ]; + + let mut cache = TestTrieCache::::default(); + let mut recorder = Recorder::::new(); + let mut memdb = DB::default(); + let mut trie = TrieDBMutBuilder::::new(&mut memdb) + .with_recorder(&mut recorder) + .with_cache(&mut cache) + .build(); + + // Add all values + for (key, value) in key_value.iter() { + trie.insert(key, value).unwrap(); + } + + // Remove only the last 2 elements + for (key, _) in key_value.iter().skip(3) { + let _ = trie.remove(key); + } + let mut memdb = MemoryDB::, DBValue>::default(); + let root = trie.commit().apply_to(&mut memdb); + let t = TrieDBBuilder::::new(&memdb, &root).with_cache(&mut cache).build(); + for (key, _) in &key_value { + t.get(key).unwrap(); + } + + // Then only the first 3 elements should be in the cache and the last + // two ones should be added as non-existent. + for (key, value) in key_value.iter().take(3) { + let key_str = String::from_utf8_lossy(key); + + let cached_value = cache + .lookup_value_for_key(key) + .unwrap_or_else(|| panic!("Failed to lookup `{}`", key_str)); + + assert_eq!(value, cached_value.data().flatten().unwrap().deref(), "{:?}", key_str); + assert_eq!(T::Hash::hash(&value), cached_value.hash().unwrap()); + } + + for (key, _) in key_value.iter().skip(3) { + assert!(matches!(cache.lookup_value_for_key(key).unwrap(), CachedValue::NonExisting)); + } +} + +#[test] +fn test_two_assets_memory_db() { + test_two_assets_memory_db_inner_1::>(); + test_two_assets_memory_db_inner_2::>(); +} +fn test_two_assets_memory_db_inner_1() { + let memdb = PrefixedMemoryDB::::new(&[0u8]); + let mut state = TrieDBMutBuilder::::new(&memdb).build(); + + let key1 = [1u8; 3]; + let data1 = [1u8; 2]; + state.insert(key1.as_ref(), &data1).unwrap(); + assert_eq!(state.get(key1.as_ref()).unwrap().unwrap(), data1); //PASSING + let key2 = [2u8; 3]; + let data2 = [2u8; 2]; + state.insert(key2.as_ref(), &data2).unwrap(); + assert_eq!(state.get(key1.as_ref()).unwrap().unwrap(), data1); + + state.commit(); +} + +fn test_two_assets_memory_db_inner_2() { + let memdb = PrefixedMemoryDB::::new(&[0u8]); + let mut state = TrieDBMutBuilder::::new(&memdb).build(); + + let key1 = [1u8]; + let data1 = [1u8; 2]; + state.insert(key1.as_ref(), &data1).unwrap(); + assert_eq!(state.get(key1.as_ref()).unwrap().unwrap(), data1); + let key2 = [1u8, 2]; + let data2 = [2u8; 2]; + state.insert(key2.as_ref(), &data2).unwrap(); + assert_eq!(state.get(key1.as_ref()).unwrap().unwrap(), data1); + assert_eq!(state.get(key2.as_ref()).unwrap().unwrap(), data2); + + let key3 = [1u8, 3]; + let data3 = [3u8; 2]; + state.insert(key3.as_ref(), &data3).unwrap(); + assert_eq!(state.get(key1.as_ref()).unwrap().unwrap(), data1); + assert_eq!(state.get(key2.as_ref()).unwrap().unwrap(), data2); + assert_eq!(state.get(key3.as_ref()).unwrap().unwrap(), data3); +} + +test_layouts!(attached_trie, attached_trie_internal); +fn attached_trie_internal>() { + 
use std::collections::BTreeMap; + struct ATrie { + root: TrieHash, + data: BTreeMap, Vec>, + changeset: Option, T::Location>>>, + } + // Running a typical attached trie scenario (childtrie on substrate): + // different trie, attached trie root written all + // at once with treerefset before parent tree commit. + // Direct copy if using ref counting and location in db. + // Pruning. + let mut seed = Default::default(); + let nb_attached_trie = 10; + // let nb_attached_trie = 1; + let support_location = DB::support_location(); + let mut memdb = DB::default(); + let mut attached_tries: BTreeMap, ATrie> = Default::default(); + let mut keyspaced_memdb; + let mut main_trie: ATrie = + ATrie { root: Default::default(), data: Default::default(), changeset: None }; + for i in 0..nb_attached_trie + 1 { + let x = StandardMap { + alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".to_vec()), + min_key: 3, + journal_key: 0, + value_mode: ValueMode::Index, + count: 20, + //count: 2, + } + .make_with(&mut seed); + + let mut memtrie = populate_trie::(&mut memdb, &x); + let data: BTreeMap, Vec> = x.iter().cloned().collect(); + if i == nb_attached_trie { + for (k, c) in attached_tries.iter_mut() { + let key: &[u8] = &k[..]; + let val: &[u8] = c.root.as_ref(); + let changeset = c.changeset.take().unwrap(); + memtrie.insert_with_tree_ref(key, val, Some(changeset)).unwrap(); + } + let changeset = memtrie.commit(); + let root = changeset.commit_to(&mut memdb); + main_trie.root = root; + main_trie.data = data; + } else { + let attached_trie_root_key = data.iter().next().unwrap().0; + let changeset = memtrie.commit_with_keyspace(attached_trie_root_key); + let root = changeset.root_hash(); + attached_tries.insert( + attached_trie_root_key.clone(), + ATrie { root, data, changeset: Some(changeset.into()) }, + ); + } + } + // check data + { + let trie = TrieDBBuilder::::new(&memdb, &main_trie.root).build(); + for (k, v) in main_trie.data.iter() { + assert_eq!(&trie.get(k).unwrap().unwrap(), v); + } + } + for (root_key, attached_trie) in &attached_tries { + let (attached_trie_root, attached_trie_location) = + attached_trie_root(&memdb, &main_trie.root, root_key).unwrap(); + + let child_memdb: &dyn NodeDB<_, _, _> = if support_location { + &memdb + } else { + assert!(attached_trie_location.is_none()); + keyspaced_memdb = KeySpacedDB::new(&memdb, &root_key[..]); + &keyspaced_memdb + }; + + let trie = TrieDBBuilder::::new_with_db_location( + child_memdb, + &attached_trie_root, + attached_trie_location.unwrap_or_default(), + ) + .build(); + for (k, v) in attached_trie.data.iter() { + assert_eq!(&trie.get(k).unwrap().unwrap(), v); + } + } + // Modifying an existing child trie. 
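+	// Reopen the attached trie from its (possibly keyspaced) database, mutate it, commit
+	// it with its keyspace, then write the new root and changeset into the parent trie
+	// through `insert_with_tree_ref`.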
+ let (root_key, a_attached_trie) = attached_tries.iter().next().unwrap(); + let (a_attached_trie_root, attached_trie_location) = + attached_trie_root(&memdb, &main_trie.root, &root_key).unwrap(); + let (tree_ref_changeset, treeref_root_hash) = { + assert_eq!(a_attached_trie_root, a_attached_trie.root); + let child_memdb: &dyn NodeDB<_, _, _> = if support_location { + &memdb + } else { + keyspaced_memdb = KeySpacedDB::new(&memdb, &root_key[..]); + &keyspaced_memdb + }; + let mut attached_trie = TrieDBMutBuilder::::from_existing_with_db_location( + child_memdb, + a_attached_trie_root, + attached_trie_location.unwrap_or_default(), + ) + .build(); + attached_trie.remove(a_attached_trie.data.iter().next().unwrap().0).unwrap(); + attached_trie.insert(b"make_sure_it_changes", b"value").unwrap(); + let changeset = attached_trie.commit_with_keyspace(root_key); + let new_root = changeset.root_hash(); + assert!(new_root != a_attached_trie_root); + (changeset, new_root) + }; + let mut main_trie = TrieDBMutBuilder::::from_existing(&memdb, main_trie.root).build(); + main_trie + .insert_with_tree_ref(root_key, treeref_root_hash.as_ref(), Some(tree_ref_changeset.into())) + .unwrap(); + let changeset = main_trie.commit(); + let main_root = changeset.root_hash(); + changeset.commit_to(&mut memdb); + // checking modification + let (a_attached_trie_root, attached_trie_location) = + attached_trie_root(&memdb, &main_root, root_key).unwrap(); + let child_memdb: &dyn NodeDB<_, _, _> = if support_location { + &memdb + } else { + keyspaced_memdb = KeySpacedDB::new(&memdb, &root_key[..]); + &keyspaced_memdb + }; + let trie = TrieDBBuilder::::new_with_db_location( + child_memdb, + &a_attached_trie_root, + attached_trie_location.unwrap_or_default(), + ) + .build(); + trie.get(b"make_sure_it_changes").unwrap().unwrap(); + let mut first = true; + for (k, v) in a_attached_trie.data.iter() { + if first { + assert!(&trie.get(k).unwrap().is_none()); + first = false; + } else { + assert_eq!(&trie.get(k).unwrap().unwrap(), v); + } + } + trie.get(b"make_sure_it_changes").unwrap().unwrap(); +} + +#[cfg(test)] +fn attached_trie_root>( + memdb: &DB, + main_root: &TrieHash, + root_key: &[u8], +) -> Option<(TrieHash, Option)> { + let trie = TrieDBBuilder::::new(memdb, main_root).build(); + // Note could have a variant of get_with here that goes into + // encoded node hash and locations. 
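+	// Instead, seek a node iterator to `root_key` and read the attached trie root hash
+	// (the stored value) together with its optional database location from the node plan.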
+ let mut iter = TrieDBNodeIterator::new(&trie).unwrap(); + use trie_db::TrieIterator; + iter.seek(root_key).unwrap(); + let item = iter.next()?.unwrap(); + let node = &item.2; + let location = node.node_plan().additional_ref_location(node.locations()); + let root = iter.item_from_raw(&item)?.unwrap(); + if root.0.as_slice() != root_key { + return None; + } + let mut root_hash = TrieHash::::default(); + root_hash.as_mut().copy_from_slice(&root.1); + Some((root_hash, location)) +} + +pub struct KeySpacedDB<'a, H, T, DL>(&'a dyn NodeDB, &'a [u8]); + +impl<'a, H, T, DL> KeySpacedDB<'a, H, T, DL> { + #[inline] + pub fn new(db: &'a dyn NodeDB, ks: &'a [u8]) -> Self { + KeySpacedDB(db, ks) + } +} + +impl<'a, H, T, L> NodeDB for KeySpacedDB<'a, H, T, L> +where + H: Hasher, + T: From<&'static [u8]>, +{ + fn get(&self, key: &H::Out, prefix: Prefix, location: L) -> Option<(T, Vec)> { + let derived_prefix = trie_db::triedbmut::prefix_prefix(self.1, prefix); + self.0.get(key, (&derived_prefix.0, derived_prefix.1), location) + } + + fn contains(&self, key: &H::Out, prefix: Prefix, location: L) -> bool { + let derived_prefix = trie_db::triedbmut::prefix_prefix(self.1, prefix); + self.0.contains(key, (&derived_prefix.0, derived_prefix.1), location) + } +} diff --git a/trie-db/Cargo.toml b/trie-db/Cargo.toml deleted file mode 100644 index f8e6b00c..00000000 --- a/trie-db/Cargo.toml +++ /dev/null @@ -1,22 +0,0 @@ -[package] -name = "trie-db" -version = "0.27.1" -authors = ["Parity Technologies "] -description = "Merkle-Patricia Trie generic over key hasher and node encoding" -repository = "https://github.com/paritytech/trie" -license = "Apache-2.0" -edition = "2018" - -[dependencies] -log = "0.4" -smallvec = { version = "1.0.0", features = ["union", "const_new"] } -hash-db = { path = "../hash-db", default-features = false, version = "0.16.0"} -hashbrown = { version = "0.13.2", default-features = false, features = ["ahash"] } -rustc-hex = { version = "2.1.0", default-features = false, optional = true } - -[features] -default = ["std"] -std = [ - "hash-db/std", - "rustc-hex", -] diff --git a/trie-db/src/fatdb.rs b/trie-db/src/fatdb.rs deleted file mode 100644 index 16883394..00000000 --- a/trie-db/src/fatdb.rs +++ /dev/null @@ -1,187 +0,0 @@ -// Copyright 2017, 2021 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -use super::{ - CError, DBValue, Query, Result, Trie, TrieDB, TrieDBIterator, TrieDBKeyIterator, TrieHash, - TrieItem, TrieIterator, TrieKeyItem, TrieLayout, -}; -use hash_db::{HashDBRef, Hasher}; - -use crate::{rstd::boxed::Box, TrieDBBuilder}; - -/// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database. -/// Additionaly it stores inserted hash-key mappings for later retrieval. -/// -/// Use it as a `Trie` or `TrieMut` trait object. 
-pub struct FatDB<'db, 'cache, L> -where - L: TrieLayout, -{ - raw: TrieDB<'db, 'cache, L>, -} - -impl<'db, 'cache, L> FatDB<'db, 'cache, L> -where - L: TrieLayout, -{ - /// Create a new trie with the backing database `db` and empty `root` - /// Initialise to the state entailed by the genesis block. - /// This guarantees the trie is built correctly. - pub fn new(db: &'db dyn HashDBRef, root: &'db TrieHash) -> Self { - FatDB { raw: TrieDBBuilder::new(db, root).build() } - } - - /// Get the backing database. - pub fn db(&self) -> &dyn HashDBRef { - self.raw.db() - } -} - -impl<'db, 'cache, L> Trie for FatDB<'db, 'cache, L> -where - L: TrieLayout, -{ - fn root(&self) -> &TrieHash { - self.raw.root() - } - - fn contains(&self, key: &[u8]) -> Result, CError> { - self.raw.contains(L::Hash::hash(key).as_ref()) - } - - fn get_hash(&self, key: &[u8]) -> Result>, TrieHash, CError> { - self.raw.get_hash(key) - } - - fn get_with>( - &self, - key: &[u8], - query: Q, - ) -> Result, TrieHash, CError> { - self.raw.get_with(L::Hash::hash(key).as_ref(), query) - } - - fn iter<'a>( - &'a self, - ) -> Result< - Box, CError>> + 'a>, - TrieHash, - CError, - > { - FatDBIterator::::new(&self.raw).map(|iter| Box::new(iter) as Box<_>) - } - - fn key_iter<'a>( - &'a self, - ) -> Result< - Box, CError>> + 'a>, - TrieHash, - CError, - > { - FatDBKeyIterator::::new(&self.raw).map(|iter| Box::new(iter) as Box<_>) - } -} - -/// Iterator over inserted pairs of key values. -pub struct FatDBIterator<'db, 'cache, L> -where - L: TrieLayout, -{ - trie_iterator: TrieDBIterator<'db, 'cache, L>, - trie: &'db TrieDB<'db, 'cache, L>, -} - -impl<'db, 'cache, L> FatDBIterator<'db, 'cache, L> -where - L: TrieLayout, -{ - /// Creates new iterator. - pub fn new(trie: &'db TrieDB<'db, 'cache, L>) -> Result, CError> { - Ok(FatDBIterator { trie_iterator: TrieDBIterator::new(trie)?, trie }) - } -} - -impl<'db, 'cache, L> TrieIterator for FatDBIterator<'db, 'cache, L> -where - L: TrieLayout, -{ - fn seek(&mut self, key: &[u8]) -> Result<(), TrieHash, CError> { - let hashed_key = L::Hash::hash(key); - self.trie_iterator.seek(hashed_key.as_ref()) - } -} - -impl<'db, 'cache, L> Iterator for FatDBIterator<'db, 'cache, L> -where - L: TrieLayout, -{ - type Item = TrieItem, CError>; - - fn next(&mut self) -> Option { - self.trie_iterator.next().map(|res| { - res.map(|(hash, value)| { - let aux_hash = L::Hash::hash(&hash); - ( - self.trie.db().get(&aux_hash, Default::default()).expect("Missing fatdb hash"), - value, - ) - }) - }) - } -} - -/// Iterator over inserted keys. -pub struct FatDBKeyIterator<'db, 'cache, L> -where - L: TrieLayout, -{ - trie_iterator: TrieDBKeyIterator<'db, 'cache, L>, - trie: &'db TrieDB<'db, 'cache, L>, -} - -impl<'db, 'cache, L> FatDBKeyIterator<'db, 'cache, L> -where - L: TrieLayout, -{ - /// Creates new iterator. 
- pub fn new(trie: &'db TrieDB<'db, 'cache, L>) -> Result, CError> { - Ok(FatDBKeyIterator { trie_iterator: TrieDBKeyIterator::new(trie)?, trie }) - } -} - -impl<'db, 'cache, L> TrieIterator for FatDBKeyIterator<'db, 'cache, L> -where - L: TrieLayout, -{ - fn seek(&mut self, key: &[u8]) -> Result<(), TrieHash, CError> { - let hashed_key = L::Hash::hash(key); - self.trie_iterator.seek(hashed_key.as_ref()) - } -} - -impl<'db, 'cache, L> Iterator for FatDBKeyIterator<'db, 'cache, L> -where - L: TrieLayout, -{ - type Item = TrieKeyItem, CError>; - - fn next(&mut self) -> Option { - self.trie_iterator.next().map(|res| { - res.map(|hash| { - let aux_hash = L::Hash::hash(&hash); - self.trie.db().get(&aux_hash, Default::default()).expect("Missing fatdb hash") - }) - }) - } -} diff --git a/trie-db/src/fatdbmut.rs b/trie-db/src/fatdbmut.rs deleted file mode 100644 index fa7a8f07..00000000 --- a/trie-db/src/fatdbmut.rs +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright 2017, 2020 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -use crate::{ - triedbmut::{TrieDBMutBuilder, Value}, - CError, DBValue, Result, TrieDBMut, TrieHash, TrieLayout, TrieMut, -}; -use hash_db::{HashDB, Hasher, EMPTY_PREFIX}; - -/// A mutable `Trie` implementation which hashes keys and uses a generic `HashDB` backing database. -/// Additionaly it stores inserted hash-key mappings for later retrieval. -/// -/// Use it as a `Trie` or `TrieMut` trait object. -pub struct FatDBMut<'db, L> -where - L: TrieLayout, -{ - raw: TrieDBMut<'db, L>, -} - -impl<'db, L> FatDBMut<'db, L> -where - L: TrieLayout, -{ - /// Create a new trie with the backing database `db` and empty `root` - /// Initialise to the state entailed by the genesis block. - /// This guarantees the trie is built correctly. - pub fn new(db: &'db mut dyn HashDB, root: &'db mut TrieHash) -> Self { - FatDBMut { raw: TrieDBMutBuilder::new(db, root).build() } - } - - /// Create a new trie with the backing database `db` and `root`. - /// - /// Returns an error if root does not exist. - pub fn from_existing( - db: &'db mut dyn HashDB, - root: &'db mut TrieHash, - ) -> Self { - FatDBMut { raw: TrieDBMutBuilder::from_existing(db, root).build() } - } - - /// Get the backing database. - pub fn db(&self) -> &dyn HashDB { - self.raw.db() - } - - /// Get the backing database. 
- pub fn db_mut(&mut self) -> &mut dyn HashDB { - self.raw.db_mut() - } -} - -impl<'db, L> TrieMut for FatDBMut<'db, L> -where - L: TrieLayout, -{ - fn root(&mut self) -> &TrieHash { - self.raw.root() - } - - fn is_empty(&self) -> bool { - self.raw.is_empty() - } - - fn contains(&self, key: &[u8]) -> Result, CError> { - self.raw.contains(L::Hash::hash(key).as_ref()) - } - - fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result, TrieHash, CError> - where - 'a: 'key, - { - self.raw.get(L::Hash::hash(key).as_ref()) - } - - fn insert( - &mut self, - key: &[u8], - value: &[u8], - ) -> Result>, TrieHash, CError> { - let hash = L::Hash::hash(key); - let out = self.raw.insert(hash.as_ref(), value)?; - let db = self.raw.db_mut(); - - // insert if it doesn't exist. - if out.is_none() { - let aux_hash = L::Hash::hash(hash.as_ref()); - db.emplace(aux_hash, EMPTY_PREFIX, key.to_vec()); - } - Ok(out) - } - - fn remove(&mut self, key: &[u8]) -> Result>, TrieHash, CError> { - let hash = L::Hash::hash(key); - let out = self.raw.remove(hash.as_ref())?; - - // remove if it already exists. - if out.is_some() { - let aux_hash = L::Hash::hash(hash.as_ref()); - self.raw.db_mut().remove(&aux_hash, EMPTY_PREFIX); - } - - Ok(out) - } -} diff --git a/trie-db/src/sectriedb.rs b/trie-db/src/sectriedb.rs deleted file mode 100644 index ccfbd971..00000000 --- a/trie-db/src/sectriedb.rs +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright 2017, 2020 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -use crate::{ - rstd::boxed::Box, triedb::TrieDB, CError, DBValue, Query, Result, Trie, TrieDBBuilder, - TrieHash, TrieItem, TrieIterator, TrieKeyItem, TrieLayout, -}; -use hash_db::{HashDBRef, Hasher}; - -/// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database. -/// -/// Use it as a `Trie` trait object. You can use `raw()` to get the backing `TrieDB` object. -pub struct SecTrieDB<'db, 'cache, L> -where - L: TrieLayout, -{ - raw: TrieDB<'db, 'cache, L>, -} - -impl<'db, 'cache, L> SecTrieDB<'db, 'cache, L> -where - L: TrieLayout, -{ - /// Create a new trie with the backing database `db` and `root`. - /// - /// Initialise to the state entailed by the genesis block. - /// This guarantees the trie is built correctly. - pub fn new(db: &'db dyn HashDBRef, root: &'db TrieHash) -> Self { - SecTrieDB { raw: TrieDBBuilder::new(db, root).build() } - } - - /// Get a reference to the underlying raw `TrieDB` struct. - pub fn raw(&self) -> &TrieDB<'db, 'cache, L> { - &self.raw - } - - /// Get a mutable reference to the underlying raw `TrieDB` struct. 
- pub fn raw_mut(&mut self) -> &mut TrieDB<'db, 'cache, L> { - &mut self.raw - } -} - -impl<'db, 'cache, L> Trie for SecTrieDB<'db, 'cache, L> -where - L: TrieLayout, -{ - fn root(&self) -> &TrieHash { - self.raw.root() - } - - fn contains(&self, key: &[u8]) -> Result, CError> { - self.raw.contains(L::Hash::hash(key).as_ref()) - } - - fn get_hash(&self, key: &[u8]) -> Result>, TrieHash, CError> { - self.raw.get_hash(key) - } - - fn get_with>( - &self, - key: &[u8], - query: Q, - ) -> Result, TrieHash, CError> { - self.raw.get_with(L::Hash::hash(key).as_ref(), query) - } - - fn iter<'a>( - &'a self, - ) -> Result< - Box, CError>> + 'a>, - TrieHash, - CError, - > { - TrieDB::iter(&self.raw) - } - - fn key_iter<'a>( - &'a self, - ) -> Result< - Box, CError>> + 'a>, - TrieHash, - CError, - > { - TrieDB::key_iter(&self.raw) - } -} diff --git a/trie-db/src/sectriedbmut.rs b/trie-db/src/sectriedbmut.rs deleted file mode 100644 index b56dc55a..00000000 --- a/trie-db/src/sectriedbmut.rs +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2017, 2020 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -use crate::{ - triedbmut::TrieDBMutBuilder, CError, DBValue, Result, TrieDBMut, TrieHash, TrieLayout, TrieMut, - Value, -}; -use hash_db::{HashDB, Hasher}; - -/// A mutable `Trie` implementation which hashes keys and uses a generic `HashDB` backing database. -/// -/// Use it as a `Trie` or `TrieMut` trait object. You can use `raw()` to get the backing `TrieDBMut` -/// object. -pub struct SecTrieDBMut<'db, L> -where - L: TrieLayout, -{ - raw: TrieDBMut<'db, L>, -} - -impl<'db, L> SecTrieDBMut<'db, L> -where - L: TrieLayout, -{ - /// Create a new trie with the backing database `db` and empty `root` - /// Initialize to the state entailed by the genesis block. - /// This guarantees the trie is built correctly. - pub fn new(db: &'db mut dyn HashDB, root: &'db mut TrieHash) -> Self { - SecTrieDBMut { raw: TrieDBMutBuilder::new(db, root).build() } - } - - /// Create a new trie with the backing database `db` and `root`. - pub fn from_existing( - db: &'db mut dyn HashDB, - root: &'db mut TrieHash, - ) -> Self { - SecTrieDBMut { raw: TrieDBMutBuilder::from_existing(db, root).build() } - } - - /// Get the backing database. - pub fn db(&self) -> &dyn HashDB { - self.raw.db() - } - - /// Get the backing database. 
- pub fn db_mut(&mut self) -> &mut dyn HashDB { - self.raw.db_mut() - } -} - -impl<'db, L> TrieMut for SecTrieDBMut<'db, L> -where - L: TrieLayout, -{ - fn root(&mut self) -> &TrieHash { - self.raw.root() - } - - fn is_empty(&self) -> bool { - self.raw.is_empty() - } - - fn contains(&self, key: &[u8]) -> Result, CError> { - self.raw.contains(&L::Hash::hash(key).as_ref()) - } - - fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result, TrieHash, CError> - where - 'a: 'key, - { - self.raw.get(&L::Hash::hash(key).as_ref()) - } - - fn insert( - &mut self, - key: &[u8], - value: &[u8], - ) -> Result>, TrieHash, CError> { - self.raw.insert(&L::Hash::hash(key).as_ref(), value) - } - - fn remove(&mut self, key: &[u8]) -> Result>, TrieHash, CError> { - self.raw.remove(&L::Hash::hash(key).as_ref()) - } -} diff --git a/trie-db/test/src/fatdb.rs b/trie-db/test/src/fatdb.rs deleted file mode 100644 index fb8e7350..00000000 --- a/trie-db/test/src/fatdb.rs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2017, 2020 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -use memory_db::{HashKey, MemoryDB}; -use reference_trie::{RefFatDB, RefFatDBMut, RefHasher}; -use trie_db::{DBValue, Trie, TrieMut}; - -#[test] -fn fatdb_to_trie() { - let mut memdb = MemoryDB::, DBValue>::default(); - let mut root = Default::default(); - { - let mut t = RefFatDBMut::new(&mut memdb, &mut root); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - } - let t = RefFatDB::new(&memdb, &root); - assert_eq!(t.get(&[0x01u8, 0x23]).unwrap().unwrap(), vec![0x01u8, 0x23]); - assert_eq!( - t.iter().unwrap().map(Result::unwrap).collect::>(), - vec![(vec![0x01u8, 0x23], vec![0x01u8, 0x23])] - ); - assert_eq!( - t.key_iter().unwrap().map(Result::unwrap).collect::>(), - vec![vec![0x01u8, 0x23]] - ); -} diff --git a/trie-db/test/src/fatdbmut.rs b/trie-db/test/src/fatdbmut.rs deleted file mode 100644 index 519f5f0f..00000000 --- a/trie-db/test/src/fatdbmut.rs +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2017, 2020 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -use hash_db::{Hasher, EMPTY_PREFIX}; -use memory_db::{HashKey, MemoryDB}; -use reference_trie::{RefFatDBMut, RefHasher, RefTrieDBBuilder}; -use trie_db::{Trie, TrieMut}; - -#[test] -fn fatdbmut_to_trie() { - let mut memdb = MemoryDB::, _>::default(); - let mut root = Default::default(); - { - let mut t = RefFatDBMut::new(&mut memdb, &mut root); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - } - - let t = RefTrieDBBuilder::new(&memdb, &root).build(); - assert_eq!(t.get(&RefHasher::hash(&[0x01u8, 0x23])), Ok(Some(vec![0x01u8, 0x23])),); -} - -#[test] -fn fatdbmut_insert_remove_key_mapping() { - let mut memdb = MemoryDB::, _>::default(); - let mut root = Default::default(); - let key = [0x01u8, 0x23]; - let val = [0x01u8, 0x24]; - let key_hash = RefHasher::hash(&key); - let aux_hash = RefHasher::hash(&key_hash); - let mut t = RefFatDBMut::new(&mut memdb, &mut root); - t.insert(&key, &val).unwrap(); - assert_eq!(t.get(&key), Ok(Some(val.to_vec()))); - assert_eq!(t.db().get(&aux_hash, EMPTY_PREFIX), Some(key.to_vec())); - t.remove(&key).unwrap(); - assert_eq!(t.db().get(&aux_hash, EMPTY_PREFIX), None); -} diff --git a/trie-db/test/src/lib.rs b/trie-db/test/src/lib.rs deleted file mode 100644 index 7802247e..00000000 --- a/trie-db/test/src/lib.rs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2020 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Tests for trie-db crate. - -#[cfg(test)] -mod fatdb; -#[cfg(test)] -mod fatdbmut; -#[cfg(test)] -mod iter_build; -#[cfg(test)] -mod iterator; -#[cfg(test)] -mod proof; -#[cfg(test)] -mod recorder; -#[cfg(test)] -mod sectriedb; -#[cfg(test)] -mod sectriedbmut; -#[cfg(test)] -mod trie_codec; -#[cfg(test)] -mod triedb; -#[cfg(test)] -mod triedbmut; diff --git a/trie-db/test/src/sectriedb.rs b/trie-db/test/src/sectriedb.rs deleted file mode 100644 index bc04a5c3..00000000 --- a/trie-db/test/src/sectriedb.rs +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2017, 2020 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -use hash_db::Hasher; -use memory_db::{HashKey, MemoryDB}; -use reference_trie::{RefHasher, RefSecTrieDB, RefTrieDBMutBuilder}; -use trie_db::{DBValue, Trie, TrieMut}; - -#[test] -fn trie_to_sectrie() { - let mut db = MemoryDB::, DBValue>::default(); - let mut root = Default::default(); - { - let mut t = RefTrieDBMutBuilder::new(&mut db, &mut root).build(); - t.insert(&RefHasher::hash(&[0x01u8, 0x23]), &[0x01u8, 0x23]).unwrap(); - } - let t = RefSecTrieDB::new(&db, &root); - assert_eq!(t.get(&[0x01u8, 0x23]).unwrap().unwrap(), vec![0x01u8, 0x23]); -} diff --git a/trie-db/test/src/sectriedbmut.rs b/trie-db/test/src/sectriedbmut.rs deleted file mode 100644 index e984d2e2..00000000 --- a/trie-db/test/src/sectriedbmut.rs +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2017, 2020 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -use hash_db::Hasher; -use memory_db::{HashKey, MemoryDB}; -use reference_trie::{RefHasher, RefSecTrieDBMut, RefTrieDBBuilder}; -use trie_db::{DBValue, Trie, TrieMut}; - -#[test] -fn sectrie_to_trie() { - let mut memdb = MemoryDB::, DBValue>::default(); - let mut root = Default::default(); - { - let mut t = RefSecTrieDBMut::new(&mut memdb, &mut root); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - } - let t = RefTrieDBBuilder::new(&memdb, &root).build(); - assert_eq!(t.get(&RefHasher::hash(&[0x01u8, 0x23])).unwrap().unwrap(), vec![0x01u8, 0x23],); -} diff --git a/trie-db/test/src/triedbmut.rs b/trie-db/test/src/triedbmut.rs deleted file mode 100644 index 02316892..00000000 --- a/trie-db/test/src/triedbmut.rs +++ /dev/null @@ -1,838 +0,0 @@ -// Copyright 2017, 2020 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
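
Editor's note: the sectriedb and sectriedbmut round trips deleted above both reduce to the delegation shown in the SecTrieDBMut impl at the start of this hunk: every caller key is hashed with the layout hasher before the underlying trie is touched. A minimal sketch of that round trip, modeled on the deleted tests against the pre-restructure reference-trie types; the key and value bytes are illustrative.

// Illustrative sketch (not part of the diff): mirrors the deleted sectriedbmut test.
use hash_db::Hasher;
use memory_db::{HashKey, MemoryDB};
use reference_trie::{RefHasher, RefSecTrieDBMut, RefTrieDBBuilder};
use trie_db::{DBValue, Trie, TrieMut};

fn main() {
    let mut memdb = MemoryDB::<RefHasher, HashKey<RefHasher>, DBValue>::default();
    let mut root = Default::default();
    {
        // Write through the secure API: the entry is keyed by RefHasher::hash(b"doge").
        let mut t = RefSecTrieDBMut::new(&mut memdb, &mut root);
        t.insert(b"doge", b"coin").unwrap(); // arbitrary sample data
    }
    // Read it back through the plain trie under the hashed key.
    let t = RefTrieDBBuilder::new(&memdb, &root).build();
    assert_eq!(t.get(RefHasher::hash(b"doge").as_ref()).unwrap(), Some(b"coin".to_vec()));
}
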
- -use std::ops::Deref; - -use env_logger; -use hash_db::{HashDB, Hasher, EMPTY_PREFIX}; -use log::debug; -use memory_db::{HashKey, MemoryDB, PrefixedKey}; -use reference_trie::{ - reference_trie_root, test_layouts, ExtensionLayout, HashedValueNoExt, - HashedValueNoExtThreshold, NoExtensionLayout, RefHasher, ReferenceNodeCodec, - ReferenceNodeCodecNoExt, TestTrieCache, -}; -use trie_db::{ - DBValue, NodeCodec, Recorder, Trie, TrieCache, TrieDBBuilder, TrieDBMut, TrieDBMutBuilder, - TrieError, TrieLayout, TrieMut, Value, -}; -use trie_standardmap::*; - -type PrefixedMemoryDB = - MemoryDB<::Hash, PrefixedKey<::Hash>, DBValue>; -type MemoryDBProof = - MemoryDB<::Hash, HashKey<::Hash>, DBValue>; - -fn populate_trie<'db, T: TrieLayout>( - db: &'db mut dyn HashDB, - root: &'db mut ::Out, - v: &[(Vec, Vec)], -) -> TrieDBMut<'db, T> { - let mut t = TrieDBMutBuilder::::new(db, root).build(); - - for i in 0..v.len() { - let key: &[u8] = &v[i].0; - let val: &[u8] = &v[i].1; - t.insert(key, val).unwrap(); - } - t -} - -fn unpopulate_trie<'db, T: TrieLayout>( - t: &mut TrieDBMut<'db, T>, - v: &[(Vec, Vec)], -) -> bool { - for (_ix, i) in v.into_iter().enumerate() { - let key: &[u8] = &i.0; - if t.remove(key).is_err() { - return false - } - } - true -} - -fn reference_hashed_null_node() -> ::Out { - if T::USE_EXTENSION { - as NodeCodec>::hashed_null_node() - } else { - as NodeCodec>::hashed_null_node() - } -} - -#[test] -fn playpen() { - env_logger::init(); - playpen_internal::>(); - playpen_internal::(); - playpen_internal::(); - playpen_internal::(); -} -fn playpen_internal() { - let mut seed = [0u8; 32]; - for test_i in 0..10_000 { - if test_i % 50 == 0 { - debug!("{:?} of 10000 stress tests done", test_i); - } - let initial_seed = seed.clone(); - let x = StandardMap { - alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".to_vec()), - min_key: 5, - journal_key: 0, - value_mode: ValueMode::Index, - count: 100, - } - .make_with(&mut seed); - - let real = reference_trie_root::(x.clone()); - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - let mut memtrie = populate_trie::(&mut memdb, &mut root, &x); - - memtrie.commit(); - if *memtrie.root() != real { - println!("TRIE MISMATCH"); - println!(); - println!("{:?} vs {:?}", memtrie.root(), real); - for i in &x { - println!("{:#x?} -> {:#x?}", i.0, i.1); - } - } - assert_eq!(*memtrie.root(), real); - assert!(unpopulate_trie(&mut memtrie, &x), "{:?}", (test_i, initial_seed)); - memtrie.commit(); - let hashed_null_node = reference_hashed_null_node::(); - if *memtrie.root() != hashed_null_node { - println!("- TRIE MISMATCH"); - println!(); - println!("{:#x?} vs {:#x?}", memtrie.root(), hashed_null_node); - for i in &x { - println!("{:#x?} -> {:#x?}", i.0, i.1); - } - } - assert_eq!(*memtrie.root(), hashed_null_node); - } -} - -test_layouts!(init, init_internal); -fn init_internal() { - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - let hashed_null_node = reference_hashed_null_node::(); - assert_eq!(*t.root(), hashed_null_node); -} - -test_layouts!(insert_on_empty, insert_on_empty_internal); -fn insert_on_empty_internal() { - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - assert_eq!( - *t.root(), - reference_trie_root::(vec![(vec![0x01u8, 0x23], 
vec![0x01u8, 0x23])]), - ); -} - -test_layouts!(remove_to_empty, remove_to_empty_internal); -fn remove_to_empty_internal() { - let big_value = b"00000000000000000000000000000000"; - - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - - t.insert(&[0x01], big_value).unwrap(); - t.insert(&[0x01, 0x23], big_value).unwrap(); - t.insert(&[0x01, 0x34], big_value).unwrap(); - t.remove(&[0x01]).unwrap(); - t.remove(&[0x01, 0x23]).unwrap(); - t.remove(&[0x01, 0x34]).unwrap(); - } - assert_eq!(memdb.keys().len(), 0); -} - -test_layouts!(remove_to_empty_checked, remove_to_empty_checked_internal); -fn remove_to_empty_checked_internal() { - let big_value = b"00000000000000000000000000000000"; - - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - - t.insert(&[0x01], big_value).unwrap(); - t.insert(&[0x01, 0x23], big_value).unwrap(); - t.insert(&[0x01, 0x34], big_value).unwrap(); - t.commit(); - assert_eq!(t.get(&[0x01]).unwrap(), Some(big_value.to_vec()),); - assert_eq!(t.get(&[0x01, 0x34]).unwrap(), Some(big_value.to_vec()),); - t.commit(); - t.remove(&[0x01]).unwrap(); - t.remove(&[0x01, 0x23]).unwrap(); - t.remove(&[0x01, 0x34]).unwrap(); - } - assert_eq!(memdb.keys().len(), 0); -} - -test_layouts!(remove_to_empty_no_extension, remove_to_empty_no_extension_internal); -fn remove_to_empty_no_extension_internal() { - let big_value = b"00000000000000000000000000000000"; - let big_value2 = b"00000000000000000000000000000002"; - let big_value3 = b"00000000000000000000000000000004"; - - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - - t.insert(&[0x01, 0x23], big_value3).unwrap(); - t.insert(&[0x01], big_value2).unwrap(); - t.insert(&[0x01, 0x34], big_value).unwrap(); - t.remove(&[0x01]).unwrap(); - // commit on drop - } - assert_eq!( - &root, - &reference_trie::calc_root::(vec![ - (vec![0x01u8, 0x23], big_value3.to_vec()), - (vec![0x01u8, 0x34], big_value.to_vec()), - ]) - ); -} - -test_layouts!(insert_replace_root, insert_replace_root_internal); -fn insert_replace_root_internal() { - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - t.insert(&[0x01u8, 0x23], &[0x23u8, 0x45]).unwrap(); - assert_eq!( - *t.root(), - reference_trie_root::(vec![(vec![0x01u8, 0x23], vec![0x23u8, 0x45])]), - ); -} - -test_layouts!(insert_make_branch_root, insert_make_branch_root_internal); -fn insert_make_branch_root_internal() { - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - t.insert(&[0x11u8, 0x23], &[0x11u8, 0x23]).unwrap(); - assert_eq!( - *t.root(), - reference_trie_root::(vec![ - (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), - (vec![0x11u8, 0x23], vec![0x11u8, 0x23]) - ]) - ); -} - -test_layouts!(insert_into_branch_root, insert_into_branch_root_internal); -fn insert_into_branch_root_internal() { - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - t.insert(&[0x01u8, 0x23], 
&[0x01u8, 0x23]).unwrap(); - t.insert(&[0xf1u8, 0x23], &[0xf1u8, 0x23]).unwrap(); - t.insert(&[0x81u8, 0x23], &[0x81u8, 0x23]).unwrap(); - assert_eq!( - *t.root(), - reference_trie_root::(vec![ - (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), - (vec![0x81u8, 0x23], vec![0x81u8, 0x23]), - (vec![0xf1u8, 0x23], vec![0xf1u8, 0x23]), - ]) - ); -} - -test_layouts!(insert_value_into_branch_root, insert_value_into_branch_root_internal); -fn insert_value_into_branch_root_internal() { - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - t.insert(&[], &[0x0]).unwrap(); - assert_eq!( - *t.root(), - reference_trie_root::(vec![ - (vec![], vec![0x0]), - (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), - ]) - ); -} - -test_layouts!(insert_split_leaf, insert_split_leaf_internal); -fn insert_split_leaf_internal() { - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - t.insert(&[0x01u8, 0x34], &[0x01u8, 0x34]).unwrap(); - assert_eq!( - *t.root(), - reference_trie_root::(vec![ - (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), - (vec![0x01u8, 0x34], vec![0x01u8, 0x34]), - ]) - ); -} - -test_layouts!(insert_split_extenstion, insert_split_extenstion_internal); -fn insert_split_extenstion_internal() { - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - t.insert(&[0x01, 0x23, 0x45], &[0x01]).unwrap(); - t.insert(&[0x01, 0xf3, 0x45], &[0x02]).unwrap(); - t.insert(&[0x01, 0xf3, 0xf5], &[0x03]).unwrap(); - assert_eq!( - *t.root(), - reference_trie_root::(vec![ - (vec![0x01, 0x23, 0x45], vec![0x01]), - (vec![0x01, 0xf3, 0x45], vec![0x02]), - (vec![0x01, 0xf3, 0xf5], vec![0x03]), - ]) - ); -} - -test_layouts!(insert_big_value, insert_big_value_internal); -fn insert_big_value_internal() { - let big_value0 = b"00000000000000000000000000000000"; - let big_value1 = b"11111111111111111111111111111111"; - - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - t.insert(&[0x01u8, 0x23], big_value0).unwrap(); - t.insert(&[0x11u8, 0x23], big_value1).unwrap(); - assert_eq!( - *t.root(), - reference_trie_root::(vec![ - (vec![0x01u8, 0x23], big_value0.to_vec()), - (vec![0x11u8, 0x23], big_value1.to_vec()) - ]) - ); -} - -test_layouts!(insert_duplicate_value, insert_duplicate_value_internal); -fn insert_duplicate_value_internal() { - let big_value = b"00000000000000000000000000000000"; - - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - t.insert(&[0x01u8, 0x23], big_value).unwrap(); - t.insert(&[0x11u8, 0x23], big_value).unwrap(); - assert_eq!( - *t.root(), - reference_trie_root::(vec![ - (vec![0x01u8, 0x23], big_value.to_vec()), - (vec![0x11u8, 0x23], big_value.to_vec()) - ]) - ); -} - -test_layouts!(test_at_empty, test_at_empty_internal); -fn test_at_empty_internal() { - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - let t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - assert_eq!(t.get(&[0x5]).unwrap(), None); -} - -test_layouts!(test_at_one_and_two, 
test_at_one_and_two_internal); -fn test_at_one_and_two_internal() { - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - assert_eq!(t.get(&[0x1, 0x23]).unwrap().unwrap(), vec![0x1u8, 0x23]); - t.commit(); - assert_eq!(t.get(&[0x1, 0x23]).unwrap().unwrap(), vec![0x1u8, 0x23]); - t.insert(&[0x01u8, 0x23, 0x00], &[0x01u8, 0x24]).unwrap(); - } - let mut t = TrieDBMutBuilder::::from_existing(&mut memdb, &mut root).build(); - t.insert(&[0x01u8, 0x23, 0x00], &[0x01u8, 0x25]).unwrap(); - // This test that middle node get resolved correctly (modified - // triedbmut node due to change of child node). - assert_eq!(t.get(&[0x1, 0x23]).unwrap().unwrap(), vec![0x1u8, 0x23]); -} - -test_layouts!(test_at_three, test_at_three_internal); -fn test_at_three_internal() { - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - t.insert(&[0xf1u8, 0x23], &[0xf1u8, 0x23]).unwrap(); - t.insert(&[0x81u8, 0x23], &[0x81u8, 0x23]).unwrap(); - assert_eq!(t.get(&[0x01, 0x23]).unwrap().unwrap(), vec![0x01u8, 0x23]); - assert_eq!(t.get(&[0xf1, 0x23]).unwrap().unwrap(), vec![0xf1u8, 0x23]); - assert_eq!(t.get(&[0x81, 0x23]).unwrap().unwrap(), vec![0x81u8, 0x23]); - assert_eq!(t.get(&[0x82, 0x23]).unwrap(), None); - t.commit(); - assert_eq!(t.get(&[0x01, 0x23]).unwrap().unwrap(), vec![0x01u8, 0x23]); - assert_eq!(t.get(&[0xf1, 0x23]).unwrap().unwrap(), vec![0xf1u8, 0x23]); - assert_eq!(t.get(&[0x81, 0x23]).unwrap().unwrap(), vec![0x81u8, 0x23]); - assert_eq!(t.get(&[0x82, 0x23]).unwrap(), None); -} - -#[test] -fn test_nibbled_branch_changed_value() { - let mut memdb = MemoryDB::, DBValue>::default(); - let mut root = Default::default(); - let mut t = reference_trie::RefTrieDBMutNoExtBuilder::new(&mut memdb, &mut root).build(); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - t.insert(&[0x01u8, 0x23, 0x11], &[0xf1u8, 0x23]).unwrap(); - assert_eq!(t.get(&[0x01u8, 0x23]).unwrap(), Some(vec![0x01u8, 0x23])); -} - -test_layouts!(stress, stress_internal); -fn stress_internal() { - let mut seed = Default::default(); - for _ in 0..1000 { - let x = StandardMap { - alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".to_vec()), - min_key: 5, - journal_key: 0, - value_mode: ValueMode::Index, - count: 4, - } - .make_with(&mut seed); - - let real = reference_trie_root::(x.clone()); - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - let mut memtrie = populate_trie::(&mut memdb, &mut root, &x); - let mut y = x.clone(); - y.sort_by(|ref a, ref b| a.0.cmp(&b.0)); - let mut memdb2 = PrefixedMemoryDB::::default(); - let mut root2 = Default::default(); - let mut memtrie_sorted = populate_trie::(&mut memdb2, &mut root2, &y); - if *memtrie.root() != real || *memtrie_sorted.root() != real { - println!("TRIE MISMATCH"); - println!(); - println!("ORIGINAL... {:#x?}", memtrie.root()); - for i in &x { - println!("{:#x?} -> {:#x?}", i.0, i.1); - } - println!("SORTED... 
{:#x?}", memtrie_sorted.root()); - for i in &y { - println!("{:#x?} -> {:#x?}", i.0, i.1); - } - } - assert_eq!(*memtrie.root(), real); - assert_eq!(*memtrie_sorted.root(), real); - } -} - -test_layouts!(test_trie_existing, test_trie_existing_internal); -fn test_trie_existing_internal() { - let mut db = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut db, &mut root).build(); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - } - - { - let _ = TrieDBMutBuilder::::from_existing(&mut db, &mut root); - } -} - -test_layouts!(insert_empty, insert_empty_internal); -fn insert_empty_internal() { - let mut seed = Default::default(); - let x = StandardMap { - alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".to_vec()), - min_key: 5, - journal_key: 0, - value_mode: ValueMode::Index, - count: 4, - } - .make_with(&mut seed); - - let mut db = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut db, &mut root).build(); - for &(ref key, ref value) in &x { - t.insert(key, value).unwrap(); - } - - assert_eq!(*t.root(), reference_trie_root::(x.clone())); - - for &(ref key, _) in &x { - t.insert(key, &[]).unwrap(); - } - - assert!(t.is_empty()); - let hashed_null_node = reference_hashed_null_node::(); - assert_eq!(*t.root(), hashed_null_node); -} - -test_layouts!(return_old_values, return_old_values_internal); -fn return_old_values_internal() { - let threshold = T::MAX_INLINE_VALUE; - let mut seed = Default::default(); - let x = StandardMap { - alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".to_vec()), - min_key: 5, - journal_key: 0, - value_mode: ValueMode::Index, - count: 2, - } - .make_with(&mut seed); - - let mut db = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - let mut t = TrieDBMutBuilder::::new(&mut db, &mut root).build(); - for &(ref key, ref value) in &x { - assert!(t.insert(key, value).unwrap() == None); - if threshold.map(|t| value.len() < t as usize).unwrap_or(true) { - assert_eq!(t.insert(key, value).unwrap(), Some(Value::Inline(value.clone().into()))); - } else { - assert!(matches!(t.insert(key, value).unwrap(), Some(Value::NewNode(..)))); - } - } - for (key, value) in x { - if threshold.map(|t| value.len() < t as usize).unwrap_or(true) { - assert_eq!(t.remove(&key).unwrap(), Some(Value::Inline(value.into()))); - } else { - assert!(matches!(t.remove(&key).unwrap(), Some(Value::NewNode(..)))); - } - assert_eq!(t.remove(&key).unwrap(), None); - } -} - -#[test] -fn insert_empty_allowed() { - let mut db = MemoryDB::, DBValue>::default(); - let mut root = Default::default(); - let mut t = reference_trie::RefTrieDBMutAllowEmptyBuilder::new(&mut db, &mut root).build(); - t.insert(b"test", &[]).unwrap(); - - assert_eq!( - *t.root(), - reference_trie_root::(vec![( - b"test".to_vec(), - Vec::new() - )],) - ); - assert_eq!(t.get(b"test").unwrap(), Some(Vec::new())); -} - -#[test] -fn register_proof_without_value() { - use hash_db::{AsHashDB, Prefix}; - use reference_trie::HashedValueNoExtThreshold; - use std::{cell::RefCell, collections::HashMap}; - - type Layout = HashedValueNoExtThreshold<1>; - type MemoryDB = memory_db::MemoryDB, DBValue>; - let x = [ - (b"test1".to_vec(), vec![1; 32]), // inline - (b"test1234".to_vec(), vec![2; 36]), - (b"te".to_vec(), vec![3; 32]), - ]; - - let mut memdb = MemoryDB::default(); - let mut root = Default::default(); - let _ = populate_trie::(&mut memdb, &mut root, &x); - { - let trie = 
TrieDBBuilder::::new(&memdb, &root).build(); - println!("{:?}", trie); - } - - struct ProofRecorder { - db: MemoryDB, - record: RefCell, Vec>>, - } - // Only to test without threads. - unsafe impl Send for ProofRecorder {} - unsafe impl Sync for ProofRecorder {} - - impl HashDB for ProofRecorder { - fn get(&self, key: &::Out, prefix: Prefix) -> Option { - let v = self.db.get(key, prefix); - if let Some(v) = v.as_ref() { - self.record.borrow_mut().entry(key[..].to_vec()).or_insert_with(|| v.clone()); - } - v - } - - fn contains(&self, key: &::Out, prefix: Prefix) -> bool { - self.get(key, prefix).is_some() - } - - fn emplace(&mut self, key: ::Out, prefix: Prefix, value: DBValue) { - self.db.emplace(key, prefix, value) - } - - fn insert(&mut self, prefix: Prefix, value: &[u8]) -> ::Out { - self.db.insert(prefix, value) - } - - fn remove(&mut self, key: &::Out, prefix: Prefix) { - self.db.remove(key, prefix) - } - } - - impl AsHashDB for ProofRecorder { - fn as_hash_db(&self) -> &dyn HashDB { - self - } - fn as_hash_db_mut<'a>(&'a mut self) -> &'a mut (dyn HashDB + 'a) { - self - } - } - - let mut memdb = ProofRecorder { db: memdb, record: Default::default() }; - - let root_proof = root.clone(); - { - let mut trie = TrieDBMutBuilder::::from_existing(&mut memdb, &mut root).build(); - // touch te value (test1 remains untouch). - trie.get(b"te").unwrap(); - // cut test_1234 prefix - trie.insert(b"test12", &[2u8; 36][..]).unwrap(); - // remove 1234 - trie.remove(b"test1234").unwrap(); - - // proof should contain value for 'te' only. - } - - type MemoryDBProof = memory_db::MemoryDB, DBValue>; - let mut memdb_from_proof = MemoryDBProof::default(); - for (_key, value) in memdb.record.into_inner().into_iter() { - memdb_from_proof.insert(hash_db::EMPTY_PREFIX, value.as_slice()); - } - - let db_unpacked = memdb_from_proof.clone(); - let root_unpacked = root_proof.clone(); - - let mut memdb_from_proof = db_unpacked.clone(); - let mut root_proof = root_unpacked.clone(); - { - let mut trie = - TrieDBMutBuilder::::from_existing(&mut memdb_from_proof, &mut root_proof) - .build(); - trie.get(b"te").unwrap(); - trie.insert(b"test12", &[2u8; 36][..]).unwrap(); - trie.remove(b"test1234").unwrap(); - } - - let mut memdb_from_proof = db_unpacked.clone(); - let mut root_proof = root_unpacked.clone(); - { - use trie_db::Trie; - let trie = TrieDBBuilder::::new(&memdb_from_proof, &root_proof).build(); - assert!(trie.get(b"te").unwrap().is_some()); - assert!(matches!( - trie.get(b"test1").map_err(|e| *e), - Err(TrieError::IncompleteDatabase(..)) - )); - } - - { - let trie = - TrieDBMutBuilder::::from_existing(&mut memdb_from_proof, &mut root_proof) - .build(); - assert!(trie.get(b"te").unwrap().is_some()); - assert!(matches!( - trie.get(b"test1").map_err(|e| *e), - Err(TrieError::IncompleteDatabase(..)) - )); - } -} - -test_layouts!(test_recorder, test_recorder_internal); -fn test_recorder_internal() { - let key_value = vec![ - (b"A".to_vec(), vec![1; 64]), - (b"AA".to_vec(), vec![2; 64]), - (b"AB".to_vec(), vec![3; 64]), - (b"B".to_vec(), vec![4; 64]), - ]; - - // Add some initial data to the trie - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - for (key, value) in key_value.iter().take(1) { - t.insert(key, value).unwrap(); - } - } - - // Add more data, but this time only to the overlay. - // While doing that we record all trie accesses to replay this operation. 
- let mut recorder = Recorder::::new(); - let mut overlay = memdb.clone(); - let mut new_root = root; - { - let mut trie = TrieDBMutBuilder::::from_existing(&mut overlay, &mut new_root) - .with_recorder(&mut recorder) - .build(); - - for (key, value) in key_value.iter().skip(1) { - trie.insert(key, value).unwrap(); - } - } - - let mut partial_db = MemoryDBProof::::default(); - for record in recorder.drain() { - partial_db.insert(EMPTY_PREFIX, &record.data); - } - - // Replay the it, but this time we use the proof. - let mut validated_root = root; - { - let mut trie = - TrieDBMutBuilder::::from_existing(&mut partial_db, &mut validated_root).build(); - - for (key, value) in key_value.iter().skip(1) { - trie.insert(key, value).unwrap(); - } - } - - assert_eq!(new_root, validated_root); -} - -test_layouts!(test_recorder_witch_cache, test_recorder_with_cache_internal); -fn test_recorder_with_cache_internal() { - let key_value = vec![ - (b"A".to_vec(), vec![1; 64]), - (b"AA".to_vec(), vec![2; 64]), - (b"AB".to_vec(), vec![3; 64]), - (b"B".to_vec(), vec![4; 64]), - ]; - - // Add some initial data to the trie - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - { - let mut t = TrieDBMutBuilder::::new(&mut memdb, &mut root).build(); - for (key, value) in key_value.iter().take(1) { - t.insert(key, value).unwrap(); - } - } - - let mut cache = TestTrieCache::::default(); - - { - let trie = TrieDBBuilder::::new(&memdb, &root).with_cache(&mut cache).build(); - - // Only read one entry. - assert_eq!(key_value[0].1, trie.get(&key_value[0].0).unwrap().unwrap()); - } - - // Root should now be cached. - assert!(cache.get_node(&root).is_some()); - - // Add more data, but this time only to the overlay. - // While doing that we record all trie accesses to replay this operation. - let mut recorder = Recorder::::new(); - let mut overlay = memdb.clone(); - let mut new_root = root; - { - let mut trie = TrieDBMutBuilder::::from_existing(&mut overlay, &mut new_root) - .with_recorder(&mut recorder) - .with_cache(&mut cache) - .build(); - - for (key, value) in key_value.iter().skip(1) { - trie.insert(key, value).unwrap(); - } - } - - for (key, value) in key_value.iter().skip(1) { - let cached_value = cache.lookup_value_for_key(key).unwrap(); - - assert_eq!(value, cached_value.data().flatten().unwrap().deref()); - assert_eq!(T::Hash::hash(&value), cached_value.hash().unwrap()); - } - - let mut partial_db = MemoryDBProof::::default(); - for record in recorder.drain() { - partial_db.insert(EMPTY_PREFIX, &record.data); - } - - // Replay the it, but this time we use the proof. 
- let mut validated_root = root; - { - let mut trie = - TrieDBMutBuilder::::from_existing(&mut partial_db, &mut validated_root).build(); - - for (key, value) in key_value.iter().skip(1) { - trie.insert(key, value).unwrap(); - } - } - - assert_eq!(new_root, validated_root); -} - -test_layouts!(test_insert_remove_data_with_cache, test_insert_remove_data_with_cache_internal); -fn test_insert_remove_data_with_cache_internal() { - let key_value = vec![ - (b"A".to_vec(), vec![1; 64]), - (b"AA".to_vec(), vec![2; 64]), - // Should be inlined - (b"AC".to_vec(), vec![7; 4]), - (b"AB".to_vec(), vec![3; 64]), - (b"B".to_vec(), vec![4; 64]), - ]; - - let mut cache = TestTrieCache::::default(); - let mut recorder = Recorder::::new(); - let mut memdb = PrefixedMemoryDB::::default(); - let mut root = Default::default(); - { - let mut trie = TrieDBMutBuilder::::new(&mut memdb, &mut root) - .with_recorder(&mut recorder) - .with_cache(&mut cache) - .build(); - - // Add all values - for (key, value) in key_value.iter() { - trie.insert(key, value).unwrap(); - } - - // Remove only the last 2 elements - for (key, _) in key_value.iter().skip(3) { - let _ = trie.remove(key); - } - } - - // Then only the first 3 elements should be in the cache and the last - // two ones should not be there. - for (key, value) in key_value.iter().take(3) { - let key_str = String::from_utf8_lossy(key); - - let cached_value = cache - .lookup_value_for_key(key) - .unwrap_or_else(|| panic!("Failed to lookup `{}`", key_str)); - - assert_eq!(value, cached_value.data().flatten().unwrap().deref(), "{:?}", key_str); - assert_eq!(T::Hash::hash(&value), cached_value.hash().unwrap()); - } - - for (key, _) in key_value.iter().skip(3) { - assert!(cache.lookup_value_for_key(key).is_none()); - } -} diff --git a/trie-eip1186/CHANGELOG.md b/trie-eip1186/CHANGELOG.md deleted file mode 100644 index 38b4e85f..00000000 --- a/trie-eip1186/CHANGELOG.md +++ /dev/null @@ -1,8 +0,0 @@ -# Changelog - -The format is based on [Keep a Changelog]. - -[Keep a Changelog]: http://keepachangelog.com/en/1.0.0/ - -## [Unreleased] -Support eip 1186 trie proofs. [#146](https://github.com/paritytech/trie/pull/146) diff --git a/trie-eip1186/Cargo.toml b/trie-eip1186/Cargo.toml deleted file mode 100644 index 5254d261..00000000 --- a/trie-eip1186/Cargo.toml +++ /dev/null @@ -1,19 +0,0 @@ -[package] -name = "trie-eip1186" -version = "0.5.0" -authors = ["Parity Technologies "] -description = "EIP-1186 compliant proof generation and verification" -repository = "https://github.com/paritytech/trie" -license = "Apache-2.0" -edition = "2018" - -[dependencies] -trie-db = { path = "../trie-db", default-features = false, version = "0.27.0"} -hash-db = { path = "../hash-db", default-features = false, version = "0.16.0"} - -[features] -default = ["std"] -std = [ - "trie-db/std", - "hash-db/std", -] diff --git a/trie-eip1186/src/eip1186.rs b/trie-eip1186/src/eip1186.rs deleted file mode 100644 index 37ad106d..00000000 --- a/trie-eip1186/src/eip1186.rs +++ /dev/null @@ -1,311 +0,0 @@ -use crate::rstd::{result::Result, vec::Vec}; -use hash_db::{HashDBRef, Hasher}; -use trie_db::{ - node::{decode_hash, Node, NodeHandle, Value}, - recorder::Recorder, - CError, DBValue, NibbleSlice, NodeCodec, Result as TrieResult, Trie, TrieDBBuilder, TrieHash, - TrieLayout, -}; - -/// Generate an eip-1186 compatible proof for key-value pairs in a trie given a key. 
-pub fn generate_proof( - db: &dyn HashDBRef, - root: &TrieHash, - key: &[u8], -) -> TrieResult<(Vec>, Option>), TrieHash, CError> -where - L: TrieLayout, -{ - let mut recorder = Recorder::::new(); - - let item = { - let trie = TrieDBBuilder::::new(db, root).with_recorder(&mut recorder).build(); - trie.get(key)? - }; - - let proof: Vec> = recorder.drain().into_iter().map(|r| r.data).collect(); - Ok((proof, item)) -} - -/// Errors that may occur during proof verification. Most of the errors types simply indicate that -/// the proof is invalid with respect to the statement being verified, and the exact error type can -/// be used for debugging. -#[derive(PartialEq, Eq)] -#[cfg_attr(feature = "std", derive(Debug))] -pub enum VerifyError<'a, HO, CE> { - /// The proof does not contain any value for the given key - /// the error carries the nibbles left after traversing the trie - NonExistingValue(NibbleSlice<'a>), - /// The proof contains a value for the given key - /// while we were expecting to find a non-existence proof - ExistingValue(Vec), - /// The proof indicates that the trie contains a different value. - /// the error carries the value contained in the trie - ValueMismatch(Vec), - /// The proof is missing trie nodes required to verify. - IncompleteProof, - /// The node hash computed from the proof is not matching. - HashMismatch(HO), - /// One of the proof nodes could not be decoded. - DecodeError(CE), - /// Error in converting a plain hash into a HO - HashDecodeError(&'a [u8]), -} - -#[cfg(feature = "std")] -impl<'a, HO: std::fmt::Debug, CE: std::error::Error> std::fmt::Display for VerifyError<'a, HO, CE> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { - match self { - VerifyError::NonExistingValue(key) => { - write!(f, "Key does not exist in trie: reaming key={:?}", key) - }, - VerifyError::ExistingValue(value) => { - write!(f, "trie contains a value for given key value={:?}", value) - }, - VerifyError::ValueMismatch(key) => { - write!(f, "Expected value was not found in the trie: key={:?}", key) - }, - VerifyError::IncompleteProof => write!(f, "Proof is incomplete -- expected more nodes"), - VerifyError::HashMismatch(hash) => write!(f, "hash mismatch found: hash={:?}", hash), - VerifyError::DecodeError(err) => write!(f, "Unable to decode proof node: {}", err), - VerifyError::HashDecodeError(plain_hash) => { - write!(f, "Unable to decode hash value plain_hash: {:?}", plain_hash) - }, - } - } -} - -#[cfg(feature = "std")] -impl<'a, HO: std::fmt::Debug, CE: std::error::Error + 'static> std::error::Error - for VerifyError<'a, HO, CE> -{ - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { - match self { - VerifyError::DecodeError(err) => Some(err), - _ => None, - } - } -} - -/// Verify a compact proof for key-value pairs in a trie given a root hash. 
-pub fn verify_proof<'a, L>( - root: &::Out, - proof: &'a [Vec], - raw_key: &'a [u8], - expected_value: Option<&[u8]>, -) -> Result<(), VerifyError<'a, TrieHash, CError>> -where - L: TrieLayout, -{ - if proof.is_empty() { - return Err(VerifyError::IncompleteProof) - } - let key = NibbleSlice::new(raw_key); - process_node::(Some(root), &proof[0], key, expected_value, &proof[1..]) -} - -fn process_node<'a, L>( - expected_node_hash: Option<&::Out>, - encoded_node: &'a [u8], - key: NibbleSlice<'a>, - expected_value: Option<&[u8]>, - proof: &'a [Vec], -) -> Result<(), VerifyError<'a, TrieHash, CError>> -where - L: TrieLayout, -{ - if let Some(value) = expected_value { - if encoded_node == value { - return Ok(()) - } - } - if let Some(expected) = expected_node_hash { - let calculated_node_hash = ::hash(encoded_node); - if calculated_node_hash != *expected { - return Err(VerifyError::HashMismatch(calculated_node_hash)) - } - } - let node = ::decode(encoded_node).map_err(VerifyError::DecodeError)?; - match node { - Node::Empty => process_empty::(key, expected_value, proof), - Node::Leaf(nib, data) => process_leaf::(nib, data, key, expected_value, proof), - Node::Extension(nib, handle) => - process_extension::(&nib, handle, key, expected_value, proof), - Node::Branch(children, maybe_data) => - process_branch::(children, maybe_data, key, expected_value, proof), - Node::NibbledBranch(nib, children, maybe_data) => - process_nibbledbranch::(nib, children, maybe_data, key, expected_value, proof), - } -} - -fn process_empty<'a, L>( - key: NibbleSlice<'a>, - expected_value: Option<&[u8]>, - _: &[Vec], -) -> Result<(), VerifyError<'a, TrieHash, CError>> -where - L: TrieLayout, -{ - if expected_value.is_none() { - Ok(()) - } else { - Err(VerifyError::NonExistingValue(key)) - } -} - -fn process_leaf<'a, L>( - nib: NibbleSlice, - data: Value<'a>, - key: NibbleSlice<'a>, - expected_value: Option<&[u8]>, - proof: &'a [Vec], -) -> Result<(), VerifyError<'a, TrieHash, CError>> -where - L: TrieLayout, -{ - if key != nib && expected_value.is_none() { - return Ok(()) - } else if key != nib { - return Err(VerifyError::NonExistingValue(key)) - } - match_value::(Some(data), key, expected_value, proof) -} -fn process_extension<'a, L>( - nib: &NibbleSlice, - handle: NodeHandle<'a>, - mut key: NibbleSlice<'a>, - expected_value: Option<&[u8]>, - proof: &'a [Vec], -) -> Result<(), VerifyError<'a, TrieHash, CError>> -where - L: TrieLayout, -{ - if !key.starts_with(nib) && expected_value.is_none() { - return Ok(()) - } else if !key.starts_with(nib) { - return Err(VerifyError::NonExistingValue(key)) - } - key.advance(nib.len()); - - match handle { - NodeHandle::Inline(encoded_node) => - process_node::(None, encoded_node, key, expected_value, proof), - NodeHandle::Hash(plain_hash) => { - let new_root = decode_hash::(plain_hash) - .ok_or(VerifyError::HashDecodeError(plain_hash))?; - process_node::(Some(&new_root), &proof[0], key, expected_value, &proof[1..]) - }, - } -} - -fn process_nibbledbranch<'a, L>( - nib: NibbleSlice, - children: [Option>; 16], - maybe_data: Option>, - mut key: NibbleSlice<'a>, - expected_value: Option<&[u8]>, - proof: &'a [Vec], -) -> Result<(), VerifyError<'a, TrieHash, CError>> -where - L: TrieLayout, -{ - if !key.starts_with(&nib) && expected_value.is_none() { - return Ok(()) - } else if !key.starts_with(&nib) && expected_value.is_some() { - return Err(VerifyError::NonExistingValue(key)) - } - key.advance(nib.len()); - - if key.is_empty() { - match_value::(maybe_data, key, expected_value, proof) - } 
else { - match_children::(children, key, expected_value, proof) - } -} - -fn process_branch<'a, L>( - children: [Option>; 16], - maybe_data: Option>, - key: NibbleSlice<'a>, - expected_value: Option<&[u8]>, - proof: &'a [Vec], -) -> Result<(), VerifyError<'a, TrieHash, CError>> -where - L: TrieLayout, -{ - if key.is_empty() { - match_value::(maybe_data, key, expected_value, proof) - } else { - match_children::(children, key, expected_value, proof) - } -} -fn match_children<'a, L>( - children: [Option>; 16], - mut key: NibbleSlice<'a>, - expected_value: Option<&[u8]>, - proof: &'a [Vec], -) -> Result<(), VerifyError<'a, TrieHash, CError>> -where - L: TrieLayout, -{ - match children.get(key.at(0) as usize) { - Some(Some(NodeHandle::Hash(hash))) => - if proof.is_empty() { - Err(VerifyError::IncompleteProof) - } else { - key.advance(1); - let new_root = - decode_hash::(hash).ok_or(VerifyError::HashDecodeError(hash))?; - process_node::(Some(&new_root), &proof[0], key, expected_value, &proof[1..]) - }, - Some(Some(NodeHandle::Inline(encoded_node))) => { - key.advance(1); - process_node::(None, encoded_node, key, expected_value, proof) - }, - Some(None) => - if expected_value.is_none() { - Ok(()) - } else { - Err(VerifyError::NonExistingValue(key)) - }, - None => panic!("key index is out of range in children array"), - } -} - -fn match_value<'a, L>( - maybe_data: Option>, - key: NibbleSlice<'a>, - expected_value: Option<&[u8]>, - proof: &'a [Vec], -) -> Result<(), VerifyError<'a, TrieHash, CError>> -where - L: TrieLayout, -{ - match (maybe_data, proof.first(), expected_value) { - (None, _, None) => Ok(()), - (None, _, Some(_)) => Err(VerifyError::NonExistingValue(key)), - (Some(Value::Inline(inline_data)), _, Some(value)) => - if inline_data == value { - Ok(()) - } else { - Err(VerifyError::ValueMismatch(inline_data.to_vec())) - }, - (Some(Value::Inline(inline_data)), _, None) => - Err(VerifyError::ExistingValue(inline_data.to_vec())), - (Some(Value::Node(plain_hash)), Some(next_proof_item), Some(value)) => { - let value_hash = L::Hash::hash(value); - let node_hash = decode_hash::(plain_hash) - .ok_or(VerifyError::HashDecodeError(plain_hash))?; - if node_hash != value_hash { - Err(VerifyError::HashMismatch(node_hash)) - } else if next_proof_item != value { - Err(VerifyError::ValueMismatch(next_proof_item.to_vec())) - } else { - Ok(()) - } - }, - (Some(Value::Node(_)), None, _) => Err(VerifyError::IncompleteProof), - (Some(Value::Node(_)), Some(proof_item), None) => - Err(VerifyError::ExistingValue(proof_item.to_vec())), - } -} diff --git a/trie-eip1186/src/lib.rs b/trie-eip1186/src/lib.rs deleted file mode 100644 index 1483b857..00000000 --- a/trie-eip1186/src/lib.rs +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2021, 2021 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
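
Editor's note: the removed trie-eip1186 crate is a thin pair of functions. generate_proof records the nodes visited while looking a key up, and verify_proof re-walks those nodes from the root, accepting either an existence or a non-existence claim and reporting the mismatch kinds enumerated in VerifyError above. The following is a minimal sketch of the round trip, modeled on the deleted test helper; the concrete NoExtensionLayout, the fn main wrapper and the sample entries are illustrative choices, and the crates are the pre-restructure ones deleted in this diff.

// Illustrative sketch (not part of the diff): mirrors the deleted eip1186 tests.
use memory_db::{HashKey, MemoryDB};
use reference_trie::NoExtensionLayout;
use trie_db::{DBValue, TrieDBMutBuilder, TrieLayout, TrieMut};
use trie_eip1186::{generate_proof, verify_proof};

type L = NoExtensionLayout; // any TrieLayout works; this one is picked for the example
type H = <L as TrieLayout>::Hash;

fn main() {
    // Build a small trie to prove against.
    let mut db = MemoryDB::<H, HashKey<H>, DBValue>::default();
    let mut root = Default::default();
    {
        let mut trie = TrieDBMutBuilder::<L>::new(&mut db, &mut root).build();
        trie.insert(b"do", b"verb").unwrap();
        trie.insert(b"dog", b"puppy").unwrap();
    }

    // The proof is the list of nodes touched while looking up the key;
    // `item` is the value found (None would make this a non-existence proof).
    let (proof, item) = generate_proof::<L>(&db, &root, b"do").unwrap();
    assert_eq!(Some(b"verb".as_ref()), item.as_deref());

    // Verification needs only the root, the proof nodes and the claimed value.
    verify_proof::<L>(&root, &proof, b"do", Some(b"verb")).unwrap();
}
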
-#![cfg_attr(not(feature = "std"), no_std)] - -#[cfg(feature = "std")] -mod rstd { - pub use std::{result, vec}; -} - -#[cfg(not(feature = "std"))] -mod rstd { - pub use alloc::vec; - pub use core::result; - pub trait Error {} - impl Error for T {} -} - -mod eip1186; -pub use eip1186::{generate_proof, verify_proof, VerifyError}; diff --git a/trie-eip1186/test/Cargo.toml b/trie-eip1186/test/Cargo.toml deleted file mode 100644 index 4ee7c0fc..00000000 --- a/trie-eip1186/test/Cargo.toml +++ /dev/null @@ -1,15 +0,0 @@ -[package] -name = "trie-eip1186-test" -version = "0.5.0" -authors = ["Parity Technologies "] -description = "Tests for trie-eip1186 crate" -repository = "https://github.com/paritytech/trie" -license = "Apache-2.0" -edition = "2018" - -[dependencies] -trie-eip1186 = { path = "..", version = "0.5.0"} -trie-db = { path = "../../trie-db", version = "0.27.0"} -hash-db = { path = "../../hash-db", version = "0.16.0"} -reference-trie = { path = "../../test-support/reference-trie", version = "0.29.0" } -memory-db = { path = "../../memory-db", version = "0.32.0" } diff --git a/trie-eip1186/test/src/eip1186.rs b/trie-eip1186/test/src/eip1186.rs deleted file mode 100644 index 05d89edc..00000000 --- a/trie-eip1186/test/src/eip1186.rs +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright 2019, 2020 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -use hash_db::Hasher; -use reference_trie::test_layouts; -use trie_db::{DBValue, TrieDBMutBuilder, TrieLayout, TrieMut}; -use trie_eip1186::{generate_proof, verify_proof, VerifyError}; - -type MemoryDB = memory_db::MemoryDB< - ::Hash, - memory_db::HashKey<::Hash>, - DBValue, ->; - -fn test_entries() -> Vec<(&'static [u8], &'static [u8])> { - vec![ - // "alfa" is at a hash-referenced leaf node. - (b"alfa", &[0; 32]), - // "bravo" is at an inline leaf node. - (b"bravo", b"bravo"), - // "do" is at a hash-referenced branch node. - (b"do", b"verb"), - // "dog" is at a hash-referenced branch node. - (b"dog", b"puppy"), - // "doge" is at a hash-referenced leaf node. - (b"doge", &[0; 32]), - // extension node "o" (plus nibble) to next branch. - (b"horse", b"stallion"), - (b"house", b"building"), - ] -} - -fn test_generate_proof( - entries: Vec<(&'static [u8], &'static [u8])>, - key: &[u8], -) -> (::Out, Vec>, Option>) { - // Populate DB with full trie from entries. - let (db, root) = { - let mut db = >::default(); - let mut root = Default::default(); - { - let mut trie = >::new(&mut db, &mut root).build(); - for (key, value) in entries.iter() { - trie.insert(key, value).unwrap(); - } - } - (db, root) - }; - // Generate proof for the given keys.. - let proof = generate_proof::(&db, &root, key).unwrap(); - (root, proof.0, proof.1) -} - -test_layouts!(trie_proof_works2, trie_proof_works_internal2); -fn trie_proof_works_internal2() { - let (root, proof, item) = test_generate_proof::( - vec![ - // "do" is at a hash-referenced branch node. - (b"do", b"verb"), - // "dog" is at a hash-referenced branch node. 
- (b"dog", b"puppy"), - ], - b"do", - ); - assert_eq!(Some(b"verb".as_ref()), item.as_deref(), "verb is the item"); - assert!(verify_proof::(&root, &proof, b"do", Some(b"verb")).is_ok(), "verifying do"); - - let (root, proof, item) = test_generate_proof::( - vec![ - // "do" is at a hash-referenced branch node. - (b"do", b"verb"), - // "dog" is at a hash-referenced branch node. - (b"dog", b"puppy"), - ], - b"dog", - ); - assert_eq!(Some(b"puppy".as_ref()), item.as_deref(), "puppy is the item"); - assert!(verify_proof::(&root, &proof, b"dog", Some(b"puppy")).is_ok(), "verifying dog"); -} - -test_layouts!(trie_proof_works, trie_proof_works_internal); -fn trie_proof_works_internal() { - let (root, proof, item) = test_generate_proof::(test_entries(), b"do"); - assert_eq!(Some(b"verb".as_ref()), item.as_deref(), "verb is the item"); - assert!(verify_proof::(&root, &proof, b"do", Some(b"verb")).is_ok(), "verifying do"); - - let (root, proof, item) = test_generate_proof::(test_entries(), b"dog"); - assert_eq!(Some(b"puppy".as_ref()), item.as_deref(), "puppy is the item"); - assert!(verify_proof::(&root, &proof, b"dog", Some(b"puppy")).is_ok(), "verifying dog"); - - let (root, proof, item) = test_generate_proof::(test_entries(), b"doge"); - assert_eq!(Some([0; 32].as_ref()), item.as_deref(), "[0;32] is the item"); - assert!(verify_proof::(&root, &proof, b"doge", Some(&[0; 32])).is_ok(), "verifying doge"); - - let (root, proof, item) = test_generate_proof::(test_entries(), b"bravo"); - assert_eq!(Some(b"bravo".as_ref()), item.as_deref(), "bravo is the item"); - assert!(verify_proof::(&root, &proof, b"bravo", Some(b"bravo")).is_ok(), "verifying bravo"); - - let (root, proof, item) = test_generate_proof::(test_entries(), b"alfabet"); - assert!(item.is_none(), "item not found"); - assert!(verify_proof::(&root, &proof, b"alfabet", None).is_ok(), "verifying alfabet"); - - let (root, proof, item) = test_generate_proof::(test_entries(), b"d"); - assert!(item.is_none(), "item not found"); - assert!(verify_proof::(&root, &proof, b"d", None).is_ok(), "verifying d"); - - let (root, proof, item) = test_generate_proof::(test_entries(), b"do\x10"); - assert!(item.is_none(), "item not found"); - assert!(verify_proof::(&root, &proof, b"do\x10", None).is_ok(), "verifying do\x10"); - - let (root, proof, item) = test_generate_proof::(test_entries(), b"halp"); - assert!(item.is_none(), "item not found"); - assert!(verify_proof::(&root, &proof, b"halp", None).is_ok(), "verifying halp"); -} - -test_layouts!(trie_proof_works_for_empty_trie, trie_proof_works_for_empty_trie_internal); -fn trie_proof_works_for_empty_trie_internal() { - let (root, proof, item) = test_generate_proof::(vec![], b"alpha"); - assert!(item.is_none(), "item not found"); - assert!(verify_proof::(&root, &proof, b"alpha", None).is_ok(), "verifying alpha"); - let (root, proof, item) = test_generate_proof::(vec![], b"bravo"); - assert!(item.is_none(), "item not found"); - assert!(verify_proof::(&root, &proof, b"bravo", None).is_ok(), "verifying bravo"); - let (root, proof, item) = test_generate_proof::(vec![], b"\x42\x42"); - assert!(item.is_none(), "item not found"); - assert!(verify_proof::(&root, &proof, b"\x42\x42", None).is_ok(), "verifying \x42\x42"); -} - -test_layouts!( - test_verify_value_mismatch_some_to_none, - test_verify_value_mismatch_some_to_none_internal -); -fn test_verify_value_mismatch_some_to_none_internal() { - let (root, proof, _) = test_generate_proof::(test_entries(), b"horse"); - let res = verify_proof::(&root, &proof, b"horse", 
Some(b"stallion")); - assert!(res.is_ok(), "verifying horse"); - - let res = verify_proof::(&root, &proof, b"halp", Some(b"plz")); - assert!(res.is_err(), "verifying halp"); - assert!(matches!(res.err().unwrap(), VerifyError::NonExistingValue(_))); - - let res = verify_proof::(&root, &proof, b"horse", Some(b"rocinante")); - assert!(res.is_err(), "verifying horse"); - //checking for two variants as it depends on the TrieLayout which one occurs - let is_ok = match res { - Err(VerifyError::HashMismatch(_)) | Err(VerifyError::ValueMismatch(_)) => true, - _ => false, - }; - assert!(is_ok); -} - -test_layouts!(test_verify_incomplete_proof, test_verify_incomplete_proof_internal); -fn test_verify_incomplete_proof_internal() { - let (root, mut proof, item) = test_generate_proof::(test_entries(), b"alfa"); - - proof.pop(); - let res = verify_proof::(&root, &proof, b"alfa", item.as_deref()); - assert!(matches!(res, Err(VerifyError::IncompleteProof))); -} - -test_layouts!(test_verify_decode_error, test_verify_decode_error_internal); -fn test_verify_decode_error_internal() { - let (_, mut proof, item) = test_generate_proof::(test_entries(), b"bravo"); - - let fake_node = b"this is not a trie node"; - proof.insert(0, fake_node.to_vec()); - let fake_root = T::Hash::hash(fake_node); - let res = verify_proof::(&fake_root, &proof, b"bravo", item.as_deref()); - assert!(matches!(res, Err(VerifyError::DecodeError(_)))); -} diff --git a/trie-eip1186/test/src/lib.rs b/trie-eip1186/test/src/lib.rs deleted file mode 100644 index 55910011..00000000 --- a/trie-eip1186/test/src/lib.rs +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright 2021 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Tests for trie-eip1186 crate. - -#[cfg(test)] -mod eip1186; diff --git a/trie-root/CHANGELOG.md b/trie-root/CHANGELOG.md deleted file mode 100644 index a35b5b7b..00000000 --- a/trie-root/CHANGELOG.md +++ /dev/null @@ -1,17 +0,0 @@ -# Changelog - -The format is based on [Keep a Changelog]. - -[Keep a Changelog]: http://keepachangelog.com/en/1.0.0/ - -## [Unreleased] - - -## [0.18.0] - 2023-03-14 -- Update dependencies. [#188](https://github.com/paritytech/trie/pull/188) and [#187](https://github.com/paritytech/trie/pull/187) - -## [0.17.0] - 2021-10-19 -- Support for value nodes. 
[#142](https://github.com/paritytech/trie/pull/142) - -## [0.16.0] - 2020-02-07 -- Update reference-trie to v0.20.0 [#78](https://github.com/paritytech/trie/pull/78) diff --git a/trie-root/Cargo.toml b/trie-root/Cargo.toml deleted file mode 100644 index 7a39bddd..00000000 --- a/trie-root/Cargo.toml +++ /dev/null @@ -1,18 +0,0 @@ -[package] -name = "trie-root" -version = "0.18.0" -authors = ["Parity Technologies "] -description = "In-memory patricia trie operations" -repository = "https://github.com/paritytech/trie" -license = "Apache-2.0" -categories = [ "no-std" ] -edition = "2018" - -[dependencies] -hash-db = { path = "../hash-db", default-features = false, version = "0.16.0" } - -[features] -default = ["std"] -std = [ - "hash-db/std" -] diff --git a/trie-root/README.md b/trie-root/README.md deleted file mode 100644 index 36d38b68..00000000 --- a/trie-root/README.md +++ /dev/null @@ -1,2 +0,0 @@ -This crate provides utility functions to validate and initialize tries using flexible input. -It is used extensively in `substrate` to validate blocks (mostly transactions and receipt roots). diff --git a/trie-root/test/Cargo.toml b/trie-root/test/Cargo.toml deleted file mode 100644 index 3e3b1e26..00000000 --- a/trie-root/test/Cargo.toml +++ /dev/null @@ -1,15 +0,0 @@ -[package] -name = "trie-root-test" -version = "0.21.0" -authors = ["Parity Technologies "] -description = "Tests fo trie-root crate" -repository = "https://github.com/paritytech/trie" -license = "Apache-2.0" -categories = [ ] -edition = "2018" - -[dependencies] -trie-root = { path = "..", version = "0.18.0" } -hex-literal = "0.3" -keccak-hasher = { path = "../../test-support/keccak-hasher", version = "0.16.0" } -reference-trie = { path = "../../test-support/reference-trie", version = "0.29.0" } diff --git a/trie-root/test/src/lib.rs b/trie-root/test/src/lib.rs deleted file mode 100644 index 0fef011c..00000000 --- a/trie-root/test/src/lib.rs +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2017, 2020 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Test for trie-root crate. - -#[cfg(test)] -mod test { - use hex_literal::hex; - use keccak_hasher::KeccakHasher; - use reference_trie::ReferenceTrieStream; - use trie_root::{sec_trie_root, trie_root}; - - #[test] - fn previous_doc_test_1() { - let v = vec![("doe", "reindeer"), ("dog", "puppy"), ("dogglesworth", "cat")]; - - let root = hex!["d6e02b2bd48aa04fd2ad87cfac1144a29ca7f7dc60f4526c7b7040763abe3d43"]; - assert_eq!( - sec_trie_root::(v, Default::default()), - root - ); - } - - #[test] - fn previous_doc_test_2() { - let v = vec![("doe", "reindeer"), ("dog", "puppy"), ("dogglesworth", "cat")]; - - let root = hex!["0807d5393ae7f349481063ebb5dbaf6bda58db282a385ca97f37dccba717cb79"]; - assert_eq!( - trie_root::(v, Default::default()), - root - ); - } -}
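
Editor's note: one pattern exercised repeatedly by the deleted triedbmut tests is worth keeping in mind when reviewing this restructuring: trie accesses made through a TrieDBMut can be recorded, drained into a standalone proof database, and the same batch of writes replayed against that proof to reproduce the new root. A condensed sketch of that flow, modeled on the deleted test_recorder test; the NoExtensionLayout choice, the fn main wrapper and the sample keys are illustrative, and the APIs are the pre-restructure trie-db and memory-db ones removed here.

// Illustrative sketch (not part of the diff): condensed from the deleted test_recorder test.
use hash_db::{HashDB, EMPTY_PREFIX};
use memory_db::{HashKey, MemoryDB, PrefixedKey};
use reference_trie::NoExtensionLayout;
use trie_db::{DBValue, Recorder, TrieDBMutBuilder, TrieLayout, TrieMut};

type L = NoExtensionLayout;
type H = <L as TrieLayout>::Hash;

fn main() {
    // Base trie with one entry.
    let mut memdb = MemoryDB::<H, PrefixedKey<H>, DBValue>::default();
    let mut root = Default::default();
    {
        let mut t = TrieDBMutBuilder::<L>::new(&mut memdb, &mut root).build();
        t.insert(b"A", &[1u8; 64]).unwrap();
    }

    // Apply further writes while recording every trie node that gets accessed.
    let mut recorder = Recorder::<L>::new();
    let mut overlay = memdb.clone();
    let mut new_root = root;
    {
        let mut trie = TrieDBMutBuilder::<L>::from_existing(&mut overlay, &mut new_root)
            .with_recorder(&mut recorder)
            .build();
        trie.insert(b"AB", &[3u8; 64]).unwrap();
    }

    // The drained records form a proof; replaying the same writes on it alone
    // must reach the same root.
    let mut partial_db = MemoryDB::<H, HashKey<H>, DBValue>::default();
    for record in recorder.drain() {
        partial_db.insert(EMPTY_PREFIX, &record.data);
    }
    let mut validated_root = root;
    {
        let mut trie =
            TrieDBMutBuilder::<L>::from_existing(&mut partial_db, &mut validated_root).build();
        trie.insert(b"AB", &[3u8; 64]).unwrap();
    }
    assert_eq!(new_root, validated_root);
}
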