Skip to content

Commit

Permalink
Feature/stored nodes using rlp (#65)
Browse files Browse the repository at this point in the history
Signed-off-by: Thomas Zamojski <[email protected]>
Co-authored-by: Karim Taam <[email protected]>
  • Loading branch information
thomas-quadratic and matkt authored Aug 12, 2024
1 parent 02cae06 commit 04a8027
Show file tree
Hide file tree
Showing 30 changed files with 513 additions and 239 deletions.
1 change: 1 addition & 0 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ dependencies {
implementation 'io.tmio:tuweni-rlp'
implementation 'org.hyperledger.besu:ipa-multipoint'
implementation 'org.hyperledger.besu.internal:trie'
implementation 'org.hyperledger.besu.internal:rlp'
implementation 'org.apache.logging.log4j:log4j-api'
implementation 'org.apache.logging.log4j:log4j-core'

Expand Down
7 changes: 5 additions & 2 deletions gradle/versions.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,15 @@ dependencyManagement {

dependency 'net.java.dev.jna:jna:5.14.0'

dependency 'org.hyperledger.besu.internal:trie:24.6.0'

dependency 'org.hyperledger.besu:ipa-multipoint:0.8.5'

dependency 'org.assertj:assertj-core:3.25.1'

dependencySet(group: 'org.hyperledger.besu.internal', version: '24.7.0') {
entry 'rlp'
entry 'trie'
}

dependencySet(group: 'org.apache.logging.log4j', version: '2.22.1') {
entry 'log4j-api'
entry 'log4j-core'
Expand Down
28 changes: 28 additions & 0 deletions src/main/java/org/hyperledger/besu/ethereum/trie/NodeLoader.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
/*
* Copyright Hyperledger Besu Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*
*/
package org.hyperledger.besu.ethereum.trie;

import java.util.Optional;

import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;

/** Abstraction over the backing key-value store used to resolve stored trie nodes. */
public interface NodeLoader {

/**
 * Looks up the stored entry for the given node.
 *
 * @param location the node's location (path) in the trie
 * @param hash the node's commitment hash
 * @return the stored key/value pair if present, otherwise empty
 */
Optional<NearestKeyValue> getNode(Bytes location, Bytes32 hash);

// NOTE(review): "Nearest" suggests a nearest-key lookup semantics in the store —
// confirm against the storage implementation; the value is absent when no bytes
// were persisted for the key.
record NearestKeyValue(Bytes key, Optional<byte[]> value) {}
}
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
* @param <V> The type of values in the Verkle Trie.
*/
public class SimpleBatchedVerkleTrie<K extends Bytes, V extends Bytes>
extends SimpleVerkleTrie<K, V> implements VerkleTrie<K, V> {
extends SimpleVerkleTrie<K, V> {

private final VerkleTrieBatchHasher batchProcessor;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,8 @@ public void calculateStateRoot() {
}
if (location.isEmpty()) {
// We will end up updating the root node. Once all the batching is finished,
// we will update the previous states of the nodes by setting them to the new ones.
// we will update the previous states of the nodes by setting them to the new
// ones.
calculateRootInternalNodeHash((InternalNode<?>) node);
updatedNodes.forEach(
(__, n) -> {
Expand Down Expand Up @@ -185,8 +186,9 @@ private void processBatch(List<Node<?>> nodes) {
}

/**
 * Recomputes the root internal node's commitment and stores it on the node
 * together with its compressed 32-byte hash.
 *
 * @param internalNode the trie's root internal node
 */
private void calculateRootInternalNodeHash(final InternalNode<?> internalNode) {
  // The root produces a single (uncompressed) commitment; compress it to get the hash.
  final Bytes commitment = getRootNodeCommitments(internalNode).get(0);
  final Bytes32 hash = hasher.compress(commitment);
  internalNode.replaceHash(hash, commitment);
}

private void calculateStemNodeHashes(
Expand Down Expand Up @@ -229,9 +231,11 @@ private List<Bytes> getStemNodeLeftRightCommitments(StemNode<?> stemNode) {
Node<?> node = stemNode.child((byte) idx);

Optional<Bytes> oldValue = node.getPrevious().map(Bytes.class::cast);
// We should not recalculate a node if it is persisted and has not undergone an update since
// We should not recalculate a node if it is persisted and has not undergone an
// update since
// its last save.
// If a child does not have a previous value, it means that it is a new node and we must
// If a child does not have a previous value, it means that it is a new node and
// we must
// therefore recalculate it.
if (!(node instanceof StoredNode<?>) && (oldValue.isEmpty() || node.isDirty())) {
if (idx < halfSize) {
Expand Down Expand Up @@ -300,9 +304,11 @@ private List<Bytes> getInternalNodeCommitments(InternalNode<?> internalNode) {
for (int i = 0; i < size; i++) {
final Node<?> node = internalNode.child((byte) i);
Optional<Bytes> oldValue = node.getPrevious().map(Bytes.class::cast);
// We should not recalculate a node if it is persisted and has not undergone an update since
// We should not recalculate a node if it is persisted and has not undergone an
// update since
// its last save.
// If a child does not have a previous value, it means that it is a new node and we must
// If a child does not have a previous value, it means that it is a new node and
// we must
// therefore recalculate it.
if (!(node instanceof StoredNode<?>) && (oldValue.isEmpty() || node.isDirty())) {
indices.add((byte) i);
Expand All @@ -318,12 +324,12 @@ private List<Bytes> getInternalNodeCommitments(InternalNode<?> internalNode) {
/**
 * Computes the root node's commitment from the hashes of all of its children.
 *
 * @param internalNode the root internal node
 * @return a single-element list containing the root commitment
 */
private List<Bytes> getRootNodeCommitments(InternalNode<?> internalNode) {
  int size = InternalNode.maxChild();
  final List<Bytes> commitmentsHashes = new ArrayList<>();
  final List<Bytes32> newValues = new ArrayList<>();
  for (int i = 0; i < size; i++) {
    final Node<?> node = internalNode.child((byte) i);
    // Children are expected to carry an up-to-date hash by the time the root is processed.
    newValues.add(node.getHash().get());
  }
  commitmentsHashes.add(hasher.commit(newValues.toArray(new Bytes32[] {})));
  return commitmentsHashes;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,14 @@
*/
package org.hyperledger.besu.ethereum.trie.verkle.factory;

import org.hyperledger.besu.ethereum.rlp.BytesValueRLPInput;
import org.hyperledger.besu.ethereum.rlp.RLPInput;
import org.hyperledger.besu.ethereum.trie.NodeLoader;
import org.hyperledger.besu.ethereum.trie.verkle.node.InternalNode;
import org.hyperledger.besu.ethereum.trie.verkle.node.LeafNode;
import org.hyperledger.besu.ethereum.trie.verkle.node.Node;
import org.hyperledger.besu.ethereum.trie.verkle.node.NullLeafNode;
import org.hyperledger.besu.ethereum.trie.verkle.node.NullNode;
import org.hyperledger.besu.ethereum.trie.verkle.node.StemNode;
import org.hyperledger.besu.ethereum.trie.verkle.node.StoredNode;

Expand All @@ -29,7 +33,7 @@

import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.rlp.RLP;
import org.apache.tuweni.bytes.MutableBytes;

/** Node types that are saved to storage. */
enum NodeType {
Expand All @@ -47,6 +51,7 @@ enum NodeType {
public class StoredNodeFactory<V> implements NodeFactory<V> {
private final NodeLoader nodeLoader;
private final Function<Bytes, V> valueDeserializer;
private final Boolean areCommitmentsCompressed;

/**
* Creates a new StoredNodeFactory with the given node loader and value deserializer.
Expand All @@ -57,6 +62,23 @@ public class StoredNodeFactory<V> implements NodeFactory<V> {
public StoredNodeFactory(NodeLoader nodeLoader, Function<Bytes, V> valueDeserializer) {
  // Delegate to the main constructor so the "uncompressed commitments" default
  // is defined in exactly one place.
  this(nodeLoader, valueDeserializer, false);
}

/**
 * Creates a new StoredNodeFactory, specifying how commitments were persisted.
 *
 * @param nodeLoader The loader for retrieving stored nodes.
 * @param valueDeserializer The function to deserialize values from Bytes.
 * @param areCommitmentsCompressed Are commitments stored compressed (32bytes).
 */
public StoredNodeFactory(
    NodeLoader nodeLoader,
    Function<Bytes, V> valueDeserializer,
    Boolean areCommitmentsCompressed) {
  this.areCommitmentsCompressed = areCommitmentsCompressed;
  this.valueDeserializer = valueDeserializer;
  this.nodeLoader = nodeLoader;
}

/**
Expand All @@ -69,74 +91,134 @@ public StoredNodeFactory(NodeLoader nodeLoader, Function<Bytes, V> valueDeserial
*/
@Override
public Optional<Node<V>> retrieve(final Bytes location, final Bytes32 hash) {
/* Currently, Root and Leaf are distinguishable by location.
/*
* Currently, Root and Leaf are distinguishable by location.
* To distinguish internal from stem, we further need values.
* Currently, they are distinguished by values length.
*/
Optional<Bytes> optionalEncodedValues = nodeLoader.getNode(location, hash);
if (optionalEncodedValues.isEmpty()) {
Optional<Node<V>> result;
Optional<NodeLoader.NearestKeyValue> optionalKeyValue = nodeLoader.getNode(location, hash);
if (optionalKeyValue.isEmpty()) {
return Optional.empty();
}
Bytes encodedValues = optionalEncodedValues.get();
List<Bytes> values = RLP.decodeToList(encodedValues, reader -> reader.readValue().copy());
final int locLength = location.size();
final int nValues = values.size();
NodeType type =
(locLength == 32 ? NodeType.LEAF : (nValues == 2 ? NodeType.INTERNAL : NodeType.STEM));
return switch (type) {
case LEAF -> Optional.of(createLeafNode(location, values));
case INTERNAL -> Optional.of(createInternalNode(location, values));
case STEM -> Optional.of(createStemNode(location, values));
default -> Optional.empty();
};
Bytes key = optionalKeyValue.get().key();
Optional<byte[]> maybeEncodedValues = optionalKeyValue.get().value();
Bytes encodedValues =
maybeEncodedValues.isPresent() ? Bytes.of(maybeEncodedValues.get()) : Bytes.EMPTY;

if (key.size() == 0) {
result = Optional.of(decodeRootNode(encodedValues));
} else if (key.size() > 0 && key.size() < 31) {
result = Optional.of(decodeInternalNode(key, encodedValues, hash));
} else if (key.size() == 31) {
result = Optional.of(decodeStemNode(key, encodedValues, hash));
} else {
result = Optional.empty();
}
return result;
}

/**
 * Normalizes a stored commitment to its 64-byte uncompressed representation,
 * zero-padding on the right. An empty commitment maps to the canonical
 * {@code Node.EMPTY_COMMITMENT}.
 */
private Bytes decodeCommitment(Bytes commitment) {
  if (areCommitmentsCompressed && !commitment.isEmpty()) {
    // TODO: uncompress commitment
  }
  final Bytes source = commitment.isEmpty() ? Node.EMPTY_COMMITMENT : commitment;
  final MutableBytes padded = MutableBytes.create(64);
  padded.set(0, source);
  return padded;
}

/**
 * Decodes the trie's root node from its RLP encoding.
 * Expected list layout: [hash, commitment, childScalars].
 *
 * @param encodedValues RLP-encoded root node retrieved from storage.
 * @return The decoded root InternalNode.
 */
InternalNode<V> decodeRootNode(Bytes encodedValues) {
  final RLPInput in = new BytesValueRLPInput(encodedValues, false);
  in.enterList();
  final Bytes32 rootHash = Bytes32.rightPad(in.readBytes());
  final Bytes rootCommitment = decodeCommitment(in.readBytes());
  final List<Bytes32> childScalars = in.readList(rlp -> Bytes32.rightPad(rlp.readBytes()));
  in.leaveList();
  return createInternalNode(Bytes.EMPTY, rootHash, rootCommitment, childScalars);
}

/**
* Creates a internalNode using the provided location, hash, and path.
*
* @param location The location of the internalNode.
* @param values List of Bytes values retrieved from storage.
* @param encodedValues List of Bytes values retrieved from storage.
* @param hash Node's hash value.
* @return A internalNode instance.
*/
InternalNode<V> createInternalNode(Bytes location, List<Bytes> values) {
final int nChild = InternalNode.maxChild();
ArrayList<Node<V>> children = new ArrayList<Node<V>>(nChild);
InternalNode<V> decodeInternalNode(Bytes location, Bytes encodedValues, Bytes32 hash) {
RLPInput input = new BytesValueRLPInput(encodedValues, false);
input.enterList();
Bytes commitment = decodeCommitment(input.readBytes());
List<Bytes32> scalars = input.readList(in -> Bytes32.rightPad(in.readBytes()));
input.leaveList();
return createInternalNode(location, hash, commitment, scalars);
}

/**
 * Builds an InternalNode whose children are lazily-loaded StoredNodes.
 * A child with a zero scalar is known to be absent and is materialized as a
 * NullNode, avoiding a pointless storage lookup.
 *
 * @param location The location of the internal node.
 * @param hash Node's hash value.
 * @param commitment Node's uncompressed commitment.
 * @param scalars The children's commitment scalars.
 * @return A internalNode instance.
 */
private InternalNode<V> createInternalNode(
    Bytes location, Bytes32 hash, Bytes commitment, List<Bytes32> scalars) {
  int nChild = InternalNode.maxChild();
  List<Node<V>> children = new ArrayList<>(nChild);
  for (int i = 0; i < nChild; i++) {
    // Value check, not `== Bytes32.ZERO`: scalars decoded from RLP are never the
    // ZERO singleton, so a reference comparison would (almost) always be false.
    if (scalars.get(i).isZero()) {
      children.add(new NullNode<V>());
    } else {
      children.add(
          new StoredNode<V>(this, Bytes.concatenate(location, Bytes.of(i)), scalars.get(i)));
    }
  }
  return new InternalNode<V>(location, hash, commitment, children);
}

/**
* Creates a BranchNode using the provided location, hash, and path.
* Creates a StemNode using the provided stem, hash and encodedValues
*
* @param location The location of the BranchNode.
* @param values List of Bytes values retrieved from storage.
* @param stem The stem of the BranchNode.
* @param encodedValues List of Bytes values retrieved from storage.
* @param hash Node's hash value.
* @return A BranchNode instance.
*/
StemNode<V> createStemNode(Bytes location, List<Bytes> values) {
StemNode<V> decodeStemNode(Bytes stem, Bytes encodedValues, Bytes32 hash) {
RLPInput input = new BytesValueRLPInput(encodedValues, false);
input.enterList();

int depth = input.readByte();
Bytes commitment = decodeCommitment(input.readBytes());
Bytes leftCommitment = decodeCommitment(input.readBytes());
Bytes rightCommitment = decodeCommitment(input.readBytes());
Bytes32 leftScalar = Bytes32.rightPad(input.readBytes());
Bytes32 rightScalar = Bytes32.rightPad(input.readBytes());
List<Bytes> values = input.readList(in -> in.readBytes());

// create StemNode
final Bytes location = stem.slice(0, depth);
final int nChild = StemNode.maxChild();
final Bytes stem = values.get(0);
final Bytes32 hash = (Bytes32) values.get(1);
final Bytes commitment = values.get(2);
final Bytes32 leftHash = (Bytes32) values.get(3);
final Bytes leftCommitment = values.get(4);
final Bytes32 rightHash = (Bytes32) values.get(5);
final Bytes rightCommitment = values.get(6);
ArrayList<Node<V>> children = new ArrayList<Node<V>>(nChild);
List<Node<V>> children = new ArrayList<>(nChild);
for (int i = 0; i < nChild; i++) {
children.add(new StoredNode<>(this, Bytes.concatenate(stem, Bytes.of(i))));
if (values.get(i) == Bytes.EMPTY) {
children.add(new NullLeafNode<V>());
} else {
children.add(
createLeafNode(
Bytes.concatenate(location, Bytes.of(i)), Bytes32.rightPad(values.get(i))));
}
}
return new StemNode<V>(
location,
stem,
hash,
commitment,
leftHash,
leftScalar,
leftCommitment,
rightHash,
rightScalar,
rightCommitment,
children);
}
Expand All @@ -145,11 +227,11 @@ StemNode<V> createStemNode(Bytes location, List<Bytes> values) {
* Creates a LeafNode using the provided location, path, and value.
*
* @param key The key of the LeafNode.
* @param values List of Bytes values retrieved from storage.
* @param encodedValue Leaf value retrieved from storage.
* @return A LeafNode instance.
*/
LeafNode<V> createLeafNode(Bytes key, List<Bytes> values) {
V value = valueDeserializer.apply(values.get(0));
LeafNode<V> createLeafNode(Bytes key, Bytes encodedValue) {
V value = valueDeserializer.apply(encodedValue);
return new LeafNode<V>(Optional.of(key), value);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
* @param <V> The type of the node's value.
*/
public abstract class BranchNode<V> extends Node<V> {
private final Optional<Bytes> location; // Location in the tree
protected Optional<Bytes> location; // Location in the tree
protected Optional<Bytes32> hash; // Vector commitment's hash
protected Optional<Bytes> commitment; // Vector commitment serialized
private final List<Node<V>> children; // List of children nodes
Expand Down
Loading

0 comments on commit 04a8027

Please sign in to comment.