diff --git a/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/adapter/TrieKeyAdapter.java b/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/adapter/TrieKeyAdapter.java
index f51d033..5c003ef 100644
--- a/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/adapter/TrieKeyAdapter.java
+++ b/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/adapter/TrieKeyAdapter.java
@@ -28,6 +28,7 @@
 import static org.hyperledger.besu.ethereum.trie.verkle.util.Parameters.VERSION_LEAF_KEY;
 
 import org.hyperledger.besu.ethereum.trie.verkle.hasher.Hasher;
+import org.hyperledger.besu.ethereum.trie.verkle.hasher.PedersenHasher;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -48,12 +49,17 @@ public class TrieKeyAdapter {
 
   private final Hasher hasher;
 
+  /** Creates a TrieKeyAdapter with the default Pedersen hasher. */
+  public TrieKeyAdapter() {
+    this.hasher = new PedersenHasher();
+  }
+
   /**
    * Creates a TrieKeyAdapter with the provided hasher.
    *
    * @param hasher The hasher used for key generation.
    */
-  public TrieKeyAdapter(Hasher hasher) {
+  public TrieKeyAdapter(final Hasher hasher) {
     this.hasher = hasher;
   }
 
@@ -73,36 +79,40 @@ public Hasher getHasher() {
    * @param storageKey The storage key.
    * @return The generated storage key.
    */
-  public Bytes32 storageKey(Bytes address, Bytes32 storageKey) {
-    final UInt256 pos = locateStorageKeyOffset(storageKey);
-    final Bytes32 base = hasher.trieKeyHash(address, pos);
-    final UInt256 suffix = locateStorageKeySuffix(storageKey);
-    return swapLastByte(base, suffix);
+  public Bytes32 storageKey(final Bytes address, final Bytes32 storageKey) {
+    final Bytes stem = getStorageStem(address, storageKey);
+    final UInt256 suffix = getStorageKeySuffix(storageKey);
+    return swapLastByte(stem, suffix);
   }
 
-  public UInt256 locateStorageKeyOffset(Bytes32 storageKey) {
-    UInt256 index = UInt256.fromBytes(storageKey);
-    if (index.compareTo(HEADER_STORAGE_SIZE) < 0) {
-      return index.add(HEADER_STORAGE_OFFSET).divide(VERKLE_NODE_WIDTH);
+  public UInt256 getStorageKeyTrieIndex(final Bytes32 storageKey) {
+    final UInt256 uintStorageKey = UInt256.fromBytes(storageKey);
+    if (uintStorageKey.compareTo(HEADER_STORAGE_SIZE) < 0) {
+      return uintStorageKey.add(HEADER_STORAGE_OFFSET).divide(VERKLE_NODE_WIDTH);
     } else {
       // We divide by VerkleNodeWidthLog2 to make space and prevent any potential overflow
       // Then, we increment, a step that is safeguarded against overflow.
-      return index
+      return uintStorageKey
           .shiftRight(VERKLE_NODE_WIDTH_LOG2.intValue())
          .add(MAIN_STORAGE_OFFSET_SHIFT_LEFT_VERKLE_NODE_WIDTH);
     }
   }
 
-  public UInt256 locateStorageKeySuffix(Bytes32 storageKey) {
-    UInt256 index = UInt256.fromBytes(storageKey);
-    if (index.compareTo(HEADER_STORAGE_SIZE) < 0) {
-      final UInt256 mod = index.add(HEADER_STORAGE_OFFSET).mod(VERKLE_NODE_WIDTH);
+  public UInt256 getStorageKeySuffix(final Bytes32 storageKey) {
+    final UInt256 uintStorageKey = UInt256.fromBytes(storageKey);
+    if (uintStorageKey.compareTo(HEADER_STORAGE_SIZE) < 0) {
+      final UInt256 mod = uintStorageKey.add(HEADER_STORAGE_OFFSET).mod(VERKLE_NODE_WIDTH);
       return UInt256.fromBytes(mod.slice(mod.size() - 1));
     } else {
       return UInt256.fromBytes(storageKey.slice(Bytes32.SIZE - 1));
     }
   }
 
+  public Bytes getStorageStem(final Bytes address, final Bytes32 storageKey) {
+    final UInt256 trieIndex = getStorageKeyTrieIndex(storageKey);
+    return hasher.computeStem(address, trieIndex);
+  }
+
   /**
    * Generates a code chunk key for a given address and chunkId.
    *
@@ -110,14 +120,22 @@ public UInt256 locateStorageKeySuffix(Bytes32 storageKey) {
    * @param chunkId The chunk ID.
    * @return The generated code chunk key.
    */
-  public Bytes32 codeChunkKey(Bytes address, UInt256 chunkId) {
-    UInt256 pos = locateCodeChunkKeyOffset(chunkId);
-    Bytes32 base = hasher.trieKeyHash(address, pos.divide(VERKLE_NODE_WIDTH));
-    return swapLastByte(base, pos.mod(VERKLE_NODE_WIDTH));
+  public Bytes32 codeChunkKey(final Bytes address, final UInt256 chunkId) {
+    final Bytes stem = getCodeChunkStem(address, chunkId);
+    return swapLastByte(stem, getCodeChunkKeySuffix(chunkId));
+  }
+
+  public UInt256 getCodeChunkKeyTrieIndex(final Bytes32 chunkId) {
+    return CODE_OFFSET.add(UInt256.fromBytes(chunkId)).divide(VERKLE_NODE_WIDTH);
+  }
+
+  public UInt256 getCodeChunkKeySuffix(final Bytes32 chunkId) {
+    return CODE_OFFSET.add(UInt256.fromBytes(chunkId)).mod(VERKLE_NODE_WIDTH);
   }
 
-  public UInt256 locateCodeChunkKeyOffset(Bytes32 chunkId) {
-    return CODE_OFFSET.add(UInt256.fromBytes(chunkId));
+  public Bytes getCodeChunkStem(final Bytes address, final UInt256 chunkId) {
+    final UInt256 trieIndex = getCodeChunkKeyTrieIndex(chunkId);
+    return hasher.computeStem(address, trieIndex);
   }
 
   /**
@@ -127,9 +145,13 @@ public UInt256 locateCodeChunkKeyOffset(Bytes32 chunkId) {
    * @param leafKey The leaf key.
    * @return The generated header key.
    */
-  Bytes32 headerKey(Bytes address, UInt256 leafKey) {
-    Bytes32 base = hasher.trieKeyHash(address, UInt256.valueOf(0).toBytes());
-    return swapLastByte(base, leafKey);
+  public Bytes32 headerKey(final Bytes address, final UInt256 leafKey) {
+    final Bytes stem = getHeaderStem(address);
+    return swapLastByte(stem, leafKey);
+  }
+
+  public Bytes getHeaderStem(final Bytes address) {
+    return hasher.computeStem(address, UInt256.valueOf(0).toBytes());
   }
 
   /**
@@ -139,9 +161,9 @@ Bytes32 headerKey(Bytes address, UInt256 leafKey) {
    * @param subIndex The subIndex.
    * @return The modified key.
    */
-  public Bytes32 swapLastByte(Bytes32 base, Bytes subIndex) {
+  public Bytes32 swapLastByte(final Bytes base, final Bytes subIndex) {
     final Bytes lastByte = subIndex.slice(subIndex.size() - 1, 1);
-    return (Bytes32) Bytes.concatenate(base.slice(0, 31), lastByte);
+    return (Bytes32) Bytes.concatenate(base, lastByte);
   }
 
   /**
@@ -150,7 +172,7 @@ public Bytes32 swapLastByte(Bytes32 base, Bytes subIndex) {
    * @param address The address.
    * @return The generated version key.
    */
-  public Bytes32 versionKey(Bytes address) {
+  public Bytes32 versionKey(final Bytes address) {
     return headerKey(address, VERSION_LEAF_KEY);
   }
 
@@ -160,7 +182,7 @@ public Bytes32 versionKey(Bytes address) {
    * @param address The address.
    * @return The generated balance key.
    */
-  public Bytes32 balanceKey(Bytes address) {
+  public Bytes32 balanceKey(final Bytes address) {
     return headerKey(address, BALANCE_LEAF_KEY);
   }
 
@@ -170,7 +192,7 @@ public Bytes32 balanceKey(Bytes address) {
    * @param address The address.
    * @return The generated nonce key.
    */
-  public Bytes32 nonceKey(Bytes address) {
+  public Bytes32 nonceKey(final Bytes address) {
     return headerKey(address, NONCE_LEAF_KEY);
   }
 
@@ -180,7 +202,7 @@ public Bytes32 nonceKey(Bytes address) {
    * @param address The address.
    * @return The generated code Keccak key.
    */
-  public Bytes32 codeKeccakKey(Bytes address) {
+  public Bytes32 codeKeccakKey(final Bytes address) {
     return headerKey(address, CODE_KECCAK_LEAF_KEY);
   }
 
@@ -190,11 +212,11 @@ public Bytes32 codeKeccakKey(Bytes address) {
    * @param address The address.
    * @return The generated code size key.
    */
-  public Bytes32 codeSizeKey(Bytes address) {
+  public Bytes32 codeSizeKey(final Bytes address) {
     return (headerKey(address, CODE_SIZE_LEAF_KEY));
   }
 
-  public int getNbChunk(Bytes bytecode) {
+  public int getNbChunk(final Bytes bytecode) {
     return bytecode.isEmpty() ? 0 : (1 + ((bytecode.size() - 1) / CHUNK_SIZE));
   }
   /**
@@ -204,15 +226,15 @@ public int getNbChunk(Bytes bytecode) {
    * @param bytecode Code's bytecode
    * @return List of 32-bytes code chunks
    */
-  public List chunkifyCode(Bytes bytecode) {
+  public List chunkifyCode(final Bytes bytecode) {
     if (bytecode.isEmpty()) {
       return new ArrayList<>();
     }
 
     // Chunking variables
     final int CHUNK_SIZE = 31;
-    int nChunks = getNbChunk(bytecode);
-    int padSize = nChunks * CHUNK_SIZE - bytecode.size();
+    final int nChunks = getNbChunk(bytecode);
+    final int padSize = nChunks * CHUNK_SIZE - bytecode.size();
     final Bytes code = Bytes.concatenate(bytecode, Bytes.repeat((byte) 0, padSize));
     final List chunks = new ArrayList<>(nChunks);
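Note (not part of the diff): a minimal sketch of how the refactored TrieKeyAdapter pieces compose — a tree key is now built from a 31-byte stem plus a one-byte suffix. The class name, address and slot values are illustrative only; the no-arg constructor is the one added above.

```java
import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.units.bigints.UInt256;

import org.hyperledger.besu.ethereum.trie.verkle.adapter.TrieKeyAdapter;

public class StorageKeySketch {
  public static void main(final String[] args) {
    // Uses the new no-arg constructor (default Pedersen hasher).
    final TrieKeyAdapter adapter = new TrieKeyAdapter();

    final Bytes address = Bytes.fromHexString("0x00112233445566778899aabbccddeeff00112233");
    final Bytes32 slot = UInt256.valueOf(64).toBytes();

    // 31-byte stem derived from the slot's trie index, one-byte suffix from the slot itself.
    final Bytes stem = adapter.getStorageStem(address, slot);
    final UInt256 suffix = adapter.getStorageKeySuffix(slot);
    final Bytes32 key = adapter.swapLastByte(stem, suffix);

    // Matches the one-shot helper.
    assert key.equals(adapter.storageKey(address, slot));
  }
}
```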
diff --git a/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/adapter/TrieKeyBatchAdapter.java b/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/adapter/TrieKeyBatchAdapter.java
index 81cd4dd..4384158 100644
--- a/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/adapter/TrieKeyBatchAdapter.java
+++ b/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/adapter/TrieKeyBatchAdapter.java
@@ -15,8 +15,6 @@
  */
 package org.hyperledger.besu.ethereum.trie.verkle.adapter;
 
-import static org.hyperledger.besu.ethereum.trie.verkle.util.Parameters.VERKLE_NODE_WIDTH;
-
 import org.hyperledger.besu.ethereum.trie.verkle.hasher.Hasher;
 
 import java.util.ArrayList;
@@ -40,25 +38,24 @@ public TrieKeyBatchAdapter(final Hasher hasher) {
     super(hasher);
   }
 
-  public Map<Bytes32, Bytes32> manyTrieKeyHashes(
+  public Map<Bytes32, Bytes> manyStems(
       final Bytes address,
       final List<Bytes32> headerKeys,
       final List<Bytes32> storageKeys,
       final List<Bytes32> codeChunkIds) {
-    final Set<UInt256> offsets = new HashSet<>();
+    final Set<UInt256> trieIndex = new HashSet<>();
 
     if (headerKeys.size() > 0) {
-      offsets.add(UInt256.ZERO);
+      trieIndex.add(UInt256.ZERO);
     }
     for (Bytes32 storageKey : storageKeys) {
-      offsets.add(locateStorageKeyOffset(storageKey));
+      trieIndex.add(getStorageKeyTrieIndex(storageKey));
     }
     for (Bytes32 codeChunkId : codeChunkIds) {
-      final UInt256 codeChunkOffset = locateCodeChunkKeyOffset(codeChunkId);
-      offsets.add(codeChunkOffset.divide(VERKLE_NODE_WIDTH));
+      trieIndex.add(getCodeChunkKeyTrieIndex(codeChunkId));
     }
 
-    return getHasher().manyTrieKeyHashes(address, new ArrayList<>(offsets));
+    return getHasher().manyStems(address, new ArrayList<>(trieIndex));
   }
 }
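Note (not part of the diff): a sketch of how the renamed batch API is meant to be used together with the cache-backed hasher introduced in the next file. The cache size of 100 and the sample slot/chunk ids are arbitrary, and it assumes the Bytes32-keyed map signature reconstructed above.

```java
import java.util.List;
import java.util.Map;

import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.units.bigints.UInt256;

import org.hyperledger.besu.ethereum.trie.verkle.adapter.TrieKeyAdapter;
import org.hyperledger.besu.ethereum.trie.verkle.adapter.TrieKeyBatchAdapter;
import org.hyperledger.besu.ethereum.trie.verkle.hasher.CachedPedersenHasher;
import org.hyperledger.besu.ethereum.trie.verkle.hasher.PedersenHasher;

public class StemPreloadSketch {
  public static void main(final String[] args) {
    final Bytes address = Bytes.fromHexString("0x00112233445566778899aabbccddeeff00112233");

    // One batched call (LibIpaMultipoint.hashMany) computes a stem per distinct trie index.
    final TrieKeyBatchAdapter batchAdapter = new TrieKeyBatchAdapter(new PedersenHasher());
    final Map<Bytes32, Bytes> stems =
        batchAdapter.manyStems(
            address,
            List.of(),                              // no header leaf keys in this example
            List.of(UInt256.valueOf(64).toBytes()), // storage slots
            List.of(UInt256.ZERO.toBytes()));       // code chunk ids

    // Key derivations for those slots/chunks are then served from the preloaded cache.
    final TrieKeyAdapter adapter = new TrieKeyAdapter(new CachedPedersenHasher(100, stems));
    final Bytes32 storageKey = adapter.storageKey(address, UInt256.valueOf(64).toBytes());
  }
}
```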
diff --git a/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/hasher/CachedPedersenHasher.java b/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/hasher/CachedPedersenHasher.java
index e7dd034..5052a5d 100644
--- a/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/hasher/CachedPedersenHasher.java
+++ b/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/hasher/CachedPedersenHasher.java
@@ -15,25 +15,32 @@
  */
 package org.hyperledger.besu.ethereum.trie.verkle.hasher;
 
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
 import org.apache.tuweni.bytes.Bytes;
 import org.apache.tuweni.bytes.Bytes32;
 
 public class CachedPedersenHasher implements Hasher {
-  private final Map<Bytes32, Bytes32> preloadedTrieKeyHashes;
+  private final Cache<Bytes32, Bytes> stemCache;
   private final Hasher fallbackHasher;
 
-  public CachedPedersenHasher(final Map<Bytes32, Bytes32> preloadedTrieKeyHashes) {
-    this.preloadedTrieKeyHashes = preloadedTrieKeyHashes;
-    this.fallbackHasher = new PedersenHasher();
+  public CachedPedersenHasher(final int cacheSize) {
+    this(cacheSize, new HashMap<>());
+  }
+
+  public CachedPedersenHasher(final int cacheSize, final Map<Bytes32, Bytes> preloadedStems) {
+    this(cacheSize, preloadedStems, new PedersenHasher());
   }
 
   public CachedPedersenHasher(
-      final Map<Bytes32, Bytes32> preloadedTrieKeyHashes, final Hasher fallbackHasher) {
-    this.preloadedTrieKeyHashes = preloadedTrieKeyHashes;
+      final int cacheSize, final Map<Bytes32, Bytes> preloadedStems, final Hasher fallbackHasher) {
+    this.stemCache = CacheBuilder.newBuilder().maximumSize(cacheSize).build();
+    this.stemCache.putAll(preloadedStems);
     this.fallbackHasher = fallbackHasher;
   }
 
@@ -62,18 +69,22 @@ public Bytes32 compress(Bytes commitment) {
   }
 
   @Override
-  public Bytes32 trieKeyHash(final Bytes bytes, final Bytes32 bytes32) {
-    final Bytes32 hash = preloadedTrieKeyHashes.get(bytes32);
-    if (hash != null) {
-      return hash;
+  public Bytes computeStem(final Bytes address, final Bytes32 trieKeyIndex) {
+    Bytes stem = stemCache.getIfPresent(trieKeyIndex);
+    if (stem != null) {
+      return stem;
     } else {
-      return fallbackHasher.trieKeyHash(bytes, bytes32);
+      stem = fallbackHasher.computeStem(address, trieKeyIndex);
+      stemCache.put(trieKeyIndex, stem);
+      return stem;
     }
   }
 
   @Override
-  public Map<Bytes32, Bytes32> manyTrieKeyHashes(final Bytes bytes, final List<Bytes32> list) {
-    return fallbackHasher.manyTrieKeyHashes(bytes, list);
+  public Map<Bytes32, Bytes> manyStems(final Bytes address, final List<Bytes32> trieKeyIndexes) {
+    final Map<Bytes32, Bytes> trieKeyHashes = fallbackHasher.manyStems(address, trieKeyIndexes);
+    stemCache.putAll(trieKeyHashes);
+    return trieKeyHashes;
   }
 
   @Override
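Note (not part of the diff): a small usage sketch of the new Guava-backed cache; the cache size of 1000 and the address are arbitrary.

```java
import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.units.bigints.UInt256;

import org.hyperledger.besu.ethereum.trie.verkle.hasher.CachedPedersenHasher;
import org.hyperledger.besu.ethereum.trie.verkle.hasher.Hasher;

public class CachedStemSketch {
  public static void main(final String[] args) {
    // Bounded cache, no preloaded stems; misses fall back to a plain PedersenHasher.
    final Hasher hasher = new CachedPedersenHasher(1000);

    final Bytes address = Bytes.fromHexString("0x00112233445566778899aabbccddeeff00112233");
    final Bytes32 trieIndex = UInt256.ZERO.toBytes();

    // First call misses, delegates to the fallback and stores the resulting stem.
    final Bytes first = hasher.computeStem(address, trieIndex);
    // Second call with the same trie index is answered from the cache.
    final Bytes second = hasher.computeStem(address, trieIndex);
    assert first.equals(second);
  }
}
```

Unlike the previous map-based version, a miss is now written back to the cache after falling through to the fallback hasher, so repeated single-stem lookups benefit as well.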
diff --git a/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/hasher/Hasher.java b/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/hasher/Hasher.java
index 90f539e..f9eef10 100644
--- a/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/hasher/Hasher.java
+++ b/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/hasher/Hasher.java
@@ -87,20 +87,20 @@ Bytes commitUpdate(
   List<Bytes32> hashMany(Bytes[] commitments);
 
   /**
-   * Calculates the hash for an address and index.
+   * Calculates the stem for an address and index.
    *
    * @param address Account address.
    * @param index index in storage.
    * @return trie-key hash
    */
-  Bytes32 trieKeyHash(Bytes address, Bytes32 index);
+  Bytes computeStem(Bytes address, Bytes32 index);
 
   /**
-   * Calculates the hash for an address and indexes.
+   * Calculates the stem for an address and indexes.
    *
    * @param address Account address.
    * @param indexes list of indexes in storage.
    * @return The list of trie-key hashes
    */
-  Map<Bytes32, Bytes32> manyTrieKeyHashes(Bytes address, List<Bytes32> indexes);
+  Map<Bytes32, Bytes> manyStems(Bytes address, List<Bytes32> indexes);
 }
diff --git a/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/hasher/PedersenHasher.java b/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/hasher/PedersenHasher.java
index d71a54f..777f1d9 100644
--- a/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/hasher/PedersenHasher.java
+++ b/src/main/java/org/hyperledger/besu/ethereum/trie/verkle/hasher/PedersenHasher.java
@@ -50,6 +50,9 @@ public class PedersenHasher implements Hasher {
   // making the total number of chunks equal to five.
   private static final int NUM_CHUNKS = 5;
 
+  // Size of the stem is 31 bytes
+  private static final int STEM_SIZE = 31;
+
   /**
    * Commit to a vector of values.
    *
@@ -141,7 +144,7 @@ public List hashMany(final Bytes[] commitments) {
    * @return The trie-key hash
    */
   @Override
-  public Bytes32 trieKeyHash(Bytes address, Bytes32 index) {
+  public Bytes computeStem(Bytes address, Bytes32 index) {
 
     // Pad the address so that it is 32 bytes
     final Bytes32 addr = Bytes32.leftPad(address);
@@ -151,7 +154,7 @@ public Bytes32 trieKeyHash(Bytes address, Bytes32 index) {
 
     final Bytes hash =
         Bytes.wrap(
             LibIpaMultipoint.hash(LibIpaMultipoint.commit(Bytes.concatenate(chunks).toArray())));
-    return Bytes32.wrap(hash);
+    return hash.slice(0, STEM_SIZE);
   }
 
   /**
@@ -162,7 +165,7 @@ public Bytes32 trieKeyHash(Bytes address, Bytes32 index) {
    * @return The list of trie-key hashes
    */
   @Override
-  public Map<Bytes32, Bytes32> manyTrieKeyHashes(Bytes address, List<Bytes32> indexes) {
+  public Map<Bytes32, Bytes> manyStems(Bytes address, List<Bytes32> indexes) {
 
     // Pad the address so that it is 32 bytes
     final Bytes32 addr = Bytes32.leftPad(address);
@@ -176,12 +179,12 @@ public Map manyTrieKeyHashes(Bytes address, List inde
 
       final Bytes hashMany = Bytes.wrap(LibIpaMultipoint.hashMany(outputStream.toByteArray()));
 
-      final Map<Bytes32, Bytes32> hashes = new HashMap<>();
+      final Map<Bytes32, Bytes> stems = new HashMap<>();
       for (int i = 0; i < indexes.size(); i++) {
         // Slice input into 16 byte segments
-        hashes.put(indexes.get(i), Bytes32.wrap(hashMany.slice(i * Bytes32.SIZE, Bytes32.SIZE)));
+        stems.put(indexes.get(i), Bytes.wrap(hashMany.slice(i * Bytes32.SIZE, STEM_SIZE)));
       }
-      return hashes;
+      return stems;
     } catch (IOException e) {
       throw new RuntimeException("unable to generate trie key hash", e);
     }
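Note (not part of the diff): an illustrative sketch of the new Hasher contract — computeStem returns the 31-byte stem, and appending a one-byte suffix yields the full 32-byte tree key. It assumes the version leaf suffix is 0, as in EIP-6800; names and the address value are arbitrary.

```java
import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.units.bigints.UInt256;

import org.hyperledger.besu.ethereum.trie.verkle.adapter.TrieKeyAdapter;
import org.hyperledger.besu.ethereum.trie.verkle.hasher.PedersenHasher;

public class StemLayoutSketch {
  public static void main(final String[] args) {
    final PedersenHasher hasher = new PedersenHasher();
    final Bytes address = Bytes.fromHexString("0x00112233445566778899aabbccddeeff00112233");

    // Stem for trie index 0, the account header group: 31 bytes, not 32.
    final Bytes stem = hasher.computeStem(address, UInt256.ZERO.toBytes());
    assert stem.size() == 31;

    // Appending the version leaf suffix (0) reproduces the adapter's versionKey.
    final TrieKeyAdapter adapter = new TrieKeyAdapter(hasher);
    final Bytes32 versionKey = adapter.swapLastByte(stem, UInt256.ZERO);
    assert versionKey.equals(adapter.versionKey(address));
  }
}
```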
diff --git a/src/test/java/org/hyperledger/besu/ethereum/trie/verkle/TrieKeyBatchAdapterTest.java b/src/test/java/org/hyperledger/besu/ethereum/trie/verkle/TrieKeyBatchAdapterTest.java
index 0a9b5ee..b7f4cdc 100644
--- a/src/test/java/org/hyperledger/besu/ethereum/trie/verkle/TrieKeyBatchAdapterTest.java
+++ b/src/test/java/org/hyperledger/besu/ethereum/trie/verkle/TrieKeyBatchAdapterTest.java
@@ -52,10 +52,10 @@ public void testAccountKeys() {
     expectedIndexes.add(Parameters.CODE_KECCAK_LEAF_KEY);
     expectedIndexes.add(Parameters.CODE_SIZE_LEAF_KEY);
 
-    final Map<Bytes32, Bytes32> generatedHashes =
-        adapter.manyTrieKeyHashes(address, expectedIndexes, new ArrayList<>(), new ArrayList<>());
+    final Map<Bytes32, Bytes> generatedStems =
+        adapter.manyStems(address, expectedIndexes, new ArrayList<>(), new ArrayList<>());
     final TrieKeyAdapter cachedTrieKeyAdapter =
-        new TrieKeyAdapter(new CachedPedersenHasher(generatedHashes, new FailedHasher()));
+        new TrieKeyAdapter(new CachedPedersenHasher(100, generatedStems, new FailedHasher()));
     assertThat(cachedTrieKeyAdapter.versionKey(address))
         .isEqualTo(
             Bytes32.fromHexString(
@@ -94,12 +94,11 @@ public void testAccountKeysWithStorage() {
     expectedIndexes.add(storage);
     expectedIndexes.add(storage2);
 
-    final Map<Bytes32, Bytes32> generatedHashes =
-        adapter.manyTrieKeyHashes(
-            address, expectedIndexes, List.of(storage, storage2), new ArrayList<>());
+    final Map<Bytes32, Bytes> generatedStems =
+        adapter.manyStems(address, expectedIndexes, List.of(storage, storage2), new ArrayList<>());
     final TrieKeyAdapter cachedTrieKeyAdapter =
-        new TrieKeyAdapter(new CachedPedersenHasher(generatedHashes, new FailedHasher()));
+        new TrieKeyAdapter(new CachedPedersenHasher(100, generatedStems, new FailedHasher()));
     assertThat(cachedTrieKeyAdapter.versionKey(address))
         .isEqualTo(
             Bytes32.fromHexString(
@@ -142,10 +141,10 @@ public void testAccountKeysWithCode() {
     final UInt256 chunkId = UInt256.valueOf(24);
     expectedIndexes.add(chunkId);
 
-    final Map<Bytes32, Bytes32> generatedHashes =
-        adapter.manyTrieKeyHashes(address, expectedIndexes, new ArrayList<>(), List.of(chunkId));
+    final Map<Bytes32, Bytes> generatedStems =
+        adapter.manyStems(address, expectedIndexes, new ArrayList<>(), List.of(chunkId));
     final TrieKeyAdapter cachedTrieKeyAdapter =
-        new TrieKeyAdapter(new CachedPedersenHasher(generatedHashes, new FailedHasher()));
+        new TrieKeyAdapter(new CachedPedersenHasher(100, generatedStems, new FailedHasher()));
     assertThat(cachedTrieKeyAdapter.versionKey(address))
         .isEqualTo(
             Bytes32.fromHexString(
@@ -203,11 +202,11 @@ public void TestContractCode(TestCodeData testData) {
             .toList();
     assertThat(chunks.size()).as("Same number of chunks").isEqualTo(testData.chunks.size());
 
-    final Map<Bytes32, Bytes32> generatedHashes =
-        adapter.manyTrieKeyHashes(addr, new ArrayList<>(), new ArrayList<>(), chunkIds);
+    final Map<Bytes32, Bytes> generatedStems =
+        adapter.manyStems(addr, new ArrayList<>(), new ArrayList<>(), chunkIds);
     final TrieKeyAdapter cachedTrieKeyAdapter =
-        new TrieKeyAdapter(new CachedPedersenHasher(generatedHashes, new FailedHasher()));
+        new TrieKeyAdapter(new CachedPedersenHasher(100, generatedStems, new FailedHasher()));
     for (int i = 0; i < chunks.size(); ++i) {
       Bytes32 key = cachedTrieKeyAdapter.codeChunkKey(addr, UInt256.valueOf(i));
       Bytes32 expectedKey = Bytes32.fromHexString(testData.chunks.get(i).key);
@@ -220,7 +219,7 @@ public void TestContractCode(TestCodeData testData) {
 
   private static class FailedHasher extends PedersenHasher {
     @Override
-    public Bytes32 trieKeyHash(final Bytes address, final Bytes32 index) {
+    public Bytes32 computeStem(final Bytes address, final Bytes32 index) {
       throw new RuntimeException("should be found in the cache not in the fallback hasher");
     }
   }
diff --git a/src/test/java/org/hyperledger/besu/ethereum/trie/verkle/hasher/PedersenHasherTest.java b/src/test/java/org/hyperledger/besu/ethereum/trie/verkle/hasher/PedersenHasherTest.java
index 343572e..a83e2cd 100644
--- a/src/test/java/org/hyperledger/besu/ethereum/trie/verkle/hasher/PedersenHasherTest.java
+++ b/src/test/java/org/hyperledger/besu/ethereum/trie/verkle/hasher/PedersenHasherTest.java
@@ -41,9 +41,9 @@ public void testGetTreeKey() {
     n = n.shiftLeft(129);
     n = n.add(BigInteger.valueOf(3));
     Bytes32 index = UInt256.valueOf(n).toBytes();
-    Bytes32 tk = hasher.trieKeyHash(address, index);
+    Bytes tk = hasher.computeStem(address, index);
     String got = tk.toHexString();
-    String exp = "0x6ede905763d5856cd2d67936541e82aa78f7141bf8cd5ff6c962170f3e9dc201";
+    String exp = "0x6ede905763d5856cd2d67936541e82aa78f7141bf8cd5ff6c962170f3e9dc2";
     assertEquals(exp, got);
   }
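Note on the updated PedersenHasherTest fixture (an illustrative check, not part of the diff): the expected value shrinks from 32 to 31 bytes because the returned stem no longer carries the final suffix byte.

```java
import org.apache.tuweni.bytes.Bytes;

public class StemFixtureLengthCheck {
  public static void main(final String[] args) {
    final Bytes oldExpected =
        Bytes.fromHexString("0x6ede905763d5856cd2d67936541e82aa78f7141bf8cd5ff6c962170f3e9dc201");
    final Bytes newExpected =
        Bytes.fromHexString("0x6ede905763d5856cd2d67936541e82aa78f7141bf8cd5ff6c962170f3e9dc2");

    assert oldExpected.size() == 32; // full trie-key hash
    assert newExpected.size() == 31; // stem only
    assert oldExpected.slice(0, 31).equals(newExpected); // the stem is the hash minus its last byte
  }
}
```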