diff --git a/l1-contracts/src/periphery/Forwarder.sol b/l1-contracts/src/periphery/Forwarder.sol new file mode 100644 index 00000000000..a921b2ab500 --- /dev/null +++ b/l1-contracts/src/periphery/Forwarder.sol @@ -0,0 +1,22 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. +pragma solidity >=0.8.27; + +import {Ownable} from "@oz/access/Ownable.sol"; +import {Address} from "@oz/utils/Address.sol"; +import {IForwarder} from "./interfaces/IForwarder.sol"; + +contract Forwarder is Ownable, IForwarder { + using Address for address; + + constructor(address __owner) Ownable(__owner) {} + + function forward(address[] calldata _to, bytes[] calldata _data) external override onlyOwner { + require( + _to.length == _data.length, IForwarder.ForwarderLengthMismatch(_to.length, _data.length) + ); + for (uint256 i = 0; i < _to.length; i++) { + _to[i].functionCall(_data[i]); + } + } +} diff --git a/l1-contracts/src/periphery/interfaces/IForwarder.sol b/l1-contracts/src/periphery/interfaces/IForwarder.sol new file mode 100644 index 00000000000..f30e3ed7e9b --- /dev/null +++ b/l1-contracts/src/periphery/interfaces/IForwarder.sol @@ -0,0 +1,9 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. +pragma solidity >=0.8.27; + +interface IForwarder { + error ForwarderLengthMismatch(uint256 toLength, uint256 dataLength); // 3a2aeb4d + + function forward(address[] calldata _to, bytes[] calldata _data) external; +} diff --git a/l1-contracts/test/Forwarder.t.sol b/l1-contracts/test/Forwarder.t.sol new file mode 100644 index 00000000000..8599dcca51f --- /dev/null +++ b/l1-contracts/test/Forwarder.t.sol @@ -0,0 +1,83 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. 
+pragma solidity >=0.8.27; + +import {Test} from "forge-std/Test.sol"; +import {Forwarder} from "../src/periphery/Forwarder.sol"; +import {IForwarder} from "../src/periphery/interfaces/IForwarder.sol"; +import {TestERC20} from "@aztec/mock/TestERC20.sol"; +import {Ownable} from "@oz/access/Ownable.sol"; +// solhint-disable comprehensive-interface + +contract ForwarderTest is Test { + Forwarder public forwarder; + TestERC20 public token1; + TestERC20 public token2; + address public owner; + address public user; + + function setUp() public { + owner = makeAddr("owner"); + user = makeAddr("user"); + + vm.prank(owner); + forwarder = new Forwarder(owner); + + token1 = new TestERC20("Token1", "TK1", address(forwarder)); + token2 = new TestERC20("Token2", "TK2", address(forwarder)); + } + + function testForward() public { + // Setup test data + address[] memory targets = new address[](2); + targets[0] = address(token1); + targets[1] = address(token2); + + bytes[] memory data = new bytes[](2); + data[0] = abi.encodeCall(TestERC20.mint, (address(this), 100)); + data[1] = abi.encodeCall(TestERC20.mint, (address(this), 200)); + + // Execute forward call + vm.prank(owner); + forwarder.forward(targets, data); + + // Verify results + assertEq(token1.balanceOf(address(this)), 100); + assertEq(token2.balanceOf(address(this)), 200); + } + + function testRevertWhenNotOwner(address _user) public { + address[] memory targets = new address[](1); + bytes[] memory data = new bytes[](1); + + vm.assume(_user != owner); + vm.prank(_user); + vm.expectRevert(abi.encodeWithSelector(Ownable.OwnableUnauthorizedAccount.selector, _user)); + forwarder.forward(targets, data); + } + + function testRevertWhenLengthMismatch() public { + address[] memory targets = new address[](2); + bytes[] memory data = new bytes[](1); + + vm.prank(owner); + vm.expectRevert(abi.encodeWithSelector(IForwarder.ForwarderLengthMismatch.selector, 2, 1)); + forwarder.forward(targets, data); + } + + function 
testRevertWhenCallToInvalidAddress(address _invalidAddress) public { + vm.assume(_invalidAddress != address(token1)); + vm.assume(_invalidAddress != address(token2)); + vm.assume(_invalidAddress != address(forwarder)); + + address[] memory targets = new address[](1); + targets[0] = _invalidAddress; + + bytes[] memory data = new bytes[](1); + data[0] = hex"12345678"; + + vm.prank(owner); + vm.expectRevert(); + forwarder.forward(targets, data); + } +} diff --git a/spartan/aztec-network/values/1-validators.yaml b/spartan/aztec-network/values/1-validators.yaml index 53038895c1f..98224059e7f 100644 --- a/spartan/aztec-network/values/1-validators.yaml +++ b/spartan/aztec-network/values/1-validators.yaml @@ -1,3 +1,6 @@ +telemetry: + enabled: true + validator: replicas: 1 validatorKeys: @@ -10,3 +13,23 @@ validator: bootNode: validator: disabled: true + +ethereum: + execution: + resources: + requests: + memory: "2Gi" + cpu: "1" + storageSize: "10Gi" + beacon: + resources: + requests: + memory: "2Gi" + cpu: "1" + storageSize: "10Gi" + validator: + resources: + requests: + memory: "2Gi" + cpu: "1" + storageSize: "10Gi" diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts index 0f246740805..6d31d0dac9e 100644 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ b/yarn-project/archiver/src/archiver/archiver.test.ts @@ -7,7 +7,7 @@ import { EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { sleep } from '@aztec/foundation/sleep'; -import { type InboxAbi, RollupAbi } from '@aztec/l1-artifacts'; +import { ForwarderAbi, type InboxAbi, RollupAbi } from '@aztec/l1-artifacts'; import { getTelemetryClient } from '@aztec/telemetry-client'; import { jest } from '@jest/globals'; @@ -80,6 +80,7 @@ describe('Archiver', () => { let mockRollup: { read: typeof mockRollupRead; getEvents: typeof 
mockRollupEvents; + address: string; }; let mockInbox: { read: typeof mockInboxRead; @@ -147,6 +148,7 @@ describe('Archiver', () => { mockRollup = { read: mockRollupRead, getEvents: mockRollupEvents, + address: rollupAddress.toString(), }; (archiver as any).rollup = mockRollup; @@ -571,7 +573,7 @@ async function makeRollupTx(l2Block: L2Block) { const blobInput = Blob.getEthBlobEvaluationInputs(await Blob.getBlobs(l2Block.body.toBlobFields())); const archive = toHex(l2Block.archive.root.toBuffer()); const blockHash = toHex((await l2Block.header.hash()).toBuffer()); - const input = encodeFunctionData({ + const rollupInput = encodeFunctionData({ abi: RollupAbi, functionName: 'propose', args: [ @@ -581,7 +583,14 @@ async function makeRollupTx(l2Block: L2Block) { blobInput, ], }); - return { input } as Transaction; + + const forwarderInput = encodeFunctionData({ + abi: ForwarderAbi, + functionName: 'forward', + args: [[EthAddress.ZERO.toString()], [rollupInput]], + }); + + return { input: forwarderInput } as Transaction; } /** diff --git a/yarn-project/archiver/src/archiver/data_retrieval.ts b/yarn-project/archiver/src/archiver/data_retrieval.ts index e14f2adbe0e..080c41812c4 100644 --- a/yarn-project/archiver/src/archiver/data_retrieval.ts +++ b/yarn-project/archiver/src/archiver/data_retrieval.ts @@ -7,7 +7,7 @@ import { type EthAddress } from '@aztec/foundation/eth-address'; import { type ViemSignature } from '@aztec/foundation/eth-signature'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { numToUInt32BE } from '@aztec/foundation/serialize'; -import { type InboxAbi, RollupAbi } from '@aztec/l1-artifacts'; +import { ForwarderAbi, type InboxAbi, RollupAbi } from '@aztec/l1-artifacts'; import { type Chain, @@ -108,6 +108,7 @@ export async function processL2BlockProposedLogs( log.transactionHash!, blobHashes, l2BlockNumber, + rollup.address, ); const l1: L1PublishedData = { @@ -133,6 +134,57 @@ export async function 
getL1BlockTime(publicClient: PublicClient, blockNumber: bi return block.timestamp; } +/** + * Extracts the first 'propose' method calldata from a forwarder transaction's data. + * @param forwarderData - The forwarder transaction input data + * @param rollupAddress - The address of the rollup contract + * @returns The calldata for the first 'propose' method call to the rollup contract + */ +function extractRollupProposeCalldata(forwarderData: Hex, rollupAddress: Hex): Hex { + // TODO(#11451): custom forwarders + const { functionName: forwarderFunctionName, args: forwarderArgs } = decodeFunctionData({ + abi: ForwarderAbi, + data: forwarderData, + }); + + if (forwarderFunctionName !== 'forward') { + throw new Error(`Unexpected forwarder method called ${forwarderFunctionName}`); + } + + if (forwarderArgs.length !== 2) { + throw new Error(`Unexpected number of arguments for forwarder`); + } + + const [to, data] = forwarderArgs; + + // Find all rollup calls + const rollupAddressLower = rollupAddress.toLowerCase(); + + for (let i = 0; i < to.length; i++) { + const addr = to[i]; + if (addr.toLowerCase() !== rollupAddressLower) { + continue; + } + const callData = data[i]; + + try { + const { functionName: rollupFunctionName } = decodeFunctionData({ + abi: RollupAbi, + data: callData, + }); + + if (rollupFunctionName === 'propose') { + return callData; + } + } catch (err) { + // Skip invalid function data + continue; + } + } + + throw new Error(`Rollup address not found in forwarder args`); +} + /** * Gets block from the calldata of an L1 transaction. * Assumes that the block was published from an EOA. @@ -148,18 +200,22 @@ async function getBlockFromRollupTx( txHash: `0x${string}`, blobHashes: Buffer[], // WORKTODO(md): buffer32? 
l2BlockNum: bigint, + rollupAddress: Hex, ): Promise { - const { input: data, blockHash } = await publicClient.getTransaction({ hash: txHash }); - - const { functionName, args } = decodeFunctionData({ abi: RollupAbi, data }); + const { input: forwarderData, blockHash } = await publicClient.getTransaction({ hash: txHash }); - const allowedMethods = ['propose', 'proposeAndClaim']; + const rollupData = extractRollupProposeCalldata(forwarderData, rollupAddress); + const { functionName: rollupFunctionName, args: rollupArgs } = decodeFunctionData({ + abi: RollupAbi, + data: rollupData, + }); - if (!allowedMethods.includes(functionName)) { - throw new Error(`Unexpected method called ${functionName}`); + if (rollupFunctionName !== 'propose') { + throw new Error(`Unexpected rollup method called ${rollupFunctionName}`); } + // TODO(#9101): 'bodyHex' will be removed from below - const [decodedArgs, , bodyHex, blobInputs] = args! as readonly [ + const [decodedArgs, , bodyHex, blobInputs] = rollupArgs! 
as readonly [ { header: Hex; archive: Hex; diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 48731a15c7e..8d0f053a989 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -68,8 +68,8 @@ import { type P2P, createP2PClient } from '@aztec/p2p'; import { ProtocolContractAddress } from '@aztec/protocol-contracts'; import { GlobalVariableBuilder, - type L1Publisher, SequencerClient, + type SequencerPublisher, createSlasherClient, createValidatorForAcceptingTxs, getDefaultAllowedSetupFunctions, @@ -144,7 +144,7 @@ export class AztecNodeService implements AztecNode, Traceable { deps: { telemetry?: TelemetryClient; logger?: Logger; - publisher?: L1Publisher; + publisher?: SequencerPublisher; dateProvider?: DateProvider; blobSinkClient?: BlobSinkClientInterface; } = {}, diff --git a/yarn-project/aztec.js/src/utils/anvil_test_watcher.ts b/yarn-project/aztec.js/src/utils/anvil_test_watcher.ts index bc1854ac9cc..52340b99e02 100644 --- a/yarn-project/aztec.js/src/utils/anvil_test_watcher.ts +++ b/yarn-project/aztec.js/src/utils/anvil_test_watcher.ts @@ -15,9 +15,12 @@ import type * as chains from 'viem/chains'; * block within the slot. And if so, it will time travel into the next slot. 
*/ export class AnvilTestWatcher { + private isSandbox: boolean = false; + private rollup: GetContractReturnType>; private filledRunningPromise?: RunningPromise; + private mineIfOutdatedPromise?: RunningPromise; private logger: Logger = createLogger(`aztecjs:utils:watcher`); @@ -36,6 +39,10 @@ export class AnvilTestWatcher { this.logger.debug(`Watcher created for rollup at ${rollupAddress}`); } + setIsSandbox(isSandbox: boolean) { + this.isSandbox = isSandbox; + } + async start() { if (this.filledRunningPromise) { throw new Error('Watcher already watching for filled slot'); @@ -50,6 +57,8 @@ export class AnvilTestWatcher { if (isAutoMining) { this.filledRunningPromise = new RunningPromise(() => this.warpTimeIfNeeded(), this.logger, 1000); this.filledRunningPromise.start(); + this.mineIfOutdatedPromise = new RunningPromise(() => this.mineIfOutdated(), this.logger, 1000); + this.mineIfOutdatedPromise.start(); this.logger.info(`Watcher started for rollup at ${this.rollup.address}`); } else { this.logger.info(`Watcher not started because not auto mining`); @@ -58,6 +67,27 @@ export class AnvilTestWatcher { async stop() { await this.filledRunningPromise?.stop(); + await this.mineIfOutdatedPromise?.stop(); + } + + async mineIfOutdated() { + // this doesn't apply to the sandbox, because we don't have a date provider in the sandbox + if (!this.dateProvider) { + return; + } + + const l1Time = (await this.cheatcodes.timestamp()) * 1000; + const wallTime = this.dateProvider.now(); + + // If the wall time is more than 24 seconds away from L1 time, + // mine a block and sync the clocks + if (Math.abs(wallTime - l1Time) > 24 * 1000) { + this.logger.warn(`Wall time is more than 24 seconds away from L1 time, mining a block and syncing clocks`); + await this.cheatcodes.evmMine(); + const newL1Time = await this.cheatcodes.timestamp(); + this.logger.info(`New L1 time: ${newL1Time}`); + this.dateProvider.setTime(newL1Time * 1000); + } } async warpTimeIfNeeded() { @@ -80,6 +110,11 @@ 
export class AnvilTestWatcher { return; } + // If we are not in sandbox, we don't need to warp time + if (!this.isSandbox) { + return; + } + const currentTimestamp = this.dateProvider?.now() ?? Date.now(); if (currentTimestamp > nextSlotTimestamp * 1000) { try { diff --git a/yarn-project/aztec/src/sandbox.ts b/yarn-project/aztec/src/sandbox.ts index 65ee1391374..9945ae86c04 100644 --- a/yarn-project/aztec/src/sandbox.ts +++ b/yarn-project/aztec/src/sandbox.ts @@ -116,6 +116,7 @@ export async function createSandbox(config: Partial = {}) { l1ContractAddresses.rollupAddress, publicClient, ); + watcher.setIsSandbox(true); await watcher.start(); } diff --git a/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote_payload.ts b/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote_payload.ts index 71d9c7ccc8e..4e98de66f28 100644 --- a/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote_payload.ts +++ b/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote_payload.ts @@ -1,3 +1,4 @@ +import type { EpochProofQuoteViemArgs } from '@aztec/ethereum'; import { EthAddress } from '@aztec/foundation/eth-address'; import { schemas } from '@aztec/foundation/schemas'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; @@ -98,13 +99,7 @@ export class EpochProofQuotePayload { .transform(EpochProofQuotePayload.from); } - toViemArgs(): { - epochToProve: bigint; - validUntilSlot: bigint; - bondAmount: bigint; - prover: `0x${string}`; - basisPointFee: number; - } { + toViemArgs(): EpochProofQuoteViemArgs { return { epochToProve: this.epochToProve, validUntilSlot: this.validUntilSlot, diff --git a/yarn-project/cli/src/cmds/l1/update_l1_validators.ts b/yarn-project/cli/src/cmds/l1/update_l1_validators.ts index abbf6d1f266..606643f3536 100644 --- a/yarn-project/cli/src/cmds/l1/update_l1_validators.ts +++ b/yarn-project/cli/src/cmds/l1/update_l1_validators.ts @@ -1,8 +1,13 @@ import { EthCheatCodes } from 
'@aztec/aztec.js'; import { type EthAddress } from '@aztec/circuits.js'; -import { createEthereumChain, getL1ContractsConfigEnvVars, isAnvilTestChain } from '@aztec/ethereum'; +import { + createEthereumChain, + getExpectedAddress, + getL1ContractsConfigEnvVars, + isAnvilTestChain, +} from '@aztec/ethereum'; import { type LogFn, type Logger } from '@aztec/foundation/log'; -import { RollupAbi, TestERC20Abi } from '@aztec/l1-artifacts'; +import { ForwarderAbi, ForwarderBytecode, RollupAbi, TestERC20Abi } from '@aztec/l1-artifacts'; import { createPublicClient, createWalletClient, getContract, http } from 'viem'; import { generatePrivateKey, mnemonicToAccount, privateKeyToAccount } from 'viem/accounts'; @@ -68,7 +73,9 @@ export async function addL1Validator({ dualLog(`Adding validator ${validatorAddress.toString()} to rollup ${rollupAddress.toString()}`); const txHash = await rollup.write.deposit([ validatorAddress.toString(), - validatorAddress.toString(), + // TODO(#11451): custom forwarders + getExpectedAddress(ForwarderAbi, ForwarderBytecode, [validatorAddress.toString()], validatorAddress.toString()) + .address, withdrawerAddress?.toString() ?? 
validatorAddress.toString(), config.minimumStake, ]); diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index 77a1d18380f..ca19f703e8b 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -38,6 +38,7 @@ "@aztec/circuit-types": "workspace:^", "@aztec/circuits.js": "workspace:^", "@aztec/entrypoints": "workspace:^", + "@aztec/epoch-cache": "workspace:^", "@aztec/ethereum": "workspace:^", "@aztec/foundation": "workspace:^", "@aztec/kv-store": "workspace:^", diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index f1c9aef59ff..1982b5c8269 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -15,19 +15,27 @@ import { } from '@aztec/circuits.js'; import { BlockBlobPublicInputs } from '@aztec/circuits.js/blobs'; import { fr } from '@aztec/circuits.js/testing'; -import { type L1ContractAddresses, createEthereumChain } from '@aztec/ethereum'; +import { EpochCache } from '@aztec/epoch-cache'; +import { + type L1ContractAddresses, + L1TxUtilsWithBlobs, + RollupContract, + createEthereumChain, + createL1Clients, +} from '@aztec/ethereum'; import { EthCheatCodesWithState } from '@aztec/ethereum/test'; import { range } from '@aztec/foundation/array'; import { Blob } from '@aztec/foundation/blob'; import { timesParallel } from '@aztec/foundation/collection'; import { sha256, sha256ToField } from '@aztec/foundation/crypto'; +import { TestDateProvider } from '@aztec/foundation/timer'; import { openTmpStore } from '@aztec/kv-store/lmdb'; -import { OutboxAbi, RollupAbi } from '@aztec/l1-artifacts'; +import { ForwarderAbi, OutboxAbi, RollupAbi } from '@aztec/l1-artifacts'; import { SHA256Trunc, StandardTree } from '@aztec/merkle-tree'; import { getVKTreeRoot } from 
'@aztec/noir-protocol-circuits-types/vks'; import { protocolContractTreeRoot } from '@aztec/protocol-contracts'; import { LightweightBlockBuilder } from '@aztec/prover-client/block-builder'; -import { L1Publisher } from '@aztec/sequencer-client'; +import { SequencerPublisher } from '@aztec/sequencer-client'; import { type MerkleTreeAdminDatabase, NativeWorldStateService, @@ -52,9 +60,10 @@ import { getContract, } from 'viem'; import { type PrivateKeyAccount, privateKeyToAccount } from 'viem/accounts'; +import { foundry } from 'viem/chains'; import { sendL1ToL2Message } from '../fixtures/l1_to_l2_messaging.js'; -import { setupL1Contracts } from '../fixtures/utils.js'; +import { createForwarderContract, setupL1Contracts } from '../fixtures/utils.js'; // Accounts 4 and 5 of Anvil default startup with mnemonic: 'test test test test test test test test test test test junk' const sequencerPK = '0x47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a'; @@ -70,6 +79,8 @@ const numberOfConsecutiveBlocks = 2; const BLOB_SINK_PORT = 5052; const BLOB_SINK_URL = `http://localhost:${BLOB_SINK_PORT}`; +jest.setTimeout(1000000); + describe('L1Publisher integration', () => { let publicClient: PublicClient; let walletClient: WalletClient; @@ -82,7 +93,7 @@ describe('L1Publisher integration', () => { let rollup: GetContractReturnType>; let outbox: GetContractReturnType>; - let publisher: L1Publisher; + let publisher: SequencerPublisher; let builderDb: MerkleTreeAdminDatabase; @@ -171,17 +182,37 @@ describe('L1Publisher integration', () => { worldStateSynchronizer = new ServerWorldStateSynchronizer(builderDb, blockSource, worldStateConfig); await worldStateSynchronizer.start(); - publisher = new L1Publisher({ - l1RpcUrl: config.l1RpcUrl, - requiredConfirmations: 1, - l1Contracts: l1ContractAddresses, - publisherPrivateKey: sequencerPK, - l1PublishRetryIntervalMS: 100, - l1ChainId: 31337, - viemPollingIntervalMS: 100, - ethereumSlotDuration: config.ethereumSlotDuration, - 
blobSinkUrl: BLOB_SINK_URL, + const { walletClient: sequencerWalletClient, publicClient: sequencerPublicClient } = createL1Clients( + config.l1RpcUrl, + sequencerPK, + foundry, + ); + const l1TxUtils = new L1TxUtilsWithBlobs(sequencerPublicClient, sequencerWalletClient, logger, config); + const rollupContract = new RollupContract(sequencerPublicClient, l1ContractAddresses.rollupAddress.toString()); + const forwarderContract = await createForwarderContract(config, sequencerPK); + const epochCache = await EpochCache.create(l1ContractAddresses.rollupAddress, config, { + dateProvider: new TestDateProvider(), }); + publisher = new SequencerPublisher( + { + l1RpcUrl: config.l1RpcUrl, + requiredConfirmations: 1, + l1Contracts: l1ContractAddresses, + publisherPrivateKey: sequencerPK, + l1PublishRetryIntervalMS: 100, + l1ChainId: 31337, + viemPollingIntervalMS: 100, + ethereumSlotDuration: config.ethereumSlotDuration, + blobSinkUrl: BLOB_SINK_URL, + customForwarderContractAddress: EthAddress.ZERO, + }, + { + l1TxUtils, + rollupContract, + forwarderContract, + epochCache, + }, + ); coinbase = config.coinbase || EthAddress.random(); feeRecipient = config.feeRecipient || (await AztecAddress.random()); @@ -413,7 +444,8 @@ describe('L1Publisher integration', () => { deployerAccount.address, ); - await publisher.proposeL2Block(block); + await publisher.enqueueProposeL2Block(block); + await publisher.sendRequests(); blocks.push(block); const logs = await publicClient.getLogs({ @@ -435,7 +467,7 @@ describe('L1Publisher integration', () => { const expectedHash = sha256(Buffer.from(BlockBlobPublicInputs.fromBlobs(blobs).toString().substring(2), 'hex')); expect(blobPublicInputsHash).toEqual(`0x${expectedHash.toString('hex')}`); - const expectedData = encodeFunctionData({ + const expectedRollupData = encodeFunctionData({ abi: RollupAbi, functionName: 'propose', args: [ @@ -455,6 +487,11 @@ describe('L1Publisher integration', () => { Blob.getEthBlobEvaluationInputs(blobs), ], }); + 
const expectedData = encodeFunctionData({ + abi: ForwarderAbi, + functionName: 'forward', + args: [[rollupAddress], [expectedRollupData]], + }); expect(ethTx.input).toEqual(expectedData); const expectedRoot = !numTxs ? Fr.ZERO : await buildL2ToL1MsgTreeRoot(l2ToL1MsgsArray); @@ -501,7 +538,7 @@ describe('L1Publisher integration', () => { // Set up different l1-to-l2 messages than the ones on the inbox, so this submission reverts // because the INBOX.consume does not match the header.contentCommitment.inHash and we get - // a Rollup__InvalidInHash that is not caught by validateHeader before. + // a Rollup__BlobHash that is not caught by validateHeader before. const l1ToL2Messages = new Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(1n)); const txs = await Promise.all([makeProcessedTx(0x1000), makeProcessedTx(0x2000)]); @@ -526,23 +563,22 @@ describe('L1Publisher integration', () => { loggerErrorSpy = jest.spyOn((publisher as any).log, 'error'); // Expect the tx to revert - await expect(publisher.proposeL2Block(block)).resolves.toEqual(false); + await expect(publisher.enqueueProposeL2Block(block)).resolves.toEqual(true); + + await expect(publisher.sendRequests()).resolves.toMatchObject({ + errorMsg: expect.stringContaining('Rollup__InvalidBlobHash'), + }); // Test for both calls + // NOTE: First error is from the simulate fn, which isn't supported by anvil expect(loggerErrorSpy).toHaveBeenCalledTimes(2); - // Test first call - expect(loggerErrorSpy).toHaveBeenNthCalledWith( - 1, - expect.stringMatching(/^L1 transaction 0x[a-f0-9]{64} reverted$/i), - expect.anything(), - ); + expect(loggerErrorSpy).toHaveBeenNthCalledWith(1, 'Bundled [propose] transaction [failed]'); - // Test second call expect(loggerErrorSpy).toHaveBeenNthCalledWith( 2, expect.stringMatching( - /^Rollup process tx reverted\. The contract function "propose" reverted\. Error: Rollup__InvalidInHash/i, + /^Rollup process tx reverted\. The contract function "forward" reverted\. 
Error: Rollup__InvalidBlobHash/i, ), undefined, expect.objectContaining({ diff --git a/yarn-project/end-to-end/src/composed/uniswap_trade_on_l1_from_l2.test.ts b/yarn-project/end-to-end/src/composed/uniswap_trade_on_l1_from_l2.test.ts index 6d0a39b965c..0537f5eef69 100644 --- a/yarn-project/end-to-end/src/composed/uniswap_trade_on_l1_from_l2.test.ts +++ b/yarn-project/end-to-end/src/composed/uniswap_trade_on_l1_from_l2.test.ts @@ -1,4 +1,4 @@ -import { setup as e2eSetup, getL1WalletClient } from '../fixtures/utils.js'; +import { setup as e2eSetup } from '../fixtures/utils.js'; import { type UniswapSetupContext, uniswapL1L2TestSuite } from '../shared/uniswap_l1_l2.js'; // This tests works on forked mainnet. There is a dump of the data in `dumpedState` such that we @@ -18,10 +18,9 @@ const testSetup = async (): Promise => { deployL1ContractsValues, wallets, logger, - config, } = await e2eSetup(2, { stateLoad: dumpedState }); - const walletClient = getL1WalletClient(config.l1RpcUrl, 1); + const walletClient = deployL1ContractsValues.walletClient; const publicClient = deployL1ContractsValues.publicClient; const ownerWallet = wallets[0]; diff --git a/yarn-project/end-to-end/src/e2e_block_building.test.ts b/yarn-project/end-to-end/src/e2e_block_building.test.ts index 9c9d5899fca..6a4f70d45fb 100644 --- a/yarn-project/end-to-end/src/e2e_block_building.test.ts +++ b/yarn-project/end-to-end/src/e2e_block_building.test.ts @@ -508,6 +508,7 @@ describe('e2e_block_building', () => { } = await setup(1, { minTxsPerBlock: 1, skipProtocolContracts: true, + ethereumSlotDuration: 6, })); logger.info('Deploying token contract'); diff --git a/yarn-project/end-to-end/src/e2e_epochs.test.ts b/yarn-project/end-to-end/src/e2e_epochs.test.ts index 62f47dfbaca..6371bc02981 100644 --- a/yarn-project/end-to-end/src/e2e_epochs.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs.test.ts @@ -5,16 +5,20 @@ import { type L1RollupConstants } from '@aztec/circuit-types'; import { Proof } from 
'@aztec/circuits.js'; import { RootRollupPublicInputs } from '@aztec/circuits.js/rollup'; import { RollupContract } from '@aztec/ethereum/contracts'; -import { type Delayer, waitUntilL1Timestamp } from '@aztec/ethereum/test'; +import { type DelayedTxUtils, type Delayer, waitUntilL1Timestamp } from '@aztec/ethereum/test'; import { promiseWithResolvers } from '@aztec/foundation/promise'; +import { type ProverNodePublisher } from '@aztec/prover-node'; import { type TestProverNode } from '@aztec/prover-node/test'; -import { type TestL1Publisher, type TestSequencerClient } from '@aztec/sequencer-client/test'; +import { type SequencerPublisher } from '@aztec/sequencer-client'; +import { type TestSequencerClient } from '@aztec/sequencer-client/test'; import { jest } from '@jest/globals'; import { type PublicClient } from 'viem'; import { type EndToEndContext, setup } from './fixtures/utils.js'; +jest.setTimeout(1000 * 60 * 10); + // Tests building of epochs using fast block times and short epochs. // Spawns an aztec node and a prover node with fake proofs. // Sequencer is allowed to build empty blocks. @@ -59,8 +63,14 @@ describe('e2e_epochs', () => { monitor = new ChainMonitor(rollup, logger); monitor.start(); - proverDelayer = ((context.proverNode as TestProverNode).publisher as TestL1Publisher).delayer!; - sequencerDelayer = ((context.sequencer as TestSequencerClient).sequencer.publisher as TestL1Publisher).delayer!; + // This is hideous. + // We ought to have a definite reference to the l1TxUtils that we're using in both places, provided by the test context. 
+ proverDelayer = ( + ((context.proverNode as TestProverNode).publisher as ProverNodePublisher).l1TxUtils as DelayedTxUtils + ).delayer!; + sequencerDelayer = ( + ((context.sequencer as TestSequencerClient).sequencer.publisher as SequencerPublisher).l1TxUtils as DelayedTxUtils + ).delayer!; expect(proverDelayer).toBeDefined(); expect(sequencerDelayer).toBeDefined(); @@ -79,6 +89,14 @@ describe('e2e_epochs', () => { afterEach(async () => { jest.restoreAllMocks(); monitor.stop(); + await context.proverNode?.stop(); + await context.teardown(); + }); + + afterAll(async () => { + jest.restoreAllMocks(); + monitor.stop(); + await context.proverNode?.stop(); await context.teardown(); }); @@ -96,8 +114,13 @@ describe('e2e_epochs', () => { }; /** Waits until the given L2 block number is marked as proven. */ - const waitUntilProvenL2BlockNumber = async (t: number) => { - await retryUntil(() => Promise.resolve(t === monitor.l2ProvenBlockNumber), `Wait proven L2 block ${t}`, 60, 0.1); + const waitUntilProvenL2BlockNumber = async (t: number, timeout = 60) => { + await retryUntil( + () => Promise.resolve(t === monitor.l2ProvenBlockNumber), + `Wait proven L2 block ${t}`, + timeout, + 0.1, + ); }; it('does not allow submitting proof after epoch end', async () => { @@ -136,10 +159,12 @@ describe('e2e_epochs', () => { it('submits proof claim alone if there are no txs to build a block', async () => { await context.sequencer?.updateSequencerConfig({ minTxsPerBlock: 1 }); await waitUntilEpochStarts(1); + // Sleep to make sure any pending blocks are published + await sleep(L1_BLOCK_TIME_IN_S * 1000); const blockNumberAtEndOfEpoch0 = Number(await rollup.getBlockNumber()); logger.info(`Starting epoch 1 after L2 block ${blockNumberAtEndOfEpoch0}`); - await waitUntilProvenL2BlockNumber(blockNumberAtEndOfEpoch0); + await waitUntilProvenL2BlockNumber(blockNumberAtEndOfEpoch0, 120); expect(monitor.l2BlockNumber).toEqual(blockNumberAtEndOfEpoch0); logger.info(`Test succeeded`); }); diff --git 
a/yarn-project/end-to-end/src/e2e_l1_with_wall_time.test.ts b/yarn-project/end-to-end/src/e2e_l1_with_wall_time.test.ts index 4a312cc80c6..5960f25a374 100644 --- a/yarn-project/end-to-end/src/e2e_l1_with_wall_time.test.ts +++ b/yarn-project/end-to-end/src/e2e_l1_with_wall_time.test.ts @@ -4,10 +4,13 @@ import { EthAddress } from '@aztec/circuits.js'; import { getL1ContractsConfigEnvVars } from '@aztec/ethereum'; import { type PXEService } from '@aztec/pxe'; +import { jest } from '@jest/globals'; import { privateKeyToAccount } from 'viem/accounts'; import { getPrivateKeyFromIndex, setup } from './fixtures/utils.js'; +jest.setTimeout(1000 * 60 * 10); + describe('e2e_l1_with_wall_time', () => { let logger: Logger; let teardown: () => Promise; diff --git a/yarn-project/end-to-end/src/e2e_lending_contract.test.ts b/yarn-project/end-to-end/src/e2e_lending_contract.test.ts index 8e96bff8269..586b1751c5b 100644 --- a/yarn-project/end-to-end/src/e2e_lending_contract.test.ts +++ b/yarn-project/end-to-end/src/e2e_lending_contract.test.ts @@ -1,4 +1,5 @@ import { type AccountWallet, type CheatCodes, type DeployL1Contracts, Fr, type Logger } from '@aztec/aztec.js'; +import { type TestDateProvider } from '@aztec/foundation/timer'; import { RollupAbi } from '@aztec/l1-artifacts'; import { LendingContract } from '@aztec/noir-contracts.js/Lending'; import { PriceFeedContract } from '@aztec/noir-contracts.js/PriceFeed'; @@ -29,6 +30,7 @@ describe('e2e_lending_contract', () => { let lendingAccount: LendingAccount; let lendingSim: LendingSimulator; + let dateProvider: TestDateProvider | undefined; const deployContracts = async () => { logger.info(`Deploying price feed contract...`); @@ -59,7 +61,7 @@ describe('e2e_lending_contract', () => { beforeAll(async () => { const ctx = await setup(1); - ({ teardown, logger, cheatCodes: cc, wallet, deployL1ContractsValues } = ctx); + ({ teardown, logger, cheatCodes: cc, wallet, deployL1ContractsValues, dateProvider } = ctx); ({ lendingContract, 
priceFeedContract, collateralAsset, stableCoin } = await deployContracts()); await ensureAccountsPubliclyDeployed(wallet, [wallet]); @@ -137,7 +139,7 @@ describe('e2e_lending_contract', () => { nonce, ), }); - await lendingSim.progressSlots(SLOT_JUMP); + await lendingSim.progressSlots(SLOT_JUMP, dateProvider); lendingSim.depositPrivate(lendingAccount.address, await lendingAccount.key(), depositAmount); // Make a private deposit of funds into own account. @@ -172,7 +174,7 @@ describe('e2e_lending_contract', () => { ), }); - await lendingSim.progressSlots(SLOT_JUMP); + await lendingSim.progressSlots(SLOT_JUMP, dateProvider); lendingSim.depositPrivate(lendingAccount.address, lendingAccount.address.toField(), depositAmount); // Make a private deposit of funds into another account, in this case, a public account. // This should: @@ -213,7 +215,7 @@ describe('e2e_lending_contract', () => { ); await validateAction.send().wait(); - await lendingSim.progressSlots(SLOT_JUMP); + await lendingSim.progressSlots(SLOT_JUMP, dateProvider); lendingSim.depositPublic(lendingAccount.address, lendingAccount.address.toField(), depositAmount); // Make a public deposit of funds into self. 
@@ -233,7 +235,7 @@ describe('e2e_lending_contract', () => { describe('Borrow', () => { it('Borrow 🥸 : 🏦 -> 🍌', async () => { const borrowAmount = 69n; - await lendingSim.progressSlots(SLOT_JUMP); + await lendingSim.progressSlots(SLOT_JUMP, dateProvider); lendingSim.borrow(await lendingAccount.key(), lendingAccount.address, borrowAmount); // Make a private borrow using the private account @@ -251,7 +253,7 @@ describe('e2e_lending_contract', () => { it('Borrow: 🏦 -> 🍌', async () => { const borrowAmount = 69n; - await lendingSim.progressSlots(SLOT_JUMP); + await lendingSim.progressSlots(SLOT_JUMP, dateProvider); lendingSim.borrow(lendingAccount.address.toField(), lendingAccount.address, borrowAmount); // Make a public borrow using the private account @@ -274,7 +276,7 @@ describe('e2e_lending_contract', () => { action: stableCoin.methods.burn_private(lendingAccount.address, repayAmount, nonce), }); - await lendingSim.progressSlots(SLOT_JUMP); + await lendingSim.progressSlots(SLOT_JUMP, dateProvider); lendingSim.repayPrivate(lendingAccount.address, await lendingAccount.key(), repayAmount); // Make a private repay of the debt in the private account @@ -298,7 +300,7 @@ describe('e2e_lending_contract', () => { action: stableCoin.methods.burn_private(lendingAccount.address, repayAmount, nonce), }); - await lendingSim.progressSlots(SLOT_JUMP); + await lendingSim.progressSlots(SLOT_JUMP, dateProvider); lendingSim.repayPrivate(lendingAccount.address, lendingAccount.address.toField(), repayAmount); // Make a private repay of the debt in the public account @@ -328,7 +330,7 @@ describe('e2e_lending_contract', () => { ); await validateAction.send().wait(); - await lendingSim.progressSlots(SLOT_JUMP); + await lendingSim.progressSlots(SLOT_JUMP, dateProvider); lendingSim.repayPublic(lendingAccount.address, lendingAccount.address.toField(), repayAmount); // Make a public repay of the debt in the public account @@ -348,7 +350,7 @@ describe('e2e_lending_contract', () => { 
describe('Withdraw', () => { it('Withdraw: 🏦 -> 💰', async () => { const withdrawAmount = 42n; - await lendingSim.progressSlots(SLOT_JUMP); + await lendingSim.progressSlots(SLOT_JUMP, dateProvider); lendingSim.withdraw(lendingAccount.address.toField(), lendingAccount.address, withdrawAmount); // Withdraw funds from the public account @@ -363,7 +365,7 @@ describe('e2e_lending_contract', () => { it('Withdraw 🥸 : 🏦 -> 💰', async () => { const withdrawAmount = 42n; - await lendingSim.progressSlots(SLOT_JUMP); + await lendingSim.progressSlots(SLOT_JUMP, dateProvider); lendingSim.withdraw(await lendingAccount.key(), lendingAccount.address, withdrawAmount); // Withdraw funds from the private account diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts index 7a74cb96163..e011b5a8133 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts @@ -1,6 +1,7 @@ import { type AztecNodeService } from '@aztec/aztec-node'; import { sleep } from '@aztec/aztec.js'; +import { jest } from '@jest/globals'; import fs from 'fs'; import { shouldCollectMetrics } from '../fixtures/fixtures.js'; @@ -18,6 +19,8 @@ const BOOT_NODE_UDP_PORT = 40600; const DATA_DIR = './data/gossip'; +jest.setTimeout(1000 * 60 * 10); + const qosAlerts: AlertConfig[] = [ { alert: 'SequencerTimeToCollectAttestations', diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index 93c0c489810..4d92c8e070c 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -2,10 +2,10 @@ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { type AztecNodeConfig, type AztecNodeService } from '@aztec/aztec-node'; import { type AccountWalletWithSecretKey } from '@aztec/aztec.js'; import { ChainMonitor } from '@aztec/aztec.js/utils'; 
-import { L1TxUtilsWithBlobs, RollupContract, getL1ContractsConfigEnvVars } from '@aztec/ethereum'; +import { L1TxUtilsWithBlobs, RollupContract, getExpectedAddress, getL1ContractsConfigEnvVars } from '@aztec/ethereum'; import { EthCheatCodesWithState } from '@aztec/ethereum/test'; import { type Logger, createLogger } from '@aztec/foundation/log'; -import { RollupAbi, TestERC20Abi } from '@aztec/l1-artifacts'; +import { ForwarderAbi, ForwarderBytecode, RollupAbi, TestERC20Abi } from '@aztec/l1-artifacts'; import { SpamContract } from '@aztec/noir-contracts.js/Spam'; import { type BootstrapNode } from '@aztec/p2p'; import { createBootstrapNodeFromPrivateKey } from '@aztec/p2p/mocks'; @@ -196,17 +196,21 @@ export class P2PNetworkTest { for (let i = 0; i < this.numberOfNodes; i++) { const attester = privateKeyToAccount(this.attesterPrivateKeys[i]!); - const proposer = privateKeyToAccount(this.proposerPrivateKeys[i]!); + const proposerEOA = privateKeyToAccount(this.proposerPrivateKeys[i]!); + const forwarder = getExpectedAddress( + ForwarderAbi, + ForwarderBytecode, + [proposerEOA.address], + proposerEOA.address, + ).address; validators.push({ attester: attester.address, - proposer: proposer.address, + proposer: forwarder, withdrawer: attester.address, amount: l1ContractsConfig.minimumStake, } as const); - this.logger.verbose( - `Adding (attester, proposer) pair: (${attester.address}, ${proposer.address}) as validator`, - ); + this.logger.verbose(`Adding (attester, proposer) pair: (${attester.address}, ${forwarder}) as validator`); } await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ @@ -327,9 +331,8 @@ export class P2PNetworkTest { return; } - for (const node of nodes) { - await node.stop(); - } + await Promise.all(nodes.map(node => node.stop())); + this.logger.info('Nodes stopped'); } diff --git a/yarn-project/end-to-end/src/e2e_p2p/upgrade_governance_proposer.test.ts b/yarn-project/end-to-end/src/e2e_p2p/upgrade_governance_proposer.test.ts index 
bad8f5a7c7b..5712a7b443f 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/upgrade_governance_proposer.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/upgrade_governance_proposer.test.ts @@ -9,6 +9,7 @@ import { RollupAbi, } from '@aztec/l1-artifacts'; +import { jest } from '@jest/globals'; import fs from 'fs'; import { getAddress, getContract } from 'viem'; @@ -22,7 +23,9 @@ const NUM_NODES = 4; // interfere with each other. const BOOT_NODE_UDP_PORT = 45000; -const DATA_DIR = './data/gossip'; +const DATA_DIR = './data/upgrade_governance_proposer'; + +jest.setTimeout(1000 * 60 * 10); /** * This test emulates the same test as in l1-contracts/test/governance/scenario/UpgradeGovernanceProposerTest.t.sol @@ -45,8 +48,8 @@ describe('e2e_p2p_governance_proposer', () => { }); afterEach(async () => { - await t.teardown(); await t.stopNodes(nodes); + await t.teardown(); for (let i = 0; i < NUM_NODES; i++) { fs.rmSync(`${DATA_DIR}-${i}`, { recursive: true, force: true }); } @@ -158,15 +161,18 @@ describe('e2e_p2p_governance_proposer', () => { const nextRoundTimestamp2 = await rollup.read.getTimestampForSlot([ ((await rollup.read.getCurrentSlot()) / 10n) * 10n + 10n, ]); + t.logger.info(`Warping to ${nextRoundTimestamp2}`); await t.ctx.cheatCodes.eth.warp(Number(nextRoundTimestamp2)); await waitL1Block(); + t.logger.info(`Executing proposal ${govData.round}`); const txHash = await governanceProposer.write.executeProposal([govData.round], { account: emperor, gas: 1_000_000n, }); await t.ctx.deployL1ContractsValues.publicClient.waitForTransactionReceipt({ hash: txHash }); + t.logger.info(`Executed proposal ${govData.round}`); const token = getContract({ address: t.ctx.deployL1ContractsValues.l1ContractAddresses.feeJuiceAddress.toString(), @@ -174,38 +180,48 @@ describe('e2e_p2p_governance_proposer', () => { client: t.ctx.deployL1ContractsValues.walletClient, }); + t.logger.info(`Minting tokens`); + await token.write.mint([emperor.address, 10000n * 10n ** 18n], { account: 
emperor }); await token.write.approve([governance.address, 10000n * 10n ** 18n], { account: emperor }); const depositTx = await governance.write.deposit([emperor.address, 10000n * 10n ** 18n], { account: emperor }); await t.ctx.deployL1ContractsValues.publicClient.waitForTransactionReceipt({ hash: depositTx }); + t.logger.info(`Deposited tokens`); const proposal = await governance.read.getProposal([0n]); const timeToActive = proposal.creation + proposal.config.votingDelay; + t.logger.info(`Warping to ${timeToActive + 1n}`); await t.ctx.cheatCodes.eth.warp(Number(timeToActive + 1n)); - + t.logger.info(`Warped to ${timeToActive + 1n}`); await waitL1Block(); + t.logger.info(`Voting`); const voteTx = await governance.write.vote([0n, 10000n * 10n ** 18n, true], { account: emperor }); await t.ctx.deployL1ContractsValues.publicClient.waitForTransactionReceipt({ hash: voteTx }); + t.logger.info(`Voted`); const timeToExecutable = timeToActive + proposal.config.votingDuration + proposal.config.executionDelay + 1n; + t.logger.info(`Warping to ${timeToExecutable}`); await t.ctx.cheatCodes.eth.warp(Number(timeToExecutable)); - + t.logger.info(`Warped to ${timeToExecutable}`); await waitL1Block(); + t.logger.info(`Checking governance proposer`); expect(await governance.read.governanceProposer()).toEqual( getAddress(t.ctx.deployL1ContractsValues.l1ContractAddresses.governanceProposerAddress.toString()), ); + t.logger.info(`Governance proposer is correct`); + t.logger.info(`Executing proposal`); const executeTx = await governance.write.execute([0n], { account: emperor }); await t.ctx.deployL1ContractsValues.publicClient.waitForTransactionReceipt({ hash: executeTx }); - + t.logger.info(`Executed proposal`); const newGovernanceProposer = await governance.read.governanceProposer(); expect(newGovernanceProposer).not.toEqual( getAddress(t.ctx.deployL1ContractsValues.l1ContractAddresses.governanceProposerAddress.toString()), ); - expect(await 
governance.read.getProposalState([0n])).toEqual(5); + t.logger.info(`Governance proposer is correct`); }, 1_000_000); }); diff --git a/yarn-project/end-to-end/src/e2e_simple.test.ts b/yarn-project/end-to-end/src/e2e_simple.test.ts new file mode 100644 index 00000000000..390ba6ff3a4 --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_simple.test.ts @@ -0,0 +1,54 @@ +import { ContractDeployer, Fr, type Wallet } from '@aztec/aztec.js'; +// eslint-disable-next-line no-restricted-imports +import { EthAddress } from '@aztec/foundation/eth-address'; +import { StatefulTestContractArtifact } from '@aztec/noir-contracts.js/StatefulTest'; + +import { jest } from '@jest/globals'; +import 'jest-extended'; + +import { setup } from './fixtures/utils.js'; + +describe('e2e_simple', () => { + jest.setTimeout(20 * 60 * 1000); // 20 minutes + + let owner: Wallet; + let teardown: () => Promise; + + afterEach(() => { + jest.restoreAllMocks(); + }); + + describe('A simple test', () => { + const artifact = StatefulTestContractArtifact; + + beforeAll(async () => { + ({ + teardown, + wallets: [owner], + } = await setup(1, { + customForwarderContractAddress: EthAddress.ZERO, + archiverPollingIntervalMS: 200, + transactionPollingIntervalMS: 200, + worldStateBlockCheckIntervalMS: 200, + blockCheckIntervalMS: 200, + minTxsPerBlock: 1, + })); + }); + + afterAll(() => teardown()); + + it('deploys a contract', async () => { + const deployer = new ContractDeployer(artifact, owner); + + const ownerAddress = owner.getCompleteAddress().address; + const sender = ownerAddress; + const provenTx = await deployer.deploy(ownerAddress, sender, 1).prove({ + contractAddressSalt: new Fr(BigInt(1)), + skipClassRegistration: true, + skipPublicDeployment: true, + }); + const tx = await provenTx.send().wait(); + expect(tx.blockNumber).toBeDefined(); + }); + }); +}); diff --git a/yarn-project/end-to-end/src/e2e_synching.test.ts b/yarn-project/end-to-end/src/e2e_synching.test.ts index 1d80762f8d6..a61fca34477 100644 
--- a/yarn-project/end-to-end/src/e2e_synching.test.ts +++ b/yarn-project/end-to-end/src/e2e_synching.test.ts @@ -48,15 +48,16 @@ import { import { createBlobSinkClient } from '@aztec/blob-sink/client'; // eslint-disable-next-line no-restricted-imports import { L2Block, tryStop } from '@aztec/circuit-types'; -import { type AztecAddress } from '@aztec/circuits.js'; -import { getL1ContractsConfigEnvVars } from '@aztec/ethereum'; -import { Timer } from '@aztec/foundation/timer'; +import { type AztecAddress, EthAddress } from '@aztec/circuits.js'; +import { EpochCache } from '@aztec/epoch-cache'; +import { L1TxUtilsWithBlobs, RollupContract, getL1ContractsConfigEnvVars } from '@aztec/ethereum'; +import { TestDateProvider, Timer } from '@aztec/foundation/timer'; import { RollupAbi } from '@aztec/l1-artifacts'; import { SchnorrHardcodedAccountContract } from '@aztec/noir-contracts.js/SchnorrHardcodedAccount'; import { SpamContract } from '@aztec/noir-contracts.js/Spam'; import { TokenContract } from '@aztec/noir-contracts.js/Token'; import { type PXEService } from '@aztec/pxe'; -import { L1Publisher } from '@aztec/sequencer-client'; +import { SequencerPublisher } from '@aztec/sequencer-client'; import { createWorldStateSynchronizer } from '@aztec/world-state'; import * as fs from 'fs'; @@ -65,7 +66,13 @@ import { getContract } from 'viem'; import { DEFAULT_BLOB_SINK_PORT } from './fixtures/fixtures.js'; import { addAccounts } from './fixtures/snapshot_manager.js'; import { mintTokensToPrivate } from './fixtures/token_utils.js'; -import { type EndToEndContext, getPrivateKeyFromIndex, setup, setupPXEService } from './fixtures/utils.js'; +import { + type EndToEndContext, + createForwarderContract, + getPrivateKeyFromIndex, + setup, + setupPXEService, +} from './fixtures/utils.js'; const SALT = 420; const AZTEC_GENERATE_TEST_DATA = !!process.env.AZTEC_GENERATE_TEST_DATA; @@ -388,7 +395,22 @@ describe('e2e_synching', () => { }); const sequencerPK: `0x${string}` = 
`0x${getPrivateKeyFromIndex(0)!.toString('hex')}`; - const publisher = new L1Publisher( + + const l1TxUtils = new L1TxUtilsWithBlobs( + deployL1ContractsValues.publicClient, + deployL1ContractsValues.walletClient, + logger, + config, + ); + const rollupContract = new RollupContract( + deployL1ContractsValues.publicClient, + deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), + ); + const forwarderContract = await createForwarderContract(config, sequencerPK); + const epochCache = await EpochCache.create(config.l1Contracts.rollupAddress, config, { + dateProvider: new TestDateProvider(), + }); + const publisher = new SequencerPublisher( { l1RpcUrl: config.l1RpcUrl, requiredConfirmations: 1, @@ -399,8 +421,15 @@ describe('e2e_synching', () => { viemPollingIntervalMS: 100, ethereumSlotDuration: ETHEREUM_SLOT_DURATION, blobSinkUrl: `http://localhost:${blobSink?.port ?? 5052}`, + customForwarderContractAddress: EthAddress.ZERO, + }, + { + blobSinkClient, + l1TxUtils, + rollupContract, + forwarderContract, + epochCache, }, - { blobSinkClient }, ); const blocks = variant.loadBlocks(); @@ -414,7 +443,7 @@ describe('e2e_synching', () => { await cheatCodes.eth.mine(); } // If it breaks here, first place you should look is the pruning. 
- await publisher.proposeL2Block(block); + await publisher.enqueueProposeL2Block(block); } await alternativeSync({ deployL1ContractsValues, cheatCodes, config, logger, pxe }, variant); diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index d64c9ca4fc1..70ff827ad6c 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -28,16 +28,19 @@ import { DefaultMultiCallEntrypoint } from '@aztec/aztec.js/entrypoint'; import { type BBNativePrivateKernelProver } from '@aztec/bb-prover'; import { createBlobSinkClient } from '@aztec/blob-sink/client'; import { type BlobSinkServer, createBlobSinkServer } from '@aztec/blob-sink/server'; -import { type EthAddress, FEE_JUICE_INITIAL_MINT, Fr, Gas, getContractClassFromArtifact } from '@aztec/circuits.js'; +import { FEE_JUICE_INITIAL_MINT, Fr, Gas, getContractClassFromArtifact } from '@aztec/circuits.js'; import { type DeployL1ContractsArgs, + ForwarderContract, NULL_KEY, + createL1Clients, getL1ContractsConfigEnvVars, isAnvilTestChain, l1Artifacts, } from '@aztec/ethereum'; -import { EthCheatCodesWithState, startAnvil } from '@aztec/ethereum/test'; +import { DelayedTxUtils, EthCheatCodesWithState, startAnvil } from '@aztec/ethereum/test'; import { randomBytes } from '@aztec/foundation/crypto'; +import { EthAddress } from '@aztec/foundation/eth-address'; import { retryUntil } from '@aztec/foundation/retry'; import { TestDateProvider } from '@aztec/foundation/timer'; import { FeeJuiceContract } from '@aztec/noir-contracts.js/FeeJuice'; @@ -46,7 +49,7 @@ import { ProtocolContractAddress, protocolContractTreeRoot } from '@aztec/protoc import { type ProverNode, type ProverNodeConfig, createProverNode } from '@aztec/prover-node'; import { type PXEService, type PXEServiceConfig, createPXEService, getPXEServiceConfig } from '@aztec/pxe'; import { type SequencerClient } from '@aztec/sequencer-client'; -import { TestL1Publisher } 
from '@aztec/sequencer-client/test'; +import { type TestSequencerClient } from '@aztec/sequencer-client/test'; import { type TelemetryClient, type TelemetryClientConfig, @@ -330,6 +333,7 @@ export async function setup( numberOfAccounts = 1, opts: SetupOptions = { assumeProvenThrough: Number.MAX_SAFE_INTEGER, + customForwarderContractAddress: EthAddress.ZERO, }, pxeOpts: Partial = {}, chain: Chain = foundry, @@ -471,14 +475,17 @@ export async function setup( const telemetry = getTelemetryClient(opts.telemetryConfig); const blobSinkClient = createBlobSinkClient(config); - const publisher = new TestL1Publisher(config, { blobSinkClient }); const aztecNode = await AztecNodeService.createAndSync(config, { - publisher, dateProvider, blobSinkClient, }); const sequencer = aztecNode.getSequencer(); + if (sequencer) { + const publisher = (sequencer as TestSequencerClient).sequencer.publisher; + publisher.l1TxUtils = DelayedTxUtils.fromL1TxUtils(publisher.l1TxUtils, config.ethereumSlotDuration); + } + let proverNode: ProverNode | undefined = undefined; if (opts.startProverNode) { logger.verbose('Creating and syncing a simulated prover node...'); @@ -553,16 +560,6 @@ export async function setup( }; } -/** Returns an L1 wallet client for anvil using a well-known private key based on the index. */ -export function getL1WalletClient(rpcUrl: string, index: number) { - const hdAccount = mnemonicToAccount(MNEMONIC, { addressIndex: index }); - return createWalletClient({ - account: hdAccount, - chain: foundry, - transport: http(rpcUrl), - }); -} - /** * Registers the contract class used for test accounts and publicly deploys the instances requested. * Use this when you need to make a public call to an account contract, such as for requesting a public authwit. 
@@ -750,14 +747,33 @@ export async function createAndSyncProverNode( txGatheringMaxParallelRequests: 100, }; - // Use testing l1 publisher - const publisher = new TestL1Publisher(proverConfig, { blobSinkClient }); + const l1TxUtils = createDelayedL1TxUtils(aztecNodeConfig, proverNodePrivateKey, 'prover-node'); const proverNode = await createProverNode(proverConfig, { aztecNodeTxProvider: aztecNodeWithoutStop, archiver: archiver as Archiver, - publisher, + l1TxUtils, }); await proverNode.start(); return proverNode; } + +function createDelayedL1TxUtils(aztecNodeConfig: AztecNodeConfig, privateKey: `0x${string}`, logName: string) { + const { publicClient, walletClient } = createL1Clients(aztecNodeConfig.l1RpcUrl, privateKey, foundry); + + const log = createLogger(logName); + const l1TxUtils = new DelayedTxUtils(publicClient, walletClient, log, aztecNodeConfig); + l1TxUtils.enableDelayer(aztecNodeConfig.ethereumSlotDuration); + return l1TxUtils; +} + +export async function createForwarderContract(aztecNodeConfig: AztecNodeConfig, privateKey: `0x${string}`) { + const { walletClient, publicClient } = createL1Clients(aztecNodeConfig.l1RpcUrl, privateKey, foundry); + const forwarderContract = await ForwarderContract.create( + walletClient.account.address, + walletClient, + publicClient, + createLogger('forwarder'), + ); + return forwarderContract; +} diff --git a/yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts b/yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts index 55376d28df3..c20dacbdb64 100644 --- a/yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts +++ b/yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts @@ -99,7 +99,8 @@ describe('e2e_prover_coordination', () => { await ctx.proverNode!.stop(); publicClient = ctx.deployL1ContractsValues.publicClient; - publisherAddress = 
EthAddress.fromString(ctx.deployL1ContractsValues.walletClient.account.address); + publisherAddress = ctx.aztecNode.getSequencer()?.forwarderAddress ?? EthAddress.ZERO; + expect(publisherAddress).not.toEqual(EthAddress.ZERO); rollupContract = getContract({ address: getAddress(ctx.deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString()), abi: RollupAbi, diff --git a/yarn-project/end-to-end/src/simulators/lending_simulator.ts b/yarn-project/end-to-end/src/simulators/lending_simulator.ts index 56351cddad9..5a59b8ecc33 100644 --- a/yarn-project/end-to-end/src/simulators/lending_simulator.ts +++ b/yarn-project/end-to-end/src/simulators/lending_simulator.ts @@ -1,6 +1,7 @@ // Convenience struct to hold an account's address and secret that can easily be passed around. import { AztecAddress, type CheatCodes, Fr } from '@aztec/aztec.js'; import { pedersenHash } from '@aztec/foundation/crypto'; +import type { TestDateProvider } from '@aztec/foundation/timer'; import { type RollupAbi } from '@aztec/l1-artifacts'; import { type LendingContract } from '@aztec/noir-contracts.js/Lending'; @@ -99,7 +100,7 @@ export class LendingSimulator { this.time = Number(await this.rollup.read.getTimestampForSlot([slot])); } - async progressSlots(diff: number) { + async progressSlots(diff: number, dateProvider?: TestDateProvider) { if (diff <= 1) { return; } @@ -111,6 +112,9 @@ export class LendingSimulator { // Mine ethereum blocks such that the next block will be in a new slot await this.cc.eth.warp(this.time - this.ethereumSlotDuration); + if (dateProvider) { + dateProvider.setTime(this.time * 1000); + } await this.rollup.write.setAssumeProvenThroughBlockNumber([(await this.rollup.read.getPendingBlockNumber()) + 1n]); this.accumulator = muldivDown(this.accumulator, computeMultiplier(this.rate, BigInt(timeDiff)), BASE); diff --git a/yarn-project/epoch-cache/src/config.ts b/yarn-project/epoch-cache/src/config.ts index 3da15946771..3cf1c705a7d 100644 --- 
a/yarn-project/epoch-cache/src/config.ts +++ b/yarn-project/epoch-cache/src/config.ts @@ -5,7 +5,15 @@ import { getL1ReaderConfigFromEnv, } from '@aztec/ethereum'; -export type EpochCacheConfig = L1ReaderConfig & L1ContractsConfig; +export type EpochCacheConfig = Pick< + L1ReaderConfig & L1ContractsConfig, + | 'l1RpcUrl' + | 'l1ChainId' + | 'viemPollingIntervalMS' + | 'aztecSlotDuration' + | 'ethereumSlotDuration' + | 'aztecEpochDuration' +>; export function getEpochCacheConfigEnvVars(): EpochCacheConfig { return { ...getL1ReaderConfigFromEnv(), ...getL1ContractsConfigEnvVars() }; diff --git a/yarn-project/ethereum/src/contracts/forwarder.ts b/yarn-project/ethereum/src/contracts/forwarder.ts new file mode 100644 index 00000000000..7d1602aa9e7 --- /dev/null +++ b/yarn-project/ethereum/src/contracts/forwarder.ts @@ -0,0 +1,113 @@ +import { type Logger } from '@aztec/foundation/log'; +import { ForwarderAbi, ForwarderBytecode } from '@aztec/l1-artifacts'; + +import { + type Account, + type Chain, + type GetContractReturnType, + type Hex, + type HttpTransport, + type PublicClient, + type WalletClient, + encodeFunctionData, + getContract, +} from 'viem'; + +import { type L1Clients, deployL1Contract } from '../deploy_l1_contracts.js'; +import { type L1BlobInputs, type L1GasConfig, type L1TxRequest, type L1TxUtils } from '../l1_tx_utils.js'; + +export class ForwarderContract { + private readonly forwarder: GetContractReturnType>; + + constructor(public readonly client: L1Clients['publicClient'], address: Hex) { + this.forwarder = getContract({ address, abi: ForwarderAbi, client }); + } + + static async create( + owner: Hex, + walletClient: WalletClient, + publicClient: PublicClient, + logger: Logger, + ) { + logger.info('Deploying forwarder contract'); + + const { address, txHash } = await deployL1Contract( + walletClient, + publicClient, + ForwarderAbi, + ForwarderBytecode, + [owner], + owner, + undefined, + logger, + ); + + if (txHash) { + await 
publicClient.waitForTransactionReceipt({ hash: txHash }); + } + + logger.info(`Forwarder contract deployed at ${address} with owner ${owner}`); + + return new ForwarderContract(publicClient, address.toString()); + } + + public getAddress() { + return this.forwarder.address; + } + + public async forward( + requests: L1TxRequest[], + l1TxUtils: L1TxUtils, + gasConfig: L1GasConfig | undefined, + blobConfig: L1BlobInputs | undefined, + ) { + requests = requests.filter(request => request.to !== null); + const toArgs = requests.map(request => request.to!); + const dataArgs = requests.map(request => request.data!); + const data = encodeFunctionData({ + abi: ForwarderAbi, + functionName: 'forward', + args: [toArgs, dataArgs], + }); + + const { receipt, gasPrice } = await l1TxUtils.sendAndMonitorTransaction( + { + to: this.forwarder.address, + data, + }, + gasConfig, + blobConfig, + ); + + if (receipt.status === 'success') { + const stats = await l1TxUtils.getTransactionStats(receipt.transactionHash); + return { receipt, gasPrice, stats }; + } else { + const args = { + args: [toArgs, dataArgs], + functionName: 'forward', + abi: ForwarderAbi, + address: this.forwarder.address, + }; + + let errorMsg: string | undefined; + + if (blobConfig) { + const maxFeePerBlobGas = blobConfig.maxFeePerBlobGas ?? 
gasPrice.maxFeePerBlobGas; + if (maxFeePerBlobGas === undefined) { + errorMsg = 'maxFeePerBlobGas is required to get the error message'; + } else { + errorMsg = await l1TxUtils.tryGetErrorFromRevertedTx(data, args, { + blobs: blobConfig.blobs, + kzg: blobConfig.kzg, + maxFeePerBlobGas, + }); + } + } else { + errorMsg = await l1TxUtils.tryGetErrorFromRevertedTx(data, args); + } + + return { receipt, gasPrice, errorMsg }; + } + } +} diff --git a/yarn-project/ethereum/src/contracts/index.ts b/yarn-project/ethereum/src/contracts/index.ts index 6156b922083..f26c95ede98 100644 --- a/yarn-project/ethereum/src/contracts/index.ts +++ b/yarn-project/ethereum/src/contracts/index.ts @@ -1,3 +1,4 @@ +export * from './forwarder.js'; export * from './rollup.js'; export * from './governance.js'; export * from './governance_proposer.js'; diff --git a/yarn-project/ethereum/src/contracts/rollup.ts b/yarn-project/ethereum/src/contracts/rollup.ts index 35dce8ebfbc..d92b3680c93 100644 --- a/yarn-project/ethereum/src/contracts/rollup.ts +++ b/yarn-project/ethereum/src/contracts/rollup.ts @@ -1,14 +1,17 @@ import { memoize } from '@aztec/foundation/decorators'; import { EthAddress } from '@aztec/foundation/eth-address'; +import type { ViemSignature } from '@aztec/foundation/eth-signature'; import { RollupAbi, SlasherAbi } from '@aztec/l1-artifacts'; import { + type Account, type Chain, type GetContractReturnType, type Hex, type HttpTransport, type PublicClient, createPublicClient, + getAddress, getContract, http, } from 'viem'; @@ -17,6 +20,7 @@ import { createEthereumChain } from '../chain.js'; import { type DeployL1Contracts } from '../deploy_l1_contracts.js'; import { type L1ContractAddresses } from '../l1_contract_addresses.js'; import { type L1ReaderConfig } from '../l1_reader.js'; +import { formatViemError } from '../utils.js'; import { SlashingProposerContract } from './slashing_proposer.js'; export type L1RollupContractAddresses = Pick< @@ -30,15 +34,40 @@ export type 
L1RollupContractAddresses = Pick< | 'rewardDistributorAddress' >; +export type EpochProofQuoteViemArgs = { + epochToProve: bigint; + validUntilSlot: bigint; + bondAmount: bigint; + prover: `0x${string}`; + basisPointFee: number; +}; + export class RollupContract { private readonly rollup: GetContractReturnType>; + static getFromL1ContractsValues(deployL1ContractsValues: DeployL1Contracts) { + const { + publicClient, + l1ContractAddresses: { rollupAddress }, + } = deployL1ContractsValues; + return new RollupContract(publicClient, rollupAddress.toString()); + } + + static getFromConfig(config: L1ReaderConfig) { + const client = createPublicClient({ + transport: http(config.l1RpcUrl), + chain: createEthereumChain(config.l1RpcUrl, config.l1ChainId).chainInfo, + }); + const address = config.l1Contracts.rollupAddress.toString(); + return new RollupContract(client, address); + } + constructor(public readonly client: PublicClient, address: Hex) { this.rollup = getContract({ address, abi: RollupAbi, client }); } public get address() { - return EthAddress.fromString(this.rollup.address); + return this.rollup.address; } @memoize @@ -84,6 +113,16 @@ export class RollupContract { return this.rollup.read.MINIMUM_STAKE(); } + public async getSlashingProposerAddress() { + const slasherAddress = await this.rollup.read.SLASHER(); + const slasher = getContract({ + address: getAddress(slasherAddress.toString()), + abi: SlasherAbi, + client: this.client, + }); + return EthAddress.fromString(await slasher.read.PROPOSER()); + } + getBlockNumber() { return this.rollup.read.getPendingBlockNumber(); } @@ -116,6 +155,22 @@ export class RollupContract { return this.rollup.read.getCurrentProposer(); } + getProposerAt(timestamp: bigint) { + return this.rollup.read.getProposerAt([timestamp]); + } + + getBlock(blockNumber: bigint) { + return this.rollup.read.getBlock([blockNumber]); + } + + getProofCommitmentEscrow() { + return this.rollup.read.PROOF_COMMITMENT_ESCROW(); + } + + getTips() { + 
return this.rollup.read.getTips(); + } + async getEpochNumber(blockNumber?: bigint) { blockNumber ??= await this.getBlockNumber(); return this.rollup.read.getEpochForBlock([BigInt(blockNumber)]); @@ -141,7 +196,7 @@ export class RollupContract { ).map(EthAddress.fromString); return { - rollupAddress: this.address, + rollupAddress: EthAddress.fromString(this.address), inboxAddress, outboxAddress, feeJuicePortalAddress, @@ -151,20 +206,137 @@ export class RollupContract { }; } - static getFromL1ContractsValues(deployL1ContractsValues: DeployL1Contracts) { + public async getEpochNumberForSlotNumber(slotNumber: bigint): Promise { + return await this.rollup.read.getEpochAtSlot([slotNumber]); + } + + getEpochProofPublicInputs( + args: readonly [ + bigint, + readonly [ + `0x${string}`, + `0x${string}`, + `0x${string}`, + `0x${string}`, + `0x${string}`, + `0x${string}`, + `0x${string}`, + ], + readonly `0x${string}`[], + `0x${string}`, + `0x${string}`, + ], + ) { + return this.rollup.read.getEpochProofPublicInputs(args); + } + public async getProofClaim() { const { - publicClient, - l1ContractAddresses: { rollupAddress }, - } = deployL1ContractsValues; - return new RollupContract(publicClient, rollupAddress.toString()); + epochToProve, + basisPointFee, + bondAmount, + bondProvider: bondProviderHex, + proposerClaimant: proposerClaimantHex, + } = await this.rollup.read.getProofClaim(); + + const bondProvider = EthAddress.fromString(bondProviderHex); + const proposerClaimant = EthAddress.fromString(proposerClaimantHex); + + if (bondProvider.isZero() && proposerClaimant.isZero() && epochToProve === 0n) { + return undefined; + } + + return { + epochToProve, + basisPointFee, + bondAmount, + bondProvider, + proposerClaimant, + }; } - static getFromConfig(config: L1ReaderConfig) { - const client = createPublicClient({ - transport: http(config.l1RpcUrl), - chain: createEthereumChain(config.l1RpcUrl, config.l1ChainId).chainInfo, - }); - const address = 
config.l1Contracts.rollupAddress.toString(); - return new RollupContract(client, address); + async getClaimableEpoch(): Promise { + try { + return await this.rollup.read.getClaimableEpoch(); + } catch (err: unknown) { + throw formatViemError(err); + } + } + + public async getEpochToProve(): Promise { + try { + return await this.rollup.read.getEpochToProve(); + } catch (err: unknown) { + throw formatViemError(err); + } + } + + public async validateProofQuote( + quote: { + quote: EpochProofQuoteViemArgs; + signature: ViemSignature; + }, + account: `0x${string}` | Account, + slotDuration: bigint | number, + ): Promise { + if (typeof slotDuration === 'number') { + slotDuration = BigInt(slotDuration); + } + const timeOfNextL1Slot = BigInt((await this.client.getBlock()).timestamp + slotDuration); + const args = [timeOfNextL1Slot, quote] as const; + try { + await this.rollup.read.validateEpochProofRightClaimAtTime(args, { account }); + } catch (err) { + throw formatViemError(err); + } + } + + public async validateHeader( + args: readonly [ + `0x${string}`, + ViemSignature[], + `0x${string}`, + bigint, + `0x${string}`, + { + ignoreDA: boolean; + ignoreSignatures: boolean; + }, + ], + account: `0x${string}` | Account, + ): Promise { + try { + await this.rollup.read.validateHeader(args, { account }); + } catch (error: unknown) { + throw formatViemError(error); + } + } + + /** + * @notice Calls `canProposeAtTime` with the time of the next Ethereum block and the sender address + * + * @dev Throws if unable to propose + * + * @param archive - The archive that we expect to be current state + * @return [slot, blockNumber] - If you can propose, the L2 slot number and L2 block number of the next Ethereum block, + * @throws otherwise + */ + public async canProposeAtNextEthBlock( + archive: Buffer, + account: `0x${string}` | Account, + slotDuration: bigint | number, + ): Promise<[bigint, bigint]> { + if (typeof slotDuration === 'number') { + slotDuration = BigInt(slotDuration); + } + 
const timeOfNextL1Slot = (await this.client.getBlock()).timestamp + slotDuration; + try { + const [slot, blockNumber] = await this.rollup.read.canProposeAtTime( + [timeOfNextL1Slot, `0x${archive.toString('hex')}`], + { account }, + ); + return [slot, blockNumber]; + } catch (err: unknown) { + throw formatViemError(err); + } } } diff --git a/yarn-project/ethereum/src/deploy_l1_contracts.ts b/yarn-project/ethereum/src/deploy_l1_contracts.ts index ecf71c9e5c8..ff5c5f92c78 100644 --- a/yarn-project/ethereum/src/deploy_l1_contracts.ts +++ b/yarn-project/ethereum/src/deploy_l1_contracts.ts @@ -9,6 +9,8 @@ import { ExtRollupLibBytecode, FeeJuicePortalAbi, FeeJuicePortalBytecode, + ForwarderAbi, + ForwarderBytecode, GovernanceAbi, GovernanceBytecode, GovernanceProposerAbi, @@ -36,10 +38,15 @@ import type { Abi, Narrow } from 'abitype'; import { type Account, type Chain, + type Client, type Hex, type HttpTransport, + type PublicActions, type PublicClient, + type PublicRpcSchema, + type WalletActions, type WalletClient, + type WalletRpcSchema, concatHex, createPublicClient, createWalletClient, @@ -50,6 +57,7 @@ import { http, numberToHex, padHex, + publicActions, } from 'viem'; import { type HDAccount, type PrivateKeyAccount, mnemonicToAccount, privateKeyToAccount } from 'viem/accounts'; import { foundry } from 'viem/chains'; @@ -59,6 +67,8 @@ import { type L1ContractsConfig } from './config.js'; import { type L1ContractAddresses } from './l1_contract_addresses.js'; import { L1TxUtils } from './l1_tx_utils.js'; +export const DEPLOYER_ADDRESS: Hex = '0x4e59b44847b379578588920cA78FbF26c0B4956C'; + /** * Return type of the deployL1Contract function. 
*/ @@ -190,7 +200,13 @@ export interface DeployL1ContractsArgs extends L1ContractsConfig { export type L1Clients = { publicClient: PublicClient; - walletClient: WalletClient; + walletClient: Client< + HttpTransport, + Chain, + PrivateKeyAccount, + [...WalletRpcSchema, ...PublicRpcSchema], + PublicActions & WalletActions + >; }; /** @@ -212,18 +228,25 @@ export function createL1Clients( : mnemonicToAccount(mnemonicOrPrivateKeyOrHdAccount) : mnemonicOrPrivateKeyOrHdAccount; + // From what I can see, this is the difference between the HDAccount and the PrivateKeyAccount + // and we don't need it for anything. This lets us use the same type for both. + // eslint-disable-next-line camelcase + hdAccount.experimental_signAuthorization ??= () => { + throw new Error('experimental_signAuthorization not implemented for HDAccount'); + }; + const walletClient = createWalletClient({ account: hdAccount, chain, transport: http(rpcUrl), - }); + }).extend(publicActions); const publicClient = createPublicClient({ chain, transport: http(rpcUrl), pollingInterval: 100, }); - return { walletClient, publicClient }; + return { walletClient, publicClient } as L1Clients; } /** @@ -414,18 +437,18 @@ export const deployL1Contracts = async ( ].map(txHash => publicClient.waitForTransactionReceipt({ hash: txHash })), ); - const initiateValidatorSetTxHash = await rollup.write.cheat__InitialiseValidatorSet([ - newValidatorsAddresses.map(v => ({ - attester: v, - proposer: v, - withdrawer: v, - amount: args.minimumStake, - })), - ]); + const validators = newValidatorsAddresses.map(v => ({ + attester: v, + proposer: getExpectedAddress(ForwarderAbi, ForwarderBytecode, [v], v).address, + withdrawer: v, + amount: args.minimumStake, + })); + const initiateValidatorSetTxHash = await rollup.write.cheat__InitialiseValidatorSet([validators]); txHashes.push(initiateValidatorSetTxHash); - logger.info( - `Initialized validator set (${newValidatorsAddresses.join(', ')}) in tx ${initiateValidatorSetTxHash}`, - ); 
+ logger.info(`Initialized validator set`, { + validators, + txHash: initiateValidatorSetTxHash, + }); } } @@ -663,15 +686,12 @@ export async function deployL1Contract( } if (maybeSalt) { - const salt = padHex(maybeSalt, { size: 32 }); - const deployer: Hex = '0x4e59b44847b379578588920cA78FbF26c0B4956C'; - const calldata = encodeDeployData({ abi, bytecode, args }); - resultingAddress = getContractAddress({ from: deployer, salt, bytecode: calldata, opcode: 'CREATE2' }); + const { address, paddedSalt: salt, calldata } = getExpectedAddress(abi, bytecode, args, maybeSalt); + resultingAddress = address; const existing = await publicClient.getBytecode({ address: resultingAddress }); - if (existing === undefined || existing === '0x') { const res = await l1TxUtils.sendTransaction({ - to: deployer, + to: DEPLOYER_ADDRESS, data: concatHex([salt, calldata]), }); txHash = res.txHash; @@ -701,4 +721,26 @@ export async function deployL1Contract( return { address: EthAddress.fromString(resultingAddress!), txHash }; } + +export function getExpectedAddress( + abi: Narrow, + bytecode: Hex, + args: readonly unknown[], + salt: Hex, +) { + const paddedSalt = padHex(salt, { size: 32 }); + const calldata = encodeDeployData({ abi, bytecode, args }); + const address = getContractAddress({ + from: DEPLOYER_ADDRESS, + salt: paddedSalt, + bytecode: calldata, + opcode: 'CREATE2', + }); + return { + address, + paddedSalt, + calldata, + }; +} + // docs:end:deployL1Contract diff --git a/yarn-project/ethereum/src/l1_tx_utils.ts b/yarn-project/ethereum/src/l1_tx_utils.ts index fb0a96486cb..e53789c17c9 100644 --- a/yarn-project/ethereum/src/l1_tx_utils.ts +++ b/yarn-project/ethereum/src/l1_tx_utils.ts @@ -1,4 +1,5 @@ -import { times } from '@aztec/foundation/collection'; +import { toHex } from '@aztec/foundation/bigint-buffer'; +import { compactArray, times } from '@aztec/foundation/collection'; import { type ConfigMappingsType, bigintConfigHelper, @@ -10,10 +11,13 @@ import { makeBackoff, retry } 
from '@aztec/foundation/retry'; import { sleep } from '@aztec/foundation/sleep'; import { + type Abi, type Account, type Address, + type BaseError, type BlockOverrides, type Chain, + type ContractFunctionExecutionError, type GetTransactionReturnType, type Hex, type HttpTransport, @@ -24,6 +28,8 @@ import { type TransactionReceipt, type WalletClient, formatGwei, + getContractError, + hexToBytes, } from 'viem'; import { formatViemError } from './utils.js'; @@ -171,6 +177,8 @@ export interface L1TxRequest { value?: bigint; } +export type L1GasConfig = Partial & { gasLimit?: bigint; txTimeoutAt?: Date }; + export interface L1BlobInputs { blobs: Uint8Array[]; kzg: any; @@ -183,12 +191,24 @@ export interface GasPrice { maxFeePerBlobGas?: bigint; } +export type TransactionStats = { + /** Address of the sender. */ + sender: string; + /** Hash of the transaction. */ + transactionHash: string; + /** Size in bytes of the tx calldata */ + calldataSize: number; + /** Gas required to pay for the calldata inclusion (depends on size and number of zeros) */ + calldataGas: number; +}; + export class L1TxUtils { protected readonly config: L1TxUtilsConfig; + private interrupted = false; constructor( - protected readonly publicClient: PublicClient, - protected readonly walletClient: WalletClient, + public publicClient: PublicClient, + public walletClient: WalletClient, protected readonly logger?: Logger, config?: Partial, ) { @@ -198,6 +218,26 @@ export class L1TxUtils { }; } + public interrupt() { + this.interrupted = true; + } + + public restart() { + this.interrupted = false; + } + + public getSenderAddress() { + return this.walletClient.account.address; + } + + public getBlock() { + return this.publicClient.getBlock(); + } + + public getBlockNumber() { + return this.publicClient.getBlockNumber(); + } + /** * Sends a transaction with gas estimation and pricing * @param request - The transaction request (to, data, value) @@ -206,7 +246,7 @@ export class L1TxUtils { */ public async 
sendTransaction( request: L1TxRequest, - _gasConfig?: Partial & { gasLimit?: bigint; txTimeoutAt?: Date }, + _gasConfig?: L1GasConfig, blobInputs?: L1BlobInputs, ): Promise<{ txHash: Hex; gasLimit: bigint; gasPrice: GasPrice }> { try { @@ -314,6 +354,7 @@ export class L1TxUtils { const isTimedOut = () => (gasConfig.txTimeoutAt && Date.now() > gasConfig.txTimeoutAt.getTime()) || (gasConfig.txTimeoutMs !== undefined && Date.now() - initialTxTime > gasConfig.txTimeoutMs) || + this.interrupted || false; while (!txTimedOut) { @@ -442,7 +483,7 @@ export class L1TxUtils { */ public async sendAndMonitorTransaction( request: L1TxRequest, - gasConfig?: Partial & { gasLimit?: bigint; txTimeoutAt?: Date }, + gasConfig?: L1GasConfig, blobInputs?: L1BlobInputs, ): Promise<{ receipt: TransactionReceipt; gasPrice: GasPrice }> { const { txHash, gasLimit, gasPrice } = await this.sendTransaction(request, gasConfig, blobInputs); @@ -570,7 +611,7 @@ export class L1TxUtils { * Estimates gas and adds buffer */ public async estimateGas( - account: Account, + account: Account | Hex, request: L1TxRequest, _gasConfig?: L1TxUtilsConfig, _blobInputs?: L1BlobInputs, @@ -600,6 +641,89 @@ export class L1TxUtils { return withBuffer; } + async getTransactionStats(txHash: string): Promise { + const tx = await this.publicClient.getTransaction({ hash: txHash as Hex }); + if (!tx) { + return undefined; + } + const calldata = hexToBytes(tx.input); + return { + sender: tx.from.toString(), + transactionHash: tx.hash, + calldataSize: calldata.length, + calldataGas: getCalldataGasUsage(calldata), + }; + } + + public async tryGetErrorFromRevertedTx( + data: Hex, + args: { + args: any[]; + functionName: string; + abi: Abi; + address: Hex; + }, + blobInputs?: L1BlobInputs & { maxFeePerBlobGas: bigint }, + ) { + try { + // NB: If this fn starts unexpectedly giving incorrect blob hash errors, it may be because the checkBlob + // bool is no longer at the slot below. 
To find the slot, run: forge inspect src/core/Rollup.sol:Rollup storage + const checkBlobSlot = 9n; + await this.publicClient.simulateContract({ + ...args, + account: this.walletClient.account, + stateOverride: [ + { + address: args.address, + stateDiff: [ + { + slot: toHex(checkBlobSlot, true), + value: toHex(0n, true), + }, + ], + }, + ], + }); + // If the above passes, we have a blob error. We cannot simulate blob txs, and failed txs no longer throw errors. + // Strangely, the only way to throw the revert reason as an error and provide blobs is prepareTransactionRequest. + // See: https://github.com/wevm/viem/issues/2075 + // This throws a EstimateGasExecutionError with the custom error information: + const request = blobInputs + ? { + account: this.walletClient.account, + to: args.address, + data, + blobs: blobInputs.blobs, + kzg: blobInputs.kzg, + maxFeePerBlobGas: blobInputs.maxFeePerBlobGas, + } + : { + account: this.walletClient.account, + to: args.address, + data, + }; + await this.walletClient.prepareTransactionRequest(request); + return undefined; + } catch (simulationErr: any) { + // If we don't have a ContractFunctionExecutionError, we have a blob related error => use getContractError to get the error msg. + const contractErr = + simulationErr.name === 'ContractFunctionExecutionError' + ? 
simulationErr + : getContractError(simulationErr as BaseError, { + args: [], + abi: args.abi, + functionName: args.functionName, + address: args.address, + sender: this.walletClient.account.address, + }); + if (contractErr.name === 'ContractFunctionExecutionError') { + const execErr = contractErr as ContractFunctionExecutionError; + return tryGetCustomErrorNameContractFunction(execErr); + } + this.logger?.error(`Error getting error from simulation`, simulationErr); + } + } + public async simulateGasUsed( request: L1TxRequest & { gas?: bigint }, blockOverrides: BlockOverrides = {}, @@ -632,6 +756,7 @@ export class L1TxUtils { }, ], }); + this.logger?.debug(`L1 gas used in simulation: ${result[0].calls[0].gasUsed}`, { result, }); @@ -715,3 +840,16 @@ export class L1TxUtils { return receipt.transactionHash; } } + +export function tryGetCustomErrorNameContractFunction(err: ContractFunctionExecutionError) { + return compactArray([err.shortMessage, ...(err.metaMessages ?? []).slice(0, 2).map(s => s.trim())]).join(' '); +} + +/* + * Returns cost of calldata usage in Ethereum. + * @param data - Calldata. + * @returns 4 for each zero byte, 16 for each nonzero. 
+ */ +export function getCalldataGasUsage(data: Uint8Array) { + return data.filter(byte => byte === 0).length * 4 + data.filter(byte => byte !== 0).length * 16; +} diff --git a/yarn-project/ethereum/src/test/delayed_tx_utils.ts b/yarn-project/ethereum/src/test/delayed_tx_utils.ts new file mode 100644 index 00000000000..03d6069926d --- /dev/null +++ b/yarn-project/ethereum/src/test/delayed_tx_utils.ts @@ -0,0 +1,24 @@ +import { L1TxUtilsWithBlobs } from '../l1_tx_utils_with_blobs.js'; +import { type Delayer, withDelayer } from './tx_delayer.js'; + +export class DelayedTxUtils extends L1TxUtilsWithBlobs { + public delayer: Delayer | undefined; + + public static fromL1TxUtils(l1TxUtils: L1TxUtilsWithBlobs, ethereumSlotDuration: number) { + const { client, delayer } = withDelayer(l1TxUtils.walletClient, { + ethereumSlotDuration, + }); + const casted = l1TxUtils as unknown as DelayedTxUtils; + casted.delayer = delayer; + casted.walletClient = client; + return casted; + } + + public enableDelayer(ethereumSlotDuration: number) { + const { client, delayer } = withDelayer(this.walletClient, { + ethereumSlotDuration, + }); + this.delayer = delayer; + this.walletClient = client; + } +} diff --git a/yarn-project/ethereum/src/test/index.ts b/yarn-project/ethereum/src/test/index.ts index cdb11e52589..13734d1e6da 100644 --- a/yarn-project/ethereum/src/test/index.ts +++ b/yarn-project/ethereum/src/test/index.ts @@ -1,3 +1,4 @@ +export * from './delayed_tx_utils.js'; +export * from './eth_cheat_codes_with_state.js'; export * from './start_anvil.js'; export * from './tx_delayer.js'; -export * from './eth_cheat_codes_with_state.js'; diff --git a/yarn-project/ethereum/src/utils.ts b/yarn-project/ethereum/src/utils.ts index 36be166ecad..e614e0afbaa 100644 --- a/yarn-project/ethereum/src/utils.ts +++ b/yarn-project/ethereum/src/utils.ts @@ -233,3 +233,18 @@ export function formatViemError(error: any, abi: Abi = ErrorsAbi): FormattedViem return new 
FormattedViemError(formattedRes.replace(/\\n/g, '\n'), error?.metaMessages); } + +export function tryGetCustomErrorName(err: any) { + try { + // See https://viem.sh/docs/contract/simulateContract#handling-custom-errors + if (err.name === 'ViemError' || err.name === 'ContractFunctionExecutionError') { + const baseError = err as BaseError; + const revertError = baseError.walk(err => (err as Error).name === 'ContractFunctionRevertedError'); + if (revertError) { + return (revertError as ContractFunctionRevertedError).data?.errorName; + } + } + } catch (_e) { + return undefined; + } +} diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index e6f082a9ad0..2abe6d7edbf 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -205,4 +205,5 @@ export type EnvVar = | 'FAUCET_L1_ASSETS' | 'K8S_POD_NAME' | 'K8S_POD_UID' - | 'K8S_NAMESPACE_NAME'; + | 'K8S_NAMESPACE_NAME' + | 'CUSTOM_FORWARDER_CONTRACT_ADDRESS'; diff --git a/yarn-project/l1-artifacts/scripts/generate-artifacts.sh b/yarn-project/l1-artifacts/scripts/generate-artifacts.sh index da0dff7608e..08a0b12cfeb 100755 --- a/yarn-project/l1-artifacts/scripts/generate-artifacts.sh +++ b/yarn-project/l1-artifacts/scripts/generate-artifacts.sh @@ -35,6 +35,7 @@ contracts=( "Slasher" "EmpireBase" "SlashFactory" + "Forwarder" "HonkVerifier" ) diff --git a/yarn-project/prover-node/src/bond/escrow-contract.ts b/yarn-project/prover-node/src/bond/escrow-contract.ts index 362059babd7..3816b41f51e 100644 --- a/yarn-project/prover-node/src/bond/escrow-contract.ts +++ b/yarn-project/prover-node/src/bond/escrow-contract.ts @@ -1,17 +1,13 @@ import { EthAddress } from '@aztec/circuits.js'; +import { type L1Clients } from '@aztec/ethereum'; import { IProofCommitmentEscrowAbi } from '@aztec/l1-artifacts'; import { type Chain, - type Client, type GetContractReturnType, type HttpTransport, type PrivateKeyAccount, - type 
PublicActions, - type PublicRpcSchema, - type WalletActions, type WalletClient, - type WalletRpcSchema, getContract, } from 'viem'; @@ -21,16 +17,7 @@ export class EscrowContract { WalletClient >; - constructor( - private readonly client: Client< - HttpTransport, - Chain, - PrivateKeyAccount, - [...WalletRpcSchema, ...PublicRpcSchema], - PublicActions & WalletActions - >, - address: EthAddress, - ) { + constructor(private readonly client: L1Clients['walletClient'], address: EthAddress) { this.escrow = getContract({ address: address.toString(), abi: IProofCommitmentEscrowAbi, client }); } diff --git a/yarn-project/prover-node/src/bond/factory.ts b/yarn-project/prover-node/src/bond/factory.ts index 8edd8dce24b..3152d3be2c9 100644 --- a/yarn-project/prover-node/src/bond/factory.ts +++ b/yarn-project/prover-node/src/bond/factory.ts @@ -1,19 +1,6 @@ -import { EthAddress } from '@aztec/circuits.js'; +import { type EthAddress } from '@aztec/circuits.js'; +import { type L1Clients } from '@aztec/ethereum'; import { compact } from '@aztec/foundation/collection'; -import { type RollupAbi } from '@aztec/l1-artifacts'; - -import { - type Chain, - type Client, - type GetContractReturnType, - type HttpTransport, - type PrivateKeyAccount, - type PublicActions, - type PublicRpcSchema, - type WalletActions, - type WalletClient, - type WalletRpcSchema, -} from 'viem'; import { BondManager } from './bond-manager.js'; import { type ProverBondManagerConfig, getProverBondManagerConfigFromEnv } from './config.js'; @@ -21,21 +8,14 @@ import { EscrowContract } from './escrow-contract.js'; import { TokenContract } from './token-contract.js'; export async function createBondManager( - rollupContract: GetContractReturnType>, - client: Client< - HttpTransport, - Chain, - PrivateKeyAccount, - [...WalletRpcSchema, ...PublicRpcSchema], - PublicActions & WalletActions - >, + escrowContractAddress: EthAddress, + client: L1Clients['walletClient'], overrides: Partial = {}, ) { const config = { 
...getProverBondManagerConfigFromEnv(), ...compact(overrides) }; const { proverMinimumEscrowAmount: minimumStake, proverTargetEscrowAmount: maybeTargetStake } = config; const targetStake = maybeTargetStake ?? minimumStake * 2n; - const escrowContractAddress = EthAddress.fromString(await rollupContract.read.PROOF_COMMITMENT_ESCROW()); const escrow = new EscrowContract(client, escrowContractAddress); const tokenContractAddress = await escrow.getTokenAddress(); diff --git a/yarn-project/prover-node/src/bond/token-contract.ts b/yarn-project/prover-node/src/bond/token-contract.ts index 55f68f83348..b45727e32bd 100644 --- a/yarn-project/prover-node/src/bond/token-contract.ts +++ b/yarn-project/prover-node/src/bond/token-contract.ts @@ -1,18 +1,14 @@ import { EthAddress } from '@aztec/circuits.js'; +import { type L1Clients } from '@aztec/ethereum'; import { createLogger } from '@aztec/foundation/log'; import { IERC20Abi, TestERC20Abi } from '@aztec/l1-artifacts'; import { type Chain, - type Client, type GetContractReturnType, type HttpTransport, type PrivateKeyAccount, - type PublicActions, - type PublicRpcSchema, - type WalletActions, type WalletClient, - type WalletRpcSchema, getContract, } from 'viem'; @@ -23,16 +19,7 @@ export class TokenContract { private token: GetContractReturnType>; private logger = createLogger('prover-node:token-contract'); - constructor( - private readonly client: Client< - HttpTransport, - Chain, - PrivateKeyAccount, - [...WalletRpcSchema, ...PublicRpcSchema], - PublicActions & WalletActions - >, - address: EthAddress, - ) { + constructor(private readonly client: L1Clients['walletClient'], address: EthAddress) { this.token = getContract({ address: address.toString(), abi: IERC20Abi, client }); } diff --git a/yarn-project/prover-node/src/factory.ts b/yarn-project/prover-node/src/factory.ts index fca8b4027f5..c9d10f80216 100644 --- a/yarn-project/prover-node/src/factory.ts +++ b/yarn-project/prover-node/src/factory.ts @@ -2,14 +2,14 @@ import { 
type Archiver, createArchiver } from '@aztec/archiver'; import { type BlobSinkClientInterface, createBlobSinkClient } from '@aztec/blob-sink/client'; import { type ProverCoordination, type ProvingJobBroker } from '@aztec/circuit-types'; import { EpochCache } from '@aztec/epoch-cache'; -import { createEthereumChain } from '@aztec/ethereum'; +import { L1TxUtils, RollupContract, createEthereumChain, createL1Clients } from '@aztec/ethereum'; import { Buffer32 } from '@aztec/foundation/buffer'; +import { EthAddress } from '@aztec/foundation/eth-address'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { type DataStoreConfig } from '@aztec/kv-store/config'; import { RollupAbi } from '@aztec/l1-artifacts'; import { createProverClient } from '@aztec/prover-client'; import { createAndStartProvingBroker } from '@aztec/prover-client/broker'; -import { L1Publisher } from '@aztec/sequencer-client'; import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client'; import { createWorldStateSynchronizer } from '@aztec/world-state'; @@ -20,6 +20,7 @@ import { type ProverNodeConfig, type QuoteProviderConfig } from './config.js'; import { ClaimsMonitor } from './monitors/claims-monitor.js'; import { EpochMonitor } from './monitors/epoch-monitor.js'; import { createProverCoordination } from './prover-coordination/factory.js'; +import { ProverNodePublisher } from './prover-node-publisher.js'; import { ProverNode, type ProverNodeOptions } from './prover-node.js'; import { HttpQuoteProvider } from './quote-provider/http.js'; import { SimpleQuoteProvider } from './quote-provider/simple.js'; @@ -33,9 +34,10 @@ export async function createProverNode( log?: Logger; aztecNodeTxProvider?: ProverCoordination; archiver?: Archiver; - publisher?: L1Publisher; + publisher?: ProverNodePublisher; blobSinkClient?: BlobSinkClientInterface; broker?: ProvingJobBroker; + l1TxUtils?: L1TxUtils; } = {}, ) { const telemetry = deps.telemetry ?? 
getTelemetryClient(); @@ -51,8 +53,14 @@ export async function createProverNode( const broker = deps.broker ?? (await createAndStartProvingBroker(config, telemetry)); const prover = await createProverClient(config, worldStateSynchronizer, broker, telemetry); - // REFACTOR: Move publisher out of sequencer package and into an L1-related package - const publisher = deps.publisher ?? new L1Publisher(config, { telemetry, blobSinkClient }); + const { l1RpcUrl: rpcUrl, l1ChainId: chainId, publisherPrivateKey } = config; + const chain = createEthereumChain(rpcUrl, chainId); + const { publicClient, walletClient } = createL1Clients(rpcUrl, publisherPrivateKey, chain.chainInfo); + + const rollupContract = new RollupContract(publicClient, config.l1Contracts.rollupAddress.toString()); + + const l1TxUtils = deps.l1TxUtils ?? new L1TxUtils(publicClient, walletClient, log, config); + const publisher = deps.publisher ?? new ProverNodePublisher(config, { telemetry, rollupContract, l1TxUtils }); const epochCache = await EpochCache.create(config.l1Contracts.rollupAddress, config); @@ -81,9 +89,8 @@ export async function createProverNode( const claimsMonitor = new ClaimsMonitor(publisher, proverNodeConfig, telemetry); const epochMonitor = new EpochMonitor(archiver, proverNodeConfig, telemetry); - const rollupContract = publisher.getRollupContract(); - const walletClient = publisher.getClient(); - const bondManager = await createBondManager(rollupContract, walletClient, config); + const escrowContractAddress = await rollupContract.getProofCommitmentEscrow(); + const bondManager = await createBondManager(EthAddress.fromString(escrowContractAddress), walletClient, config); return new ProverNode( prover, diff --git a/yarn-project/prover-node/src/index.ts b/yarn-project/prover-node/src/index.ts index 21851f2c615..cc72e651765 100644 --- a/yarn-project/prover-node/src/index.ts +++ b/yarn-project/prover-node/src/index.ts @@ -1,4 +1,5 @@ -export * from './factory.js'; export * from 
'./config.js'; -export * from './prover-node.js'; +export * from './factory.js'; export * from './http.js'; +export * from './prover-node-publisher.js'; +export * from './prover-node.js'; diff --git a/yarn-project/prover-node/src/job/epoch-proving-job.test.ts b/yarn-project/prover-node/src/job/epoch-proving-job.test.ts index d550fce81bb..d7555df10b4 100644 --- a/yarn-project/prover-node/src/job/epoch-proving-job.test.ts +++ b/yarn-project/prover-node/src/job/epoch-proving-job.test.ts @@ -13,19 +13,19 @@ import { RootRollupPublicInputs } from '@aztec/circuits.js/rollup'; import { times, timesParallel } from '@aztec/foundation/collection'; import { toArray } from '@aztec/foundation/iterable'; import { sleep } from '@aztec/foundation/sleep'; -import { type L1Publisher } from '@aztec/sequencer-client'; import { type PublicProcessor, type PublicProcessorFactory } from '@aztec/simulator/server'; import { getTelemetryClient } from '@aztec/telemetry-client'; import { type MockProxy, mock } from 'jest-mock-extended'; import { ProverNodeMetrics } from '../metrics.js'; +import { type ProverNodePublisher } from '../prover-node-publisher.js'; import { EpochProvingJob } from './epoch-proving-job.js'; describe('epoch-proving-job', () => { // Dependencies let prover: MockProxy; - let publisher: MockProxy; + let publisher: MockProxy; let l2BlockSource: MockProxy; let l1ToL2MessageSource: MockProxy; let worldState: MockProxy; @@ -68,7 +68,7 @@ describe('epoch-proving-job', () => { beforeEach(async () => { prover = mock(); - publisher = mock(); + publisher = mock(); l2BlockSource = mock(); l1ToL2MessageSource = mock(); worldState = mock(); diff --git a/yarn-project/prover-node/src/job/epoch-proving-job.ts b/yarn-project/prover-node/src/job/epoch-proving-job.ts index a5d2d5a989b..a7126f1fdad 100644 --- a/yarn-project/prover-node/src/job/epoch-proving-job.ts +++ b/yarn-project/prover-node/src/job/epoch-proving-job.ts @@ -13,13 +13,13 @@ import { asyncPool } from 
'@aztec/foundation/async-pool'; import { createLogger } from '@aztec/foundation/log'; import { promiseWithResolvers } from '@aztec/foundation/promise'; import { Timer } from '@aztec/foundation/timer'; -import { type L1Publisher } from '@aztec/sequencer-client'; import { type PublicProcessor, type PublicProcessorFactory } from '@aztec/simulator/server'; import { Attributes, type Traceable, type Tracer, trackSpan } from '@aztec/telemetry-client'; import * as crypto from 'node:crypto'; import { type ProverNodeMetrics } from '../metrics.js'; +import { type ProverNodePublisher } from '../prover-node-publisher.js'; /** * Job that grabs a range of blocks from the unfinalised chain from L1, gets their txs given their hashes, @@ -43,7 +43,7 @@ export class EpochProvingJob implements Traceable { private txs: Tx[], private prover: EpochProver, private publicProcessorFactory: PublicProcessorFactory, - private publisher: L1Publisher, + private publisher: ProverNodePublisher, private l2BlockSource: L2BlockSource, private l1ToL2MessageSource: L1ToL2MessageSource, private metrics: ProverNodeMetrics, diff --git a/yarn-project/prover-node/src/metrics.ts b/yarn-project/prover-node/src/metrics.ts index 9a08a65aea8..771d8b02a98 100644 --- a/yarn-project/prover-node/src/metrics.ts +++ b/yarn-project/prover-node/src/metrics.ts @@ -1,4 +1,14 @@ -import { type Histogram, Metrics, type TelemetryClient, ValueType } from '@aztec/telemetry-client'; +import { type L1PublishProofStats, type L1PublishStats } from '@aztec/circuit-types/stats'; +import { + Attributes, + type Histogram, + Metrics, + type TelemetryClient, + type UpDownCounter, + ValueType, +} from '@aztec/telemetry-client'; + +import { formatEther } from 'viem'; export class ProverNodeMetrics { proverEpochExecutionDuration: Histogram; @@ -6,6 +16,15 @@ export class ProverNodeMetrics { provingJobBlocks: Histogram; provingJobTransactions: Histogram; + gasPrice: Histogram; + txCount: UpDownCounter; + txDuration: Histogram; + txGas: 
Histogram; + txCalldataSize: Histogram; + txCalldataGas: Histogram; + txBlobDataGasUsed: Histogram; + txBlobDataGasCost: Histogram; + constructor(public readonly client: TelemetryClient, name = 'ProverNode') { const meter = client.getMeter(name); this.proverEpochExecutionDuration = meter.createHistogram(Metrics.PROVER_NODE_EXECUTION_DURATION, { @@ -26,6 +45,63 @@ export class ProverNodeMetrics { description: 'Number of transactions in a proven epoch', valueType: ValueType.INT, }); + + this.gasPrice = meter.createHistogram(Metrics.L1_PUBLISHER_GAS_PRICE, { + description: 'The gas price used for transactions', + unit: 'gwei', + valueType: ValueType.DOUBLE, + }); + + this.txCount = meter.createUpDownCounter(Metrics.L1_PUBLISHER_TX_COUNT, { + description: 'The number of transactions processed', + }); + + this.txDuration = meter.createHistogram(Metrics.L1_PUBLISHER_TX_DURATION, { + description: 'The duration of transaction processing', + unit: 'ms', + valueType: ValueType.INT, + }); + + this.txGas = meter.createHistogram(Metrics.L1_PUBLISHER_TX_GAS, { + description: 'The gas consumed by transactions', + unit: 'gas', + valueType: ValueType.INT, + }); + + this.txCalldataSize = meter.createHistogram(Metrics.L1_PUBLISHER_TX_CALLDATA_SIZE, { + description: 'The size of the calldata in transactions', + unit: 'By', + valueType: ValueType.INT, + }); + + this.txCalldataGas = meter.createHistogram(Metrics.L1_PUBLISHER_TX_CALLDATA_GAS, { + description: 'The gas consumed by the calldata in transactions', + unit: 'gas', + valueType: ValueType.INT, + }); + + this.txBlobDataGasUsed = meter.createHistogram(Metrics.L1_PUBLISHER_TX_BLOBDATA_GAS_USED, { + description: 'The amount of blob gas used in transactions', + unit: 'gas', + valueType: ValueType.INT, + }); + + this.txBlobDataGasCost = meter.createHistogram(Metrics.L1_PUBLISHER_TX_BLOBDATA_GAS_COST, { + description: 'The gas cost of blobs in transactions', + unit: 'gwei', + valueType: ValueType.INT, + }); + } + + recordFailedTx() { + 
this.txCount.add(1, { + [Attributes.L1_TX_TYPE]: 'submitProof', + [Attributes.OK]: false, + }); + } + + recordSubmitProof(durationMs: number, stats: L1PublishProofStats) { + this.recordTx(durationMs, stats); } public recordProvingJob(executionTimeMs: number, totalTimeMs: number, numBlocks: number, numTxs: number) { @@ -34,4 +110,34 @@ export class ProverNodeMetrics { this.provingJobBlocks.record(Math.floor(numBlocks)); this.provingJobTransactions.record(Math.floor(numTxs)); } + + private recordTx(durationMs: number, stats: L1PublishStats) { + const attributes = { + [Attributes.L1_TX_TYPE]: 'submitProof', + [Attributes.L1_SENDER]: stats.sender, + } as const; + + this.txCount.add(1, { + ...attributes, + [Attributes.OK]: true, + }); + + this.txDuration.record(Math.ceil(durationMs), attributes); + this.txGas.record( + // safe to downcast - total block limit is 30M gas which fits in a JS number + Number(stats.gasUsed), + attributes, + ); + this.txCalldataGas.record(stats.calldataGas, attributes); + this.txCalldataSize.record(stats.calldataSize, attributes); + + this.txBlobDataGasCost.record(Number(stats.blobDataGas), attributes); + this.txBlobDataGasUsed.record(Number(stats.blobGasUsed), attributes); + + try { + this.gasPrice.record(parseInt(formatEther(stats.gasPrice, 'gwei'), 10)); + } catch (e) { + // ignore + } + } } diff --git a/yarn-project/prover-node/src/monitors/claims-monitor.test.ts b/yarn-project/prover-node/src/monitors/claims-monitor.test.ts index 610284e3caa..4dcea17acb6 100644 --- a/yarn-project/prover-node/src/monitors/claims-monitor.test.ts +++ b/yarn-project/prover-node/src/monitors/claims-monitor.test.ts @@ -1,21 +1,21 @@ import { type EpochProofClaim } from '@aztec/circuit-types'; import { EthAddress } from '@aztec/circuits.js'; import { sleep } from '@aztec/foundation/sleep'; -import { type L1Publisher } from '@aztec/sequencer-client'; import { type MockProxy, mock } from 'jest-mock-extended'; +import { type ProverNodePublisher } from 
'../prover-node-publisher.js'; import { ClaimsMonitor, type ClaimsMonitorHandler } from './claims-monitor.js'; describe('ClaimsMonitor', () => { - let l1Publisher: MockProxy; + let l1Publisher: MockProxy; let handler: MockProxy; let claimsMonitor: ClaimsMonitor; let publisherAddress: EthAddress; beforeEach(() => { - l1Publisher = mock(); + l1Publisher = mock(); handler = mock(); publisherAddress = EthAddress.random(); diff --git a/yarn-project/prover-node/src/monitors/claims-monitor.ts b/yarn-project/prover-node/src/monitors/claims-monitor.ts index 56cf77ea7a2..0aeb7c6a58f 100644 --- a/yarn-project/prover-node/src/monitors/claims-monitor.ts +++ b/yarn-project/prover-node/src/monitors/claims-monitor.ts @@ -2,7 +2,6 @@ import { type EpochProofClaim } from '@aztec/circuit-types'; import { type EthAddress } from '@aztec/circuits.js'; import { createLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; -import { type L1Publisher } from '@aztec/sequencer-client'; import { type TelemetryClient, type Traceable, @@ -11,6 +10,8 @@ import { trackSpan, } from '@aztec/telemetry-client'; +import { type ProverNodePublisher } from '../prover-node-publisher.js'; + export interface ClaimsMonitorHandler { handleClaim(proofClaim: EpochProofClaim): Promise; } @@ -25,7 +26,7 @@ export class ClaimsMonitor implements Traceable { public readonly tracer: Tracer; constructor( - private readonly l1Publisher: L1Publisher, + private readonly l1Publisher: ProverNodePublisher, private options: { pollingIntervalMs: number }, telemetry: TelemetryClient = getTelemetryClient(), ) { diff --git a/yarn-project/prover-node/src/prover-node-publisher.ts b/yarn-project/prover-node/src/prover-node-publisher.ts new file mode 100644 index 00000000000..8e96b709ebd --- /dev/null +++ b/yarn-project/prover-node/src/prover-node-publisher.ts @@ -0,0 +1,276 @@ +import { type L1PublishProofStats } from '@aztec/circuit-types/stats'; +import { 
AGGREGATION_OBJECT_LENGTH, AZTEC_MAX_EPOCH_DURATION, type Proof } from '@aztec/circuits.js'; +import { type FeeRecipient, type RootRollupPublicInputs } from '@aztec/circuits.js/rollup'; +import { type L1TxUtils, type RollupContract } from '@aztec/ethereum'; +import { makeTuple } from '@aztec/foundation/array'; +import { areArraysEqual, times } from '@aztec/foundation/collection'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { Fr } from '@aztec/foundation/fields'; +import { createLogger } from '@aztec/foundation/log'; +import { type Tuple, serializeToBuffer } from '@aztec/foundation/serialize'; +import { InterruptibleSleep } from '@aztec/foundation/sleep'; +import { Timer } from '@aztec/foundation/timer'; +import { RollupAbi } from '@aztec/l1-artifacts'; +import { type PublisherConfig, type TxSenderConfig } from '@aztec/sequencer-client'; +import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client'; + +import { type Hex, type TransactionReceipt, encodeFunctionData } from 'viem'; + +import { ProverNodeMetrics } from './metrics.js'; + +/** + * Stats for a sent transaction. 
+ */ +/** Arguments to the submitEpochProof method of the rollup contract */ +export type L1SubmitEpochProofArgs = { + epochSize: number; + previousArchive: Fr; + endArchive: Fr; + previousBlockHash: Fr; + endBlockHash: Fr; + endTimestamp: Fr; + outHash: Fr; + proverId: Fr; + fees: Tuple; + proof: Proof; +}; + +export class ProverNodePublisher { + private interruptibleSleep = new InterruptibleSleep(); + private sleepTimeMs: number; + private interrupted = false; + private metrics: ProverNodeMetrics; + + protected log = createLogger('prover-node:l1-tx-publisher'); + + protected rollupContract: RollupContract; + + public readonly l1TxUtils: L1TxUtils; + + constructor( + config: TxSenderConfig & PublisherConfig, + deps: { + rollupContract: RollupContract; + l1TxUtils: L1TxUtils; + telemetry?: TelemetryClient; + }, + ) { + this.sleepTimeMs = config?.l1PublishRetryIntervalMS ?? 60_000; + + const telemetry = deps.telemetry ?? getTelemetryClient(); + + this.metrics = new ProverNodeMetrics(telemetry, 'ProverNode'); + + this.rollupContract = deps.rollupContract; + this.l1TxUtils = deps.l1TxUtils; + } + + /** + * Calling `interrupt` will cause any in progress call to `publishRollup` to return `false` asap. + * Be warned, the call may return false even if the tx subsequently gets successfully mined. + * In practice this shouldn't matter, as we'll only ever be calling `interrupt` when we know it's going to fail. + * A call to `restart` is required before you can continue publishing. + */ + public interrupt() { + this.interrupted = true; + this.interruptibleSleep.interrupt(); + } + + /** Restarts the publisher after calling `interrupt`. 
*/ + public restart() { + this.interrupted = false; + } + + public getSenderAddress() { + return EthAddress.fromString(this.l1TxUtils.getSenderAddress()); + } + + public getProofClaim() { + return this.rollupContract.getProofClaim(); + } + + public async submitEpochProof(args: { + epochNumber: number; + fromBlock: number; + toBlock: number; + publicInputs: RootRollupPublicInputs; + proof: Proof; + }): Promise { + const { epochNumber, fromBlock, toBlock } = args; + const ctx = { epochNumber, fromBlock, toBlock }; + if (!this.interrupted) { + const timer = new Timer(); + + // Validate epoch proof range and hashes are correct before submitting + await this.validateEpochProofSubmission(args); + + const txReceipt = await this.sendSubmitEpochProofTx(args); + if (!txReceipt) { + return false; + } + + // Tx was mined successfully + if (txReceipt.status) { + const tx = await this.l1TxUtils.getTransactionStats(txReceipt.transactionHash); + const stats: L1PublishProofStats = { + gasPrice: txReceipt.effectiveGasPrice, + gasUsed: txReceipt.gasUsed, + transactionHash: txReceipt.transactionHash, + calldataGas: tx!.calldataGas, + calldataSize: tx!.calldataSize, + sender: tx!.sender, + blobDataGas: 0n, + blobGasUsed: 0n, + eventName: 'proof-published-to-l1', + }; + this.log.info(`Published epoch proof to L1 rollup contract`, { ...stats, ...ctx }); + this.metrics.recordSubmitProof(timer.ms(), stats); + return true; + } + + this.metrics.recordFailedTx(); + this.log.error(`Rollup.submitEpochProof tx status failed: ${txReceipt.transactionHash}`, ctx); + await this.sleepOrInterrupted(); + } + + this.log.verbose('L2 block data syncing interrupted while processing blocks.', ctx); + return false; + } + + private async validateEpochProofSubmission(args: { + fromBlock: number; + toBlock: number; + publicInputs: RootRollupPublicInputs; + proof: Proof; + }) { + const { fromBlock, toBlock, publicInputs, proof } = args; + + // Check that the block numbers match the expected epoch to be proven + 
const { pendingBlockNumber: pending, provenBlockNumber: proven } = await this.rollupContract.getTips(); + if (proven !== BigInt(fromBlock) - 1n) { + throw new Error(`Cannot submit epoch proof for ${fromBlock}-${toBlock} as proven block is ${proven}`); + } + if (toBlock > pending) { + throw new Error(`Cannot submit epoch proof for ${fromBlock}-${toBlock} as pending block is ${pending}`); + } + + // Check the block hash and archive for the immediate block before the epoch + const blockLog = await this.rollupContract.getBlock(proven); + if (publicInputs.previousArchive.root.toString() !== blockLog.archive) { + throw new Error( + `Previous archive root mismatch: ${publicInputs.previousArchive.root.toString()} !== ${blockLog.archive}`, + ); + } + // TODO: Remove zero check once we inject the proper zero blockhash + if (blockLog.blockHash !== Fr.ZERO.toString() && publicInputs.previousBlockHash.toString() !== blockLog.blockHash) { + throw new Error( + `Previous block hash mismatch: ${publicInputs.previousBlockHash.toString()} !== ${blockLog.blockHash}`, + ); + } + + // Check the block hash and archive for the last block in the epoch + const endBlockLog = await this.rollupContract.getBlock(BigInt(toBlock)); + if (publicInputs.endArchive.root.toString() !== endBlockLog.archive) { + throw new Error( + `End archive root mismatch: ${publicInputs.endArchive.root.toString()} !== ${endBlockLog.archive}`, + ); + } + if (publicInputs.endBlockHash.toString() !== endBlockLog.blockHash) { + throw new Error(`End block hash mismatch: ${publicInputs.endBlockHash.toString()} !== ${endBlockLog.blockHash}`); + } + + // Compare the public inputs computed by the contract with the ones injected + const rollupPublicInputs = await this.rollupContract.getEpochProofPublicInputs(this.getSubmitEpochProofArgs(args)); + const aggregationObject = proof.isEmpty() + ? 
times(AGGREGATION_OBJECT_LENGTH, Fr.zero) + : proof.extractAggregationObject(); + const argsPublicInputs = [...publicInputs.toFields(), ...aggregationObject]; + + if (!areArraysEqual(rollupPublicInputs.map(Fr.fromHexString), argsPublicInputs, (a, b) => a.equals(b))) { + const fmt = (inputs: Fr[] | readonly string[]) => inputs.map(x => x.toString()).join(', '); + throw new Error( + `Root rollup public inputs mismatch:\nRollup: ${fmt(rollupPublicInputs)}\nComputed:${fmt(argsPublicInputs)}`, + ); + } + } + + private async sendSubmitEpochProofTx(args: { + fromBlock: number; + toBlock: number; + publicInputs: RootRollupPublicInputs; + proof: Proof; + }): Promise { + const proofHex: Hex = `0x${args.proof.withoutPublicInputs().toString('hex')}`; + const argsArray = this.getSubmitEpochProofArgs(args); + + const txArgs = [ + { + epochSize: argsArray[0], + args: argsArray[1], + fees: argsArray[2], + blobPublicInputs: argsArray[3], + aggregationObject: argsArray[4], + proof: proofHex, + }, + ] as const; + + this.log.info(`SubmitEpochProof proofSize=${args.proof.withoutPublicInputs().length} bytes`); + const data = encodeFunctionData({ + abi: RollupAbi, + functionName: 'submitEpochRootProof', + args: txArgs, + }); + try { + const { receipt } = await this.l1TxUtils.sendAndMonitorTransaction({ + to: this.rollupContract.address, + data, + }); + + return receipt; + } catch (err) { + this.log.error(`Rollup submit epoch proof failed`, err); + const errorMsg = await this.l1TxUtils.tryGetErrorFromRevertedTx(data, { + args: [...txArgs], + functionName: 'submitEpochRootProof', + abi: RollupAbi, + address: this.rollupContract.address, + }); + this.log.error(`Rollup submit epoch proof tx reverted. 
${errorMsg}`); + return undefined; + } + } + + private getSubmitEpochProofArgs(args: { + fromBlock: number; + toBlock: number; + publicInputs: RootRollupPublicInputs; + proof: Proof; + }) { + return [ + BigInt(args.toBlock - args.fromBlock + 1), + [ + args.publicInputs.previousArchive.root.toString(), + args.publicInputs.endArchive.root.toString(), + args.publicInputs.previousBlockHash.toString(), + args.publicInputs.endBlockHash.toString(), + args.publicInputs.endTimestamp.toString(), + args.publicInputs.outHash.toString(), + args.publicInputs.proverId.toString(), + ], + makeTuple(AZTEC_MAX_EPOCH_DURATION * 2, i => + i % 2 === 0 + ? args.publicInputs.fees[i / 2].recipient.toField().toString() + : args.publicInputs.fees[(i - 1) / 2].value.toString(), + ), + `0x${args.publicInputs.blobPublicInputs + .filter((_, i) => i < args.toBlock - args.fromBlock + 1) + .map(b => b.toString()) + .join(``)}`, + `0x${serializeToBuffer(args.proof.extractAggregationObject()).toString('hex')}`, + ] as const; + } + + protected async sleepOrInterrupted() { + await this.interruptibleSleep.sleep(this.sleepTimeMs); + } +} diff --git a/yarn-project/prover-node/src/prover-node.test.ts b/yarn-project/prover-node/src/prover-node.test.ts index fd30c9ac9e5..76cb629d053 100644 --- a/yarn-project/prover-node/src/prover-node.test.ts +++ b/yarn-project/prover-node/src/prover-node.test.ts @@ -24,7 +24,6 @@ import { sleep } from '@aztec/foundation/sleep'; import { openTmpStore } from '@aztec/kv-store/lmdb'; import { type BootstrapNode, InMemoryTxPool, MemoryEpochProofQuotePool, P2PClient } from '@aztec/p2p'; import { createBootstrapNode, createTestLibP2PService } from '@aztec/p2p/mocks'; -import { type L1Publisher } from '@aztec/sequencer-client'; import { type PublicProcessorFactory } from '@aztec/simulator/server'; import { getTelemetryClient } from '@aztec/telemetry-client'; @@ -35,6 +34,7 @@ import { type BondManager } from './bond/bond-manager.js'; import { type EpochProvingJob } from 
'./job/epoch-proving-job.js'; import { ClaimsMonitor } from './monitors/claims-monitor.js'; import { EpochMonitor } from './monitors/epoch-monitor.js'; +import { type ProverNodePublisher } from './prover-node-publisher.js'; import { ProverNode, type ProverNodeOptions } from './prover-node.js'; import { type QuoteProvider } from './quote-provider/index.js'; import { type QuoteSigner } from './quote-signer.js'; @@ -42,7 +42,7 @@ import { type QuoteSigner } from './quote-signer.js'; describe('prover-node', () => { // Prover node dependencies let prover: MockProxy; - let publisher: MockProxy; + let publisher: MockProxy; let l2BlockSource: MockProxy; let l1ToL2MessageSource: MockProxy; let contractDataSource: MockProxy; @@ -105,7 +105,7 @@ describe('prover-node', () => { beforeEach(async () => { prover = mock(); - publisher = mock(); + publisher = mock(); l2BlockSource = mock(); l1ToL2MessageSource = mock(); contractDataSource = mock(); diff --git a/yarn-project/prover-node/src/prover-node.ts b/yarn-project/prover-node/src/prover-node.ts index 80b0f5b19d3..05a2f0a16da 100644 --- a/yarn-project/prover-node/src/prover-node.ts +++ b/yarn-project/prover-node/src/prover-node.ts @@ -26,7 +26,6 @@ import { retryUntil } from '@aztec/foundation/retry'; import { DateProvider } from '@aztec/foundation/timer'; import { type Maybe } from '@aztec/foundation/types'; import { type P2P } from '@aztec/p2p'; -import { type L1Publisher } from '@aztec/sequencer-client'; import { PublicProcessorFactory } from '@aztec/simulator/server'; import { Attributes, @@ -42,6 +41,7 @@ import { EpochProvingJob, type EpochProvingJobState } from './job/epoch-proving- import { ProverNodeMetrics } from './metrics.js'; import { type ClaimsMonitor, type ClaimsMonitorHandler } from './monitors/claims-monitor.js'; import { type EpochMonitor, type EpochMonitorHandler } from './monitors/epoch-monitor.js'; +import { type ProverNodePublisher } from './prover-node-publisher.js'; import { type QuoteProvider } from 
'./quote-provider/index.js'; import { type QuoteSigner } from './quote-signer.js'; @@ -74,7 +74,7 @@ export class ProverNode implements ClaimsMonitorHandler, EpochMonitorHandler, Pr constructor( protected readonly prover: EpochProverManager, - protected readonly publisher: L1Publisher, + protected readonly publisher: ProverNodePublisher, protected readonly l2BlockSource: L2BlockSource & Maybe, protected readonly l1ToL2MessageSource: L1ToL2MessageSource, protected readonly contractDataSource: ContractDataSource, diff --git a/yarn-project/prover-node/src/test/index.ts b/yarn-project/prover-node/src/test/index.ts index 76154d9969a..4b6baca68bd 100644 --- a/yarn-project/prover-node/src/test/index.ts +++ b/yarn-project/prover-node/src/test/index.ts @@ -1,11 +1,11 @@ import { type EpochProverManager } from '@aztec/circuit-types'; -import { type L1Publisher } from '@aztec/sequencer-client'; +import { type ProverNodePublisher } from '../prover-node-publisher.js'; import { ProverNode } from '../prover-node.js'; class TestProverNode_ extends ProverNode { public override prover!: EpochProverManager; - public override publisher!: L1Publisher; + public override publisher!: ProverNodePublisher; } export type TestProverNode = TestProverNode_; diff --git a/yarn-project/sequencer-client/package.json b/yarn-project/sequencer-client/package.json index 2cd48ce2612..7e8ac6ebd59 100644 --- a/yarn-project/sequencer-client/package.json +++ b/yarn-project/sequencer-client/package.json @@ -33,6 +33,7 @@ "@aztec/blob-sink": "workspace:^", "@aztec/circuit-types": "workspace:^", "@aztec/circuits.js": "workspace:^", + "@aztec/epoch-cache": "workspace:^", "@aztec/ethereum": "workspace:^", "@aztec/foundation": "workspace:^", "@aztec/l1-artifacts": "workspace:^", diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index 21d32328cae..6999904110c 100644 --- 
a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -1,8 +1,17 @@ import { type BlobSinkClientInterface } from '@aztec/blob-sink/client'; import { type L1ToL2MessageSource, type L2BlockSource, type WorldStateSynchronizer } from '@aztec/circuit-types'; -import { type ContractDataSource } from '@aztec/circuits.js'; -import { isAnvilTestChain } from '@aztec/ethereum'; -import { type EthAddress } from '@aztec/foundation/eth-address'; +import { type AztecAddress, type ContractDataSource } from '@aztec/circuits.js'; +import { EpochCache } from '@aztec/epoch-cache'; +import { + ForwarderContract, + L1TxUtilsWithBlobs, + RollupContract, + createEthereumChain, + createL1Clients, + isAnvilTestChain, +} from '@aztec/ethereum'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { createLogger } from '@aztec/foundation/log'; import { type DateProvider } from '@aztec/foundation/timer'; import { type P2P } from '@aztec/p2p'; import { LightweightBlockBuilderFactory } from '@aztec/prover-client/block-builder'; @@ -12,7 +21,7 @@ import { type ValidatorClient } from '@aztec/validator-client'; import { type SequencerClientConfig } from '../config.js'; import { GlobalVariableBuilder } from '../global_variable_builder/index.js'; -import { L1Publisher } from '../publisher/index.js'; +import { SequencerPublisher } from '../publisher/index.js'; import { Sequencer, type SequencerConfig } from '../sequencer/index.js'; import { type SlasherClient } from '../slasher/index.js'; @@ -46,9 +55,11 @@ export class SequencerClient { l2BlockSource: L2BlockSource; l1ToL2MessageSource: L1ToL2MessageSource; telemetry: TelemetryClient; - publisher?: L1Publisher; + publisher?: SequencerPublisher; blobSinkClient?: BlobSinkClientInterface; dateProvider: DateProvider; + epochCache?: EpochCache; + l1TxUtils?: L1TxUtilsWithBlobs; }, ) { const { @@ -61,18 +72,49 @@ export class SequencerClient { 
l1ToL2MessageSource, telemetry: telemetryClient, } = deps; + const { l1RpcUrl: rpcUrl, l1ChainId: chainId, publisherPrivateKey } = config; + const chain = createEthereumChain(rpcUrl, chainId); + const log = createLogger('sequencer-client'); + const { publicClient, walletClient } = createL1Clients(rpcUrl, publisherPrivateKey, chain.chainInfo); + const l1TxUtils = deps.l1TxUtils ?? new L1TxUtilsWithBlobs(publicClient, walletClient, log, config); + const rollupContract = new RollupContract(publicClient, config.l1Contracts.rollupAddress.toString()); + const [l1GenesisTime, slotDuration] = await Promise.all([ + rollupContract.getL1GenesisTime(), + rollupContract.getSlotDuration(), + ] as const); + const forwarderContract = + config.customForwarderContractAddress && config.customForwarderContractAddress !== EthAddress.ZERO + ? new ForwarderContract(publicClient, config.customForwarderContractAddress.toString()) + : await ForwarderContract.create(walletClient.account.address, walletClient, publicClient, log); + const epochCache = + deps.epochCache ?? + (await EpochCache.create( + config.l1Contracts.rollupAddress, + { + l1RpcUrl: rpcUrl, + l1ChainId: chainId, + viemPollingIntervalMS: config.viemPollingIntervalMS, + aztecSlotDuration: config.aztecSlotDuration, + ethereumSlotDuration: config.ethereumSlotDuration, + aztecEpochDuration: config.aztecEpochDuration, + }, + { dateProvider: deps.dateProvider }, + )); + const publisher = - deps.publisher ?? new L1Publisher(config, { telemetry: telemetryClient, blobSinkClient: deps.blobSinkClient }); + deps.publisher ?? 
+ new SequencerPublisher(config, { + l1TxUtils, + telemetry: telemetryClient, + blobSinkClient: deps.blobSinkClient, + rollupContract, + epochCache, + forwarderContract, + }); const globalsBuilder = new GlobalVariableBuilder(config); const publicProcessorFactory = new PublicProcessorFactory(contractDataSource, deps.dateProvider, telemetryClient); - const rollup = publisher.getRollupContract(); - const [l1GenesisTime, slotDuration] = await Promise.all([ - rollup.read.GENESIS_TIME(), - rollup.read.SLOT_DURATION(), - ] as const); - const ethereumSlotDuration = config.ethereumSlotDuration; // When running in anvil, assume we can post a tx up until the very last second of an L1 slot. @@ -143,7 +185,11 @@ export class SequencerClient { return this.sequencer.coinbase; } - get feeRecipient() { + get feeRecipient(): AztecAddress { return this.sequencer.feeRecipient; } + + get forwarderAddress(): EthAddress { + return this.sequencer.getForwarderAddress(); + } } diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index 9db77ac13b3..51e7c636244 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -11,8 +11,8 @@ import { booleanConfigHelper, getConfigFromMappings, numberConfigHelper, + pickConfigMappings, } from '@aztec/foundation/config'; -import { pickConfigMappings } from '@aztec/foundation/config'; import { EthAddress } from '@aztec/foundation/eth-address'; import { @@ -41,7 +41,7 @@ export type SequencerClientConfig = PublisherConfig & SequencerConfig & L1ReaderConfig & ChainConfig & - Pick; + Pick; export const sequencerConfigMappings: ConfigMappingsType = { transactionPollingIntervalMS: { @@ -138,7 +138,7 @@ export const sequencerClientConfigMappings: ConfigMappingsType EthAddress.fromString(val), + description: 'The address of the custom forwarder contract.', + defaultValue: EthAddress.ZERO, + }, publisherPrivateKey: { env: `${scope}_PUBLISHER_PRIVATE_KEY`, 
description: 'The private key to be used by the publisher.', diff --git a/yarn-project/sequencer-client/src/publisher/index.ts b/yarn-project/sequencer-client/src/publisher/index.ts index af6d71529a6..4f62dcdd426 100644 --- a/yarn-project/sequencer-client/src/publisher/index.ts +++ b/yarn-project/sequencer-client/src/publisher/index.ts @@ -1 +1 @@ -export { L1Publisher, L1SubmitEpochProofArgs } from './l1-publisher.js'; +export { SequencerPublisher } from './sequencer-publisher.js'; diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts deleted file mode 100644 index eab6b801864..00000000000 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ /dev/null @@ -1,345 +0,0 @@ -import { HttpBlobSinkClient } from '@aztec/blob-sink/client'; -import { inboundTransform } from '@aztec/blob-sink/encoding'; -import { L2Block } from '@aztec/circuit-types'; -import { EthAddress } from '@aztec/circuits.js'; -import { - type GasPrice, - type L1ContractsConfig, - type L1TxRequest, - type L1TxUtilsConfig, - defaultL1TxUtilsConfig, - getL1ContractsConfigEnvVars, -} from '@aztec/ethereum'; -import { Blob } from '@aztec/foundation/blob'; -import { type ViemSignature } from '@aztec/foundation/eth-signature'; -import { sleep } from '@aztec/foundation/sleep'; -import { RollupAbi } from '@aztec/l1-artifacts'; - -import { jest } from '@jest/globals'; -import express, { json } from 'express'; -import { type Server } from 'http'; -import { type MockProxy, mock } from 'jest-mock-extended'; -import { - type GetTransactionReceiptReturnType, - type PrivateKeyAccount, - type TransactionReceipt, - encodeFunctionData, -} from 'viem'; - -import { type PublisherConfig, type TxSenderConfig } from './config.js'; -import { L1Publisher } from './l1-publisher.js'; - -const mockRollupAddress = '0xcafe'; - -interface MockPublicClient { - getTransactionReceipt: ({ hash }: { hash: '0x${string}' }) => 
Promise; - getBlock(): Promise<{ timestamp: bigint }>; - getTransaction: ({ hash }: { hash: '0x${string}' }) => Promise<{ input: `0x${string}`; hash: `0x${string}` }>; - estimateGas: ({ to, data }: { to: '0x${string}'; data: '0x${string}' }) => Promise; -} - -interface MockL1TxUtils { - sendAndMonitorTransaction: ( - request: L1TxRequest, - _gasConfig?: Partial, - ) => Promise<{ receipt: TransactionReceipt; gasPrice: GasPrice }>; -} - -interface MockRollupContractWrite { - propose: ( - args: readonly [`0x${string}`, `0x${string}`] | readonly [`0x${string}`, `0x${string}`, `0x${string}`], - options: { account: PrivateKeyAccount }, - ) => Promise<`0x${string}`>; -} - -interface MockRollupContractRead { - archive: () => Promise<`0x${string}`>; - getCurrentSlot(): Promise; - validateHeader: ( - args: readonly [ - `0x${string}`, - ViemSignature[], - `0x${string}`, - bigint, - { ignoreDA: boolean; ignoreSignatures: boolean }, - ], - ) => Promise; -} - -class MockRollupContract { - constructor(public write: MockRollupContractWrite, public read: MockRollupContractRead, public abi = RollupAbi) {} - get address() { - return mockRollupAddress; - } -} - -const BLOB_SINK_PORT = 50525; -const BLOB_SINK_URL = `http://localhost:${BLOB_SINK_PORT}`; - -describe('L1Publisher', () => { - let rollupContractRead: MockProxy; - let rollupContractWrite: MockProxy; - let rollupContract: MockRollupContract; - - let publicClient: MockProxy; - let l1TxUtils: MockProxy; - - let proposeTxHash: `0x${string}`; - let proposeTxReceipt: GetTransactionReceiptReturnType; - let l2Block: L2Block; - - let header: Buffer; - let archive: Buffer; - let blockHash: Buffer; - let body: Buffer; - - let blobSinkClient: HttpBlobSinkClient; - let mockBlobSinkServer: Server | undefined = undefined; - - // An l1 publisher with some private methods exposed - let publisher: L1Publisher; - - const GAS_GUESS = 300_000n; - - beforeEach(async () => { - mockBlobSinkServer = undefined; - blobSinkClient = new 
HttpBlobSinkClient({ blobSinkUrl: BLOB_SINK_URL }); - - l2Block = await L2Block.random(42); - - header = l2Block.header.toBuffer(); - archive = l2Block.archive.root.toBuffer(); - blockHash = (await l2Block.header.hash()).toBuffer(); - body = l2Block.body.toBuffer(); - - proposeTxHash = `0x${Buffer.from('txHashPropose').toString('hex')}`; // random tx hash - - proposeTxReceipt = { - transactionHash: proposeTxHash, - status: 'success', - logs: [], - } as unknown as GetTransactionReceiptReturnType; - - rollupContractWrite = mock(); - rollupContractRead = mock(); - rollupContract = new MockRollupContract(rollupContractWrite, rollupContractRead); - - publicClient = mock(); - l1TxUtils = mock(); - const config = { - blobSinkUrl: BLOB_SINK_URL, - l1RpcUrl: `http://127.0.0.1:8545`, - l1ChainId: 1, - publisherPrivateKey: `0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80`, - l1Contracts: { rollupAddress: EthAddress.ZERO.toString() }, - l1PublishRetryIntervalMS: 1, - ethereumSlotDuration: getL1ContractsConfigEnvVars().ethereumSlotDuration, - ...defaultL1TxUtilsConfig, - } as unknown as TxSenderConfig & - PublisherConfig & - Pick & - L1TxUtilsConfig; - - publisher = new L1Publisher(config, { blobSinkClient }); - - (publisher as any)['rollupContract'] = rollupContract; - (publisher as any)['publicClient'] = publicClient; - (publisher as any)['l1TxUtils'] = l1TxUtils; - publisher as any; - - rollupContractRead.getCurrentSlot.mockResolvedValue(l2Block.header.globalVariables.slotNumber.toBigInt()); - publicClient.getBlock.mockResolvedValue({ timestamp: 12n }); - publicClient.estimateGas.mockResolvedValue(GAS_GUESS); - l1TxUtils.sendAndMonitorTransaction.mockResolvedValue({ - receipt: proposeTxReceipt, - gasPrice: { maxFeePerGas: 1n, maxPriorityFeePerGas: 1n }, - }); - (l1TxUtils as any).estimateGas.mockResolvedValue(GAS_GUESS); - (l1TxUtils as any).simulateGasUsed.mockResolvedValue(1_000_000n); - (l1TxUtils as any).bumpGasLimit.mockImplementation((val: bigint) => 
val + (val * 20n) / 100n); - }); - - const closeServer = (server: Server): Promise => { - return new Promise((resolve, reject) => { - server.close(err => { - if (err) { - reject(err); - return; - } - resolve(); - }); - }); - }; - - afterEach(async () => { - if (mockBlobSinkServer) { - await closeServer(mockBlobSinkServer); - mockBlobSinkServer = undefined; - } - }); - - // Run a mock blob sink in the background, and test that the correct data is sent to it - const runBlobSinkServer = (blobs: Blob[]) => { - const app = express(); - app.use(json({ limit: '10mb' })); - - app.post('/blob_sidecar', (req, res) => { - const blobsBuffers = req.body.blobs.map((b: { index: number; blob: { type: string; data: string } }) => - Blob.fromBuffer(inboundTransform(Buffer.from(b.blob.data))), - ); - - expect(blobsBuffers).toEqual(blobs); - res.status(200).send(); - }); - - return new Promise(resolve => { - mockBlobSinkServer = app.listen(BLOB_SINK_PORT, () => { - // Resolve when the server is listening - resolve(); - }); - }); - }; - - it('publishes and propose l2 block to l1', async () => { - rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - rollupContractWrite.propose.mockResolvedValueOnce(proposeTxHash); - - const kzg = Blob.getViemKzgInstance(); - - const expectedBlobs = await Blob.getBlobs(l2Block.body.toBlobFields()); - - // Check the blobs were forwarded to the blob sink service - const sendToBlobSinkSpy = jest.spyOn(publisher as any, 'sendBlobsToBlobSink'); - - // Expect the blob sink server to receive the blobs - await runBlobSinkServer(expectedBlobs); - - const result = await publisher.proposeL2Block(l2Block); - - expect(result).toEqual(true); - - const blobInput = Blob.getEthBlobEvaluationInputs(expectedBlobs); - - const args = [ - { - header: `0x${header.toString('hex')}`, - archive: `0x${archive.toString('hex')}`, - blockHash: `0x${blockHash.toString('hex')}`, - oracleInput: { - feeAssetPriceModifier: 0n, - 
provingCostModifier: 0n, - }, - txHashes: [], - }, - [], - `0x${body.toString('hex')}`, - blobInput, - ] as const; - expect(l1TxUtils.sendAndMonitorTransaction).toHaveBeenCalledWith( - { - to: mockRollupAddress, - data: encodeFunctionData({ abi: rollupContract.abi, functionName: 'propose', args }), - }, - // val + (val * 20n) / 100n - { gasLimit: 1_000_000n + GAS_GUESS + ((1_000_000n + GAS_GUESS) * 20n) / 100n }, - { blobs: expectedBlobs.map(b => b.data), kzg }, - ); - - expect(sendToBlobSinkSpy).toHaveBeenCalledTimes(1); - // If this does not return true, then the mocked server will have errored, and - // the expects that run there will have failed - const returnValuePromise = sendToBlobSinkSpy.mock.results[0].value; - expect(await returnValuePromise).toBe(true); - }); - - it('does not retry if sending a propose tx fails', async () => { - rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - l1TxUtils.sendAndMonitorTransaction - .mockRejectedValueOnce(new Error()) - .mockResolvedValueOnce({ receipt: proposeTxReceipt, gasPrice: { maxFeePerGas: 1n, maxPriorityFeePerGas: 1n } }); - - const result = await publisher.proposeL2Block(l2Block); - - expect(result).toEqual(false); - }); - - it('does not retry if simulating a publish and propose tx fails', async () => { - rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - rollupContractRead.validateHeader.mockRejectedValueOnce(new Error('Test error')); - - await expect(publisher.proposeL2Block(l2Block)).rejects.toThrow(); - - expect(rollupContractRead.validateHeader).toHaveBeenCalledTimes(1); - }); - - it('does not retry if sending a publish and propose tx fails', async () => { - rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - l1TxUtils.sendAndMonitorTransaction - .mockRejectedValueOnce(new Error()) - .mockResolvedValueOnce({ receipt: proposeTxReceipt, 
gasPrice: { maxFeePerGas: 1n, maxPriorityFeePerGas: 1n } }); - - const result = await publisher.proposeL2Block(l2Block); - - expect(result).toEqual(false); - }); - - it('returns false if publish and propose tx reverts', async () => { - rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - l1TxUtils.sendAndMonitorTransaction.mockResolvedValueOnce({ - receipt: { ...proposeTxReceipt, status: 'reverted' }, - gasPrice: { maxFeePerGas: 1n, maxPriorityFeePerGas: 1n }, - }); - - const result = await publisher.proposeL2Block(l2Block); - - expect(result).toEqual(false); - }); - - it('returns false if propose tx reverts', async () => { - rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - - l1TxUtils.sendAndMonitorTransaction.mockResolvedValueOnce({ - receipt: { ...proposeTxReceipt, status: 'reverted' }, - gasPrice: { maxFeePerGas: 1n, maxPriorityFeePerGas: 1n }, - }); - - const result = await publisher.proposeL2Block(l2Block); - - expect(result).toEqual(false); - }); - - it('returns false if sending publish and progress tx is interrupted', async () => { - rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - l1TxUtils.sendAndMonitorTransaction.mockImplementationOnce( - () => - sleep(10, { receipt: proposeTxReceipt, gasPrice: { maxFeePerGas: 1n, maxPriorityFeePerGas: 1n } }) as Promise<{ - receipt: TransactionReceipt; - gasPrice: GasPrice; - }>, - ); - const resultPromise = publisher.proposeL2Block(l2Block); - publisher.interrupt(); - const result = await resultPromise; - - expect(result).toEqual(false); - expect(publicClient.getTransactionReceipt).not.toHaveBeenCalled(); - }); - - it('returns false if sending propose tx is interrupted', async () => { - rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - 
l1TxUtils.sendAndMonitorTransaction.mockImplementationOnce( - () => - sleep(10, { receipt: proposeTxReceipt, gasPrice: { maxFeePerGas: 1n, maxPriorityFeePerGas: 1n } }) as Promise<{ - receipt: TransactionReceipt; - gasPrice: GasPrice; - }>, - ); - - const resultPromise = publisher.proposeL2Block(l2Block); - publisher.interrupt(); - const result = await resultPromise; - - expect(result).toEqual(false); - expect(publicClient.getTransactionReceipt).not.toHaveBeenCalled(); - }); -}); diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts deleted file mode 100644 index e7db6581f20..00000000000 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ /dev/null @@ -1,1299 +0,0 @@ -import { type BlobSinkClientInterface, createBlobSinkClient } from '@aztec/blob-sink/client'; -import { - ConsensusPayload, - type EpochProofClaim, - type EpochProofQuote, - type L2Block, - SignatureDomainSeparator, - type TxHash, - getHashedSignaturePayload, -} from '@aztec/circuit-types'; -import { type L1PublishBlockStats, type L1PublishProofStats, type L1PublishStats } from '@aztec/circuit-types/stats'; -import { - AGGREGATION_OBJECT_LENGTH, - AZTEC_MAX_EPOCH_DURATION, - type BlockHeader, - EthAddress, - type Proof, -} from '@aztec/circuits.js'; -import { type FeeRecipient, type RootRollupPublicInputs } from '@aztec/circuits.js/rollup'; -import { - type EthereumChain, - FormattedViemError, - type GasPrice, - type L1ContractsConfig, - type L1TxUtils, - L1TxUtilsWithBlobs, - createEthereumChain, - formatViemError, -} from '@aztec/ethereum'; -import { makeTuple } from '@aztec/foundation/array'; -import { toHex } from '@aztec/foundation/bigint-buffer'; -import { Blob } from '@aztec/foundation/blob'; -import { areArraysEqual, compactArray, times } from '@aztec/foundation/collection'; -import { type Signature } from '@aztec/foundation/eth-signature'; -import { Fr } from '@aztec/foundation/fields'; -import 
{ type Logger, createLogger } from '@aztec/foundation/log'; -import { type Tuple, serializeToBuffer } from '@aztec/foundation/serialize'; -import { InterruptibleSleep } from '@aztec/foundation/sleep'; -import { Timer } from '@aztec/foundation/timer'; -import { EmpireBaseAbi, RollupAbi, SlasherAbi } from '@aztec/l1-artifacts'; -import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client'; - -import pick from 'lodash.pick'; -import { - type BaseError, - type Chain, - type Client, - type ContractFunctionExecutionError, - ContractFunctionRevertedError, - type GetContractReturnType, - type Hex, - type HttpTransport, - type PrivateKeyAccount, - type PublicActions, - type PublicClient, - type PublicRpcSchema, - type TransactionReceipt, - type WalletActions, - type WalletClient, - type WalletRpcSchema, - createPublicClient, - createWalletClient, - encodeFunctionData, - getAbiItem, - getAddress, - getContract, - getContractError, - hexToBytes, - http, - publicActions, -} from 'viem'; -import { privateKeyToAccount } from 'viem/accounts'; - -import { type PublisherConfig, type TxSenderConfig } from './config.js'; -import { L1PublisherMetrics } from './l1-publisher-metrics.js'; - -/** - * Stats for a sent transaction. - */ -export type TransactionStats = { - /** Address of the sender. */ - sender: string; - /** Hash of the transaction. */ - transactionHash: string; - /** Size in bytes of the tx calldata */ - calldataSize: number; - /** Gas required to pay for the calldata inclusion (depends on size and number of zeros) */ - calldataGas: number; -}; - -/** - * Minimal information from a tx receipt. - */ -export type MinimalTransactionReceipt = { - /** True if the tx was successful, false if reverted. */ - status: boolean; - /** Hash of the transaction. */ - transactionHash: `0x${string}`; - /** Effective gas used by the tx. */ - gasUsed: bigint; - /** Effective gas price paid by the tx. */ - gasPrice: bigint; - /** Logs emitted in this tx. 
*/ - logs: any[]; - /** Block number in which this tx was mined. */ - blockNumber: bigint; - /** The block hash in which this tx was mined */ - blockHash: `0x${string}`; -}; - -/** Arguments to the process method of the rollup contract */ -type L1ProcessArgs = { - /** The L2 block header. */ - header: Buffer; - /** A root of the archive tree after the L2 block is applied. */ - archive: Buffer; - /** The L2 block's leaf in the archive tree. */ - blockHash: Buffer; - /** L2 block body. TODO(#9101): Remove block body once we can extract blobs. */ - body: Buffer; - /** L2 block blobs containing all tx effects. */ - blobs: Blob[]; - /** L2 block tx hashes */ - txHashes: TxHash[]; - /** Attestations */ - attestations?: Signature[]; -}; - -type L1ProcessReturnType = { - receipt: TransactionReceipt | undefined; - args: any; - functionName: string; - data: Hex; - gasPrice: GasPrice; -}; - -/** Arguments to the submitEpochProof method of the rollup contract */ -export type L1SubmitEpochProofArgs = { - epochSize: number; - previousArchive: Fr; - endArchive: Fr; - previousBlockHash: Fr; - endBlockHash: Fr; - endTimestamp: Fr; - outHash: Fr; - proverId: Fr; - fees: Tuple; - proof: Proof; -}; - -export enum VoteType { - GOVERNANCE, - SLASHING, -} - -type GetSlashPayloadCallBack = (slotNumber: bigint) => Promise; - -/** - * Publishes L2 blocks to L1. This implementation does *not* retry a transaction in - * the event of network congestion, but should work for local development. - * - If sending (not mining) a tx fails, it retries indefinitely at 1-minute intervals. - * - If the tx is not mined, keeps polling indefinitely at 1-second intervals. - * - * Adapted from https://github.com/AztecProtocol/aztec2-internal/blob/master/falafel/src/rollup_publisher.ts. 
- */ -export class L1Publisher { - private interruptibleSleep = new InterruptibleSleep(); - private sleepTimeMs: number; - private interrupted = false; - private metrics: L1PublisherMetrics; - - protected governanceLog = createLogger('sequencer:publisher:governance'); - protected governanceProposerAddress?: EthAddress; - private governancePayload: EthAddress = EthAddress.ZERO; - - protected slashingLog = createLogger('sequencer:publisher:slashing'); - protected slashingProposerAddress?: EthAddress; - private getSlashPayload?: GetSlashPayloadCallBack = undefined; - - private myLastVotes: Record = { - [VoteType.GOVERNANCE]: 0n, - [VoteType.SLASHING]: 0n, - }; - - protected log = createLogger('sequencer:publisher'); - - protected rollupContract: GetContractReturnType< - typeof RollupAbi, - WalletClient - >; - - protected publicClient: PublicClient; - protected walletClient: WalletClient; - protected account: PrivateKeyAccount; - protected ethereumSlotDuration: bigint; - - private blobSinkClient: BlobSinkClientInterface; - // @note - with blobs, the below estimate seems too large. - // Total used for full block from int_l1_pub e2e test: 1m (of which 86k is 1x blob) - // Total used for emptier block from above test: 429k (of which 84k is 1x blob) - public static PROPOSE_GAS_GUESS: bigint = 12_000_000n; - public static PROPOSE_AND_CLAIM_GAS_GUESS: bigint = this.PROPOSE_GAS_GUESS + 100_000n; - - private readonly l1TxUtils: L1TxUtils; - - constructor( - config: TxSenderConfig & PublisherConfig & Pick, - deps: { telemetry?: TelemetryClient; blobSinkClient?: BlobSinkClientInterface } = {}, - ) { - this.sleepTimeMs = config?.l1PublishRetryIntervalMS ?? 60_000; - this.ethereumSlotDuration = BigInt(config.ethereumSlotDuration); - - const telemetry = deps.telemetry ?? getTelemetryClient(); - this.blobSinkClient = deps.blobSinkClient ?? 
createBlobSinkClient(config); - - this.metrics = new L1PublisherMetrics(telemetry, 'L1Publisher'); - - const { l1RpcUrl: rpcUrl, l1ChainId: chainId, publisherPrivateKey, l1Contracts } = config; - const chain = createEthereumChain(rpcUrl, chainId); - this.account = privateKeyToAccount(publisherPrivateKey); - this.log.debug(`Publishing from address ${this.account.address}`); - - this.walletClient = this.createWalletClient(this.account, chain); - - this.publicClient = createPublicClient({ - chain: chain.chainInfo, - transport: http(chain.rpcUrl), - pollingInterval: config.viemPollingIntervalMS, - }); - - this.rollupContract = getContract({ - address: getAddress(l1Contracts.rollupAddress.toString()), - abi: RollupAbi, - client: this.walletClient, - }); - - if (l1Contracts.governanceProposerAddress) { - this.governanceProposerAddress = EthAddress.fromString(l1Contracts.governanceProposerAddress.toString()); - } - - this.l1TxUtils = new L1TxUtilsWithBlobs(this.publicClient, this.walletClient, this.log, config); - } - - public registerSlashPayloadGetter(callback: GetSlashPayloadCallBack) { - this.getSlashPayload = callback; - } - - private async getSlashingProposerAddress() { - if (this.slashingProposerAddress) { - return this.slashingProposerAddress; - } - - const slasherAddress = await this.rollupContract.read.SLASHER(); - const slasher = getContract({ - address: getAddress(slasherAddress.toString()), - abi: SlasherAbi, - client: this.walletClient, - }); - this.slashingProposerAddress = EthAddress.fromString(await slasher.read.PROPOSER()); - return this.slashingProposerAddress; - } - - get publisherAddress() { - return this.account.address; - } - - protected createWalletClient( - account: PrivateKeyAccount, - chain: EthereumChain, - ): WalletClient { - return createWalletClient({ - account, - chain: chain.chainInfo, - transport: http(chain.rpcUrl), - }); - } - - public getGovernancePayload() { - return this.governancePayload; - } - - public setGovernancePayload(payload: 
EthAddress) { - this.governancePayload = payload; - } - - public getSenderAddress(): EthAddress { - return EthAddress.fromString(this.account.address); - } - - public getClient(): Client< - HttpTransport, - Chain, - PrivateKeyAccount, - [...WalletRpcSchema, ...PublicRpcSchema], - PublicActions & WalletActions - > { - return this.walletClient.extend(publicActions); - } - - public getRollupContract(): GetContractReturnType< - typeof RollupAbi, - WalletClient - > { - return this.rollupContract; - } - - /** - * @notice Calls `canProposeAtTime` with the time of the next Ethereum block and the sender address - * - * @dev Throws if unable to propose - * - * @param archive - The archive that we expect to be current state - * @return slot - The L2 slot number of the next Ethereum block, - * @return blockNumber - The L2 block number of the next L2 block - */ - public async canProposeAtNextEthBlock(archive: Buffer): Promise<[bigint, bigint]> { - // FIXME: This should not throw if unable to propose but return a falsey value, so - // we can differentiate between errors when hitting the L1 rollup contract (eg RPC error) - // which may require a retry, vs actually not being the turn for proposing. - const timeOfNextL1Slot = BigInt((await this.publicClient.getBlock()).timestamp + this.ethereumSlotDuration); - const [slot, blockNumber] = await this.rollupContract.read.canProposeAtTime([ - timeOfNextL1Slot, - `0x${archive.toString('hex')}`, - ]); - return [slot, blockNumber]; - } - - public async getClaimableEpoch(): Promise { - try { - return await this.rollupContract.read.getClaimableEpoch(); - } catch (err: any) { - const errorName = tryGetCustomErrorName(err); - // getting the error name from the abi is redundant, - // but it enforces that the error name is correct. - // That is, if the error name is not found, this will not compile. 
- const acceptedErrors = (['Rollup__NoEpochToProve', 'Rollup__ProofRightAlreadyClaimed'] as const).map( - name => getAbiItem({ abi: RollupAbi, name }).name, - ); - - if (errorName && acceptedErrors.includes(errorName as any)) { - return undefined; - } - throw err; - } - } - - public async getEpochForSlotNumber(slotNumber: bigint): Promise { - return await this.rollupContract.read.getEpochAtSlot([slotNumber]); - } - - public async getEpochToProve(): Promise { - try { - return await this.rollupContract.read.getEpochToProve(); - } catch (err: any) { - // If this is a revert with Rollup__NoEpochToProve, it means there is no epoch to prove, so we return undefined - // See https://viem.sh/docs/contract/simulateContract#handling-custom-errors - const errorName = tryGetCustomErrorName(err); - if (errorName === getAbiItem({ abi: RollupAbi, name: 'Rollup__NoEpochToProve' }).name) { - return undefined; - } - throw err; - } - } - - public async getProofClaim(): Promise { - const { - epochToProve, - basisPointFee, - bondAmount, - bondProvider: bondProviderHex, - proposerClaimant: proposerClaimantHex, - } = await this.rollupContract.read.getProofClaim(); - - const bondProvider = EthAddress.fromString(bondProviderHex); - const proposerClaimant = EthAddress.fromString(proposerClaimantHex); - - if (bondProvider.isZero() && proposerClaimant.isZero() && epochToProve === 0n) { - return undefined; - } - - return { - epochToProve, - basisPointFee, - bondAmount, - bondProvider, - proposerClaimant, - }; - } - - public async validateProofQuote(quote: EpochProofQuote): Promise { - const timeOfNextL1Slot = BigInt((await this.publicClient.getBlock()).timestamp + this.ethereumSlotDuration); - const args = [timeOfNextL1Slot, quote.toViemArgs()] as const; - try { - await this.rollupContract.read.validateEpochProofRightClaimAtTime(args, { account: this.account }); - } catch (err) { - let errorName = tryGetCustomErrorName(err); - if (!errorName) { - errorName = 
tryGetCustomErrorNameContractFunction(err as ContractFunctionExecutionError); - } - this.log.warn(`Proof quote validation failed: ${errorName}`, quote); - return undefined; - } - return quote; - } - - /** - * @notice Will call `validateHeader` to make sure that it is possible to propose - * - * @dev Throws if unable to propose - * - * @param header - The header to propose - * @param digest - The digest that attestations are signing over - * - */ - public async validateBlockForSubmission( - header: BlockHeader, - attestationData: { digest: Buffer; signatures: Signature[] } = { - digest: Buffer.alloc(32), - signatures: [], - }, - ): Promise { - const ts = BigInt((await this.publicClient.getBlock()).timestamp + this.ethereumSlotDuration); - - const formattedSignatures = attestationData.signatures.map(attest => attest.toViemSignature()); - const flags = { ignoreDA: true, ignoreSignatures: formattedSignatures.length == 0 }; - - const args = [ - `0x${header.toBuffer().toString('hex')}`, - formattedSignatures, - `0x${attestationData.digest.toString('hex')}`, - ts, - `0x${header.contentCommitment.blobsHash.toString('hex')}`, - flags, - ] as const; - - try { - await this.rollupContract.read.validateHeader(args, { account: this.account }); - } catch (error: unknown) { - // Specify the type of error - if (error instanceof ContractFunctionRevertedError) { - const err = error as ContractFunctionRevertedError; - this.log.debug(`Validation failed: ${err.message}`, err.data); - } - throw error; - } - return ts; - } - - public async getCurrentEpochCommittee(): Promise { - const committee = await this.rollupContract.read.getCurrentEpochCommittee(); - return committee.map(EthAddress.fromString); - } - - async getTransactionStats(txHash: string): Promise { - const tx = await this.publicClient.getTransaction({ hash: txHash as Hex }); - if (!tx) { - return undefined; - } - const calldata = hexToBytes(tx.input); - return { - sender: tx.from.toString(), - transactionHash: tx.hash, - 
calldataSize: calldata.length, - calldataGas: getCalldataGasUsage(calldata), - }; - } - public async castVote(slotNumber: bigint, timestamp: bigint, voteType: VoteType) { - // @todo This function can be optimized by doing some of the computations locally instead of calling the L1 contracts - if (this.myLastVotes[voteType] >= slotNumber) { - return false; - } - - const voteConfig = async (): Promise< - { payload: EthAddress; voteContractAddress: EthAddress; logger: Logger } | undefined - > => { - if (voteType === VoteType.GOVERNANCE) { - if (this.governancePayload.equals(EthAddress.ZERO)) { - return undefined; - } - if (!this.governanceProposerAddress) { - return undefined; - } - return { - payload: this.governancePayload, - voteContractAddress: this.governanceProposerAddress, - logger: this.governanceLog, - }; - } else if (voteType === VoteType.SLASHING) { - if (!this.getSlashPayload) { - return undefined; - } - const slashingProposerAddress = await this.getSlashingProposerAddress(); - if (!slashingProposerAddress) { - return undefined; - } - - const slashPayload = await this.getSlashPayload(slotNumber); - - if (!slashPayload) { - return undefined; - } - - return { - payload: slashPayload, - voteContractAddress: slashingProposerAddress, - logger: this.slashingLog, - }; - } else { - throw new Error('Invalid vote type'); - } - }; - - const vConfig = await voteConfig(); - - if (!vConfig) { - return false; - } - - const { payload, voteContractAddress, logger } = vConfig; - - const voteContract = getContract({ - address: getAddress(voteContractAddress.toString()), - abi: EmpireBaseAbi, - client: this.walletClient, - }); - - const [proposer, roundNumber] = await Promise.all([ - this.rollupContract.read.getProposerAt([timestamp]), - voteContract.read.computeRound([slotNumber]), - ]); - - if (proposer.toLowerCase() !== this.account.address.toLowerCase()) { - return false; - } - - const [slotForLastVote] = await voteContract.read.rounds([this.rollupContract.address, 
roundNumber]); - - if (slotForLastVote >= slotNumber) { - return false; - } - - const cachedMyLastVote = this.myLastVotes[voteType]; - this.myLastVotes[voteType] = slotNumber; - - let txHash; - try { - txHash = await voteContract.write.vote([payload.toString()], { - account: this.account, - }); - } catch (err) { - const { message, metaMessages } = formatViemError(err); - logger.error(`Failed to vote`, message, { metaMessages }); - this.myLastVotes[voteType] = cachedMyLastVote; - return false; - } - - if (txHash) { - const receipt = await this.getTransactionReceipt(txHash); - if (!receipt) { - logger.warn(`Failed to get receipt for tx ${txHash}`); - this.myLastVotes[voteType] = cachedMyLastVote; - return false; - } - } - - logger.info(`Cast vote for ${payload}`); - return true; - } - - /** - * Proposes a L2 block on L1. - * @param block - L2 block to propose. - * @returns True once the tx has been confirmed and is successful, false on revert or interrupt, blocks otherwise. - */ - public async proposeL2Block( - block: L2Block, - attestations?: Signature[], - txHashes?: TxHash[], - proofQuote?: EpochProofQuote, - opts: { txTimeoutAt?: Date } = {}, - ): Promise { - const ctx = { - blockNumber: block.number, - slotNumber: block.header.globalVariables.slotNumber.toBigInt(), - blockHash: (await block.hash()).toString(), - }; - - const consensusPayload = new ConsensusPayload(block.header, block.archive.root, txHashes ?? []); - - const digest = await getHashedSignaturePayload(consensusPayload, SignatureDomainSeparator.blockAttestation); - - const blobs = await Blob.getBlobs(block.body.toBlobFields()); - const proposeTxArgs = { - header: block.header.toBuffer(), - archive: block.archive.root.toBuffer(), - blockHash: (await block.header.hash()).toBuffer(), - body: block.body.toBuffer(), - blobs, - attestations, - txHashes: txHashes ?? 
[], - }; - - // Get current block number before sending tx - const startBlock = await this.publicClient.getBlockNumber(); - - // Publish body and propose block (if not already published) - if (this.interrupted) { - this.log.verbose('L2 block data syncing interrupted while processing blocks.', ctx); - return false; - } - - const timer = new Timer(); - - // @note This will make sure that we are passing the checks for our header ASSUMING that the data is also made available - // This means that we can avoid the simulation issues in later checks. - // By simulation issue, I mean the fact that the block.timestamp is equal to the last block, not the next, which - // make time consistency checks break. - const ts = await this.validateBlockForSubmission(block.header, { - digest: digest.toBuffer(), - signatures: attestations ?? [], - }); - - this.log.debug(`Submitting propose transaction`); - const result = proofQuote - ? await this.sendProposeAndClaimTx(proposeTxArgs, proofQuote, opts, ts) - : await this.sendProposeTx(proposeTxArgs, opts, ts); - - if (!result?.receipt) { - this.log.info(`Failed to publish block ${block.number} to L1`, ctx); - return false; - } - - const { receipt, args, functionName, data, gasPrice } = result; - - // Tx was mined successfully - if (receipt.status === 'success') { - // Send the blobs to the blob sink - this.sendBlobsToBlobSink(receipt.blockHash, blobs).catch(_err => { - this.log.error('Failed to send blobs to blob sink'); - }); - - const tx = await this.getTransactionStats(receipt.transactionHash); - - // Calculate inclusion blocks - const endBlock = receipt.blockNumber; - const inclusionBlocks = Number(endBlock - startBlock); - - const stats: L1PublishBlockStats = { - gasPrice: receipt.effectiveGasPrice, - gasUsed: receipt.gasUsed, - blobGasUsed: receipt.blobGasUsed ?? 0n, - blobDataGas: receipt.blobGasPrice ?? 
0n, - transactionHash: receipt.transactionHash, - ...pick(tx!, 'calldataGas', 'calldataSize', 'sender'), - ...block.getStats(), - eventName: 'rollup-published-to-l1', - blobCount: blobs.length, - inclusionBlocks, - }; - this.log.verbose(`Published L2 block to L1 rollup contract`, { ...stats, ...ctx }); - this.metrics.recordProcessBlockTx(timer.ms(), stats); - - return true; - } - - this.metrics.recordFailedTx('process'); - const kzg = Blob.getViemKzgInstance(); - const errorMsg = await this.tryGetErrorFromRevertedTx( - data, - { - args, - functionName, - abi: RollupAbi, - address: this.rollupContract.address, - }, - { - blobs: proposeTxArgs.blobs.map(b => b.data), - kzg, - maxFeePerBlobGas: gasPrice.maxFeePerBlobGas ?? 10000000000n, - }, - ); - this.log.error(`Rollup process tx reverted. ${errorMsg}`, undefined, { - ...ctx, - txHash: receipt.transactionHash, - }); - await this.sleepOrInterrupted(); - return false; - } - - /** Calls claimEpochProofRight in the Rollup contract to submit a chosen prover quote for the previous epoch. 
*/ - public async claimEpochProofRight(proofQuote: EpochProofQuote) { - const timer = new Timer(); - let result; - try { - this.log.debug(`Submitting claimEpochProofRight transaction`); - result = await this.l1TxUtils.sendAndMonitorTransaction({ - to: this.rollupContract.address, - data: encodeFunctionData({ - abi: RollupAbi, - functionName: 'claimEpochProofRight', - args: [proofQuote.toViemArgs()], - }), - }); - } catch (err) { - if (err instanceof FormattedViemError) { - const { message, metaMessages } = err; - this.log.error(`Failed to claim epoch proof right`, message, { - metaMessages, - proofQuote: proofQuote.toInspect(), - }); - } else { - this.log.error(`Failed to claim epoch proof right`, err, { - proofQuote: proofQuote.toInspect(), - }); - } - return false; - } - - const { receipt } = result; - - if (receipt.status === 'success') { - const tx = await this.getTransactionStats(receipt.transactionHash); - const stats: L1PublishStats = { - gasPrice: receipt.effectiveGasPrice, - gasUsed: receipt.gasUsed, - transactionHash: receipt.transactionHash, - blobDataGas: 0n, - blobGasUsed: 0n, - ...pick(tx!, 'calldataGas', 'calldataSize', 'sender'), - }; - this.log.verbose(`Submitted claim epoch proof right to L1 rollup contract`, { - ...stats, - ...proofQuote.toInspect(), - }); - this.metrics.recordClaimEpochProofRightTx(timer.ms(), stats); - return true; - } else { - this.metrics.recordFailedTx('claimEpochProofRight'); - // TODO: Get the error message from the reverted tx - this.log.error(`Claim epoch proof right tx reverted`, { - txHash: receipt.transactionHash, - ...proofQuote.toInspect(), - }); - return false; - } - } - - private async tryGetErrorFromRevertedTx( - data: Hex, - args: { - args: any[]; - functionName: string; - abi: any; - address: Hex; - }, - _blobInputs?: { - blobs: Uint8Array[]; - kzg: any; - maxFeePerBlobGas: bigint; - }, - ) { - const blobInputs = _blobInputs || {}; - try { - // NB: If this fn starts unexpectedly giving incorrect blob hash 
errors, it may be because the checkBlob - // bool is no longer at the slot below. To find the slot, run: forge inspect src/core/Rollup.sol:Rollup storage - const checkBlobSlot = 9n; - await this.publicClient.simulateContract({ - ...args, - account: this.walletClient.account, - stateOverride: [ - { - address: args.address, - stateDiff: [ - { - slot: toHex(checkBlobSlot, true), - value: toHex(0n, true), - }, - ], - }, - ], - }); - // If the above passes, we have a blob error. We cannot simulate blob txs, and failed txs no longer throw errors. - // Strangely, the only way to throw the revert reason as an error and provide blobs is prepareTransactionRequest. - // See: https://github.com/wevm/viem/issues/2075 - // This throws a EstimateGasExecutionError with the custom error information: - await this.walletClient.prepareTransactionRequest({ - account: this.walletClient.account, - to: this.rollupContract.address, - data, - ...blobInputs, - }); - return undefined; - } catch (simulationErr: any) { - // If we don't have a ContractFunctionExecutionError, we have a blob related error => use getContractError to get the error msg. - const contractErr = - simulationErr.name === 'ContractFunctionExecutionError' - ? 
simulationErr - : getContractError(simulationErr as BaseError, { - args: [], - abi: RollupAbi, - functionName: args.functionName, - address: args.address, - sender: this.account.address, - }); - if (contractErr.name === 'ContractFunctionExecutionError') { - const execErr = contractErr as ContractFunctionExecutionError; - return tryGetCustomErrorNameContractFunction(execErr); - } - this.log.error(`Error getting error from simulation`, simulationErr); - } - } - - public async submitEpochProof(args: { - epochNumber: number; - fromBlock: number; - toBlock: number; - publicInputs: RootRollupPublicInputs; - proof: Proof; - }): Promise { - const { epochNumber, fromBlock, toBlock } = args; - const ctx = { epochNumber, fromBlock, toBlock }; - if (!this.interrupted) { - const timer = new Timer(); - - // Validate epoch proof range and hashes are correct before submitting - await this.validateEpochProofSubmission(args); - - const txHash = await this.sendSubmitEpochProofTx(args); - if (!txHash) { - return false; - } - - const receipt = await this.getTransactionReceipt(txHash); - if (!receipt) { - return false; - } - - // Tx was mined successfully - if (receipt.status) { - const tx = await this.getTransactionStats(txHash); - const stats: L1PublishProofStats = { - ...pick(receipt, 'gasPrice', 'gasUsed', 'transactionHash'), - ...pick(tx!, 'calldataGas', 'calldataSize', 'sender'), - blobDataGas: 0n, - blobGasUsed: 0n, - eventName: 'proof-published-to-l1', - }; - this.log.info(`Published epoch proof to L1 rollup contract`, { ...stats, ...ctx }); - this.metrics.recordSubmitProof(timer.ms(), stats); - return true; - } - - this.metrics.recordFailedTx('submitProof'); - this.log.error(`Rollup.submitEpochProof tx status failed: ${receipt.transactionHash}`, ctx); - await this.sleepOrInterrupted(); - } - - this.log.verbose('L2 block data syncing interrupted while processing blocks.', ctx); - return false; - } - - private async validateEpochProofSubmission(args: { - fromBlock: number; - 
toBlock: number; - publicInputs: RootRollupPublicInputs; - proof: Proof; - }) { - const { fromBlock, toBlock, publicInputs, proof } = args; - - // Check that the block numbers match the expected epoch to be proven - const { pendingBlockNumber: pending, provenBlockNumber: proven } = await this.rollupContract.read.getTips(); - if (proven !== BigInt(fromBlock) - 1n) { - throw new Error(`Cannot submit epoch proof for ${fromBlock}-${toBlock} as proven block is ${proven}`); - } - if (toBlock > pending) { - throw new Error(`Cannot submit epoch proof for ${fromBlock}-${toBlock} as pending block is ${pending}`); - } - - // Check the block hash and archive for the immediate block before the epoch - const blockLog = await this.rollupContract.read.getBlock([proven]); - if (publicInputs.previousArchive.root.toString() !== blockLog.archive) { - throw new Error( - `Previous archive root mismatch: ${publicInputs.previousArchive.root.toString()} !== ${blockLog.archive}`, - ); - } - // TODO: Remove zero check once we inject the proper zero blockhash - if (blockLog.blockHash !== Fr.ZERO.toString() && publicInputs.previousBlockHash.toString() !== blockLog.blockHash) { - throw new Error( - `Previous block hash mismatch: ${publicInputs.previousBlockHash.toString()} !== ${blockLog.blockHash}`, - ); - } - - // Check the block hash and archive for the last block in the epoch - const endBlockLog = await this.rollupContract.read.getBlock([BigInt(toBlock)]); - if (publicInputs.endArchive.root.toString() !== endBlockLog.archive) { - throw new Error( - `End archive root mismatch: ${publicInputs.endArchive.root.toString()} !== ${endBlockLog.archive}`, - ); - } - if (publicInputs.endBlockHash.toString() !== endBlockLog.blockHash) { - throw new Error(`End block hash mismatch: ${publicInputs.endBlockHash.toString()} !== ${endBlockLog.blockHash}`); - } - - // Compare the public inputs computed by the contract with the ones injected - const rollupPublicInputs = await 
this.rollupContract.read.getEpochProofPublicInputs( - this.getSubmitEpochProofArgs(args), - ); - const aggregationObject = proof.isEmpty() - ? times(AGGREGATION_OBJECT_LENGTH, Fr.zero) - : proof.extractAggregationObject(); - const argsPublicInputs = [...publicInputs.toFields(), ...aggregationObject]; - - if (!areArraysEqual(rollupPublicInputs.map(Fr.fromHexString), argsPublicInputs, (a, b) => a.equals(b))) { - const fmt = (inputs: Fr[] | readonly string[]) => inputs.map(x => x.toString()).join(', '); - throw new Error( - `Root rollup public inputs mismatch:\nRollup: ${fmt(rollupPublicInputs)}\nComputed:${fmt(argsPublicInputs)}`, - ); - } - } - - /** - * Calling `interrupt` will cause any in progress call to `publishRollup` to return `false` asap. - * Be warned, the call may return false even if the tx subsequently gets successfully mined. - * In practice this shouldn't matter, as we'll only ever be calling `interrupt` when we know it's going to fail. - * A call to `restart` is required before you can continue publishing. - */ - public interrupt() { - this.interrupted = true; - this.interruptibleSleep.interrupt(); - } - - /** Restarts the publisher after calling `interrupt`. 
*/ - public restart() { - this.interrupted = false; - } - - private async sendSubmitEpochProofTx(args: { - fromBlock: number; - toBlock: number; - publicInputs: RootRollupPublicInputs; - proof: Proof; - }): Promise { - const proofHex: Hex = `0x${args.proof.withoutPublicInputs().toString('hex')}`; - const argsArray = this.getSubmitEpochProofArgs(args); - - const txArgs = [ - { - epochSize: argsArray[0], - args: argsArray[1], - fees: argsArray[2], - blobPublicInputs: argsArray[3], - aggregationObject: argsArray[4], - proof: proofHex, - }, - ] as const; - - this.log.info(`SubmitEpochProof proofSize=${args.proof.withoutPublicInputs().length} bytes`); - const data = encodeFunctionData({ - abi: this.rollupContract.abi, - functionName: 'submitEpochRootProof', - args: txArgs, - }); - try { - const { receipt } = await this.l1TxUtils.sendAndMonitorTransaction({ - to: this.rollupContract.address, - data, - }); - - return receipt.transactionHash; - } catch (err) { - this.log.error(`Rollup submit epoch proof failed`, err); - const errorMsg = await this.tryGetErrorFromRevertedTx(data, { - args: [...txArgs], - functionName: 'submitEpochRootProof', - abi: this.rollupContract.abi, - address: this.rollupContract.address, - }); - this.log.error(`Rollup submit epoch proof tx reverted. ${errorMsg}`); - return undefined; - } - } - - private async prepareProposeTx(encodedData: L1ProcessArgs) { - const kzg = Blob.getViemKzgInstance(); - const blobInput = Blob.getEthBlobEvaluationInputs(encodedData.blobs); - this.log.debug('Validating blob input', { blobInput }); - const blobEvaluationGas = await this.l1TxUtils.estimateGas( - this.account, - { - to: this.rollupContract.address, - data: encodeFunctionData({ - abi: this.rollupContract.abi, - functionName: 'validateBlobs', - args: [blobInput], - }), - }, - {}, - { - blobs: encodedData.blobs.map(b => b.data), - kzg, - }, - ); - - const attestations = encodedData.attestations - ? 
encodedData.attestations.map(attest => attest.toViemSignature()) - : []; - const txHashes = encodedData.txHashes ? encodedData.txHashes.map(txHash => txHash.toString()) : []; - const args = [ - { - header: `0x${encodedData.header.toString('hex')}`, - archive: `0x${encodedData.archive.toString('hex')}`, - oracleInput: { - // We are currently not modifying these. See #9963 - feeAssetPriceModifier: 0n, - provingCostModifier: 0n, - }, - blockHash: `0x${encodedData.blockHash.toString('hex')}`, - txHashes, - }, - attestations, - // TODO(#9101): Extract blobs from beacon chain => calldata will only contain what's needed to verify blob and body input can be removed - `0x${encodedData.body.toString('hex')}`, - blobInput, - ] as const; - - return { args, blobEvaluationGas }; - } - - private getSubmitEpochProofArgs(args: { - fromBlock: number; - toBlock: number; - publicInputs: RootRollupPublicInputs; - proof: Proof; - }) { - return [ - BigInt(args.toBlock - args.fromBlock + 1), - [ - args.publicInputs.previousArchive.root.toString(), - args.publicInputs.endArchive.root.toString(), - args.publicInputs.previousBlockHash.toString(), - args.publicInputs.endBlockHash.toString(), - args.publicInputs.endTimestamp.toString(), - args.publicInputs.outHash.toString(), - args.publicInputs.proverId.toString(), - ], - makeTuple(AZTEC_MAX_EPOCH_DURATION * 2, i => - i % 2 === 0 - ? 
args.publicInputs.fees[i / 2].recipient.toField().toString() - : args.publicInputs.fees[(i - 1) / 2].value.toString(), - ), - `0x${args.publicInputs.blobPublicInputs - .filter((_, i) => i < args.toBlock - args.fromBlock + 1) - .map(b => b.toString()) - .join(``)}`, - `0x${serializeToBuffer(args.proof.extractAggregationObject()).toString('hex')}`, - ] as const; - } - - private async sendProposeTx( - encodedData: L1ProcessArgs, - opts: { txTimeoutAt?: Date } = {}, - timestamp: bigint, - ): Promise { - if (this.interrupted) { - return undefined; - } - try { - const kzg = Blob.getViemKzgInstance(); - const { args, blobEvaluationGas } = await this.prepareProposeTx(encodedData); - const data = encodeFunctionData({ - abi: this.rollupContract.abi, - functionName: 'propose', - args, - }); - - const simulationResult = await this.l1TxUtils.simulateGasUsed( - { - to: this.rollupContract.address, - data, - gas: L1Publisher.PROPOSE_GAS_GUESS, - }, - { - // @note we add 1n to the timestamp because geth implementation doesn't like simulation timestamp to be equal to the current block timestamp - time: timestamp + 1n, - // @note reth should have a 30m gas limit per block but throws errors that this tx is beyond limit - gasLimit: L1Publisher.PROPOSE_GAS_GUESS * 2n, - }, - [ - { - address: this.rollupContract.address, - // @note we override checkBlob to false since blobs are not part simulate() - stateDiff: [ - { - slot: toHex(9n, true), - value: toHex(0n, true), - }, - ], - }, - ], - { - // @note fallback gas estimate to use if the node doesn't support simulation API - fallbackGasEstimate: L1Publisher.PROPOSE_GAS_GUESS, - }, - ); - - const result = await this.l1TxUtils.sendAndMonitorTransaction( - { - to: this.rollupContract.address, - data, - }, - { - ...opts, - gasLimit: this.l1TxUtils.bumpGasLimit(simulationResult + blobEvaluationGas), - }, - { - blobs: encodedData.blobs.map(b => b.data), - kzg, - }, - ); - return { - receipt: result.receipt, - gasPrice: result.gasPrice, - args, 
- functionName: 'propose', - data, - }; - } catch (err) { - if (err instanceof FormattedViemError) { - const { message, metaMessages } = err; - this.log.error(`Rollup publish failed.`, message, { metaMessages }); - } else { - this.log.error(`Rollup publish failed.`, err); - } - return undefined; - } - } - - private async sendProposeAndClaimTx( - encodedData: L1ProcessArgs, - quote: EpochProofQuote, - opts: { txTimeoutAt?: Date } = {}, - timestamp: bigint, - ): Promise { - if (this.interrupted) { - return undefined; - } - - try { - const kzg = Blob.getViemKzgInstance(); - const { args, blobEvaluationGas } = await this.prepareProposeTx(encodedData); - const data = encodeFunctionData({ - abi: this.rollupContract.abi, - functionName: 'proposeAndClaim', - args: [...args, quote.toViemArgs()], - }); - - const simulationResult = await this.l1TxUtils.simulateGasUsed( - { - to: this.rollupContract.address, - data, - gas: L1Publisher.PROPOSE_AND_CLAIM_GAS_GUESS, - }, - { - // @note we add 1n to the timestamp because geth implementation doesn't like simulation timestamp to be equal to the current block timestamp - time: timestamp + 1n, - // @note reth should have a 30m gas limit per block but throws errors that this tx is beyond limit - gasLimit: L1Publisher.PROPOSE_AND_CLAIM_GAS_GUESS * 2n, - }, - [ - { - address: this.rollupContract.address, - // @note we override checkBlob to false since blobs are not part simulate() - stateDiff: [ - { - slot: toHex(9n, true), - value: toHex(0n, true), - }, - ], - }, - ], - { - // @note fallback gas estimate to use if the node doesn't support simulation API - fallbackGasEstimate: L1Publisher.PROPOSE_AND_CLAIM_GAS_GUESS, - }, - ); - const result = await this.l1TxUtils.sendAndMonitorTransaction( - { - to: this.rollupContract.address, - data, - }, - { - ...opts, - gasLimit: this.l1TxUtils.bumpGasLimit(simulationResult + blobEvaluationGas), - }, - { - blobs: encodedData.blobs.map(b => b.data), - kzg, - }, - ); - - return { - receipt: 
result.receipt, - gasPrice: result.gasPrice, - args: [...args, quote.toViemArgs()], - functionName: 'proposeAndClaim', - data, - }; - } catch (err) { - if (err instanceof FormattedViemError) { - const { message, metaMessages } = err; - this.log.error(`Rollup publish failed.`, message, { metaMessages }); - } else { - this.log.error(`Rollup publish failed.`, err); - } - return undefined; - } - } - - /** - * Returns a tx receipt if the tx has been mined. - * @param txHash - Hash of the tx to look for. - * @returns Undefined if the tx hasn't been mined yet, the receipt otherwise. - */ - async getTransactionReceipt(txHash: string): Promise { - while (!this.interrupted) { - try { - const receipt = await this.publicClient.getTransactionReceipt({ - hash: txHash as Hex, - }); - - if (receipt) { - if (receipt.transactionHash !== txHash) { - throw new Error(`Tx hash mismatch: ${receipt.transactionHash} !== ${txHash}`); - } - - return { - status: receipt.status === 'success', - transactionHash: txHash, - gasUsed: receipt.gasUsed, - gasPrice: receipt.effectiveGasPrice, - logs: receipt.logs, - blockNumber: receipt.blockNumber, - blockHash: receipt.blockHash, - }; - } - - this.log.debug(`Receipt not found for tx hash ${txHash}`); - return undefined; - } catch (err) { - //this.log.error(`Error getting tx receipt`, err); - await this.sleepOrInterrupted(); - } - } - } - - protected async sleepOrInterrupted() { - await this.interruptibleSleep.sleep(this.sleepTimeMs); - } - - /** - * Send blobs to the blob sink - * - * If a blob sink url is configured, then we send blobs to the blob sink - * - for now we use the blockHash as the identifier for the blobs; - * In the future this will move to be the beacon block id - which takes a bit more work - * to calculate and will need to be mocked in e2e tests - */ - protected sendBlobsToBlobSink(blockHash: string, blobs: Blob[]): Promise { - return this.blobSinkClient.sendBlobsToBlobSink(blockHash, blobs); - } -} - -/* - * Returns cost of 
calldata usage in Ethereum. - * @param data - Calldata. - * @returns 4 for each zero byte, 16 for each nonzero. - */ -function getCalldataGasUsage(data: Uint8Array) { - return data.filter(byte => byte === 0).length * 4 + data.filter(byte => byte !== 0).length * 16; -} - -function tryGetCustomErrorNameContractFunction(err: ContractFunctionExecutionError) { - return compactArray([err.shortMessage, ...(err.metaMessages ?? []).slice(0, 2).map(s => s.trim())]).join(' '); -} - -function tryGetCustomErrorName(err: any) { - try { - // See https://viem.sh/docs/contract/simulateContract#handling-custom-errors - if (err.name === 'ViemError' || err.name === 'ContractFunctionExecutionError') { - const baseError = err as BaseError; - const revertError = baseError.walk(err => (err as Error).name === 'ContractFunctionRevertedError'); - if (revertError) { - return (revertError as ContractFunctionRevertedError).data?.errorName; - } - } - } catch (_e) { - return undefined; - } -} diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher-metrics.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher-metrics.ts similarity index 97% rename from yarn-project/sequencer-client/src/publisher/l1-publisher-metrics.ts rename to yarn-project/sequencer-client/src/publisher/sequencer-publisher-metrics.ts index c331a614c11..f7bd8e96eff 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher-metrics.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher-metrics.ts @@ -12,7 +12,7 @@ import { formatEther } from 'viem/utils'; export type L1TxType = 'submitProof' | 'process' | 'claimEpochProofRight'; -export class L1PublisherMetrics { +export class SequencerPublisherMetrics { private gasPrice: Histogram; private txCount: UpDownCounter; @@ -28,7 +28,7 @@ export class L1PublisherMetrics { private readonly blobTxSuccessCounter: UpDownCounter; private readonly blobTxFailureCounter: UpDownCounter; - constructor(client: TelemetryClient, name = 
'L1Publisher') { + constructor(client: TelemetryClient, name = 'SequencerPublisher') { const meter = client.getMeter(name); this.gasPrice = meter.createHistogram(Metrics.L1_PUBLISHER_GAS_PRICE, { diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts new file mode 100644 index 00000000000..9964e83e76c --- /dev/null +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts @@ -0,0 +1,314 @@ +import { HttpBlobSinkClient } from '@aztec/blob-sink/client'; +import { inboundTransform } from '@aztec/blob-sink/encoding'; +import { L2Block } from '@aztec/circuit-types'; +import { EthAddress } from '@aztec/circuits.js'; +import { type EpochCache } from '@aztec/epoch-cache'; +import { + type ForwarderContract, + type GasPrice, + type L1ContractsConfig, + type L1TxUtilsConfig, + type L1TxUtilsWithBlobs, + type RollupContract, + defaultL1TxUtilsConfig, + getL1ContractsConfigEnvVars, +} from '@aztec/ethereum'; +import { Blob } from '@aztec/foundation/blob'; +import { sleep } from '@aztec/foundation/sleep'; +import { EmpireBaseAbi, RollupAbi } from '@aztec/l1-artifacts'; + +import express, { json } from 'express'; +import { type Server } from 'http'; +import { type MockProxy, mock } from 'jest-mock-extended'; +import { type GetTransactionReceiptReturnType, type TransactionReceipt, encodeFunctionData } from 'viem'; + +import { type PublisherConfig, type TxSenderConfig } from './config.js'; +import { SequencerPublisher } from './sequencer-publisher.js'; + +const mockRollupAddress = EthAddress.random().toString(); +const mockGovernanceProposerAddress = EthAddress.random().toString(); +const mockForwarderAddress = EthAddress.random().toString(); +const BLOB_SINK_PORT = 50525; +const BLOB_SINK_URL = `http://localhost:${BLOB_SINK_PORT}`; + +describe('SequencerPublisher', () => { + let rollup: MockProxy; + let forwarder: MockProxy; + let l1TxUtils: MockProxy; + + 
let proposeTxHash: `0x${string}`; + let proposeTxReceipt: GetTransactionReceiptReturnType; + let l2Block: L2Block; + + let header: Buffer; + let archive: Buffer; + let blockHash: Buffer; + let body: Buffer; + + let blobSinkClient: HttpBlobSinkClient; + let mockBlobSinkServer: Server | undefined = undefined; + + // An l1 publisher with some private methods exposed + let publisher: SequencerPublisher; + + const GAS_GUESS = 300_000n; + + beforeEach(async () => { + mockBlobSinkServer = undefined; + blobSinkClient = new HttpBlobSinkClient({ blobSinkUrl: BLOB_SINK_URL }); + + l2Block = await L2Block.random(42); + + header = l2Block.header.toBuffer(); + archive = l2Block.archive.root.toBuffer(); + blockHash = (await l2Block.header.hash()).toBuffer(); + body = l2Block.body.toBuffer(); + + proposeTxHash = `0x${Buffer.from('txHashPropose').toString('hex')}`; // random tx hash + + proposeTxReceipt = { + transactionHash: proposeTxHash, + status: 'success', + logs: [], + } as unknown as GetTransactionReceiptReturnType; + + l1TxUtils = mock(); + l1TxUtils.getBlock.mockResolvedValue({ timestamp: 12n } as any); + l1TxUtils.getBlockNumber.mockResolvedValue(1n); + const config = { + blobSinkUrl: BLOB_SINK_URL, + l1RpcUrl: `http://127.0.0.1:8545`, + l1ChainId: 1, + publisherPrivateKey: `0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80`, + l1Contracts: { + rollupAddress: EthAddress.ZERO.toString(), + governanceProposerAddress: mockGovernanceProposerAddress, + }, + l1PublishRetryIntervalMS: 1, + ethereumSlotDuration: getL1ContractsConfigEnvVars().ethereumSlotDuration, + + ...defaultL1TxUtilsConfig, + } as unknown as TxSenderConfig & + PublisherConfig & + Pick & + L1TxUtilsConfig; + + rollup = mock(); + rollup.validateHeader.mockResolvedValue(Promise.resolve()); + (rollup as any).address = mockRollupAddress; + + forwarder = mock(); + forwarder.getAddress.mockReturnValue(mockForwarderAddress); + forwarder.forward.mockResolvedValue({ + receipt: proposeTxReceipt, + 
gasPrice: { maxFeePerGas: 1n, maxPriorityFeePerGas: 1n }, + errorMsg: undefined, + }); + + const epochCache = mock(); + epochCache.getEpochAndSlotNow.mockReturnValue({ epoch: 1n, slot: 2n, ts: 3n }); + + publisher = new SequencerPublisher(config, { + blobSinkClient, + rollupContract: rollup, + l1TxUtils, + forwarderContract: forwarder, + epochCache, + }); + + (publisher as any)['l1TxUtils'] = l1TxUtils; + publisher as any; + + l1TxUtils.sendAndMonitorTransaction.mockResolvedValue({ + receipt: proposeTxReceipt, + gasPrice: { maxFeePerGas: 1n, maxPriorityFeePerGas: 1n }, + }); + (l1TxUtils as any).estimateGas.mockResolvedValue(GAS_GUESS); + (l1TxUtils as any).simulateGasUsed.mockResolvedValue(1_000_000n); + (l1TxUtils as any).bumpGasLimit.mockImplementation((val: bigint) => val + (val * 20n) / 100n); + + const currentL2Slot = publisher.getCurrentL2Slot(); + + l2Block = await L2Block.random(42, undefined, undefined, undefined, undefined, Number(currentL2Slot)); + + header = l2Block.header.toBuffer(); + archive = l2Block.archive.root.toBuffer(); + blockHash = (await l2Block.header.hash()).toBuffer(); + body = l2Block.body.toBuffer(); + }); + + const closeServer = (server: Server): Promise => { + return new Promise((resolve, reject) => { + server.close(err => { + if (err) { + reject(err); + return; + } + resolve(); + }); + }); + }; + + afterEach(async () => { + if (mockBlobSinkServer) { + await closeServer(mockBlobSinkServer); + mockBlobSinkServer = undefined; + } + }); + + // Run a mock blob sink in the background, and test that the correct data is sent to it + const runBlobSinkServer = (blobs: Blob[]) => { + const app = express(); + app.use(json({ limit: '10mb' })); + + app.post('/blob_sidecar', (req, res) => { + const blobsBuffers = req.body.blobs.map((b: { index: number; blob: { type: string; data: string } }) => + Blob.fromBuffer(inboundTransform(Buffer.from(b.blob.data))), + ); + + expect(blobsBuffers).toEqual(blobs); + res.status(200).send(); + }); + + return new 
Promise(resolve => { + mockBlobSinkServer = app.listen(BLOB_SINK_PORT, () => { + // Resolve when the server is listening + resolve(); + }); + }); + }; + + it('bundles propose and vote tx to l1', async () => { + const kzg = Blob.getViemKzgInstance(); + + const expectedBlobs = await Blob.getBlobs(l2Block.body.toBlobFields()); + + // Expect the blob sink server to receive the blobs + await runBlobSinkServer(expectedBlobs); + + expect(await publisher.enqueueProposeL2Block(l2Block)).toEqual(true); + // TODO + // const govPayload = EthAddress.random(); + // publisher.setGovernancePayload(govPayload); + // rollup.getProposerAt.mockResolvedValueOnce(mockForwarderAddress); + // expect(await publisher.enqueueCastVote(1n, 1n, VoteType.GOVERNANCE)).toEqual(true); + // expect(await publisher.enqueueCastVote(0n, 0n, VoteType.SLASHING)).toEqual(true); + + await publisher.sendRequests(); + + const blobInput = Blob.getEthBlobEvaluationInputs(expectedBlobs); + + const args = [ + { + header: `0x${header.toString('hex')}`, + archive: `0x${archive.toString('hex')}`, + blockHash: `0x${blockHash.toString('hex')}`, + oracleInput: { + feeAssetPriceModifier: 0n, + provingCostModifier: 0n, + }, + txHashes: [], + }, + [], + `0x${body.toString('hex')}`, + blobInput, + ] as const; + expect(forwarder.forward).toHaveBeenCalledWith( + [ + { + to: mockRollupAddress, + data: encodeFunctionData({ abi: RollupAbi, functionName: 'propose', args }), + }, + // { + // to: mockGovernanceProposerAddress, + // data: encodeFunctionData({ abi: EmpireBaseAbi, functionName: 'vote', args: [govPayload.toString()] }), + // }, + ], + l1TxUtils, + // val + (val * 20n) / 100n + { gasLimit: 1_000_000n + GAS_GUESS + ((1_000_000n + GAS_GUESS) * 20n) / 100n }, + { blobs: expectedBlobs.map(b => b.data), kzg }, + ); + }); + + it('errors if forwarder tx fails', async () => { + forwarder.forward.mockRejectedValueOnce(new Error()).mockResolvedValueOnce({ + receipt: proposeTxReceipt, + gasPrice: { maxFeePerGas: 1n, 
maxPriorityFeePerGas: 1n }, + errorMsg: undefined, + }); + + const enqueued = await publisher.enqueueProposeL2Block(l2Block); + expect(enqueued).toEqual(true); + const result = await publisher.sendRequests(); + expect(result).toEqual(undefined); + }); + + it('does send propose tx if rollup validation fails', async () => { + rollup.validateHeader.mockRejectedValueOnce(new Error('Test error')); + + await expect(publisher.enqueueProposeL2Block(l2Block)).rejects.toThrow(); + + expect(rollup.validateHeader).toHaveBeenCalledTimes(1); + + const result = await publisher.sendRequests(); + expect(result).toEqual(undefined); + expect(forwarder.forward).not.toHaveBeenCalled(); + }); + + it('returns errorMsg if forwarder tx reverts', async () => { + forwarder.forward.mockResolvedValueOnce({ + receipt: { ...proposeTxReceipt, status: 'reverted' }, + gasPrice: { maxFeePerGas: 1n, maxPriorityFeePerGas: 1n }, + errorMsg: 'Test error', + }); + + const enqueued = await publisher.enqueueProposeL2Block(l2Block); + expect(enqueued).toEqual(true); + const result = await publisher.sendRequests(); + + expect(result?.errorMsg).toEqual('Test error'); + }); + + it('does not send requests if interrupted', async () => { + forwarder.forward.mockImplementationOnce( + () => + sleep(10, { receipt: proposeTxReceipt, gasPrice: { maxFeePerGas: 1n, maxPriorityFeePerGas: 1n } }) as Promise<{ + receipt: TransactionReceipt; + gasPrice: GasPrice; + errorMsg: undefined; + }>, + ); + const enqueued = await publisher.enqueueProposeL2Block(l2Block); + expect(enqueued).toEqual(true); + publisher.interrupt(); + const resultPromise = publisher.sendRequests(); + const result = await resultPromise; + + expect(result).toEqual(undefined); + expect(forwarder.forward).not.toHaveBeenCalled(); + expect((publisher as any).requests.length).toEqual(0); + }); + + it('does not send requests if no valid requests are found', async () => { + const epochCache = (publisher as any).epochCache as MockProxy; + + 
epochCache.getEpochAndSlotNow.mockReturnValue({ epoch: 1n, slot: 2n, ts: 3n }); + + publisher.addRequest({ + action: 'propose', + request: { + to: mockRollupAddress, + data: encodeFunctionData({ abi: EmpireBaseAbi, functionName: 'vote', args: [EthAddress.random().toString()] }), + }, + lastValidL2Slot: 1n, + }); + + const resultPromise = publisher.sendRequests(); + const result = await resultPromise; + + expect(result).toEqual(undefined); + expect(forwarder.forward).not.toHaveBeenCalled(); + expect((publisher as any).requests.length).toEqual(0); + }); +}); diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts new file mode 100644 index 00000000000..b9076fc689a --- /dev/null +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts @@ -0,0 +1,730 @@ +import { type BlobSinkClientInterface, createBlobSinkClient } from '@aztec/blob-sink/client'; +import { + ConsensusPayload, + type EpochProofQuote, + type L2Block, + SignatureDomainSeparator, + type TxHash, + getHashedSignaturePayload, +} from '@aztec/circuit-types'; +import type { L1PublishBlockStats, L1PublishStats } from '@aztec/circuit-types/stats'; +import { type BlockHeader, EthAddress } from '@aztec/circuits.js'; +import { type EpochCache } from '@aztec/epoch-cache'; +import { + FormattedViemError, + type ForwarderContract, + type GasPrice, + type L1BlobInputs, + type L1ContractsConfig, + type L1GasConfig, + type L1TxRequest, + type L1TxUtilsWithBlobs, + type RollupContract, + type TransactionStats, + formatViemError, +} from '@aztec/ethereum'; +import { toHex } from '@aztec/foundation/bigint-buffer'; +import { Blob } from '@aztec/foundation/blob'; +import { type Signature } from '@aztec/foundation/eth-signature'; +import { type Logger, createLogger } from '@aztec/foundation/log'; +import { Timer } from '@aztec/foundation/timer'; +import { EmpireBaseAbi, ForwarderAbi, RollupAbi } from 
'@aztec/l1-artifacts'; +import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client'; + +import pick from 'lodash.pick'; +import { type TransactionReceipt, encodeFunctionData, getAddress, getContract } from 'viem'; + +import { type PublisherConfig, type TxSenderConfig } from './config.js'; +import { SequencerPublisherMetrics } from './sequencer-publisher-metrics.js'; + +/** Arguments to the process method of the rollup contract */ +type L1ProcessArgs = { + /** The L2 block header. */ + header: Buffer; + /** A root of the archive tree after the L2 block is applied. */ + archive: Buffer; + /** The L2 block's leaf in the archive tree. */ + blockHash: Buffer; + /** L2 block body. TODO(#9101): Remove block body once we can extract blobs. */ + body: Buffer; + /** L2 block blobs containing all tx effects. */ + blobs: Blob[]; + /** L2 block tx hashes */ + txHashes: TxHash[]; + /** Attestations */ + attestations?: Signature[]; +}; + +export enum VoteType { + GOVERNANCE, + SLASHING, +} + +type GetSlashPayloadCallBack = (slotNumber: bigint) => Promise; + +type Action = 'propose' | 'claim' | 'governance-vote' | 'slashing-vote'; +interface RequestWithExpiry { + action: Action; + request: L1TxRequest; + lastValidL2Slot: bigint; + gasConfig?: L1GasConfig; + blobConfig?: L1BlobInputs; + onResult?: ( + request: L1TxRequest, + result?: { receipt: TransactionReceipt; gasPrice: GasPrice; stats?: TransactionStats; errorMsg?: string }, + ) => void; +} + +export class SequencerPublisher { + private interrupted = false; + private metrics: SequencerPublisherMetrics; + private epochCache: EpochCache; + private forwarderContract: ForwarderContract; + + protected governanceLog = createLogger('sequencer:publisher:governance'); + protected governanceProposerAddress?: EthAddress; + private governancePayload: EthAddress = EthAddress.ZERO; + + protected slashingLog = createLogger('sequencer:publisher:slashing'); + protected slashingProposerAddress?: EthAddress; + private 
getSlashPayload?: GetSlashPayloadCallBack = undefined; + + private myLastVotes: Record = { + [VoteType.GOVERNANCE]: 0n, + [VoteType.SLASHING]: 0n, + }; + + protected log = createLogger('sequencer:publisher'); + protected ethereumSlotDuration: bigint; + + private blobSinkClient: BlobSinkClientInterface; + // @note - with blobs, the below estimate seems too large. + // Total used for full block from int_l1_pub e2e test: 1m (of which 86k is 1x blob) + // Total used for emptier block from above test: 429k (of which 84k is 1x blob) + public static PROPOSE_GAS_GUESS: bigint = 12_000_000n; + public static PROPOSE_AND_CLAIM_GAS_GUESS: bigint = this.PROPOSE_GAS_GUESS + 100_000n; + + public l1TxUtils: L1TxUtilsWithBlobs; + public rollupContract: RollupContract; + + protected requests: RequestWithExpiry[] = []; + + constructor( + config: TxSenderConfig & PublisherConfig & Pick, + deps: { + telemetry?: TelemetryClient; + blobSinkClient?: BlobSinkClientInterface; + forwarderContract: ForwarderContract; + l1TxUtils: L1TxUtilsWithBlobs; + rollupContract: RollupContract; + epochCache: EpochCache; + }, + ) { + this.ethereumSlotDuration = BigInt(config.ethereumSlotDuration); + this.epochCache = deps.epochCache; + + if (config.l1Contracts.governanceProposerAddress) { + this.governanceProposerAddress = EthAddress.fromString(config.l1Contracts.governanceProposerAddress.toString()); + } + this.blobSinkClient = deps.blobSinkClient ?? createBlobSinkClient(config); + + const telemetry = deps.telemetry ?? 
getTelemetryClient(); + this.metrics = new SequencerPublisherMetrics(telemetry, 'SequencerPublisher'); + this.l1TxUtils = deps.l1TxUtils; + + this.rollupContract = deps.rollupContract; + this.forwarderContract = deps.forwarderContract; + } + + public registerSlashPayloadGetter(callback: GetSlashPayloadCallBack) { + this.getSlashPayload = callback; + } + + public getForwarderAddress() { + return EthAddress.fromString(this.forwarderContract.getAddress()); + } + + public getSenderAddress() { + return EthAddress.fromString(this.l1TxUtils.getSenderAddress()); + } + + public getGovernancePayload() { + return this.governancePayload; + } + + public setGovernancePayload(payload: EthAddress) { + this.governancePayload = payload; + } + + public addRequest(request: RequestWithExpiry) { + this.requests.push(request); + } + + public getCurrentL2Slot(): bigint { + return this.epochCache.getEpochAndSlotNow().slot; + } + + /** + * Sends all requests that are still valid. + * @returns one of: + * - A receipt and stats if the tx succeeded + * - a receipt and errorMsg if it failed on L1 + * - undefined if no valid requests are found OR the tx failed to send. 
+ */ + public async sendRequests() { + const requestsToProcess = [...this.requests]; + this.requests = []; + if (this.interrupted) { + return undefined; + } + const currentL2Slot = this.getCurrentL2Slot(); + this.log.debug(`Current L2 slot: ${currentL2Slot}`); + const validRequests = requestsToProcess.filter(request => request.lastValidL2Slot >= currentL2Slot); + + if (validRequests.length !== requestsToProcess.length) { + this.log.warn(`Some requests were expired for slot ${currentL2Slot}`, { + validRequests: validRequests.map(request => ({ + action: request.action, + lastValidL2Slot: request.lastValidL2Slot, + })), + requests: requestsToProcess.map(request => ({ + action: request.action, + lastValidL2Slot: request.lastValidL2Slot, + })), + }); + } + + if (validRequests.length === 0) { + this.log.debug(`No valid requests to send`); + return undefined; + } + + // @note - we can only have one gas config and one blob config per bundle + // find requests with gas and blob configs + // See https://github.com/AztecProtocol/aztec-packages/issues/11513 + const gasConfigs = requestsToProcess.filter(request => request.gasConfig); + const blobConfigs = requestsToProcess.filter(request => request.blobConfig); + + if (gasConfigs.length > 1 || blobConfigs.length > 1) { + throw new Error('Multiple gas or blob configs found'); + } + + const gasConfig = gasConfigs[0]?.gasConfig; + const blobConfig = blobConfigs[0]?.blobConfig; + + try { + this.log.debug('Forwarding transactions', { + validRequests: validRequests.map(request => request.action), + }); + const result = await this.forwarderContract.forward( + validRequests.map(request => request.request), + this.l1TxUtils, + gasConfig, + blobConfig, + ); + this.callbackBundledTransactions(validRequests, result); + return result; + } catch (err) { + const { message, metaMessages } = formatViemError(err); + this.log.error(`Failed to publish bundled transactions`, message, { metaMessages }); + return undefined; + } + } + + private 
callbackBundledTransactions( + requests: RequestWithExpiry[], + result?: { receipt: TransactionReceipt; gasPrice: GasPrice }, + ) { + const success = result?.receipt.status === 'success'; + const logger = success ? this.log.info : this.log.error; + for (const request of requests) { + logger(`Bundled [${request.action}] transaction [${success ? 'succeeded' : 'failed'}]`); + request.onResult?.(request.request, result); + } + } + + /** + * @notice Will call `canProposeAtNextEthBlock` to make sure that it is possible to propose + * @param tipArchive - The archive to check + * @returns The slot and block number if it is possible to propose, undefined otherwise + */ + public canProposeAtNextEthBlock(tipArchive: Buffer) { + const ignoredErrors = ['SlotAlreadyInChain', 'InvalidProposer']; + return this.rollupContract + .canProposeAtNextEthBlock(tipArchive, this.getForwarderAddress().toString(), this.ethereumSlotDuration) + .catch(err => { + if (err instanceof FormattedViemError && ignoredErrors.find(e => err.message.includes(e))) { + this.log.debug(err.message); + } else { + this.log.error(err.name, err); + } + return undefined; + }); + } + + /** + * @returns The epoch that is currently claimable, undefined otherwise + */ + public getClaimableEpoch() { + const acceptedErrors = ['Rollup__NoEpochToProve', 'Rollup__ProofRightAlreadyClaimed'] as const; + return this.rollupContract.getClaimableEpoch().catch(err => { + if (acceptedErrors.find(e => err.message.includes(e))) { + return undefined; + } + throw err; + }); + } + + /** + * @notice Will filter out invalid quotes according to L1 + * @param quotes - The quotes to filter + * @returns The filtered quotes + */ + public filterValidQuotes(quotes: EpochProofQuote[]): Promise { + return Promise.all( + quotes.map(x => + this.rollupContract + // validate throws if the quote is not valid + // else returns void + .validateProofQuote(x.toViemArgs(), this.getForwarderAddress().toString(), this.ethereumSlotDuration) + .then(() => x) + 
.catch(err => { + this.log.error(`Failed to validate proof quote`, err, { quote: x.toInspect() }); + return undefined; + }), + ), + ).then(quotes => quotes.filter((q): q is EpochProofQuote => !!q)); + } + + /** + * @notice Will call `validateHeader` to make sure that it is possible to propose + * + * @dev Throws if unable to propose + * + * @param header - The header to propose + * @param digest - The digest that attestations are signing over + * + */ + public async validateBlockForSubmission( + header: BlockHeader, + attestationData: { digest: Buffer; signatures: Signature[] } = { + digest: Buffer.alloc(32), + signatures: [], + }, + ): Promise { + const ts = BigInt((await this.l1TxUtils.getBlock()).timestamp + this.ethereumSlotDuration); + + const formattedSignatures = attestationData.signatures.map(attest => attest.toViemSignature()); + const flags = { ignoreDA: true, ignoreSignatures: formattedSignatures.length == 0 }; + + const args = [ + `0x${header.toBuffer().toString('hex')}`, + formattedSignatures, + `0x${attestationData.digest.toString('hex')}`, + ts, + `0x${header.contentCommitment.blobsHash.toString('hex')}`, + flags, + ] as const; + + await this.rollupContract.validateHeader(args, this.getForwarderAddress().toString()); + return ts; + } + + public async getCurrentEpochCommittee(): Promise { + const committee = await this.rollupContract.getCurrentEpochCommittee(); + return committee.map(EthAddress.fromString); + } + + /** + * Enqueues a castVote transaction to cast a vote for a given slot number. + * @param slotNumber - The slot number to cast a vote for. + * @param timestamp - The timestamp of the slot to cast a vote for. + * @param voteType - The type of vote to cast. + * @returns True if the vote was successfully enqueued, false otherwise. 
+ */ + public async enqueueCastVote(slotNumber: bigint, timestamp: bigint, voteType: VoteType): Promise { + // @todo This function can be optimized by doing some of the computations locally instead of calling the L1 contracts + if (this.myLastVotes[voteType] >= slotNumber) { + return false; + } + + const voteConfig = async (): Promise< + { payload: EthAddress; voteContractAddress: EthAddress; logger: Logger } | undefined + > => { + if (voteType === VoteType.GOVERNANCE) { + if (this.governancePayload.equals(EthAddress.ZERO)) { + return undefined; + } + if (!this.governanceProposerAddress) { + return undefined; + } + return { + payload: this.governancePayload, + voteContractAddress: this.governanceProposerAddress, + logger: this.governanceLog, + }; + } else if (voteType === VoteType.SLASHING) { + if (!this.getSlashPayload) { + return undefined; + } + const slashingProposerAddress = await this.rollupContract.getSlashingProposerAddress(); + if (!slashingProposerAddress) { + return undefined; + } + + const slashPayload = await this.getSlashPayload(slotNumber); + + if (!slashPayload) { + return undefined; + } + + return { + payload: slashPayload, + voteContractAddress: slashingProposerAddress, + logger: this.slashingLog, + }; + } else { + throw new Error('Invalid vote type'); + } + }; + + const vConfig = await voteConfig(); + + if (!vConfig) { + return false; + } + + const { payload, voteContractAddress } = vConfig; + + const voteContract = getContract({ + address: getAddress(voteContractAddress.toString()), + abi: EmpireBaseAbi, + client: this.l1TxUtils.walletClient, + }); + + const [proposer, roundNumber] = await Promise.all([ + this.rollupContract.getProposerAt(timestamp), + voteContract.read.computeRound([slotNumber]), + ]); + + if (proposer.toLowerCase() !== this.getForwarderAddress().toString().toLowerCase()) { + return false; + } + + const [slotForLastVote] = await voteContract.read.rounds([this.rollupContract.address, roundNumber]); + + if (slotForLastVote >= 
slotNumber) { + return false; + } + + const cachedLastVote = this.myLastVotes[voteType]; + + this.myLastVotes[voteType] = slotNumber; + + this.addRequest({ + action: voteType === VoteType.GOVERNANCE ? 'governance-vote' : 'slashing-vote', + request: { + to: voteContractAddress.toString(), + data: encodeFunctionData({ + abi: EmpireBaseAbi, + functionName: 'vote', + args: [payload.toString()], + }), + }, + lastValidL2Slot: slotNumber, + onResult: (_request, result) => { + if (!result || result.receipt.status !== 'success') { + this.myLastVotes[voteType] = cachedLastVote; + } else { + this.log.info(`Cast ${voteType} vote for slot ${slotNumber}`); + } + }, + }); + return true; + } + + /** + * Proposes a L2 block on L1. + * + * @param block - L2 block to propose. + * @returns True if the tx has been enqueued, throws otherwise. See #9315 + */ + public async enqueueProposeL2Block( + block: L2Block, + attestations?: Signature[], + txHashes?: TxHash[], + opts: { txTimeoutAt?: Date } = {}, + ): Promise { + const consensusPayload = new ConsensusPayload(block.header, block.archive.root, txHashes ?? []); + + const digest = await getHashedSignaturePayload(consensusPayload, SignatureDomainSeparator.blockAttestation); + + const blobs = await Blob.getBlobs(block.body.toBlobFields()); + const proposeTxArgs = { + header: block.header.toBuffer(), + archive: block.archive.root.toBuffer(), + blockHash: (await block.header.hash()).toBuffer(), + body: block.body.toBuffer(), + blobs, + attestations, + txHashes: txHashes ?? [], + }; + + // @note This will make sure that we are passing the checks for our header ASSUMING that the data is also made available + // This means that we can avoid the simulation issues in later checks. + // By simulation issue, I mean the fact that the block.timestamp is equal to the last block, not the next, which + // make time consistency checks break. 
+ const ts = await this.validateBlockForSubmission(block.header, { + digest: digest.toBuffer(), + signatures: attestations ?? [], + }); + + this.log.debug(`Submitting propose transaction`); + await this.addProposeTx(block, proposeTxArgs, opts, ts); + return true; + } + + /** Enqueues a claimEpochProofRight transaction to submit a chosen prover quote for the previous epoch. */ + public enqueueClaimEpochProofRight(proofQuote: EpochProofQuote): boolean { + const timer = new Timer(); + this.addRequest({ + action: 'claim', + request: { + to: this.rollupContract.address, + data: encodeFunctionData({ + abi: RollupAbi, + functionName: 'claimEpochProofRight', + args: [proofQuote.toViemArgs()], + }), + }, + lastValidL2Slot: this.getCurrentL2Slot(), + onResult: (_request, result) => { + if (!result) { + return; + } + const { receipt, stats } = result; + if (receipt.status === 'success') { + const publishStats: L1PublishStats = { + gasPrice: receipt.effectiveGasPrice, + gasUsed: receipt.gasUsed, + transactionHash: receipt.transactionHash, + blobDataGas: 0n, + blobGasUsed: 0n, + ...pick(stats!, 'calldataGas', 'calldataSize', 'sender'), + }; + this.log.verbose(`Submitted claim epoch proof right to L1 rollup contract`, { + ...publishStats, + ...proofQuote.toInspect(), + }); + this.metrics.recordClaimEpochProofRightTx(timer.ms(), publishStats); + } else { + this.metrics.recordFailedTx('claimEpochProofRight'); + // TODO: Get the error message from the reverted tx + this.log.error(`Claim epoch proof right tx reverted`, { + txHash: receipt.transactionHash, + ...proofQuote.toInspect(), + }); + } + }, + }); + return true; + } + + /** + * Calling `interrupt` will cause any in progress call to `publishRollup` to return `false` asap. + * Be warned, the call may return false even if the tx subsequently gets successfully mined. + * In practice this shouldn't matter, as we'll only ever be calling `interrupt` when we know it's going to fail. 
+ * A call to `restart` is required before you can continue publishing. + */ + public interrupt() { + this.interrupted = true; + this.l1TxUtils.interrupt(); + } + + /** Restarts the publisher after calling `interrupt`. */ + public restart() { + this.interrupted = false; + this.l1TxUtils.restart(); + } + + private async prepareProposeTx(encodedData: L1ProcessArgs, timestamp: bigint) { + const kzg = Blob.getViemKzgInstance(); + const blobInput = Blob.getEthBlobEvaluationInputs(encodedData.blobs); + this.log.debug('Validating blob input', { blobInput }); + const blobEvaluationGas = await this.l1TxUtils + .estimateGas( + this.l1TxUtils.walletClient.account, + { + to: this.rollupContract.address, + data: encodeFunctionData({ + abi: RollupAbi, + functionName: 'validateBlobs', + args: [blobInput], + }), + }, + {}, + { + blobs: encodedData.blobs.map(b => b.data), + kzg, + }, + ) + .catch(err => { + const { message, metaMessages } = formatViemError(err); + this.log.error(`Failed to validate blobs`, message, { metaMessages }); + throw new Error('Failed to validate blobs'); + }); + + const attestations = encodedData.attestations + ? encodedData.attestations.map(attest => attest.toViemSignature()) + : []; + const txHashes = encodedData.txHashes ? encodedData.txHashes.map(txHash => txHash.toString()) : []; + const args = [ + { + header: `0x${encodedData.header.toString('hex')}`, + archive: `0x${encodedData.archive.toString('hex')}`, + oracleInput: { + // We are currently not modifying these. 
See #9963 + feeAssetPriceModifier: 0n, + provingCostModifier: 0n, + }, + blockHash: `0x${encodedData.blockHash.toString('hex')}`, + txHashes, + }, + attestations, + // TODO(#9101): Extract blobs from beacon chain => calldata will only contain what's needed to verify blob and body input can be removed + `0x${encodedData.body.toString('hex')}`, + blobInput, + ] as const; + + const rollupData = encodeFunctionData({ + abi: RollupAbi, + functionName: 'propose', + args, + }); + + const forwarderData = encodeFunctionData({ + abi: ForwarderAbi, + functionName: 'forward', + args: [[this.rollupContract.address], [rollupData]], + }); + + const simulationResult = await this.l1TxUtils + .simulateGasUsed( + { + to: this.getForwarderAddress().toString(), + data: forwarderData, + gas: SequencerPublisher.PROPOSE_GAS_GUESS, + }, + { + // @note we add 1n to the timestamp because geth implementation doesn't like simulation timestamp to be equal to the current block timestamp + time: timestamp + 1n, + // @note reth should have a 30m gas limit per block but throws errors that this tx is beyond limit + gasLimit: SequencerPublisher.PROPOSE_GAS_GUESS * 2n, + }, + [ + { + address: this.rollupContract.address, + // @note we override checkBlob to false since blobs are not part simulate() + stateDiff: [ + { + slot: toHex(9n, true), + value: toHex(0n, true), + }, + ], + }, + ], + { + // @note fallback gas estimate to use if the node doesn't support simulation API + fallbackGasEstimate: SequencerPublisher.PROPOSE_GAS_GUESS, + }, + ) + .catch(err => { + const { message, metaMessages } = formatViemError(err); + this.log.error(`Failed to simulate gas used`, message, { metaMessages }); + throw new Error('Failed to simulate gas used'); + }); + + return { args, blobEvaluationGas, rollupData, simulationResult }; + } + + private async addProposeTx( + block: L2Block, + encodedData: L1ProcessArgs, + opts: { txTimeoutAt?: Date } = {}, + timestamp: bigint, + ): Promise { + const timer = new Timer(); + const 
kzg = Blob.getViemKzgInstance(); + const { rollupData, simulationResult, blobEvaluationGas } = await this.prepareProposeTx(encodedData, timestamp); + const startBlock = await this.l1TxUtils.getBlockNumber(); + + return this.addRequest({ + action: 'propose', + request: { + to: this.rollupContract.address, + data: rollupData, + }, + lastValidL2Slot: block.header.globalVariables.slotNumber.toBigInt(), + gasConfig: { + ...opts, + gasLimit: this.l1TxUtils.bumpGasLimit(simulationResult + blobEvaluationGas), + }, + blobConfig: { + blobs: encodedData.blobs.map(b => b.data), + kzg, + }, + onResult: (request, result) => { + if (!result) { + return; + } + const { receipt, stats, errorMsg } = result; + if (receipt.status === 'success') { + const endBlock = receipt.blockNumber; + const inclusionBlocks = Number(endBlock - startBlock); + const publishStats: L1PublishBlockStats = { + gasPrice: receipt.effectiveGasPrice, + gasUsed: receipt.gasUsed, + blobGasUsed: receipt.blobGasUsed ?? 0n, + blobDataGas: receipt.blobGasPrice ?? 0n, + transactionHash: receipt.transactionHash, + ...pick(stats!, 'calldataGas', 'calldataSize', 'sender'), + ...block.getStats(), + eventName: 'rollup-published-to-l1', + blobCount: encodedData.blobs.length, + inclusionBlocks, + }; + this.log.verbose(`Published L2 block to L1 rollup contract`, { ...stats, ...block.getStats() }); + this.metrics.recordProcessBlockTx(timer.ms(), publishStats); + + // Send the blobs to the blob sink + this.sendBlobsToBlobSink(receipt.blockHash, encodedData.blobs).catch(_err => { + this.log.error('Failed to send blobs to blob sink'); + }); + + return true; + } else { + this.metrics.recordFailedTx('process'); + + this.log.error(`Rollup process tx reverted. ${errorMsg ?? 
'No error message'}`, undefined, { + ...block.getStats(), + txHash: receipt.transactionHash, + blockHash: block.hash().toString(), + slotNumber: block.header.globalVariables.slotNumber.toBigInt(), + }); + } + }, + }); + } + + /** + * Send blobs to the blob sink + * + * If a blob sink url is configured, then we send blobs to the blob sink + * - for now we use the blockHash as the identifier for the blobs; + * In the future this will move to be the beacon block id - which takes a bit more work + * to calculate and will need to be mocked in e2e tests + */ + protected sendBlobsToBlobSink(blockHash: string, blobs: Blob[]): Promise { + return this.blobSinkClient.sendBlobsToBlobSink(blockHash, blobs); + } +} diff --git a/yarn-project/sequencer-client/src/sequencer/metrics.ts b/yarn-project/sequencer-client/src/sequencer/metrics.ts index bc6caca4568..108ac266b48 100644 --- a/yarn-project/sequencer-client/src/sequencer/metrics.ts +++ b/yarn-project/sequencer-client/src/sequencer/metrics.ts @@ -105,13 +105,12 @@ export class SequencerMetrics { this.setCurrentBlock(0, 0); } - recordPublishedBlock(buildDurationMs: number, totalMana: number) { + recordBuiltBlock(buildDurationMs: number, totalMana: number) { this.blockCounter.add(1, { - [Attributes.STATUS]: 'published', + [Attributes.STATUS]: 'built', }); this.blockBuildDuration.record(Math.ceil(buildDurationMs)); this.blockBuildManaPerSecond.record(Math.ceil((totalMana * 1000) / buildDurationMs)); - this.setCurrentBlock(0, 0); } recordFailedBlock() { diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 571d3e8c06e..1c1a4ae2cf2 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -4,7 +4,6 @@ import { BlockProposal, Body, ConsensusPayload, - type EpochProofQuote, type L1ToL2MessageSource, L2Block, type L2BlockSource, @@ -47,13 +46,13 @@ import { 
expect } from '@jest/globals'; import { type MockProxy, mock, mockFn } from 'jest-mock-extended'; import { type GlobalVariableBuilder } from '../global_variable_builder/global_builder.js'; -import { type L1Publisher } from '../publisher/l1-publisher.js'; +import { type SequencerPublisher } from '../publisher/sequencer-publisher.js'; import { type SlasherClient } from '../slasher/index.js'; import { Sequencer } from './sequencer.js'; import { SequencerState } from './utils.js'; describe('sequencer', () => { - let publisher: MockProxy; + let publisher: MockProxy; let validatorClient: MockProxy; let globalVariableBuilder: MockProxy; let p2p: MockProxy; @@ -138,9 +137,9 @@ describe('sequencer', () => { return tx; }; - const expectPublisherProposeL2Block = (txHashes: TxHash[], proofQuote?: EpochProofQuote) => { - expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1); - expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), txHashes, proofQuote, { + const expectPublisherProposeL2Block = (txHashes: TxHash[]) => { + expect(publisher.enqueueProposeL2Block).toHaveBeenCalledTimes(1); + expect(publisher.enqueueProposeL2Block).toHaveBeenCalledWith(block, getSignatures(), txHashes, { txTimeoutAt: expect.any(Date), }); }; @@ -165,12 +164,15 @@ describe('sequencer', () => { gasFees, ); - publisher = mock(); + publisher = mock(); publisher.getSenderAddress.mockImplementation(() => EthAddress.random()); + publisher.getForwarderAddress.mockImplementation(() => EthAddress.random()); publisher.getCurrentEpochCommittee.mockResolvedValue(committee); - publisher.canProposeAtNextEthBlock.mockResolvedValue([BigInt(newSlotNumber), BigInt(newBlockNumber)]); publisher.validateBlockForSubmission.mockResolvedValue(1n); - publisher.proposeL2Block.mockResolvedValue(true); + publisher.enqueueProposeL2Block.mockResolvedValue(true); + publisher.enqueueCastVote.mockResolvedValue(true); + publisher.enqueueClaimEpochProofRight.mockReturnValue(true); + 
publisher.canProposeAtNextEthBlock.mockResolvedValue([BigInt(newSlotNumber), BigInt(newBlockNumber)]); globalVariableBuilder = mock(); globalVariableBuilder.buildGlobalVariables.mockResolvedValue(globalVariables); @@ -320,7 +322,7 @@ describe('sequencer', () => { ); expect(blockBuilder.startNewBlock).not.toHaveBeenCalled(); - expect(publisher.proposeL2Block).not.toHaveBeenCalled(); + expect(publisher.enqueueProposeL2Block).not.toHaveBeenCalled(); }); it('builds a block when it is their turn', async () => { @@ -331,7 +333,7 @@ describe('sequencer', () => { block = await makeBlock([tx]); // Not your turn! - publisher.canProposeAtNextEthBlock.mockRejectedValue(new Error()); + publisher.canProposeAtNextEthBlock.mockReturnValue(Promise.resolve(undefined)); publisher.validateBlockForSubmission.mockRejectedValue(new Error()); await sequencer.doRealWork(); @@ -494,7 +496,7 @@ describe('sequencer', () => { await sequencer.doRealWork(); - expect(publisher.proposeL2Block).not.toHaveBeenCalled(); + expect(publisher.enqueueProposeL2Block).not.toHaveBeenCalled(); }); it('does not publish a block if the block proposal failed', async () => { @@ -506,7 +508,21 @@ describe('sequencer', () => { await sequencer.doRealWork(); - expect(publisher.proposeL2Block).not.toHaveBeenCalled(); + expect(publisher.enqueueProposeL2Block).not.toHaveBeenCalled(); + }); + + it('handles when enqueueProposeL2Block throws', async () => { + const tx = await makeTx(); + mockPendingTxs([tx]); + block = await makeBlock([tx]); + + publisher.enqueueProposeL2Block.mockRejectedValueOnce(new Error('Failed to enqueue propose L2 block')); + + await sequencer.doRealWork(); + expectPublisherProposeL2Block([await tx.getTxHash()]); + + // Even though the block publish was not enqueued, we still send any requests + expect(publisher.sendRequests).toHaveBeenCalledTimes(1); }); describe('proof quotes', () => { @@ -547,11 +563,8 @@ describe('sequencer', () => { 
globalVariableBuilder.buildGlobalVariables.mockResolvedValue(globalVariables); + publisher.enqueueClaimEpochProofRight.mockReturnValueOnce(true); publisher.canProposeAtNextEthBlock.mockResolvedValue([BigInt(newSlotNumber), BigInt(blockNumber)]); - publisher.claimEpochProofRight.mockResolvedValueOnce(true); - publisher.getEpochForSlotNumber.mockImplementation((slotNumber: bigint) => - Promise.resolve(slotNumber / BigInt(epochDuration)), - ); tx = await makeTx(); txHash = await tx.getTxHash(); @@ -576,13 +589,14 @@ describe('sequencer', () => { const proofQuote = mockEpochProofQuote(); p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); - publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); + publisher.filterValidQuotes.mockImplementation(() => Promise.resolve([proofQuote])); // The previous epoch can be claimed publisher.getClaimableEpoch.mockImplementation(() => Promise.resolve(currentEpoch - 1n)); await sequencer.doRealWork(); - expectPublisherProposeL2Block([txHash], proofQuote); + expect(publisher.enqueueClaimEpochProofRight).toHaveBeenCalledWith(proofQuote); + expectPublisherProposeL2Block([txHash]); }); it('submits a valid proof quote even without a block', async () => { @@ -595,14 +609,14 @@ describe('sequencer', () => { const proofQuote = mockEpochProofQuote(); p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); - publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); + publisher.filterValidQuotes.mockImplementation(() => Promise.resolve([proofQuote])); // The previous epoch can be claimed publisher.getClaimableEpoch.mockImplementation(() => Promise.resolve(currentEpoch - 1n)); await sequencer.doRealWork(); - expect(publisher.claimEpochProofRight).toHaveBeenCalledWith(proofQuote); - expect(publisher.proposeL2Block).not.toHaveBeenCalled(); + expect(publisher.enqueueClaimEpochProofRight).toHaveBeenCalledWith(proofQuote); + 
expect(publisher.enqueueProposeL2Block).not.toHaveBeenCalled(); }); it('submits a valid proof quote if building a block proposal fails', async () => { @@ -612,7 +626,7 @@ describe('sequencer', () => { const proofQuote = mockEpochProofQuote(); p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); - publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); + publisher.filterValidQuotes.mockImplementation(() => Promise.resolve([proofQuote])); // The previous epoch can be claimed publisher.getClaimableEpoch.mockImplementation(() => Promise.resolve(currentEpoch - 1n)); @@ -620,8 +634,8 @@ describe('sequencer', () => { validatorClient.createBlockProposal.mockResolvedValue(undefined); await sequencer.doRealWork(); - expect(publisher.claimEpochProofRight).toHaveBeenCalledWith(proofQuote); - expect(publisher.proposeL2Block).not.toHaveBeenCalled(); + expect(publisher.enqueueClaimEpochProofRight).toHaveBeenCalledWith(proofQuote); + expect(publisher.enqueueProposeL2Block).not.toHaveBeenCalled(); }); it('does not claim the epoch previous to the first', async () => { @@ -631,7 +645,7 @@ describe('sequencer', () => { const proofQuote = mockEpochProofQuote({ epoch: 0n }); p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); - publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); + publisher.filterValidQuotes.mockImplementation(() => Promise.resolve([proofQuote])); publisher.getClaimableEpoch.mockImplementation(() => Promise.resolve(undefined)); @@ -647,7 +661,7 @@ describe('sequencer', () => { const proofQuote = mockEpochProofQuote({ validUntilSlot: expiredSlotNumber }); p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); - publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); + publisher.filterValidQuotes.mockImplementation(() => Promise.resolve([proofQuote])); // The previous epoch can be claimed publisher.getClaimableEpoch.mockImplementation(() => 
Promise.resolve(currentEpoch - 1n)); @@ -663,9 +677,9 @@ describe('sequencer', () => { const proofQuote = mockEpochProofQuote(); p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); - publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); + publisher.filterValidQuotes.mockImplementation(() => Promise.resolve([proofQuote])); - publisher.getClaimableEpoch.mockResolvedValue(undefined); + publisher.getClaimableEpoch.mockImplementation(() => Promise.resolve(undefined)); await sequencer.doRealWork(); expectPublisherProposeL2Block([txHash]); @@ -678,10 +692,10 @@ describe('sequencer', () => { const proofQuote = mockEpochProofQuote(); p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); - publisher.proposeL2Block.mockResolvedValueOnce(true); + publisher.enqueueProposeL2Block.mockResolvedValueOnce(true); // Quote is reported as invalid - publisher.validateProofQuote.mockImplementation(_ => Promise.resolve(undefined)); + publisher.filterValidQuotes.mockImplementation(() => Promise.reject(new Error('Invalid proof quote'))); // The previous epoch can be claimed publisher.getClaimableEpoch.mockImplementation(() => Promise.resolve(currentEpoch - 1n)); @@ -709,18 +723,17 @@ describe('sequencer', () => { const allQuotes = [proofQuoteInvalidSlot, proofQuoteInvalidEpoch, ...validQuotes, proofQuoteInvalid]; p2p.getEpochProofQuotes.mockResolvedValue(allQuotes); - publisher.proposeL2Block.mockResolvedValueOnce(true); + publisher.enqueueProposeL2Block.mockResolvedValueOnce(true); // Quote is reported as invalid - publisher.validateProofQuote.mockImplementation(p => - Promise.resolve(p.payload.basisPointFee === 3 ? 
undefined : p), - ); + publisher.filterValidQuotes.mockImplementation(() => Promise.resolve(validQuotes)); // The previous epoch can be claimed publisher.getClaimableEpoch.mockImplementation(() => Promise.resolve(currentEpoch - 1n)); await sequencer.doRealWork(); - expectPublisherProposeL2Block([txHash], validQuotes[0]); + expect(publisher.enqueueClaimEpochProofRight).toHaveBeenCalledWith(validQuotes[0]); + expectPublisherProposeL2Block([txHash]); }); }); }); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 695df7c040f..da16078c4d4 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -21,7 +21,6 @@ import { type GlobalVariables, StateReference, } from '@aztec/circuits.js'; -import { prettyLogViemErrorMsg } from '@aztec/ethereum'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { omit } from '@aztec/foundation/collection'; import { EthAddress } from '@aztec/foundation/eth-address'; @@ -37,7 +36,7 @@ import { Attributes, type TelemetryClient, type Tracer, getTelemetryClient, trac import { type ValidatorClient } from '@aztec/validator-client'; import { type GlobalVariableBuilder } from '../global_variable_builder/global_builder.js'; -import { type L1Publisher, VoteType } from '../publisher/l1-publisher.js'; +import { type SequencerPublisher, VoteType } from '../publisher/sequencer-publisher.js'; import { type SlasherClient } from '../slasher/slasher_client.js'; import { createValidatorsForBlockBuilding } from '../tx_validator/tx_validator_factory.js'; import { getDefaultAllowedSetupFunctions } from './allowed.js'; @@ -81,7 +80,7 @@ export class Sequencer { protected enforceTimeTable: boolean = false; constructor( - protected publisher: L1Publisher, + protected publisher: SequencerPublisher, protected validatorClient: ValidatorClient | undefined, // During migration the validator 
client can be inactive protected globalsBuilder: GlobalVariableBuilder, protected p2pClient: P2P, @@ -217,6 +216,11 @@ export class Sequencer { return { state: this.state }; } + /** Forces the sequencer to bypass all time and tx count checks for the next block and build anyway. */ + public flush() { + this.isFlushing = true; + } + /** * @notice Performs most of the sequencer duties: * - Checks if we are up to date @@ -243,14 +247,14 @@ export class Sequencer { // If we cannot find a tip archive, assume genesis. const chainTipArchive = chainTip?.archive.root ?? new Fr(GENESIS_ARCHIVE_ROOT); - let slot: bigint; - try { - slot = await this.mayProposeBlock(chainTipArchive.toBuffer(), BigInt(newBlockNumber)); - } catch (err) { - this.log.debug(`Cannot propose for block ${newBlockNumber}`); + const slot = await this.slotForProposal(chainTipArchive.toBuffer(), BigInt(newBlockNumber)); + if (!slot) { + this.log.debug(`Cannot propose block ${newBlockNumber}`); return; } + this.log.info(`Can propose block ${newBlockNumber} at slot ${slot}`); + const newGlobalVariables = await this.globalsBuilder.buildGlobalVariables( new Fr(newBlockNumber), this._coinbase, @@ -258,19 +262,19 @@ export class Sequencer { slot, ); - void this.publisher.castVote(slot, newGlobalVariables.timestamp.toBigInt(), VoteType.GOVERNANCE); - void this.publisher.castVote(slot, newGlobalVariables.timestamp.toBigInt(), VoteType.SLASHING); + const enqueueGovernanceVotePromise = this.publisher.enqueueCastVote( + slot, + newGlobalVariables.timestamp.toBigInt(), + VoteType.GOVERNANCE, + ); + const enqueueSlashingVotePromise = this.publisher.enqueueCastVote( + slot, + newGlobalVariables.timestamp.toBigInt(), + VoteType.SLASHING, + ); - // Check the pool has enough txs to build a block - const pendingTxCount = await this.p2pClient.getPendingTxCount(); - if (pendingTxCount < this.minTxsPerBlock && !this.isFlushing) { - this.log.verbose(`Not enough txs to propose block. 
Got ${pendingTxCount} min ${this.minTxsPerBlock}.`, { - slot, - blockNumber: newBlockNumber, - }); - await this.claimEpochProofRightIfAvailable(slot); - return; - } + // Start collecting proof quotes for the previous epoch if needed in the background + const createProofQuotePromise = this.createProofClaimForPreviousEpoch(slot); this.setState(SequencerState.INITIALIZING_PROPOSAL, slot); this.log.verbose(`Preparing proposal for block ${newBlockNumber} at slot ${slot}`, { @@ -279,10 +283,6 @@ export class Sequencer { slot, }); - // We don't fetch exactly maxTxsPerBlock txs here because we may not need all of them if we hit a limit before, - // and also we may need to fetch more if we don't have enough valid txs. - const pendingTxs = this.p2pClient.iteratePendingTxs(); - // If I created a "partial" header here that should make our job much easier. const proposalHeader = new BlockHeader( new AppendOnlyTreeSnapshot(chainTipArchive, 1), @@ -293,18 +293,41 @@ export class Sequencer { Fr.ZERO, ); - try { - // TODO(palla/txs) Is the note below still valid? We don't seem to be doing any rollback in there. - // @note It is very important that the following function will FAIL and not just return early - // if it have made any state changes. If not, we won't rollback the state, and you will - // be in for a world of pain. - await this.buildBlockAndAttemptToPublish(pendingTxs, proposalHeader); - } catch (err) { - this.log.error(`Error assembling block`, err, { blockNumber: newBlockNumber, slot }); + let finishedFlushing = false; + const pendingTxCount = await this.p2pClient.getPendingTxCount(); + if (pendingTxCount >= this.minTxsPerBlock || this.isFlushing) { + // We don't fetch exactly maxTxsPerBlock txs here because we may not need all of them if we hit a limit before, + // and also we may need to fetch more if we don't have enough valid txs. 
+ const pendingTxs = this.p2pClient.iteratePendingTxs(); + + await this.buildBlockAndEnqueuePublish(pendingTxs, proposalHeader).catch(err => { + this.log.error(`Error building/enqueuing block`, err, { blockNumber: newBlockNumber, slot }); + }); + finishedFlushing = true; + } else { + this.log.debug( + `Not enough txs to build block ${newBlockNumber} at slot ${slot}: got ${pendingTxCount} txs, need ${this.minTxsPerBlock}`, + ); + } + + await enqueueGovernanceVotePromise.catch(err => { + this.log.error(`Error enqueuing governance vote`, err, { blockNumber: newBlockNumber, slot }); + }); + await enqueueSlashingVotePromise.catch(err => { + this.log.error(`Error enqueuing slashing vote`, err, { blockNumber: newBlockNumber, slot }); + }); + await createProofQuotePromise + .then(quote => (quote ? this.publisher.enqueueClaimEpochProofRight(quote) : undefined)) + .catch(err => { + this.log.error(`Error creating proof quote`, err, { blockNumber: newBlockNumber, slot }); + }); + + await this.publisher.sendRequests(); - // If the block failed to build, we might still want to claim the proving rights - await this.claimEpochProofRightIfAvailable(slot); + if (finishedFlushing) { + this.isFlushing = false; } + this.setState(SequencerState.IDLE, 0n); } @@ -324,24 +347,31 @@ export class Sequencer { } } - async mayProposeBlock(tipArchive: Buffer, proposalBlockNumber: bigint): Promise { - // This checks that we can propose, and gives us the slot that we are to propose for - try { - const [slot, blockNumber] = await this.publisher.canProposeAtNextEthBlock(tipArchive); + public getForwarderAddress() { + return this.publisher.getForwarderAddress(); + } - if (proposalBlockNumber !== blockNumber) { - const msg = `Sequencer block number mismatch. 
Expected ${proposalBlockNumber} but got ${blockNumber}.`; - this.log.warn(msg); - throw new Error(msg); - } - return slot; - } catch (err) { - const msg = prettyLogViemErrorMsg(err); - this.log.debug( - `Rejected from being able to propose at next block with ${tipArchive.toString('hex')}: ${msg ? `${msg}` : ''}`, - ); - throw err; + /** + * Checks if we can propose at the next block and returns the slot number if we can. + * @param tipArchive - The archive of the previous block. + * @param proposalBlockNumber - The block number of the proposal. + * @returns The slot number if we can propose at the next block, otherwise undefined. + */ + async slotForProposal(tipArchive: Buffer, proposalBlockNumber: bigint): Promise { + const result = await this.publisher.canProposeAtNextEthBlock(tipArchive); + + if (!result) { + return undefined; + } + + const [slot, blockNumber] = result; + + if (proposalBlockNumber !== blockNumber) { + const msg = `Sequencer block number mismatch. Expected ${proposalBlockNumber} but got ${blockNumber}.`; + this.log.warn(msg); + throw new Error(msg); } + return slot; } /** @@ -515,10 +545,10 @@ export class Sequencer { * @param pendingTxs - Iterable of pending transactions to construct the block from * @param proposalHeader - The partial header constructed for the proposal */ - @trackSpan('Sequencer.buildBlockAndAttemptToPublish', (_validTxs, proposalHeader) => ({ + @trackSpan('Sequencer.buildBlockAndEnqueuePublish', (_validTxs, proposalHeader) => ({ [Attributes.BLOCK_NUMBER]: proposalHeader.globalVariables.blockNumber.toNumber(), })) - private async buildBlockAndAttemptToPublish( + private async buildBlockAndEnqueuePublish( pendingTxs: AsyncIterableIterator, proposalHeader: BlockHeader, ): Promise { @@ -532,22 +562,19 @@ export class Sequencer { const workTimer = new Timer(); this.setState(SequencerState.CREATING_BLOCK, slot); - // Start collecting proof quotes for the previous epoch if needed in the background - const proofQuotePromise = 
this.createProofClaimForPreviousEpoch(slot); - try { const buildBlockRes = await this.buildBlock(pendingTxs, newGlobalVariables); const { publicGas, block, publicProcessorDuration, numTxs, numMsgs, blockBuildingTimer } = buildBlockRes; + this.metrics.recordBuiltBlock(workTimer.ms(), publicGas.l2Gas); // TODO(@PhilWindle) We should probably periodically check for things like another // block being published before ours instead of just waiting on our block await this.publisher.validateBlockForSubmission(block.header); - const workDuration = workTimer.ms(); const blockStats: L2BlockBuiltStats = { eventName: 'l2-block-built', creator: this.publisher.getSenderAddress().toString(), - duration: workDuration, + duration: workTimer.ms(), publicProcessDuration: publicProcessorDuration, rollupCircuitsDuration: blockBuildingTimer.ms(), ...block.getStats(), @@ -555,18 +582,18 @@ export class Sequencer { const blockHash = await block.hash(); const txHashes = block.body.txEffects.map(tx => tx.txHash); - this.log.info(`Built block ${block.number} for slot ${slot} with ${numTxs} txs`, { - blockHash, - globalVariables: block.header.globalVariables.toInspect(), - txHashes, - ...blockStats, - }); - - if (this.isFlushing) { - this.log.verbose(`Sequencer flushing completed`); - } + this.log.info( + `Built block ${block.number} for slot ${slot} with ${numTxs} txs and ${numMsgs} messages. 
${ + publicGas.l2Gas / workTimer.s() + } mana/s`, + { + blockHash, + globalVariables: block.header.globalVariables.toInspect(), + txHashes, + ...blockStats, + }, + ); - this.isFlushing = false; this.log.debug('Collecting attestations'); const stopCollectingAttestationsTimer = this.metrics.startCollectingAttestationsTimer(); const attestations = await this.collectAttestations(block, txHashes); @@ -575,37 +602,13 @@ export class Sequencer { } stopCollectingAttestationsTimer(); - // Get the proof quote for the previous epoch, if any - const proofQuote = await proofQuotePromise; - - await this.publishL2Block(block, attestations, txHashes, proofQuote); - this.metrics.recordPublishedBlock(workDuration, publicGas.l2Gas); - const duration = Math.ceil(workDuration); - const manaPerSecond = Math.ceil((publicGas.l2Gas * 1000) / duration); - this.log.info( - `Published block ${block.number} with ${numTxs} txs and ${numMsgs} messages in ${duration} ms at ${manaPerSecond} mana/s`, - { - publicGas, - blockNumber: block.number, - blockHash: blockHash, - slot, - txCount: txHashes.length, - msgCount: numMsgs, - duration, - submitter: this.publisher.getSenderAddress().toString(), - }, - ); + return this.enqueuePublishL2Block(block, attestations, txHashes); } catch (err) { this.metrics.recordFailedBlock(); throw err; } } - /** Forces the sequencer to bypass all time and tx count checks for the next block and build anyway. 
*/ - public flush() { - this.isFlushing = true; - } - @trackSpan('Sequencer.collectAttestations', (block, txHashes) => ({ [Attributes.BLOCK_NUMBER]: block.number, [Attributes.BLOCK_ARCHIVE]: block.archive.toString(), @@ -660,28 +663,33 @@ export class Sequencer { try { // Find out which epoch we are currently in const epochToProve = await this.publisher.getClaimableEpoch(); + if (epochToProve === undefined) { - this.log.trace(`No epoch to prove at slot ${slotNumber}`); + this.log.trace(`No epoch to claim at slot ${slotNumber}`); return undefined; } // Get quotes for the epoch to be proven this.log.debug(`Collecting proof quotes for epoch ${epochToProve}`); - const quotes = await this.p2pClient.getEpochProofQuotes(epochToProve); - this.log.verbose(`Retrieved ${quotes.length} quotes for slot ${slotNumber} epoch ${epochToProve}`, { + const p2pQuotes = await this.p2pClient + .getEpochProofQuotes(epochToProve) + .then(quotes => + quotes + .filter(x => x.payload.validUntilSlot >= slotNumber) + .filter(x => x.payload.epochToProve === epochToProve), + ); + this.log.verbose(`Retrieved ${p2pQuotes.length} quotes for slot ${slotNumber} epoch ${epochToProve}`, { epochToProve, slotNumber, - quotes: quotes.map(q => q.payload), + quotes: p2pQuotes.map(q => q.payload), }); + if (!p2pQuotes.length) { + return undefined; + } + // ensure these quotes are still valid for the slot and have the contract validate them - const validQuotesPromise = Promise.all( - quotes - .filter(x => x.payload.validUntilSlot >= slotNumber) - .filter(x => x.payload.epochToProve === epochToProve) - .map(x => this.publisher.validateProofQuote(x)), - ); + const validQuotes = await this.publisher.filterValidQuotes(p2pQuotes); - const validQuotes = (await validQuotesPromise).filter((q): q is EpochProofQuote => !!q); if (!validQuotes.length) { this.log.warn(`Failed to find any valid proof quotes`); return undefined; @@ -703,15 +711,14 @@ export class Sequencer { * Publishes the L2Block to the rollup contract. 
* @param block - The L2Block to be published. */ - @trackSpan('Sequencer.publishL2Block', block => ({ + @trackSpan('Sequencer.enqueuePublishL2Block', block => ({ [Attributes.BLOCK_NUMBER]: block.number, })) - protected async publishL2Block( + protected async enqueuePublishL2Block( block: L2Block, attestations?: Signature[], txHashes?: TxHash[], - proofQuote?: EpochProofQuote, - ) { + ): Promise { // Publishes new block to the network and awaits the tx to be mined this.setState(SequencerState.PUBLISHING_BLOCK, block.header.globalVariables.slotNumber.toBigInt()); @@ -719,11 +726,12 @@ export class Sequencer { const slot = block.header.globalVariables.slotNumber.toNumber(); const txTimeoutAt = new Date((this.getSlotStartTimestamp(slot) + this.aztecSlotDuration) * 1000); - const publishedL2Block = await this.publisher.proposeL2Block(block, attestations, txHashes, proofQuote, { + const enqueued = await this.publisher.enqueueProposeL2Block(block, attestations, txHashes, { txTimeoutAt, }); - if (!publishedL2Block) { - throw new Error(`Failed to publish block ${block.number}`); + + if (!enqueued) { + throw new Error(`Failed to enqueue publish of block ${block.number}`); } } @@ -742,11 +750,11 @@ export class Sequencer { const epoch = proofQuote.payload.epochToProve; const ctx = { slotNumber, epoch, quote: proofQuote.toInspect() }; this.log.verbose(`Claiming proof right for epoch ${epoch}`, ctx); - const success = await this.publisher.claimEpochProofRight(proofQuote); - if (!success) { - throw new Error(`Failed to claim proof right for epoch ${epoch}`); + const enqueued = this.publisher.enqueueClaimEpochProofRight(proofQuote); + if (!enqueued) { + throw new Error(`Failed to enqueue claim of proof right for epoch ${epoch}`); } - this.log.info(`Claimed proof right for epoch ${epoch}`, ctx); + this.log.info(`Enqueued claim of proof right for epoch ${epoch}`, ctx); return epoch; } diff --git a/yarn-project/sequencer-client/src/test/index.ts 
b/yarn-project/sequencer-client/src/test/index.ts index e27d25b6500..f348ba4072e 100644 --- a/yarn-project/sequencer-client/src/test/index.ts +++ b/yarn-project/sequencer-client/src/test/index.ts @@ -1,14 +1,14 @@ import { type PublicProcessorFactory } from '@aztec/simulator/server'; import { SequencerClient } from '../client/sequencer-client.js'; -import { type L1Publisher } from '../publisher/l1-publisher.js'; +import { type SequencerPublisher } from '../publisher/sequencer-publisher.js'; import { Sequencer } from '../sequencer/sequencer.js'; import { type SequencerTimetable } from '../sequencer/timetable.js'; class TestSequencer_ extends Sequencer { public override publicProcessorFactory!: PublicProcessorFactory; public override timetable!: SequencerTimetable; - public override publisher!: L1Publisher; + public override publisher!: SequencerPublisher; } export type TestSequencer = TestSequencer_; @@ -18,5 +18,3 @@ class TestSequencerClient_ extends SequencerClient { } export type TestSequencerClient = TestSequencerClient_; - -export * from './test-l1-publisher.js'; diff --git a/yarn-project/sequencer-client/src/test/test-l1-publisher.ts b/yarn-project/sequencer-client/src/test/test-l1-publisher.ts deleted file mode 100644 index a28b352bb66..00000000000 --- a/yarn-project/sequencer-client/src/test/test-l1-publisher.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { type EthereumChain } from '@aztec/ethereum'; -import { type Delayer, withDelayer } from '@aztec/ethereum/test'; - -import { type Chain, type HttpTransport, type PrivateKeyAccount, type WalletClient } from 'viem'; - -import { L1Publisher } from '../publisher/l1-publisher.js'; - -export class TestL1Publisher extends L1Publisher { - public delayer: Delayer | undefined; - - protected override createWalletClient( - account: PrivateKeyAccount, - chain: EthereumChain, - ): WalletClient { - const baseClient = super.createWalletClient(account, chain); - const { client, delayer } = withDelayer(baseClient, { 
ethereumSlotDuration: this.ethereumSlotDuration }); - this.delayer = delayer; - return client; - } -} diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 6d2560b2bd1..ac0f525a705 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -597,6 +597,7 @@ __metadata: "@aztec/circuit-types": "workspace:^" "@aztec/circuits.js": "workspace:^" "@aztec/entrypoints": "workspace:^" + "@aztec/epoch-cache": "workspace:^" "@aztec/ethereum": "workspace:^" "@aztec/foundation": "workspace:^" "@aztec/kv-store": "workspace:^" @@ -1224,6 +1225,7 @@ __metadata: "@aztec/blob-sink": "workspace:^" "@aztec/circuit-types": "workspace:^" "@aztec/circuits.js": "workspace:^" + "@aztec/epoch-cache": "workspace:^" "@aztec/ethereum": "workspace:^" "@aztec/foundation": "workspace:^" "@aztec/kv-store": "workspace:^"