diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 24c5e8d45d..c9f92d3a81 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -46,6 +46,11 @@ jobs: sudo rm -rf "/usr/local/share/boost" sudo rm -rf "$AGENT_TOOLSDIRECTORY" + - name: Install mold linker + uses: rui314/setup-mold@v1 + with: + mold-version: 2.0.0 + make-default: true - name: rust cache uses: Swatinem/rust-cache@v2 with: diff --git a/rust/Cargo.lock b/rust/Cargo.lock index 862c5062a1..636313db67 100644 --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -4563,6 +4563,22 @@ dependencies = [ "libc", ] +[[package]] +name = "macro_rules_attribute" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a82271f7bc033d84bbca59a3ce3e4159938cb08a9c3aebbe54d215131518a13" +dependencies = [ + "macro_rules_attribute-proc_macro", + "paste", +] + +[[package]] +name = "macro_rules_attribute-proc_macro" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dd856d451cc0da70e2ef2ce95a18e39a93b7558bedf10201ad28503f918568" + [[package]] name = "maplit" version = "1.0.2" @@ -6326,6 +6342,7 @@ version = "0.1.0" dependencies = [ "ctrlc", "eyre", + "macro_rules_attribute", "maplit", "nix 0.26.2", "tempfile", diff --git a/rust/agents/relayer/src/relayer.rs b/rust/agents/relayer/src/relayer.rs index fc1dc889a6..e04d04154c 100644 --- a/rust/agents/relayer/src/relayer.rs +++ b/rust/agents/relayer/src/relayer.rs @@ -266,10 +266,11 @@ impl Relayer { &self, origin: &HyperlaneDomain, ) -> Instrumented>> { - let index_settings = self.as_ref().settings.chains[origin.name()].index.clone(); + let (index_settings, index_mode) = + self.as_ref().settings.chains[origin.name()].index_settings_and_mode(); let contract_sync = self.message_syncs.get(origin).unwrap().clone(); let cursor = contract_sync - .forward_backward_message_sync_cursor(index_settings) + .forward_backward_message_sync_cursor(index_settings, 
index_mode) .await; tokio::spawn(async move { contract_sync @@ -284,13 +285,16 @@ impl Relayer { &self, origin: &HyperlaneDomain, ) -> Instrumented>> { - let index_settings = self.as_ref().settings.chains[origin.name()].index.clone(); + let (index_settings, index_mode) = + self.as_ref().settings.chains[origin.name()].index_settings_and_mode(); let contract_sync = self .interchain_gas_payment_syncs .get(origin) .unwrap() .clone(); - let cursor = contract_sync.rate_limited_cursor(index_settings).await; + let cursor = contract_sync + .rate_limited_cursor(index_settings, index_mode) + .await; tokio::spawn(async move { contract_sync.clone().sync("gas_payments", cursor).await }) .instrument(info_span!("ContractSync")) } diff --git a/rust/agents/scraper/src/agent.rs b/rust/agents/scraper/src/agent.rs index bcda886aef..8c98b5163f 100644 --- a/rust/agents/scraper/src/agent.rs +++ b/rust/agents/scraper/src/agent.rs @@ -234,7 +234,7 @@ macro_rules! spawn_sync_task { .await .unwrap(); let cursor = sync - .$cursor(index_settings.clone()) + .$cursor(index_settings.clone(), domain.clone().index_mode()) .await; tokio::spawn(async move { sync @@ -275,7 +275,11 @@ impl Scraper { .unwrap_or(None) .unwrap_or(0); let cursor = sync - .forward_message_sync_cursor(index_settings.clone(), latest_nonce.saturating_sub(1)) + .forward_message_sync_cursor( + index_settings.clone(), + domain.index_mode(), + latest_nonce.saturating_sub(1), + ) .await; tokio::spawn(async move { sync.sync("message_dispatch", cursor).await }).instrument( info_span!("ChainContractSync", chain=%domain.name(), event="message_dispatch"), diff --git a/rust/agents/validator/src/validator.rs b/rust/agents/validator/src/validator.rs index 108922e13c..5960ab00ae 100644 --- a/rust/agents/validator/src/validator.rs +++ b/rust/agents/validator/src/validator.rs @@ -143,12 +143,11 @@ impl BaseAgent for Validator { impl Validator { async fn run_message_sync(&self) -> Instrumented>> { - let index_settings = 
self.as_ref().settings.chains[self.origin_chain.name()] - .index - .clone(); + let (index_settings, index_mode) = + self.as_ref().settings.chains[self.origin_chain.name()].index_settings_and_mode(); let contract_sync = self.message_sync.clone(); let cursor = contract_sync - .forward_backward_message_sync_cursor(index_settings) + .forward_backward_message_sync_cursor(index_settings, index_mode) .await; tokio::spawn(async move { contract_sync @@ -271,7 +270,6 @@ impl Validator { validator_address=?announcement.validator, "Please send tokens to the validator address to announce", ); - sleep(self.interval).await; } else { let result = self .validator_announce @@ -279,6 +277,7 @@ impl Validator { .await; Self::log_on_announce_failure(result); } + sleep(self.interval).await; } } Ok(()) diff --git a/rust/chains/hyperlane-ethereum/src/interchain_gas.rs b/rust/chains/hyperlane-ethereum/src/interchain_gas.rs index 7ac5ee87b7..6b40c618e9 100644 --- a/rust/chains/hyperlane-ethereum/src/interchain_gas.rs +++ b/rust/chains/hyperlane-ethereum/src/interchain_gas.rs @@ -2,17 +2,17 @@ use std::collections::HashMap; use std::fmt::Display; +use std::ops::RangeInclusive; use std::sync::Arc; use async_trait::async_trait; use ethers::prelude::Middleware; -use tracing::instrument; - use hyperlane_core::{ - BlockRange, ChainCommunicationError, ChainResult, ContractLocator, HyperlaneAbi, - HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneProvider, IndexRange, Indexer, - InterchainGasPaymaster, InterchainGasPayment, LogMeta, H160, H256, + ChainCommunicationError, ChainResult, ContractLocator, HyperlaneAbi, HyperlaneChain, + HyperlaneContract, HyperlaneDomain, HyperlaneProvider, Indexer, InterchainGasPaymaster, + InterchainGasPayment, LogMeta, SequenceIndexer, H160, H256, }; +use tracing::instrument; use crate::contracts::i_interchain_gas_paymaster::{ IInterchainGasPaymaster as EthereumInterchainGasPaymasterInternal, IINTERCHAINGASPAYMASTER_ABI, @@ -36,7 +36,7 @@ pub struct 
InterchainGasPaymasterIndexerBuilder { #[async_trait] impl BuildableWithProvider for InterchainGasPaymasterIndexerBuilder { - type Output = Box>; + type Output = Box>; async fn build_with_provider( &self, @@ -87,14 +87,8 @@ where #[instrument(err, skip(self))] async fn fetch_logs( &self, - range: IndexRange, + range: RangeInclusive, ) -> ChainResult> { - let BlockRange(range) = range else { - return Err(ChainCommunicationError::from_other_str( - "EthereumInterchainGasPaymasterIndexer only supports block-based indexing", - )); - }; - let events = self .contract .gas_payment_filter() @@ -130,6 +124,22 @@ where } } +#[async_trait] +impl SequenceIndexer for EthereumInterchainGasPaymasterIndexer +where + M: Middleware + 'static, +{ + async fn sequence_at_tip(&self) -> ChainResult<(u32, u32)> { + // The InterchainGasPaymasterIndexerBuilder must return a `SequenceIndexer` type. + // It's fine if only a blanket implementation is provided for EVM chains, since their + // indexing only uses the `Index` trait, which is a supertrait of `SequenceIndexer`. + // TODO: if `SequenceIndexer` turns out to not depend on `Indexer` at all, then the supertrait + // dependency could be removed, even if the builder would still need to return a type that is both + // ``SequenceIndexer` and `Indexer`. 
+ panic!("Gas payment nonce indexing not implemented"); + } +} + pub struct InterchainGasPaymasterBuilder {} #[async_trait] diff --git a/rust/chains/hyperlane-ethereum/src/mailbox.rs b/rust/chains/hyperlane-ethereum/src/mailbox.rs index 7da575629b..d0887d9604 100644 --- a/rust/chains/hyperlane-ethereum/src/mailbox.rs +++ b/rust/chains/hyperlane-ethereum/src/mailbox.rs @@ -3,21 +3,23 @@ use std::collections::HashMap; use std::num::NonZeroU64; +use std::ops::RangeInclusive; use std::sync::Arc; use async_trait::async_trait; use ethers::abi::AbiEncode; use ethers::prelude::Middleware; use ethers_contract::builders::ContractCall; +use hyperlane_core::SequenceIndexer; use tracing::instrument; use hyperlane_core::accumulator::incremental::IncrementalMerkle; use hyperlane_core::accumulator::TREE_DEPTH; use hyperlane_core::{ - utils::fmt_bytes, BlockRange, ChainCommunicationError, ChainResult, Checkpoint, - ContractLocator, HyperlaneAbi, HyperlaneChain, HyperlaneContract, HyperlaneDomain, - HyperlaneMessage, HyperlaneProtocolError, HyperlaneProvider, IndexRange, Indexer, LogMeta, - Mailbox, MessageIndexer, RawHyperlaneMessage, TxCostEstimate, TxOutcome, H160, H256, U256, + utils::fmt_bytes, ChainCommunicationError, ChainResult, Checkpoint, ContractLocator, + HyperlaneAbi, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneMessage, + HyperlaneProtocolError, HyperlaneProvider, Indexer, LogMeta, Mailbox, MessageIndexer, + RawHyperlaneMessage, TxCostEstimate, TxOutcome, H160, H256, U256, }; use crate::contracts::arbitrum_node_interface::ArbitrumNodeInterface; @@ -65,7 +67,7 @@ pub struct DeliveryIndexerBuilder { #[async_trait] impl BuildableWithProvider for DeliveryIndexerBuilder { - type Output = Box>; + type Output = Box>; async fn build_with_provider( &self, @@ -130,13 +132,10 @@ where } #[instrument(err, skip(self))] - async fn fetch_logs(&self, range: IndexRange) -> ChainResult> { - let BlockRange(range) = range else { - return 
Err(ChainCommunicationError::from_other_str( - "EthereumMailboxIndexer only supports block-based indexing", - )) - }; - + async fn fetch_logs( + &self, + range: RangeInclusive, + ) -> ChainResult> { let mut events: Vec<(HyperlaneMessage, LogMeta)> = self .contract .dispatch_filter() @@ -178,13 +177,7 @@ where } #[instrument(err, skip(self))] - async fn fetch_logs(&self, range: IndexRange) -> ChainResult> { - let BlockRange(range) = range else { - return Err(ChainCommunicationError::from_other_str( - "EthereumMailboxIndexer only supports block-based indexing", - )) - }; - + async fn fetch_logs(&self, range: RangeInclusive) -> ChainResult> { Ok(self .contract .process_id_filter() @@ -197,6 +190,17 @@ where .collect()) } } + +#[async_trait] +impl SequenceIndexer for EthereumMailboxIndexer +where + M: Middleware + 'static, +{ + async fn sequence_at_tip(&self) -> ChainResult<(u32, u32)> { + panic!("Message delivery sequence indexing not implemented"); + } +} + pub struct MailboxBuilder {} #[async_trait] diff --git a/rust/chains/hyperlane-fuel/src/interchain_gas.rs b/rust/chains/hyperlane-fuel/src/interchain_gas.rs index ee15bd63b6..85fb630a34 100644 --- a/rust/chains/hyperlane-fuel/src/interchain_gas.rs +++ b/rust/chains/hyperlane-fuel/src/interchain_gas.rs @@ -1,7 +1,9 @@ +use std::ops::RangeInclusive; + use async_trait::async_trait; use hyperlane_core::{ - ChainResult, HyperlaneChain, HyperlaneContract, IndexRange, Indexer, InterchainGasPaymaster, + ChainResult, HyperlaneChain, HyperlaneContract, Indexer, InterchainGasPaymaster, }; use hyperlane_core::{HyperlaneDomain, HyperlaneProvider, InterchainGasPayment, LogMeta, H256}; @@ -35,7 +37,7 @@ pub struct FuelInterchainGasPaymasterIndexer {} impl Indexer for FuelInterchainGasPaymasterIndexer { async fn fetch_logs( &self, - range: IndexRange, + range: RangeInclusive, ) -> ChainResult> { todo!() } diff --git a/rust/chains/hyperlane-fuel/src/mailbox.rs b/rust/chains/hyperlane-fuel/src/mailbox.rs index 
b989631505..a0002332e8 100644 --- a/rust/chains/hyperlane-fuel/src/mailbox.rs +++ b/rust/chains/hyperlane-fuel/src/mailbox.rs @@ -1,6 +1,7 @@ use std::collections::HashMap; use std::fmt::{Debug, Formatter}; use std::num::NonZeroU64; +use std::ops::RangeInclusive; use async_trait::async_trait; use fuels::prelude::{Bech32ContractId, WalletUnlocked}; @@ -10,8 +11,7 @@ use tracing::instrument; use hyperlane_core::{ utils::fmt_bytes, ChainCommunicationError, ChainResult, Checkpoint, ContractLocator, HyperlaneAbi, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneMessage, - HyperlaneProvider, IndexRange, Indexer, LogMeta, Mailbox, TxCostEstimate, TxOutcome, H256, - U256, + HyperlaneProvider, Indexer, LogMeta, Mailbox, TxCostEstimate, TxOutcome, H256, U256, }; use crate::{ @@ -154,7 +154,10 @@ pub struct FuelMailboxIndexer {} #[async_trait] impl Indexer for FuelMailboxIndexer { - async fn fetch_logs(&self, range: IndexRange) -> ChainResult> { + async fn fetch_logs( + &self, + range: RangeInclusive, + ) -> ChainResult> { todo!() } @@ -165,7 +168,7 @@ impl Indexer for FuelMailboxIndexer { #[async_trait] impl Indexer for FuelMailboxIndexer { - async fn fetch_logs(&self, range: IndexRange) -> ChainResult> { + async fn fetch_logs(&self, range: RangeInclusive) -> ChainResult> { todo!() } diff --git a/rust/chains/hyperlane-sealevel/Cargo.toml b/rust/chains/hyperlane-sealevel/Cargo.toml index 82dbf2446f..f3777dbd55 100644 --- a/rust/chains/hyperlane-sealevel/Cargo.toml +++ b/rust/chains/hyperlane-sealevel/Cargo.toml @@ -22,12 +22,12 @@ tracing-futures.workspace = true tracing.workspace = true url.workspace = true +account-utils = { path = "../../sealevel/libraries/account-utils" } hyperlane-core = { path = "../../hyperlane-core" } -hyperlane-sealevel-mailbox = { path = "../../sealevel/programs/mailbox", features = ["no-entrypoint"] } hyperlane-sealevel-interchain-security-module-interface = { path = "../../sealevel/libraries/interchain-security-module-interface" } 
+hyperlane-sealevel-mailbox = { path = "../../sealevel/programs/mailbox", features = ["no-entrypoint"] } hyperlane-sealevel-message-recipient-interface = { path = "../../sealevel/libraries/message-recipient-interface" } -serializable-account-meta = { path = "../../sealevel/libraries/serializable-account-meta" } -account-utils = { path = "../../sealevel/libraries/account-utils" } -multisig-ism = { path = "../../sealevel/libraries/multisig-ism" } hyperlane-sealevel-multisig-ism-message-id = { path = "../../sealevel/programs/ism/multisig-ism-message-id", features = ["no-entrypoint"] } hyperlane-sealevel-validator-announce = { path = "../../sealevel/programs/validator-announce", features = ["no-entrypoint"] } +multisig-ism = { path = "../../sealevel/libraries/multisig-ism" } +serializable-account-meta = { path = "../../sealevel/libraries/serializable-account-meta" } diff --git a/rust/chains/hyperlane-sealevel/src/interchain_gas.rs b/rust/chains/hyperlane-sealevel/src/interchain_gas.rs index 92731fe42c..c9b620e38d 100644 --- a/rust/chains/hyperlane-sealevel/src/interchain_gas.rs +++ b/rust/chains/hyperlane-sealevel/src/interchain_gas.rs @@ -1,8 +1,10 @@ +use std::ops::RangeInclusive; + use async_trait::async_trait; use hyperlane_core::{ ChainResult, ContractLocator, HyperlaneChain, HyperlaneContract, HyperlaneDomain, - HyperlaneProvider, IndexRange, Indexer, InterchainGasPaymaster, InterchainGasPayment, LogMeta, - H256, + HyperlaneProvider, Indexer, InterchainGasPaymaster, InterchainGasPayment, LogMeta, + SequenceIndexer, H256, }; use tracing::{info, instrument}; @@ -61,7 +63,7 @@ impl Indexer for SealevelInterchainGasPaymasterIndexer { #[instrument(err, skip(self))] async fn fetch_logs( &self, - _range: IndexRange, + _range: RangeInclusive, ) -> ChainResult> { info!("Gas payment indexing not implemented for Sealevel"); Ok(vec![]) @@ -74,3 +76,11 @@ impl Indexer for SealevelInterchainGasPaymasterIndexer { Ok(1) } } + +#[async_trait] +impl SequenceIndexer for 
SealevelInterchainGasPaymasterIndexer { + async fn sequence_at_tip(&self) -> ChainResult<(u32, u32)> { + info!("Gas payment indexing not implemented for Sealevel"); + Ok((1, 1)) + } +} diff --git a/rust/chains/hyperlane-sealevel/src/mailbox.rs b/rust/chains/hyperlane-sealevel/src/mailbox.rs index cd55a0bbed..5f3c8df3f5 100644 --- a/rust/chains/hyperlane-sealevel/src/mailbox.rs +++ b/rust/chains/hyperlane-sealevel/src/mailbox.rs @@ -1,6 +1,6 @@ #![allow(warnings)] // FIXME remove -use std::{collections::HashMap, num::NonZeroU64, str::FromStr as _}; +use std::{collections::HashMap, num::NonZeroU64, ops::RangeInclusive, str::FromStr as _}; use async_trait::async_trait; use borsh::{BorshDeserialize, BorshSerialize}; @@ -10,8 +10,8 @@ use tracing::{debug, info, instrument, warn}; use hyperlane_core::{ accumulator::incremental::IncrementalMerkle, ChainCommunicationError, ChainResult, Checkpoint, ContractLocator, Decode as _, Encode as _, HyperlaneAbi, HyperlaneChain, HyperlaneContract, - HyperlaneDomain, HyperlaneMessage, HyperlaneProvider, IndexRange, Indexer, LogMeta, Mailbox, - MessageIndexer, SequenceRange, TxCostEstimate, TxOutcome, H256, U256, + HyperlaneDomain, HyperlaneMessage, HyperlaneProvider, Indexer, LogMeta, Mailbox, + MessageIndexer, SequenceIndexer, TxCostEstimate, TxOutcome, H256, U256, }; use hyperlane_sealevel_interchain_security_module_interface::{ InterchainSecurityModuleInstruction, VerifyInstruction, @@ -701,19 +701,17 @@ impl MessageIndexer for SealevelMailboxIndexer { #[async_trait] impl Indexer for SealevelMailboxIndexer { - async fn fetch_logs(&self, range: IndexRange) -> ChainResult> { - let SequenceRange(range) = range else { - return Err(ChainCommunicationError::from_other_str( - "SealevelMailboxIndexer only supports sequence-based indexing", - )) - }; - + async fn fetch_logs( + &self, + range: RangeInclusive, + ) -> ChainResult> { info!( ?range, "Fetching SealevelMailboxIndexer HyperlaneMessage logs" ); - let mut messages = 
Vec::with_capacity((range.end() - range.start()) as usize); + let message_capacity = range.end().saturating_sub(*range.start()); + let mut messages = Vec::with_capacity(message_capacity as usize); for nonce in range { messages.push(self.get_message_with_nonce(nonce).await?); } @@ -727,7 +725,7 @@ impl Indexer for SealevelMailboxIndexer { #[async_trait] impl Indexer for SealevelMailboxIndexer { - async fn fetch_logs(&self, _range: IndexRange) -> ChainResult> { + async fn fetch_logs(&self, _range: RangeInclusive) -> ChainResult> { todo!() } @@ -736,6 +734,15 @@ impl Indexer for SealevelMailboxIndexer { } } +#[async_trait] +impl SequenceIndexer for SealevelMailboxIndexer { + async fn sequence_at_tip(&self) -> ChainResult<(u32, u32)> { + // TODO: implement when sealevel scraper support is implemented + info!("Message delivery indexing not implemented"); + Ok((1, 1)) + } +} + struct SealevelMailboxAbi; // TODO figure out how this is used and if we can support it for sealevel. diff --git a/rust/config/sealevel/relayer.env b/rust/config/sealevel/relayer.env deleted file mode 100644 index bba715da2e..0000000000 --- a/rust/config/sealevel/relayer.env +++ /dev/null @@ -1,15 +0,0 @@ -export BASE_CONFIG="sealevel.json" -export RUN_ENV="sealevel" -export HYP_BASE_DB="/tmp/SEALEVEL_DB/relayer" -export HYP_RELAYER_RELAYCHAINS="sealeveltest1,sealeveltest2" -export HYP_BASE_METRICS=9091 -export HYP_BASE_ALLOWLOCALCHECKPOINTSYNCERS=true - -# The first 32 bytes of test-keys/test_deployer-keypair.json as hexadecimal, -# which is the secret key. 
-export HYP_BASE_CHAINS_SEALEVELTEST1_SIGNER_KEY=892bf6949af4233e62f854cb3618bc1a3ee3341dc71ada08c4d5deca239acf4f -export HYP_BASE_CHAINS_SEALEVELTEST1_SIGNER_TYPE="hexKey" -export HYP_BASE_CHAINS_SEALEVELTEST2_SIGNER_KEY=892bf6949af4233e62f854cb3618bc1a3ee3341dc71ada08c4d5deca239acf4f -export HYP_BASE_CHAINS_SEALEVELTEST2_SIGNER_TYPE="hexKey" - -export HYP_BASE_TRACING_LEVEL="debug" diff --git a/rust/config/sealevel/validator.env b/rust/config/sealevel/validator.env deleted file mode 100644 index 95b038de04..0000000000 --- a/rust/config/sealevel/validator.env +++ /dev/null @@ -1,10 +0,0 @@ -export BASE_CONFIG="sealevel.json" -export RUN_ENV="sealevel" -export HYP_BASE_DB="/tmp/SEALEVEL_DB/validator" -export HYP_VALIDATOR_ORIGINCHAINNAME="sealeveltest1" -export HYP_VALIDATOR_VALIDATOR_KEY="59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d" -export HYP_VALIDATOR_VALIDATOR_TYPE="hexKey" -export HYP_VALIDATOR_REORGPERIOD="0" -export HYP_VALIDATOR_INTERVAL="1" -export HYP_VALIDATOR_CHECKPOINTSYNCER_TYPE="localStorage" -export HYP_VALIDATOR_CHECKPOINTSYNCER_PATH="/tmp/test_sealevel_checkpoints_0x70997970c51812dc3a010c7d01b50e0d17dc79c8" diff --git a/rust/config/sealevel/test-keys/test_deployer-account.json b/rust/config/test-sealevel-keys/test_deployer-account.json similarity index 100% rename from rust/config/sealevel/test-keys/test_deployer-account.json rename to rust/config/test-sealevel-keys/test_deployer-account.json diff --git a/rust/config/sealevel/test-keys/test_deployer-keypair.json b/rust/config/test-sealevel-keys/test_deployer-keypair.json similarity index 100% rename from rust/config/sealevel/test-keys/test_deployer-keypair.json rename to rust/config/test-sealevel-keys/test_deployer-keypair.json diff --git a/rust/config/sealevel/sealevel.json b/rust/config/test_sealevel_config.json similarity index 73% rename from rust/config/sealevel/sealevel.json rename to rust/config/test_sealevel_config.json index f3629849d5..13044fbb27 100644 --- 
a/rust/config/sealevel/sealevel.json +++ b/rust/config/test_sealevel_config.json @@ -1,49 +1,42 @@ { - "environment": "sealevel", "chains": { "sealeveltest1": { - "name": "SealevelTest1", - "domain": "13375", + "name": "sealeveltest1", + "domain": 13375, "addresses": { "mailbox": "692KZJaoe2KRcD6uhCQDLLXnLNA5ZLnfvdqjE4aX9iu1", "interchainGasPaymaster": "FixmeFixmeFixmeFixmeFixmeFixmeFixmeFixmeFixm", "validatorAnnounce": "DH43ae1LwemXAboWwSh8zc9pG8j72gKUEXNi57w8fEnn" }, - "signer": null, "protocol": "sealevel", - "finalityBlocks": "0", + "finalityBlocks": 0, "connection": { "type": "http", "url": "http://localhost:8899" }, "index": { - "from": "1", + "from": 1, "mode": "sequence" } }, "sealeveltest2": { - "name": "SealevelTest2", - "domain": "13376", + "name": "sealeveltest2", + "domain": 13376, "addresses": { "mailbox": "9tCUWNjpqcf3NUSrtp7vquYVCwbEByvLjZUrhG5dgvhj", "interchainGasPaymaster": "FixmeFixmeFixmeFixmeFixmeFixmeFixmeFixmeFixm", "validatorAnnounce": "3Uo5j2Bti9aZtrDqJmAyuwiFaJFPFoNL5yxTpVCNcUhb" }, - "signer": null, "protocol": "sealevel", - "finalityBlocks": "0", + "finalityBlocks": 0, "connection": { "type": "http", "url": "http://localhost:8899" }, "index": { - "from": "1", + "from": 1, "mode": "sequence" } } - }, - "tracing": { - "level": "info", - "fmt": "pretty" } } diff --git a/rust/hyperlane-base/src/contract_sync/cursor.rs b/rust/hyperlane-base/src/contract_sync/cursor.rs index 78fdea4af0..b8614a6d60 100644 --- a/rust/hyperlane-base/src/contract_sync/cursor.rs +++ b/rust/hyperlane-base/src/contract_sync/cursor.rs @@ -1,5 +1,6 @@ use std::cmp::Ordering; use std::fmt::Debug; +use std::ops::RangeInclusive; use std::{ sync::Arc, time::{Duration, Instant}, @@ -12,9 +13,8 @@ use tokio::time::sleep; use tracing::{debug, warn}; use hyperlane_core::{ - BlockRange, ChainResult, ContractSyncCursor, CursorAction, HyperlaneMessage, - HyperlaneMessageStore, HyperlaneWatermarkedLogStore, IndexMode, IndexRange, Indexer, LogMeta, - MessageIndexer, SequenceRange, 
+ ChainResult, ContractSyncCursor, CursorAction, HyperlaneMessage, HyperlaneMessageStore, + HyperlaneWatermarkedLogStore, IndexMode, Indexer, LogMeta, MessageIndexer, SequenceIndexer, }; use crate::contract_sync::eta_calculator::SyncerEtaCalculator; @@ -30,13 +30,71 @@ const MAX_SEQUENCE_RANGE: u32 = 100; pub(crate) struct MessageSyncCursor { indexer: Arc, db: Arc, + sync_state: SyncState, +} + +#[derive(Debug, new)] +pub(crate) struct SyncState { chunk_size: u32, /// The starting block for the cursor start_block: u32, /// The next block that should be indexed. next_block: u32, - /// The next nonce that the cursor is looking for. - next_nonce: u32, + mode: IndexMode, + /// The next sequence index that the cursor is looking for. + /// In the EVM, this is used for optimizing indexing, + /// because it's cheaper to make read calls for the sequence index than + /// to call `eth_getLogs` with a block range. + /// In Sealevel, historic queries aren't supported, so the nonce field + /// is used to query storage in sequence. + next_sequence: u32, + direction: SyncDirection, +} + +impl SyncState { + async fn get_next_range( + &mut self, + max_sequence_index: Option, + tip: Option, + ) -> ChainResult>> { + // We attempt to index a range of blocks that is as large as possible. 
+ let (from, to) = match self.direction { + SyncDirection::Forward => { + let from = self.next_block; + let mut to = from + self.chunk_size; + if let Some(tip) = tip { + to = u32::min(to, tip); + } + self.next_block = to + 1; + (from, to) + } + SyncDirection::Backward => { + let to = self.next_block; + let from = to.saturating_sub(self.chunk_size); + self.next_block = from.saturating_sub(1); + (from, to) + } + }; + let range = match self.mode { + IndexMode::Block => from..=to, + IndexMode::Sequence => { + let sequence_start = self.next_sequence; + let mut sequence_end = sequence_start + MAX_SEQUENCE_RANGE; + if let Some(max_sequence_index) = max_sequence_index { + sequence_end = u32::min(sequence_end, max_sequence_index.saturating_sub(1)); + } + self.next_sequence = sequence_end + 1; + if let Some(tip) = tip { + self.next_block = tip; + } + sequence_start..=sequence_end + } + }; + if range.is_empty() { + return Ok(None); + } + Ok(Some(range)) + } } impl MessageSyncCursor { @@ -59,19 +117,23 @@ impl MessageSyncCursor { async fn update( &mut self, logs: Vec<(HyperlaneMessage, LogMeta)>, - prev_nonce: u32, + prev_sequence: u32, ) -> Result<()> { // If we found messages, but did *not* find the message we were looking for, // we need to rewind to the block at which we found the last message. - if !logs.is_empty() && !logs.iter().any(|m| m.0.nonce == self.next_nonce) { - warn!(next_nonce=?self.next_nonce, "Target nonce not found, rewinding"); + if !logs.is_empty() + && !logs + .iter() + .any(|m| m.0.nonce == self.sync_state.next_sequence) + { + warn!(next_nonce=?self.sync_state.next_sequence, "Target nonce not found, rewinding"); // If the previous nonce has been synced, rewind to the block number // at which it was dispatched. Otherwise, rewind all the way back to the start block. 
- if let Some(block_number) = self.retrieve_dispatched_block_number(prev_nonce).await { - self.next_block = block_number; + if let Some(block_number) = self.retrieve_dispatched_block_number(prev_sequence).await { + self.sync_state.next_block = block_number; warn!(block_number, "Rewound to previous known message"); } else { - self.next_block = self.start_block; + self.sync_state.next_block = self.sync_state.start_block; } Ok(()) } else { @@ -81,71 +143,84 @@ impl MessageSyncCursor { } /// A MessageSyncCursor that syncs forwards in perpetuity. -#[derive(new)] pub(crate) struct ForwardMessageSyncCursor { cursor: MessageSyncCursor, - mode: IndexMode, } impl ForwardMessageSyncCursor { - async fn get_next_range(&mut self) -> ChainResult> { + pub fn new( + indexer: Arc, + db: Arc, + chunk_size: u32, + start_block: u32, + next_block: u32, + mode: IndexMode, + next_sequence: u32, + ) -> Self { + Self { + cursor: MessageSyncCursor::new( + indexer, + db, + SyncState::new( + chunk_size, + start_block, + next_block, + mode, + next_sequence, + SyncDirection::Forward, + ), + ), + } + } + + async fn get_next_range(&mut self) -> ChainResult>> { // Check if any new messages have been inserted into the DB, // and update the cursor accordingly. while self .cursor - .retrieve_message_by_nonce(self.cursor.next_nonce) + .retrieve_message_by_nonce(self.cursor.sync_state.next_sequence) .await .is_some() { if let Some(block_number) = self .cursor - .retrieve_dispatched_block_number(self.cursor.next_nonce) + .retrieve_dispatched_block_number(self.cursor.sync_state.next_sequence) .await { debug!(next_block = block_number, "Fast forwarding next block"); // It's possible that eth_getLogs dropped logs from this block, therefore we cannot do block_number + 1. 
- self.cursor.next_block = block_number; + self.cursor.sync_state.next_block = block_number; } debug!( - next_nonce = self.cursor.next_nonce + 1, + next_nonce = self.cursor.sync_state.next_sequence + 1, "Fast forwarding next nonce" ); - self.cursor.next_nonce += 1; + self.cursor.sync_state.next_sequence += 1; } let (mailbox_count, tip) = self.cursor.indexer.fetch_count_at_tip().await?; - let cursor_count = self.cursor.next_nonce; - let cmp = cursor_count.cmp(&mailbox_count); - match cmp { + let cursor_count = self.cursor.sync_state.next_sequence; + Ok(match cursor_count.cmp(&mailbox_count) { Ordering::Equal => { // We are synced up to the latest nonce so we don't need to index anything. // We update our next block number accordingly. - self.cursor.next_block = tip; - Ok(None) + self.cursor.sync_state.next_block = tip; + None } Ordering::Less => { // The cursor is behind the mailbox, so we need to index some blocks. - // We attempt to index a range of blocks that is as large as possible. - let from = self.cursor.next_block; - let to = u32::min(tip, from + self.cursor.chunk_size); - self.cursor.next_block = to + 1; - - let range = match self.mode { - IndexMode::Block => BlockRange(from..=to), - IndexMode::Sequence => SequenceRange( - cursor_count..=u32::min(mailbox_count, cursor_count + MAX_SEQUENCE_RANGE), - ), - }; - - Ok(Some(range)) + self.cursor + .sync_state + .get_next_range(Some(mailbox_count), Some(tip)) + .await? } Ordering::Greater => { // Providers may be internally inconsistent, e.g. RPC request A could hit a node // whose tip is N and subsequent RPC request B could hit a node whose tip is < N. 
debug!("Cursor count is greater than Mailbox count"); - Ok(None) + None } - } + }) } } @@ -163,99 +238,118 @@ impl ContractSyncCursor for ForwardMessageSyncCursor { } fn latest_block(&self) -> u32 { - self.cursor.next_block.saturating_sub(1) + self.cursor.sync_state.next_block.saturating_sub(1) } /// If the previous block has been synced, rewind to the block number /// at which it was dispatched. /// Otherwise, rewind all the way back to the start block. async fn update(&mut self, logs: Vec<(HyperlaneMessage, LogMeta)>) -> Result<()> { - let prev_nonce = self.cursor.next_nonce.saturating_sub(1); + let prev_nonce = self.cursor.sync_state.next_sequence.saturating_sub(1); // We may wind up having re-indexed messages that are previous to the nonce that we are looking for. // We should not consider these messages when checking for continuity errors. let filtered_logs = logs .into_iter() - .filter(|m| m.0.nonce >= self.cursor.next_nonce) + .filter(|m| m.0.nonce >= self.cursor.sync_state.next_sequence) .collect(); self.cursor.update(filtered_logs, prev_nonce).await } } -/// A MessageSyncCursor that syncs backwards to nonce zero. -#[derive(new)] +/// A MessageSyncCursor that syncs backwards to sequence (nonce) zero. pub(crate) struct BackwardMessageSyncCursor { cursor: MessageSyncCursor, synced: bool, - mode: IndexMode, } impl BackwardMessageSyncCursor { - async fn get_next_range(&mut self) -> Option { + #[allow(clippy::too_many_arguments)] + pub fn new( + indexer: Arc, + db: Arc, + chunk_size: u32, + start_block: u32, + next_block: u32, + mode: IndexMode, + next_sequence: u32, + synced: bool, + ) -> Self { + Self { + cursor: MessageSyncCursor::new( + indexer, + db, + SyncState::new( + chunk_size, + start_block, + next_block, + mode, + next_sequence, + SyncDirection::Backward, + ), + ), + synced, + } + } + + async fn get_next_range(&mut self) -> ChainResult>> { // Check if any new messages have been inserted into the DB, // and update the cursor accordingly. 
while !self.synced { if self .cursor - .retrieve_message_by_nonce(self.cursor.next_nonce) + .retrieve_message_by_nonce(self.cursor.sync_state.next_sequence) .await .is_none() { break; }; - // If we found nonce zero or hit block zero, we are done rewinding. - if self.cursor.next_nonce == 0 || self.cursor.next_block == 0 { + // If we found sequence zero or hit block zero, we are done rewinding. + if self.cursor.sync_state.next_sequence == 0 || self.cursor.sync_state.next_block == 0 { self.synced = true; break; } if let Some(block_number) = self .cursor - .retrieve_dispatched_block_number(self.cursor.next_nonce) + .retrieve_dispatched_block_number(self.cursor.sync_state.next_sequence) .await { // It's possible that eth_getLogs dropped logs from this block, therefore we cannot do block_number - 1. - self.cursor.next_block = block_number; + self.cursor.sync_state.next_block = block_number; } - self.cursor.next_nonce = self.cursor.next_nonce.saturating_sub(1); + self.cursor.sync_state.next_sequence = + self.cursor.sync_state.next_sequence.saturating_sub(1); } if self.synced { - return None; + return Ok(None); } // Just keep going backwards. - let to = self.cursor.next_block; - let from = to.saturating_sub(self.cursor.chunk_size); - self.cursor.next_block = from.saturating_sub(1); - - let next_nonce = self.cursor.next_nonce; - - let range = match self.mode { - IndexMode::Block => BlockRange(from..=to), - IndexMode::Sequence => { - SequenceRange(next_nonce.saturating_sub(MAX_SEQUENCE_RANGE)..=next_nonce) - } - }; - - Some(range) + let (count, tip) = self.cursor.indexer.fetch_count_at_tip().await?; + self.cursor + .sync_state + .get_next_range(Some(count), Some(tip)) + .await } /// If the previous block has been synced, rewind to the block number /// at which it was dispatched. /// Otherwise, rewind all the way back to the start block. 
async fn update(&mut self, logs: Vec<(HyperlaneMessage, LogMeta)>) -> Result<()> { - let prev_nonce = self.cursor.next_nonce.saturating_add(1); - // We may wind up having re-indexed messages that are previous to the nonce that we are looking for. + let prev_sequence = self.cursor.sync_state.next_sequence.saturating_add(1); + // We may wind up having re-indexed messages that are previous to the sequence (nonce) that we are looking for. // We should not consider these messages when checking for continuity errors. let filtered_logs = logs .into_iter() - .filter(|m| m.0.nonce <= self.cursor.next_nonce) + .filter(|m| m.0.nonce <= self.cursor.sync_state.next_sequence) .collect(); - self.cursor.update(filtered_logs, prev_nonce).await + self.cursor.update(filtered_logs, prev_sequence).await } } -enum SyncDirection { +#[derive(Debug)] +pub enum SyncDirection { Forward, Backward, } @@ -277,21 +371,23 @@ impl ForwardBackwardMessageSyncCursor { ) -> Result { let (count, tip) = indexer.fetch_count_at_tip().await?; let forward_cursor = ForwardMessageSyncCursor::new( - MessageSyncCursor::new(indexer.clone(), db.clone(), chunk_size, tip, tip, count), + indexer.clone(), + db.clone(), + chunk_size, + tip, + tip, mode, + count, ); - let backward_cursor = BackwardMessageSyncCursor::new( - MessageSyncCursor::new( - indexer.clone(), - db.clone(), - chunk_size, - tip, - tip, - count.saturating_sub(1), - ), - count == 0, + indexer.clone(), + db.clone(), + chunk_size, + tip, + tip, mode, + count.saturating_sub(1), + count == 0, ); Ok(Self { forward: forward_cursor, @@ -312,7 +408,7 @@ impl ContractSyncCursor for ForwardBackwardMessageSyncCursor { return Ok((CursorAction::Query(forward_range), eta)); } - if let Some(backward_range) = self.backward.get_next_range().await { + if let Some(backward_range) = self.backward.get_next_range().await? 
{ self.direction = SyncDirection::Backward; return Ok((CursorAction::Query(backward_range), eta)); } @@ -321,7 +417,7 @@ impl ContractSyncCursor for ForwardBackwardMessageSyncCursor { } fn latest_block(&self) -> u32 { - self.forward.cursor.next_block.saturating_sub(1) + self.forward.cursor.sync_state.next_block.saturating_sub(1) } async fn update(&mut self, logs: Vec<(HyperlaneMessage, LogMeta)>) -> Result<()> { @@ -336,41 +432,46 @@ impl ContractSyncCursor for ForwardBackwardMessageSyncCursor { /// queried is and also handling rate limiting. Rate limiting is automatically /// performed by `next_action`. pub(crate) struct RateLimitedContractSyncCursor { - indexer: Arc>, + indexer: Arc>, db: Arc>, tip: u32, last_tip_update: Instant, - chunk_size: u32, - from: u32, eta_calculator: SyncerEtaCalculator, - initial_height: u32, + sync_state: SyncState, } impl RateLimitedContractSyncCursor { /// Construct a new contract sync helper. pub async fn new( - indexer: Arc>, + indexer: Arc>, db: Arc>, chunk_size: u32, initial_height: u32, + mode: IndexMode, ) -> Result { let tip = indexer.get_finalized_block_number().await?; Ok(Self { indexer, db, tip, - chunk_size, last_tip_update: Instant::now(), - from: initial_height, - initial_height, eta_calculator: SyncerEtaCalculator::new(initial_height, tip, ETA_TIME_WINDOW), + sync_state: SyncState::new( + chunk_size, + initial_height, + initial_height, + mode, + Default::default(), + // The rate limited cursor currently only syncs in the forward direction. + SyncDirection::Forward, + ), }) } /// Wait based on how close we are to the tip and update the tip, /// i.e. the highest block we may scrape. async fn get_rate_limit(&mut self) -> ChainResult> { - if self.from + self.chunk_size < self.tip { + if self.sync_state.next_block + self.sync_state.chunk_size < self.tip { // If doing the full chunk wouldn't exceed the already known tip we do not need to rate limit. 
Ok(None) } else { @@ -405,8 +506,11 @@ where T: Send + Debug + 'static, { async fn next_action(&mut self) -> ChainResult<(CursorAction, Duration)> { - let to = u32::min(self.tip, self.from + self.chunk_size); - let from = to.saturating_sub(self.chunk_size); + let to = u32::min( + self.tip, + self.sync_state.next_block + self.sync_state.chunk_size, + ); + let from = to.saturating_sub(self.sync_state.chunk_size); let eta = if to < self.tip { self.eta_calculator.calculate(from, self.tip) } else { @@ -417,18 +521,18 @@ where let action = if let Some(rate_limit) = rate_limit { CursorAction::Sleep(rate_limit) } else { - self.from = to + 1; - // TODO: note at the moment IndexModes are not considered here, and - // block-based indexing is always used. - // This should be changed when Sealevel IGP indexing is implemented, - // along with a refactor to better accommodate indexing modes. - CursorAction::Query(BlockRange(from..=to)) + // According to the logic in `get_rate_limit` we should be able to safely unwrap here. + // However, take the safer option and fall back to a zero duration sleep + match self.sync_state.get_next_range(None, None).await? 
{ + Some(range) => CursorAction::Query(range), + None => CursorAction::Sleep(Duration::from_secs(0)), + } }; Ok((action, eta)) } fn latest_block(&self) -> u32 { - self.from.saturating_sub(1) + self.sync_state.next_block.saturating_sub(1) } async fn update(&mut self, _: Vec<(T, LogMeta)>) -> Result<()> { @@ -436,8 +540,10 @@ where // safely shared across multiple cursors, so long as they are running sufficiently in sync self.db .store_high_watermark(u32::max( - self.initial_height, - self.from.saturating_sub(self.chunk_size), + self.sync_state.start_block, + self.sync_state + .next_block + .saturating_sub(self.sync_state.chunk_size), )) .await?; Ok(()) diff --git a/rust/hyperlane-base/src/contract_sync/mod.rs b/rust/hyperlane-base/src/contract_sync/mod.rs index 46c169fe18..591b2585af 100644 --- a/rust/hyperlane-base/src/contract_sync/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/mod.rs @@ -5,7 +5,8 @@ use derive_new::new; use cursor::*; use hyperlane_core::{ utils::fmt_sync_time, ContractSyncCursor, CursorAction, HyperlaneDomain, HyperlaneLogStore, - HyperlaneMessage, HyperlaneMessageStore, HyperlaneWatermarkedLogStore, Indexer, MessageIndexer, + HyperlaneMessage, HyperlaneMessageStore, HyperlaneWatermarkedLogStore, IndexMode, Indexer, + MessageIndexer, SequenceIndexer, }; pub use metrics::ContractSyncMetrics; use std::fmt::Debug; @@ -90,7 +91,7 @@ where /// A ContractSync for syncing events using a RateLimitedContractSyncCursor pub type WatermarkContractSync = - ContractSync>, Arc>>; + ContractSync>, Arc>>; impl WatermarkContractSync where T: Debug + Send + Sync + Clone + 'static, @@ -99,12 +100,12 @@ where pub async fn rate_limited_cursor( &self, index_settings: IndexSettings, + index_mode: IndexMode, ) -> Box> { let watermark = self.db.retrieve_high_watermark().await.unwrap(); let index_settings = IndexSettings { from: watermark.unwrap_or(index_settings.from), chunk_size: index_settings.chunk_size, - mode: index_settings.mode, }; Box::new( 
RateLimitedContractSyncCursor::new( @@ -112,6 +113,7 @@ where self.db.clone(), index_settings.chunk_size, index_settings.from, + index_mode, ) .await .unwrap(), @@ -127,19 +129,17 @@ impl MessageContractSync { pub async fn forward_message_sync_cursor( &self, index_settings: IndexSettings, + index_mode: IndexMode, next_nonce: u32, ) -> Box> { - let forward_data = MessageSyncCursor::new( + Box::new(ForwardMessageSyncCursor::new( self.indexer.clone(), self.db.clone(), index_settings.chunk_size, index_settings.from, index_settings.from, + index_mode, next_nonce, - ); - Box::new(ForwardMessageSyncCursor::new( - forward_data, - index_settings.mode, )) } @@ -147,13 +147,14 @@ impl MessageContractSync { pub async fn forward_backward_message_sync_cursor( &self, index_settings: IndexSettings, + index_mode: IndexMode, ) -> Box> { Box::new( ForwardBackwardMessageSyncCursor::new( self.indexer.clone(), self.db.clone(), index_settings.chunk_size, - index_settings.mode, + index_mode, ) .await .unwrap(), diff --git a/rust/hyperlane-base/src/settings/chains.rs b/rust/hyperlane-base/src/settings/chains.rs index 4bd7fa5723..346866ebe9 100644 --- a/rust/hyperlane-base/src/settings/chains.rs +++ b/rust/hyperlane-base/src/settings/chains.rs @@ -10,8 +10,8 @@ use ethers_prometheus::middleware::{ use hyperlane_core::{ config::*, utils::hex_or_base58_to_h256, AggregationIsm, CcipReadIsm, ContractLocator, HyperlaneAbi, HyperlaneDomain, HyperlaneDomainProtocol, HyperlaneProvider, HyperlaneSigner, - IndexMode, Indexer, InterchainGasPaymaster, InterchainGasPayment, InterchainSecurityModule, - Mailbox, MessageIndexer, MultisigIsm, RoutingIsm, ValidatorAnnounce, H256, + IndexMode, InterchainGasPaymaster, InterchainGasPayment, InterchainSecurityModule, Mailbox, + MessageIndexer, MultisigIsm, RoutingIsm, SequenceIndexer, ValidatorAnnounce, H256, }; use hyperlane_ethereum::{ self as h_eth, BuildableWithProvider, EthereumInterchainGasPaymasterAbi, EthereumMailboxAbi, @@ -137,8 +137,6 @@ pub struct 
IndexSettings { pub from: u32, /// The number of blocks to query at once when indexing contracts. pub chunk_size: u32, - /// The indexing mode. - pub mode: IndexMode, } #[derive(Debug, Deserialize)] @@ -146,7 +144,6 @@ pub struct IndexSettings { struct RawIndexSettings { from: Option, chunk: Option, - mode: Option, } impl FromRawConf<'_, RawIndexSettings> for IndexSettings { @@ -168,11 +165,7 @@ impl FromRawConf<'_, RawIndexSettings> for IndexSettings { .unwrap_or(1999); err.into_result()?; - Ok(Self { - from, - chunk_size, - mode: raw.mode.unwrap_or_default(), - }) + Ok(Self { from, chunk_size }) } } @@ -307,6 +300,11 @@ impl ChainConf { )) } + /// Fetch the index settings and index mode, since they are often used together. + pub fn index_settings_and_mode(&self) -> (IndexSettings, IndexMode) { + (self.index.clone(), self.domain.index_mode()) + } + /// Try to convert the chain settings into an HyperlaneProvider. pub async fn build_provider( &self, @@ -386,7 +384,7 @@ impl ChainConf { pub async fn build_delivery_indexer( &self, metrics: &CoreMetrics, - ) -> Result>> { + ) -> Result>> { let ctx = "Building delivery indexer"; let locator = self.locator(self.addresses.mailbox); @@ -406,7 +404,7 @@ impl ChainConf { ChainConnectionConf::Fuel(_) => todo!(), ChainConnectionConf::Sealevel(conf) => { let indexer = Box::new(h_sealevel::SealevelMailboxIndexer::new(conf, locator)?); - Ok(indexer as Box>) + Ok(indexer as Box>) } } .context(ctx) @@ -447,7 +445,7 @@ impl ChainConf { pub async fn build_interchain_gas_payment_indexer( &self, metrics: &CoreMetrics, - ) -> Result>> { + ) -> Result>> { let ctx = "Building IGP indexer"; let locator = self.locator(self.addresses.interchain_gas_paymaster); @@ -470,7 +468,7 @@ impl ChainConf { let indexer = Box::new(h_sealevel::SealevelInterchainGasPaymasterIndexer::new( conf, locator, )); - Ok(indexer as Box>) + Ok(indexer as Box>) } } .context(ctx) diff --git a/rust/hyperlane-core/src/chain.rs b/rust/hyperlane-core/src/chain.rs index 
f1f3a1208b..0051594259 100644 --- a/rust/hyperlane-core/src/chain.rs +++ b/rust/hyperlane-core/src/chain.rs @@ -9,7 +9,7 @@ use num_traits::FromPrimitive; use strum::{EnumIter, EnumString, IntoStaticStr}; use crate::utils::many_to_one; -use crate::{ChainResult, HyperlaneProtocolError, H160, H256}; +use crate::{ChainResult, HyperlaneProtocolError, IndexMode, H160, H256}; #[derive(Debug, Clone)] pub struct Address(pub bytes::Bytes); @@ -130,17 +130,6 @@ pub enum HyperlaneDomain { }, } -impl HyperlaneDomain { - pub fn is_arbitrum_nitro(&self) -> bool { - matches!( - self, - HyperlaneDomain::Known( - KnownHyperlaneDomain::Arbitrum | KnownHyperlaneDomain::ArbitrumGoerli, - ) - ) - } -} - #[cfg(any(test, feature = "test-utils"))] impl HyperlaneDomain { pub fn new_test_domain(name: &str) -> Self { @@ -383,6 +372,24 @@ impl HyperlaneDomain { } => *domain_protocol, } } + + pub fn is_arbitrum_nitro(&self) -> bool { + matches!( + self, + HyperlaneDomain::Known( + KnownHyperlaneDomain::Arbitrum | KnownHyperlaneDomain::ArbitrumGoerli, + ) + ) + } + + pub const fn index_mode(&self) -> IndexMode { + use HyperlaneDomainProtocol::*; + let protocol = self.domain_protocol(); + many_to_one!(match protocol { + IndexMode::Block: [Ethereum], + IndexMode::Sequence : [Sealevel, Fuel], + }) + } } #[cfg(test)] diff --git a/rust/hyperlane-core/src/traits/cursor.rs b/rust/hyperlane-core/src/traits/cursor.rs index 0beb238686..68d2c8b7d4 100644 --- a/rust/hyperlane-core/src/traits/cursor.rs +++ b/rust/hyperlane-core/src/traits/cursor.rs @@ -1,9 +1,9 @@ -use std::time::Duration; +use std::{ops::RangeInclusive, time::Duration}; use async_trait::async_trait; use auto_impl::auto_impl; -use crate::{ChainResult, IndexRange, LogMeta}; +use crate::{ChainResult, LogMeta}; /// A cursor governs event indexing for a contract. 
#[async_trait] @@ -23,7 +23,7 @@ pub trait ContractSyncCursor: Send + Sync + 'static { /// The action that should be taken by the contract sync loop pub enum CursorAction { /// Direct the contract_sync task to query a block range (inclusive) - Query(IndexRange), + Query(RangeInclusive), /// Direct the contract_sync task to sleep for a duration Sleep(Duration), } diff --git a/rust/hyperlane-core/src/traits/indexer.rs b/rust/hyperlane-core/src/traits/indexer.rs index fd4fd48700..1682d1f016 100644 --- a/rust/hyperlane-core/src/traits/indexer.rs +++ b/rust/hyperlane-core/src/traits/indexer.rs @@ -24,23 +24,12 @@ pub enum IndexMode { Sequence, } -/// An indexing range. -#[derive(Debug, Clone)] -pub enum IndexRange { - /// For block-based indexers - BlockRange(RangeInclusive), - /// For indexers that look for specific sequences, e.g. message nonces. - SequenceRange(RangeInclusive), -} - -pub use IndexRange::*; - /// Interface for an indexer. #[async_trait] #[auto_impl(&, Box, Arc,)] pub trait Indexer: Send + Sync + Debug { /// Fetch list of logs between blocks `from` and `to`, inclusive. - async fn fetch_logs(&self, range: IndexRange) -> ChainResult>; + async fn fetch_logs(&self, range: RangeInclusive) -> ChainResult>; /// Get the chain's latest block number that has reached finality async fn get_finalized_block_number(&self) -> ChainResult; @@ -54,3 +43,11 @@ pub trait MessageIndexer: Indexer + 'static { /// Return the latest finalized mailbox count and block number async fn fetch_count_at_tip(&self) -> ChainResult<(u32, u32)>; } + +/// Interface for indexing data in sequence. 
Currently used in non-EVM chains +#[async_trait] +#[auto_impl(&, Box, Arc)] +pub trait SequenceIndexer: Indexer + 'static { + /// Return the latest finalized sequence and block number + async fn sequence_at_tip(&self) -> ChainResult<(u32, u32)>; +} diff --git a/rust/sealevel/README.md b/rust/sealevel/README.md deleted file mode 100644 index b0feeb0aca..0000000000 --- a/rust/sealevel/README.md +++ /dev/null @@ -1,127 +0,0 @@ -# Hyperlane Sealevel (Solana VM) Integration - -# Running local end to end test - -A local end to end test has been written that will: - -1. Run a local Solana network -2. Deploy two sets of core contracts (i.e. Mailbox / Multisig ISM / ValidatorAnnounce) onto this chain, one with domain 13375 and the other 13376. -3. Deploy a "native" warp route on domain 13375 and a "synthetic" warp route on domain 13376 -4. Send native lamports from domain 13375 to 13376 -5. A validator & relayer can then be spun up to deliver the message - -### Build and run solana-test-validator - -This only needs to be done once when initially setting things up. - -1. Clone the `solar-eclipse` repo, which is the Eclipse fork of the Solana repo. This is needed to run the local Solana network. Check out the `steven/hyperlane-fix-deps` branch: - -``` -git clone git@github.com:Eclipse-Laboratories-Inc/solar-eclipse --branch steven/hyperlane-fix-deps -``` - -2. `cd` into the repo and build the `solana-test-validator` using the local `cargo` script (which ensures the correct version is used): - -``` -./cargo build -p solana-test-validator -``` - -### Check out `eclipse-program-library` - -This is a fork (with some dependency fixes) of the eclipse fork of the `solana-program-library`. This contains "SPL" programs that are commonly used programs - stuff like the token program, etc. - -Note these instructions previously required a different remote and branch - make sure to move to this remote & branch if you ahven't already! - -1. 
Check out the branch `trevor/steven/eclipse-1.14.13/with-tlv-lib`: - -``` -git clone git@github.com:tkporter/eclipse-program-library.git --branch trevor/steven/eclipse-1.14.13/with-tlv-lib -``` - -### Build the required SPL programs and Hyperlane programs - -This command will build all the required SPL programs (e.g. the token program, token 2022 program, SPL noop, etc...) found in the local repo of `eclipse-program-library`, -and will build all the required Hyperlane programs (e.g. the Mailbox program, Validator Announce, etc...). - -You need to run this if any changes are made to programs that you want to be used in future runs of the end to end test. - -Change the paths to your local `solar-eclipse` repo and `eclipse-program-library` as necessary, and run this from the `rust` directory of hyperlane-monorepo. - -``` -SOLAR_ECLIPSE_DIR=~/solar-eclipse ECLIPSE_PROGRAM_LIBRARY_DIR=~/eclipse-program-library ./utils/sealevel-test.bash build-only -``` - -### Run the local Solana network - -This will run the `solana-test-validator` with a funded test account `E9VrvAdGRvCguN2XgXsgu9PNmMM3vZsU8LSUrM68j8ty` that will later be used for deploying contracts. It will also create some of the required SPL programs at the specified program IDs - these program IDs are consistent across Solana networks and are required by our Hyperlane programs. Change paths as necessary - the \*.so files should have been created by the prior command. The `--ledger` directory is arbitrary and is just the data dir for the Solana validator. 
- -``` -mkdir -p /tmp/eclipse/ledger-dir && target/debug/solana-test-validator --reset --ledger /tmp/eclipse/ledger-dir --account E9VrvAdGRvCguN2XgXsgu9PNmMM3vZsU8LSUrM68j8ty ~/abacus-monorepo/rust/config/sealevel/test-keys/test_deployer-account.json --bpf-program TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA ~/eclipse-program-library/target/deploy/spl_token.so --bpf-program TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb ~/eclipse-program-library/target/deploy/spl_token_2022.so --bpf-program ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL ~/eclipse-program-library/target/deploy/spl_associated_token_account.so --bpf-program noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV ~/eclipse-program-library/account-compression/target/deploy/spl_noop.so -``` - -By now you should have an output like this - keep it running and move to another terminal: - -``` -Ledger location: /tmp/eclipse/ledger-dir -Log: /tmp/eclipse/ledger-dir/validator.log -⠒ Initializing... -⠄ Initializing... -Identity: 4P5rtWdphhehU32myNQcTSMgrCRz7kdvZEnasX6fahJQ -Genesis Hash: G7CY7wEzbdjh8RwqTszxrpYTqiHKvqwpaw3JbmKJjJhU -Version: 1.14.13 -Shred Version: 419 -Gossip Address: 127.0.0.1:1024 -TPU Address: 127.0.0.1:1027 -JSON RPC URL: http://127.0.0.1:8899 -⠒ 00:05:35 | Processed Slot: 668 | Confirmed Slot: 668 | Finalized Slot: 6 -``` - -### Run the local end to end script - -Run the script found at `rust/utils/sealevel-test.bash`. This will build all required programs, deploy contracts, and test sending a warp route message. You need to supply the paths to your local `solar-eclipse` and `eclipse-program-library` repos: - -``` -SOLAR_ECLIPSE_DIR=~/solar-eclipse ECLIPSE_PROGRAM_LIBRARY_DIR=~/eclipse-program-library ./utils/sealevel-test.bash -``` - -Note: this won't rebuild any of the programs. If you want to rebuild them, you can either cd into them individually and run `cargo build-sbf --arch sbf`, or you can run the above bash script with `force-build-programs` as the first argument. 
- -You'll see a bunch of output here showing programs being built and deployed. Eventually you should see some logs saying `grep -q 'Message not delivered'`. At this point, the contracts have all been deployed and a native warp route transfer has been made. You can move on to running the validator and relayer. - -### Running the validator - -In a separate terminal, cd to `hyperlane-monorepo/rust`. - -1. Source the env vars: - -``` -source ./config/sealevel/validator.env -``` - -2. Run the validator (this clears the DB / checkpoints if present): - -``` -mkdir /tmp/SEALEVEL_DB ; rm -rf /tmp/SEALEVEL_DB/validator /tmp/test_sealevel_checkpoints_0x70997970c51812dc3a010c7d01b50e0d17dc79c8/* ; CONFIG_FILES=./config/sealevel/sealevel.json cargo run --bin validator -``` - -You should see some INFO logs about checkpoint at index 0. - -You can confirm things are working correctly by looking at `/tmp/CHECKPOINTS_DIR`, where the validator posts its signatures. - -### Running the relayer - -In a separate terminal, again in `hyperlane-monorepo/rust`: - -1. Source the env vars: - -``` -source ./config/sealevel/relayer.env -``` - -2. Run the relayer (the rm is to make sure the relayer's DB is cleared): - -``` -rm -rf /tmp/SEALEVEL_DB/relayer ; RUST_BACKTRACE=full CONFIG_FILES=./config/sealevel/sealevel.json cargo run --bin relayer -``` - -When the original `sealevel-test.bash` exits with a 0 exit code and some logs about Hyperlane Token Storage, the message has been successfully delivered! 
diff --git a/rust/sealevel/client/Cargo.toml b/rust/sealevel/client/Cargo.toml index 2d862cb130..0d50ffb139 100644 --- a/rust/sealevel/client/Cargo.toml +++ b/rust/sealevel/client/Cargo.toml @@ -21,10 +21,10 @@ solana-sdk.workspace = true account-utils = { path = "../libraries/account-utils" } hyperlane-core = { path = "../../hyperlane-core" } hyperlane-sealevel-connection-client = { path = "../libraries/hyperlane-sealevel-connection-client" } -hyperlane-sealevel-mailbox = { path = "../programs/mailbox" } -hyperlane-sealevel-multisig-ism-message-id = { path = "../programs/ism/multisig-ism-message-id" } -hyperlane-sealevel-token = { path = "../programs/hyperlane-sealevel-token" } -hyperlane-sealevel-token-collateral = { path = "../programs/hyperlane-sealevel-token-collateral" } +hyperlane-sealevel-mailbox = { path = "../programs/mailbox", features = ["no-entrypoint"] } +hyperlane-sealevel-multisig-ism-message-id = { path = "../programs/ism/multisig-ism-message-id", features = ["no-entrypoint"] } +hyperlane-sealevel-token = { path = "../programs/hyperlane-sealevel-token", features = ["no-entrypoint"] } +hyperlane-sealevel-token-collateral = { path = "../programs/hyperlane-sealevel-token-collateral", features = ["no-entrypoint"] } hyperlane-sealevel-token-lib = { path = "../libraries/hyperlane-sealevel-token" } -hyperlane-sealevel-token-native = { path = "../programs/hyperlane-sealevel-token-native" } -hyperlane-sealevel-validator-announce = { path = "../programs/validator-announce" } \ No newline at end of file +hyperlane-sealevel-token-native = { path = "../programs/hyperlane-sealevel-token-native", features = ["no-entrypoint"] } +hyperlane-sealevel-validator-announce = { path = "../programs/validator-announce", features = ["no-entrypoint"] } diff --git a/rust/sealevel/client/src/cmd_utils.rs b/rust/sealevel/client/src/cmd_utils.rs index 9fa908ea21..58e295955a 100644 --- a/rust/sealevel/client/src/cmd_utils.rs +++ b/rust/sealevel/client/src/cmd_utils.rs @@ -35,8 
+35,10 @@ pub fn build_cmd( c.args(&cmd[1..]); if log_all { c.stdout(Stdio::inherit()); + c.stderr(Stdio::inherit()); } else { - c.stdout(append_to(log)); + c.stdout(append_to(log.as_ref())); + c.stderr(append_to(log)); } if let Some(wd) = wd { c.current_dir(wd); diff --git a/rust/sealevel/client/src/main.rs b/rust/sealevel/client/src/main.rs index 525ce71240..4e0053f483 100644 --- a/rust/sealevel/client/src/main.rs +++ b/rust/sealevel/client/src/main.rs @@ -92,6 +92,8 @@ struct Cli { compute_budget: u32, #[arg(long, short = 'a')] heap_size: Option, + #[arg(long, short = 'C')] + config: Option, } #[derive(Subcommand)] @@ -472,8 +474,10 @@ fn main() { pretty_env_logger::init(); let cli = Cli::parse(); - let config = match CONFIG_FILE.as_ref() { - Some(config_file) => Config::load(config_file).unwrap(), + let config = match cli.config.as_ref().or(CONFIG_FILE.as_ref()) { + Some(config_file) => Config::load(config_file) + .map_err(|e| format!("Failed to load solana config file {}: {}", config_file, e)) + .unwrap(), None => Config::default(), }; let url = normalize_to_url_if_moniker(cli.url.unwrap_or(config.json_rpc_url)); diff --git a/rust/sealevel/environments/local-e2e/warp-routes/testwarproute/program-ids.json b/rust/sealevel/environments/local-e2e/warp-routes/testwarproute/program-ids.json index ba62748efe..c5e945eae3 100644 --- a/rust/sealevel/environments/local-e2e/warp-routes/testwarproute/program-ids.json +++ b/rust/sealevel/environments/local-e2e/warp-routes/testwarproute/program-ids.json @@ -1,10 +1,10 @@ { - "sealeveltest1": { - "hex": "0xa77b4e2ed231894cc8cb8eee21adcc705d8489bccc6b2fcf40a358de23e60b7b", - "base58": "CGn8yNtSD3aTTqJfYhUb6s1aVTN75NzwtsFKo1e83aga" - }, "sealeveltest2": { "hex": "0x2317f9615d4ebc2419ad4b88580e2a80a03b2c7a60bc960de7d6934dbc37a87e", "base58": "3MzUPjP5LEkiHH82nEAe28Xtz9ztuMqWc8UmuKxrpVQH" + }, + "sealeveltest1": { + "hex": "0xa77b4e2ed231894cc8cb8eee21adcc705d8489bccc6b2fcf40a358de23e60b7b", + "base58": 
"CGn8yNtSD3aTTqJfYhUb6s1aVTN75NzwtsFKo1e83aga" } } \ No newline at end of file diff --git a/rust/sealevel/libraries/access-control/Cargo.toml b/rust/sealevel/libraries/access-control/Cargo.toml index ec6f79f70f..964624bf58 100644 --- a/rust/sealevel/libraries/access-control/Cargo.toml +++ b/rust/sealevel/libraries/access-control/Cargo.toml @@ -5,9 +5,6 @@ name = "access-control" version = "0.1.0" edition = "2021" -[features] -no-entrypoint = [] - [dependencies] solana-program.workspace = true diff --git a/rust/sealevel/libraries/account-utils/Cargo.toml b/rust/sealevel/libraries/account-utils/Cargo.toml index 45ac5b892e..c74f43ad32 100644 --- a/rust/sealevel/libraries/account-utils/Cargo.toml +++ b/rust/sealevel/libraries/account-utils/Cargo.toml @@ -5,9 +5,6 @@ name = "account-utils" version = "0.1.0" edition = "2021" -[features] -no-entrypoint = [] - [dependencies] borsh.workspace = true solana-program.workspace = true diff --git a/rust/sealevel/libraries/account-utils/src/discriminator.rs b/rust/sealevel/libraries/account-utils/src/discriminator.rs index 510b154fb2..46443a616c 100644 --- a/rust/sealevel/libraries/account-utils/src/discriminator.rs +++ b/rust/sealevel/libraries/account-utils/src/discriminator.rs @@ -24,7 +24,7 @@ where T: DiscriminatorData + borsh::BorshSerialize, { fn serialize(&self, writer: &mut W) -> std::io::Result<()> { - PROGRAM_INSTRUCTION_DISCRIMINATOR.serialize(writer)?; + T::DISCRIMINATOR.serialize(writer)?; self.data.serialize(writer) } } @@ -35,7 +35,7 @@ where { fn deserialize(buf: &mut &[u8]) -> std::io::Result { let (discriminator, rest) = buf.split_at(Discriminator::LENGTH); - if discriminator != PROGRAM_INSTRUCTION_DISCRIMINATOR { + if discriminator != T::DISCRIMINATOR { return Err(std::io::Error::new( std::io::ErrorKind::InvalidData, "Invalid discriminator", @@ -134,5 +134,9 @@ mod test { let serialized_prefixed_foo = prefixed_foo.try_to_vec().unwrap(); assert_eq!(serialized_prefixed_foo.len(), prefixed_foo.size()); + 
assert_eq!( + serialized_prefixed_foo[0..Discriminator::LENGTH], + Foo::DISCRIMINATOR + ); } } diff --git a/rust/sealevel/libraries/ecdsa-signature/Cargo.toml b/rust/sealevel/libraries/ecdsa-signature/Cargo.toml index 13df708f81..b84eef2839 100644 --- a/rust/sealevel/libraries/ecdsa-signature/Cargo.toml +++ b/rust/sealevel/libraries/ecdsa-signature/Cargo.toml @@ -5,9 +5,6 @@ name = "ecdsa-signature" version = "0.1.0" edition = "2021" -[features] -no-entrypoint = [] - [dependencies] solana-program.workspace = true thiserror.workspace = true diff --git a/rust/sealevel/libraries/hyperlane-sealevel-connection-client/Cargo.toml b/rust/sealevel/libraries/hyperlane-sealevel-connection-client/Cargo.toml index 5fdbe21145..3f279ae0a5 100644 --- a/rust/sealevel/libraries/hyperlane-sealevel-connection-client/Cargo.toml +++ b/rust/sealevel/libraries/hyperlane-sealevel-connection-client/Cargo.toml @@ -5,9 +5,6 @@ name = "hyperlane-sealevel-connection-client" version = "0.1.0" edition = "2021" -[features] -no-entrypoint = [] - [dependencies] borsh.workspace = true solana-program.workspace = true diff --git a/rust/sealevel/libraries/hyperlane-sealevel-token/Cargo.toml b/rust/sealevel/libraries/hyperlane-sealevel-token/Cargo.toml index ec3ae855ce..dc48f9c3cf 100644 --- a/rust/sealevel/libraries/hyperlane-sealevel-token/Cargo.toml +++ b/rust/sealevel/libraries/hyperlane-sealevel-token/Cargo.toml @@ -5,9 +5,6 @@ name = "hyperlane-sealevel-token-lib" version = "0.1.0" edition = "2021" -[features] -no-entrypoint = [] - [dependencies] borsh.workspace = true num-derive.workspace = true diff --git a/rust/sealevel/libraries/interchain-security-module-interface/Cargo.toml b/rust/sealevel/libraries/interchain-security-module-interface/Cargo.toml index e289bfd35b..350cf2e4c4 100644 --- a/rust/sealevel/libraries/interchain-security-module-interface/Cargo.toml +++ b/rust/sealevel/libraries/interchain-security-module-interface/Cargo.toml @@ -5,9 +5,6 @@ name = 
"hyperlane-sealevel-interchain-security-module-interface" version = "0.1.0" edition = "2021" -[features] -no-entrypoint = [] - [dependencies] borsh.workspace = true solana-program.workspace = true diff --git a/rust/sealevel/libraries/message-recipient-interface/Cargo.toml b/rust/sealevel/libraries/message-recipient-interface/Cargo.toml index e6a6a41049..aff11f5a19 100644 --- a/rust/sealevel/libraries/message-recipient-interface/Cargo.toml +++ b/rust/sealevel/libraries/message-recipient-interface/Cargo.toml @@ -5,9 +5,6 @@ name = "hyperlane-sealevel-message-recipient-interface" version = "0.1.0" edition = "2021" -[features] -no-entrypoint = [] - [dependencies] borsh.workspace = true solana-program.workspace = true diff --git a/rust/sealevel/libraries/multisig-ism/Cargo.toml b/rust/sealevel/libraries/multisig-ism/Cargo.toml index 118159c16d..15d149f358 100644 --- a/rust/sealevel/libraries/multisig-ism/Cargo.toml +++ b/rust/sealevel/libraries/multisig-ism/Cargo.toml @@ -6,7 +6,6 @@ version = "0.1.0" edition = "2021" [features] -no-entrypoint = [] test-data = ["dep:hex"] [dependencies] diff --git a/rust/sealevel/libraries/serializable-account-meta/Cargo.toml b/rust/sealevel/libraries/serializable-account-meta/Cargo.toml index 5e0323b36d..cab84fe371 100644 --- a/rust/sealevel/libraries/serializable-account-meta/Cargo.toml +++ b/rust/sealevel/libraries/serializable-account-meta/Cargo.toml @@ -5,9 +5,6 @@ name = "serializable-account-meta" version = "0.1.0" edition = "2021" -[features] -no-entrypoint = [] - [dependencies] borsh.workspace = true solana-program.workspace = true diff --git a/rust/sealevel/libraries/test-utils/Cargo.toml b/rust/sealevel/libraries/test-utils/Cargo.toml index 77c18ece94..154726920a 100644 --- a/rust/sealevel/libraries/test-utils/Cargo.toml +++ b/rust/sealevel/libraries/test-utils/Cargo.toml @@ -5,9 +5,6 @@ name = "hyperlane-test-utils" version = "0.1.0" edition = "2021" -[features] -no-entrypoint = [] - [dependencies] borsh.workspace = true 
solana-program-test.workspace = true diff --git a/rust/sealevel/programs/interchain-gas-paymaster/src/accounts.rs b/rust/sealevel/programs/interchain-gas-paymaster/src/accounts.rs index e3a2ec89b3..68d2a6881c 100644 --- a/rust/sealevel/programs/interchain-gas-paymaster/src/accounts.rs +++ b/rust/sealevel/programs/interchain-gas-paymaster/src/accounts.rs @@ -232,6 +232,8 @@ pub struct GasPaymentData { pub message_id: H256, /// The amount of gas paid for. pub gas_amount: u64, + /// The unique gas payment pubkey. + pub unique_gas_payment_pubkey: Pubkey, /// The slot of the gas payment. pub slot: Slot, } @@ -243,8 +245,9 @@ impl SizedData for GasPaymentData { // 4 for destination_domain // 32 for message_id // 8 for gas_amount + // 32 for unique_gas_payment_pubkey // 8 for slot - 8 + 32 + 4 + 32 + 8 + 8 + 8 + 32 + 4 + 32 + 8 + 32 + 8 } } diff --git a/rust/sealevel/programs/interchain-gas-paymaster/src/processor.rs b/rust/sealevel/programs/interchain-gas-paymaster/src/processor.rs index eb5e010b74..8cab90a176 100644 --- a/rust/sealevel/programs/interchain-gas-paymaster/src/processor.rs +++ b/rust/sealevel/programs/interchain-gas-paymaster/src/processor.rs @@ -302,15 +302,15 @@ fn pay_for_gas(program_id: &Pubkey, accounts: &[AccountInfo], payment: PayForGas // Account 4: The unique gas payment account. // Uniqueness is enforced by making sure the message storage PDA based on // this unique message account is empty, which is done next. - let unique_message_account_info = next_account_info(accounts_iter)?; - if !unique_message_account_info.is_signer { + let unique_gas_payment_account_info = next_account_info(accounts_iter)?; + if !unique_gas_payment_account_info.is_signer { return Err(ProgramError::MissingRequiredSignature); } // Account 5: Gas payment PDA. 
let gas_payment_account_info = next_account_info(accounts_iter)?; let (gas_payment_key, gas_payment_bump) = Pubkey::find_program_address( - igp_gas_payment_pda_seeds!(unique_message_account_info.key), + igp_gas_payment_pda_seeds!(unique_gas_payment_account_info.key), program_id, ); if gas_payment_account_info.key != &gas_payment_key { @@ -357,6 +357,7 @@ fn pay_for_gas(program_id: &Pubkey, accounts: &[AccountInfo], payment: PayForGas destination_domain: payment.destination_domain, message_id: payment.message_id, gas_amount, + unique_gas_payment_pubkey: *unique_gas_payment_account_info.key, slot: Clock::get()?.slot, } .into(), @@ -372,7 +373,7 @@ fn pay_for_gas(program_id: &Pubkey, accounts: &[AccountInfo], payment: PayForGas program_id, system_program_info, gas_payment_account_info, - igp_gas_payment_pda_seeds!(unique_message_account_info.key, gas_payment_bump), + igp_gas_payment_pda_seeds!(unique_gas_payment_account_info.key, gas_payment_bump), )?; gas_payment_account.store(gas_payment_account_info, false)?; diff --git a/rust/sealevel/programs/interchain-gas-paymaster/tests/functional.rs b/rust/sealevel/programs/interchain-gas-paymaster/tests/functional.rs index d4a3eacdc8..15381e3590 100644 --- a/rust/sealevel/programs/interchain-gas-paymaster/tests/functional.rs +++ b/rust/sealevel/programs/interchain-gas-paymaster/tests/functional.rs @@ -1027,7 +1027,7 @@ async fn assert_gas_payment( banks_client: &mut BanksClient, igp_key: Pubkey, payment_tx_signature: Signature, - _payment_unique_account_pubkey: Pubkey, + unique_gas_payment_pubkey: Pubkey, gas_payment_account_key: Pubkey, destination_domain: u32, gas_amount: u64, @@ -1059,6 +1059,7 @@ async fn assert_gas_payment( destination_domain, message_id, gas_amount, + unique_gas_payment_pubkey, slot, } .into(), diff --git a/rust/utils/run-locally/Cargo.toml b/rust/utils/run-locally/Cargo.toml index 6b53d1365b..9accd28fb9 100644 --- a/rust/utils/run-locally/Cargo.toml +++ b/rust/utils/run-locally/Cargo.toml @@ -17,3 
+17,4 @@ nix = { version = "0.26", default-features = false, features = ["signal"] } tempfile = "3.3" ureq = { version = "2.4", default-features = false } which = "4.4" +macro_rules_attribute = "0.2" diff --git a/rust/utils/run-locally/src/config.rs b/rust/utils/run-locally/src/config.rs index 87d576f170..cf84193505 100644 --- a/rust/utils/run-locally/src/config.rs +++ b/rust/utils/run-locally/src/config.rs @@ -1,39 +1,20 @@ -use std::collections::HashMap; use std::env; -use std::ffi::OsStr; -use std::fmt::{Debug, Display, Formatter}; -use std::path::{Path, PathBuf}; -use std::process::Command; use std::sync::Arc; -use std::time::{SystemTime, UNIX_EPOCH}; - -use eyre::{Context, Result}; - -use crate::utils::{concat_path, LogFilter}; pub struct Config { pub is_ci_env: bool, pub ci_mode: bool, pub ci_mode_timeout: u64, pub kathy_messages: u64, - pub log_all: bool, - pub log_dir: PathBuf, } impl Config { - pub fn load() -> Self { + pub fn load() -> Arc { let ci_mode = env::var("E2E_CI_MODE") .map(|k| k.parse::().unwrap()) .unwrap_or_default(); - let date_str = SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap() - .as_secs() - .to_string(); - let log_dir = concat_path(env::temp_dir(), format!("logs/hyperlane-agents/{date_str}")); - Self { + Arc::new(Self { ci_mode, - log_dir, is_ci_env: env::var("CI").as_deref() == Ok("true"), ci_mode_timeout: env::var("E2E_CI_TIMEOUT_SEC") .map(|k| k.parse::().unwrap()) @@ -44,183 +25,6 @@ impl Config { .map(|r| r.parse::().unwrap()); r.unwrap_or(16) }, - log_all: env::var("E2E_LOG_ALL") - .map(|k| k.parse::().unwrap()) - .unwrap_or(ci_mode), - } - } -} - -#[derive(Default, Clone)] -pub struct ProgramArgs { - bin: Option>, - args: Vec>, - env: HashMap, Arc>, - working_dir: Option>, - log_filter: Option, -} - -impl Debug for ProgramArgs { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - f.debug_struct("ProgramArgs") - .field("bin", &self.bin) - .field("args", &self.args) - .field("env", &self.env) - 
.field("working_dir", &self.working_dir) - .field("log_filter", &self.log_filter.is_some()) - .finish() - } -} - -impl Display for ProgramArgs { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - if f.alternate() { - let wd = self - .working_dir - .as_ref() - .map(|wd| wd.display()) - .unwrap_or_else(|| Path::new("./").display()); - write!(f, "({wd})$ ")?; - - for (k, v) in &self.env { - write!(f, "{k}={v} ")?; - } - - if let Some(path_result) = self.get_bin_path() { - if let Ok(bp) = path_result { - write!(f, "{}", bp.display())?; - } else { - write!(f, "{}", self.bin.as_ref().unwrap())?; - } - } else { - write!(f, "???")?; - } - - for a in &self.args { - write!(f, " {a}")?; - } - - Ok(()) - } else { - write!( - f, - "{}", - self.bin.as_deref().map(String::as_str).unwrap_or("???") - ) - } - } -} - -impl ProgramArgs { - pub fn new(bin: impl AsRef) -> Self { - Self::default().bin(bin) - } - - pub fn bin(mut self, bin: impl AsRef) -> Self { - self.bin = Some( - bin.as_ref() - .to_str() - .expect("Invalid string encoding for binary name") - .to_owned() - .into(), - ); - self - } - - pub fn raw_arg(mut self, arg: impl Into) -> Self { - self.args.push(arg.into().into()); - self - } - - pub fn cmd(self, cmd: impl Into) -> Self { - let cmd = cmd.into(); - debug_assert!(!cmd.starts_with('-'), "arg should not start with -"); - self.raw_arg(cmd) - } - - pub fn flag(self, arg: impl AsRef) -> Self { - debug_assert!( - !arg.as_ref().starts_with('-'), - "arg should not start with -" - ); - self.raw_arg(format!("--{}", arg.as_ref())) - } - - /// Assumes an arg in the format of `--$ARG1 $ARG2`, arg1 and arg2 should exclude quoting, equal sign, and the leading hyphens. 
- pub fn arg(self, arg1: impl AsRef, arg2: impl Into) -> Self { - self.flag(arg1).cmd(arg2) - } - - /// add an env that will be prefixed with the default hyperlane env prefix - pub fn hyp_env(self, key: impl AsRef, value: impl Into) -> Self { - const PREFIX: &str = "HYP_BASE_"; - let key = key.as_ref(); - debug_assert!( - !key.starts_with(PREFIX), - "env key should not start with prefix that is being added" - ); - self.env(format!("{PREFIX}{key}"), value) - } - - /// add a system env that makes no prefix assumptions - pub fn env(mut self, key: impl Into, value: impl Into) -> Self { - self.env.insert(key.into().into(), value.into().into()); - self - } - - pub fn working_dir(mut self, path: impl Into) -> Self { - self.working_dir = Some(path.into().into()); - self - } - - /// Filter logs being printed to stdout/stderr. If the LogFilter returns true, - /// then it will keep that log line, if it returns false it will discard it. - /// This is ignored when logging to files. - pub fn filter_logs(mut self, filter: LogFilter) -> Self { - self.log_filter = Some(filter); - self - } - - pub fn create_command(&self) -> Command { - let mut cmd = Command::new( - self.get_bin_path() - .expect("bin path must be specified") - .unwrap(), - ); - if let Some(wd) = &self.working_dir { - cmd.current_dir(wd.as_path()); - } - for (k, v) in self.env.iter() { - cmd.env(k.as_str(), v.as_str()); - } - cmd.args(self.args.iter().map(AsRef::as_ref)); - cmd - } - - pub fn get_filter(&self) -> Option { - self.log_filter - } - - /// Try to get the path to the binary - pub fn get_bin_path(&self) -> Option> { - self.bin.as_ref().map(|raw_bin_name| { - which::which(raw_bin_name.as_ref()) - .with_context(|| format!("Cannot find binary: {raw_bin_name}")) }) } - - /// Get just the name component of the binary - pub fn get_bin_name(&self) -> String { - Path::new( - self.bin - .as_ref() - .expect("bin path must be specified") - .as_str(), - ) - .file_name() - .expect("bin must have a file name") - 
.to_str() - .unwrap() - .to_owned() - } } diff --git a/rust/utils/run-locally/src/ethereum.rs b/rust/utils/run-locally/src/ethereum.rs new file mode 100644 index 0000000000..bc55f74727 --- /dev/null +++ b/rust/utils/run-locally/src/ethereum.rs @@ -0,0 +1,57 @@ +use std::sync::Arc; +use std::thread::sleep; +use std::time::Duration; + +use macro_rules_attribute::apply; + +use crate::config::Config; +use crate::logging::log; +use crate::program::Program; +use crate::utils::{as_task, AgentHandles, TaskHandle}; +use crate::{INFRA_PATH, MONOREPO_ROOT_PATH, TS_SDK_PATH}; + +#[apply(as_task)] +pub fn start_anvil(config: Arc) -> AgentHandles { + log!("Installing typescript dependencies..."); + let yarn_monorepo = Program::new("yarn").working_dir(MONOREPO_ROOT_PATH); + yarn_monorepo.clone().cmd("install").run().join(); + if !config.is_ci_env { + // don't need to clean in the CI + yarn_monorepo.clone().cmd("clean").run().join(); + } + yarn_monorepo.clone().cmd("build").run().join(); + + log!("Launching anvil..."); + let anvil_args = Program::new("anvil").flag("silent").filter_logs(|_| false); // for now do not keep any of the anvil logs + let anvil = anvil_args.spawn("ETH"); + + sleep(Duration::from_secs(10)); + + let yarn_infra = Program::new("yarn") + .working_dir(INFRA_PATH) + .env("ALLOW_LEGACY_MULTISIG_ISM", "true"); + log!("Deploying hyperlane ism contracts..."); + yarn_infra.clone().cmd("deploy-ism").run().join(); + + log!("Rebuilding sdk..."); + let yarn_sdk = Program::new("yarn").working_dir(TS_SDK_PATH); + yarn_sdk.clone().cmd("build").run().join(); + + log!("Deploying hyperlane core contracts..."); + yarn_infra.clone().cmd("deploy-core").run().join(); + + log!("Deploying hyperlane igp contracts..."); + yarn_infra.cmd("deploy-igp").run().join(); + + if !config.is_ci_env { + // Follow-up 'yarn hardhat node' invocation with 'yarn prettier' to fixup + // formatting on any autogenerated json config files to avoid any diff creation. 
+ yarn_monorepo.cmd("prettier").run().join(); + } + + // Rebuild the SDK to pick up the deployed contracts + log!("Rebuilding sdk..."); + yarn_sdk.cmd("build").run().join(); + + anvil +} diff --git a/rust/utils/run-locally/src/invariants.rs b/rust/utils/run-locally/src/invariants.rs new file mode 100644 index 0000000000..f4206873d6 --- /dev/null +++ b/rust/utils/run-locally/src/invariants.rs @@ -0,0 +1,117 @@ +use std::path::Path; + +use crate::config::Config; +use maplit::hashmap; + +use crate::fetch_metric; +use crate::logging::log; +use crate::solana::solana_termination_invariants_met; + +/// Use the metrics to check if the relayer queues are empty and the expected +/// number of messages have been sent. +pub fn termination_invariants_met( + config: &Config, + solana_cli_tools_path: &Path, + solana_config_path: &Path, +) -> eyre::Result { + let eth_messages_expected = (config.kathy_messages / 2) as u32 * 2; + let sol_messages_expected = 1; + let total_messages_expected = eth_messages_expected + sol_messages_expected; + + let lengths = fetch_metric("9092", "hyperlane_submitter_queue_length", &hashmap! {})?; + assert!(!lengths.is_empty(), "Could not find queue length metric"); + if lengths.into_iter().any(|n| n != 0) { + log!("Relayer queues not empty"); + return Ok(false); + }; + + // Also ensure the counter is as expected (total number of messages), summed + // across all mailboxes. + let msg_processed_count = + fetch_metric("9092", "hyperlane_messages_processed_count", &hashmap! {})? + .iter() + .sum::(); + if msg_processed_count != total_messages_expected { + log!( + "Relayer has {} processed messages, expected {}", + msg_processed_count, + total_messages_expected + ); + return Ok(false); + } + + let gas_payment_events_count = fetch_metric( + "9092", + "hyperlane_contract_sync_stored_events", + &hashmap! {"data_type" => "gas_payments"}, + )? + .iter() + .sum::(); + // TestSendReceiver randomly breaks gas payments up into + // two. 
So we expect at least as many gas payments as messages. + if gas_payment_events_count < total_messages_expected { + log!( + "Relayer has {} gas payment events, expected at least {}", + gas_payment_events_count, + total_messages_expected + ); + return Ok(false); + } + + if !solana_termination_invariants_met(solana_cli_tools_path, solana_config_path) { + log!("Solana termination invariants not met"); + return Ok(false); + } + + let dispatched_messages_scraped = fetch_metric( + "9093", + "hyperlane_contract_sync_stored_events", + &hashmap! {"data_type" => "message_dispatch"}, + )? + .iter() + .sum::(); + if dispatched_messages_scraped != eth_messages_expected { + log!( + "Scraper has scraped {} dispatched messages, expected {}", + dispatched_messages_scraped, + eth_messages_expected + ); + return Ok(false); + } + + let gas_payments_scraped = fetch_metric( + "9093", + "hyperlane_contract_sync_stored_events", + &hashmap! {"data_type" => "gas_payment"}, + )? + .iter() + .sum::(); + // The relayer and scraper should have the same number of gas payments. + if gas_payments_scraped != gas_payment_events_count { + log!( + "Scraper has scraped {} gas payments, expected {}", + gas_payments_scraped, + gas_payment_events_count + ); + return Ok(false); + } + + let delivered_messages_scraped = fetch_metric( + "9093", + "hyperlane_contract_sync_stored_events", + &hashmap! {"data_type" => "message_delivery"}, + )? + .iter() + .sum::(); + if delivered_messages_scraped != eth_messages_expected { + log!( + "Scraper has scraped {} delivered messages, expected {}", + delivered_messages_scraped, + eth_messages_expected + ); + return Ok(false); + } + + log!("Termination invariants have been met"); + Ok(true) +} diff --git a/rust/utils/run-locally/src/main.rs b/rust/utils/run-locally/src/main.rs index ef4a038732..ebf7edb8d4 100644 --- a/rust/utils/run-locally/src/main.rs +++ b/rust/utils/run-locally/src/main.rs @@ -10,56 +10,71 @@ //! does not include the initial setup time. 
If this timeout is reached before //! the end conditions are met, the test is a failure. Defaults to 10 min. //! - `E2E_KATHY_MESSAGES`: Number of kathy messages to dispatch. Defaults to 16 if CI mode is enabled. -//! - `E2E_LOG_ALL`: Log all output instead of writing to log files. Defaults to -//! true if CI mode, //! else false. use std::path::Path; use std::{ - fs::{self}, - path::PathBuf, + fs, process::{Child, ExitCode}, sync::atomic::{AtomicBool, Ordering}, thread::sleep, time::{Duration, Instant}, }; -use eyre::Result; -use maplit::hashmap; use tempfile::tempdir; use logging::log; +pub use metrics::fetch_metric; +use program::Program; -use crate::config::ProgramArgs; -use crate::utils::{ - build_cmd, concat_path, make_static, run_agent, stop_child, AgentHandles, TaskHandle, -}; +use crate::config::Config; +use crate::ethereum::start_anvil; +use crate::invariants::termination_invariants_met; +use crate::solana::*; +use crate::utils::{concat_path, make_static, stop_child, AgentHandles, ArbitraryData, TaskHandle}; mod config; +mod ethereum; +mod invariants; mod logging; mod metrics; +mod program; +mod solana; mod utils; -pub use metrics::fetch_metric; /// These private keys are from hardhat/anvil's testing accounts. const RELAYER_KEYS: &[&str] = &[ + // test1 "0x2a871d0798f97d79848a013d4936a73bf4cc922c825d33c1cf7073dff6d409c6", + // test2 "0xdbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97", + // test3 "0x4bbbf85ce3377467afe5d46f804f221813b2bb87f24d81f60f1fcdbf7cbf4356", + // sealeveltest1 + "0x892bf6949af4233e62f854cb3618bc1a3ee3341dc71ada08c4d5deca239acf4f", + // sealeveltest2 + "0x892bf6949af4233e62f854cb3618bc1a3ee3341dc71ada08c4d5deca239acf4f", ]; /// These private keys are from hardhat/anvil's testing accounts. /// These must be consistent with the ISM config for the test. 
const VALIDATOR_KEYS: &[&str] = &[ + // eth "0x47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a", "0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba", "0x92db14e403b83dfe3df233f83dfa3a0d7096f21ca9b0d6d6b8d88b2b4ec1564e", + // sealevel + "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d", ]; +const VALIDATOR_ORIGIN_CHAINS: &[&str] = &["test1", "test2", "test3", "sealeveltest1"]; + const AGENT_BIN_PATH: &str = "target/debug"; const INFRA_PATH: &str = "../typescript/infra"; const TS_SDK_PATH: &str = "../typescript/sdk"; const MONOREPO_ROOT_PATH: &str = "../"; +type DynPath = Box>; + static RUN_LOG_WATCHERS: AtomicBool = AtomicBool::new(true); static SHUTDOWN: AtomicBool = AtomicBool::new(false); @@ -67,17 +82,16 @@ static SHUTDOWN: AtomicBool = AtomicBool::new(false); /// cleanup purposes at this time. #[derive(Default)] struct State { - build_log: PathBuf, - log_all: bool, - scraper_postgres_initialized: bool, - agents: Vec, - watchers: Vec>, + agents: Vec<(String, Child)>, + watchers: Vec>>, + data: Vec>, } impl State { fn push_agent(&mut self, handles: AgentHandles) { - self.agents.push(handles.0); - self.watchers.push(handles.1); + self.agents.push((handles.0, handles.1)); self.watchers.push(handles.2); + self.watchers.push(handles.3); + self.data.push(handles.4); } } impl Drop for State { @@ -86,31 +100,25 @@ impl Drop for State { log!("Signaling children to stop..."); // stop children in reverse order self.agents.reverse(); - for mut agent in self.agents.drain(..) { + for (name, mut agent) in self.agents.drain(..) { + log!("Stopping child {}", name); stop_child(&mut agent); } - if self.scraper_postgres_initialized { - log!("Stopping scraper postgres..."); - kill_scraper_postgres(&self.build_log, self.log_all); - } log!("Joining watchers..."); RUN_LOG_WATCHERS.store(false, Ordering::Relaxed); for w in self.watchers.drain(..) 
{ - w.join(); + w.join_box(); + } + // drop any held data + self.data.reverse(); + for data in self.data.drain(..) { + drop(data) } + fs::remove_dir_all(SOLANA_CHECKPOINT_LOCATION).unwrap_or_default(); } } fn main() -> ExitCode { - macro_rules! shutdown_if_needed { - () => { - if SHUTDOWN.load(Ordering::Relaxed) { - log!("Early termination, shutting down"); - return ExitCode::FAILURE; - } - }; - } - // on sigint we want to trigger things to stop running ctrlc::set_handler(|| { log!("Terminating..."); @@ -118,23 +126,26 @@ fn main() -> ExitCode { }) .unwrap(); - let config = config::Config::load(); + assert_eq!(VALIDATOR_ORIGIN_CHAINS.len(), VALIDATOR_KEYS.len()); + const VALIDATOR_COUNT: usize = VALIDATOR_KEYS.len(); - if !config.log_all { - fs::create_dir_all(&config.log_dir).expect("Failed to make log dir"); - } - let build_log = concat_path(&config.log_dir, "build.log"); + let config = Config::load(); - let checkpoints_dirs = (0..3).map(|_| tempdir().unwrap()).collect::>(); + let solana_checkpoint_path = Path::new(SOLANA_CHECKPOINT_LOCATION); + fs::remove_dir_all(solana_checkpoint_path).unwrap_or_default(); + let checkpoints_dirs: Vec = (0..VALIDATOR_COUNT - 1) + .map(|_| Box::new(tempdir().unwrap()) as DynPath) + .chain([Box::new(solana_checkpoint_path) as DynPath]) + .collect(); let rocks_db_dir = tempdir().unwrap(); let relayer_db = concat_path(&rocks_db_dir, "relayer"); - let validator_dbs = (0..3) + let validator_dbs = (0..VALIDATOR_COUNT) .map(|i| concat_path(&rocks_db_dir, format!("validator{i}"))) .collect::>(); - let common_agent_env = ProgramArgs::default() + let common_agent_env = Program::default() .env("RUST_BACKTRACE", "full") - .hyp_env("TRACING_FMT", "pretty") + .hyp_env("TRACING_FMT", "compact") .hyp_env("TRACING_LEVEL", "debug") .hyp_env("CHAINS_TEST1_INDEX_CHUNK", "1") .hyp_env("CHAINS_TEST2_INDEX_CHUNK", "1") @@ -156,6 +167,8 @@ fn main() -> ExitCode { .hyp_env("DB", relayer_db.to_str().unwrap()) .hyp_env("CHAINS_TEST1_SIGNER_KEY", 
RELAYER_KEYS[0]) .hyp_env("CHAINS_TEST2_SIGNER_KEY", RELAYER_KEYS[1]) + .hyp_env("CHAINS_SEALEVELTEST1_SIGNER_KEY", RELAYER_KEYS[3]) + .hyp_env("CHAINS_SEALEVELTEST2_SIGNER_KEY", RELAYER_KEYS[4]) .hyp_env("RELAYCHAINS", "invalidchain,otherinvalid") .hyp_env("ALLOWLOCALCHECKPOINTSYNCERS", "true") .arg( @@ -164,7 +177,10 @@ fn main() -> ExitCode { ) // default is used for TEST3 .arg("defaultSigner.key", RELAYER_KEYS[2]) - .arg("relayChains", "test1,test2,test3"); + .arg( + "relayChains", + "test1,test2,test3,sealeveltest1,sealeveltest2", + ); let base_validator_env = common_agent_env .clone() @@ -184,17 +200,17 @@ fn main() -> ExitCode { .hyp_env("INTERVAL", "5") .hyp_env("CHECKPOINTSYNCER_TYPE", "localStorage"); - let validator_envs = (0..3) + let validator_envs = (0..VALIDATOR_COUNT) .map(|i| { base_validator_env .clone() .hyp_env("METRICS", (9094 + i).to_string()) .hyp_env("DB", validator_dbs[i].to_str().unwrap()) - .hyp_env("ORIGINCHAINNAME", format!("test{}", 1 + i)) + .hyp_env("ORIGINCHAINNAME", VALIDATOR_ORIGIN_CHAINS[i]) .hyp_env("VALIDATOR_KEY", VALIDATOR_KEYS[i]) .hyp_env( "CHECKPOINTSYNCER_PATH", - checkpoints_dirs[i].path().to_str().unwrap(), + (*checkpoints_dirs[i]).as_ref().to_str().unwrap(), ) }) .collect::>(); @@ -215,17 +231,12 @@ fn main() -> ExitCode { ); let mut state = State::default(); - state.build_log = build_log; - state.log_all = config.log_all; - if !config.log_all { - log!("Logs in {}", config.log_dir.display()); - } log!( "Signed checkpoints in {}", checkpoints_dirs .iter() - .map(|d| d.path().display().to_string()) + .map(|d| (**d).as_ref().display().to_string()) .collect::>() .join(", ") ); @@ -234,131 +245,89 @@ fn main() -> ExitCode { log!("Validator {} DB in {}", i + 1, validator_dbs[i].display()); }); - let build_log_ref = make_static(state.build_log.to_str().unwrap().to_owned()); - let build_cmd = move |cmd| build_cmd(cmd, build_log_ref, config.log_all, true); - let run_agent = |args, prefix| run_agent(args, prefix, &config); + // 
+ // Ready to run... + // + + let (solana_path, solana_path_tempdir) = install_solana_cli_tools().join(); + state.data.push(Box::new(solana_path_tempdir)); + let solana_program_builder = build_solana_programs(solana_path.clone()); - shutdown_if_needed!(); // this task takes a long time in the CI so run it in parallel log!("Building rust..."); - let build_rust = build_cmd( - ProgramArgs::new("cargo") - .cmd("build") - .arg("features", "test-utils") - .arg("bin", "relayer") - .arg("bin", "validator") - .arg("bin", "scraper") - .arg("bin", "init-db"), - ); - - log!("Running postgres db..."); - kill_scraper_postgres(&state.build_log, config.log_all); - build_cmd( - ProgramArgs::new("docker") - .cmd("run") - .flag("rm") - .arg("name", "scraper-testnet-postgres") - .arg("env", "POSTGRES_PASSWORD=47221c18c610") - .arg("publish", "5432:5432") - .flag("detach") - .cmd("postgres:14"), - ) - .join(); - state.scraper_postgres_initialized = true; - - shutdown_if_needed!(); - log!("Installing typescript dependencies..."); - - let yarn_monorepo = ProgramArgs::new("yarn").working_dir(MONOREPO_ROOT_PATH); - build_cmd(yarn_monorepo.clone().cmd("install")).join(); - if !config.is_ci_env { - // don't need to clean in the CI - build_cmd(yarn_monorepo.clone().cmd("clean")).join(); - } - shutdown_if_needed!(); - build_cmd(yarn_monorepo.clone().cmd("build")).join(); - - shutdown_if_needed!(); - log!("Launching anvil..."); - let anvil_args = ProgramArgs::new("anvil") - .flag("silent") - .filter_logs(filter_anvil_logs); - let anvil = run_agent(anvil_args, "ETH"); - state.push_agent(anvil); - - sleep(Duration::from_secs(10)); + let build_rust = Program::new("cargo") + .cmd("build") + .arg("features", "test-utils") + .arg("bin", "relayer") + .arg("bin", "validator") + .arg("bin", "scraper") + .arg("bin", "init-db") + .arg("bin", "hyperlane-sealevel-client") + .filter_logs(|l| !l.contains("workspace-inheritance")) + .run(); - let yarn_infra = ProgramArgs::new("yarn") - .working_dir(INFRA_PATH) 
- .env("ALLOW_LEGACY_MULTISIG_ISM", "true"); - log!("Deploying hyperlane ism contracts..."); - build_cmd(yarn_infra.clone().cmd("deploy-ism")).join(); - - shutdown_if_needed!(); - log!("Rebuilding sdk..."); - let yarn_sdk = ProgramArgs::new("yarn").working_dir(TS_SDK_PATH); - build_cmd(yarn_sdk.clone().cmd("build")).join(); + let start_anvil = start_anvil(config.clone()); - log!("Deploying hyperlane core contracts..."); - build_cmd(yarn_infra.clone().cmd("deploy-core")).join(); + let solana_program_path = solana_program_builder.join(); - log!("Deploying hyperlane igp contracts..."); - build_cmd(yarn_infra.clone().cmd("deploy-igp")).join(); - - if !config.is_ci_env { - // Follow-up 'yarn hardhat node' invocation with 'yarn prettier' to fixup - // formatting on any autogenerated json config files to avoid any diff creation. - build_cmd(yarn_monorepo.cmd("prettier")).join(); - } - - shutdown_if_needed!(); - // Rebuild the SDK to pick up the deployed contracts - log!("Rebuilding sdk..."); - build_cmd(yarn_sdk.cmd("build")).join(); + log!("Running postgres db..."); + let postgres = Program::new("docker") + .cmd("run") + .flag("rm") + .arg("name", "scraper-testnet-postgres") + .arg("env", "POSTGRES_PASSWORD=47221c18c610") + .arg("publish", "5432:5432") + .cmd("postgres:14") + .spawn("SQL"); + state.push_agent(postgres); build_rust.join(); - log!("Init postgres db..."); - build_cmd(ProgramArgs::new(concat_path(AGENT_BIN_PATH, "init-db"))).join(); - - shutdown_if_needed!(); + let solana_ledger_dir = tempdir().unwrap(); + let start_solana_validator = start_solana_test_validator( + solana_path.clone(), + solana_program_path, + solana_ledger_dir.as_ref().to_path_buf(), + ); - let scraper = run_agent(scraper_env, "SCR"); - state.push_agent(scraper); + let (solana_config_path, solana_validator) = start_solana_validator.join(); + state.push_agent(solana_validator); + state.push_agent(start_anvil.join()); // spawn 1st validator before any messages have been sent to test empty 
mailbox - let validator1_env = validator_envs.first().unwrap().clone(); - let validator1 = run_agent(validator1_env, "VAL1"); - state.push_agent(validator1); + state.push_agent(validator_envs.first().unwrap().clone().spawn("VL1")); sleep(Duration::from_secs(5)); + log!("Init postgres db..."); + Program::new(concat_path(AGENT_BIN_PATH, "init-db")) + .run() + .join(); + state.push_agent(scraper_env.spawn("SCR")); + // Send half the kathy messages before starting the rest of the agents - let kathy_env = yarn_infra + let kathy_env = Program::new("yarn") + .working_dir(INFRA_PATH) .cmd("kathy") .arg("messages", (config.kathy_messages / 2).to_string()) .arg("timeout", "1000"); - let (mut kathy, kathy_stdout, kathy_stderr) = run_agent(kathy_env.clone(), "KTY"); - state.watchers.push(kathy_stdout); - state.watchers.push(kathy_stderr); - kathy.wait().unwrap(); + kathy_env.clone().run().join(); // spawn the rest of the validators for (i, validator_env) in validator_envs.into_iter().enumerate().skip(1) { - let validator = run_agent(validator_env, make_static(format!("VAL{}", 1 + i))); + let validator = validator_env.spawn(make_static(format!("VL{}", 1 + i))); state.push_agent(validator); } - let relayer = run_agent(relayer_env, "RLY"); - state.push_agent(relayer); + state.push_agent(relayer_env.spawn("RLY")); + + initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone()).join(); log!("Setup complete! Agents running in background..."); log!("Ctrl+C to end execution..."); // Send half the kathy messages after the relayer comes up - let kathy_env = kathy_env.flag("mineforever"); - let kathy = run_agent(kathy_env, "KTY"); - state.push_agent(kathy); + state.push_agent(kathy_env.flag("mineforever").spawn("KTY")); let loop_start = Instant::now(); // give things a chance to fully start. @@ -367,10 +336,10 @@ fn main() -> ExitCode { while !SHUTDOWN.load(Ordering::Relaxed) { if config.ci_mode { // for CI we have to look for the end condition. 
- let num_messages_expected = (config.kathy_messages / 2) as u32 * 2; - if termination_invariants_met(num_messages_expected).unwrap_or(false) { + if termination_invariants_met(&config, &solana_path, &solana_config_path) + .unwrap_or(false) + { // end condition reached successfully - log!("Agent metrics look healthy"); break; } else if (Instant::now() - loop_start).as_secs() > config.ci_mode_timeout { // we ran out of time @@ -381,10 +350,11 @@ fn main() -> ExitCode { } // verify long-running tasks are still running - for child in state.agents.iter_mut() { + for (name, child) in state.agents.iter_mut() { if child.try_wait().unwrap().is_some() { - log!("Child process exited unexpectedly, shutting down"); + log!("Child process {} exited unexpectedly, shutting down", name); failure_occurred = true; + SHUTDOWN.store(true, Ordering::Relaxed); break; } } @@ -393,122 +363,10 @@ fn main() -> ExitCode { } if failure_occurred { + log!("E2E tests failed"); ExitCode::FAILURE } else { + log!("E2E tests passed"); ExitCode::SUCCESS } } - -/// Use the metrics to check if the relayer queues are empty and the expected -/// number of messages have been sent. -fn termination_invariants_met(num_expected_messages: u32) -> Result { - let lengths = fetch_metric("9092", "hyperlane_submitter_queue_length", &hashmap! {})?; - assert!(!lengths.is_empty(), "Could not find queue length metric"); - if lengths.into_iter().any(|n| n != 0) { - log!("Relayer queues not empty"); - return Ok(false); - }; - - // Also ensure the counter is as expected (total number of messages), summed - // across all mailboxes. - let msg_processed_count = - fetch_metric("9092", "hyperlane_messages_processed_count", &hashmap! {})? 
- .iter() - .sum::(); - if msg_processed_count != num_expected_messages { - log!( - "Relayer has {} processed messages, expected {}", - msg_processed_count, - num_expected_messages - ); - return Ok(false); - } - - let gas_payment_events_count = fetch_metric( - "9092", - "hyperlane_contract_sync_stored_events", - &hashmap! {"data_type" => "gas_payments"}, - )? - .iter() - .sum::(); - // TestSendReceiver randomly breaks gas payments up into - // two. So we expect at least as many gas payments as messages. - if gas_payment_events_count < num_expected_messages { - log!( - "Relayer has {} gas payment events, expected at least {}", - gas_payment_events_count, - num_expected_messages - ); - return Ok(false); - } - - let dispatched_messages_scraped = fetch_metric( - "9093", - "hyperlane_contract_sync_stored_events", - &hashmap! {"data_type" => "message_dispatch"}, - )? - .iter() - .sum::(); - if dispatched_messages_scraped != num_expected_messages { - log!( - "Scraper has scraped {} dispatched messages, expected {}", - dispatched_messages_scraped, - num_expected_messages - ); - return Ok(false); - } - - let gas_payments_scraped = fetch_metric( - "9093", - "hyperlane_contract_sync_stored_events", - &hashmap! {"data_type" => "gas_payment"}, - )? - .iter() - .sum::(); - // The relayer and scraper should have the same number of gas payments. - if gas_payments_scraped != gas_payment_events_count { - log!( - "Scraper has scraped {} gas payments, expected {}", - gas_payments_scraped, - num_expected_messages - ); - return Ok(false); - } - - let delivered_messages_scraped = fetch_metric( - "9093", - "hyperlane_contract_sync_stored_events", - &hashmap! {"data_type" => "message_delivery"}, - )? 
- .iter() - .sum::(); - if delivered_messages_scraped != num_expected_messages { - log!( - "Scraper has scraped {} delivered messages, expected {}", - delivered_messages_scraped, - num_expected_messages - ); - Ok(false) - } else { - log!("Termination invariants have been meet"); - Ok(true) - } -} - -fn kill_scraper_postgres(build_log: impl AsRef, log_all: bool) { - build_cmd( - ProgramArgs::new("docker") - .cmd("stop") - .cmd("scraper-testnet-postgres"), - &build_log, - log_all, - false, - ) - .join(); -} - -/// Return true if a given log line should be kept. -fn filter_anvil_logs(_log: &str) -> bool { - // for now discard all anvil logs - false -} diff --git a/rust/utils/run-locally/src/program.rs b/rust/utils/run-locally/src/program.rs new file mode 100644 index 0000000000..5a27d1c484 --- /dev/null +++ b/rust/utils/run-locally/src/program.rs @@ -0,0 +1,346 @@ +use std::collections::BTreeMap; +use std::ffi::OsStr; +use std::fmt::{Debug, Display, Formatter}; +use std::io::{BufRead, BufReader, Read}; +use std::path::{Path, PathBuf}; +use std::process::{Command, Stdio}; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::mpsc::Sender; +use std::sync::{mpsc, Arc}; +use std::thread::{sleep, spawn}; +use std::time::Duration; + +use eyre::Context; +use macro_rules_attribute::apply; + +use crate::logging::log; +use crate::utils::{ + as_task, stop_child, AgentHandles, ArbitraryData, LogFilter, MappingTaskHandle, + SimpleTaskHandle, TaskHandle, +}; +use crate::{RUN_LOG_WATCHERS, SHUTDOWN}; + +#[derive(Default, Clone)] +#[must_use] +pub struct Program { + bin: Option>, + args: Vec>, + env: BTreeMap, Arc>, + working_dir: Option>, + log_filter: Option, + arbitrary_data: Vec>, +} + +impl Debug for Program { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Program") + .field("bin", &self.bin) + .field("args", &self.args) + .field("env", &self.env) + .field("working_dir", &self.working_dir) + .field("log_filter", 
&self.log_filter.is_some()) + .finish() + } +} + +impl Display for Program { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + if f.alternate() { + let wd = self + .working_dir + .as_ref() + .map(|wd| wd.display()) + .unwrap_or_else(|| Path::new("./").display()); + write!(f, "({wd})$ ")?; + + for (k, v) in &self.env { + write!(f, "{k}={v} ")?; + } + + if let Some(path_result) = self.get_bin_path() { + if let Ok(bp) = path_result { + write!(f, "{}", bp.display())?; + } else { + write!(f, "{}", self.bin.as_ref().unwrap())?; + } + } else { + write!(f, "???")?; + } + + for a in &self.args { + write!(f, " {a}")?; + } + + Ok(()) + } else { + write!( + f, + "{}", + self.bin.as_deref().map(String::as_str).unwrap_or("???") + ) + } + } +} + +impl Program { + pub fn new(bin: impl AsRef) -> Self { + Self::default().bin(bin) + } + + pub fn bin(mut self, bin: impl AsRef) -> Self { + self.bin = Some( + bin.as_ref() + .to_str() + .expect("Invalid string encoding for binary name") + .to_owned() + .into(), + ); + self + } + + pub fn raw_arg(mut self, arg: impl Into) -> Self { + self.args.push(arg.into().into()); + self + } + + pub fn cmd(self, cmd: impl Into) -> Self { + let cmd = cmd.into(); + debug_assert!(!cmd.starts_with('-'), "arg should not start with -"); + self.raw_arg(cmd) + } + + pub fn flag(self, arg: impl AsRef) -> Self { + debug_assert!( + !arg.as_ref().starts_with('-'), + "arg should not start with -" + ); + self.raw_arg(format!("--{}", arg.as_ref())) + } + + /// Assumes an arg in the format of `--$ARG1 $ARG2`, arg1 and arg2 should exclude quoting, equal sign, and the leading hyphens. + pub fn arg(self, arg1: impl AsRef, arg2: impl Into) -> Self { + self.flag(arg1).cmd(arg2) + } + + /// Assumes an arg in the format of `--$ARG1 $ARG2 $ARG3`, args should exclude quoting, equal sign, and the leading hyphens. 
+ pub fn arg3( + self, + arg1: impl AsRef, + arg2: impl Into, + arg3: impl Into, + ) -> Self { + self.flag(arg1).cmd(arg2).cmd(arg3) + } + + /// add an env that will be prefixed with the default hyperlane env prefix + pub fn hyp_env(self, key: impl AsRef, value: impl Into) -> Self { + const PREFIX: &str = "HYP_BASE_"; + let key = key.as_ref(); + debug_assert!( + !key.starts_with(PREFIX), + "env key should not start with prefix that is being added" + ); + self.env(format!("{PREFIX}{key}"), value) + } + + /// add a system env that makes no prefix assumptions + pub fn env(mut self, key: impl Into, value: impl Into) -> Self { + self.env.insert(key.into().into(), value.into().into()); + self + } + + pub fn working_dir(mut self, path: impl Into) -> Self { + self.working_dir = Some(path.into().into()); + self + } + + /// Filter logs being printed to stdout/stderr. If the LogFilter returns true, + /// then it will keep that log line, if it returns false it will discard it. + /// This is ignored when logging to files. + pub fn filter_logs(mut self, filter: LogFilter) -> Self { + self.log_filter = Some(filter); + self + } + + /// Remember some arbitrary data until either this program args goes out of scope or until the + /// agent/child process exits. This is useful for preventing something from dropping. 
+ pub fn remember(mut self, data: impl ArbitraryData) -> Self { + self.arbitrary_data.push(Arc::new(data)); + self + } + + pub fn create_command(&self) -> Command { + let mut cmd = Command::new( + self.get_bin_path() + .expect("bin path must be specified") + .unwrap(), + ); + if let Some(wd) = &self.working_dir { + cmd.current_dir(wd.as_path()); + } + for (k, v) in self.env.iter() { + cmd.env(k.as_str(), v.as_str()); + } + cmd.args(self.args.iter().map(AsRef::as_ref)); + cmd + } + + pub fn get_filter(&self) -> Option { + self.log_filter + } + + /// Try to get the path to the binary + pub fn get_bin_path(&self) -> Option> { + self.bin.as_ref().map(|raw_bin_name| { + which::which(raw_bin_name.as_ref()) + .with_context(|| format!("Cannot find binary: {raw_bin_name}")) + }) + } + + /// Get just the name component of the binary + pub fn get_bin_name(&self) -> String { + Path::new( + self.bin + .as_ref() + .expect("bin path must be specified") + .as_str(), + ) + .file_name() + .expect("bin must have a file name") + .to_str() + .unwrap() + .to_owned() + } + + pub fn get_memory(&self) -> Box { + Box::new(self.arbitrary_data.clone()) + } + + #[allow(dead_code)] + pub fn run(self) -> impl TaskHandle { + MappingTaskHandle(self.run_full(true, false), |_| ()) + } + + #[allow(dead_code)] + pub fn run_ignore_code(self) -> impl TaskHandle { + MappingTaskHandle(self.run_full(false, false), |_| ()) + } + + #[allow(dead_code)] + pub fn run_with_output(self) -> impl TaskHandle> { + MappingTaskHandle(self.run_full(false, true), |o| { + o.expect("Command did not return output") + }) + } + + pub fn spawn(self, log_prefix: &'static str) -> AgentHandles { + let mut command = self.create_command(); + command.stdout(Stdio::piped()).stderr(Stdio::piped()); + + log!("Spawning {}...", &self); + let mut child = command + .spawn() + .unwrap_or_else(|e| panic!("Failed to start {:?} with error: {e}", &self)); + let child_stdout = child.stdout.take().unwrap(); + let filter = self.get_filter(); + let 
stdout = + spawn(move || prefix_log(child_stdout, log_prefix, &RUN_LOG_WATCHERS, filter, None)); + let child_stderr = child.stderr.take().unwrap(); + let stderr = + spawn(move || prefix_log(child_stderr, log_prefix, &RUN_LOG_WATCHERS, filter, None)); + ( + log_prefix.to_owned(), + child, + Box::new(SimpleTaskHandle(stdout)), + Box::new(SimpleTaskHandle(stderr)), + self.get_memory(), + ) + } + + #[apply(as_task)] + fn run_full(self, assert_success: bool, capture_output: bool) -> Option> { + let mut command = self.create_command(); + command.stdout(Stdio::piped()); + command.stderr(Stdio::piped()); + + log!("{:#}", &self); + let mut child = command + .spawn() + .unwrap_or_else(|e| panic!("Failed to start command `{}` with Error: {e}", &self)); + let filter = self.get_filter(); + let running = Arc::new(AtomicBool::new(true)); + let (stdout_ch_tx, stdout_ch_rx) = capture_output.then(mpsc::channel).unzip(); + let stdout = { + let stdout = child.stdout.take().unwrap(); + let name = self.get_bin_name(); + let running = running.clone(); + spawn(move || prefix_log(stdout, &name, &running, filter, stdout_ch_tx)) + }; + let stderr = { + let stderr = child.stderr.take().unwrap(); + let name = self.get_bin_name(); + let running = running.clone(); + spawn(move || prefix_log(stderr, &name, &running, filter, None)) + }; + + let status = loop { + sleep(Duration::from_millis(500)); + + if let Some(exit_status) = child.try_wait().expect("Failed to run command") { + break exit_status; + } else if SHUTDOWN.load(Ordering::Relaxed) { + log!("Forcing termination of command `{}`", &self); + stop_child(&mut child); + break child.wait().expect("Failed to run command"); + } + }; + + running.store(false, Ordering::Relaxed); + stdout.join().unwrap(); + stderr.join().unwrap(); + assert!( + !assert_success || !RUN_LOG_WATCHERS.load(Ordering::Relaxed) || status.success(), + "Command returned non-zero exit code: {:?}", + &self + ); + + stdout_ch_rx.map(|rx| rx.into_iter().collect()) + } +} + +/// 
Read from a process output and add a string to the front before writing it to stdout. +fn prefix_log( + output: impl Read, + prefix: &str, + run_log_watcher: &AtomicBool, + filter: Option, + channel: Option>, +) { + let mut reader = BufReader::new(output).lines(); + loop { + if let Some(line) = reader.next() { + let line = match line { + Ok(l) => l, + Err(e) => { + // end of stream, probably + log!("Error reading from output for {}: {}", prefix, e); + break; + } + }; + if let Some(filter) = filter.as_ref() { + if !(filter)(&line) { + continue; + } + } + println!("<{prefix}> {line}"); + if let Some(channel) = &channel { + // ignore send errors + channel.send(line).unwrap_or(()); + } + } else if run_log_watcher.load(Ordering::Relaxed) { + sleep(Duration::from_millis(10)); + } else { + break; + } + } +} diff --git a/rust/utils/run-locally/src/solana.rs b/rust/utils/run-locally/src/solana.rs new file mode 100644 index 0000000000..20f8639157 --- /dev/null +++ b/rust/utils/run-locally/src/solana.rs @@ -0,0 +1,342 @@ +use std::fs; +use std::path::{Path, PathBuf}; +use std::thread::sleep; +use std::time::Duration; + +use macro_rules_attribute::apply; +use tempfile::{tempdir, NamedTempFile}; + +use crate::logging::log; +use crate::program::Program; +use crate::utils::{as_task, concat_path, AgentHandles, ArbitraryData, TaskHandle}; +use crate::AGENT_BIN_PATH; + +// Solana program tuples of: +// 0: Solana address or keypair for the bpf program +// 1: Name of the program's shared object file +const SOLANA_PROGRAMS: &[(&str, &str)] = &[ + ( + "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA", + "spl_token.so", + ), + ( + "TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb", + "spl_token_2022.so", + ), + ( + "ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL", + "spl_associated_token_account.so", + ), + ("noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV", "spl_noop.so"), +]; + +const SOLANA_KEYPAIR: &str = "config/test-sealevel-keys/test_deployer-keypair.json"; +const SOLANA_DEPLOYER_ACCOUNT: &str = 
"config/test-sealevel-keys/test_deployer-account.json"; + +const SBF_OUT_PATH: &str = "target/dist"; + +// Relative paths to solana program source code within rust/sealevel/programs repo. +const SOLANA_HYPERLANE_PROGRAMS: &[&str] = &[ + "mailbox", + "validator-announce", + "ism/multisig-ism-message-id", + "hyperlane-sealevel-token", + "hyperlane-sealevel-token-native", + "hyperlane-sealevel-token-collateral", +]; + +const SOLANA_PROGRAM_LIBRARY_ARCHIVE: &str = + "https://github.com/hyperlane-xyz/solana-program-library/releases/download/2023-07-27-01/spl.tar.gz"; + +const SOLANA_LOCAL_CHAIN_ID: &str = "13375"; +const SOLANA_REMOTE_CHAIN_ID: &str = "13376"; + +/// The Solana CLI tool version to download and use. +const SOLANA_CLI_VERSION: &str = "1.14.20"; + +// TODO: use a temp dir instead! +pub const SOLANA_CHECKPOINT_LOCATION: &str = + "/tmp/test_sealevel_checkpoints_0x70997970c51812dc3a010c7d01b50e0d17dc79c8"; + +// Install the CLI tools and return the path to the bin dir. +#[apply(as_task)] +pub fn install_solana_cli_tools() -> (PathBuf, impl ArbitraryData) { + let solana_download_dir = tempdir().unwrap(); + let solana_tools_dir = tempdir().unwrap(); + log!("Downloading solana cli release v{}", SOLANA_CLI_VERSION); + let solana_release_name = { + // best effort to pick one of the supported targets + let target = if cfg!(target_os = "linux") { + "x86_64-unknown-linux-gnu" + } else if cfg!(target_os = "macos") { + if cfg!(target_arch = "aarch64") { + "aarch64-apple-darwin" + } else { + "x86_64-apple-darwin" + } + } else if cfg!(target_os = "windows") { + "pc-windows-msvc" + } else { + panic!("Current os is not supported by solana") + }; + format!("solana-release-{target}") + }; + let solana_archive_name = format!("{solana_release_name}.tar.bz2"); + + Program::new("curl") + .arg("output", &solana_archive_name) + .flag("location") + .cmd(format!("https://github.com/solana-labs/solana/releases/download/v{SOLANA_CLI_VERSION}/{solana_archive_name}")) + .flag("silent")
+ .working_dir(solana_download_dir.as_ref().to_str().unwrap()) + .run() + .join(); + log!("Uncompressing solana release"); + + Program::new("tar") + .flag("extract") + .arg("file", &solana_archive_name) + .working_dir(solana_download_dir.as_ref().to_str().unwrap()) + .run() + .join(); + + fs::rename( + concat_path(&solana_download_dir, "solana-release"), + &solana_tools_dir, + ) + .expect("Failed to move solana-release dir"); + (concat_path(&solana_tools_dir, "bin"), solana_tools_dir) +} + +#[apply(as_task)] +pub fn build_solana_programs(solana_cli_tools_path: PathBuf) -> PathBuf { + let out_path = Path::new(SBF_OUT_PATH); + if out_path.exists() { + fs::remove_dir_all(out_path).expect("Failed to remove solana program deploy dir"); + } + fs::create_dir_all(out_path).expect("Failed to create solana program deploy dir"); + let out_path = out_path.canonicalize().unwrap(); + + Program::new("curl") + .arg("output", "spl.tar.gz") + .flag("location") + .cmd(SOLANA_PROGRAM_LIBRARY_ARCHIVE) + .flag("silent") + .working_dir(&out_path) + .run() + .join(); + log!("Uncompressing solana programs"); + + Program::new("tar") + .flag("extract") + .arg("file", "spl.tar.gz") + .working_dir(&out_path) + .run() + .join(); + log!("Remove temporary solana files"); + fs::remove_file(concat_path(&out_path, "spl.tar.gz")) + .expect("Failed to remove solana program archive"); + + let build_sbf = Program::new( + concat_path(&solana_cli_tools_path, "cargo-build-sbf") + .to_str() + .unwrap(), + ) + .env("PATH", updated_path(&solana_cli_tools_path)) + .env("SBF_OUT_PATH", out_path.to_str().unwrap()); + + // build our programs + for &path in SOLANA_HYPERLANE_PROGRAMS { + build_sbf + .clone() + .working_dir(concat_path("sealevel/programs", path)) + .run() + .join(); + } + log!("All hyperlane solana programs built successfully"); + out_path +} + +#[apply(as_task)] +pub fn start_solana_test_validator( + solana_cli_tools_path: PathBuf, + solana_programs_path: PathBuf, + ledger_dir: PathBuf, +) -> 
(PathBuf, AgentHandles) { + // init solana config + let solana_config = NamedTempFile::new().unwrap().into_temp_path(); + let solana_config_path = solana_config.to_path_buf(); + Program::new(concat_path(&solana_cli_tools_path, "solana")) + .arg("config", solana_config.to_str().unwrap()) + .cmd("config") + .cmd("set") + .arg("url", "localhost") + .run() + .join(); + + log!("Starting solana validator"); + let mut args = Program::new(concat_path(&solana_cli_tools_path, "solana-test-validator")) + .flag("quiet") + .flag("reset") + .arg("ledger", ledger_dir.to_str().unwrap()) + .arg3( + "account", + "E9VrvAdGRvCguN2XgXsgu9PNmMM3vZsU8LSUrM68j8ty", + SOLANA_DEPLOYER_ACCOUNT, + ) + .remember(solana_config); + for &(address, lib) in SOLANA_PROGRAMS { + args = args.arg3( + "bpf-program", + address, + concat_path(&solana_programs_path, lib).to_str().unwrap(), + ); + } + let validator = args.spawn("SOL"); + sleep(Duration::from_secs(5)); + + log!("Deploying the hyperlane programs to solana"); + let sealevel_client = sealevel_client(&solana_cli_tools_path, &solana_config_path); + + let sealevel_client_deploy_core = sealevel_client + .clone() + .arg("compute-budget", "200000") + .cmd("core") + .cmd("deploy") + .arg("environment", "local-e2e") + .arg("environments-dir", "sealevel/environments") + .arg("built-so-dir", SBF_OUT_PATH) + .flag("use-existing-keys"); + + sealevel_client_deploy_core + .clone() + .arg("local-domain", SOLANA_LOCAL_CHAIN_ID) + .arg("chain", "sealeveltest1") + .run() + .join(); + + sealevel_client_deploy_core + .arg("local-domain", SOLANA_REMOTE_CHAIN_ID) + .arg("chain", "sealeveltest2") + .run() + .join(); + + sealevel_client + .clone() + .arg("compute-budget", "200000") + .cmd("warp-route") + .cmd("deploy") + .arg("environment", "local-e2e") + .arg("environments-dir", "sealevel/environments") + .arg("built-so-dir", SBF_OUT_PATH) + .arg("warp-route-name", "testwarproute") + .arg( + "token-config-file", + 
"sealevel/environments/local-e2e/warp-routes/testwarproute/token-config.json", + ) + .arg( + "chain-config-file", + "sealevel/environments/local-e2e/warp-routes/chain-config.json", + ) + .arg("ata-payer-funding-amount", "1000000000") + .run() + .join(); + + log!("Initializing solana programs"); + sealevel_client + .clone() + .cmd("multisig-ism-message-id") + .cmd("set-validators-and-threshold") + .arg("domain", SOLANA_LOCAL_CHAIN_ID) + .arg("validators", "0x70997970c51812dc3a010c7d01b50e0d17dc79c8") + .arg("threshold", "1") + .arg("program-id", "4RSV6iyqW9X66Xq3RDCVsKJ7hMba5uv6XP8ttgxjVUB1") + .run() + .join(); + + sealevel_client + .cmd("validator-announce") + .cmd("announce") + .arg("validator", "0x70997970c51812dc3a010c7d01b50e0d17dc79c8") + .arg( + "storage-location", + format!("file://{SOLANA_CHECKPOINT_LOCATION}") + ) + .arg("signature", "0xcd87b715cd4c2e3448be9e34204cf16376a6ba6106e147a4965e26ea946dd2ab19598140bf26f1e9e599c23f6b661553c7d89e8db22b3609068c91eb7f0fa2f01b") + .run() + .join(); + + log!("Local Solana chain started and hyperlane programs deployed and initialized successfully"); + + (solana_config_path, validator) +} + +#[apply(as_task)] +pub fn initiate_solana_hyperlane_transfer( + solana_cli_tools_path: PathBuf, + solana_config_path: PathBuf, +) { + let sender = Program::new(concat_path(&solana_cli_tools_path, "solana")) + .arg("config", solana_config_path.to_str().unwrap()) + .arg("keypair", SOLANA_KEYPAIR) + .cmd("address") + .run_with_output() + .join() + .get(0) + .expect("failed to get sender address") + .trim() + .to_owned(); + + sealevel_client(&solana_cli_tools_path, &solana_config_path) + .cmd("token") + .cmd("transfer-remote") + .cmd(SOLANA_KEYPAIR) + .cmd("10000000000") + .cmd(SOLANA_REMOTE_CHAIN_ID) + .cmd(sender) // send to self + .cmd("native") + .arg("program-id", "CGn8yNtSD3aTTqJfYhUb6s1aVTN75NzwtsFKo1e83aga") + .run() + .join(); +} + +pub fn solana_termination_invariants_met( + solana_cli_tools_path: &Path, + solana_config_path: 
&Path, +) -> bool { + sealevel_client(solana_cli_tools_path, solana_config_path) + .cmd("mailbox") + .cmd("delivered") + .arg( + // this will break if any parts of `transfer-remote` change. + // This value was gotten by observing the relayer logs. + // TODO: get the actual message-id so we don't have to hardcode it + "message-id", + "0x7b8ba684e5ce44f898c5fa81785c83a00e32b5bef3412e648eb7a17bec497685", + ) + .arg("program-id", "9tCUWNjpqcf3NUSrtp7vquYVCwbEByvLjZUrhG5dgvhj") + .run_with_output() + .join() + .join("\n") + .contains("Message delivered") +} + +fn sealevel_client(solana_cli_tools_path: &Path, solana_config_path: &Path) -> Program { + Program::new(concat_path(AGENT_BIN_PATH, "hyperlane-sealevel-client")) + .env("PATH", updated_path(solana_cli_tools_path)) + .env("RUST_BACKTRACE", "1") + .arg("config", solana_config_path.to_str().unwrap()) + .arg("keypair", SOLANA_KEYPAIR) +} + +fn updated_path(solana_cli_tools_path: &Path) -> String { + format!( + "{}:{}", + solana_cli_tools_path + .canonicalize() + .expect("Failed to canonicalize solana cli tools path") + .to_str() + .unwrap(), + std::env::var("PATH").unwrap_or_default(), + ) +} diff --git a/rust/utils/run-locally/src/utils.rs b/rust/utils/run-locally/src/utils.rs index 39ac31ad5d..206b4bc699 100644 --- a/rust/utils/run-locally/src/utils.rs +++ b/rust/utils/run-locally/src/utils.rs @@ -1,20 +1,33 @@ -use std::fs::File; -use std::io::{BufRead, BufReader, BufWriter, Read, Write}; use std::path::{Path, PathBuf}; -use std::process::{Child, Stdio}; -use std::sync::atomic::{AtomicBool, Ordering}; -use std::sync::Arc; -use std::thread::{sleep, spawn, JoinHandle}; -use std::time::Duration; +use std::process::Child; +use std::thread::JoinHandle; use nix::libc::pid_t; use nix::sys::signal; use nix::sys::signal::Signal; use nix::unistd::Pid; -use crate::config::{Config, ProgramArgs}; use crate::logging::log; -use crate::{RUN_LOG_WATCHERS, SHUTDOWN}; + +/// Make a function run as a task by writing 
`#[apply(as_task)]`. This will spawn a new thread +/// and then return the result through a TaskHandle. +macro_rules! as_task { + ( + $(#[$fn_meta:meta])* + $fn_vis:vis fn $fn_name:ident( + $($arg_name:ident$(: $arg_type:ty)?),*$(,)? + ) $(-> $ret_type:ty)? $body:block + ) => { + $(#[$fn_meta])* + $fn_vis fn $fn_name($($arg_name$(: $arg_type)*),*) -> impl $crate::utils::TaskHandle { + $crate::utils::SimpleTaskHandle(::std::thread::spawn(move || $body)) + } + }; + (@handle $ret_type:ty) => {$ret_type}; + (@handle) => {()}; +} + +pub(crate) use as_task; pub fn make_static(s: String) -> &'static str { Box::leak(s.into_boxed_str()) @@ -27,62 +40,62 @@ pub fn concat_path(p1: impl AsRef, p2: impl AsRef) -> PathBuf { p } -pub type AgentHandles = (Child, TaskHandle<()>, TaskHandle<()>); +pub trait ArbitraryData: Send + Sync + 'static {} +impl ArbitraryData for T {} + +pub type AgentHandles = ( + // name + String, + // child process + Child, + // stdout + Box>, + // stderr + Box>, + // data to drop once program exits + Box, +); pub type LogFilter = fn(&str) -> bool; -pub fn run_agent(args: ProgramArgs, log_prefix: &'static str, config: &Config) -> AgentHandles { - let mut command = args.create_command(); - command.stdout(Stdio::piped()).stderr(Stdio::piped()); - - log!("Spawning {}...", &args); - let mut child = command - .spawn() - .unwrap_or_else(|e| panic!("Failed to start {:?} with error: {e}", &args)); - let stdout_path = concat_path(&config.log_dir, format!("{log_prefix}.stdout.log")); - let child_stdout = child.stdout.take().unwrap(); - let filter = args.get_filter(); - let log_all = config.log_all; - let stdout = spawn(move || { - if log_all { - prefix_log(child_stdout, log_prefix, &RUN_LOG_WATCHERS, filter) - } else { - inspect_and_write_to_file( - child_stdout, - stdout_path, - &["ERROR", "message successfully processed"], - ) - } - }); - let stderr_path = concat_path(&config.log_dir, format!("{log_prefix}.stderr.log")); - let child_stderr = 
child.stderr.take().unwrap(); - let stderr = spawn(move || { - if log_all { - prefix_log(child_stderr, log_prefix, &RUN_LOG_WATCHERS, filter) - } else { - inspect_and_write_to_file(child_stderr, stderr_path, &[]) - } - }); - (child, TaskHandle(stdout), TaskHandle(stderr)) +#[must_use] +pub trait TaskHandle: Send { + type Output; + + fn join(self) -> Self::Output; + fn join_box(self: Box) -> Self::Output; } /// Wrapper around a join handle to simplify use. #[must_use] -pub struct TaskHandle(pub JoinHandle); -impl TaskHandle { - pub fn join(self) -> T { +pub struct SimpleTaskHandle(pub JoinHandle); +impl TaskHandle for SimpleTaskHandle { + type Output = T; + + fn join(self) -> Self::Output { self.0.join().expect("Task thread panicked!") } + + fn join_box(self: Box) -> T { + self.join() + } } -pub fn build_cmd( - args: ProgramArgs, - log: impl AsRef, - log_all: bool, - assert_success: bool, -) -> TaskHandle<()> { - let log = log.as_ref().to_owned(); - let handle = spawn(move || build_cmd_task(args, log, log_all, assert_success)); - TaskHandle(handle) +#[must_use] +pub struct MappingTaskHandle, U, F: FnOnce(T) -> U>(pub H, pub F); +impl TaskHandle for MappingTaskHandle +where + H: TaskHandle, + F: Send + FnOnce(T) -> U, +{ + type Output = U; + + fn join(self) -> Self::Output { + (self.1)(self.0.join()) + } + + fn join_box(self: Box) -> U { + self.join() + } } /// Attempt to kindly signal a child to stop running, and kill it if that fails. @@ -99,133 +112,3 @@ pub fn stop_child(child: &mut Child) { } }; } - -/// Open a file in append mode, or create it if it does not exist. -fn append_to(p: impl AsRef) -> File { - File::options() - .create(true) - .append(true) - .open(p) - .expect("Failed to open file") -} - -/// Read from a process output and add a string to the front before writing it -/// to stdout. 
-fn prefix_log( - output: impl Read, - prefix: &str, - run_log_watcher: &AtomicBool, - filter: Option, -) { - let mut reader = BufReader::new(output).lines(); - loop { - if let Some(line) = reader.next() { - let line = match line { - Ok(l) => l, - Err(e) => { - // end of stream, probably - log!("Error reading from output for {}: {}", prefix, e); - break; - } - }; - if let Some(filter) = filter.as_ref() { - if !(filter)(&line) { - continue; - } - } - println!("<{prefix}> {line}"); - } else if run_log_watcher.load(Ordering::Relaxed) { - sleep(Duration::from_millis(10)); - } else { - break; - } - } -} - -/// Basically `tail -f file | grep ` but also has to write to the file -/// (writes to file all lines, not just what passes the filter). -fn inspect_and_write_to_file(output: impl Read, log: impl AsRef, filter_array: &[&str]) { - let mut writer = BufWriter::new(append_to(log)); - let mut reader = BufReader::new(output).lines(); - loop { - if let Some(line) = reader.next() { - let line = match line { - Ok(l) => l, - Err(e) => { - // end of stream, probably - log!("Error reading from output: {}", e); - break; - } - }; - - if filter_array.is_empty() { - println!("{line}") - } else { - for filter in filter_array { - if line.contains(filter) { - println!("{line}") - } - } - } - writeln!(writer, "{line}").unwrap(); - } else if RUN_LOG_WATCHERS.load(Ordering::Relaxed) { - sleep(Duration::from_millis(10)) - } else { - break; - } - } -} - -fn build_cmd_task(args: ProgramArgs, log: PathBuf, log_all: bool, assert_success: bool) { - let mut command = args.create_command(); - if log_all { - command.stdout(Stdio::piped()); - } else { - command.stdout(append_to(log)); - } - command.stderr(Stdio::piped()); - - log!("{:#}", &args); - let mut child = command - .spawn() - .unwrap_or_else(|e| panic!("Failed to start command `{}` with Error: {e}", &args)); - let filter = args.get_filter(); - let running = Arc::new(AtomicBool::new(true)); - let stdout = if log_all { - let stdout = 
child.stdout.take().unwrap(); - let name = args.get_bin_name(); - let running = running.clone(); - Some(spawn(move || prefix_log(stdout, &name, &running, filter))) - } else { - None - }; - let stderr = { - let stderr = child.stderr.take().unwrap(); - let name = args.get_bin_name(); - let running = running.clone(); - spawn(move || prefix_log(stderr, &name, &running, filter)) - }; - - let status = loop { - sleep(Duration::from_millis(500)); - - if let Some(exit_status) = child.try_wait().expect("Failed to run command") { - break exit_status; - } else if SHUTDOWN.load(Ordering::Relaxed) { - log!("Forcing termination of command `{}`", &args); - stop_child(&mut child); - break child.wait().expect("Failed to run command"); - } - }; - - running.store(false, Ordering::Relaxed); - if let Some(stdout) = stdout { - stdout.join().unwrap(); - } - stderr.join().unwrap(); - assert!( - !assert_success || !RUN_LOG_WATCHERS.load(Ordering::Relaxed) || status.success(), - "Command returned non-zero exit code: {:?}", - &args - ); -} diff --git a/rust/utils/sealevel-test.bash b/rust/utils/sealevel-test.bash deleted file mode 100755 index 8e84fd7055..0000000000 --- a/rust/utils/sealevel-test.bash +++ /dev/null @@ -1,166 +0,0 @@ -#!/usr/bin/env bash - -if [ -z $SOLAR_ECLIPSE_DIR ]; then - echo '$SOLAR_ECLIPSE_DIR must be set' -fi - -if [ -z $ECLIPSE_PROGRAM_LIBRARY_DIR ]; then - echo '$ECLIPSE_PROGRAM_LIBRARY_DIR must be set' -fi - -SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" -TEST_KEYS_DIR="${SCRIPT_DIR}/../config/sealevel/test-keys" -KEYPAIR="${TEST_KEYS_DIR}/test_deployer-keypair.json" -TARGET_DIR="${SCRIPT_DIR}/../target" -SEALEVEL_DIR="${SCRIPT_DIR}/../sealevel" -DEPLOY_DIR="${TARGET_DIR}/deploy" -BIN_DIR="${TARGET_DIR}/debug" -SPL_TOKEN="${ECLIPSE_PROGRAM_LIBRARY_DIR}/target/debug/spl-token" -CHAIN_ID="13375" -REMOTE_CHAIN_ID="13376" - -# Ensure that the solar-eclipse `solana` binary is used -alias solana="${SOLAR_ECLIPSE_DIR}/target/debug/solana" - -# first arg = path to .so file 
-# second arg = path to directory to build program in if the .so file doesn't exist -# third arg = whether to force build the program -build_program() { - if $3 || [ ! -e $1 ]; then - # .so file doesn't exist, build it - pushd "${2}" - cargo build-sbf - popd - fi -} - -# first arg = path to .so file -# second arg = path to directory to build program in if the .so file doesn't exist -build_and_copy_program() { - build_program $1 $2 $3 - - # essentially cp, but -u won't copy if the source is older than the destination. - # used as a workaround to prevent copying to the same destination as the source - rsync -u $1 $DEPLOY_DIR -} - -build_programs() { - local force_build="${1}" - - # token programs - build_program "${ECLIPSE_PROGRAM_LIBRARY_DIR}/target/deploy/spl_token.so" "${ECLIPSE_PROGRAM_LIBRARY_DIR}/token/program" "${force_build}" - build_program "${ECLIPSE_PROGRAM_LIBRARY_DIR}/target/deploy/spl_token_2022.so" "${ECLIPSE_PROGRAM_LIBRARY_DIR}/token/program-2022" "${force_build}" - build_program "${ECLIPSE_PROGRAM_LIBRARY_DIR}/target/deploy/spl_associated_token_account.so" "${ECLIPSE_PROGRAM_LIBRARY_DIR}/associated-token-account/program" "${force_build}" - - # noop - build_program "${ECLIPSE_PROGRAM_LIBRARY_DIR}/account-compression/target/deploy/spl_noop.so" "${ECLIPSE_PROGRAM_LIBRARY_DIR}/account-compression/programs/noop" "${force_build}" - - # hyperlane sealevel programs - build_and_copy_program "${TARGET_DIR}/deploy/hyperlane_sealevel_mailbox.so" "${SEALEVEL_DIR}/programs/mailbox" "${force_build}" - build_and_copy_program "${TARGET_DIR}/deploy/hyperlane_sealevel_validator_announce.so" "${SEALEVEL_DIR}/programs/validator-announce" "${force_build}" - build_and_copy_program "${TARGET_DIR}/deploy/hyperlane_sealevel_multisig_ism_message_id.so" "${SEALEVEL_DIR}/programs/ism/multisig-ism-message-id" "${force_build}" - build_and_copy_program "${TARGET_DIR}/deploy/hyperlane_sealevel_token.so" "${SEALEVEL_DIR}/programs/hyperlane-sealevel-token" "${force_build}" - 
build_and_copy_program "${TARGET_DIR}/deploy/hyperlane_sealevel_token_native.so" "${SEALEVEL_DIR}/programs/hyperlane-sealevel-token-native" "${force_build}" - build_and_copy_program "${TARGET_DIR}/deploy/hyperlane_sealevel_token_collateral.so" "${SEALEVEL_DIR}/programs/hyperlane-sealevel-token-collateral" "${force_build}" -} - -build_spl_token_cli() { - if [ ! -e $SPL_TOKEN ]; then - pushd "${ECLIPSE_PROGRAM_LIBRARY_DIR}/token/cli" - cargo build - popd - fi -} - -setup_multisig_ism_message_id() { - "${BIN_DIR}/hyperlane-sealevel-client" -k "${KEYPAIR}" multisig-ism-message-id set-validators-and-threshold --domain "${CHAIN_ID}" --validators 0x70997970c51812dc3a010c7d01b50e0d17dc79c8 --threshold 1 --program-id "4RSV6iyqW9X66Xq3RDCVsKJ7hMba5uv6XP8ttgxjVUB1" -} - -announce_validator() { - "${BIN_DIR}/hyperlane-sealevel-client" -k "${KEYPAIR}" validator-announce announce --validator 0x70997970c51812dc3a010c7d01b50e0d17dc79c8 --storage-location "file:///tmp/test_sealevel_checkpoints_0x70997970c51812dc3a010c7d01b50e0d17dc79c8" --signature "0xcd87b715cd4c2e3448be9e34204cf16376a6ba6106e147a4965e26ea946dd2ab19598140bf26f1e9e599c23f6b661553c7d89e8db22b3609068c91eb7f0fa2f01b" -} - -test_token() { - - setup_multisig_ism_message_id - - announce_validator - - "${BIN_DIR}/hyperlane-sealevel-client" -k "${KEYPAIR}" --compute-budget 200000 warp-route deploy --warp-route-name testwarproute --environment local-e2e --environments-dir "${SEALEVEL_DIR}/environments" --built-so-dir "${DEPLOY_DIR}" --token-config-file "${SEALEVEL_DIR}/environments/local-e2e/warp-routes/testwarproute/token-config.json" --chain-config-file "${SEALEVEL_DIR}/environments/local-e2e/warp-routes/chain-config.json" --ata-payer-funding-amount 1000000000 - - local token_type="" - local program_id="" - - local recipient_token_type="" - local recipient_program_id="" - - token_type="native" - program_id="CGn8yNtSD3aTTqJfYhUb6s1aVTN75NzwtsFKo1e83aga" - - recipient_token_type="synthetic" - 
recipient_program_id="3MzUPjP5LEkiHH82nEAe28Xtz9ztuMqWc8UmuKxrpVQH" - - local amount=10000000000 # lamports - - local -r sender_keypair="${KEYPAIR}" - local -r sender="$(solana -ul -k "${sender_keypair}" address)" - local -r recipient="${sender}" - - local -r sender_balance="$(solana -ul balance "${sender}" | cut -d ' ' -f 1)" - local -r amount_float="$(python -c "print(${amount} / 1000000000)")" - if (( $(bc -l <<< "${sender_balance} < ${amount_float}") )); then - echo "Insufficient sender funds" - exit 1 - fi - - solana -ul balance "${sender}" - - # Transfer the lamports - "${BIN_DIR}/hyperlane-sealevel-client" \ - -k "${KEYPAIR}" \ - token transfer-remote "${sender_keypair}" "${amount}" "${REMOTE_CHAIN_ID}" "${recipient}" "${token_type}" --program-id "${program_id}" - - # Wait for token transfer message to appear in the destination Mailbox. - # This ID was manually gotten from running the Relayer and observing the logs - fragile, I know! - while "${BIN_DIR}/hyperlane-sealevel-client" -k "${KEYPAIR}" mailbox delivered --message-id 0x7b8ba684e5ce44f898c5fa81785c83a00e32b5bef3412e648eb7a17bec497685 --program-id "9tCUWNjpqcf3NUSrtp7vquYVCwbEByvLjZUrhG5dgvhj" | grep -q 'Message not delivered' - do - sleep 3 - done - - solana -ul balance "${recipient}" - - "${BIN_DIR}/hyperlane-sealevel-client" -k "${KEYPAIR}" mailbox query - "${BIN_DIR}/hyperlane-sealevel-client" -k "${KEYPAIR}" token query "${token_type}" --program-id "${program_id}" -} - -main() { - if [ "${1}" = "build-only" ]; then - build_programs true - exit 0 - fi - - # build the client - pushd "${SCRIPT_DIR}/../sealevel/client" - cargo build - popd - - # build all the required sealevel programs - if [ "${1}" = "force-build-programs" ]; then - build_programs true - else - build_programs false - fi - - # build the SPL token CLI - build_spl_token_cli - - "${BIN_DIR}/hyperlane-sealevel-client" --compute-budget 200000 -k "${KEYPAIR}" core deploy --local-domain "${CHAIN_ID}" --environment local-e2e 
--use-existing-keys --environments-dir "${SEALEVEL_DIR}/environments" --built-so-dir "${DEPLOY_DIR}" --chain sealeveltest1 - "${BIN_DIR}/hyperlane-sealevel-client" --compute-budget 200000 -k "${KEYPAIR}" core deploy --local-domain "${REMOTE_CHAIN_ID}" --environment local-e2e --use-existing-keys --environments-dir "${SEALEVEL_DIR}/environments" --built-so-dir "${DEPLOY_DIR}" --chain sealeveltest2 - - test_token true -} - -if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then - set -ex - main "$@" -fi