From 280ba959c624ddef3975a44ece154f9d3e0718a8 Mon Sep 17 00:00:00 2001 From: Evan Gray Date: Thu, 29 Feb 2024 09:38:01 -0500 Subject: [PATCH] ntt-accountant: ntt crate bump and transceiver rename --- cosmwasm/Cargo.lock | 2 +- .../ntt-global-accountant/Cargo.toml | 2 +- .../contracts/ntt-global-accountant/README.md | 30 +-- .../ntt-global-accountant/src/contract.rs | 140 +++++++----- .../ntt-global-accountant/src/msg.rs | 18 +- .../ntt-global-accountant/src/state.rs | 11 +- .../ntt-global-accountant/src/structs/mod.rs | 3 - .../ntt-global-accountant/src/structs/ntt.rs | 202 ------------------ .../tools/__tests__/test_ntt_accountant.ts | 185 ++++++++-------- 9 files changed, 209 insertions(+), 384 deletions(-) delete mode 100644 cosmwasm/contracts/ntt-global-accountant/src/structs/ntt.rs diff --git a/cosmwasm/Cargo.lock b/cosmwasm/Cargo.lock index 382b1a32f4..23171bef25 100644 --- a/cosmwasm/Cargo.lock +++ b/cosmwasm/Cargo.lock @@ -1290,7 +1290,7 @@ dependencies = [ [[package]] name = "ntt-messages" version = "0.1.0" -source = "git+https://github.com/wormhole-foundation/example-native-token-transfers.git?rev=fdd4f90fc12275b6aafd2ff3b9cc78ca0fb7a464#fdd4f90fc12275b6aafd2ff3b9cc78ca0fb7a464" +source = "git+https://github.com/wormhole-foundation/example-native-token-transfers.git?rev=22bde0c7d8139675582d861dc8245eb1912324fa#22bde0c7d8139675582d861dc8245eb1912324fa" dependencies = [ "wormhole-io", ] diff --git a/cosmwasm/contracts/ntt-global-accountant/Cargo.toml b/cosmwasm/contracts/ntt-global-accountant/Cargo.toml index 11b319006d..01ab40014b 100644 --- a/cosmwasm/contracts/ntt-global-accountant/Cargo.toml +++ b/cosmwasm/contracts/ntt-global-accountant/Cargo.toml @@ -23,7 +23,7 @@ cw-storage-plus = "0.13.2" cw_transcode = "0.1.0" cw2 = "0.13.2" hex = { version = "0.4.3", features = ["serde"] } -ntt-messages = { git = "https://github.com/wormhole-foundation/example-native-token-transfers.git", rev = "fdd4f90fc12275b6aafd2ff3b9cc78ca0fb7a464" } +ntt-messages = { git = "https://github.com/wormhole-foundation/example-native-token-transfers.git", rev = "22bde0c7d8139675582d861dc8245eb1912324fa" } schemars = "0.8.8" serde = { version = "1.0.137", default-features = false, features = ["derive"] } serde_wormhole = "0.1.0" diff --git a/cosmwasm/contracts/ntt-global-accountant/README.md b/cosmwasm/contracts/ntt-global-accountant/README.md index c98addf7aa..fd46803c94 100644 --- a/cosmwasm/contracts/ntt-global-accountant/README.md +++ b/cosmwasm/contracts/ntt-global-accountant/README.md @@ -7,34 +7,34 @@ This is made possible by the following modifications: - Keep a list of [Standard Relayer](https://docs.wormhole.com/wormhole/explore-wormhole/relayer#standard-relayers) emitters. - These registrations are approved by Wormhole Governance akin to the Token Bridge, so this is a 1-1 replacement. - Extract the message sender and payload from either a [Core Bridge message](../../../whitepapers/0001_generic_message_passing.md) or a Standard Relayer payload. - - NTTs can either emit messages via their endpoint directly or via Standard Relayers. + - NTTs can either emit messages via their transceiver directly or via Standard Relayers. - This should be done in the accountant contract (as opposed to pre-processed by the guardian) in order to support backfilling signed VAAs. -- Keep a map of registered NTT endpoints and their locking hub. - - This contract must be able to permissionlessly determine what constitutes a valid transfer between two NTT endpoints. 
In order to determine that, the contract needs to have a record of both the sending and receiving endpoints' registrations. - - An NTT endpoint emits a VAA when... +- Keep a map of registered NTT transceivers and their locking hub. + - This contract must be able to permissionlessly determine what constitutes a valid transfer between two NTT transceivers. In order to determine that, the contract needs to have a record of both the sending and receiving transceivers' registrations. + - An NTT transceiver emits a VAA when... - it is initialized, which includes if it's associated manager is in `locking` or `burning` mode. - - a new endpoint is registered with it. + - a new transceiver is registered with it. - These VAAs can then be relayed to the NTT Global Accountant, verified, parsed, and stored into a map. - These maps are a one-way lookup of... - - Endpoint hubs `[chainA, emitter address] -> [chainB, foreign endpoint emitter address]` - - Endpoint peers `[chainA, emitter address, chainB] -> [foreign endpoint emitter address]` + - Transceiver hubs `[chainA, emitter address] -> [chainB, foreign transceiver emitter address]` + - Transceiver peers `[chainA, emitter address, chainB] -> [foreign transceiver emitter address]` - Update the logic for handling an observation or NTT transfer VAA. Instead of checking the token bridge emitter: - If the core message was from a known Standard Relayer emitter, use the sender as the emitter, otherwise use the core message emitter. - If the emitter (sender) does not have a key in the maps, return. - - If the emitter's foreign endpoint (known receiver) does not match the target recipient, return. - - If the foreign endpoint (receiver) does not have a registration for the emitter (sender) - i.e. they are not cross-registered, return. -- Use `` in place of `` for the `Account::key` to track each network of endpoints separately. This requires a 1:1 endpoint:token mapping. + - If the emitter's foreign transceiver (known receiver) does not match the target recipient, return. + - If the foreign transceiver (receiver) does not have a registration for the emitter (sender) - i.e. they are not cross-registered, return. +- Use `` in place of `` for the `Account::key` to track each network of transceivers separately. This requires a 1:1 transceiver:token mapping. The guardians will have a new allow list of NTTs and will be expected to submit observations for the allow-listed NTTs to the NTT global accountant contract on Wormhole Gateway. ## Caveats 1. The NTT Global Accountant can only account for NTTs which are in `lock` mode. In `burn` mode, the tokens have a potentially unlimited supply on every chain or other legitimate methods of minting which cannot be known to the accountant. -1. The NTT Global Accountant expects each endpoint to be associated with exactly 1 token. +1. The NTT Global Accountant expects each transceiver to be associated with exactly 1 token. 1. In order to avoid backfilling (the process of relaying VAAs emitted prior to the enforcement of Global Accountant on a network of NTTs), these initial steps should be completed, in order, before making any transfers. - 1. Initialize the NTT endpoints. - 1. Add the NTT endpoints' emitters to the Guardian's allow list. - 1. Cross-register the NTT endpoints on-chain, emitting the VAAs. + 1. Initialize the NTT transceivers. + 1. Add the NTT transceivers' emitters to the Guardian's allow list. + 1. Cross-register the NTT transceivers on-chain, emitting the VAAs. 1. 
Submit the locking mode initialization VAA to the NTT accountant contract. - 1. Submit the burn endpoints' locking hub registration VAA to the NTT accountant contract. + 1. Submit the burn transceivers' locking hub registration VAA to the NTT accountant contract. 1. Submit the remaining registration VAAs to the NTT accountant. diff --git a/cosmwasm/contracts/ntt-global-accountant/src/contract.rs b/cosmwasm/contracts/ntt-global-accountant/src/contract.rs index de2550ec16..0435c97d6f 100644 --- a/cosmwasm/contracts/ntt-global-accountant/src/contract.rs +++ b/cosmwasm/contracts/ntt-global-accountant/src/contract.rs @@ -15,7 +15,13 @@ use cosmwasm_std::{ use cw2::set_contract_version; use cw_storage_plus::{Bound, KeyDeserialize}; use ntt_messages::{ - endpoint::EndpointMessage, endpoints::wormhole::WormholeEndpoint, ntt::NativeTokenTransfer, + mode::Mode, + ntt::NativeTokenTransfer, + transceiver::{Transceiver, TransceiverMessage}, + transceivers::wormhole::{ + WormholeTransceiver, WormholeTransceiverInfo, WormholeTransceiverRegistration, + }, + trimmed_amount::TrimmedAmount, }; use serde_wormhole::RawMessage; use tinyvec::{Array, TinyVec}; @@ -31,18 +37,18 @@ use crate::{ bail, error::{AnyError, ContractError}, msg::{ - AllAccountsResponse, AllEndpointHubsResponse, AllEndpointPeersResponse, - AllModificationsResponse, AllPendingTransfersResponse, AllTransfersResponse, + AllAccountsResponse, AllModificationsResponse, AllPendingTransfersResponse, + AllTransceiverHubsResponse, AllTransceiverPeersResponse, AllTransfersResponse, BatchTransferStatusResponse, ExecuteMsg, MigrateMsg, MissingObservation, MissingObservationsResponse, Observation, ObservationError, ObservationStatus, QueryMsg, RelayerChainRegistrationResponse, SubmitObservationResponse, TransferDetails, TransferStatus, SUBMITTED_OBSERVATIONS_PREFIX, }, state::{ - Data, EndpointHub, EndpointPeer, PendingTransfer, DIGESTS, ENDPOINT_PEER, ENDPOINT_TO_HUB, - PENDING_TRANSFERS, RELAYER_CHAIN_REGISTRATIONS, + Data, PendingTransfer, TransceiverHub, TransceiverPeer, DIGESTS, PENDING_TRANSFERS, + RELAYER_CHAIN_REGISTRATIONS, TRANSCEIVER_PEER, TRANSCEIVER_TO_HUB, }, - structs::{DeliveryInstruction, EndpointInit, EndpointRegister, EndpointTransfer, ManagerMode}, + structs::DeliveryInstruction, }; // version info for migration info @@ -88,6 +94,20 @@ pub fn execute( } } +fn normalize_transfer_amount(trimmed_amount: TrimmedAmount) -> Uint256 { + let to_decimals = 8; + let from_decimals = trimmed_amount.decimals; + let amount = Uint256::from(trimmed_amount.amount); + if from_decimals == to_decimals { + return amount; + } + if from_decimals > to_decimals { + amount / Uint256::from(10u64).pow((from_decimals - to_decimals).into()) + } else { + amount * Uint256::from(10u64).pow((to_decimals - from_decimals).into()) + } +} + fn submit_observations( mut deps: DepsMut, info: MessageInfo, @@ -183,24 +203,25 @@ fn handle_observation( (o.emitter_address.into(), o.payload.0) }; - let hub_key = ENDPOINT_TO_HUB.key((o.emitter_chain, sender)); + let hub_key = TRANSCEIVER_TO_HUB.key((o.emitter_chain, sender)); let hub = hub_key .may_load(deps.storage) .context("failed to load hub")? 
.ok_or(ContractError::MissingHubRegistration)?; - let message: EndpointMessage = + let message: TransceiverMessage = TypePrefixedPayload::read_payload(&mut payload.as_slice()) .context("failed to parse observation payload")?; - let destination_chain = message.manager_payload.payload.to_chain.id; - let source_peer_key = ENDPOINT_PEER.key((o.emitter_chain, sender, destination_chain)); + let destination_chain = message.ntt_manager_payload.payload.to_chain.id; + let source_peer_key = TRANSCEIVER_PEER.key((o.emitter_chain, sender, destination_chain)); let source_peer = source_peer_key .may_load(deps.storage) .context("failed to load source peer")? .ok_or_else(|| ContractError::MissingSourcePeerRegistration(destination_chain.into()))?; - let destination_peer_key = ENDPOINT_PEER.key((destination_chain, source_peer, o.emitter_chain)); + let destination_peer_key = + TRANSCEIVER_PEER.key((destination_chain, source_peer, o.emitter_chain)); let destination_peer = destination_peer_key .may_load(deps.storage) .context("failed to load destination peer")? @@ -263,10 +284,10 @@ fn handle_observation( // i.e. the same token belonging to the same locking hub, can have message sourced from one chain that uses 4 decimals "normalized", // and another that uses 8... this is maxed at 8, but should be actually normalized to 8 for accounting purposes. let tx_data = transfer::Data { - amount: Uint256::from(message.manager_payload.payload.amount.denormalize(8)), + amount: normalize_transfer_amount(message.ntt_manager_payload.payload.amount), token_address: hub.1, token_chain: hub.0, - recipient_chain: message.manager_payload.payload.to_chain.id, + recipient_chain: message.ntt_manager_payload.payload.to_chain.id, }; accountant::commit_transfer( @@ -495,21 +516,21 @@ fn handle_ntt_vaa( } let prefix = &payload[..4]; - if prefix == EndpointTransfer::PREFIX { + if prefix == WormholeTransceiver::PREFIX { let source_chain = body.emitter_chain.into(); - let hub_key = ENDPOINT_TO_HUB.key((source_chain, sender)); + let hub_key = TRANSCEIVER_TO_HUB.key((source_chain, sender)); let hub = hub_key .may_load(deps.storage) .context("failed to load hub")? .ok_or(ContractError::MissingHubRegistration)?; - let message: EndpointMessage = + let message: TransceiverMessage = TypePrefixedPayload::read_payload(&mut payload.as_slice()) .context("failed to parse NTT transfer payload")?; - let destination_chain = message.manager_payload.payload.to_chain.id; - let source_peer_key = ENDPOINT_PEER.key((source_chain, sender, destination_chain)); + let destination_chain = message.ntt_manager_payload.payload.to_chain.id; + let source_peer_key = TRANSCEIVER_PEER.key((source_chain, sender, destination_chain)); let source_peer = source_peer_key .may_load(deps.storage) .context("failed to load source peer")? @@ -517,7 +538,7 @@ fn handle_ntt_vaa( ContractError::MissingSourcePeerRegistration(destination_chain.into()) })?; let destination_peer_key = - ENDPOINT_PEER.key((destination_chain, source_peer, source_chain)); + TRANSCEIVER_PEER.key((destination_chain, source_peer, source_chain)); let destination_peer = destination_peer_key .may_load(deps.storage) .context("failed to load destination peer")? @@ -535,10 +556,10 @@ fn handle_ntt_vaa( // i.e. the same token belonging to the same locking hub, can have message sourced from one chain that uses 4 decimals "normalized", // and another that uses 8... this is maxed at 8, but should be actually normalized to 8 for accounting purposes. 
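        // For example (illustrative values): with normalize_transfer_amount above,
        // a TrimmedAmount of { amount: 1, decimals: 6 } is accounted as 100
        // (1 * 10^(8 - 6)), while { amount: 12_345, decimals: 9 } is accounted as
        // 1_234 (12_345 / 10^(9 - 8), integer division); amounts already trimmed
        // to 8 decimals pass through unchanged.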
let data = transfer::Data { - amount: Uint256::from(message.manager_payload.payload.amount.denormalize(8)), + amount: normalize_transfer_amount(message.ntt_manager_payload.payload.amount), token_address: hub.1, token_chain: hub.0, - recipient_chain: message.manager_payload.payload.to_chain.id, + recipient_chain: message.ntt_manager_payload.payload.to_chain.id, }; let key = transfer::Key::new( @@ -557,12 +578,15 @@ fn handle_ntt_vaa( PENDING_TRANSFERS.remove(deps.storage, key); Ok(evt) - } else if prefix == EndpointInit::PREFIX { + } else if prefix == WormholeTransceiver::INFO_PREFIX { // only process init messages for locking hubs, setting their hub mapping to themselves - let message = EndpointInit::deserialize(&payload)?; - if message.manager_mode == (ManagerMode::LOCKING as u8) { + let message: WormholeTransceiverInfo = + TypePrefixedPayload::read_payload(&mut payload.as_slice()) + .context("failed to parse NTT info payload")?; + if message.manager_mode == (Mode::Burning) { + // TODO: this enum is backwards in the crate let chain = body.emitter_chain.into(); - let hub_key = ENDPOINT_TO_HUB.key((chain, sender)); + let hub_key = TRANSCEIVER_TO_HUB.key((chain, sender)); if hub_key .may_load(deps.storage) @@ -580,13 +604,15 @@ fn handle_ntt_vaa( } else { bail!("ignoring non-locking NTT initialization") } - } else if prefix == EndpointRegister::PREFIX { - // for ease of code assurances, all endpoints should register their hub first, followed by other peers + } else if prefix == WormholeTransceiver::PEER_INFO_PREFIX { + // for ease of code assurances, all transceivers should register their hub first, followed by other peers // this code will only add peers for which it can assure their hubs match so one less key can be loaded on transfers - let message = EndpointRegister::deserialize(&payload)?; + let message: WormholeTransceiverRegistration = + TypePrefixedPayload::read_payload(&mut payload.as_slice()) + .context("failed to parse NTT registration payload")?; let peer_hub_key = - ENDPOINT_TO_HUB.key((message.endpoint_chain_id, message.endpoint_address.into())); + TRANSCEIVER_TO_HUB.key((message.chain_id.id, message.transceiver_address.into())); let peer_hub = peer_hub_key .may_load(deps.storage) @@ -594,7 +620,7 @@ fn handle_ntt_vaa( .ok_or(ContractError::MissingHubRegistration)?; let chain = body.emitter_chain.into(); - let peer_key = ENDPOINT_PEER.key((chain, sender, message.endpoint_chain_id)); + let peer_key = TRANSCEIVER_PEER.key((chain, sender, message.chain_id.id)); if peer_key .may_load(deps.storage) @@ -604,22 +630,21 @@ fn handle_ntt_vaa( bail!("peer entry for this chain already exists") } - let hub_key = ENDPOINT_TO_HUB.key((chain, sender)); + let hub_key = TRANSCEIVER_TO_HUB.key((chain, sender)); - if let Some(endpoint_hub) = hub_key + if let Some(transceiver_hub) = hub_key .may_load(deps.storage) .context("failed to load hub")? 
{ // hubs must match - if endpoint_hub != peer_hub { + if transceiver_hub != peer_hub { bail!("peer hub does not match") } } else { - // this endpoint does not have a known hub, check if this peer is a hub themselves - if peer_hub.0 == message.endpoint_chain_id - && peer_hub.1 == message.endpoint_address.into() + // this transceiver does not have a known hub, check if this peer is a hub themselves + if peer_hub.0 == message.chain_id.id && peer_hub.1 == message.transceiver_address.into() { - // this peer is a hub, so set it as this endpoint's hub + // this peer is a hub, so set it as this transceiver's hub hub_key .save(deps.storage, &peer_hub.clone()) .context("failed to save hub")?; @@ -630,13 +655,16 @@ fn handle_ntt_vaa( } peer_key - .save(deps.storage, &(message.endpoint_address.into())) + .save(deps.storage, &(message.transceiver_address.into())) .context("failed to save hub")?; Ok(Event::new("RegisterPeer") .add_attribute("chain", chain.to_string()) .add_attribute("emitter_address", hex::encode(sender)) - .add_attribute("endpoint_chain", message.endpoint_chain_id.to_string()) - .add_attribute("endpoint_address", hex::encode(message.endpoint_address))) + .add_attribute("transceiver_chain", message.chain_id.id.to_string()) + .add_attribute( + "transceiver_address", + hex::encode(message.transceiver_address), + )) } else { bail!("unsupported NTT action") } @@ -671,11 +699,11 @@ pub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult { query_relayer_chain_registration(deps, chain).and_then(|resp| to_binary(&resp)) } - QueryMsg::AllEndpointHubs { start_after, limit } => { - query_all_endpoint_hubs(deps, start_after, limit).and_then(|resp| to_binary(&resp)) + QueryMsg::AllTransceiverHubs { start_after, limit } => { + query_all_transceiver_hubs(deps, start_after, limit).and_then(|resp| to_binary(&resp)) } - QueryMsg::AllEndpointPeers { start_after, limit } => { - query_all_endpoint_peers(deps, start_after, limit).and_then(|resp| to_binary(&resp)) + QueryMsg::AllTransceiverPeers { start_after, limit } => { + query_all_transceiver_peers(deps, start_after, limit).and_then(|resp| to_binary(&resp)) } QueryMsg::MissingObservations { guardian_set, @@ -826,16 +854,16 @@ fn query_relayer_chain_registration( .map(|address| RelayerChainRegistrationResponse { address }) } -fn query_all_endpoint_hubs( +fn query_all_transceiver_hubs( deps: Deps, start_after: Option<(u16, TokenAddress)>, limit: Option, -) -> StdResult { +) -> StdResult { let start = start_after.map(|key| Bound::Exclusive((key, PhantomData))); - let iter = ENDPOINT_TO_HUB + let iter = TRANSCEIVER_TO_HUB .range(deps.storage, start, None, Order::Ascending) - .map(|item| item.map(|(key, data)| EndpointHub { key, data })); + .map(|item| item.map(|(key, data)| TransceiverHub { key, data })); if let Some(lim) = limit { let l = lim @@ -843,23 +871,23 @@ fn query_all_endpoint_hubs( .map_err(|_| ConversionOverflowError::new("u32", "usize", lim.to_string()))?; iter.take(l) .collect::>>() - .map(|hubs| AllEndpointHubsResponse { hubs }) + .map(|hubs| AllTransceiverHubsResponse { hubs }) } else { iter.collect::>>() - .map(|hubs| AllEndpointHubsResponse { hubs }) + .map(|hubs| AllTransceiverHubsResponse { hubs }) } } -fn query_all_endpoint_peers( +fn query_all_transceiver_peers( deps: Deps, start_after: Option<(u16, TokenAddress, u16)>, limit: Option, -) -> StdResult { +) -> StdResult { let start = start_after.map(|key| Bound::Exclusive((key, PhantomData))); - let iter = ENDPOINT_PEER + let iter = TRANSCEIVER_PEER .range(deps.storage, start, 
None, Order::Ascending) - .map(|item| item.map(|(key, data)| EndpointPeer { key, data })); + .map(|item| item.map(|(key, data)| TransceiverPeer { key, data })); if let Some(lim) = limit { let l = lim @@ -867,10 +895,10 @@ fn query_all_endpoint_peers( .map_err(|_| ConversionOverflowError::new("u32", "usize", lim.to_string()))?; iter.take(l) .collect::>>() - .map(|peers| AllEndpointPeersResponse { peers }) + .map(|peers| AllTransceiverPeersResponse { peers }) } else { iter.collect::>>() - .map(|peers| AllEndpointPeersResponse { peers }) + .map(|peers| AllTransceiverPeersResponse { peers }) } } diff --git a/cosmwasm/contracts/ntt-global-accountant/src/msg.rs b/cosmwasm/contracts/ntt-global-accountant/src/msg.rs index 5ee5a53d38..1baaf06eef 100644 --- a/cosmwasm/contracts/ntt-global-accountant/src/msg.rs +++ b/cosmwasm/contracts/ntt-global-accountant/src/msg.rs @@ -7,7 +7,7 @@ use wormhole_sdk::{ Address, }; -use crate::state::{self, EndpointHub, EndpointPeer, PendingTransfer}; +use crate::state::{self, PendingTransfer, TransceiverHub, TransceiverPeer}; pub const SUBMITTED_OBSERVATIONS_PREFIX: &[u8; 35] = b"ntt_acct_sub_obsfig_00000000000000|"; @@ -144,13 +144,13 @@ pub enum QueryMsg { ValidateTransfer { transfer: Transfer }, #[returns(RelayerChainRegistrationResponse)] RelayerChainRegistration { chain: u16 }, - #[returns(AllEndpointHubsResponse)] - AllEndpointHubs { + #[returns(AllTransceiverHubsResponse)] + AllTransceiverHubs { start_after: Option<(u16, TokenAddress)>, limit: Option, }, - #[returns(AllEndpointPeersResponse)] - AllEndpointPeers { + #[returns(AllTransceiverPeersResponse)] + AllTransceiverPeers { start_after: Option<(u16, TokenAddress, u16)>, limit: Option, }, @@ -189,13 +189,13 @@ pub struct RelayerChainRegistrationResponse { } #[cw_serde] -pub struct AllEndpointHubsResponse { - pub hubs: Vec, +pub struct AllTransceiverHubsResponse { + pub hubs: Vec, } #[cw_serde] -pub struct AllEndpointPeersResponse { - pub peers: Vec, +pub struct AllTransceiverPeersResponse { + pub peers: Vec, } #[cw_serde] diff --git a/cosmwasm/contracts/ntt-global-accountant/src/state.rs b/cosmwasm/contracts/ntt-global-accountant/src/state.rs index e294dee644..fc312e3249 100644 --- a/cosmwasm/contracts/ntt-global-accountant/src/state.rs +++ b/cosmwasm/contracts/ntt-global-accountant/src/state.rs @@ -6,19 +6,20 @@ use tinyvec::TinyVec; pub const PENDING_TRANSFERS: Map> = Map::new("pending_transfers"); pub const RELAYER_CHAIN_REGISTRATIONS: Map = Map::new("relayer_chain_registrations"); -pub const ENDPOINT_TO_HUB: Map<(u16, TokenAddress), (u16, TokenAddress)> = - Map::new("endpoint_to_hub"); -pub const ENDPOINT_PEER: Map<(u16, TokenAddress, u16), TokenAddress> = Map::new("endpoint_peers"); +pub const TRANSCEIVER_TO_HUB: Map<(u16, TokenAddress), (u16, TokenAddress)> = + Map::new("transceiver_to_hub"); +pub const TRANSCEIVER_PEER: Map<(u16, TokenAddress, u16), TokenAddress> = + Map::new("transceiver_peers"); pub const DIGESTS: Map<(u16, Vec, u64), Binary> = Map::new("digests"); #[cw_serde] -pub struct EndpointHub { +pub struct TransceiverHub { pub key: (u16, TokenAddress), pub data: (u16, TokenAddress), } #[cw_serde] -pub struct EndpointPeer { +pub struct TransceiverPeer { pub key: (u16, TokenAddress, u16), pub data: TokenAddress, } diff --git a/cosmwasm/contracts/ntt-global-accountant/src/structs/mod.rs b/cosmwasm/contracts/ntt-global-accountant/src/structs/mod.rs index 3eb25de3ba..fa81fb7146 100644 --- a/cosmwasm/contracts/ntt-global-accountant/src/structs/mod.rs +++ 
b/cosmwasm/contracts/ntt-global-accountant/src/structs/mod.rs @@ -1,5 +1,2 @@ -mod ntt; -pub use ntt::*; - mod relayer; pub use relayer::*; diff --git a/cosmwasm/contracts/ntt-global-accountant/src/structs/ntt.rs b/cosmwasm/contracts/ntt-global-accountant/src/structs/ntt.rs deleted file mode 100644 index a4518a03e5..0000000000 --- a/cosmwasm/contracts/ntt-global-accountant/src/structs/ntt.rs +++ /dev/null @@ -1,202 +0,0 @@ -use byteorder::{BigEndian, ReadBytesExt}; -use std::io::{Cursor, Read}; - -// akin to https://github.com/wormhole-foundation/example-native-token-transfers/blob/main/evm/src/libraries/EndpointStructs.sol -// should probably be covered in the `ntt-messages` crate - -pub enum ManagerMode { - LOCKING = 0, - BURNING = 1, -} - -pub struct EndpointTransfer {} - -impl EndpointTransfer { - pub const PREFIX: [u8; 4] = [0x99, 0x45, 0xFF, 0x10]; // 0x99'E''W''H' -} - -pub struct EndpointInit { - pub manager_address: [u8; 32], - pub manager_mode: u8, - pub token_address: [u8; 32], - pub token_decimals: u8, -} - -impl EndpointInit { - pub const PREFIX: [u8; 4] = [0xc8, 0x3e, 0x3d, 0x2e]; // bytes4(keccak256("WormholeEndpointInit")) - - pub fn deserialize(data: &[u8]) -> std::result::Result { - let mut rdr = Cursor::new(data); - Self::deserialize_from_reader(&mut rdr) - } - - pub fn deserialize_from_reader( - rdr: &mut Cursor<&[u8]>, - ) -> std::result::Result { - let mut endpoint_identifier = [0u8; 4]; - rdr.read_exact(&mut endpoint_identifier)?; - if endpoint_identifier != Self::PREFIX { - return Err(std::io::Error::new( - std::io::ErrorKind::InvalidData, - "PayloadMismatch", - )); - } - - let mut manager_address = [0u8; 32]; - rdr.read_exact(&mut manager_address)?; - - let manager_mode = rdr.read_u8()?; - - let mut token_address = [0u8; 32]; - rdr.read_exact(&mut token_address)?; - - let token_decimals = rdr.read_u8()?; - - if rdr.position() != rdr.get_ref().len() as u64 { - return Err(std::io::Error::new( - std::io::ErrorKind::InvalidData, - "InvalidPayloadLength", - )); - } - - Ok(EndpointInit { - manager_address, - manager_mode, - token_address, - token_decimals, - }) - } -} - -pub struct EndpointRegister { - pub endpoint_chain_id: u16, - pub endpoint_address: [u8; 32], -} - -impl EndpointRegister { - pub const PREFIX: [u8; 4] = [0xd0, 0xd2, 0x92, 0xf1]; // bytes4(keccak256("WormholeSiblingRegistration")) - - pub fn deserialize(data: &[u8]) -> std::result::Result { - let mut rdr = Cursor::new(data); - Self::deserialize_from_reader(&mut rdr) - } - - pub fn deserialize_from_reader( - rdr: &mut Cursor<&[u8]>, - ) -> std::result::Result { - let mut endpoint_identifier = [0u8; 4]; - rdr.read_exact(&mut endpoint_identifier)?; - if endpoint_identifier != Self::PREFIX { - return Err(std::io::Error::new( - std::io::ErrorKind::InvalidData, - "PayloadMismatch", - )); - } - - let endpoint_chain_id = rdr.read_u16::()?; - - let mut endpoint_address = [0u8; 32]; - rdr.read_exact(&mut endpoint_address)?; - - if rdr.position() != rdr.get_ref().len() as u64 { - return Err(std::io::Error::new( - std::io::ErrorKind::InvalidData, - "InvalidPayloadLength", - )); - } - - Ok(EndpointRegister { - endpoint_chain_id, - endpoint_address, - }) - } -} - -#[cfg(test)] -mod test { - use super::*; - - #[test] - pub fn lock_init() { - // c83e3d2e000000000000000000000000bb807f76cda53b1b4256e1b6f33bb46be36508e3000000000000000000000000002a68f967bfa230780a385175d0c86ae4048d309612 - let payload = [ - 0xc8, 0x3e, 0x3d, 0x2e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0xbb, 0x80, 0x7f, 
0x76, 0xcd, 0xa5, 0x3b, 0x1b, 0x42, 0x56, 0xe1, 0xb6, - 0xf3, 0x3b, 0xb4, 0x6b, 0xe3, 0x65, 0x08, 0xe3, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2a, 0x68, 0xf9, 0x67, 0xbf, 0xa2, 0x30, - 0x78, 0x0a, 0x38, 0x51, 0x75, 0xd0, 0xc8, 0x6a, 0xe4, 0x04, 0x8d, 0x30, 0x96, 0x12, - ]; - let init = EndpointInit::deserialize(&payload).unwrap(); - - assert_eq!( - init.manager_address, - [ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xbb, 0x80, - 0x7f, 0x76, 0xcd, 0xa5, 0x3b, 0x1b, 0x42, 0x56, 0xe1, 0xb6, 0xf3, 0x3b, 0xb4, 0x6b, - 0xe3, 0x65, 0x08, 0xe3 - ] - ); - assert_eq!(init.manager_mode, ManagerMode::LOCKING as u8); - assert_eq!( - init.token_address, - [ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2a, 0x68, - 0xf9, 0x67, 0xbf, 0xa2, 0x30, 0x78, 0x0a, 0x38, 0x51, 0x75, 0xd0, 0xc8, 0x6a, 0xe4, - 0x04, 0x8d, 0x30, 0x96 - ] - ); - assert_eq!(init.token_decimals, 18); - } - - #[test] - pub fn burn_init() { - // c83e3d2e0000000000000000000000001fc14f21b27579f4f23578731cd361cca8aa39f701000000000000000000000000eb502b1d35e975321b21cce0e8890d20a7eb289d12 - let payload = [ - 0xc8, 0x3e, 0x3d, 0x2e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x1f, 0xc1, 0x4f, 0x21, 0xb2, 0x75, 0x79, 0xf4, 0xf2, 0x35, 0x78, 0x73, - 0x1c, 0xd3, 0x61, 0xcc, 0xa8, 0xaa, 0x39, 0xf7, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xeb, 0x50, 0x2b, 0x1d, 0x35, 0xe9, 0x75, - 0x32, 0x1b, 0x21, 0xcc, 0xe0, 0xe8, 0x89, 0x0d, 0x20, 0xa7, 0xeb, 0x28, 0x9d, 0x12, - ]; - let init = EndpointInit::deserialize(&payload).unwrap(); - - assert_eq!( - init.manager_address, - [ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1f, 0xc1, - 0x4f, 0x21, 0xb2, 0x75, 0x79, 0xf4, 0xf2, 0x35, 0x78, 0x73, 0x1c, 0xd3, 0x61, 0xcc, - 0xa8, 0xaa, 0x39, 0xf7, - ] - ); - assert_eq!(init.manager_mode, ManagerMode::BURNING as u8); - assert_eq!( - init.token_address, - [ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xeb, 0x50, - 0x2b, 0x1d, 0x35, 0xe9, 0x75, 0x32, 0x1b, 0x21, 0xcc, 0xe0, 0xe8, 0x89, 0x0d, 0x20, - 0xa7, 0xeb, 0x28, 0x9d, - ] - ); - assert_eq!(init.token_decimals, 18); - } - - #[test] - pub fn register() { - let payload = [ - 0xd0, 0xd2, 0x92, 0xf1, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x1f, 0xc1, 0x4f, 0x21, 0xb2, 0x75, 0x79, 0xf4, 0xf2, 0x35, - 0x78, 0x73, 0x1c, 0xd3, 0x61, 0xcc, 0xa8, 0xaa, 0x39, 0xf7, - ]; - let register = EndpointRegister::deserialize(&payload).unwrap(); - - assert_eq!(register.endpoint_chain_id, 1); - assert_eq!( - register.endpoint_address, - [ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1f, 0xc1, - 0x4f, 0x21, 0xb2, 0x75, 0x79, 0xf4, 0xf2, 0x35, 0x78, 0x73, 0x1c, 0xd3, 0x61, 0xcc, - 0xa8, 0xaa, 0x39, 0xf7, - ] - ); - } -} diff --git a/wormchain/contracts/tools/__tests__/test_ntt_accountant.ts b/wormchain/contracts/tools/__tests__/test_ntt_accountant.ts index b465dae2f4..bfeb887c44 100644 --- a/wormchain/contracts/tools/__tests__/test_ntt_accountant.ts +++ b/wormchain/contracts/tools/__tests__/test_ntt_accountant.ts @@ -35,23 +35,23 @@ if (process.env.INIT_SIGNERS_KEYS_CSV === "undefined") { * b. Ensure a duplicate hub init is rejected * c. Ensure a non-hub init is rejected * 2. Registrations - * a. Ensure a hub registration to an endpoint without a known hub is rejected - * b. 
Ensure an endpoint registration to a hub is saved
- * c. Ensure a hub registration to an endpoint with a known hub is saved
- * d. Ensure an endpoint registration to another endpoint without a known hub is rejected
- * e. Ensure an endpoint registration from an endpoint without a known hub to a non-hub is rejected
- * f. Ensure an endpoint registration to another endpoint with a known hub is saved
+ * a. Ensure a hub registration to a transceiver without a known hub is rejected
+ * b. Ensure a transceiver registration to a hub is saved
+ * c. Ensure a hub registration to a transceiver with a known hub is saved
+ * d. Ensure a transceiver registration to another transceiver without a known hub is rejected
+ * e. Ensure a transceiver registration from a transceiver without a known hub to a non-hub is rejected
+ * f. Ensure a transceiver registration to another transceiver with a known hub is saved
 * g. Ensure a duplicate registration is rejected
 * 3. Observations
- * a. Ensure a token can be sent from its hub endpoint
+ * a. Ensure a token can be sent from its hub transceiver
 * b. Ensure a token decimal shift works as expected
- * c. Ensure a token can be sent back to its hub endpoint
- * d. Ensure a token can be sent between non-hub endpoints
- * e. Ensure a token sent from a source endpoint without a known hub is rejected
- * f. Ensure a token sent from a source chain without a known endpoint is rejected
- * g. Ensure a token sent from a source chain without a matching endpoint is rejected
- * h. Ensure a token sent to a destination chain without a known endpoint is rejected
- * i. Ensure a token sent to a destination chain without a matching endpoint is rejected
+ * c. Ensure a token can be sent back to its hub transceiver
+ * d. Ensure a token can be sent between non-hub transceivers
+ * e. Ensure a token sent from a source transceiver without a known hub is rejected
+ * f. Ensure a token sent from a source chain without a known transceiver is rejected
+ * g. Ensure a token sent from a source chain without a matching transceiver is rejected
+ * h. Ensure a token sent to a destination chain without a known transceiver is rejected
+ * i. Ensure a token sent to a destination chain without a matching transceiver is rejected
 * j. Ensure spoofed tokens for more than the outstanding amount rejects successfully
 * 4. Transfer VAAs
 * a-i. 
Repeat Observation tests @@ -78,18 +78,18 @@ const NTT_GA_ADDRESS = "wormhole17p9rzwnnfxcjp32un9ug7yhhzgtkhvl9jfksztgw5uh69wac2pgshdnj3k"; const HUB_CHAIN = 2; -const HUB_ENDPOINT = `0000000000000000000000000000000000000000000000000000000000000042`; +const HUB_TRANSCEIVER = `0000000000000000000000000000000000000000000000000000000000000042`; const SPOKE_CHAIN_A = 4; -const SPOKE_ENDPOINT_A = `0000000000000000000000000000000000000000000000000000000000000043`; +const SPOKE_TRANSCEIVER_A = `0000000000000000000000000000000000000000000000000000000000000043`; const SPOKE_CHAIN_B = 5; -const SPOKE_ENDPOINT_B = `0000000000000000000000000000000000000000000000000000000000000044`; +const SPOKE_TRANSCEIVER_B = `0000000000000000000000000000000000000000000000000000000000000044`; const FAUX_HUB_CHAIN = 420; -const FAUX_HUB_ENDPOINT = +const FAUX_HUB_TRANSCEIVER = "deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef"; const FAUX_SPOKE_CHAIN_A = SPOKE_CHAIN_A; -const FAUX_SPOKE_ENDPOINT_A = FAUX_HUB_ENDPOINT; +const FAUX_SPOKE_TRANSCEIVER_A = FAUX_HUB_TRANSCEIVER; const UNKNOWN_SPOKE_CHAIN = 404; -const UNKNOWN_SPOKE_ENDPOINT = +const UNKNOWN_SPOKE_TRANSCEIVER = "beeffacebeeffacebeeffacebeeffacebeeffacebeeffacebeeffacebeefface"; const RELAYER_EMITTER = "00000000000000000000000053855d4b64e9a3cf59a84bc768ada716b5536bc5"; @@ -261,19 +261,20 @@ describe("Global Accountant Tests", () => { test("a. Ensure a hub init is saved", async () => { const vaa = makeVAA( HUB_CHAIN, - HUB_ENDPOINT, - "c83e3d2e000000000000000000000000bb807f76cda53b1b4256e1b6f33bb46be36508e3000000000000000000000000002a68f967bfa230780a385175d0c86ae4048d309612" + HUB_TRANSCEIVER, + "9c23bd3b000000000000000000000000bb807f76cda53b1b4256e1b6f33bb46be36508e3000000000000000000000000002a68f967bfa230780a385175d0c86ae4048d309612" ); const result = await submitVAA(vaa); expect(result.code).toEqual(0); const response = await cosmWasmClient.queryContractSmart(NTT_GA_ADDRESS, { - all_endpoint_hubs: {}, + all_transceiver_hubs: {}, }); const hub = response.hubs.find( - (entry) => entry.key[0] === HUB_CHAIN && entry.key[1] === HUB_ENDPOINT + (entry) => + entry.key[0] === HUB_CHAIN && entry.key[1] === HUB_TRANSCEIVER ); expect(hub).toBeDefined(); - expect(hub.data).toStrictEqual([HUB_CHAIN, HUB_ENDPOINT]); + expect(hub.data).toStrictEqual([HUB_CHAIN, HUB_TRANSCEIVER]); // check replay protection { const result = await submitVAA(vaa); @@ -284,8 +285,8 @@ describe("Global Accountant Tests", () => { test("b. Ensure a duplicate hub init is rejected", async () => { const vaa = makeVAA( HUB_CHAIN, - HUB_ENDPOINT, - "c83e3d2e000000000000000000000000bb807f76cda53b1b4256e1b6f33bb46be36508e3000000000000000000000000002a68f967bfa230780a385175d0c86ae4048d309612" + HUB_TRANSCEIVER, + "9c23bd3b000000000000000000000000bb807f76cda53b1b4256e1b6f33bb46be36508e3000000000000000000000000002a68f967bfa230780a385175d0c86ae4048d309612" ); const result = await submitVAA(vaa); expect(result.code).toEqual(5); @@ -294,8 +295,8 @@ describe("Global Accountant Tests", () => { test("c. 
Ensure a non-hub init is rejected", async () => { const vaa = makeVAA( SPOKE_CHAIN_A, - SPOKE_ENDPOINT_A, - "c83e3d2e0000000000000000000000001fc14f21b27579f4f23578731cd361cca8aa39f701000000000000000000000000eb502b1d35e975321b21cce0e8890d20a7eb289d12" + SPOKE_TRANSCEIVER_A, + "9c23bd3b0000000000000000000000001fc14f21b27579f4f23578731cd361cca8aa39f701000000000000000000000000eb502b1d35e975321b21cce0e8890d20a7eb289d12" ); const result = await submitVAA(vaa); expect(result.code).toEqual(5); @@ -303,35 +304,35 @@ describe("Global Accountant Tests", () => { }); }); describe("2. Registrations", () => { - test("a. Ensure a hub registration to an endpoint without a known hub is rejected", async () => { + test("a. Ensure a hub registration to an transceiver without a known hub is rejected", async () => { const vaa = makeVAA( HUB_CHAIN, - HUB_ENDPOINT, - `d0d292f1${chainToHex(SPOKE_CHAIN_A)}${SPOKE_ENDPOINT_A}` + HUB_TRANSCEIVER, + `18fc67c2${chainToHex(SPOKE_CHAIN_A)}${SPOKE_TRANSCEIVER_A}` ); const result = await submitVAA(vaa); expect(result.code).toEqual(5); expect(result.rawLog).toMatch("no registered hub"); }); - test("b. Ensure an endpoint registration to a hub is saved", async () => { + test("b. Ensure an transceiver registration to a hub is saved", async () => { const vaa = makeVAA( SPOKE_CHAIN_A, - SPOKE_ENDPOINT_A, - `d0d292f1${chainToHex(HUB_CHAIN)}${HUB_ENDPOINT}` + SPOKE_TRANSCEIVER_A, + `18fc67c2${chainToHex(HUB_CHAIN)}${HUB_TRANSCEIVER}` ); const result = await submitVAA(vaa); expect(result.code).toEqual(0); const response = await cosmWasmClient.queryContractSmart(NTT_GA_ADDRESS, { - all_endpoint_peers: {}, + all_transceiver_peers: {}, }); const peer = response.peers.find( (entry) => entry.key[0] === SPOKE_CHAIN_A && - entry.key[1] === SPOKE_ENDPOINT_A && + entry.key[1] === SPOKE_TRANSCEIVER_A && entry.key[2] === HUB_CHAIN ); expect(peer).toBeDefined(); - expect(peer.data).toStrictEqual(HUB_ENDPOINT); + expect(peer.data).toStrictEqual(HUB_TRANSCEIVER); // check replay protection { const result = await submitVAA(vaa); @@ -339,41 +340,41 @@ describe("Global Accountant Tests", () => { expect(result.rawLog).toMatch("message already processed"); } }); - test("c. Ensure a hub registration to an endpoint with a known hub is saved", async () => { + test("c. Ensure a hub registration to an transceiver with a known hub is saved", async () => { const vaa = makeVAA( HUB_CHAIN, - HUB_ENDPOINT, - `d0d292f1${chainToHex(SPOKE_CHAIN_A)}${SPOKE_ENDPOINT_A}` + HUB_TRANSCEIVER, + `18fc67c2${chainToHex(SPOKE_CHAIN_A)}${SPOKE_TRANSCEIVER_A}` ); const result = await submitVAA(vaa); expect(result.code).toEqual(0); const response = await cosmWasmClient.queryContractSmart(NTT_GA_ADDRESS, { - all_endpoint_peers: {}, + all_transceiver_peers: {}, }); const peer = response.peers.find( (entry) => entry.key[0] === HUB_CHAIN && - entry.key[1] === HUB_ENDPOINT && + entry.key[1] === HUB_TRANSCEIVER && entry.key[2] === SPOKE_CHAIN_A ); expect(peer).toBeDefined(); - expect(peer.data).toStrictEqual(SPOKE_ENDPOINT_A); + expect(peer.data).toStrictEqual(SPOKE_TRANSCEIVER_A); }); - test("d. Ensure an endpoint registration to another endpoint without a known hub is rejected", async () => { + test("d. 
Ensure an transceiver registration to another transceiver without a known hub is rejected", async () => { const vaa = makeVAA( SPOKE_CHAIN_A, - SPOKE_ENDPOINT_A, - `d0d292f1${chainToHex(SPOKE_CHAIN_B)}${SPOKE_ENDPOINT_B}` + SPOKE_TRANSCEIVER_A, + `18fc67c2${chainToHex(SPOKE_CHAIN_B)}${SPOKE_TRANSCEIVER_B}` ); const result = await submitVAA(vaa); expect(result.code).toEqual(5); expect(result.rawLog).toMatch("no registered hub"); }); - test("e. Ensure an endpoint registration from an endpoint without a known hub to a non-hub is rejected", async () => { + test("e. Ensure an transceiver registration from an transceiver without a known hub to a non-hub is rejected", async () => { const vaa = makeVAA( SPOKE_CHAIN_B, - SPOKE_ENDPOINT_B, - `d0d292f1${chainToHex(SPOKE_CHAIN_A)}${SPOKE_ENDPOINT_A}` + SPOKE_TRANSCEIVER_B, + `18fc67c2${chainToHex(SPOKE_CHAIN_A)}${SPOKE_TRANSCEIVER_A}` ); const result = await submitVAA(vaa); expect(result.code).toEqual(5); @@ -381,60 +382,60 @@ describe("Global Accountant Tests", () => { "ignoring attempt to register peer before hub" ); }); - test("f. Ensure an endpoint registration to another endpoint with a known hub is saved", async () => { + test("f. Ensure an transceiver registration to another transceiver with a known hub is saved", async () => { { const vaa = makeVAA( SPOKE_CHAIN_B, - SPOKE_ENDPOINT_B, - `d0d292f1${chainToHex(HUB_CHAIN)}${HUB_ENDPOINT}` + SPOKE_TRANSCEIVER_B, + `18fc67c2${chainToHex(HUB_CHAIN)}${HUB_TRANSCEIVER}` ); const result = await submitVAA(vaa); expect(result.code).toEqual(0); const response = await cosmWasmClient.queryContractSmart( NTT_GA_ADDRESS, { - all_endpoint_peers: {}, + all_transceiver_peers: {}, } ); const peer = response.peers.find( (entry) => entry.key[0] === SPOKE_CHAIN_B && - entry.key[1] === SPOKE_ENDPOINT_B && + entry.key[1] === SPOKE_TRANSCEIVER_B && entry.key[2] === HUB_CHAIN ); expect(peer).toBeDefined(); - expect(peer.data).toStrictEqual(HUB_ENDPOINT); + expect(peer.data).toStrictEqual(HUB_TRANSCEIVER); } { const vaa = makeVAA( SPOKE_CHAIN_A, - SPOKE_ENDPOINT_A, - `d0d292f1${chainToHex(SPOKE_CHAIN_B)}${SPOKE_ENDPOINT_B}` + SPOKE_TRANSCEIVER_A, + `18fc67c2${chainToHex(SPOKE_CHAIN_B)}${SPOKE_TRANSCEIVER_B}` ); const result = await submitVAA(vaa); expect(result.code).toEqual(0); const response = await cosmWasmClient.queryContractSmart( NTT_GA_ADDRESS, { - all_endpoint_peers: {}, + all_transceiver_peers: {}, } ); const peer = response.peers.find( (entry) => entry.key[0] === SPOKE_CHAIN_A && - entry.key[1] === SPOKE_ENDPOINT_A && + entry.key[1] === SPOKE_TRANSCEIVER_A && entry.key[2] === SPOKE_CHAIN_B ); expect(peer).toBeDefined(); - expect(peer.data).toStrictEqual(SPOKE_ENDPOINT_B); + expect(peer.data).toStrictEqual(SPOKE_TRANSCEIVER_B); } }); test("g. 
Ensure a duplicate registration is rejected", async () => { { const vaa = makeVAA( SPOKE_CHAIN_B, - SPOKE_ENDPOINT_B, - `d0d292f1${chainToHex(SPOKE_CHAIN_A)}${SPOKE_ENDPOINT_A}` + SPOKE_TRANSCEIVER_B, + `18fc67c2${chainToHex(SPOKE_CHAIN_A)}${SPOKE_TRANSCEIVER_A}` ); const result = await submitVAA(vaa); expect(result.code).toEqual(0); @@ -442,8 +443,8 @@ describe("Global Accountant Tests", () => { { const vaa = makeVAA( SPOKE_CHAIN_B, - SPOKE_ENDPOINT_B, - `d0d292f1${chainToHex(SPOKE_CHAIN_A)}${SPOKE_ENDPOINT_A}` + SPOKE_TRANSCEIVER_B, + `18fc67c2${chainToHex(SPOKE_CHAIN_A)}${SPOKE_TRANSCEIVER_A}` ); const result = await submitVAA(vaa); expect(result.code).toEqual(5); @@ -457,8 +458,8 @@ describe("Global Accountant Tests", () => { // set faux hub const vaa = makeVAA( FAUX_HUB_CHAIN, - FAUX_HUB_ENDPOINT, - "c83e3d2e000000000000000000000000bb807f76cda53b1b4256e1b6f33bb46be36508e3000000000000000000000000002a68f967bfa230780a385175d0c86ae4048d309612" + FAUX_HUB_TRANSCEIVER, + "9c23bd3b000000000000000000000000bb807f76cda53b1b4256e1b6f33bb46be36508e3000000000000000000000000002a68f967bfa230780a385175d0c86ae4048d309612" ); const result = await submitVAA(vaa); expect(result.code).toEqual(0); @@ -467,8 +468,8 @@ describe("Global Accountant Tests", () => { // set attempt to register legit spoke with it const vaa = makeVAA( FAUX_HUB_CHAIN, - FAUX_HUB_ENDPOINT, - `d0d292f1${chainToHex(SPOKE_CHAIN_A)}${SPOKE_ENDPOINT_A}` + FAUX_HUB_TRANSCEIVER, + `18fc67c2${chainToHex(SPOKE_CHAIN_A)}${SPOKE_TRANSCEIVER_A}` ); const result = await submitVAA(vaa); expect(result.code).toEqual(5); @@ -477,10 +478,10 @@ describe("Global Accountant Tests", () => { }); }); describe("4. Transfer VAAs", () => { - test("a. Ensure a token can be sent from its hub endpoint", async () => { + test("a. Ensure a token can be sent from its hub transceiver", async () => { const vaa = makeVAA( HUB_CHAIN, - HUB_ENDPOINT, + HUB_TRANSCEIVER, mockTransferPayload(8, 10, SPOKE_CHAIN_A) ); const result = await submitVAA(vaa); @@ -495,7 +496,7 @@ describe("Global Accountant Tests", () => { test("b. Ensure a token decimal shift works as expected", async () => { const vaa = makeVAA( SPOKE_CHAIN_A, - SPOKE_ENDPOINT_A, + SPOKE_TRANSCEIVER_A, mockTransferPayload(6, 1, HUB_CHAIN) ); const result = await submitVAA(vaa); @@ -504,38 +505,38 @@ describe("Global Accountant Tests", () => { "insufficient balance in source account: Overflow: Cannot Sub with 10 and 100" ); }); - test("c. Ensure a token can be sent back to its hub endpoint", async () => { + test("c. Ensure a token can be sent back to its hub transceiver", async () => { const vaa = makeVAA( SPOKE_CHAIN_A, - SPOKE_ENDPOINT_A, + SPOKE_TRANSCEIVER_A, mockTransferPayload(8, 1, HUB_CHAIN) ); const result = await submitVAA(vaa); expect(result.code).toEqual(0); }); - test("d. Ensure a token can be sent between non-hub endpoints", async () => { + test("d. Ensure a token can be sent between non-hub transceivers", async () => { const vaa = makeVAA( SPOKE_CHAIN_A, - SPOKE_ENDPOINT_A, + SPOKE_TRANSCEIVER_A, mockTransferPayload(8, 1, SPOKE_CHAIN_B) ); const result = await submitVAA(vaa); expect(result.code).toEqual(0); }); - test("e. Ensure a token sent from a source endpoint without a known hub is rejected", async () => { + test("e. 
Ensure a token sent from a source transceiver without a known hub is rejected", async () => { const vaa = makeVAA( UNKNOWN_SPOKE_CHAIN, - UNKNOWN_SPOKE_ENDPOINT, + UNKNOWN_SPOKE_TRANSCEIVER, mockTransferPayload(8, 1, HUB_CHAIN) ); const result = await submitVAA(vaa); expect(result.code).toEqual(5); expect(result.rawLog).toMatch("no registered hub"); }); - test("f. Ensure a token sent from a source chain without a known endpoint is rejected", async () => { + test("f. Ensure a token sent from a source chain without a known transceiver is rejected", async () => { const vaa = makeVAA( FAUX_HUB_CHAIN, - FAUX_HUB_ENDPOINT, + FAUX_HUB_TRANSCEIVER, mockTransferPayload(8, 1, HUB_CHAIN) ); const result = await submitVAA(vaa); @@ -544,14 +545,14 @@ describe("Global Accountant Tests", () => { "no registered source peer for chain Ethereum" ); }); - test("g. Ensure a token sent from a source chain without a matching endpoint is rejected", async () => { + test("g. Ensure a token sent from a source chain without a matching transceiver is rejected", async () => { { // set faux spoke registration to hub but not vice-versa { const vaa = makeVAA( FAUX_SPOKE_CHAIN_A, - FAUX_SPOKE_ENDPOINT_A, - `d0d292f1${chainToHex(FAUX_HUB_CHAIN)}${FAUX_HUB_ENDPOINT}` + FAUX_SPOKE_TRANSCEIVER_A, + `18fc67c2${chainToHex(FAUX_HUB_CHAIN)}${FAUX_HUB_TRANSCEIVER}` ); const result = await submitVAA(vaa); expect(result.code).toEqual(0); @@ -559,7 +560,7 @@ describe("Global Accountant Tests", () => { } const vaa = makeVAA( FAUX_SPOKE_CHAIN_A, - FAUX_SPOKE_ENDPOINT_A, + FAUX_SPOKE_TRANSCEIVER_A, mockTransferPayload(8, 1, FAUX_HUB_CHAIN) ); const result = await submitVAA(vaa); @@ -568,20 +569,20 @@ describe("Global Accountant Tests", () => { "no registered destination peer for chain Bsc" ); }); - test("h. Ensure a token sent to a destination chain without a known endpoint is rejected", async () => { + test("h. Ensure a token sent to a destination chain without a known transceiver is rejected", async () => { const vaa = makeVAA( HUB_CHAIN, - HUB_ENDPOINT, + HUB_TRANSCEIVER, mockTransferPayload(8, 1, UNKNOWN_SPOKE_CHAIN) ); const result = await submitVAA(vaa); expect(result.code).toEqual(5); expect(result.rawLog).toMatch("no registered source peer for chain"); }); - test("i. Ensure a token sent to a destination chain without a matching endpoint is rejected", async () => { + test("i. Ensure a token sent to a destination chain without a matching transceiver is rejected", async () => { const vaa = makeVAA( FAUX_HUB_CHAIN, - FAUX_HUB_ENDPOINT, + FAUX_HUB_TRANSCEIVER, mockTransferPayload(8, 1, HUB_CHAIN) ); const result = await submitVAA(vaa); @@ -593,7 +594,7 @@ describe("Global Accountant Tests", () => { test("j. 
Ensure spoofed tokens for more than the outstanding amount rejects successfully", async () => { const vaa = makeVAA( SPOKE_CHAIN_A, - SPOKE_ENDPOINT_A, + SPOKE_TRANSCEIVER_A, mockTransferPayload(8, 9000, HUB_CHAIN) ); const result = await submitVAA(vaa); @@ -654,7 +655,7 @@ describe("Global Accountant Tests", () => { HUB_CHAIN, RELAYER_EMITTER, mockDeliveryPayload( - HUB_ENDPOINT, + HUB_TRANSCEIVER, mockTransferPayload(8, 1, SPOKE_CHAIN_A) ) ); @@ -667,7 +668,7 @@ describe("Global Accountant Tests", () => { HUB_CHAIN, RELAYER_EMITTER, mockDeliveryPayload( - UNKNOWN_SPOKE_ENDPOINT, + UNKNOWN_SPOKE_TRANSCEIVER, mockTransferPayload(8, 1, SPOKE_CHAIN_A) ) ); @@ -680,7 +681,7 @@ describe("Global Accountant Tests", () => { SPOKE_CHAIN_A, RELAYER_EMITTER, mockDeliveryPayload( - SPOKE_ENDPOINT_A, + SPOKE_TRANSCEIVER_A, mockTransferPayload(8, 9999, HUB_CHAIN) ) );
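
For readers skimming this patch, the hub/peer checks described in the README above reduce to two map lookups plus a cross-registration test. The sketch below restates that flow in plain Rust, with in-memory maps standing in for the contract's TRANSCEIVER_TO_HUB and TRANSCEIVER_PEER storage; the Registry type, validate_transfer function, and error strings are illustrative only and are not part of the contract's API.

use std::collections::HashMap;

type Chain = u16;
type Addr = [u8; 32];

// Simplified mirror of the accountant's transceiver bookkeeping:
//   hubs:  (chain, transceiver)             -> (hub chain, hub transceiver)
//   peers: (chain, transceiver, peer chain) -> peer transceiver
struct Registry {
    hubs: HashMap<(Chain, Addr), (Chain, Addr)>,
    peers: HashMap<(Chain, Addr, Chain), Addr>,
}

impl Registry {
    fn validate_transfer(
        &self,
        src_chain: Chain,
        src_xcvr: Addr,
        dst_chain: Chain,
    ) -> Result<(Chain, Addr), String> {
        // 1. The sending transceiver must map to a known (locking) hub.
        let hub = self
            .hubs
            .get(&(src_chain, src_xcvr))
            .ok_or("no registered hub")?;
        // 2. The sender must have registered a peer on the destination chain.
        let dst_xcvr = self
            .peers
            .get(&(src_chain, src_xcvr, dst_chain))
            .ok_or("no registered source peer for the destination chain")?;
        // 3. That peer must have registered the sender back (cross-registration).
        let back = self
            .peers
            .get(&(dst_chain, *dst_xcvr, src_chain))
            .ok_or("no registered destination peer for the source chain")?;
        if *back != src_xcvr {
            return Err("peers are not cross-registered".to_string());
        }
        // The accounted balance is keyed by the hub, so every spoke of one hub
        // shares a single ledger entry.
        Ok(*hub)
    }
}

fn main() {
    // Chain IDs mirror HUB_CHAIN (2) and SPOKE_CHAIN_A (4) from the tests above;
    // the 32-byte addresses are placeholders.
    let hub = (2, [0x42u8; 32]);
    let spoke = (4, [0x43u8; 32]);

    let mut reg = Registry {
        hubs: HashMap::new(),
        peers: HashMap::new(),
    };
    // A locking hub's init registers it as its own hub; a spoke inherits that hub
    // when it registers the hub as a peer.
    reg.hubs.insert(hub, hub);
    reg.hubs.insert(spoke, hub);
    // Cross-registration in both directions.
    reg.peers.insert((hub.0, hub.1, spoke.0), spoke.1);
    reg.peers.insert((spoke.0, spoke.1, hub.0), hub.1);

    assert_eq!(reg.validate_transfer(hub.0, hub.1, spoke.0), Ok(hub));
    assert!(reg.validate_transfer(spoke.0, spoke.1, 5).is_err()); // no peer on chain 5
    println!("cross-registration checks pass");
}

In the contract itself these same three conditions surface as the MissingHubRegistration and MissingSourcePeerRegistration errors and the "no registered destination peer" rejection exercised by the tests above.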