feat: testnet prod sync for retry issue (#5158)
* feat: added IXT token

* chore: update main ops with latest staging changes (#5135)

* feat: all in one commit

* fix: all unit test fix

* ci: fix yarn:test

---------

Co-authored-by: Prathmesh <[email protected]>
Co-authored-by: Carlo Mazzaferro <[email protected]>
Co-authored-by: Layne Haber <[email protected]>
Co-authored-by: preethamr <[email protected]>
5 people authored Nov 13, 2023
1 parent f808ef9 commit bc72d46
Showing 14 changed files with 274 additions and 107 deletions.
8 changes: 4 additions & 4 deletions ops/testnet/staging/backend/config.tf
@@ -65,10 +65,10 @@ locals {
local_cartographer_config = jsonencode({
logLevel = "debug"
chains = {
"1735356532" = {}
"1735353714" = {}
"9991" = {}
"1734439522" = {}
"1735356532" = { confirmations = 1 }
"1735353714" = { confirmations = 10 }
"9991" = { confirmations = 200 }
"1734439522" = { confirmations = 1 }
}
environment = var.stage
})
2 changes: 1 addition & 1 deletion ops/testnet/staging/backend/main.tf
@@ -62,7 +62,7 @@ module "cartographer-db-alarms" {
enable_free_storage_space_too_low_alarm = true
stage = var.stage
environment = var.environment
sns_topic_subscription_emails = ["[email protected]", "rahul@connext.network"]
sns_topic_subscription_emails = ["[email protected]", "rahul@proximalabs.io", "[email protected]", "[email protected]"]
}

module "postgrest" {
82 changes: 51 additions & 31 deletions ops/testnet/staging/core/config.tf
@@ -108,9 +108,9 @@ locals {
"1734439522" = {
providers = ["https://arb-goerli.g.alchemy.com/v2/${var.arbgoerli_alchemy_key_0}", "https://goerli-rollup.arbitrum.io/rpc"]
}
"2053862260" = {
providers = ["https://zksync2-testnet.zksync.dev"]
}
# "2053862260" = {
# providers = ["https://zksync2-testnet.zksync.dev"]
# }
}
web3SignerUrl = "https://${module.sequencer_web3signer.service_endpoint}"
relayers = [
@@ -146,37 +146,37 @@ locals {
{
name = "http"
limit = 100
queueLimit = 100000
queueLimit = 1000000
subscribe = true
},
{
name = "1735353714"
limit = 1
queueLimit = 100000
queueLimit = 1000000
subscribe = true
},
{
name = "1735356532"
limit = 1
queueLimit = 100000
queueLimit = 1000000
subscribe = true
},
{
name = "9991"
limit = 1
queueLimit = 100000
subscribe = true
},
{
name = "2053862260"
limit = 1
queueLimit = 100000
queueLimit = 1000000
subscribe = true
},
# {
# name = "2053862260"
# limit = 1
# queueLimit = 100000
# subscribe = true
# },
{
name = "1734439522"
limit = 1
queueLimit = 100000
queueLimit = 1000000
subscribe = true
},
]
@@ -201,11 +201,11 @@ locals {
target = "9991"
keys = ["9991"]
},
{
exchange = "sequencerX"
target = "2053862260"
keys = ["2053862260"]
},
# {
# exchange = "sequencerX"
# target = "2053862260"
# keys = ["2053862260"]
# },
{
exchange = "sequencerX"
target = "1734439522"
@@ -249,9 +249,9 @@ locals {
"1734439522" = {
providers = ["https://arb-goerli.g.alchemy.com/v2/${var.arbgoerli_alchemy_key_0}", "https://goerli-rollup.arbitrum.io/rpc"]
}
"2053862260" = {
providers = ["https://zksync2-testnet.zksync.dev"]
}
# "2053862260" = {
# providers = ["https://zksync2-testnet.zksync.dev"]
# }
}
cartographerUrl = "https://postgrest.testnet.staging.connext.ninja"
web3SignerUrl = "https://${module.router_web3signer.service_endpoint}"
@@ -280,9 +280,9 @@ locals {
"1734439522" = {
providers = ["https://arb-goerli.g.alchemy.com/v2/${var.arbgoerli_alchemy_key_0}", "https://goerli-rollup.arbitrum.io/rpc"]
}
"2053862260" = {
providers = ["https://zksync2-testnet.zksync.dev"]
}
# "2053862260" = {
# providers = ["https://zksync2-testnet.zksync.dev"]
# }
}
gelatoApiKey = "${var.gelato_api_key}"
relayers = [
@@ -299,13 +299,33 @@ locals {
]
environment = var.stage
database = {
url = local.read_replica_db_url
url = local.default_db_url
}
databaseWriter = {
url = local.default_db_url
}
main = "1735353714"
proverBatchSize = 1
main = "1735353714"
proverBatchSize = {
# "1668247156" = 10,
"9991" = 10,
"1735353714" = 10,
# "2053862260" = 10,
"1734439522" = 10,
"1735356532" = 10
}
messageQueue = {
connection = {
uri = "amqps://${var.rmq_mgt_user}:${var.rmq_mgt_password}@${module.centralised_message_queue.aws_mq_amqp_endpoint}"
}
exchange = {
name = "proverX"
type = "direct"
publishTimeout = 1000
persistent = true
durable = true
}
prefetchSize = 1
}
})

local_relayer_config = jsonencode({
@@ -330,9 +350,9 @@ locals {
"1734439522" = {
providers = ["https://arb-goerli.g.alchemy.com/v2/${var.arbgoerli_alchemy_key_0}", "https://goerli-rollup.arbitrum.io/rpc"]
}
"2053862260" = {
providers = ["https://zksync2-testnet.zksync.dev"]
}
# "2053862260" = {
# providers = ["https://zksync2-testnet.zksync.dev"]
# }
}
environment = var.stage
web3SignerUrl = "https://${module.relayer_web3signer.service_endpoint}"
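The lighthouse prover's `proverBatchSize` moves from a single number to a per-domain map in this config. A minimal TypeScript sketch of how such a map could be resolved is below; the `ProverConfig` shape, the `DEFAULT_PROVER_BATCH_SIZE` fallback, and `getBatchSize` are illustrative assumptions, not the lighthouse agent's actual API.

```typescript
// Hypothetical sketch: looking up a per-domain prover batch size.
// The shape mirrors the Terraform map above; all names here are assumptions.
interface ProverConfig {
  proverBatchSize: Record<string, number>; // domain id -> batch size
}

const DEFAULT_PROVER_BATCH_SIZE = 1; // assumed fallback for unlisted domains

function getBatchSize(config: ProverConfig, domain: string): number {
  return config.proverBatchSize[domain] ?? DEFAULT_PROVER_BATCH_SIZE;
}

// Example mirroring the staging values above:
const proverConfig: ProverConfig = {
  proverBatchSize: { "9991": 10, "1735353714": 10, "1734439522": 10, "1735356532": 10 },
};
getBatchSize(proverConfig, "9991"); // 10
getBatchSize(proverConfig, "2053862260"); // 1 (zkSync domain is commented out above)
```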
52 changes: 34 additions & 18 deletions ops/testnet/staging/core/main.tf
@@ -292,6 +292,24 @@ module "sequencer_web3signer" {
container_env_vars = local.sequencer_web3signer_env_vars
}

module "lighthouse_prover_cron" {
source = "../../../modules/lambda"
ecr_repository_name = "nxtp-lighthouse"
docker_image_tag = var.lighthouse_image_tag
container_family = "lighthouse-prover"
environment = var.environment
stage = var.stage
container_env_vars = merge(local.lighthouse_env_vars, {
LIGHTHOUSE_SERVICE = "prover-pub"
})
schedule_expression = "rate(30 minutes)"
timeout = 300
memory_size = 512
lambda_in_vpc = true
subnet_ids = module.network.private_subnets
lambda_security_groups = flatten([module.network.allow_all_sg, module.network.ecs_task_sg])
}

module "lighthouse_prover_subscriber" {
source = "../../../modules/service"
stage = var.stage
@@ -334,21 +352,6 @@ module "lighthouse_prover_subscriber_auto_scaling" {
avg_mem_utilization_target = 40
}

module "lighthouse_prover_cron" {
source = "../../../modules/lambda"
ecr_repository_name = "nxtp-lighthouse"
docker_image_tag = var.lighthouse_image_tag
container_family = "lighthouse-prover"
environment = var.environment
stage = var.stage
container_env_vars = merge(local.lighthouse_env_vars, { LIGHTHOUSE_SERVICE = "prover" })
schedule_expression = "rate(30 minutes)"
memory_size = 512
lambda_in_vpc = true
subnet_ids = module.network.private_subnets
lambda_security_groups = flatten([module.network.allow_all_sg, module.network.ecs_task_sg])
}

module "lighthouse_process_from_root_cron" {
source = "../../../modules/lambda"
ecr_repository_name = "nxtp-lighthouse"
@@ -358,7 +361,7 @@ module "lighthouse_process_from_root_cron" {
stage = var.stage
container_env_vars = merge(local.lighthouse_env_vars, { LIGHTHOUSE_SERVICE = "process" })
schedule_expression = "rate(5 minutes)"
memory_size = 512
memory_size = 1536
}


@@ -371,7 +374,7 @@ module "lighthouse_propagate_cron" {
stage = var.stage
container_env_vars = merge(local.lighthouse_env_vars, { LIGHTHOUSE_SERVICE = "propagate" })
schedule_expression = "rate(30 minutes)"
memory_size = 1024
memory_size = 2048
}

module "lighthouse_sendoutboundroot_cron" {
@@ -383,7 +386,20 @@ module "lighthouse_sendoutboundroot_cron" {
stage = var.stage
container_env_vars = merge(local.lighthouse_env_vars, { LIGHTHOUSE_SERVICE = "sendoutboundroot" })
schedule_expression = "rate(30 minutes)"
memory_size = 512
memory_size = 2048
}


module "lighthouse_propose_cron" {
source = "../../../modules/lambda"
ecr_repository_name = "nxtp-lighthouse"
docker_image_tag = var.lighthouse_image_tag
container_family = "lighthouse-propose"
environment = var.environment
stage = var.stage
container_env_vars = merge(local.lighthouse_env_vars, { LIGHTHOUSE_SERVICE = "propose" })
schedule_expression = "rate(30 minutes)"
memory_size = 1536
}

module "relayer" {
4 changes: 4 additions & 0 deletions ops/testnet/staging/core/outputs.tf
@@ -49,6 +49,10 @@ output "router-subscriber-service-endpoint" {
value = module.router_subscriber.service_endpoint
}

output "router-executor-service-endpoint" {
value = module.router_executor.service_endpoint
}

output "relayer-service-endpoint" {
value = module.relayer.service_endpoint
}
6 changes: 3 additions & 3 deletions ops/testnet/staging/core/variables.tf
@@ -33,13 +33,13 @@ variable "full_image_name_router_publisher" {

variable "full_image_name_router_subscriber" {
type = string
description = "router image name"
description = "lighthouse subscriber image name"
default = "ghcr.io/connext/router-subscriber:sha-b5bb49a"
}

variable "full_image_name_lighthouse_prover_subscriber" {
type = string
description = "router image name"
description = "lighthouse prover image name"
default = "ghcr.io/connext/lighthouse-subscriber:sha-b5bb49a"
}

@@ -200,5 +200,5 @@ variable "betteruptime_api_key" {

variable "betteruptime_requester_email" {
type = string
default = "layne@connext.network"
default = "layne@proximalabs.io"
}
7 changes: 5 additions & 2 deletions packages/adapters/cache/src/lib/caches/messages.ts
@@ -85,7 +85,7 @@ export class MessagesCache extends Cache {
for (const value of values) {
const message = await this.getMessage(value.leaf);
if (message) {
await this.storeMessage(message.data, value.status, message.attempt);
await this.storeMessage(message.data, value.status, value.status == ExecStatus.None ? 0 : message.attempt);
}
}
}
@@ -122,7 +122,10 @@
*/
private async addPending(originDomain: string, destinationDomain: string, leaf: string) {
const pendingKey = `${originDomain}-${destinationDomain}`;
await this.data.rpush(`${this.prefix}:pending:${pendingKey}`, leaf);
const message = await this.getMessage(leaf);
if (!message) {
await this.data.rpush(`${this.prefix}:pending:${pendingKey}`, leaf);
}
}

/**
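The two `messages.ts` changes above target the retry issue named in the commit title: resetting a message's status back to `ExecStatus.None` now also resets its attempt counter, and a leaf is only pushed onto the pending list when no cached message exists for it yet. A simplified, in-memory TypeScript sketch of both behaviors follows; it collapses the Redis-backed `MessagesCache` into a plain map and merges the store/pending steps for brevity, so treat it as an illustration rather than the package's implementation.

```typescript
// In-memory sketch of the two retry-related behaviors changed above (illustrative only).
type ExecStatus = "None" | "Sent" | "Completed";

interface CachedMessage {
  leaf: string;
  status: ExecStatus;
  attempt: number;
}

class PendingListSketch {
  private messages = new Map<string, CachedMessage>();
  private pending: string[] = [];

  // Moving a message back to "None" also clears its attempt counter,
  // so a re-queued message starts its retries from zero.
  setStatus(leaf: string, status: ExecStatus): void {
    const existing = this.messages.get(leaf);
    if (!existing) return;
    this.messages.set(leaf, {
      ...existing,
      status,
      attempt: status === "None" ? 0 : existing.attempt,
    });
  }

  // A leaf is appended to the pending list only the first time it is seen;
  // re-storing a known message no longer duplicates it in the queue.
  store(leaf: string): void {
    if (this.messages.has(leaf)) return;
    this.messages.set(leaf, { leaf, status: "None", attempt: 0 });
    this.pending.push(leaf);
  }

  getPending(): string[] {
    return [...this.pending];
  }
}
```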
47 changes: 46 additions & 1 deletion packages/adapters/cache/test/lib/caches/messages.spec.ts
@@ -1,4 +1,13 @@
import { ExecStatus, Logger, RelayerType, XMessage, expect, mkBytes32, mock } from "@connext/nxtp-utils";
import {
ExecStatus,
Logger,
RelayerType,
XMessage,
expect,
getRandomBytes32,
mkBytes32,
mock,
} from "@connext/nxtp-utils";
import { MessagesCache } from "../../../src/index";

const logger = new Logger({ level: "debug" });
@@ -10,6 +19,23 @@ const mockXMessages: XMessage[] = [
{ ...mock.entity.xMessage(), originDomain, destinationDomain, leaf: mkBytes32("0x222") },
];

const genMockXMessages = (count: number): XMessage[] => {
const xMessages: XMessage[] = [];
for (let i = 0; i < count; i++) {
const leaf = getRandomBytes32();
const root = getRandomBytes32();
xMessages.push({
...mock.entity.xMessage(),
originDomain,
destinationDomain,
origin: { index: 100 + i, root, message: leaf },
leaf,
});
}

return xMessages;
};

describe("MessagesCache", () => {
beforeEach(async () => {
messagesCache = new MessagesCache({ host: "mock", port: 1234, mock: true, logger });
@@ -140,6 +166,25 @@ describe("MessagesCache", () => {
const message2 = await messagesCache.getMessage(mockXMessages[1].leaf);
expect(message2?.status).to.be.deep.eq(ExecStatus.Completed);
});

it("shouldn't add the message to the pending list if exists", async () => {
const mockXMessages = genMockXMessages(10);
await messagesCache.storeMessages(mockXMessages);

const xMessage1 = mockXMessages[0];
await messagesCache.setStatus([{ leaf: xMessage1.leaf, status: ExecStatus.Sent }]);
const message = await messagesCache.getMessage(xMessage1.leaf);
expect(message?.status).to.be.eq(ExecStatus.Sent);

let pendings = await messagesCache.getPending(originDomain, destinationDomain, 0, 100);
expect(pendings.length).to.be.eq(10);
expect(pendings).to.be.deep.eq(mockXMessages.map((it) => it.leaf));

await messagesCache.removePending(originDomain, destinationDomain, [xMessage1.leaf]);
pendings = await messagesCache.getPending(originDomain, destinationDomain, 0, 100);
expect(pendings.length).to.be.eq(9);
expect(pendings).to.be.deep.eq(mockXMessages.slice(1).map((it) => it.leaf));
});
});

describe("#nonce", () => {
10 changes: 10 additions & 0 deletions packages/agents/lighthouse/src/errors/prover.ts
@@ -59,3 +59,13 @@ export class NoMessageProof extends NxtpError {
super(`No index ${index} for message hash ${leaf}`, context, NoMessageProof.name);
}
}

export class EmptyMessageProofs extends NxtpError {
constructor(originDomain: string, destinationDomain: string, context: any = {}) {
super(
`Empty message proofs for origin: ${originDomain} and destination: ${destinationDomain}`,
context,
EmptyMessageProofs.name,
);
}
}
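The new `EmptyMessageProofs` error carries both domains in its message. A short, hedged usage sketch follows; the guard function, its parameters, and the import path are assumptions about how the prover might raise it, not code from this commit.

```typescript
// Hypothetical usage of the EmptyMessageProofs error defined above.
import { EmptyMessageProofs } from "../errors/prover"; // import path assumed

function assertProofsNotEmpty(proofs: unknown[], originDomain: string, destinationDomain: string): void {
  if (proofs.length === 0) {
    // Include a small context object so operators can see which lane produced no proofs.
    throw new EmptyMessageProofs(originDomain, destinationDomain, { proofCount: proofs.length });
  }
}
```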