Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

refactor(torii-indexer): single fetch range but chunk block processing #2899

Open
wants to merge 6 commits into
base: main
Choose a base branch
from
Open
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
106 changes: 60 additions & 46 deletions crates/torii/indexer/src/engine.rs
Original file line number Diff line number Diff line change
Expand Up @@ -325,31 +325,32 @@ impl<P: Provider + Send + Sync + std::fmt::Debug + 'static> Engine<P> {
pub async fn fetch_data(&mut self, cursors: &Cursors) -> Result<FetchDataResult> {
let latest_block = self.provider.block_hash_and_number().await?;

let from = cursors.head.unwrap_or(self.config.world_block);
let total_remaining_blocks = latest_block.block_number - from;
let blocks_to_process = total_remaining_blocks.min(self.config.blocks_chunk_size);
let to = from + blocks_to_process;
info!(target: LOG_TARGET,
from = %cursors.head.unwrap_or(self.config.world_block),
to = %latest_block.block_number,
"Fetching data from {} to {}",
cursors.head.unwrap_or(self.config.world_block),
latest_block.block_number
);

let instant = Instant::now();
let result = if from < latest_block.block_number {
let from = cursors.head.unwrap_or(self.config.world_block);
if from < latest_block.block_number {
let from = if from == 0 { from } else { from + 1 };
let data = self.fetch_range(from, to, &cursors.cursor_map).await?;
debug!(target: LOG_TARGET, duration = ?instant.elapsed(), from = %from, to = %to, "Fetched data for range.");
FetchDataResult::Range(data)
let data = self.fetch_range(from, latest_block.block_number, &cursors.cursor_map).await?;
debug!(target: LOG_TARGET, duration = ?instant.elapsed(), from = %from, to = %latest_block.block_number, "Fetched data for range.");
Ok(FetchDataResult::Range(data))
} else if self.config.flags.contains(IndexingFlags::PENDING_BLOCKS) {
let data =
self.fetch_pending(latest_block.clone(), cursors.last_pending_block_tx).await?;
let data = self.fetch_pending(latest_block.clone(), cursors.last_pending_block_tx).await?;
debug!(target: LOG_TARGET, duration = ?instant.elapsed(), latest_block_number = %latest_block.block_number, "Fetched pending data.");
if let Some(data) = data {
Ok(if let Some(data) = data {
FetchDataResult::Pending(data)
} else {
FetchDataResult::None
}
})
} else {
FetchDataResult::None
};

Ok(result)
Ok(FetchDataResult::None)
}
}

pub async fn fetch_range(
Expand Down Expand Up @@ -551,45 +552,58 @@ impl<P: Provider + Send + Sync + std::fmt::Debug + 'static> Engine<P> {
}

/// Processes a fetched block range, replaying its transactions chunk by chunk.
///
/// Blocks are walked in ascending order in chunks of
/// `config.blocks_chunk_size`; parallelized event tasks are drained after
/// each chunk so in-flight work stays bounded. Finally the indexing cursors
/// are reset to `data.latest_block_number`.
///
/// NOTE(review): chunking assumes `data.blocks` and `data.transactions`
/// iterate keys in ascending block order (i.e. ordered maps) — confirm
/// against `FetchRangeResult`'s definition.
pub async fn process_range(&mut self, data: FetchRangeResult) -> Result<()> {
    // Blocks whose block-level processing has already been emitted.
    let mut processed_blocks = HashSet::new();
    // Per-contract cursor updates accumulated while processing.
    let mut cursor_map = HashMap::new();

    // First and last block numbers of the fetched range; both default to 0
    // when the range contains no blocks, so the loop body matches nothing.
    let mut current_block = data.blocks.keys().next().copied().unwrap_or(0);
    let end_block = data.blocks.keys().last().copied().unwrap_or(0);

    // Guard against a zero chunk size, which would otherwise underflow
    // `current_block + chunk_size - 1` and never advance the loop.
    let chunk_size = self.config.blocks_chunk_size.max(1);

    while current_block <= end_block {
        // Inclusive upper bound of the current chunk.
        let chunk_end = (current_block + chunk_size - 1).min(end_block);

        // Process the transactions belonging to this chunk of blocks.
        // NOTE(review): this rescans the full transaction map once per chunk
        // (O(txs * chunks)); a `range` query over the ordered key space
        // would avoid the rescan.
        for ((block_number, transaction_hash), events) in data.transactions.iter() {
            if *block_number < current_block || *block_number > chunk_end {
                continue;
            }

            debug!("Processing transaction hash: {:#x}", transaction_hash);
            // Fetch the full transaction only when transaction indexing is
            // enabled; the events alone suffice otherwise.
            let transaction = if self.config.flags.contains(IndexingFlags::TRANSACTIONS) {
                Some(self.provider.get_transaction_by_hash(*transaction_hash).await?)
            } else {
                None
            };

            self.process_transaction_with_events(
                *transaction_hash,
                events.as_slice(),
                *block_number,
                data.blocks[block_number],
                transaction,
                &mut cursor_map,
            )
            .await?;

            // Emit block-level processing exactly once per block.
            if !processed_blocks.contains(block_number) {
                if let Some(ref block_tx) = self.block_tx {
                    block_tx.send(*block_number).await?;
                }

                self.process_block(*block_number, data.blocks[block_number]).await?;
                processed_blocks.insert(*block_number);
            }
        }

        // Drain the parallelized event tasks accumulated for this chunk
        // before moving on to the next one.
        self.process_tasks().await?;

        current_block = chunk_end + 1;
    }

    // Advance all cursors to the end of the fetched range.
    let last_block_timestamp =
        get_block_timestamp(&self.provider, data.latest_block_number).await?;
    self.db.reset_cursors(data.latest_block_number, cursor_map, last_block_timestamp)?;

    Ok(())
Expand Down
Loading