//! reth_optimism_cli/commands/import_receipts.rs
//!
//! Imports OP mainnet receipts from an exported receipts file into static files.

use std::path::{Path, PathBuf};
use clap::Parser;
use reth_cli::chainspec::ChainSpecParser;
use reth_cli_commands::common::{AccessRights, CliNodeTypes, Environment, EnvironmentArgs};
use reth_db::tables;
use reth_downloaders::{
file_client::{ChunkedFileReader, DEFAULT_BYTE_LEN_CHUNK_CHAIN_FILE},
receipt_file_client::ReceiptFileClient,
};
use reth_execution_types::ExecutionOutcome;
use reth_node_core::version::SHORT_VERSION;
use reth_optimism_chainspec::OpChainSpec;
use reth_optimism_primitives::bedrock::is_dup_tx;
use reth_primitives::{NodePrimitives, Receipts};
use reth_provider::{
providers::ProviderNodeTypes, writer::UnifiedStorageWriter, DatabaseProviderFactory,
OriginalValuesKnown, ProviderFactory, StageCheckpointReader, StageCheckpointWriter,
StateWriter, StaticFileProviderFactory, StatsReader, StorageLocation,
};
use reth_stages::{StageCheckpoint, StageId};
use reth_static_file_types::StaticFileSegment;
use tracing::{debug, info, trace, warn};
use crate::receipt_file_codec::HackReceiptFileCodec;
/// Imports OP mainnet receipts from an exported receipts file into static files.
#[derive(Debug, Parser)]
pub struct ImportReceiptsOpCommand<C: ChainSpecParser> {
    #[command(flatten)]
    env: EnvironmentArgs<C>,

    /// Chunk byte length to read from the file at a time.
    #[arg(long, value_name = "CHUNK_LEN", verbatim_doc_comment)]
    chunk_len: Option<u64>,

    /// Path to the receipts file to import.
    #[arg(value_name = "IMPORT_PATH", verbatim_doc_comment)]
    path: PathBuf,
}
impl<C: ChainSpecParser<ChainSpec = OpChainSpec>> ImportReceiptsOpCommand<C> {
    /// Executes the command: reads the receipts file at `path` and writes its contents to
    /// static files.
    pub async fn execute<N: CliNodeTypes<ChainSpec = C::ChainSpec>>(self) -> eyre::Result<()> {
info!(target: "reth::cli", "reth {} starting", SHORT_VERSION);
debug!(target: "reth::cli",
chunk_byte_len=self.chunk_len.unwrap_or(DEFAULT_BYTE_LEN_CHUNK_CHAIN_FILE),
"Chunking receipts import"
);
let Environment { provider_factory, .. } = self.env.init::<N>(AccessRights::RW)?;
import_receipts_from_file(
provider_factory,
self.path,
self.chunk_len,
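            // Clear receipts for blocks containing known duplicated pre-Bedrock transactions on
            // OP Mainnet; those transactions are not imported, so their receipts must be dropped
            // as well.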
|first_block, receipts: &mut Receipts| {
let mut total_filtered_out_dup_txns = 0;
for (index, receipts_for_block) in receipts.iter_mut().enumerate() {
if is_dup_tx(first_block + index as u64) {
receipts_for_block.clear();
total_filtered_out_dup_txns += 1;
}
}
total_filtered_out_dup_txns
},
)
.await
}
}
/// Imports receipts into static files from the export file at `path`, reading and decoding it in
/// chunks. `filter` is called with the first block number of each chunk and may drop receipts,
/// returning the number of entries it filtered out.
pub async fn import_receipts_from_file<N, P, F>(
provider_factory: ProviderFactory<N>,
path: P,
chunk_len: Option<u64>,
filter: F,
) -> eyre::Result<()>
where
N: ProviderNodeTypes<
ChainSpec = OpChainSpec,
Primitives: NodePrimitives<Receipt = reth_primitives::Receipt>,
>,
P: AsRef<Path>,
F: FnMut(u64, &mut Receipts) -> usize,
{
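    // Log existing stage checkpoints for debugging before starting the import.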
for stage in StageId::ALL {
let checkpoint = provider_factory.database_provider_ro()?.get_stage_checkpoint(stage)?;
trace!(target: "reth::cli",
?stage,
?checkpoint,
"Read stage checkpoints from db"
);
}
let reader = ChunkedFileReader::new(&path, chunk_len).await?;
let _ = import_receipts_from_reader(&provider_factory, reader, filter).await?;
info!(target: "reth::cli",
"Receipt file imported"
);
Ok(())
}
/// Imports receipts from `reader` into static files.
///
/// Returns the total number of decoded receipts and the number filtered out by `filter`.
pub async fn import_receipts_from_reader<N, F>(
provider_factory: &ProviderFactory<N>,
mut reader: ChunkedFileReader,
mut filter: F,
) -> eyre::Result<ImportReceiptsResult>
where
N: ProviderNodeTypes<Primitives: NodePrimitives<Receipt = reth_primitives::Receipt>>,
F: FnMut(u64, &mut Receipts) -> usize,
{
let static_file_provider = provider_factory.static_file_provider();
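    // Importing receipts is only supported on a fresh database: bail if any receipts are
    // already present in static files.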
if let Some(num_receipts) =
static_file_provider.get_highest_static_file_tx(StaticFileSegment::Receipts)
{
if num_receipts > 0 {
eyre::bail!("Expected no receipts in storage, but found {num_receipts}.");
}
}
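    // The receipts segment must already exist (created when blocks were imported), even though
    // it must not contain any receipts yet.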
match static_file_provider.get_highest_static_file_block(StaticFileSegment::Receipts) {
Some(receipts_block) => {
if receipts_block > 0 {
eyre::bail!("Expected highest receipt block to be 0, but found {receipts_block}.");
}
}
None => {
eyre::bail!("Receipts was not initialized. Please import blocks and transactions before calling this command.");
}
}
let provider = provider_factory.database_provider_rw()?;
let mut total_decoded_receipts = 0;
let mut total_receipts = 0;
let mut total_filtered_out_dup_txns = 0;
let mut highest_block_receipts = 0;
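    // Receipts can only be imported up to the highest block that already has transactions in
    // static files.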
let highest_block_transactions = static_file_provider
.get_highest_static_file_block(StaticFileSegment::Transactions)
.expect("transaction static files must exist before importing receipts");
while let Some(file_client) =
reader.next_receipts_chunk::<ReceiptFileClient<_>, HackReceiptFileCodec>().await?
{
if highest_block_receipts == highest_block_transactions {
warn!(target: "reth::cli", highest_block_receipts, highest_block_transactions, "Ignoring all other blocks in the file since we have reached the desired height");
break
}
let ReceiptFileClient {
mut receipts,
mut first_block,
total_receipts: total_receipts_chunk,
..
} = file_client;
total_decoded_receipts += total_receipts_chunk;
total_filtered_out_dup_txns += filter(first_block, &mut receipts);
info!(target: "reth::cli",
first_receipts_block=?first_block,
total_receipts_chunk,
"Importing receipt file chunk"
);
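        // The genesis block has no transactions, so drop its empty receipts entry and start the
        // import at block 1.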
if first_block == 0 {
let genesis_receipts = receipts.remove(0);
debug_assert!(genesis_receipts.is_empty());
first_block = 1;
}
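        // If the chunk decodes blocks beyond the highest imported transaction block, truncate
        // the excess so the receipts never run past the imported chain.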
highest_block_receipts = first_block + receipts.len() as u64 - 1;
if highest_block_receipts > highest_block_transactions {
let excess = highest_block_receipts - highest_block_transactions;
highest_block_receipts -= excess;
receipts.receipt_vec.truncate(receipts.len() - excess as usize);
warn!(target: "reth::cli", highest_block_receipts, "Too many decoded blocks, ignoring the last {excess}.");
}
total_receipts += receipts.iter().map(|v| v.len()).sum::<usize>();
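        // Write this chunk's receipts to static files via an `ExecutionOutcome` that carries no
        // state changes.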
let execution_outcome =
ExecutionOutcome::new(Default::default(), receipts, first_block, Default::default());
provider.write_state(
execution_outcome,
OriginalValuesKnown::Yes,
StorageLocation::StaticFiles,
)?;
}
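    // Sanity-check that the imported receipts line up with the imported transactions, both in
    // count and in block height.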
let total_imported_txns = static_file_provider
.count_entries::<tables::Transactions>()
.expect("transaction static files must exist before importing receipts");
if total_receipts != total_imported_txns {
eyre::bail!("Number of receipts ({total_receipts}) inconsistent with transactions {total_imported_txns}")
}
if highest_block_receipts != highest_block_transactions {
eyre::bail!("Receipt block height ({highest_block_receipts}) inconsistent with transactions' {highest_block_transactions}")
}
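    // Record the Execution stage checkpoint at the receipts height and commit the database
    // transaction and static file writes.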
provider
.save_stage_checkpoint(StageId::Execution, StageCheckpoint::new(highest_block_receipts))?;
UnifiedStorageWriter::commit(provider)?;
Ok(ImportReceiptsResult { total_decoded_receipts, total_filtered_out_dup_txns })
}
/// Result of importing receipts in chunks.
#[derive(Debug)]
pub struct ImportReceiptsResult {
    /// Total number of receipts decoded from the import file.
    pub total_decoded_receipts: usize,
    /// Total number of receipts filtered out as belonging to known duplicate transactions.
    pub total_filtered_out_dup_txns: usize,
}
#[cfg(test)]
mod test {
use alloy_primitives::hex;
use reth_db_common::init::init_genesis;
use reth_stages::test_utils::TestStageDB;
use tempfile::tempfile;
use tokio::{
fs::File,
io::{AsyncSeekExt, AsyncWriteExt, SeekFrom},
};
use crate::receipt_file_codec::test::{
HACK_RECEIPT_ENCODED_BLOCK_1, HACK_RECEIPT_ENCODED_BLOCK_2, HACK_RECEIPT_ENCODED_BLOCK_3,
};
use super::*;
const EMPTY_RECEIPTS_GENESIS_BLOCK: &[u8] = &hex!("c0");
#[ignore]
#[tokio::test]
async fn filter_out_genesis_block_receipts() {
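        // Build a receipts export file: an empty genesis entry followed by three encoded blocks.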
let mut f: File = tempfile().unwrap().into();
f.write_all(EMPTY_RECEIPTS_GENESIS_BLOCK).await.unwrap();
f.write_all(HACK_RECEIPT_ENCODED_BLOCK_1).await.unwrap();
f.write_all(HACK_RECEIPT_ENCODED_BLOCK_2).await.unwrap();
f.write_all(HACK_RECEIPT_ENCODED_BLOCK_3).await.unwrap();
f.flush().await.unwrap();
f.seek(SeekFrom::Start(0)).await.unwrap();
let reader =
ChunkedFileReader::from_file(f, DEFAULT_BYTE_LEN_CHUNK_CHAIN_FILE).await.unwrap();
        let db = TestStageDB::default();
        init_genesis(&db.factory).unwrap();

        // Import against the same provider factory that genesis was initialized on.
        let ImportReceiptsResult { total_decoded_receipts, total_filtered_out_dup_txns } =
            import_receipts_from_reader(&db.factory, reader, |_, _| 0)
                .await
                .unwrap();
assert_eq!(total_decoded_receipts, 3);
assert_eq!(total_filtered_out_dup_txns, 0);
}
}