1use alloy_primitives::B256;
4use futures::StreamExt;
5use reth_config::Config;
6use reth_consensus::FullConsensus;
7use reth_db_api::{tables, transaction::DbTx};
8use reth_downloaders::{
9 bodies::bodies::BodiesDownloaderBuilder,
10 file_client::{ChunkedFileReader, FileClient, DEFAULT_BYTE_LEN_CHUNK_CHAIN_FILE},
11 headers::reverse_headers::ReverseHeadersDownloaderBuilder,
12};
13use reth_evm::ConfigureEvm;
14use reth_network_p2p::{
15 bodies::downloader::BodyDownloader,
16 headers::downloader::{HeaderDownloader, SyncTarget},
17};
18use reth_node_api::BlockTy;
19use reth_node_events::node::NodeEvent;
20use reth_provider::{
21 providers::ProviderNodeTypes, BlockNumReader, HeaderProvider, ProviderError, ProviderFactory,
22 StageCheckpointReader,
23};
24use reth_prune::PruneModes;
25use reth_stages::{prelude::*, ControlFlow, Pipeline, StageId, StageSet};
26use reth_static_file::StaticFileProducer;
27use std::{path::Path, sync::Arc};
28use tokio::sync::watch;
29use tracing::{debug, error, info, warn};
30
/// Configuration controlling how blocks are imported from a chain file.
#[derive(Debug, Clone, Default)]
pub struct ImportConfig {
    /// Disables the pipeline stages that require state (the `StageId::STATE_REQUIRED`
    /// set, e.g. execution), importing headers and bodies only.
    pub no_state: bool,
    /// Byte length of each chunk read from the chain file; when `None`, the reader's
    /// default (`DEFAULT_BYTE_LEN_CHUNK_CHAIN_FILE`) is used.
    pub chunk_len: Option<u64>,
    /// When set, the import errors out on the first invalid block instead of stopping
    /// gracefully at the last valid block.
    pub fail_on_invalid_block: bool,
}
42
/// Outcome summary of a chain-file import run.
#[derive(Debug)]
pub struct ImportResult {
    /// Number of blocks decoded from the chain file.
    pub total_decoded_blocks: usize,
    /// Number of transactions decoded from the chain file.
    pub total_decoded_txns: usize,
    /// Number of blocks actually written to the database.
    pub total_imported_blocks: usize,
    /// Number of transactions actually written to the database.
    pub total_imported_txns: usize,
    /// Whether the import halted early because an invalid block was encountered.
    pub stopped_on_invalid_block: bool,
    /// Number of the offending block, if the import stopped on an invalid one.
    pub bad_block: Option<u64>,
    /// Last block that was still valid when the import stopped early.
    pub last_valid_block: Option<u64>,
}

impl ImportResult {
    /// Returns `true` when every decoded block and transaction made it into the
    /// database.
    pub fn is_complete(&self) -> bool {
        let decoded = (self.total_decoded_blocks, self.total_decoded_txns);
        let imported = (self.total_imported_blocks, self.total_imported_txns);
        decoded == imported
    }

    /// Returns `true` when the import either finished completely or deliberately
    /// stopped at the last valid block after hitting an invalid one.
    pub fn is_successful(&self) -> bool {
        self.stopped_on_invalid_block || self.is_complete()
    }
}
77
/// Imports blocks from the chain file at `path` into the database behind
/// `provider_factory`, driving a staged sync pipeline over each file chunk.
///
/// The file is read in byte-bounded chunks (see [`ImportConfig::chunk_len`]); each chunk
/// gets a fresh pipeline built by [`build_import_pipeline_impl`], with the file contents
/// serving as the download source. Import can be interrupted with ctrl-c. Unless
/// [`ImportConfig::fail_on_invalid_block`] is set, hitting an invalid block stops the
/// import gracefully at the last valid block instead of returning an error.
///
/// # Errors
///
/// Returns an error if the file cannot be read, if a chunk has no tip, if the pipeline
/// fails, or — with `fail_on_invalid_block` enabled — if an invalid block is hit.
pub async fn import_blocks_from_file<N>(
    path: &Path,
    import_config: ImportConfig,
    provider_factory: ProviderFactory<N>,
    config: &Config,
    executor: impl ConfigureEvm<Primitives = N::Primitives> + 'static,
    consensus: Arc<impl FullConsensus<N::Primitives> + 'static>,
    runtime: reth_tasks::Runtime,
) -> eyre::Result<ImportResult>
where
    N: ProviderNodeTypes,
{
    if import_config.no_state {
        info!(target: "reth::import", "Disabled stages requiring state");
    }

    debug!(target: "reth::import",
        chunk_byte_len=import_config.chunk_len.unwrap_or(DEFAULT_BYTE_LEN_CHUNK_CHAIN_FILE),
        "Chunking chain import"
    );

    info!(target: "reth::import", "Consensus engine initialized");

    // Reads the chain file chunk-by-chunk so arbitrarily large files never need to be
    // loaded into memory in one piece.
    let mut reader = ChunkedFileReader::new(path, import_config.chunk_len).await?;

    // Snapshot the lookup-table entry counts before importing; the number of newly
    // imported blocks/transactions is computed as a delta against these at the end.
    let provider = provider_factory.provider()?;
    let init_blocks = provider.tx_ref().entries::<tables::HeaderNumbers>()?;
    let init_txns = provider.tx_ref().entries::<tables::TransactionHashNumbers>()?;
    drop(provider);

    let mut total_decoded_blocks = 0;
    let mut total_decoded_txns = 0;

    // Current database tip; each chunk read from the file must attach to this header.
    let mut sealed_header = provider_factory
        .sealed_header(provider_factory.last_block_number()?)?
        .expect("should have genesis");

    let static_file_producer =
        StaticFileProducer::new(provider_factory.clone(), PruneModes::default());

    // Records an early stop caused by an invalid block (only populated when
    // `fail_on_invalid_block` is disabled).
    let mut stopped_on_invalid_block = false;
    let mut bad_block_number: Option<u64> = None;
    let mut last_valid_block_number: Option<u64> = None;

    while let Some(file_client) =
        reader.next_chunk::<BlockTy<N>>(consensus.clone(), Some(sealed_header)).await?
    {
        info!(target: "reth::import",
            "Importing chain file chunk"
        );

        let tip = file_client.tip().ok_or(eyre::eyre!("file client has no tip"))?;
        info!(target: "reth::import", "Chain file chunk read");

        total_decoded_blocks += file_client.headers_len();
        total_decoded_txns += file_client.total_transactions();

        // Fresh pipeline per chunk: the file client plays the role of the network that
        // headers and bodies are "downloaded" from.
        let (mut pipeline, events) = build_import_pipeline_impl(
            config,
            provider_factory.clone(),
            &consensus,
            Arc::new(file_client),
            static_file_producer.clone(),
            import_config.no_state,
            executor.clone(),
            runtime.clone(),
        )?;

        // No live network announces a tip here, so set it manually to the chunk's tip.
        pipeline.set_tip(tip);
        debug!(target: "reth::import", ?tip, "Tip manually set");

        let latest_block_number =
            provider_factory.get_stage_checkpoint(StageId::Finish)?.map(|ch| ch.block_number);
        tokio::spawn(reth_node_events::node::handle_events(None, latest_block_number, events));

        info!(target: "reth::import", "Starting sync pipeline");
        if import_config.fail_on_invalid_block {
            // Strict mode: any pipeline error (including an invalid block) is
            // propagated to the caller immediately via `?`.
            tokio::select! {
                res = pipeline.run() => res?,
                _ = tokio::signal::ctrl_c() => {
                    info!(target: "reth::import", "Import interrupted by user");
                    break;
                },
            }
        } else {
            // Lenient mode: run one pipeline pass and inspect the resulting control
            // flow, so an invalid block stops the import gracefully instead.
            let result = tokio::select! {
                res = pipeline.run_loop() => res,
                _ = tokio::signal::ctrl_c() => {
                    info!(target: "reth::import", "Import interrupted by user");
                    break;
                },
            };

            match result {
                Ok(ControlFlow::Unwind { target, bad_block }) => {
                    // Invalid block hit: record it and stop at the last valid block
                    // rather than erroring out.
                    let bad = bad_block.block.number;
                    warn!(
                        target: "reth::import",
                        bad_block = bad,
                        last_valid_block = target,
                        "Invalid block encountered during import; stopping at last valid block"
                    );
                    stopped_on_invalid_block = true;
                    bad_block_number = Some(bad);
                    last_valid_block_number = Some(target);
                    break;
                }
                Ok(ControlFlow::Continue { block_number }) => {
                    debug!(target: "reth::import", block_number, "Pipeline chunk completed");
                }
                Ok(ControlFlow::NoProgress { block_number }) => {
                    debug!(target: "reth::import", ?block_number, "Pipeline made no progress");
                }
                Err(e) => {
                    // Non-consensus pipeline errors are still fatal in lenient mode.
                    return Err(e.into());
                }
            }
        }

        // Re-anchor on the new database tip so the next chunk attaches correctly.
        sealed_header = provider_factory
            .sealed_header(provider_factory.last_block_number()?)?
            .expect("should have genesis");
    }

    // Imported totals = growth of the lookup tables since the import started.
    let provider = provider_factory.provider()?;
    let total_imported_blocks = provider.tx_ref().entries::<tables::HeaderNumbers>()? - init_blocks;
    let total_imported_txns =
        provider.tx_ref().entries::<tables::TransactionHashNumbers>()? - init_txns;

    let result = ImportResult {
        total_decoded_blocks,
        total_decoded_txns,
        total_imported_blocks,
        total_imported_txns,
        stopped_on_invalid_block,
        bad_block: bad_block_number,
        last_valid_block: last_valid_block_number,
    };

    if result.stopped_on_invalid_block {
        info!(target: "reth::import",
            total_imported_blocks,
            total_imported_txns,
            bad_block = ?result.bad_block,
            last_valid_block = ?result.last_valid_block,
            "Import stopped at last valid block due to invalid block"
        );
    } else if !result.is_complete() {
        error!(target: "reth::import",
            total_decoded_blocks,
            total_imported_blocks,
            total_decoded_txns,
            total_imported_txns,
            "Chain was partially imported"
        );
    } else {
        info!(target: "reth::import",
            total_imported_blocks,
            total_imported_txns,
            "Chain was fully imported"
        );
    }

    Ok(result)
}
257
/// Builds the staged sync [`Pipeline`] used to import one chain-file chunk, together
/// with the stream of node events the pipeline emits.
///
/// The `file_client` serves as the source for both the header and body downloaders, so
/// the pipeline "syncs" entirely from the already-read file chunk. When `disable_exec`
/// is set, every stage in `StageId::STATE_REQUIRED` is disabled.
///
/// # Errors
///
/// Returns an error if the file client does not hold canonical blocks or if the local
/// head header cannot be found.
#[expect(clippy::too_many_arguments)]
pub fn build_import_pipeline_impl<N, C, E>(
    config: &Config,
    provider_factory: ProviderFactory<N>,
    consensus: &Arc<C>,
    file_client: Arc<FileClient<BlockTy<N>>>,
    static_file_producer: StaticFileProducer<ProviderFactory<N>>,
    disable_exec: bool,
    evm_config: E,
    runtime: reth_tasks::Runtime,
) -> eyre::Result<(Pipeline<N>, impl futures::Stream<Item = NodeEvent<N::Primitives>> + use<N, C, E>)>
where
    N: ProviderNodeTypes,
    C: FullConsensus<N::Primitives> + 'static,
    E: ConfigureEvm<Primitives = N::Primitives> + 'static,
{
    // Importing a non-canonical (gapped / out-of-order) block range is unsupported.
    if !file_client.has_canonical_blocks() {
        eyre::bail!("unable to import non canonical blocks");
    }

    let last_block_number = provider_factory.last_block_number()?;
    let local_head = provider_factory
        .sealed_header(last_block_number)?
        .ok_or_else(|| ProviderError::HeaderNotFound(last_block_number.into()))?;

    // Header downloader syncs backwards from the chunk tip down to the local head.
    let mut header_downloader = ReverseHeadersDownloaderBuilder::new(config.stages.headers)
        .build(file_client.clone(), consensus.clone())
        .into_task_with(&runtime);
    header_downloader.update_local_head(local_head);
    // NOTE(review): `tip()`/`min_block()`/`max_block()` are unwrapped here — presumably
    // guaranteed `Some` once `has_canonical_blocks()` returned true; confirm invariant.
    header_downloader.update_sync_target(SyncTarget::Tip(file_client.tip().unwrap()));

    // Body downloader covers exactly the block range contained in this chunk.
    let mut body_downloader = BodiesDownloaderBuilder::new(config.stages.bodies)
        .build(file_client.clone(), consensus.clone(), provider_factory.clone())
        .into_task_with(&runtime);
    body_downloader
        .set_download_range(file_client.min_block().unwrap()..=file_client.max_block().unwrap())
        .expect("failed to set download range");

    // Tip channel starts at the zero hash; the caller drives it through
    // `Pipeline::set_tip` before running the pipeline.
    let (tip_tx, tip_rx) = watch::channel(B256::ZERO);

    let max_block = file_client.max_block().unwrap_or(0);

    let pipeline = Pipeline::builder()
        .with_tip_sender(tip_tx)
        // Stop once the last block of the chunk has been processed.
        .with_max_block(max_block)
        .with_fail_on_unwind(true)
        .add_stages(
            DefaultStages::new(
                provider_factory.clone(),
                tip_rx,
                consensus.clone(),
                header_downloader,
                body_downloader,
                evm_config,
                config.stages.clone(),
                PruneModes::default(),
                None,
            )
            .builder()
            // Skip execution and other state-dependent stages for no-state imports.
            .disable_all_if(&StageId::STATE_REQUIRED, || disable_exec),
        )
        .build(provider_factory, static_file_producer);

    let events = pipeline.events().map(Into::into);

    Ok((pipeline, events))
}