//! Lazy overlay computation for trie input.
//!
//! This module provides [`LazyOverlay`], a type that computes the [`TrieInputSorted`]
//! lazily on first access. This allows execution to start before the trie overlay
//! is fully computed.

use crate::DeferredTrieData;
use alloy_primitives::B256;
use reth_trie::{updates::TrieUpdatesSorted, HashedPostStateSorted, TrieInputSorted};
use std::sync::{Arc, OnceLock};
use tracing::{debug, trace};

/// Inputs captured for lazy overlay computation.
#[derive(Clone)]
struct LazyOverlayInputs {
    /// The persisted ancestor hash (anchor) this overlay should be built on.
    anchor_hash: B256,
    /// Deferred trie data handles for all in-memory blocks (newest to oldest).
    blocks: Vec<DeferredTrieData>,
}

/// Lazily computed trie overlay.
///
/// Captures the inputs needed to compute a [`TrieInputSorted`] and defers the actual
/// computation until first access. This is conceptually similar to [`DeferredTrieData`]
/// but for overlay computation.
///
/// # Fast Path vs Slow Path
///
/// - **Fast path**: If the tip block's cached `anchored_trie_input` is ready and its `anchor_hash`
///   matches our expected anchor, we can reuse it directly (O(1)).
/// - **Slow path**: Otherwise, we merge all ancestor blocks' trie data into a new overlay.
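///
/// # Example
///
/// A minimal usage sketch (not compiled as a doctest); `anchor_hash` and
/// `deferred_blocks` stand in for values produced by the in-memory chain state:
///
/// ```ignore
/// let overlay = LazyOverlay::new(anchor_hash, deferred_blocks);
/// // Execution can proceed; nothing is computed yet.
/// assert!(!overlay.is_computed());
/// // First access resolves the deferred data and builds (or reuses) the overlay.
/// let (nodes, state) = overlay.as_overlay();
/// ```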
#[derive(Clone)]
pub struct LazyOverlay {
    /// Computed result, cached after first access.
    inner: Arc<OnceLock<TrieInputSorted>>,
    /// Inputs for lazy computation.
    inputs: LazyOverlayInputs,
}

impl std::fmt::Debug for LazyOverlay {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("LazyOverlay")
            .field("anchor_hash", &self.inputs.anchor_hash)
            .field("num_blocks", &self.inputs.blocks.len())
            .field("computed", &self.inner.get().is_some())
            .finish()
    }
}

impl LazyOverlay {
    /// Creates a new lazy overlay with the given anchor hash and block handles.
    ///
    /// # Arguments
    ///
    /// * `anchor_hash` - The persisted ancestor hash this overlay is built on top of
    /// * `blocks` - Deferred trie data handles for in-memory blocks (newest to oldest)
    pub fn new(anchor_hash: B256, blocks: Vec<DeferredTrieData>) -> Self {
        Self { inner: Arc::new(OnceLock::new()), inputs: LazyOverlayInputs { anchor_hash, blocks } }
    }

    /// Returns the anchor hash this overlay is built on.
    pub const fn anchor_hash(&self) -> B256 {
        self.inputs.anchor_hash
    }

    /// Returns the number of in-memory blocks this overlay covers.
    pub const fn num_blocks(&self) -> usize {
        self.inputs.blocks.len()
    }

    /// Returns `true` if the overlay has already been computed.
    pub fn is_computed(&self) -> bool {
        self.inner.get().is_some()
    }

    /// Returns the computed trie input, computing it if necessary.
    ///
    /// The first call triggers computation (which may block waiting for deferred data).
    /// Subsequent calls return the cached result immediately.
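    ///
    /// # Example
    ///
    /// A sketch of the caching behavior (not compiled as a doctest; `overlay` is
    /// assumed to be a [`LazyOverlay`]):
    ///
    /// ```ignore
    /// let first = overlay.get();
    /// let second = overlay.get();
    /// // Both calls return a reference to the same cached value.
    /// assert!(std::ptr::eq(first, second));
    /// ```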
    pub fn get(&self) -> &TrieInputSorted {
        self.inner.get_or_init(|| self.compute())
    }

    /// Returns the overlay as a `(nodes, state)` tuple for use with `OverlayStateProviderFactory`.
    pub fn as_overlay(&self) -> (Arc<TrieUpdatesSorted>, Arc<HashedPostStateSorted>) {
        let input = self.get();
        (Arc::clone(&input.nodes), Arc::clone(&input.state))
    }

    /// Compute the trie input overlay.
    fn compute(&self) -> TrieInputSorted {
        let anchor_hash = self.inputs.anchor_hash;
        let blocks = &self.inputs.blocks;

        if blocks.is_empty() {
            debug!(target: "chain_state::lazy_overlay", "No in-memory blocks, returning empty overlay");
            return TrieInputSorted::default();
        }

        // Fast path: Check if tip block's overlay is ready and anchor matches.
        // The tip block (first in list) has the cumulative overlay from all ancestors.
        if let Some(tip) = blocks.first() {
            let data = tip.wait_cloned();
            if let Some(anchored) = &data.anchored_trie_input {
                if anchored.anchor_hash == anchor_hash {
                    trace!(target: "chain_state::lazy_overlay", %anchor_hash, "Reusing tip block's cached overlay (fast path)");
                    return (*anchored.trie_input).clone();
                }
                debug!(
                    target: "chain_state::lazy_overlay",
                    computed_anchor = %anchored.anchor_hash,
                    %anchor_hash,
                    "Anchor mismatch, falling back to merge"
                );
            }
        }

        // Slow path: Merge all blocks' trie data into a new overlay.
        debug!(target: "chain_state::lazy_overlay", num_blocks = blocks.len(), "Merging blocks (slow path)");
        Self::merge_blocks(blocks)
    }

    /// Merge all blocks' trie data into a single [`TrieInputSorted`].
    ///
    /// Blocks are ordered newest to oldest.
    fn merge_blocks(blocks: &[DeferredTrieData]) -> TrieInputSorted {
        if blocks.is_empty() {
            return TrieInputSorted::default();
        }

        // Each `wait_cloned` call may block until that block's deferred trie data is
        // ready; the handles are queried once for the hashed state and once for the
        // trie updates.
        let state =
            HashedPostStateSorted::merge_batch(blocks.iter().map(|b| b.wait_cloned().hashed_state));
        let nodes =
            TrieUpdatesSorted::merge_batch(blocks.iter().map(|b| b.wait_cloned().trie_updates));

        TrieInputSorted { state, nodes, prefix_sets: Default::default() }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use reth_trie::{updates::TrieUpdates, HashedPostState};

    fn empty_deferred(anchor: B256) -> DeferredTrieData {
        DeferredTrieData::pending(
            Arc::new(HashedPostState::default()),
            Arc::new(TrieUpdates::default()),
            anchor,
            Vec::new(),
        )
    }

    #[test]
    fn empty_blocks_returns_default() {
        let overlay = LazyOverlay::new(B256::ZERO, vec![]);
        let result = overlay.get();
        assert!(result.state.is_empty());
        assert!(result.nodes.is_empty());
    }

    #[test]
    fn single_block_uses_data_directly() {
        let anchor = B256::random();
        let deferred = empty_deferred(anchor);
        let overlay = LazyOverlay::new(anchor, vec![deferred]);

        assert!(!overlay.is_computed());
        let _ = overlay.get();
        assert!(overlay.is_computed());
    }

    #[test]
    fn cached_after_first_access() {
        let overlay = LazyOverlay::new(B256::ZERO, vec![]);

        // First access computes
        let _ = overlay.get();
        assert!(overlay.is_computed());

        // Second access uses cache
        let _ = overlay.get();
        assert!(overlay.is_computed());
    }
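
    // A sketch of an additional test covering the accessor methods and the
    // `(nodes, state)` tuple returned by `as_overlay`; it only uses APIs that
    // already appear in this file.
    #[test]
    fn as_overlay_matches_get() {
        let anchor = B256::random();
        let overlay = LazyOverlay::new(anchor, vec![empty_deferred(anchor)]);

        assert_eq!(overlay.anchor_hash(), anchor);
        assert_eq!(overlay.num_blocks(), 1);

        let (nodes, state) = overlay.as_overlay();
        let input = overlay.get();
        // `as_overlay` clones the `Arc`s held by the cached trie input.
        assert!(Arc::ptr_eq(&nodes, &input.nodes));
        assert!(Arc::ptr_eq(&state, &input.state));
    }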
}